
Commit

Add hash map for metadata fields and add a 'zonMeta' attribute to output nix expression
icetan committed Jun 11, 2024
1 parent 4394c25 commit 60317f4
Showing 4 changed files with 98 additions and 47 deletions.
36 changes: 33 additions & 3 deletions src/codegen.zig
@@ -6,14 +6,15 @@ const mem = std.mem;
 const Dependency = @import("Dependency.zig");
 
 const Entry = StringHashMap(Dependency).Entry;
+const MetaEntry = StringHashMap([]const u8).Entry;
 
-pub fn write(alloc: Allocator, out: anytype, deps: StringHashMap(Dependency)) !void {
+pub fn write(alloc: Allocator, out: anytype, meta: StringHashMap([]const u8), deps: StringHashMap(Dependency)) !void {
     try out.writeAll(
         \\# generated by zon2nix (https://github.com/nix-community/zon2nix)
         \\
         \\{ linkFarm, fetchzip }:
         \\
-        \\linkFarm "zig-packages" [
+        \\(linkFarm "zig-packages" [
         \\
     );
 
@@ -40,9 +41,38 @@ pub fn write(alloc: Allocator, out: anytype, deps: StringHashMap(Dependency)) !void {
         , .{ key, dep.url, dep.nix_hash });
     }
 
-    try out.writeAll("]\n");
+    try out.writeAll("])");
+
+    const meta_len = meta.count();
+    if (meta_len > 0) {
+        try out.writeAll(".overrideAttrs {\n passthru.zonMeta = {\n");
+
+        var meta_entries = try alloc.alloc(MetaEntry, meta_len);
+        var meta_iter = meta.iterator();
+        for (0..meta_len) |i| {
+            meta_entries[i] = meta_iter.next().?;
+        }
+        mem.sortUnstable(MetaEntry, meta_entries, {}, metaLessThan);
+
+        for (meta_entries) |entry| {
+            const key = entry.key_ptr.*;
+            const value = entry.value_ptr.*;
+
+            try out.print(
+                \\ "{s}" = "{s}";
+                \\
+            , .{ key, value });
+        }
+
+        try out.writeAll(" };\n}");
+    }
+    try out.writeAll("\n");
 }
 
 fn lessThan(_: void, lhs: Entry, rhs: Entry) bool {
     return mem.order(u8, lhs.key_ptr.*, rhs.key_ptr.*) == .lt;
 }
+
+fn metaLessThan(_: void, lhs: MetaEntry, rhs: MetaEntry) bool {
+    return mem.order(u8, lhs.key_ptr.*, rhs.key_ptr.*) == .lt;
+}
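
With this change the generated expression wraps the linkFarm call in parentheses so it can be extended with .overrideAttrs, and the collected metadata is exposed under passthru.zonMeta, with entries sorted for deterministic output. A sketch of the resulting file for a hypothetical project that declares .name and .version in its build.zig.zon (dependency entries elided, whitespace approximate):

# generated by zon2nix (https://github.com/nix-community/zon2nix)

{ linkFarm, fetchzip }:

(linkFarm "zig-packages" [
  # ... one attrset per dependency, unchanged from before ...
]).overrideAttrs {
  passthru.zonMeta = {
    "name" = "my-project";
    "version" = "0.1.0";
  };
}

Because the attributes sit in passthru, they become attributes of the derivation itself without changing what it builds.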
3 changes: 2 additions & 1 deletion src/fetch.zig
@@ -79,7 +79,8 @@ pub fn fetch(alloc: Allocator, deps: *StringHashMap(Dependency)) !void {
         };
         defer file.close();
 
-        try parse(alloc, deps, file);
+        var meta = StringHashMap([]const u8).init(alloc);
+        try parse(alloc, &meta, deps, file);
         if (deps.count() > len_before) {
            done = false;
         }
5 changes: 3 additions & 2 deletions src/main.zig
@@ -28,12 +28,13 @@ pub fn main() !void {
     defer arena.deinit();
     const alloc = arena.allocator();
 
+    var meta = StringHashMap([]const u8).init(alloc);
     var deps = StringHashMap(Dependency).init(alloc);
-    try parse(alloc, &deps, file);
+    try parse(alloc, &meta, &deps, file);
     try fetch(alloc, &deps);
 
     var out = io.bufferedWriter(io.getStdOut().writer());
-    try write(alloc, out.writer(), deps);
+    try write(alloc, out.writer(), meta, deps);
     try out.flush();
 }

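With main.zig now threading meta into write, the metadata is available to downstream Nix code. A minimal consumer sketch, assuming the generated expression was saved as deps.nix and a recent nixpkgs where overrideAttrs accepts a plain attribute set:

let
  pkgs = import <nixpkgs> { };
  # callPackage supplies the linkFarm and fetchzip arguments
  # expected by the generated file.
  deps = pkgs.callPackage ./deps.nix { };
in
  deps.zonMeta.version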
101 changes: 60 additions & 41 deletions src/parse.zig
@@ -9,7 +9,7 @@ const string_literal = std.zig.string_literal;
 
 const Dependency = @import("Dependency.zig");
 
-pub fn parse(alloc: Allocator, deps: *StringHashMap(Dependency), file: File) !void {
+pub fn parse(alloc: Allocator, meta: *StringHashMap([]const u8), deps: *StringHashMap(Dependency), file: File) !void {
     const content = try alloc.allocSentinel(u8, try file.getEndPos(), 0);
     _ = try file.reader().readAll(content);
 
@@ -21,44 +21,16 @@ pub fn parse(alloc: Allocator, deps: *StringHashMap(Dependency), file: File) !void {
     };
 
     for (root_init.ast.fields) |field_idx| {
-        if (!mem.eql(u8, try parseFieldName(alloc, ast, field_idx), "dependencies")) {
-            continue;
-        }
-
-        const deps_init = ast.fullStructInit(&buf, field_idx) orelse {
-            return error.ParseError;
-        };
-
-        for (deps_init.ast.fields) |dep_idx| {
-            var dep: Dependency = .{
-                .url = undefined,
-                .nix_hash = undefined,
-                .done = false,
-            };
-            var hash: []const u8 = undefined;
-            var has_url = false;
-            var has_hash = false;
-
-            const dep_init = ast.fullStructInit(&buf, dep_idx) orelse {
-                return error.parseError;
-            };
-
-            for (dep_init.ast.fields) |dep_field_idx| {
-                const name = try parseFieldName(alloc, ast, dep_field_idx);
-
-                if (mem.eql(u8, name, "url")) {
-                    dep.url = try parseString(alloc, ast, dep_field_idx);
-                    has_url = true;
-                } else if (mem.eql(u8, name, "hash")) {
-                    hash = try parseString(alloc, ast, dep_field_idx);
-                    has_hash = true;
-                }
-            }
-
-            if (has_url and has_hash) {
-                _ = try deps.getOrPutValue(hash, dep);
-            } else {
-                return error.parseError;
+        const field_name = try parseFieldName(alloc, ast, field_idx);
+        if (mem.eql(u8, field_name, "dependencies")) {
+            try parseDependency(alloc, ast, field_idx, deps);
+        } else if (mem.eql(u8, field_name, "paths")) {
+            // TODO: implement parsing of 'paths' array
+        } else {
+            if (parseString(alloc, ast, field_idx)) |value| {
+                _ = try meta.getOrPutValue(field_name, value);
+            } else |_| {
+                // Ignore field if metadata value isn't a string.
             }
         }
     }
@@ -70,7 +42,53 @@ fn parseFieldName(alloc: Allocator, ast: Ast, idx: Index) ![]const u8 {
 }
 
 fn parseString(alloc: Allocator, ast: Ast, idx: Index) ![]const u8 {
-    return string_literal.parseAlloc(alloc, ast.tokenSlice(ast.nodes.items(.main_token)[idx]));
+    const token = ast.tokenSlice(ast.nodes.items(.main_token)[idx]);
+    return switch (token[0]) {
+        // Check if the start of the token looks like a string to avoid
+        // unreachable error when trying to parse a non-string.
+        '"', '\\' => string_literal.parseAlloc(alloc, token),
+        else => error.ParseError,
+    };
 }
 
+fn parseDependency(alloc: Allocator, ast: Ast, field_idx: Index, deps: *StringHashMap(Dependency)) !void {
+    var buf: [2]Index = undefined;
+    const deps_init = ast.fullStructInit(&buf, field_idx) orelse {
+        return error.ParseError;
+    };
+
+    for (deps_init.ast.fields) |dep_idx| {
+        var dep: Dependency = .{
+            .url = undefined,
+            .nix_hash = undefined,
+            .done = false,
+        };
+        var hash: []const u8 = undefined;
+        var has_url = false;
+        var has_hash = false;
+
+        const dep_init = ast.fullStructInit(&buf, dep_idx) orelse {
+            return error.parseError;
+        };
+
+        for (dep_init.ast.fields) |dep_field_idx| {
+            const name = try parseFieldName(alloc, ast, dep_field_idx);
+
+            if (mem.eql(u8, name, "url")) {
+                dep.url = try parseString(alloc, ast, dep_field_idx);
+                has_url = true;
+            } else if (mem.eql(u8, name, "hash")) {
+                hash = try parseString(alloc, ast, dep_field_idx);
+                has_hash = true;
+            }
+        }
+
+        if (has_url and has_hash) {
+            _ = try deps.getOrPutValue(hash, dep);
+        } else {
+            return error.parseError;
+        }
+    }
+}
+
 test parse {
@@ -82,9 +100,10 @@ test parse {
     defer arena.deinit();
     const alloc = arena.allocator();
 
+    var meta = StringHashMap([]const u8).init(alloc);
     var deps = StringHashMap(Dependency).init(alloc);
     const basic = try fs.cwd().openFile("fixtures/basic.zon", .{});
-    try parse(alloc, &deps, basic);
+    try parse(alloc, &meta, &deps, basic);
     basic.close();
 
     try testing.expectEqual(deps.count(), 3);

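For reference, parse now routes top-level fields three ways: dependencies goes through parseDependency, paths is skipped per the TODO, and any other field whose value is a string literal is recorded as metadata. Given a hypothetical build.zig.zon like the one below (values are made up; the repository's fixtures/basic.zon may differ), zonMeta would end up holding name, version, and minimum_zig_version:

.{
    .name = "my-project",
    .version = "0.1.0",
    .minimum_zig_version = "0.12.0",
    .dependencies = .{
        .known_folders = .{
            .url = "https://github.com/ziglibs/known-folders/archive/<commit>.tar.gz",
            .hash = "1220aaaa...",
        },
    },
    .paths = .{""},
}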