Skip to content

Commit cf207df

Browse files
committed
Module: improve source spans for initializers and var types
```zig
const U = union { foo: u32, bar: u32 };
test {
    var a = U{ .foo = 1213, .bar = 1123 };
    _ = a;
}
test {
    var a: (123 + 5238094) = 0;
    _ = a;
}
```
before:
```
:30: note: additional initializer here
    var a = U{ .foo = 1213, .bar = 1123 };
                             ^~~
:12: error: expected type 'type', found 'comptime_int'
    var a: (123 + 5238094) = 0;
           ^
```
after:
```
:30: note: additional initializer here
    var a = U{ .foo = 1213, .bar = 1123 };
                            ~^~~~~~~~~~
:12: error: expected type 'type', found 'comptime_int'
    var a: (123 + 5238094) = 0;
           ^~~~~~~~~~~~~~~
```
1 parent 1463144 commit cf207df

File tree

2 files changed

+38
-33
lines changed

2 files changed

+38
-33
lines changed

src/Module.zig

Lines changed: 30 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -2132,13 +2132,15 @@ pub const SrcLoc = struct {
21322132
assert(src_loc.file_scope.tree_loaded);
21332133
return nodeToSpan(tree, node);
21342134
},
2135-
.node_offset_back2tok => |node_off| {
2135+
.node_offset_initializer => |node_off| {
21362136
const tree = try src_loc.file_scope.getTree(gpa);
21372137
const node = src_loc.declRelativeToNodeIndex(node_off);
2138-
const tok_index = tree.firstToken(node) - 2;
2139-
const start = tree.tokens.items(.start)[tok_index];
2140-
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
2141-
return Span{ .start = start, .end = end, .main = start };
2138+
return tokensToSpan(
2139+
tree,
2140+
tree.firstToken(node) - 3,
2141+
tree.lastToken(node),
2142+
tree.nodes.items(.main_token)[node] - 2,
2143+
);
21422144
},
21432145
.node_offset_var_decl_ty => |node_off| {
21442146
const tree = try src_loc.file_scope.getTree(gpa);
@@ -2151,12 +2153,10 @@ pub const SrcLoc = struct {
21512153
.aligned_var_decl => tree.alignedVarDecl(node),
21522154
else => unreachable,
21532155
};
2154-
const tok_index = if (full.ast.type_node != 0) blk: {
2155-
const main_tokens = tree.nodes.items(.main_token);
2156-
break :blk main_tokens[full.ast.type_node];
2157-
} else blk: {
2158-
break :blk full.ast.mut_token + 1; // the name token
2159-
};
2156+
if (full.ast.type_node != 0) {
2157+
return nodeToSpan(tree, full.ast.type_node);
2158+
}
2159+
const tok_index = full.ast.mut_token + 1; // the name token
21602160
const start = tree.tokens.items(.start)[tok_index];
21612161
const end = start + @intCast(u32, tree.tokenSlice(tok_index).len);
21622162
return Span{ .start = start, .end = end, .main = start };
@@ -2492,26 +2492,32 @@ pub const SrcLoc = struct {
24922492
}
24932493

24942494
pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
2495+
return tokensToSpan(
2496+
tree,
2497+
tree.firstToken(node),
2498+
tree.lastToken(node),
2499+
tree.nodes.items(.main_token)[node],
2500+
);
2501+
}
2502+
2503+
fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
24952504
const token_starts = tree.tokens.items(.start);
2496-
const main_token = tree.nodes.items(.main_token)[node];
2497-
const start = tree.firstToken(node);
2498-
const end = tree.lastToken(node);
24992505
var start_tok = start;
25002506
var end_tok = end;
25012507

25022508
if (tree.tokensOnSameLine(start, end)) {
25032509
// do nothing
2504-
} else if (tree.tokensOnSameLine(start, main_token)) {
2505-
end_tok = main_token;
2506-
} else if (tree.tokensOnSameLine(main_token, end)) {
2507-
start_tok = main_token;
2510+
} else if (tree.tokensOnSameLine(start, main)) {
2511+
end_tok = main;
2512+
} else if (tree.tokensOnSameLine(main, end)) {
2513+
start_tok = main;
25082514
} else {
2509-
start_tok = main_token;
2510-
end_tok = main_token;
2515+
start_tok = main;
2516+
end_tok = main;
25112517
}
25122518
const start_off = token_starts[start_tok];
25132519
const end_off = token_starts[end_tok] + @intCast(u32, tree.tokenSlice(end_tok).len);
2514-
return Span{ .start = start_off, .end = end_off, .main = token_starts[main_token] };
2520+
return Span{ .start = start_off, .end = end_off, .main = token_starts[main] };
25152521
}
25162522
};
25172523

@@ -2565,10 +2571,9 @@ pub const LazySrcLoc = union(enum) {
25652571
/// from its containing Decl node AST index.
25662572
/// The Decl is determined contextually.
25672573
node_offset: TracedOffset,
2568-
/// The source location points to two tokens left of the first token of an AST node,
2569-
/// which is this value offset from its containing Decl node AST index.
2574+
/// The source location points to the beginning of a struct initializer.
25702575
/// The Decl is determined contextually.
2571-
node_offset_back2tok: i32,
2576+
node_offset_initializer: i32,
25722577
/// The source location points to a variable declaration type expression,
25732578
/// found by taking this AST node index offset from the containing
25742579
/// Decl AST node, which points to a variable declaration AST node. Next, navigate
@@ -2764,7 +2769,7 @@ pub const LazySrcLoc = union(enum) {
27642769
.byte_offset,
27652770
.token_offset,
27662771
.node_offset,
2767-
.node_offset_back2tok,
2772+
.node_offset_initializer,
27682773
.node_offset_var_decl_ty,
27692774
.node_offset_for_cond,
27702775
.node_offset_builtin_call_arg0,

src/Sema.zig

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -3403,7 +3403,7 @@ fn validateUnionInit(
34033403

34043404
for (instrs[1..]) |inst| {
34053405
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
3406-
const inst_src: LazySrcLoc = .{ .node_offset_back2tok = inst_data.src_node };
3406+
const inst_src: LazySrcLoc = .{ .node_offset_initializer = inst_data.src_node };
34073407
try sema.errNote(block, inst_src, msg, "additional initializer here", .{});
34083408
}
34093409
try sema.addDeclaredHereNote(msg, union_ty);
@@ -3421,7 +3421,7 @@ fn validateUnionInit(
34213421

34223422
const field_ptr = instrs[0];
34233423
const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
3424-
const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
3424+
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
34253425
const field_ptr_extra = sema.code.extraData(Zir.Inst.Field, field_ptr_data.payload_index).data;
34263426
const field_name = sema.code.nullTerminatedString(field_ptr_extra.field_name_start);
34273427
const field_index = try sema.unionFieldIndex(block, union_ty, field_name, field_src);
@@ -3523,15 +3523,15 @@ fn validateStructInit(
35233523

35243524
for (instrs) |field_ptr| {
35253525
const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
3526-
const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
3526+
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
35273527
const field_ptr_extra = sema.code.extraData(Zir.Inst.Field, field_ptr_data.payload_index).data;
35283528
struct_ptr_zir_ref = field_ptr_extra.lhs;
35293529
const field_name = sema.code.nullTerminatedString(field_ptr_extra.field_name_start);
35303530
const field_index = try sema.structFieldIndex(block, struct_ty, field_name, field_src);
35313531
if (found_fields[field_index] != 0) {
35323532
const other_field_ptr = found_fields[field_index];
35333533
const other_field_ptr_data = sema.code.instructions.items(.data)[other_field_ptr].pl_node;
3534-
const other_field_src: LazySrcLoc = .{ .node_offset_back2tok = other_field_ptr_data.src_node };
3534+
const other_field_src: LazySrcLoc = .{ .node_offset_initializer = other_field_ptr_data.src_node };
35353535
const msg = msg: {
35363536
const msg = try sema.errMsg(block, field_src, "duplicate field", .{});
35373537
errdefer msg.destroy(gpa);
@@ -3606,7 +3606,7 @@ fn validateStructInit(
36063606
field: for (found_fields) |field_ptr, i| {
36073607
if (field_ptr != 0) {
36083608
const field_ptr_data = sema.code.instructions.items(.data)[field_ptr].pl_node;
3609-
const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_ptr_data.src_node };
3609+
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_ptr_data.src_node };
36103610

36113611
// Determine whether the value stored to this pointer is comptime-known.
36123612
const field_ty = struct_ty.structFieldType(i);
@@ -13999,14 +13999,14 @@ fn zirStructInit(
1399913999
extra_index = item.end;
1400014000

1400114001
const field_type_data = zir_datas[item.data.field_type].pl_node;
14002-
const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_type_data.src_node };
14002+
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
1400314003
const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
1400414004
const field_name = sema.code.nullTerminatedString(field_type_extra.name_start);
1400514005
const field_index = try sema.structFieldIndex(block, resolved_ty, field_name, field_src);
1400614006
if (field_inits[field_index] != .none) {
1400714007
const other_field_type = found_fields[field_index];
1400814008
const other_field_type_data = zir_datas[other_field_type].pl_node;
14009-
const other_field_src: LazySrcLoc = .{ .node_offset_back2tok = other_field_type_data.src_node };
14009+
const other_field_src: LazySrcLoc = .{ .node_offset_initializer = other_field_type_data.src_node };
1401014010
const msg = msg: {
1401114011
const msg = try sema.errMsg(block, field_src, "duplicate field", .{});
1401214012
errdefer msg.destroy(gpa);
@@ -14028,7 +14028,7 @@ fn zirStructInit(
1402814028
const item = sema.code.extraData(Zir.Inst.StructInit.Item, extra.end);
1402914029

1403014030
const field_type_data = zir_datas[item.data.field_type].pl_node;
14031-
const field_src: LazySrcLoc = .{ .node_offset_back2tok = field_type_data.src_node };
14031+
const field_src: LazySrcLoc = .{ .node_offset_initializer = field_type_data.src_node };
1403214032
const field_type_extra = sema.code.extraData(Zir.Inst.FieldType, field_type_data.payload_index).data;
1403314033
const field_name = sema.code.nullTerminatedString(field_type_extra.name_start);
1403414034
const field_index = try sema.unionFieldIndex(block, resolved_ty, field_name, field_src);

0 commit comments

Comments
 (0)