
Commit 115de8c

Format
1 parent 9cef558 commit 115de8c

2 files changed: 132 additions & 147 deletions


src/parse_error.ml

Lines changed: 56 additions & 67 deletions
@@ -4,81 +4,77 @@ open Base
 open Lexing
 
 (* Structured parse error *)
-type parse_error = {
-  position: Lexing.position;
-  end_position: Lexing.position option;
-  message: string;
-  hint: string option;
-  severity: [`Error | `Warning];
-}
+type parse_error =
+  { position : Lexing.position
+  ; end_position : Lexing.position option
+  ; message : string
+  ; hint : string option
+  ; severity : [ `Error | `Warning ]
+  }
 
 (* Error recovery strategy *)
 type recovery_action =
-  | Skip of int (* Skip n tokens *)
-  | SkipUntil of Parser.token (* Skip until we see this token *)
-  | SkipToDelimiter (* Skip to next top-level delimiter *)
-  | Abort (* Cannot recover *)
+  | Skip of int (* Skip n tokens *)
+  | SkipUntil of Parser.token (* Skip until we see this token *)
+  | SkipToDelimiter (* Skip to next top-level delimiter *)
+  | Abort (* Cannot recover *)
 
 (* Error collection *)
-type error_collector = {
-  mutable errors: parse_error list;
-  max_errors: int;
-}
+type error_collector =
+  { mutable errors : parse_error list
+  ; max_errors : int
+  }
 
-let create_collector ?(max_errors=10) () =
-  { errors = []; max_errors }
+let create_collector ?(max_errors = 10) () = { errors = []; max_errors }
 
 let add_error collector error =
   if List.length collector.errors < collector.max_errors then
     collector.errors <- error :: collector.errors
 
-let has_errors collector =
-  not (List.is_empty collector.errors)
+let has_errors collector = not (List.is_empty collector.errors)
 
-let get_errors collector =
-  List.rev collector.errors
+let get_errors collector = List.rev collector.errors
 
 (* Format error position *)
 let format_position pos =
   let column = pos.pos_cnum - pos.pos_bol + 1 in
   Printf.sprintf "%s:%d:%d" pos.pos_fname pos.pos_lnum column
 
 (* Create a parse error from parser state *)
-let create_error ~position ?end_position ~message ?hint ?(severity=`Error) () =
+let create_error ~position ?end_position ~message ?hint ?(severity = `Error) ()
+    =
   { position; end_position; message; hint; severity }
 
 (* Determine recovery action based on error context *)
 let recovery_strategy last_token delimiters_depth =
   match last_token with
   | Some Parser.EOF ->
-      (* At EOF, can't recover by skipping *)
-      Abort
+      (* At EOF, can't recover by skipping *)
+      Abort
   | Some (Parser.RPAR | Parser.RBRACK | Parser.RBRACE | Parser.RANGLE) ->
-      (* Extra closing delimiter - skip it and try to continue *)
-      Skip 1
+      (* Extra closing delimiter - skip it and try to continue *)
+      Skip 1
   | Some Parser.LPAR ->
-      (* Opening paren error - skip until we balance or find next top-level *)
-      SkipUntil Parser.LPAR
+      (* Opening paren error - skip until we balance or find next top-level *)
+      SkipUntil Parser.LPAR
   | Some _ when delimiters_depth > 0 ->
-      (* Inside delimiters - skip to closing of current level *)
-      SkipToDelimiter
+      (* Inside delimiters - skip to closing of current level *)
+      SkipToDelimiter
   | Some _ ->
-      (* Top level error - skip until we see opening paren (start of new expr) *)
-      SkipUntil Parser.LPAR
-  | None ->
-      Abort
+      (* Top level error - skip until we see opening paren (start of new expr) *)
+      SkipUntil Parser.LPAR
+  | None -> Abort
 
 (* Check if token is a delimiter *)
 let is_delimiter = function
-  | Parser.LPAR | Parser.RPAR
-  | Parser.LBRACK | Parser.RBRACK
-  | Parser.LBRACE | Parser.RBRACE
-  | Parser.LANGLE | Parser.RANGLE -> true
+  | Parser.LPAR | Parser.RPAR | Parser.LBRACK | Parser.RBRACK | Parser.LBRACE
+  | Parser.RBRACE | Parser.LANGLE | Parser.RANGLE ->
+      true
   | _ -> false
 
 (* Check if token could start a new top-level expression *)
 let is_top_level_start = function
-  | Parser.LPAR -> true (* Most top-level expressions start with ( *)
+  | Parser.LPAR -> true (* Most top-level expressions start with ( *)
   | _ -> false
 
 (* Convert token to string for error messages *)

@@ -101,38 +97,31 @@ let string_of_token = function
 let contextualize_error last_token delimiters_stack =
   match last_token with
   | Some Parser.EOF when not (List.is_empty delimiters_stack) ->
-      let delim_char, _ = List.hd_exn delimiters_stack in
-      ( Printf.sprintf "unclosed delimiter '%c'" delim_char,
-        Some "add the missing closing delimiter" )
-  | Some Parser.EOF ->
-      ( "unexpected end of file",
-        Some "the input is incomplete" )
-  | Some (Parser.RPAR | Parser.RBRACK | Parser.RBRACE | Parser.RANGLE as tok) ->
-      let tok_str = match tok with
-        | Parser.RPAR -> ")"
-        | Parser.RBRACK -> "]"
-        | Parser.RBRACE -> "}"
-        | Parser.RANGLE -> ">"
-        | _ -> "?"
-      in
-      ( Printf.sprintf "no opening delimiter for '%s'" tok_str,
-        Some "remove this delimiter or add a matching opening delimiter" )
+      let delim_char, _ = List.hd_exn delimiters_stack in
+      ( Printf.sprintf "unclosed delimiter '%c'" delim_char
+      , Some "add the missing closing delimiter" )
+  | Some Parser.EOF -> ("unexpected end of file", Some "the input is incomplete")
+  | Some ((Parser.RPAR | Parser.RBRACK | Parser.RBRACE | Parser.RANGLE) as tok)
+    ->
+      let tok_str =
+        match tok with
+        | Parser.RPAR -> ")"
+        | Parser.RBRACK -> "]"
+        | Parser.RBRACE -> "}"
+        | Parser.RANGLE -> ">"
+        | _ -> "?"
+      in
+      ( Printf.sprintf "no opening delimiter for '%s'" tok_str
+      , Some "remove this delimiter or add a matching opening delimiter" )
   | Some tok ->
-      let tok_str = string_of_token tok in
-      ( Printf.sprintf "unexpected symbol '%s'" tok_str,
-        Some "check if this symbol is in the right place" )
-  | None ->
-      ( "unexpected end of input",
-        None )
+      let tok_str = string_of_token tok in
+      ( Printf.sprintf "unexpected symbol '%s'" tok_str
+      , Some "check if this symbol is in the right place" )
+  | None -> ("unexpected end of input", None)
 
 (* Extract error information from parser environment *)
 let error_from_env env last_token delimiters_stack =
   let error_pos, end_pos = Parser.MenhirInterpreter.positions env in
   let message, hint = contextualize_error last_token delimiters_stack in
 
-  create_error
-    ~position:error_pos
-    ~end_position:end_pos
-    ~message
-    ?hint
-    ()
+  create_error ~position:error_pos ~end_position:end_pos ~message ?hint ()
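
For orientation (not part of this commit's diff): the Parse_error module is meant to be driven by a parser loop that records each failure and then asks which recovery action to take. A minimal sketch of such a caller, written as if it lived inside parse_error.ml so the unqualified names above are in scope; the handle_failure and report names and the `Stop/`Skip/`Resync labels are hypothetical:

(* Hypothetical driver hooks illustrating the intended flow; not in this commit. *)
let handle_failure collector ~last_token ~depth error =
  add_error collector error;
  match recovery_strategy last_token depth with
  | Abort -> `Stop (* give up; the caller reports what was collected *)
  | Skip n -> `Skip n (* drop n tokens, then restart the parser *)
  | SkipToDelimiter | SkipUntil _ -> `Resync (* scan forward before restarting *)

let report collector =
  (* Errors are accumulated in reverse; get_errors restores source order. *)
  List.iter (get_errors collector) ~f:(fun e ->
      Stdlib.Printf.eprintf "%s: %s\n" (format_position e.position) e.message)

The drive loop in sgen_parsing.ml below plays this role directly, restarting from a fresh Menhir checkpoint after each recovery.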

src/sgen_parsing.ml

Lines changed: 76 additions & 80 deletions
@@ -97,124 +97,120 @@ let parse_with_error_recovery filename lexbuf
   let lex_next () =
     match !token_buffer with
     | tok :: rest ->
-        token_buffer := rest;
-        tok
+        token_buffer := rest;
+        tok
     | [] ->
-        let token = read lexbuf in
-        let start_pos, end_pos = Sedlexing.lexing_positions lexbuf in
-        (token, start_pos, end_pos)
+        let token = read lexbuf in
+        let start_pos, end_pos = Sedlexing.lexing_positions lexbuf in
+        (token, start_pos, end_pos)
   in
 
   (* Start incremental parsing *)
   let initial_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
 
   (* Attempt error recovery by skipping tokens *)
   let rec attempt_recovery checkpoint skip_count =
-    if skip_count <= 0 then
-      checkpoint
+    if skip_count <= 0 then checkpoint
     else
       let token, _, _ = lex_next () in
       match token with
-      | EOF -> checkpoint (* Don't skip EOF *)
+      | EOF -> checkpoint (* Don't skip EOF *)
       | _ -> attempt_recovery checkpoint (skip_count - 1)
   in
 
   (* Drive the incremental parser with error recovery *)
   let rec drive checkpoint =
     match checkpoint with
     | Parser.MenhirInterpreter.InputNeeded _env ->
-        let token, start_pos, end_pos = lex_next () in
-        let checkpoint = Parser.MenhirInterpreter.offer checkpoint (token, start_pos, end_pos) in
-        drive checkpoint
-
+        let token, start_pos, end_pos = lex_next () in
+        let checkpoint =
+          Parser.MenhirInterpreter.offer checkpoint (token, start_pos, end_pos)
+        in
+        drive checkpoint
     | Parser.MenhirInterpreter.Shifting _
     | Parser.MenhirInterpreter.AboutToReduce _ ->
-        let checkpoint = Parser.MenhirInterpreter.resume checkpoint in
-        drive checkpoint
-
-    | Parser.MenhirInterpreter.HandlingError env ->
-        (* Collect the error *)
-        let error = Parse_error.error_from_env env !last_token !delimiters_stack in
-        Parse_error.add_error error_collector error;
-
-        (* Determine recovery strategy *)
-        let recovery = Parse_error.recovery_strategy !last_token (List.length !delimiters_stack) in
-
-        (match recovery with
-        | Parse_error.Abort ->
-            (* Cannot recover - return empty list and report errors *)
-            []
-
-        | Parse_error.Skip n ->
-            (* Skip n tokens and restart from initial state *)
-            let _ = attempt_recovery checkpoint n in
-            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
-            drive new_checkpoint
-
-        | Parse_error.SkipToDelimiter ->
-            (* Skip until we find a delimiter at current nesting level *)
-            let target_depth = List.length !delimiters_stack in
-            let rec skip_to_matching () =
-              let token, _, _ = lex_next () in
-              match token with
-              | EOF -> ()
-              | _ when List.length !delimiters_stack = target_depth -> ()
-              | _ -> skip_to_matching ()
-            in
-            skip_to_matching ();
-            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
-            drive new_checkpoint
-
-        | Parse_error.SkipUntil target_token ->
-            (* Skip until we see target token *)
-            let rec skip_until () =
-              let token, _, _ = lex_next () in
-              if not (Poly.equal token target_token) && not (Poly.equal token EOF) then
-                skip_until ()
-            in
-            skip_until ();
-            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
-            drive new_checkpoint)
-
-    | Parser.MenhirInterpreter.Accepted result ->
-        result
+        let checkpoint = Parser.MenhirInterpreter.resume checkpoint in
+        drive checkpoint
+    | Parser.MenhirInterpreter.HandlingError env -> (
+        (* Collect the error *)
+        let error =
+          Parse_error.error_from_env env !last_token !delimiters_stack
+        in
+        Parse_error.add_error error_collector error;
 
-    | Parser.MenhirInterpreter.Rejected ->
-        let error = Parse_error.create_error
-            ~position:Lexing.dummy_pos
-            ~message:"parse rejected"
-            ()
-        in
-        Parse_error.add_error error_collector error;
+        (* Determine recovery strategy *)
+        let recovery =
+          Parse_error.recovery_strategy !last_token
+            (List.length !delimiters_stack)
+        in
+
+        match recovery with
+        | Parse_error.Abort ->
+            (* Cannot recover - return empty list and report errors *)
             []
+        | Parse_error.Skip n ->
+            (* Skip n tokens and restart from initial state *)
+            let _ = attempt_recovery checkpoint n in
+            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
+            drive new_checkpoint
+        | Parse_error.SkipToDelimiter ->
+            (* Skip until we find a delimiter at current nesting level *)
+            let target_depth = List.length !delimiters_stack in
+            let rec skip_to_matching () =
+              let token, _, _ = lex_next () in
+              match token with
+              | EOF -> ()
+              | _ when List.length !delimiters_stack = target_depth -> ()
+              | _ -> skip_to_matching ()
+            in
+            skip_to_matching ();
+            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
+            drive new_checkpoint
+        | Parse_error.SkipUntil target_token ->
+            (* Skip until we see target token *)
+            let rec skip_until () =
+              let token, _, _ = lex_next () in
+              if (not (Poly.equal token target_token)) && not (Poly.equal token EOF)
+              then skip_until ()
+            in
+            skip_until ();
+            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
+            drive new_checkpoint )
+    | Parser.MenhirInterpreter.Accepted result -> result
+    | Parser.MenhirInterpreter.Rejected ->
+        let error =
+          Parse_error.create_error ~position:Lexing.dummy_pos
+            ~message:"parse rejected" ()
+        in
+        Parse_error.add_error error_collector error;
+        []
   in
 
   let result =
-    try drive initial_checkpoint with
-    | LexerError (msg, pos) ->
-        let error = Parse_error.create_error ~position:pos ~message:msg () in
-        Parse_error.add_error error_collector error;
-        []
+    try drive initial_checkpoint
+    with LexerError (msg, pos) ->
+      let error = Parse_error.create_error ~position:pos ~message:msg () in
+      Parse_error.add_error error_collector error;
+      []
   in
 
   (* Report all collected errors *)
   if Parse_error.has_errors error_collector then begin
     let errors = Parse_error.get_errors error_collector in
     List.iter errors ~f:(fun error ->
-        let hint_msg = match error.hint with
+        let hint_msg =
+          match error.hint with
           | Some h -> "\n  " ^ yellow "hint" ^ ": " ^ h
           | None -> ""
         in
         print_syntax_error error.position error.message filename;
-        if Option.is_some error.hint then
-          Stdlib.Printf.eprintf "%s\n" hint_msg
-    );
-    Stdlib.Printf.eprintf "\n%s\n" (bold (red (Printf.sprintf "found %d error(s)" (List.length errors))));
+        if Option.is_some error.hint then Stdlib.Printf.eprintf "%s\n" hint_msg );
+    Stdlib.Printf.eprintf "\n%s\n"
+      (bold (red (Printf.sprintf "found %d error(s)" (List.length errors))));
     Stdlib.exit 1
   end;
 
   result
 
 (* Original parse function for backward compatibility - now uses error recovery *)
-let parse_with_error filename lexbuf =
-  parse_with_error_recovery filename lexbuf
+let parse_with_error filename lexbuf = parse_with_error_recovery filename lexbuf
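
For reference (also not part of the diff): a typical call site builds a sedlex buffer for the file, tags it with the file name, and hands both to the recovery-aware entry point, which either returns the parsed expressions or prints the collected errors and exits. A minimal sketch under those assumptions; Sgen_parsing as the module name and the parse_file helper are hypothetical, and it relies on sedlex's Utf8.from_channel and set_filename:

(* Hypothetical caller of the error-recovery parser; not in this commit. *)
let parse_file path =
  let ic = Stdlib.open_in path in
  let lexbuf = Sedlexing.Utf8.from_channel ic in
  Sedlexing.set_filename lexbuf path;
  (* parse_with_error now routes through the recovery driver above. *)
  let exprs = Sgen_parsing.parse_with_error path lexbuf in
  Stdlib.close_in ic;
  exprs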

0 commit comments
