@@ -97,124 +97,120 @@ let parse_with_error_recovery filename lexbuf =
   let lex_next () =
     match !token_buffer with
     | tok :: rest ->
-        token_buffer := rest;
-        tok
+      token_buffer := rest;
+      tok
     | [] ->
-        let token = read lexbuf in
-        let start_pos, end_pos = Sedlexing.lexing_positions lexbuf in
-        (token, start_pos, end_pos)
+      let token = read lexbuf in
+      let start_pos, end_pos = Sedlexing.lexing_positions lexbuf in
+      (token, start_pos, end_pos)
   in
 
   (* Start incremental parsing *)
   let initial_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
 
   (* Attempt error recovery by skipping tokens *)
   let rec attempt_recovery checkpoint skip_count =
-    if skip_count <= 0 then
-      checkpoint
+    if skip_count <= 0 then checkpoint
     else
       let token, _, _ = lex_next () in
       match token with
-      | EOF -> checkpoint   (* Don't skip EOF *)
+      | EOF -> checkpoint (* Don't skip EOF *)
       | _ -> attempt_recovery checkpoint (skip_count - 1)
   in
 
   (* Drive the incremental parser with error recovery *)
   let rec drive checkpoint =
     match checkpoint with
     | Parser.MenhirInterpreter.InputNeeded _env ->
-        let token, start_pos, end_pos = lex_next () in
-        let checkpoint = Parser.MenhirInterpreter.offer checkpoint (token, start_pos, end_pos) in
-        drive checkpoint
-
+      let token, start_pos, end_pos = lex_next () in
+      let checkpoint =
+        Parser.MenhirInterpreter.offer checkpoint (token, start_pos, end_pos)
+      in
+      drive checkpoint
     | Parser.MenhirInterpreter.Shifting _
     | Parser.MenhirInterpreter.AboutToReduce _ ->
-        let checkpoint = Parser.MenhirInterpreter.resume checkpoint in
-        drive checkpoint
-
-    | Parser.MenhirInterpreter.HandlingError env ->
-        (* Collect the error *)
-        let error = Parse_error.error_from_env env !last_token !delimiters_stack in
-        Parse_error.add_error error_collector error;
-
-        (* Determine recovery strategy *)
-        let recovery = Parse_error.recovery_strategy !last_token (List.length !delimiters_stack) in
-
-        (match recovery with
-        | Parse_error.Abort ->
-            (* Cannot recover - return empty list and report errors *)
-            []
-
-        | Parse_error.Skip n ->
-            (* Skip n tokens and restart from initial state *)
-            let _ = attempt_recovery checkpoint n in
-            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
-            drive new_checkpoint
-
-        | Parse_error.SkipToDelimiter ->
-            (* Skip until we find a delimiter at current nesting level *)
-            let target_depth = List.length !delimiters_stack in
-            let rec skip_to_matching () =
-              let token, _, _ = lex_next () in
-              match token with
-              | EOF -> ()
-              | _ when List.length !delimiters_stack = target_depth -> ()
-              | _ -> skip_to_matching ()
-            in
-            skip_to_matching ();
-            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
-            drive new_checkpoint
-
-        | Parse_error.SkipUntil target_token ->
-            (* Skip until we see target token *)
-            let rec skip_until () =
-              let token, _, _ = lex_next () in
-              if not (Poly.equal token target_token) && not (Poly.equal token EOF) then
-                skip_until ()
-            in
-            skip_until ();
-            let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
-            drive new_checkpoint)
-
-    | Parser.MenhirInterpreter.Accepted result ->
-        result
+      let checkpoint = Parser.MenhirInterpreter.resume checkpoint in
+      drive checkpoint
+    | Parser.MenhirInterpreter.HandlingError env -> (
+      (* Collect the error *)
+      let error =
+        Parse_error.error_from_env env !last_token !delimiters_stack
+      in
+      Parse_error.add_error error_collector error;
 
-    | Parser.MenhirInterpreter.Rejected ->
-        let error = Parse_error.create_error
-          ~position:Lexing.dummy_pos
-          ~message:"parse rejected"
-          ()
-        in
-        Parse_error.add_error error_collector error;
+      (* Determine recovery strategy *)
+      let recovery =
+        Parse_error.recovery_strategy !last_token
+          (List.length !delimiters_stack)
+      in
+
+      match recovery with
+      | Parse_error.Abort ->
+        (* Cannot recover - return empty list and report errors *)
         []
+      | Parse_error.Skip n ->
+        (* Skip n tokens and restart from initial state *)
+        let _ = attempt_recovery checkpoint n in
+        let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
+        drive new_checkpoint
+      | Parse_error.SkipToDelimiter ->
+        (* Skip until we find a delimiter at current nesting level *)
+        let target_depth = List.length !delimiters_stack in
+        let rec skip_to_matching () =
+          let token, _, _ = lex_next () in
+          match token with
+          | EOF -> ()
+          | _ when List.length !delimiters_stack = target_depth -> ()
+          | _ -> skip_to_matching ()
+        in
+        skip_to_matching ();
+        let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
+        drive new_checkpoint
+      | Parse_error.SkipUntil target_token ->
+        (* Skip until we see target token *)
+        let rec skip_until () =
+          let token, _, _ = lex_next () in
+          if (not (Poly.equal token target_token)) && not (Poly.equal token EOF)
+          then skip_until ()
+        in
+        skip_until ();
+        let new_checkpoint = Parser.Incremental.expr_file Lexing.dummy_pos in
+        drive new_checkpoint)
+    | Parser.MenhirInterpreter.Accepted result -> result
+    | Parser.MenhirInterpreter.Rejected ->
+      let error =
+        Parse_error.create_error ~position:Lexing.dummy_pos
+          ~message:"parse rejected" ()
+      in
+      Parse_error.add_error error_collector error;
+      []
   in
 
   let result =
-    try drive initial_checkpoint with
-    | LexerError (msg, pos) ->
-        let error = Parse_error.create_error ~position:pos ~message:msg () in
-        Parse_error.add_error error_collector error;
-        []
+    try drive initial_checkpoint
+    with LexerError (msg, pos) ->
+      let error = Parse_error.create_error ~position:pos ~message:msg () in
+      Parse_error.add_error error_collector error;
+      []
   in
 
   (* Report all collected errors *)
   if Parse_error.has_errors error_collector then begin
     let errors = Parse_error.get_errors error_collector in
     List.iter errors ~f:(fun error ->
-      let hint_msg = match error.hint with
+      let hint_msg =
+        match error.hint with
         | Some h -> "\n  " ^ yellow "hint" ^ ": " ^ h
         | None -> ""
       in
       print_syntax_error error.position error.message filename;
-      if Option.is_some error.hint then
-        Stdlib.Printf.eprintf "%s\n" hint_msg
-    );
-    Stdlib.Printf.eprintf "\n%s\n" (bold (red (Printf.sprintf "found %d error(s)" (List.length errors))));
+      if Option.is_some error.hint then Stdlib.Printf.eprintf "%s\n" hint_msg);
+    Stdlib.Printf.eprintf "\n%s\n"
+      (bold (red (Printf.sprintf "found %d error(s)" (List.length errors))));
     Stdlib.exit 1
   end;
 
   result
 
 (* Original parse function for backward compatibility - now uses error recovery *)
-let parse_with_error filename lexbuf =
-  parse_with_error_recovery filename lexbuf
+let parse_with_error filename lexbuf = parse_with_error_recovery filename lexbuf
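
For context, a minimal sketch of how the recovering entry point above might be driven from a main function. The channel setup, Sedlexing calls, and the example file name are assumptions made for illustration; only parse_with_error_recovery itself comes from the change in this commit.

let () =
  (* hypothetical input file name, for illustration only *)
  let filename = "example.src" in
  let ic = Stdlib.open_in filename in
  let lexbuf = Sedlexing.Utf8.from_channel ic in
  (* record the file name so reported error positions point at it *)
  Sedlexing.set_filename lexbuf filename;
  (* returns the parsed expressions, or [] after reporting errors and exiting *)
  let exprs = parse_with_error_recovery filename lexbuf in
  Stdlib.close_in ic;
  Stdlib.Printf.printf "parsed %d top-level expressions\n" (List.length exprs)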