@@ -3754,10 +3754,10 @@ MacroExpander::transcribe_rule (
     = substitute_tokens (invoc_stream, macro_rule_tokens, matched_fragments);

   // // handy for debugging
-  // for (auto &tok : substituted_tokens)
-  //   {
-  //     rust_debug ("tok: [%s]", tok->as_string ().c_str ());
-  //   }
+  for (auto &tok : substituted_tokens)
+    {
+      rust_debug ("tok: [%s]", tok->as_string ().c_str ());
+    }

   // parse it to an ASTFragment
   MacroInvocLexer lex (std::move (substituted_tokens));
@@ -3874,6 +3874,67 @@ MacroExpander::transcribe_rule (
   return AST::ASTFragment (std::move (nodes));
 }

+std::vector<std::unique_ptr<AST::Token>>
+MacroExpander::substitute_metavar (
+  std::vector<std::unique_ptr<AST::Token>> &input,
+  std::map<std::string, MatchedFragment> &fragments,
+  std::unique_ptr<AST::Token> &metavar)
+{
+  auto metavar_name = metavar->get_str ();
+
+  rust_debug ("expanding metavar: %s", metavar_name.c_str ());
+  std::vector<std::unique_ptr<AST::Token>> expanded;
+  auto it = fragments.find (metavar_name);
+  if (it == fragments.end ())
+    {
+      // Return a copy of the original token
+      expanded.push_back (metavar->clone_token ());
+    }
+  else
+    {
+      // Replace
+      MatchedFragment &frag = it->second;
+      for (size_t offs = frag.token_offset_begin; offs < frag.token_offset_end;
+           offs++)
+        {
+          auto &tok = input.at (offs);
+          expanded.push_back (tok->clone_token ());
+        }
+    }
+
+  return expanded;
+}
+
+std::pair<std::vector<std::unique_ptr<AST::Token>>, size_t>
+MacroExpander::substitute_token (
+  std::vector<std::unique_ptr<AST::Token>> &input,
+  std::map<std::string, MatchedFragment> &fragments,
+  std::unique_ptr<AST::Token> &token)
+{
+  switch (token->get_id ())
+    {
+    case IDENTIFIER:
+      rust_debug ("expanding metavar");
+      return {substitute_metavar (input, fragments, token), 1};
+    case LEFT_PAREN:
+      rust_debug ("expanding repetition");
+      break;
+      // TODO: We need to check if the $ was alone. In that case, do
+      // not error out: Simply act as if there was an empty identifier
+      // with no associated fragment and paste the dollar sign in the
+      // transcription. Unsure how to do that since we always have at
+      // least the closing curly brace after an empty $...
+    default:
+      rust_error_at (token->get_locus (),
+                     "unexpected token in macro transcribe: expected "
+                     "%<(%> or identifier after %<$%>, got %<%s%>",
+                     get_token_description (token->get_id ()));
+    }
+
+  // FIXME: gcc_unreachable() error case?
+  return {std::vector<std::unique_ptr<AST::Token>> (), 0};
+}
+
 std::vector<std::unique_ptr<AST::Token>>
 MacroExpander::substitute_tokens (
   std::vector<std::unique_ptr<AST::Token>> &input,
@@ -3882,60 +3943,83 @@ MacroExpander::substitute_tokens (
 {
   std::vector<std::unique_ptr<AST::Token>> replaced_tokens;

+  // for token in macro
+  //   if token == ?:
+  //   // That's not always true: If it's a left paren, it's repetition
+  //   // We probably want to store the matched amount in the fragment so
+  //   // we can expand it here
+  //     id = next_token();
+  //     frag = fragment.find(id);
+
   for (size_t i = 0; i < macro.size (); i++)
     {
       auto &tok = macro.at (i);
       if (tok->get_id () == DOLLAR_SIGN)
         {
-          std::vector<std::unique_ptr<AST::Token>> parsed_toks;
+          auto &next_tok = macro.at (i + 1);
+          // Aaaaah, if only we had C++17 :)
+          // auto [expanded, tok_to_skip] = ...
+          auto p = substitute_token (input, fragments, next_tok);
+          auto expanded = std::move (p.first);
+          auto tok_to_skip = p.second;

-          std::string ident;
-          for (size_t offs = i; i < macro.size (); offs++)
-            {
-              auto &tok = macro.at (offs);
-              if (tok->get_id () == DOLLAR_SIGN && offs == i)
-                {
-                  parsed_toks.push_back (tok->clone_token ());
-                }
-              else if (tok->get_id () == IDENTIFIER)
-                {
-                  rust_assert (tok->as_string ().size () == 1);
-                  ident.push_back (tok->as_string ().at (0));
-                  parsed_toks.push_back (tok->clone_token ());
-                }
-              else
-                {
-                  break;
-                }
-            }
+          i += tok_to_skip;

-          // lookup the ident
-          auto it = fragments.find (ident);
-          if (it == fragments.end ())
-            {
-              // just leave the tokens in
-              for (auto &tok : parsed_toks)
-                {
-                  replaced_tokens.push_back (tok->clone_token ());
-                }
-            }
-          else
-            {
-              // replace
-              MatchedFragment &frag = it->second;
-              for (size_t offs = frag.token_offset_begin;
-                   offs < frag.token_offset_end; offs++)
-                {
-                  auto &tok = input.at (offs);
-                  replaced_tokens.push_back (tok->clone_token ());
-                }
-            }
-          i += parsed_toks.size () - 1;
+          for (auto &token : expanded)
+            replaced_tokens.emplace_back (token->clone_token ());
         }
       else
         {
-          replaced_tokens.push_back (tok->clone_token ());
+          replaced_tokens.emplace_back (tok->clone_token ());
         }
+
+      // std::vector<std::unique_ptr<AST::Token>> parsed_toks;
+
+      // std::string ident;
+      // for (size_t offs = i; i < macro.size (); offs++)
+      //   {
+      //     auto &tok = macro.at (offs);
+      //     if (tok->get_id () == DOLLAR_SIGN && offs == i)
+      //       {
+      //         parsed_toks.push_back (tok->clone_token ());
+      //       }
+      //     else if (tok->get_id () == IDENTIFIER)
+      //       {
+      //         rust_assert (tok->as_string ().size () == 1);
+      //         ident.push_back (tok->as_string ().at (0));
+      //         parsed_toks.push_back (tok->clone_token ());
+      //       }
+      //     else
+      //       {
+      //         break;
+      //       }
+      //   }
+
+      // // lookup the ident
+      // auto it = fragments.find (ident);
+      // if (it == fragments.end ())
+      //   {
+      //     // just leave the tokens in
+      //     for (auto &tok : parsed_toks)
+      //       {
+      //         replaced_tokens.push_back (tok->clone_token ());
+      //       }
+      //   }
+      // else
+      //   {
+      //     // replace
+      //     MatchedFragment &frag = it->second;
+      //     for (size_t offs = frag.token_offset_begin;
+      //          offs < frag.token_offset_end; offs++)
+      //       {
+      //         auto &tok = input.at (offs);
+      //         replaced_tokens.push_back (tok->clone_token ());
+      //       }
+      //   }
+      // i += parsed_toks.size () - 1;
+      //
+      // }
+      // else { replaced_tokens.push_back (tok->clone_token ()); }
     }

   return replaced_tokens;
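
For illustration only, here is a minimal, self-contained sketch of the substitution idea the patch implements. It is not the gccrs API: tokens are plain strings, and Fragment and substitute are made-up stand-ins for MatchedFragment and MacroExpander::substitute_tokens. It shows the core mechanism: a metavariable in the transcriber is replaced by copying the token range [token_offset_begin, token_offset_end) it matched in the invocation stream, while every other token is passed through unchanged.

// Illustrative sketch only; simplified stand-ins for the gccrs types.
#include <cstddef>
#include <cstdio>
#include <map>
#include <string>
#include <vector>

struct Fragment
{
  size_t token_offset_begin;
  size_t token_offset_end; // exclusive
};

// Walk the transcriber tokens; on "$name", splice in the tokens that the
// metavariable matched in the invocation stream.
static std::vector<std::string>
substitute (const std::vector<std::string> &invocation,
            const std::vector<std::string> &transcriber,
            const std::map<std::string, Fragment> &fragments)
{
  std::vector<std::string> out;
  for (size_t i = 0; i < transcriber.size (); i++)
    {
      if (transcriber[i] == "$" && i + 1 < transcriber.size ())
        {
          auto it = fragments.find (transcriber[i + 1]);
          if (it != fragments.end ())
            {
              // Copy the matched token range from the invocation stream.
              for (size_t offs = it->second.token_offset_begin;
                   offs < it->second.token_offset_end; offs++)
                out.push_back (invocation[offs]);
              i++; // skip the metavariable name
              continue;
            }
        }
      // Not a known metavariable: keep the transcriber token as-is.
      out.push_back (transcriber[i]);
    }
  return out;
}

int
main ()
{
  // add!(1, 2) matched against a rule like ($a:expr, $b:expr) => { $a + $b }
  std::vector<std::string> invocation = {"1", ",", "2"};
  std::vector<std::string> transcriber = {"$", "a", "+", "$", "b"};
  std::map<std::string, Fragment> fragments
    = {{"a", Fragment{0, 1}}, {"b", Fragment{2, 3}}};

  for (const auto &tok : substitute (invocation, transcriber, fragments))
    printf ("%s ", tok.c_str ()); // prints: 1 + 2
  printf ("\n");
  return 0;
}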