fixup
qingshi163 committed Jan 31, 2023
commit 77a4219e2acf2d433f97b471be6de82b98597e07
compiler/parser/src/parser.rs: 4 changes (2 additions, 2 deletions)
@@ -99,8 +99,8 @@ pub fn parse_located(
// Parse a given token iterator.
fn parse_tokens(lxr: impl IntoIterator<Item = LexResult>, mode: Mode, source_path: &str) -> Result<ast::Mod, ParseError> {
let parser = peg_parser::Parser::from(lxr).unwrap();
- dbg!(mode);
- dbg!(&parser);
+ // dbg!(mode);
+ // dbg!(&parser);
Ok(parser.parse(mode).unwrap())
}
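
A usage sketch for the parse_tokens helper above, for orientation only: it assumes the crate's existing make_tokenizer lexer entry point (visible in the test removed further down) and a Mode::Module variant; both are taken as given here and are not introduced by this commit.

use crate::lexer::make_tokenizer;

// Hypothetical caller: lex a source string and hand the token stream to
// parse_tokens in module mode. The source text and path are made up.
fn parse_module_example() -> Result<ast::Mod, ParseError> {
    let source = "print('Hello')";
    let tokens = make_tokenizer(source);
    parse_tokens(tokens, Mode::Module, "<example>")
}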

compiler/parser/src/peg_parser.rs: 18 changes (2 additions, 16 deletions)
@@ -411,7 +411,7 @@ peg::parser! { grammar python_parser(zelf: &Parser) for Parser {
// rule match_stmt() -> Stmt =
// [Match]

- rule expressions() -> Expr = pack_tuple_expr(<star_expression()>, ExprContext::Load)
+ rule expressions() -> Expr = pack_tuple_expr(<expression()>, ExprContext::Load)

rule expression() -> Expr =
loc(<a:disjunction() [If] b:disjunction() [Else] c:expression() {
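
The expressions() rule above passes its element rule to pack_tuple_expr as an argument. Below is a minimal, standalone sketch of that rust-peg rule-argument feature; the grammar and names are purely illustrative and are not part of the RustPython grammar.

peg::parser! {
    grammar rule_arg_demo() for str {
        rule number() -> u32 = n:$(['0'..='9']+) {? n.parse().or(Err("u32")) }

        // `item` is a rule parameter (like `r: rule<Expr>` in pack_tuple_expr)
        // and `scale` is a plain value parameter (like ExprContext::Load).
        rule scaled(item: rule<u32>, scale: u32) -> u32 = v:item() { v * scale }

        // The caller wraps the rule argument in angle brackets, mirroring
        // pack_tuple_expr(<expression()>, ExprContext::Load).
        pub rule doubled() -> u32 = scaled(<number()>, 2)
    }
}

fn main() {
    assert_eq!(rule_arg_demo::doubled("21").unwrap(), 42);
}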
@@ -925,6 +925,7 @@ peg::parser! { grammar python_parser(zelf: &Parser) for Parser {
// not yet supported by lexer
rule func_type_comment() -> Option<String> = { None }

+ // TODO: optimize
rule pack_tuple_expr(r:rule<Expr>, ctx: ExprContext) -> Expr =
loc(<z:r() **<2,> [Comma] [Comma]? {
ExprKind::Tuple { elts: z, ctx: ctx.clone() }
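
For reference, a standalone sketch of the `**<2,>` bounded, comma-separated repeat that pack_tuple_expr relies on: two or more items (with an optional trailing separator) collect into a Vec, while a single item falls through to a second alternative. The grammar and names below are illustrative only and not part of this crate.

peg::parser! {
    grammar tuple_demo() for str {
        rule word() -> String = w:$(['a'..='z']+) { w.to_string() }

        pub rule words() -> Vec<String> =
            // At least two comma-separated words plus an optional trailing
            // comma, mirroring `z:r() **<2,> [Comma] [Comma]?` above.
            v:(word() **<2,> ",") ","? { v }
            // A single word is not packed; here it simply becomes a one-element Vec.
            / w:word() ","? { vec![w] }
    }
}

fn main() {
    assert_eq!(tuple_demo::words("a,b,c").unwrap(), vec!["a", "b", "c"]);
    assert_eq!(tuple_demo::words("a").unwrap(), vec!["a".to_string()]);
}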
@@ -1040,18 +1041,3 @@ fn make_arguments(
defaults: posdefaults,
}
}

- #[cfg(test)]
- mod tests {
-     use super::*;
-     use crate::lexer::make_tokenizer;
-
-     #[test]
-     fn test_return() {
-         let source = "'Hello'";
-         let lexer = make_tokenizer(source);
-         let parser = Parser::from(lexer).unwrap();
-         dbg!(&parser);
-         dbg!(python_parser::file(&parser, &parser));
-     }
- }