From 48050650c2b6b8ec6a7ca37ea3c67bf33184dc74 Mon Sep 17 00:00:00 2001
From: Nguyen Nguyen
Date: Sat, 30 Mar 2024 16:07:17 -0700
Subject: [PATCH] Code refactoring

---
 rat24s.py | 66 ++++++++++++++++++++++++++++++++++------------------------
 1 file changed, 38 insertions(+), 28 deletions(-)

diff --git a/rat24s.py b/rat24s.py
index 93821c4..0d6f276 100644
--- a/rat24s.py
+++ b/rat24s.py
@@ -3,44 +3,54 @@
 from components.syntax_analyzer import Parser
 
 
-def extract_tokens(args):
-    # Create a Lexer instance
-    lexer = Lexer(args.input)
+def print_tokens(lexer: Lexer):
+    for token in lexer.tokens:
+        text = f"{token.token_type.name.lower():<20} {token.lexeme}"
+        print(text)
 
-    if args.output:
-        # Open a file for writing
-        with open(args.output, 'w') as file:
-            # Write table headers
-            file.write(f"{'token':<20} {'lexeme':<10}\n")
-            file.write(f'{"-" * 31}\n')
-            for token in lexer.tokens:
-                text = f"{token.token_type.name.lower():<20} {token.lexeme}\n"
-                file.write(text)
-                if args.verbose:
-                    print(text, end="")
-    # Verbose mode
-    elif args.verbose:
+
+def write_tokens_to_file(lexer: Lexer, output_file: str):
+    with open(output_file, 'w') as file:
+        # Write table headers
+        file.write(f"{'token':<20} {'lexeme':<10}\n")
+        file.write(f'{"-" * 31}\n')
+
+        # Write tokens
         for token in lexer.tokens:
             text = f"{token.token_type.name.lower():<20} {token.lexeme}\n"
-            print(text, end="")
+            file.write(text)
+
+
+def lexical_analyze(input_file, output_file, verbose):
+    lexer = Lexer(input_file)
 
+    if verbose:
+        print_tokens(lexer)
 
-def check_syntax(args):
-    # Create a Lexer instance
-    lexer = Lexer(args.input)
+    if output_file:
+        write_tokens_to_file(lexer, output_file)
 
-    parser = Parser(lexer, debug_print=args.verbose)
+    return lexer
+
+
+def syntax_analyze(input_file, output_file, verbose):
+    lexer = lexical_analyze(input_file, output_file, False)
+
+    parser = Parser(lexer, debug_print=False)
     parsing_success = parser.parse()
 
-    if args.output:
+    if verbose:
+        for text in parser.get_logs():
+            print(text)
+
+    if output_file:
         # Open a file for writing
-        with open(args.output, 'w') as file:
+        with open(output_file, 'w') as file:
             for text in parser.get_logs():
                 file.write(text)
                 file.write('\n')
 
-    # Display parsing result
+    # Print parsing result to console
     print('-' * 50)
-    print(f"Filename: {args.input}")
+    print(f"Filename: {input_file}")
     print(f"Number of Tokens: {len(lexer.tokens)}")
@@ -66,10 +76,10 @@ def main(args):
     """
     # Token-only mode
    if args.tokens:
-        extract_tokens(args)
+        lexical_analyze(args.input, args.output, args.verbose)
 
     # Check syntax mode
-    elif args.syntax:
-        check_syntax(args)
+    else:
+        syntax_analyze(args.input, args.output, args.verbose)
 
 if __name__ == '__main__':
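
Note (not part of the patch): main(args) dispatches on args.tokens,
args.input, args.output, and args.verbose, so the refactored entry points
appear to assume an argparse-style CLI. The sketch below shows one way that
wiring could look; the flag names are hypothetical, inferred from those
attribute names rather than taken from the repository:

    import argparse

    # Hypothetical CLI setup; assumed to live alongside main() in rat24s.py.
    # Flag names are inferred from the args.* attributes used by main().
    parser = argparse.ArgumentParser(description="RAT24S front end")
    parser.add_argument("input", help="path to a RAT24S source file")
    parser.add_argument("-o", "--output", help="write results to this file")
    parser.add_argument("-t", "--tokens", action="store_true",
                        help="token-only mode: run the lexer and stop")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="echo results to the console")

    main(parser.parse_args())

With this wiring, omitting --tokens falls through to the else branch in
main() and runs the full syntax analysis, which matches the patch's change
from elif args.syntax: to else:.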