diff --git a/gdb-port/top-level-parse.py b/gdb-port/top-level-parse.py
index f46714db6827a2386b68556681a5923ebc964389..821ad3d5ae8f19b3f8578f5f4e354d9bad8f490e 100644
--- a/gdb-port/top-level-parse.py
+++ b/gdb-port/top-level-parse.py
@@ -1,3 +1,5 @@
+import code # DEBUG
+
 class TopLevelParse:
     def __init__(self):
         self.parser_stacks = []
@@ -29,6 +31,7 @@ class TopLevelParse:
         self.parse_state_stack = [] # TODO: remove
         self.current_token_start = 0 # TODO: remove
         self.current_token_end = 0 #TODO: remove
+        self.printing_tokens = [] # Holds the tokens yet to be printed by print_input_chunk as it walks the AST
 
     def init_parser(self):
         self.vt_types = VTTypes()
@@ -463,10 +466,17 @@ class TopLevelParse:
         charbuf_dynamic = []
         current_row = 0
         info_strings = []
-        printing_tokens = [token]
-        while current_row < h and printing_tokens:
-            token = printing_tokens.pop()
+        if not self.printing_tokens: # This ignores the passed-in token parameter if there are still tokens left to print
+            self.printing_tokens = [token]
+        while current_row < h and self.printing_tokens:
+            #print(self.printing_tokens)
+            token = self.printing_tokens[-1]
+            #token = self.printing_tokens.pop()
             tokenmap_entry = self.input_token_map[int(token.address)]
+            if tokenmap_entry['start'] > end: # Next token won't fit on the screen
+                #print("Token start past chunk end, break, tokenmap_entry['start']:", tokenmap_entry['start'], "end:", end)
+                break
+            self.printing_tokens.pop()
             token_length = tokenmap_entry['end'] - tokenmap_entry['start'] # TODO: save tokenmap_values_sorted[i]
             start_col = max(tokenmap_entry['start'] - start, 0)
             end_col = min(tokenmap_entry['end'] - start, w)
@@ -476,7 +486,7 @@ class TopLevelParse:
             current_row += 1
 
             if token.token_type == HParsedToken.TT_SEQUENCE:
-                printing_tokens.extend(token.children[::-1])
+                self.printing_tokens.extend(token.children[::-1])
 
         try:
             hpr = HParseResult(int(hparseresult_addr, 16))
@@ -513,11 +523,13 @@ class TopLevelParse:
             inputfragment = (self.input_ptr + (start + w * chunk) ).string('UTF-8', 'replace', w)
             encoded_inputfragment = inputfragment.replace("\n", ".").replace("\r", ".").replace("\t", ".")
             print(encoded_inputfragment)
-            print("chunk: ", chunk)
-            print("last chunk:", last_chunk_width)
+            print("chunk: ", chunk) # DEBUG
+            self.print_input_chunk(token, rows, start + w * chunk, (start + w * chunk) + min(w, end-start))
+            print("last chunk width:", last_chunk_width) # DEBUG
 
         inputfragment = (self.input_ptr + (start + w * screens)).string('UTF-8', 'replace', last_chunk_width)
         encoded_inputfragment = inputfragment.replace("\n", ".").replace("\r", ".").replace("\t", ".")
         print(encoded_inputfragment)
+        self.print_input_chunk(token, rows, start + w * screens, (start + w * screens) + min(w, end-start))
         if rec_depth == 0:
             print(encoded_inputfragment)
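
For context, the sketch below illustrates the resumable traversal this patch introduces: the worklist lives on the object so it survives between calls, the next token is only peeked at first, and it is popped only once it is known to start inside the current chunk, with sequence children pushed in reverse so they print left to right. All names here (Token, ChunkPrinter, print_chunk) are hypothetical stand-ins, not the repository's TopLevelParse / HParsedToken / input_token_map API.

# Minimal sketch of a resumable, stack-based AST walk (hypothetical names).

class Token:
    """A token covering input bytes [start, end), with optional children."""
    def __init__(self, start, end, children=None):
        self.start = start
        self.end = end
        self.children = children or []


class ChunkPrinter:
    def __init__(self, root):
        self.root = root
        self.printing_tokens = []  # tokens not yet printed; top of stack is next

    def print_chunk(self, chunk_start, chunk_end):
        """Print every pending token that starts inside [chunk_start, chunk_end)."""
        if not self.printing_tokens:            # first call: seed the worklist
            self.printing_tokens = [self.root]
        while self.printing_tokens:
            token = self.printing_tokens[-1]    # peek; don't pop yet
            if token.start >= chunk_end:        # starts past this chunk:
                break                           # leave it for the next call
            self.printing_tokens.pop()
            print(f"chunk [{chunk_start},{chunk_end}): token {token.start}-{token.end}")
            # Push children in reverse so they pop in left-to-right order.
            self.printing_tokens.extend(token.children[::-1])


# Usage: the same walk resumes across two 50-byte chunks.
tree = Token(0, 100, [Token(0, 40, [Token(0, 10), Token(10, 40)]),
                      Token(60, 100)])
printer = ChunkPrinter(tree)
printer.print_chunk(0, 50)     # prints the root and the left subtree
printer.print_chunk(50, 100)   # resumes with the token starting at 60

Peeking before popping is what lets a token that falls past the current chunk stay on the stack and be picked up by the next print_input_chunk call.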