diff --git a/gdb-port/top-level-parse.py b/gdb-port/top-level-parse.py
index 2fa8c64d433d730e85c92eefc67b260b81254420..353deadddf79f49c58139cd4476aa7aec1f6029c 100644
--- a/gdb-port/top-level-parse.py
+++ b/gdb-port/top-level-parse.py
@@ -31,6 +31,7 @@ class TopLevelParse:
         self.current_token_end = 0 #TODO: remove
         self.printing_tokens = [] # Holds the tokens yet to be printed by print_input_chunk as it walks the AST
         self.printing_index = 0 # Tokens have been printed up to this position in the array
+        self.tokens_from_previous_chunk = []
 
     def init_parser(self):
         self.vt_types = VTTypes()
@@ -411,6 +412,25 @@ class TopLevelParse:
         current_row = 0
         info_strings = []
         printing_index = self.printing_index
+        for index, token in enumerate(self.tokens_from_previous_chunk):
+            tokenmap_entry = self.input_token_map[int(token.address)]
+            token_length = tokenmap_entry['end'] - tokenmap_entry['start']
+            start_col = max(tokenmap_entry['start'] - start, 0)
+            end_col = min(tokenmap_entry['end'] - start, w)
+            charbuf_dynamic.append([' '] * w)
+            charbuf_dynamic[current_row][start_col:end_col] = ['X'] * min(token_length, end_col-start_col) # TODO: charbuf_dynamic[-1]
+            hparseresult_addr = tokenmap_entry['hparseresult']
+            current_row += 1
+
+            try:
+                hpr = HParseResult(int(hparseresult_addr, 16))
+                info_strings.append(" ".join([hpr.str_no_deref(), str(tokenmap_entry)]))
+            except:
+                info_strings.append(" ".join(["invalid", str(tokenmap_entry)]))
+
+            if tokenmap_entry['end'] <= end:
+                del self.tokens_from_previous_chunk[index]
+
         while printing_index < len(token_list):
             token = token_list[printing_index]
             tokenmap_entry = self.input_token_map[int(token.address)]
@@ -430,6 +450,9 @@ class TopLevelParse:
             except:
                 info_strings.append(" ".join(["invalid", str(tokenmap_entry)]))
 
+            if tokenmap_entry['end'] > end:
+                self.tokens_from_previous_chunk.append(token)
+
             printing_index += 1
 
         self.printing_index = printing_index
@@ -469,6 +492,7 @@ class TopLevelParse:
                 info_strings.append(" ".join([hpr.str_no_deref(), str(tokenmap_entry)]))
             except:
                 info_strings.append(" ".join(["invalid", str(tokenmap_entry)]))
+        info_dict = dict(enumerate(info_strings))
 
         charbufrows_token_debug = [ "".join(row) + "\ntokeninfo: " + info_dict.get(index, "no token here") for index, row in enumerate(charbuf_dynamic)]
         #charbufrows = ["".join(row) for row in charbuf_dynamic]
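
Editorial note on the drain loop in the second hunk: `del self.tokens_from_previous_chunk[index]` executes inside `for index, token in enumerate(self.tokens_from_previous_chunk)`. Deleting from a list while enumerating it shifts the remaining entries left, so the token immediately after each deleted one is never examined and can linger in the carry list. A minimal standalone sketch of the intended carry-over bookkeeping follows; it uses hypothetical dict tokens with 'start'/'end' offsets instead of the real token and HParseResult objects, and the helper name is illustrative, not part of the codebase.

    # Sketch only: models the carry-over handling with plain dicts.
    # `drain_carried_tokens` and the dict token shape are assumptions.

    def drain_carried_tokens(carried, end):
        # Tokens ending at or before the chunk boundary are finished;
        # tokens extending past `end` must be redrawn in the next chunk.
        # Rebuilding the list avoids mutating it mid-iteration.
        finished = [t for t in carried if t['end'] <= end]
        still_carried = [t for t in carried if t['end'] > end]
        return still_carried, finished

    carried = [{'start': 5, 'end': 20}, {'start': 18, 'end': 90}]
    carried, finished = drain_carried_tokens(carried, end=64)
    print(carried)   # [{'start': 18, 'end': 90}]  -> spills into the next chunk
    print(finished)  # [{'start': 5, 'end': 20}]   -> fully printed this chunk

The same rebuild-instead-of-delete approach could replace the `del` without touching the third hunk, which already appends only tokens whose 'end' lies past the current chunk boundary.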