Commit 1335b259 authored by pompolic

More cleanup

parent 10c123f3
@@ -401,47 +401,13 @@ class TopLevelParse:
    # TODO: remove first printing loop
    def print_input_chunk(self, token, rows, start, end):
        #print("print_input_chunk(): start: ", start, "end: ", end)
        #h = min(rows, len(token.children))
        tokenmap_values = list(self.input_token_map.values())
        #tokenmap_val_list = [token for token in list(self.input_token_map.values()) if (token['start'] <= start and token['start'] <= end)]
        tokenmap_val_list = [dict(token, addr=hex(key)) for key, token in self.input_token_map.items() if (token['start'] >= start and token['start'] <= end)]
        numrows = min(rows, len(tokenmap_val_list))
        #row_lengths_for_columns = [len([token for token in list(tokenmap_values) if (token['start'] <= x and x >= token['end'])]) for x in range(start,end+1)]
        #numrows = min(rows, max(row_lengths_for_columns))
        #print("numrows: ", numrows)
        h = numrows
        w = end-start
        if w == 0:
            print("all tokens passed to display are zero-width")
            return
        charbuf = [[' '] * w for i in range(0,h)]
        #print("h=len(charbuf):", len(charbuf), "w :", w)
        tokenmap_values_sorted = sorted(tokenmap_val_list, key=lambda token: (token['start'], token['end']))
        #print("len(tokenmap_values_sorted):", len(tokenmap_values_sorted), "rows:", numrows)
        hpr_strings = []
        for i in range(0, numrows):
            try: # try-block for debugging
                token_length = tokenmap_values_sorted[i]['end'] - tokenmap_values_sorted[i]['start'] # TODO: save tokenmap_values_sorted[i]
                start_col = max(tokenmap_values_sorted[i]['start'] - start, 0)
                end_col = min(tokenmap_values_sorted[i]['end'] - start, w-1)
                charbuf[i][start_col:end_col] = ['X'] * min(token_length, w) #TODO: this appends to the row in question -- charbuf[0] is input[start]
                hparseresult_addr = tokenmap_values_sorted[i]['hparseresult']
            except IndexError as ie: #DEBUG
                print(hpr_strings)
                print(i)
                print(ie)
            try:
                hpr = HParseResult(int(hparseresult_addr, 16))
                hpr_strings.append(hpr.str_no_deref())
            except Exception: # hparseresult_addr may be unset (IndexError above) or not a valid HParseResult address
                hpr_strings.append("invalid")
        charbufrows_token_debug = [ "".join(row) + "\n" + " ".join([hpr_strings[index], str(tokenmap_values_sorted[index])]) for index, row in enumerate(charbuf)]
        charbufrows = ["".join(row) for row in charbuf]
        #charbuf_final = "\n".join(charbufrows)
        charbuf_final = "\n".join(charbufrows_token_debug)
        #print(charbuf_final)
        #print(hpr_strings)
        #print(len(hpr_strings), len(charbuf), len(tokenmap_values_sorted))
        # Print by getting a token, and recursively walking its children if applicable
        charbuf = [[' '] * w for i in range(0,h)]
@@ -451,12 +417,10 @@ class TopLevelParse:
        if not self.printing_tokens: # This ignores the passed-in token parameter if there are still tokens left to print
            self.printing_tokens = [token]
        while current_row < h and self.printing_tokens:
            #print(self.printing_tokens)
            token = self.printing_tokens[-1]
            #token = self.printing_tokens.pop()
            tokenmap_entry = self.input_token_map[int(token.address)]
            if tokenmap_entry['start'] > end: # Next token won't fit on the screen
                #print("Token start past chunk end, break, tokenmap_entry['start']:", tokenmap_entry['start'], "end:", end)
                break
            self.printing_tokens.pop()
            token_length = tokenmap_entry['end'] - tokenmap_entry['start'] # TODO: save tokenmap_values_sorted[i]
......
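
For orientation: the first hunk filters self.input_token_map down to the tokens whose start falls inside the [start, end] window, sorts them by (start, end), and paints each one as a run of 'X' characters on its own row of a text grid, one column per input position. Below is a minimal, self-contained sketch of that rendering idea; the token dicts are hypothetical stand-ins for input_token_map entries, and the slice length is derived from the clamped columns, which sidesteps the row-extension quirk the original TODO comment points at.

def render_token_chunk(tokens, start, end, rows):
    """Paint each token whose start falls in [start, end] as a run of
    'X' on its own row of a text grid, one column per input position."""
    w = end - start
    if w == 0:
        return ""
    visible = sorted(
        (t for t in tokens if start <= t['start'] <= end),
        key=lambda t: (t['start'], t['end']),
    )
    charbuf = [[' '] * w for _ in range(min(rows, len(visible)))]
    for row, tok in zip(charbuf, visible):
        start_col = max(tok['start'] - start, 0)          # clamp to the window
        end_col = min(tok['end'] - start, w - 1)
        row[start_col:end_col] = ['X'] * (end_col - start_col)
    return "\n".join("".join(row) for row in charbuf)

# Example: three tokens over a 16-byte window.
tokens = [{'start': 0, 'end': 4}, {'start': 4, 'end': 12}, {'start': 10, 'end': 16}]
print(render_token_chunk(tokens, 0, 16, rows=8))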
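The second hunk is the replacement printing loop: self.printing_tokens acts as an explicit stack so a depth-first walk over the token tree can stop when the screen chunk fills up and resume on the next call. Note that it peeks at the top token first and only pops once the token is known to start before `end`. A sketch of that resumable walk, with a hypothetical Node type standing in for the real token objects and the input_token_map lookup:

class Node:
    def __init__(self, start, end, children=()):
        self.start, self.end, self.children = start, end, list(children)

def print_chunk(stack, root, end, max_rows):
    """Emit tokens depth-first until max_rows are used or a token starts
    past `end`; the remainder stays on `stack` for the next call."""
    if not stack:              # first call: seed the walk with the root token
        stack.append(root)
    rows = 0
    while rows < max_rows and stack:
        node = stack[-1]       # peek: the node may belong to the next chunk
        if node.start > end:   # next token won't fit on the screen
            break
        stack.pop()
        print(f"token [{node.start}, {node.end})")
        stack.extend(reversed(node.children))  # so children pop left-to-right
        rows += 1

# Usage: walk a small tree two rows at a time; the second call resumes
# where the first left off.
tree = Node(0, 8, [Node(0, 4), Node(4, 8, [Node(4, 6), Node(6, 8)])])
stack = []
print_chunk(stack, tree, end=8, max_rows=2)
print_chunk(stack, tree, end=8, max_rows=2)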