From d07be33ab01216ee258c5005b5a14626ab0312cc Mon Sep 17 00:00:00 2001
From: pompolic <pompolic@special-circumstanc.es>
Date: Wed, 12 Oct 2022 21:10:47 +0200
Subject: [PATCH] WIP recursive printing starting from the token given to
 print_input_map()

---
 gdb-port/top-level-parse.py | 36 +++++++++++++++++++++++++++++++++---
 1 file changed, 33 insertions(+), 3 deletions(-)

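Review note (not part of the commit): the added block replaces the
row-per-map-entry dump with a pre-order walk of the token tree, using an
explicit stack rather than recursion. Below is a minimal sketch of the
same traversal and of the clamped row fill, with a stripped-down
stand-in for HParsedToken; the class name, the TT_SEQUENCE value, and
the helper names are assumptions for illustration only:

    class Token:
        TT_SEQUENCE = 16  # assumed value; hammer defines the real constant

        def __init__(self, token_type, children=()):
            self.token_type = token_type
            self.children = list(children)

    def walk_preorder(root):
        """Yield tokens in the order the new loop prints them."""
        stack = [root]
        while stack:
            tok = stack.pop()
            yield tok
            if tok.token_type == Token.TT_SEQUENCE:
                # Reversed, so the leftmost child is popped (printed) first.
                stack.extend(tok.children[::-1])

    def fill_row(row, tok_start, tok_end, window_start, width):
        """Mark a token's extent in a row of chars, clamped to the window."""
        start_col = max(tok_start - window_start, 0)
        end_col = min(tok_end - window_start, width - 1)
        if start_col < end_col:
            # Fill length equals slice length, so the row keeps its width.
            row[start_col:end_col] = ['X'] * (end_col - start_col)

For example, walk_preorder on a sequence with two leaf children yields
the parent first, then the left child, then the right child.
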
diff --git a/gdb-port/top-level-parse.py b/gdb-port/top-level-parse.py
index 376d5f9..dc889a7 100644
--- a/gdb-port/top-level-parse.py
+++ b/gdb-port/top-level-parse.py
@@ -234,6 +234,7 @@ class TopLevelParse:
 		token_addr = hparseresult_addr.cast(gdb.lookup_type("HParseResult").pointer())['ast']
 		if token_addr:
 			#print(HParseResult(hparseresult_addr).str_no_deref())
+			#TODO: save the parser object directly and extract its name at print time
 			parsername = getattr(self.peek_parser(), 'name', '!!!UNNAMED!!!') # DEBUG
 			self.input_token_map[int(token_addr)] = {'start': self.starting_input_index[self.current_parse_depth], 'end': index, 'hparseresult': hex(hparseresult_addr), 'parser': parsername}
 			#print(self.input_token_map[int(token_addr)])
@@ -415,7 +416,7 @@ class TopLevelParse:
 		hpr_strings = []
 		for i in range(0, numrows):
 			try: # try-block for debugging
-				token_length = tokenmap_values_sorted[i]['end'] - tokenmap_values_sorted[i]['start']
+				token_length = tokenmap_values_sorted[i]['end'] - tokenmap_values_sorted[i]['start'] # TODO: save tokenmap_values_sorted[i]
 				start_col = max(tokenmap_values_sorted[i]['start'] - start, 0)
 				end_col = min(tokenmap_values_sorted[i]['end'] - start, w-1)
 				charbuf[i][start_col:end_col] = ['X'] * min(token_length, w) #TODO: this appends to the row in question -- charbuf[0] is input[start]
@@ -453,10 +454,39 @@ class TopLevelParse:
 		charbufrows = ["".join(row) for row in charbuf]
 		#charbuf_final = "\n".join(charbufrows)
 		charbuf_final = "\n".join(charbufrows_token_debug)
-		print(charbuf_final)
+		#print(charbuf_final)
 		#print(hpr_strings)
 		#print(len(hpr_strings), len(charbuf), len(tokenmap_values_sorted))
 
+		# Print by getting a token, and recursively walking its children if applicable
+		charbuf = [[' '] * w for _ in range(h)]
+		current_row = 0
+		info_strings = []
+		printing_tokens = [token]
+		while current_row < h and printing_tokens:
+			token = printing_tokens.pop()
+			tokenmap_entry = self.input_token_map[int(token.address)]
+			token_length = tokenmap_entry['end'] - tokenmap_entry['start'] # TODO: save tokenmap_entry
+			start_col = max(tokenmap_entry['start'] - start, 0)
+			end_col = min(tokenmap_entry['end'] - start, w-1)
+			charbuf[current_row][start_col:end_col] = ['X'] * (end_col - start_col) # fill length equals slice length, so the row stays w columns wide
+			hparseresult_addr = tokenmap_entry['hparseresult']
+			current_row += 1
+
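+			# Push children reversed so the leftmost child is popped, and printed, first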
+			if token.token_type == HParsedToken.TT_SEQUENCE:
+				printing_tokens.extend(token.children[::-1])
+
+			try:
+				hpr = HParseResult(int(hparseresult_addr, 16))
+				info_strings.append(" ".join([hpr.str_no_deref(), str(tokenmap_entry)]))
+			except Exception:
+				info_strings.append(" ".join(["invalid", str(tokenmap_entry)]))
+		info_dict = dict(enumerate(info_strings))
+		charbufrows_token_debug = [ "".join(row) + "\n" + info_dict.get(index, "no token here") for index, row in enumerate(charbuf)]
+		charbuf_final = "\n".join(charbufrows_token_debug)
+		print(charbuf_final)
+
 	def print_input_map(self, token, rec_depth=0, parent_bounds=None):
 		w = gdb.parameter("width")
 		rows = gdb.parameter("height")
@@ -477,7 +507,7 @@ class TopLevelParse:
 		encoded_inputfragment = inputfragment.replace("\n", "<0A>").replace("\r", "<0D>").replace(" ", "<20>").replace("\t", "<09>")
 		if rec_depth == 0:
 			print(encoded_inputfragment)
-			self.print_input_chunk([], rows, start, start+min(w, end-start))
+			self.print_input_chunk(token, rows, start, start+min(w, end-start))
 		#if parent_bounds:
 		#	lead = [" "] * (start-parent_bounds[0])
 		#	span = ["X"] * (end-start)
-- 
GitLab