diff --git a/gdb-port/top-level-parse.py b/gdb-port/top-level-parse.py
index ba2da6127d6917abd399a28d0aa91313a6bb3e26..cbd200638b22967caf1eebe3af113849c7a62f36 100644
--- a/gdb-port/top-level-parse.py
+++ b/gdb-port/top-level-parse.py
@@ -160,34 +160,7 @@ class TopLevelParse:
 	def begin_new_token(self, index):
 		token_bounds = (index, None)
 
-		if len(self.input_index_tree_path) == 1 and self.current_parse_depth == 0:
-			node = self.input_stream_indices.select_by_path([0])
-			try:
-				node.begin_token(index)
-			except ValueError:
-				self.input_stream_indices.root = None
-				node = IndexTreeNode(None)
-				node.begin_token(index)
-				self.input_stream_indices.root = node
-			#node.add_child_node(token_bounds)
-			#self.input_index_tree_path.append(0)
-		elif len(self.input_index_tree_path) == self.current_parse_depth+1: # We returned to the parent node, and now we're applying
-		# the next parser in a seq -- increment last index in the path
-			node = self.input_stream_indices.select_by_path(self.input_index_tree_path[0:self.current_parse_depth]) # Select the parent of our new node
-			node.add_child_node(token_bounds)
-			self.input_index_tree_path[self.current_parse_depth] += 1 # Increment the last index in the path to the current node being constructed
-		elif (self.current_parse_depth+1) -  len(self.input_index_tree_path) == 1:
-			node = self.input_stream_indices.select_by_path(self.input_index_tree_path[0:self.current_parse_depth])
-			node.add_child_node(token_bounds)
-			self.input_index_tree_path.append(0)
-		else:
-			#print("len(self.input_index_tree_path):", len(self.input_index_tree_path), "self.current_parse_depth:", self.current_parse_depth)
-			#gdb.execute("tbreak")
-			print("parse depth + 1:", self.current_parse_depth+1, "tree path length:", len(self.input_index_tree_path))
-			print("Unexpected node path length at parser:", str(self.peek_parser()))
-			print("index tree stack", self.index_tree_stack)
-			raise RuntimeError("Unexpected node path length")
-
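+		# Pad starting_input_index with None entries so it can be indexed at the current parse depth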
 		diff = (self.current_parse_depth+1) - len(self.starting_input_index)
 		newindices = [None] * diff
 		self.starting_input_index.extend(newindices)
@@ -211,15 +184,8 @@
 		#print("DEBUG: path:", self.input_index_tree_path)
 		#print("DEBUG: path (truncated):", self.input_index_tree_path[0:self.current_parse_depth+1])
 		#token = self.input_stream_indices[self.current_parse_depth][-1]
-		node = self.input_stream_indices.select_by_path(self.input_index_tree_path[0:self.current_parse_depth+1])
-		token = node.indices
-		if token[1] is not None:
-			raise RuntimeError("Attempted to determine token bounds twice:", token, "new end:", index)
-		#finished_token = (token[0], index)
-		#self.input_stream_indices[self.current_parse_depth][-1] = finished_token
-		node.finish_token(index)
-		#self.input_index_tree_path[self.current_parse_depth] += 1
-		del self.input_index_tree_path[self.current_parse_depth+1:]
+
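+		# The parse result's ast field tells us whether this parse actually produced a token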
 		token_addr = hparseresult_addr.cast(gdb.lookup_type("HParseResult").pointer())['ast']
 		if token_addr:
 			#print(HParseResult(hparseresult_addr).str_no_deref())
@@ -235,30 +201,14 @@
 
 	# Discard a single item in the "outer" list, which is a list of tokens at that depth in the call graph (one if the parser in the stack is not a sequence, multiple if it is)
 	def discard_pending_tokens(self):
+		# TODO: clean out unfinished tokens at the end
 		#print("DEBUG: discard_pending_tokens()")
 		#print("DEBUG: current parse depth:", self.current_parse_depth)
 		#print("DEBUG: path:", self.input_index_tree_path)
 		#print("DEBUG: tokens:", self.input_stream_indices)
 		#tokens = self.input_stream_indices.pop()
-		node = self.input_stream_indices.select_by_path(self.input_index_tree_path[0:self.current_parse_depth+1])
-		if type(node) is IndexTree:
-			node = node.root
-		#self.input_index_tree_path.pop()
-		del self.input_index_tree_path[self.current_parse_depth+1:]
-		#print("DEBUG: path after discard:", self.input_index_tree_path)
-		if node.indices[1] is not None:
-			raise RuntimeError("DEBUG: failed parse but token still has bounds:", node.indices[1])
-		node.delete_all_children()
-		parent = self.input_stream_indices.select_by_path(self.input_index_tree_path[0:self.current_parse_depth])
-		if type(parent) is IndexTreeNode:
-			parent.delete_last_child()
-		else:
-			parent.root = IndexTreeNode(None)
-		if self.input_index_tree_path[self.current_parse_depth] > 0:
-			self.input_index_tree_path[self.current_parse_depth] -= 1
-		else:
-			del self.input_index_tree_path[self.current_parse_depth:]
-		self.starting_input_index[self.current_parse_depth] = None
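+		# Nothing to discard for now; see the TODO above about cleaning out unfinished tokens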
+		return
 
 	def peek_parserstack(self):
 		try:
@@ -385,7 +335,7 @@
 	# TODO: get_avg_mem_use_all_arenas, get_total_mem_use
 
 	def print_input_chunk(self, token, token_list, rows, start, end):
-		#print("print_input_chunk(): start: ", start, "end: ", end)
+		#print("print_input_chunk(): start: ", start, "end: ", end) # DEBUG
 		w = end-start
 		if w == 0:
 			print("all tokens passed to display are zero-width")
@@ -505,6 +455,8 @@
 		print(encoded_inputfragment)
 		self.print_input_chunk(token, printing_tokens, rows, start + w * screens, (start + w * screens) + min(w, end-start))
 		self.tokens_from_previous_chunk = []
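+		# Reset the recorded per-depth starting indices along with the displayed-token list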
+		self.starting_input_index = []
 
 		#if rec_depth == 0:
 		#	print(encoded_inputfragment)