From 8ca8a590d91feeeb7e3498424895405b5713ac1d Mon Sep 17 00:00:00 2001
From: Kia <kia@special-circumstanc.es>
Date: Sun, 8 Nov 2020 17:11:38 -0700
Subject: [PATCH] rule number & state number normalization working

---
 bison_xml_file_ingest.py | 20 ++++++++------------
 1 file changed, 8 insertions(+), 12 deletions(-)

diff --git a/bison_xml_file_ingest.py b/bison_xml_file_ingest.py
index d3b4053..288ca3e 100644
--- a/bison_xml_file_ingest.py
+++ b/bison_xml_file_ingest.py
@@ -128,7 +128,7 @@ for child in root.findall("./automaton/*"):
     #print("len ", len(child), "tag", child.tag, "text", child.text, "attrib", child.attrib)
 
     for x in child.findall("itemset/"):
-        rule_number = int(x.attrib["rule-number"])
+        rule_number = rule_original_to_monotone[int(x.attrib["rule-number"])]
         dot_position = int(x.attrib["dot"])
         itemset.append((rule_number,dot_position))
 
@@ -140,30 +140,26 @@ for child in root.findall("./automaton/*"):
         if (rule == "accept"):
            rule = acceptrule_number
         else:
-            rule = int(rule)
+            rule = rule_original_to_monotone[int(rule)]
         reduces.append((symbol, rule))
 
     for x in child.findall("actions/transitions/transition"):
         symbol = symbol_to_integer(x.attrib["symbol"])
         type_of_txn = x.attrib["type"]
-        state = int(x.attrib["state"])
+        state = state_original_to_monotone[int(x.attrib["state"])]
         transitions.append((type_of_txn,symbol,state))
 
-    list_of_automaton_states[int(state_number)] = (itemset, transitions, reduces)
+    list_of_automaton_states[state_original_to_monotone[int(state_number)]] = (itemset, transitions, reduces)
 
-renormalized_rule_number = 0
-translation_table = {}
-reverse_translation_table = {}
-for rule_number, x in list_of_automaton_states.items():
-    print("rule number", rule_number, "ITEM SET", x[0],"\n", "TRANSITIONS", x[1],"\n", "REDUCES", x[2],"\n")
-    translation_table[rule_number] = renormalized_rule_number
-    reverse_translation_table[renormalized_rule_number] = rule_number
-    renormalized_rule_number += 1
+
+for state_number, x in list_of_automaton_states.items():
+    print("state number", state_number, "ITEM SET", x[0],"\n", "TRANSITIONS", x[1],"\n", "REDUCES", x[2],"\n")
+    # we normalize rule numbers so we can use our convention elsewhere of there not being gaps, starting at 0 etc
-- 
GitLab
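
Note on the renumbering maps: the replacement code relies on two lookup dicts, rule_original_to_monotone and state_original_to_monotone, which this patch assumes are built elsewhere in bison_xml_file_ingest.py (they are not part of these hunks). Below is a minimal sketch, not code from this repository, of one way such gap-free, zero-based renumbering tables could be built from Bison's --xml report; the file path, element paths, and attribute names are illustrative assumptions and may need adjusting to match the real script.

# Hypothetical sketch (assumed helper, not the repository's actual code):
# build gap-free, zero-based renumbering tables for Bison's original rule
# and state numbers, preserving their order so the mapping is monotone.
import xml.etree.ElementTree as ET

def build_monotone_map(original_numbers):
    # Sort the distinct original numbers and assign consecutive indices 0, 1, 2, ...
    return {orig: new for new, orig in enumerate(sorted(set(original_numbers)))}

# "grammar.xml" and the element/attribute paths below are assumptions about
# the layout of the report produced by `bison --xml`.
root = ET.parse("grammar.xml").getroot()
rule_numbers = [int(r.attrib["number"]) for r in root.findall("./grammar/rules/rule")]
state_numbers = [int(s.attrib["number"]) for s in root.findall("./automaton/state")]

rule_original_to_monotone = build_monotone_map(rule_numbers)
state_original_to_monotone = build_monotone_map(state_numbers)

With maps built this way, every rule and state number stored in list_of_automaton_states is contiguous and starts at 0, which is the same convention the deleted translation_table / reverse_translation_table code was providing after the fact.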