import os
from BisonGen import Preprocess
from BisonGen.Grammar import Output
import Actions, Scanner


class Parser(Output.OutputBase):

    def generate(self, outfile, parser):
        self.output(outfile)
        self.output_parser(outfile)
        # Parse tree action routines
        Actions.Generate(outfile, parser)
        # Token scanner
        Scanner.Generate(outfile, parser)
        outfile.write("if __name__ == '__main__':\n")
        outfile.write("    import sys\n")
        outfile.write("    try: import readline\n")
        outfile.write("    except: pass\n")
        outfile.write("\n")
        outfile.write("    try:\n")
        outfile.write("        import %sc\n" % parser.name)
        outfile.write("        parser = %sc.new(1)\n" % parser.name)
        outfile.write("        print 'Using C parser'\n")
        outfile.write("    except:\n")
        outfile.write("        import %s\n" % parser.name)
        outfile.write("        parser = %s.new(1)\n" % parser.name)
        outfile.write("        print 'Using Python parser'\n")
        outfile.write("\n")
        outfile.write("    if len(sys.argv) > 1:\n")
        outfile.write("        result = parser.parse(sys.argv[1])\n")
        outfile.write("        result.pprint()\n")
        outfile.write("        raise SystemExit()\n")
        outfile.write("    print 'Use ^C to exit.'\n")
        outfile.write("    try:\n")
        outfile.write("        while 1:\n")
        outfile.write("            expr = raw_input('>>>')\n")
        outfile.write("            result = parser.parse(expr)\n")
        outfile.write("            result.pprint()\n")
        outfile.write("    except KeyboardInterrupt:\n")
        outfile.write("        raise SystemExit\n")
        return

    def render_table(self, write, name, table, render=repr):
        write('%s = [' % name)
        indent = ' '*(len(name) + 4)
        for item in table:
            write('%s,\n%s' % (render(item), indent))
        write(']\n')
        return

    def output_number_table(self, write, name, table):
        write('%s = [' % name)
        width = len(name) + 4
        for item in table[:-1]:
            if width > 70:
                write('\n')
                width = 0
            num = '%d, ' % item
            write(num)
            width = width + len(num)
        if width > 70:
            write('\n')
        write('%d]\n' % table[-1])
        return

    def output_token_defines(self, write):
        write('\n# token definitions\n')
        named_tokens = map(None, self.tags, self.user_toknums)
        # Start of user specified tokens
        user_tokens = self.user_toknums[self.error_token_number]
        for (name, token) in named_tokens:
            if token > user_tokens and name[0] != '"':
                write('%s = %d\n' % (name, token))
        return

    def output_token_translations(self, write):
        write('\n# vector mapping lexer token numbers into internal token numbers\n')
        self.output_number_table(write, 'token_translations',
                                 self.token_translations)
        write('\nYYTRANSLATE = lambda x: x > %d and %d or token_translations[x]\n'
              % (self.max_user_token, self.nsyms))
        return

    def output_gram(self, write):
        rhs_tokens = [None, []]
        # ritem ends with a rule followed by 0, safe to ignore these
        for item in self.ritem[:-2]:
            if item > 0:
                rhs_tokens[-1].append(item)
            else:
                # The goto rule; skip and start a new list
                rhs_tokens.append([])
        write('\n# vector of items of all rules.\n')
        self.render_table(write, 'rhs_tokens', rhs_tokens)
        return

    def output_rule_data(self, write):
        repr_rule_info = lambda item: '"%s: line %d"' % item

        write('\n# vector of line numbers and filename of all rules\n')
        self.render_table(write, 'rule_info', self.rline, repr_rule_info)

        def repr_token_names(item):
            item = str(item)
            if item[0] == item[-1] and item[0] in ['"', "'"]:
                item = item[1:-1]
            return repr(item)

        write('\n# vector of string-names indexed by token number\n')
        self.render_table(write, 'token_names', self.tags, repr_token_names)

        write('\n# symbol number of symbol that rule derives.\n')
        self.output_number_table(write, 'derives', self.rlhs)

        # Calculate the length of the rhs for each rule
        rhs_size = [0]
        for i in range(1, self.nrules):
            rhs_size.append(self.rrhs[i+1] - self.rrhs[i] - 1)
        rhs_size.append(self.nitems - self.rrhs[self.nrules] - 1)

        write('\n# number of symbols composing right hand side of rule.\n')
        self.output_number_table(write, 'rhs_size', rhs_size)
        return

    def token_actions(self, write):
        """Figure out the actions for the specified state, indexed by
        lookahead token type.
        """
        actions = map(lambda s, f=self.action_row: f(s), range(self.nstates))
        write('\n# default rule to reduce with in state. 0 means the default is an error.')
        write('\n# indexed by state number\n')
        self.output_number_table(write, 'default_action', actions)
        return

    def goto_actions(self, write):
        """Figure out what to do after reducing with each rule, depending
        on the saved state from before the beginning of parsing the data
        that matched this rule.
        """
        gotos = map(lambda t, f=self.default_goto: f(t),
                    range(self.ntokens, self.nsyms))
        write('\n# default state to go to after a reduction of a rule.\n')
        write('# indexed by variable number (lhs token)\n')
        self.output_number_table(write, 'default_goto', gotos)
        return

    # The following functions output yytable, yycheck and the
    # vectors whose elements index the portion starts.

    def output_base(self, write):
        write('\n# index in yytable of the portion describing state (indexed by state number)\n')
        write('# If the value in yytable is positive, we shift the token and go to that state.\n')
        write('# If the value is negative, it is minus a rule number to reduce by.\n')
        write('# If the value is zero, the default action from yydefact[s] is used.\n')
        self.output_number_table(write, 'action_idx', self.base[:self.nstates])

        write('\n# the index in yytable of the portion describing what to do after reducing a rule.\n')
        write('# The value from yytable is the state to go to.\n')
        self.output_number_table(write, 'goto_idx', self.base[self.nstates:])
        del self.base
        return

    def output_table(self, write):
        write('\n# a vector filled with portions for different uses. (using action_idx and goto_idx)\n')
        self.output_number_table(write, 'yytable', self.table[:self.high+1])
        del self.table
        return

    def output_check(self, write):
        write('\n# a vector indexed in parallel with yytable.\n')
        write('# It indicates the bounds of the portion you are trying to examine.\n')
        self.output_number_table(write, 'yycheck', self.check[:self.high+1])
        del self.check
        return

    def output_defines(self, write):
        write("\nYYLAST = %d" % self.high)
        write("\nYYFINAL = %d" % self.final_state)
        write("\nYYFLAG = %d" % Output.MINSHORT)
        write("\nYYNTBASE = %d\n" % self.ntokens)
        return

    def output_parser(self, outfile):
        dirname = os.path.dirname(__file__)
        filename = os.path.join(dirname, 'Simple.py')
        src = Preprocess.Process(filename, prefix='@', defines=self.options)
        outfile.write(src)
        return
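

# ---------------------------------------------------------------------------
# Rough, self-contained sketch (not part of BisonGen itself): exercise the two
# table emitters above on toy data to show the text they write into a
# generated parser module.  The `_Demo` subclass and the sample tables are
# hypothetical stand-ins so the methods can be called without the full
# Output.OutputBase setup that BisonGen normally performs.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import StringIO

    class _Demo(Parser):
        # Skip OutputBase initialization; only the emitters are needed here.
        def __init__(self):
            pass

    buf = StringIO.StringIO()
    demo = _Demo()
    # Writes a single wrapped line, e.g.:  token_translations = [0, 2, 2, 1]
    demo.output_number_table(buf.write, 'token_translations', [0, 2, 2, 1])
    # Writes one element per line, indented to line up under the first element.
    demo.render_table(buf.write, 'token_names', ['EOF', 'error', '"+"'])
    print buf.getvalue()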