view anagram/agcore/p.cpp @ 12:aab9ff6af791
Strengthen the build hack for non-DOS targets.
author | David A. Holland |
---|---|
date | Tue, 31 May 2022 00:58:42 -0400 |
parents | 13d2b8934445 |
children | |
line source
/*
 * AnaGram, A System for Syntax Directed Programming
 * Copyright 1993-2002 Parsifal Software. All Rights Reserved.
 * See the file COPYING for license and usage terms.
 *
 * p.cpp
 */

#include "agdict.h"
#include "assert.h"
#include "bpe3.h"
#include "binsort.h"
#include "config.h"
#include "cs.h"
#include "data.h"
#include "dict.h"
#include "error.h"
#include "lexeme.h"
#include "p.h"
#include "pgg24-defs.h"
#include "q1a.h"
#include "q1glbl.h"
#include "rule.h"
#include "stacks.h"
#include "tsd.h"
#include "token.h"

//#define INCLUDE_LOGGING
#include "log.h"

AgString etr_file_name;
AgString infile_name("");
int parse_abort_flag = 0;
AgString simple_file_name;
int syntax_error_flag = 0;

AgBalancedTree<int> valueToken;


static void open_result_windows(void) {
  LOGSECTION("open_result_windows");
  LOGV(unres_con->nt) LCV(prr->nt) LCV(key_mess->nt);
  if (unres_con->nt) {
    log_error("Grammar is ambiguous. See Conflicts Window");
  }
  if (prr->nt) {
    log_error("Conflicts resolved by precedence rules");
  }
  if (key_mess->nt) {
    log_error("Keyword anomalies found");
  }
}

static void checkValueTokens(void) {
  LOGSECTION("checkValueTokens");
  int n = valueToken.size();
  LOGV(n);
  int k;
  AgBalancedTree<int> badRules;
  for (k = 0; k < n; k++) {
    Token token = valueToken[k];
    int nRules = token->ruleList.size();
    LOGV(token) LCV(nRules) LCV(token->value_type);
    int i;
    for (i = 0; i < nRules; i++) {
      Rule rule = token.rule(i);
      if (rule->proc_name || rule->length() == 0) {
        continue;
      }
      LOGV(rule) LCV(rule->proc_name) LCV(rule->length())
        LCV(rule->length() ? rule.token(0)->value_type : 0);
      LOGV(token->value_type) LCV(rule.token(0)->value_type);
      if (token->value_type == rule.token(0)->value_type) {
        continue;
      }
      LOGS("Bad rule") LV(rule);
      badRules.insert((int)rule);
    }
  }
  for (Each<Rule> r; r.loopNotFinished(); r.getNext()) {
    //if (r->proc_name) continue;
    n = r->elementList.size();
    int j;
    int variableCount = 0;
    for (j = 0; j < n; j++) {
      RuleElement element = r->elementList[j];
      if (element.cVariable) {
        variableCount++;
        if (r->proc_name == 0) {
          continue;
        }
        if ((int)element.token->value_type == void_token_type) {
          ssprintf("Parameter %s has type void",
                   cVariableList[element.cVariable].pointer());
          log_error(r->line, r->col);
        }
      }
      if (element.token->immediate_action) {
        variableCount = 0;
      }
    }
    if (r->proc_name == 0 && variableCount) {
      badRules.insert((int) r);
    }
  }
  n = badRules.size();
  for (k = 0; k < n; k++) {
    char buf[100];
    Rule r = badRules[k];
    sprintf(buf, "Missing reduction procedure, R%03d", (int) r);
    errorList.push(Error(r->line, r->col, buf));
  }
}

void new_syntax_analyzer(void) {
  LOGSECTION("new_syntax_analyzer");
  q1();
  LOGS("Returned from q1");
  nstates = nits;
  build_parse_table();
  LOGS("Parse table built");
  checkValueTokens();
  LOGS("Tokens checked");
  open_result_windows();
  LOGS("Result windows open");
  syntax_state = syntax_analyzed;
}

void scan_input(void) {
  LOGSECTION("scan_input");
  unsigned i, *l;
  Rule ruleZero = Rule::create();
  ruleZero->prim_tkn = Token(0);
  prod_dict = null_list_dict();
  LOGS("ready to parse");
  parse();
  LOGS("parse complete");
  prod_dict = delete_list_dict(prod_dict);
  nprods = bnf_table->nt;
  if (!(syntax_error_flag || parse_abort_flag)) {
    if (nprods == 0) {
      log_error("No productions in syntax file");
      errorList.top().setFatal();
    }
  }
  if (syntax_error_flag || parse_abort_flag || nprods == 0) {
    syntax_state = syntax_error;
    return;
  }
  syntax_state = syntax_parsed;
  if (default_input_type == 0) {
    default_input_type = int_token_type;
  }
  for (Each<Token> token; token.loopNotFinished(); token.getNext()) {
    if (token->value_type) {
      continue;
    }
    token->value_type = (!token->non_terminal_flag || token->token_set_id)
      ? default_input_type : default_token_type;
  }
#ifdef INCLUDE_LOGGING
  if (174 < Token::count()) {
    Token token174 = Token(174);
    LOGV(token174->immediate_action) LCV(token174->ruleList.size());
  }
#endif
  LOGS("call set_universe");
  set_universe();
  LOGS("call check_key_reserve");
  check_key_reserve();
  LOGS("call build_sets");
  build_sets();
  LOGS("call set_lexemes");
  set_lexemes();
  if (grammar_token == 0 && !(syntax_error_flag || parse_abort_flag)) {
    log_error("No grammar token specified");
    errorList.top().setFatal();
  }
  nprods = bnf_table->nt;
  ibnf_table = spec_tsd(nprods, 2);
  l = (unsigned *) bnf_table->sb;
  for (i = 0; i < nprods; i++, l += 2) {
    at(ibnf_table, l[1], l[0]);
  }
  LOGS("Sort bnf tables");
  LOGV(nprods);
  sort_tuples(bnf_table, 2);
  sort_tuples(ibnf_table, 1);
  AgStack<RuleElement> elementStack;
  if (disregard_token) {
    elementStack.push(RuleElement(disregard_token, 0));
  }
  elementStack.push(RuleElement(grammar_token, 0));
  if ((int)map_token_number[grammar_token].value_type != void_token_type) {
    valueToken.insert(grammar_token);
  }
  LOGS("Set up ruleZero->elementList");
  ruleZero->elementList = AgArray<RuleElement>(elementStack);
  summarize_grammar();
  bnf_table = delete_tsd(bnf_table);
  no_assertions = 0;
}