comparison anagram/agcore/p.cpp @ 0:13d2b8934445

Import AnaGram (near-)release tree into Mercurial.

author    David A. Holland
date      Sat, 22 Dec 2007 17:52:45 -0500
parents
children
/*
 * AnaGram, A System for Syntax Directed Programming
 * Copyright 1993-2002 Parsifal Software. All Rights Reserved.
 * See the file COPYING for license and usage terms.
 *
 * p.cpp
 */

#include "agdict.h"
#include "assert.h"
#include "bpe3.h"
#include "binsort.h"
#include "config.h"
#include "cs.h"
#include "data.h"
#include "dict.h"
#include "error.h"
#include "lexeme.h"
#include "p.h"
#include "pgg24-defs.h"
#include "q1a.h"
#include "q1glbl.h"
#include "rule.h"
#include "stacks.h"
#include "tsd.h"
#include "token.h"

//#define INCLUDE_LOGGING
#include "log.h"


AgString etr_file_name;
AgString infile_name("");
int parse_abort_flag = 0;
AgString simple_file_name;
int syntax_error_flag = 0;
AgBalancedTree<int> valueToken;

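/*
 * Report the overall results of the analysis. The nt fields appear to
 * be entry counts: a non-empty conflict, precedence-resolution, or
 * keyword-anomaly table produces the corresponding diagnostic, pointing
 * the user at the relevant results window.
 */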
static void open_result_windows(void) {
  LOGSECTION("open_result_windows");
  LOGV(unres_con->nt) LCV(prr->nt) LCV(key_mess->nt);
  if (unres_con->nt) {
    log_error("Grammar is ambiguous. See Conflicts Window");
  }
  if (prr->nt) {
    log_error("Conflicts resolved by precedence rules");
  }
  if (key_mess->nt) {
    log_error("Keyword anomalies found");
  }
}

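/*
 * Consistency checks on semantic values. The first loop appears to flag
 * rules that have no reduction procedure but whose value type does not
 * match that of their first element; the second loop reports parameters
 * of void type and flags rules that bind C variables without a reduction
 * procedure. Everything collected in badRules is reported as a
 * "Missing reduction procedure" error.
 */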
static void checkValueTokens(void) {
  LOGSECTION("checkValueTokens");
  int n = valueToken.size();
  LOGV(n);
  int k;
  AgBalancedTree<int> badRules;
  for (k = 0; k < n; k++) {
    Token token = valueToken[k];
    int nRules = token->ruleList.size();
    LOGV(token) LCV(nRules) LCV(token->value_type);
    int i;
    for (i = 0; i < nRules; i++) {
      Rule rule = token.rule(i);
      if (rule->proc_name || rule->length() == 0) {
        continue;
      }
      LOGV(rule) LCV(rule->proc_name) LCV(rule->length())
        LCV(rule->length() ? rule.token(0)->value_type : 0);
      LOGV(token->value_type) LCV(rule.token(0)->value_type);

      if (token->value_type == rule.token(0)->value_type) {
        continue;
      }
      LOGS("Bad rule") LV(rule);
      badRules.insert((int)rule);
    }
  }

  for (Each<Rule> r; r.loopNotFinished(); r.getNext()) {
    //if (r->proc_name) continue;
    n = r->elementList.size();
    int j;
    int variableCount = 0;
    for (j = 0; j < n; j++) {
      RuleElement element = r->elementList[j];
      if (element.cVariable) {
        variableCount++;
        if (r->proc_name == 0) {
          continue;
        }
        if ((int)element.token->value_type == void_token_type) {
          ssprintf("Parameter %s has type void",
                   cVariableList[element.cVariable].pointer());
          log_error(r->line, r->col);
        }
      }
      if (element.token->immediate_action) {
        variableCount = 0;
      }
    }
    if (r->proc_name == 0 && variableCount) {
      badRules.insert((int) r);
    }
  }
  n = badRules.size();
  for (k = 0; k < n; k++) {
    char buf[100];
    Rule r = badRules[k];
    sprintf(buf, "Missing reduction procedure, R%03d", (int) r);
    errorList.push(Error(r->line, r->col, buf));
  }
}

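/*
 * Driver for the analysis proper: q1() appears to perform the state
 * construction (nstates is taken from nits on return), after which the
 * parse table is built and the checks above are run.
 */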
void new_syntax_analyzer(void) {
  LOGSECTION("new_syntax_analyzer");
  q1();
  LOGS("Returned from q1");
  nstates = nits;
  build_parse_table();
  LOGS("Parse table built");
  checkValueTokens();
  LOGS("Tokens checked");
  open_result_windows();
  LOGS("Result windows open");
  syntax_state = syntax_analyzed;
}

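/*
 * Parse the syntax file and derive the working grammar tables. After
 * parse() runs, tokens with no explicit value type are given a default,
 * the helper passes (set_universe, check_key_reserve, build_sets,
 * set_lexemes) are invoked, rule zero (apparently the start rule) is
 * set up, and summarize_grammar() finishes the job.
 */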
void scan_input(void) {
  LOGSECTION("scan_input");
  unsigned i, *l;

  Rule ruleZero = Rule::create();
  ruleZero->prim_tkn = Token(0);

  prod_dict = null_list_dict();
  LOGS("ready to parse");
  parse();
  LOGS("parse complete");

  prod_dict = delete_list_dict(prod_dict);
  nprods = bnf_table->nt;
  if (!(syntax_error_flag || parse_abort_flag)) {
    if (nprods == 0) {
      log_error("No productions in syntax file");
      errorList.top().setFatal();
    }
  }
  if (syntax_error_flag ||
      parse_abort_flag ||
      nprods == 0) {
    syntax_state = syntax_error;
    return;
  }
  syntax_state = syntax_parsed;
  if (default_input_type == 0) {
    default_input_type = int_token_type;
  }
  for (Each<Token> token; token.loopNotFinished(); token.getNext()) {
    if (token->value_type) {
      continue;
    }
    token->value_type = (!token->non_terminal_flag || token->token_set_id) ?
      default_input_type : default_token_type;
  }
#ifdef INCLUDE_LOGGING
  if (174 < Token::count()) {
    Token token174 = Token(174);
    LOGV(token174->immediate_action) LCV(token174->ruleList.size());
  }
#endif
  LOGS("call set_universe");
  set_universe();
  LOGS("call check_key_reserve");
  check_key_reserve();
  LOGS("call build_sets");
  build_sets();
  LOGS("call set_lexemes");
  set_lexemes();
  if (grammar_token == 0 && !(syntax_error_flag || parse_abort_flag)) {
    log_error("No grammar token specified");
    errorList.top().setFatal();
  }
  nprods = bnf_table->nt;

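  /*
   * ibnf_table appears to be the inverse of bnf_table: each pair is
   * re-entered with its two fields swapped, and the two tables are then
   * sorted on complementary columns so productions can be looked up
   * from either side.
   */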
  ibnf_table = spec_tsd(nprods,2);
  l = (unsigned *) bnf_table->sb;
  for (i = 0; i < nprods; i++,l += 2) {
    at(ibnf_table, l[1], l[0]);
  }

  LOGS("Sort bnf tables");
  LOGV(nprods);
  sort_tuples(bnf_table, 2);
  sort_tuples(ibnf_table, 1);

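  /*
   * Rule zero appears to serve as the synthetic start rule: its
   * right-hand side is the grammar token, preceded by the disregard
   * token when one is in effect, and the grammar token is registered as
   * a value token unless its value type is void.
   */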
  AgStack<RuleElement> elementStack;
  if (disregard_token) {
    elementStack.push(RuleElement(disregard_token,0));
  }
  elementStack.push(RuleElement(grammar_token, 0));

  if ((int)map_token_number[grammar_token].value_type != void_token_type) {
    valueToken.insert(grammar_token);
  }

  LOGS("Set up ruleZero->elementList");
  ruleZero->elementList = AgArray<RuleElement>(elementStack);
  summarize_grammar();
  bnf_table = delete_tsd(bnf_table);
  no_assertions = 0;
}