anagram/agcore/q1a.cpp @ 0:13d2b8934445

Import AnaGram (near-)release tree into Mercurial.
author David A. Holland
date Sat, 22 Dec 2007 17:52:45 -0500
1 /*
2 * AnaGram, A System for Syntax Directed Programming
3 * Copyright 1993-1999 Parsifal Software. All Rights Reserved.
4 * See the file COPYING for license and usage terms.
5 *
6 * q1a.cpp
7 */
8
9 #include "agarray.h"
10 #include "arrays.h"
11 #include "assert.h"
12 #include "binsort.h"
13 #include "bitmap.h"
14 #include "config.h"
15 #include "cs.h"
16 #include "dict.h"
17 #include "error.h"
18 #include "keyword.h"
19 #include "lexeme.h"
20 #include "myalloc.h"
21 #include "q1a.h"
22 #include "q1glbl.h"
23 #include "q5.h"
24 #include "q8.h"
25 #include "rproc.h"
26 #include "rule.h"
27 #include "stacks.h"
28 #include "token.h"
29 #include "tsd.h"
30 #include "ut.h"
31
32 //#define INCLUDE_LOGGING
33 #include "log.h"
34
35
36 /*
37 The principal functions in Q1A are
38 . summarize_grammar()
39 . q1()
40
41 summarize_grammar() is an umbrella procedure that calls all of the
42 functions that build the tables describing a grammar.
43 summarize_grammar() is called after the input file has been
44 successfully parsed.
45
46 q1() is a wrapper function that calls lalr() and rlalr(), both in Q5.
47 lalr() generates the parser states and the goto table. rlalr() generates
48 reduction tokens and the reduce table.
49 */
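/*
The tables built here include, for each token, its rule list
(makeRuleLists), its terminal/nonterminal and zero-length
classification together with the token_perm ordering (s4/s5/s6),
its leading token list (s8/findLeadingTokens) and its expansion
rule list (findExpansionRules), and, for each rule, the inverted
bnf tables ibnfn/ibnfb/ibnfs (s3).
*/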
50
51 int n_states_est;
52
53 const char *badRecursionFlag = 0;
54
55 int token_name_length = 0;
56 int item_length = 0;
57
58 static unsigned libnfs;
59
60
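/*
findLeadingTokens(t) collects the terminal tokens that can appear
first in a string derived from t. It performs a worklist closure
over the rules of t and of every nonterminal reachable in leading
position, stepping past a rule element only when that element can
match the empty string (zero_length_flag).
*/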
61 static AgArray<Token> findLeadingTokens(Token t) {
62 LOGSECTION("findLeadingTokens");
63 if (!t->non_terminal_flag) {
64 return AgArray<Token>();
65 }
66 LOGV(t);
67 //LOGV(myallocs) LCV(myfrees) LV(myallocBytes) LCV(myfreeBytes);
68 Bitmap bitmap(Token::count());
69 AgStack<Token> terminalStack;
70 AgStack<Token> nonTerminalStack;
71 bitmap.setBit(t);
72 nonTerminalStack.push(t);
73 for (unsigned i = 0; i < nonTerminalStack.size(); i++) {
74 Token token = nonTerminalStack[i];
75 int nRules = token->ruleList.size();
76 for (int j = 0; j < nRules; j++) {
77 Rule rule = token.rule(j);
78 RuleDescriptor &ruleDescriptor(rule);
79 //int length = rule->length();
80 int length = ruleDescriptor.length();
81 for (int k = 0; k < length; k++) {
82 //Token ruleToken = rule.token(k);
83 Token ruleToken = ruleDescriptor.token(k);
84 token_number_map &ruleTokenDescriptor(ruleToken);
85 if (!bitmap.setBit(ruleToken)) {
86 if (ruleTokenDescriptor.non_terminal_flag) {
87 nonTerminalStack.push(ruleToken);
88 }
89 else {
90 terminalStack.push(ruleToken);
91 }
92 }
93 if (ruleTokenDescriptor.zero_length_flag) {
94 continue;
95 }
96 break;
97 }
98 }
99 }
100 return terminalStack;
101 }
102
103
104 static void s8(Token token) { /* build list of leading tokens */
105 LOGSECTION("s8");
106 LOGV(token);
107 token->leadingTokenList = findLeadingTokens(token);
108 if (token->leadingTokenList.size()) {
109 return;
110 }
111
112 // No terminal tokens for this token; is a diagnostic needed?
113 Bitmap map(Token::count());
114 while (1) {
115 if (token->ruleList.size() != 1) {
116 // Diagnose if more than one rule
117 break;
118 }
119 Rule rule = token.rule(0);
120 int lf = rule->length();
121 if (lf == 0) {
122 // No diagnostic for null production
123 return;
124 }
125 if (lf != 1) {
126 // Diagnose if not a shell production
127 break;
128 }
129 // shell production, get nested token
130 Token producedToken = rule.token(0);
131 if (producedToken->immediate_action) {
132 return;
133 }
134 if (map.setBit(producedToken)) {
135 // don't loop forever
136 break;
137 }
138 // must be a nonterminal, since the leading token count is zero
139 assert(producedToken->non_terminal_flag);
140 }
141 ssprintf("No terminal tokens in expansion of T%03d: ", (int) token);
142 atkn(token);
143 log_error();
144 }
145
146 /*
147 s7a() builds the list of (left) expansion rules for the token t.
148 The list is seeded with the rules for t itself; then, for each rule
149 on the list, the rules defining its leftmost token are added if they
150 are not already on the list. This process continues until every
151 rule on the list has been expanded.
152 */
153 /*
154 static void s7a(Token token) {
155 LOGSECTION("s7a");
156 int n = token->ruleList.size();
157
158 LOGV(token) LCV(n);
159 iws();
160 for (int i = 0; i < n; i++) {
161 aws(token.rule(i));
162 }
163 for (n = 0; n < tis(); n++) {
164 Rule rule(list_base[n]);
165 int nr;
166 LOGV(n) LCV(tis()) LCV(list_base[n]) LCV(nforms);
167 assert(list_base[n] <= nforms);
168 if (rule->length() == 0) continue;
169 Token token(rule->token(0));
170
171 if (!token->non_terminal_flag) continue;
172 nr = token->ruleList.size();
173 for (int i = 0; i < nr; i++) xws(token.rule(i));
174 }
175 token->expansionRuleList = makeArray(list_base, rws());
176 }
177 */
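/*
findExpansionRules() is the current implementation of the procedure
described above. It seeds a worklist with the token's own rules and
keeps appending the rules defining each leftmost nonterminal, using
a Bitmap rather than the iws()/xws() workspace to avoid entering a
rule twice.
*/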
178 static void findExpansionRules(Token t) {
179 LOGSECTION("findExpansionRules");
180 if (!t->non_terminal_flag) return;
181 LOGV(t);
182 Bitmap map(Rule::count());
183 AgStack<Rule> ruleStack;
184 int nRules = t->ruleList.size();
185 for (int j = 0; j < nRules; j++) {
186 Rule rule = t.rule(j);
187 if (map.setBit(rule)) {
188 continue;
189 }
190 ruleStack.push(rule);
191 }
192 for (unsigned i = 0; i < ruleStack.size(); i++) {
193 Rule rule = ruleStack[i];
194 int length = rule->length();
195 if (length == 0) {
196 continue;
197 }
198 Token token = rule.token(0);
199 /*
200 if (length == 1 && token == t) {
201 ssprintf("Circular definition of T%03d: ", (int) token);
202 atkn(token);
203 ssprintf(" in ");
204 append_item(rule, -1);
205 concat_string();
206 log_error(rule->line, rule->col);
207 badRecursionFlag = "circular definition";
208 }
209 */
210 if (!token->non_terminal_flag) {
211 continue;
212 }
213 int nRules = token->ruleList.size();
214 for (int j = 0; j < nRules; j++) {
215 Rule tokenRule = token.rule(j);
216 if (map.setBit(tokenRule)) {
217 continue;
218 }
219 ruleStack.push(tokenRule);
220 }
221 }
222 t->expansionRuleList = ruleStack;
223 }
224
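/*
s5() classifies a token on first visit: a token with no rules is
marked terminal, any other nonterminal. It makes a first pass at
setting zero_length_flag (the token can derive the empty string) by
recursing into the tokens of each rule, and appends the token to
token_perm so that, as far as the traversal order allows, a token
follows the tokens it depends on.
*/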
225 static void s5(Token token, char *sta) {
226 int n, k;
227
228 ok_ptr(sta);
229 if (sta[token]) return;
230 LOGSECTION("s5");
231 LOGV(token) LCV(token->ruleList.size()) LCV(token->immediate_action);
232 sta[token] = 1;
233 if (token->ruleList.size() == 0) {
234 token->non_terminal_flag = 0;
235 assert(perm_index <= ntkns);
236 token_perm[perm_index] = token;
237 perm_index++;
238 return;
239 }
240 token->non_terminal_flag = 1;
241 n = token->ruleList.size();
242 while (n--) {
243 if (token.rule(n)->length()) {
244 continue;
245 }
246 token->zero_length_flag = 1;
247 break;
248 }
249 n = token->ruleList.size();
250 for (int i = 0; i < n; i++) {
251 Rule rule = token.rule(i);
252 k = rule->length();
253 if (k == 0) continue;
254 int tokenIndex = 0;
255 while (k--) {
256 Token ruleToken = rule.token(tokenIndex++);
257 s5(ruleToken,sta);
258 if (ruleToken->zero_length_flag == 0) {
259 break;
260 }
261 }
262 if (k == -1) {
263 token->zero_length_flag = 1;
264 }
265 }
266 assert(perm_index <= ntkns);
267 token_perm[perm_index] = token;
268 perm_index++;
269 }
270
271
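/*
s6() revisits a token whose zero_length_flag is not yet set and sets
it if some rule turns out to consist entirely of zero-length tokens.
It returns a nonzero count when it sets the flag, so that s4() can
iterate to a fixed point.
*/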
272 static int s6(Token token, char *sta) {
273 LOGSECTION("s6");
274 int zc, n, k;
275
276 zc = 0;
277 ok_ptr(sta);
278 if (sta[token]) {
279 return zc;
280 }
281 if (token->zero_length_flag) {
282 return zc;
283 }
284 if (!token->non_terminal_flag) {
285 return zc;
286 }
287
288 sta[token] = 1;
289 n = token->ruleList.size();
290 for (int i = 0; i < n; i++) {
291 Rule rule = token.rule(i);
292 if ((k = rule->length()) == 0) {
293 continue;
294 }
295 int tokenIndex = 0;
296 while (k--) {
297 Token ruleToken = rule.token(tokenIndex++);
298 s6(ruleToken,sta);
299 if (ruleToken->zero_length_flag==0) {
300 break;
301 }
302 }
303 if (k == -1) {
304 token->zero_length_flag = 1;
305 zc++;
306 }
307 }
308 return zc;
309 }
310
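/*
s4() drives the zero-length analysis: one pass of s5() over every
token to classify it and seed the flags, then repeated passes of
s6() over token_perm until no new zero-length tokens are found.
*/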
311 static void s4(void) {
312 LOGSECTION("s4");
313 int n, t, zc;
314 char *sta;
315
316 perm_index = 0;
317 n = ntkns + 1;
318 sta = ZALLOCATE(n, char);
319 Each<Token> token;
320 for (token.restart(); token.loopNotFinished(); token.getNext()) {
321 s5(token, sta);
322 }
323 for (zc = 1; zc; ) {
324 zc = 0;
325 ok_ptr(sta);
326 memset(sta, 0, n*sizeof(*sta));
327 for (t = 0; t < n; t++) {
328 zc += s6(token_perm[t],sta);
329 }
330 }
331 DEALLOCATE(sta);
332 }
333
334 /*
335 s3() builds the inverted bnf table from the bnf table.
336 The ibnf table provides the mapping from rule to
337 reduction token list.
338 */
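/*
For a rule r, ibnfn[r] is the number of reduction tokens, ibnfb[r]
is the index of the first of them in ibnfs[], and
ibnfs[ibnfb[r]] through ibnfs[ibnfb[r] + ibnfn[r] - 1] are the
reduction tokens themselves.
*/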
339
340 static void s3(void) {
341 LOGSECTION("s3");
342 //unsigned t, f, n, cf;
343 unsigned n, cf;
344 unsigned sp;
345 int *p;
346 unsigned nt;
347
348 cf = 0;
349 sp = 0;
350 ibnfb[0] = sp;
351 n = 0;
352 p = ibnf_table->sb;
353 nt = ibnf_table->nt;
354 while (nt--) {
355 Rule rule = *p++;
356 Token token = *p++;
357 if (token->sem_det_flag) {
358 rule->fast_loop = 0;
359 }
360 if ((unsigned)rule != cf) {
361 ibnfn[cf] = (int) n;
362 ibnfb[rule] = sp;
363 cf = rule;
364 n = 0;
365 }
366 assert(sp < libnfs);
367 ibnfs[sp] = token;
368 sp++;
369 n++;
370 }
371 ibnfn[cf] = (int) n;
372 ibnf_table = delete_tsd(ibnf_table);
373 }
374
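/*
makeRuleLists() distributes the (token, rule) pairs in bnf_table
into per-token rule lists: a balanced tree per token accumulates
that token's rules, and each tree is then copied into the token's
ruleList array.
*/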
375 static void makeRuleLists(void) {
376 LOGSECTION("makeRuleLists");
377 AgArray< AgBalancedTree<int> > tree(Token::count());
378 int *bnf = bnf_table->sb;
379 int nbnf = bnf_table->nt;
380 while (nbnf--) {
381 Token token = *bnf++;
382 Rule rule = *bnf++;
383 tree[token].insert(rule);
384 }
385 Each<Token> token;
386 for (token.restart(); token.loopNotFinished(); token.getNext()) {
387 int n = tree[token].size();
388 if (n == 0) {
389 continue;
390 }
391 AgArray<Rule> list(n);
392 while (n--) {
393 list[n] = tree[token][n];
394 }
395 token->ruleList = list;
396 }
397 }
398
399 static int stack_size_arbitrary;
400 int stack_size;
401
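/*
find_token_length() returns, for a token, the maximum over its rules
of (element position + recursive length of that element), memoized
in tut[]. summarize_grammar() takes the largest such value as
stack_size; a recursive reference encountered past the first element
of a rule makes the bound arbitrary, recorded in
stack_size_arbitrary.
*/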
402 static int find_token_length(int tn) {
403 Token token = tn;
404 int nf;
405 int length = 0;
406 if (tut[token]) {
407 return tut[token];
408 }
409 ok_ptr(tut);
410 tut[token] = -1;
411 nf = token->ruleList.size();
412 for (int i = 0; i < nf; i++) {
413 Rule rule = token.rule(i);
414 int form_length = rule->length();
415 int fx = 0;
416 while (fx < form_length) {
417 int k = find_token_length(rule.token(fx));
418 if (k == -1) {
419 if (fx) {
420 stack_size_arbitrary = 1;
421 }
422 k = 0;
423 }
424 k += fx;
425 if (length < k) {
426 length = k;
427 }
428 fx++;
429 }
430 }
431 tut[tn] = length;
432 return length;
433 }
434
435 unsigned disregard_cont_rule;
436 unsigned disregard_skip_rule;
437
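/*
summarize_grammar() finishes the reduction procedure definitions
attached to rules, builds the token and rule tables described above,
diagnoses unused tokens, empty recursions and circular definitions,
and computes stack_size, n_states_est, token_name_length and
item_length.
*/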
438 void summarize_grammar(void) {
439 LOGSECTION("summarize_grammar");
440 unsigned n, t, tt; //, f;
441 unsigned k;
442
443 if (errorResynchDiagnostic) {
444 auto_resynch = 0;
445 }
446
447 LOGS("Scan rules to finish up intermediate actions");
448 Each<Rule> rule;
449 for (rule.restart(); rule.loopNotFinished(); rule.getNext()) {
450 LOGSECTION("Intermediate action cleanup");
451 LOGV(rule);
452 Procedure proc = rule->proc_name;
453 Token primaryToken = rule->prim_tkn;
454
455 if (primaryToken->sem_det_flag) {
456 rule->fast_loop = 0;
457 }
458 if (rule->immediate_proc) {
459 continue;
460 }
461 if (proc.isNotNull()) {
462 int type = primaryToken->value_type;
463 if (type == 0) {
464 type = primaryToken->value_type = default_token_type;
465 }
466 if (proc->cSegment.length() == 0) {
467 type = void_token_type;
468 }
469 proc->cast = type;
470 //finish_proc_def(proc, rule, rule->length(), type);
471 }
472 }
473 LOGS("Prod definitions finished");
474 ZALLOCATE_ST(token_perm, ntkns+1);
475 tut = ZALLOCATE(ntkns+1, int);
476
477 makeRuleLists();
478 s4(); /* identify zero length tokens */
479 LOGS("s4 finished");
480 for (rule.restart(); rule.loopNotFinished(); rule.getNext()) {
481 int i, length;
482
483 if (rule->immediate_proc) {
484 continue;
485 }
486 length = rule->length();
487 LOGV(rule) LCV(rule->length()) LCV(rule->elementList.size());
488 for (i = 0; i < length; i++) {
489 Token ruleToken = rule.token(i);
490 LOGV(ruleToken) LCV(ruleToken->immediate_action)
491 LCV(ruleToken->ruleList.size());
492 if (!ruleToken->immediate_action) {
493 continue;
494 }
495 Rule actionRule = ruleToken.rule(0);
496 LOGV(actionRule);
497 Procedure proc = actionRule->proc_name;
498 int type = default_token_type;
499 if (proc->cSegment.length() == 0) {
500 type = void_token_type;
501 }
502 proc->cast = type;
503 }
504 }
505 LOGS("Immediate actions finished");
506 if (disregard_token &&
507 map_token_number[disregard_token].non_terminal_flag) {
508 Token token = disregard_token;
509 int nf = token->ruleList.size();
510 for (int i = 0; i < nf; i++) {
511 Rule rule = token.rule(i);
512 if (rule->length() == 0) {
513 disregard_skip_rule = rule;
514 }
515 else {
516 disregard_cont_rule = rule;
517 }
518 }
519 }
520 LOGS("Disregard token analysis complete");
521
522 n = nforms + 1;
523 ibnfn = ZALLOCATE(n, int);
524 ALLOCATE_ST(ibnfb, n);
525 n = bnf_table->nt;
526 resize_tsd(bnf_table,n);
527 ALLOCATE_ST(ibnfs, n);
528 libnfs = n;
529 badRecursionFlag = 0;
530 for (t = 0; t++ < ntkns; ) {
531 tt = token_perm[t];
532 memset(tut, 0, sizeof(*tut) * (ntkns+1));
533 if (map_token_number[tt].non_terminal_flag) {
534 //checkRecursion(tt);
535 s8(tt); // build leading token list
536 }
537 }
538 memset(tut, 0, sizeof(*tut) * (ntkns+1));
539 stack_size = stack_size_arbitrary = 0;
540 Each<Token> token;
541 for (token.restart(); token.loopNotFinished(); token.getNext()) {
542 int length = find_token_length(token);
543
544 if (stack_size < length) {
545 stack_size = length;
546 }
547 Token permutedToken = token_perm[token];
548 if (!permutedToken->non_terminal_flag) {
549 continue;
550 }
551 //s7a(permutedToken);
552 findExpansionRules(permutedToken);
553 }
554 s3(); /* make inverted bnf tables */
555
556 n_states_est = -(int)nforms;
557 memset(tut, 0, sizeof(*tut) * (ntkns+1));
558 for (rule.restart(); rule.loopNotFinished(); rule.getNext()) {
559 int nr = ibnfn[rule];
560 int *rl = ibnfs + ibnfb[rule];
561 k = n = rule->length();
562 n_states_est += n;
563 //LOGV(rule) LCV(rule->length());
564 while (k--) {
565 int t = rule.token(k);
566 int j = 0;
567 while (j < nr) {
568 if (rl[j] != t) {
569 break;
570 }
571 else {
572 j++;
573 }
574 }
575 if (j != nr) {
576 tut[t] = 1;
577 }
578 }
579 k = n;
580 //LOGV(rule) LCV(rule->precedence_level);
581 if (rule->precedence_level == 0) {
582 while (k--) {
583 Token token = rule.token(k);
584 token_number_map &td = map_token_number[token];
585 if (td.non_terminal_flag && !td.operatorCandidate &&
586 td.parse_tree.isNull()) {
587 continue;
588 }
589 int pl = token->precedence_level;
590 if (pl != 0) {
591 rule->precedence_level = pl;
592 rule->left_associative = token->left_associative;
593 rule->right_associative = token->right_associative;
594 }
595 break;
596 }
597 }
598 //LOGV(n);
599 while (n) {
600 n--;
601 if (rule.token(n)->zero_length_flag) {
602 continue;
603 }
604 n++;
605 break;
606 }
607 assert(n <= rule->length());
608 rule->non_vanishing_length = n;
609 }
610 for (rule.restart(); rule.loopNotFinished(); rule.getNext()) {
611 int *rl = ibnfs + ibnfb[rule];
612 int nr = ibnfn[rule];
613 int length = rule->length();
614 int fx;
615
616 if (nr <= 1 || length == 0) {
617 continue;
618 }
619 while (nr) {
620 int tn = *rl++;
621 if (tut[tn]) {
622 break;
623 }
624 nr--;
625 }
626 if (nr == 0) {
627 continue;
628 }
629 for (fx = 0; fx < length; ) {
630 tut[rule.token(fx++)] = 1;
631 }
632 }
633 for (t = 0; t++ < ntkns;){
634 if (!tut[t]) {
635 if ((int) t == error_token) {
636 error_token = 0;
637 continue;
638 }
639 if ((int) t == grammar_token) {
640 continue;
641 }
642 ssprintf("Token not used, T%03d: ",t);
643 atkn(t);
644 log_error();
645 }
646 }
647 for (Each<Keyword> keyword; keyword.loopNotFinished(); keyword.getNext()) {
648 int pt = keyword->reserve;
649 if (pt == 0) {
650 continue;
651 }
652 keyword->reserve = build_char_set(pt);
653 }
654 LOGS("Reserve sets built");
655 if (n_states_est <= 0) {
656 n_states_est = 1;
657 }
658 bfst3();
659 DEALLOCATE(tut);
660 LOGS("tut deallocated");
661 if ((auto_resynch || error_token) && eof_token == 0) {
662 log_error("eof token not defined");
663 }
664 token_name_length = 0;
665 for (token.restart(); token.loopNotFinished(); token.getNext()) {
666 int n;
667 ics();
668 atkn(token);
669 n = rcs();
670 if (token_name_length < n) {
671 token_name_length = n;
672 }
673 }
674 LOGS("name length loop finished");
675 for (token.restart(); token.loopNotFinished(); token.getNext()) {
676 AgStack<Token> testTokenList;
677 LOGV((int) token);
678 testTokenList.push(token);
679 unsigned i;
680 int badRule = 0;
681 for (i = 0; !badRule && i < testTokenList.size(); i++) {
682 AgArray<Rule> &ruleList = testTokenList[i]->ruleList;
683 LOGV(i) LCV((int) testTokenList[i]);
684 unsigned j;
685 for (j = 0; !badRule && j < ruleList.size(); j++) {
686 Rule &r = ruleList[j];
687 if (r->length() == 0) {
688 continue;
689 }
690 unsigned k;
691 int nNonZeroLength = 0;
692 int kDerived = -1;
693 for (k = 0; k < r->length(); k++) {
694 if (r.token(k)->zero_length_flag) {
695 continue;
696 }
697 kDerived = k;
698 nNonZeroLength++;
699 }
700 if (nNonZeroLength > 1) {
701 continue;
702 }
703 Token testToken;
704 LOGV(nNonZeroLength);
705 if (nNonZeroLength) {
706 LOGV(kDerived);
707 testToken = r.token(kDerived);
708 LOGV((int) testToken);
709 if (testToken == token) {
710 badRule = (int) r;
711 }
712 LOGV(badRule);
713 }
714 else {
715 for (k = 0; k < r->length(); k++) {
716 testToken = r.token(k);
717 if (testToken == token) {
718 badRule = (int) r;
719 break;
720 }
721 }
722 }
723 LOGV((int) testToken) LCV(nNonZeroLength);
724 LOGV(badRule);
725 if (badRule) {
726 LOGV(r->length());
727 if (r->length() > 1) {
728 ssprintf("Empty recursion: Rule R%03d in expansion of T%03d: ",
729 (int) r, (int) token);
730 atkn(token);
731 badRecursionFlag = "empty recursion";
732 }
733 else {
734 ssprintf("Circular definition of T%03d: ", (int) token);
735 atkn(token);
736 ssprintf(" in ");
737 append_item(r, -1);
738 concat_string();
739 badRecursionFlag = "circular definition";
740 }
741 LOGV(badRecursionFlag);
742 log_error(r->line, r->col);
743 }
744 else {
745 LOGV(testTokenList.size());
746 for (k = 0; k < testTokenList.size(); k++) {
747 if (testTokenList[k] == testToken) {
748 break;
749 }
750 }
751 LOGV(k);
752 if (k == testTokenList.size()) {
753 testTokenList.push(testToken);
754 }
755 }
756 LOGV(badRecursionFlag);
757 }
758 }
759 }
760
761 item_length = 0;
762 for (rule.restart(); rule.loopNotFinished(); rule.getNext()) {
763 int n;
764 ics();
765 append_item(rule, -1);
766 n = rcs();
767 if (item_length < n) {
768 item_length = n;
769 }
770 }
771 item_length++;
772 for (rule.restart(); rule.loopNotFinished(); rule.getNext()) {
773 if (rule->proc_name) {
774 rule->reductionRequired = 1;
775 continue;
776 }
777 int n = rule->length();
778 while (n-- > 1) {
779 Token t = rule->elementList[n].token;
780 Cast c = t->value_type;
781 if (!c.wrapperRequired()) {
782 continue;
783 }
784 rule->reductionRequired = 1;
785 break;
786 }
787 }
788 }
789
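/*
get_reduction_states() returns the index of the completed-form entry
for (state s, rule f), creating the entry if necessary. On first use
the reduction states are computed by frss12() and cached in
map_completed_form[k].
*/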
790 int get_reduction_states(int s, int f, int n) {
791 LOGSECTION("get_reduction_states");
792 LOGV(s) LCV(f) LCV(n);
793 int k = add_tuple_dict_new(completed_form_dict,s,f);
794 completed_form_map *cfp;
795
796 check_size(map_completed_form,k,k);
797 cfp = &map_completed_form[k];
798 if (cfp->reduction_states_index == 0) {
799 LOGS("Calling frss12()");
800 frss12(s,f,n);
801 cfp = &map_completed_form[k];
802 cfp->external_reduction = external_reduction;
803 cfp->reduction_states_index = store_list(reduction_states_list);
804 cfp->n_reduction_states = rws();
805 }
806 return k;
807 }
808
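/*
find_reduction_states() scans a table of four-int tuples
(state, -, rule, position); for every tuple whose position is at the
end of its rule it computes and caches the reduction states. It is
called from q1() on res_con and on unres_con.
*/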
809 static void find_reduction_states(tsd *rrc) {
810 LOGSECTION("find_reduction_states");
811 LOGV((int) rrc);
812 int *p, nt;
813 sort_tuples(rrc,2);
814 p = rrc->sb;
815 nt = rrc->nt;
816 while (nt--) {
817 int s, f;
818 unsigned n;
819
820 s = *p++;
821 p++;
822 f = *p++;
823 n = *p++;
824
825 if (n != (Rule(f)->length())) {
826 continue;
827 }
828 LOGV(nt);
829 get_reduction_states(s, f, n);
830 }
831 LOGS("find_reduction_states finished");
832 }
833
834 void q1(void) {
835 LOGSECTION("q1");
836 LOGV(ntkns);
837 tut = ZALLOCATE(ntkns+1,int);
838 LOGS("call lalr");
839 lalr();
840 //State::createStates(Rule());
841 LOGS("call rlalr");
842 rlalr();
843 LOGV(nstates);
844 map_state_number = (state_number_map *)
845 set_array_size(map_state_number, nstates);
846
847 LOGS("call find_reduction_states");
848 if (res_con->nt) {
849 find_reduction_states(res_con);
850 }
851 LOGS("returned from first call to find_reduction_states");
852
853 if (unres_con->nt) {
854 find_reduction_states(unres_con);
855 }
856 LOGS("returned from second call to find_reduction_states");
857
858 DEALLOCATE(tut);
859 empty_tuple_dict(frss_dict);
860 LOGS("q1 finished");
861 }
862