view anagram/agcore/q5.cpp @ 21:1c9dac05d040

Add lint-style FALLTHROUGH annotations to fallthrough cases (in the parse engine, and thus in the output code). Document this, because the old output causes warnings with gcc 10.
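
For illustration only (made-up names, not code from this changeset): a
lint-style annotation is a marker comment placed on the case that falls
through, which gcc's -Wimplicit-fallthrough warning recognizes.

    switch (kind) {
    case KIND_TOKEN:
        count++;
        /* FALLTHROUGH */
    case KIND_RULE:
        emit(kind);
        break;
    }
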
author David A. Holland
date Mon, 13 Jun 2022 00:04:38 -0400
parents 13d2b8934445
children
line source

/*
 * AnaGram, A System for Syntax Directed Programming
 * Copyright 1993-2002 Parsifal Software. All Rights Reserved.
 * See the file COPYING for license and usage terms.
 *
 * q5.cpp
 */

#include "arrays.h"
#include "assert.h"
#include "config.h"
#include "bpu.h"
#include "cra.h"
#include "data.h"
#include "dict.h"
#include "error.h"
#include "keyword.h"
#include "lexeme.h"
#include "minmax.h"
#include "myalloc.h"
#include "q1a.h"
#include "q1glbl.h"
#include "q5.h"
#include "q8.h"
#include "rule.h"
#include "stacks.h"
#include "token.h"
#include "tsd.h"
#include "ut.h"

//#define INCLUDE_LOGGING
#include "log.h"


static unsigned     items_length;

unsigned n_gotos;
unsigned n_completions;
unsigned n_conflicts;
unsigned n_reductions;
unsigned n_default_reductions;
static int reduce_proc_flag;


tsd *lcft = NULL;
tsd *lcftp = NULL;
tsd *lgt = NULL;

int nInputRules = 0;

//extern tsd *items_table;

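// expand_state(sn) rebuilds the item list recorded for state sn in isht_dict
// and closes it: for each item whose token at the dot is a nonterminal, every
// expansion rule of that nonterminal is added as a new item with index 0.
// The result is returned as a (rule, index) tuple store; the caller is
// expected to free it with delete_tsd().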
tsd *expand_state(int sn) {
  int *list = dict_str(isht_dict, sn);
  int n = (*list++ - 1)/2;
  int fn, fx, k = n;
  tsd *sx = init_tsd(2);
  while (k--) {
    fn = *list++;
    fx = *list++;
    at(sx, fn, fx);
  }
  list -= 2*n;
  k = n;
  iws();
  while (k--) {
    int nx;

    fn = *list++;
    fx = *list++;
    if (fx >= (int) map_form_number[fn].length()) {
      continue;
    }
    //tn = lstptr(map_form_number[fn],tokens)[fx];
    Token token = Rule(fn).token(fx);
    if (!token->non_terminal_flag) {
      continue;
    }
    //xl = lstptr(map_token_number[tn], expansion_forms);
    //nx = map_token_number[tn].n_expansion_forms;
    //while (nx--) xws(*xl++);
    nx = token->expansionRuleList.size();
    for (int i = 0; i < nx; i++) {
      xws(token.expansionRule(i));
    }
  }
  k = tis();
  list = list_base;
  while (k--) {
    at(sx, *list++, 0);
  }
  rws();
  return sx;
}

static AgBalancedTree<OrderedPair<int> > pairs;
static AgBalancedTree<int> frameTokens;
static AgStack<int> frameRules;
static AgBalancedTree<int> frameStates;
int frameIndex;

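// findFrameStates(sn, rx) walks rx steps backward through previous_states,
// collecting in frameStates the states reached at that distance; the pairs
// tree presumably keeps a (state, distance) pair from being processed twice.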
static void findFrameStates(int sn, int rx) {
  LOGSECTION("findFrameStates");
  LOGV(sn) LCV(rx);
  if (pairs.insert(OrderedPair<int>(sn, rx))) {
    return;
  }
  LOGV(sn) LCV(rx);
  if (rx) {
    unsigned *p = lstptr(map_state_number[sn],previous_states);
    int nt = map_state_number[sn].n_previous_states;
    LOGV(nt);
    while (nt--) {
      LOGV(*p);
      findFrameStates(*p++, rx - 1);
    }
    return;
  }
  frameStates.insert(sn);
}

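// findFrameTokens() builds, for the first frame state, the set of candidate
// tokens at the dot in the expanded item set (skipping key, fine-structure
// and junky tokens and the grammar token) for which at least one frame rule
// is an expansion rule of the token and, when the dot has not advanced, all
// of them are.  It then intersects that set with the corresponding set from
// each remaining frame state, leaving the result in frameTokens.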
static void findFrameTokens(void) {
  LOGSECTION("findFrameTokens");
  frameTokens.reset();
  int n = frameStates.size();
  LOGV(n);
  if (n == 0) {
    return;
  }

  int state = frameStates[0];
  tsd *expansion = expand_state(state);
  int k = expansion->nt;
  LOGV(state) LCV(k);

  { LOGSECTION("Scan expansion rules"); }

  while (k--) {
    int r, index;
    xtx(expansion, k, &r, &index);
    Rule rule(r);
    RuleDescriptor &ruleDescriptor(rule);
    if (index >= (int) ruleDescriptor.length()) {
      continue;
    }
    Token token = ruleDescriptor.token(index);
    token_number_map &tokenDescriptor(token);
    if (tokenDescriptor.key || tokenDescriptor.fine_structure ||
	tokenDescriptor.junky) {
      continue;
    }
    if ((int) token == grammar_token) {
      continue;
    }
    LOGV(rule) LCV(index) LCV(token);
    int nFrameRules = frameRules.size();
    // Does there exist a frameRule which is an expansion rule of token?
    while (nFrameRules--) {
      if (token.isExpansionRule(frameRules[nFrameRules])) {
	break;
      }
    }
    if (nFrameRules < 0) {
      continue;
    }
    // Yes, at least one
    nFrameRules = frameRules.size();
    // Is there a frameRule which is not an expansion rule of token?
    while (nFrameRules--) {
      if (!token.isExpansionRule(frameRules[nFrameRules])) {
	break;
      }
    }
    if (nFrameRules >= 0 && index == 0) {
      continue;
    }
    LOGV(token);
    frameTokens.insert(token);
  }
  delete_tsd(expansion);

  { LOGSECTION("Scan frameStates"); }

  int i = 1;
  while (i < n) {
    AgBalancedTree<int> tokens;
    state = frameStates[i++];
    LOGV(state);
    expansion = expand_state(state);
    k = expansion->nt;
    while (k--) {
      int r, index;
      xtx(expansion, k, &r, &index);
      Rule rule(r);
      RuleDescriptor &ruleDescriptor(rule);
      if (index >= (int) ruleDescriptor.length()) {
	continue;
      }
      Token token = rule.token(index);
      if (!frameTokens.includes(token)) {
	continue;
      }
      token_number_map &tokenDescriptor(token);
      if (tokenDescriptor.key || tokenDescriptor.fine_structure || 
	  tokenDescriptor.junky) {
	continue;
      }
      if ((int) token == grammar_token) {
	continue;
      }
      LOGV(rule) LCV(index) LCV(token);
      int nFrameRules = frameRules.size();
      while (nFrameRules--) {
	if (token.isExpansionRule(frameRules[nFrameRules])) {
	  break;
	}
      }
      if (nFrameRules < 0) {
	continue;
      }
      nFrameRules = frameRules.size();
      while (nFrameRules--) {
	if (!token.isExpansionRule(frameRules[nFrameRules])) {
	  break;
	}
      }
      LOGV(nFrameRules);
      if (nFrameRules >= 0 && index == 0) continue;
      LOGV(token);
      tokens.insert(token);
    }
    delete_tsd(expansion);
    LOGS("Expansion deleted");
    frameTokens *= tokens;
    LOGS("Intersection complete");
  }
}

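// find_ctn(state) picks out the rules in the state's item set with the
// smallest rule index (the frame rules), locates the states where those
// rules began (findFrameStates), intersects the candidate tokens of those
// states (findFrameTokens), and returns one surviving token, or 0 if the
// set is empty.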
int find_ctn(int state) {
  LOGSECTION("find_ctn");
  int *items, nitems;
  int *p;

  frameRules.reset();
  items = dict_str(isht_dict,state);
  nitems = (*items++ -1)/2;
  p = items;
  frameIndex = 0;
  LOGV(state) LCV(nitems);
  // Scan the items that define this state.
  // The goal is to identify the rule, or rules, that have the fewest tokens
  // on the parse stack, i.e. the rules for which the rule index is at a
  // minimum.
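  // For example (hypothetical item set): given items (R17,2), (R21,1) and
  // (R23,1), the loop keeps R21 and R23 as the frame rules and sets
  // frameIndex to 1.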
  while (nitems--) {
    Rule rule(*p++);
    RuleDescriptor &ruleDescriptor(rule);
    int index = *p++;
    if (index >= (int) ruleDescriptor.length()) {
      continue;
    }
    Token token(ruleDescriptor.prim_tkn);
    if (frameRules.size() == 0 || index < frameIndex) {
      frameRules.reset().push(rule);
      frameIndex = index;
      LOGV(rule) LCV(frameIndex);
      continue;
    }
    else if (index == frameIndex) {
      frameRules.push(rule);
      LOGV(rule);
    }
  }
  if (frameRules.size() == 0) {
    frameIndex = 0;
    return 0;
  }
  frameStates.reset();
  pairs.reset();

  { LOGSECTION("frame states"); }

  // identify states where frame rules originated
  findFrameStates(state, frameIndex);
  pairs.reset();

  { LOGSECTION("frame tokens"); }
  findFrameTokens();
  { LOGSECTION("frame tokens complete"); }

  int n = frameTokens.size();
  if (n == 0) {
    frameIndex = 0;
    return 0;
  }
  Token token = frameTokens[0];
  LOGV(n) LCV(token);
  if (n == 1) {
    return token;
  }
  for (int i = 1; i < n; i++) {
    LOGV(token) LCV(frameTokens[i]);
    if (token.isExpansionToken(frameTokens[i])) {
      token = frameTokens[i];
    }
  }
  LOGV(token);
  frameTokens.reset();
  frameRules.reset();
  return (int) token;
}

static int zeros[] = {0,0,0};

//int *token_list;


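// cmpits3a(t) finishes processing shift token t for the current state: if
// the working list holds a single item for a completed rule (and default
// handling applies), a completion on t is recorded in lcft; otherwise the
// item list is interned in isht_dict, creating the successor state when it
// is new, a goto on t to that state is recorded in lgt, and the token just
// shifted is remembered as the new state's char_token.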
static void cmpits3a(unsigned t) {
  LOGSECTION("cmpits3a");
  LOGV(t) LCV(tis());
  unsigned lis_id;

  if ((lits = tis()) == 1) {                   // if single rule on stack
    Rule rule = list_base[0];
    if (rule->length() == (unsigned) list_base[1]) {
      if (!traditional_engine || rule.isNull() || (int) t == eof_token) {
        LOGV(rule) LCV(list_base[1]);
        at(lcft, t, (int) rule);
        n_completions++;
        return;
      }
    }
  }
  Rule rule(list_base[0]);
  int index = list_base[1] - 1;
  LOGV(rule) LCV(index);
  int charToken = rule.token(index);
  LOGV(charToken);
  lis_id = add_list_dict(list_base, 2*lits, isht_dict);
  check_size(map_state_number, lis_id, lis_id);
  at(lgt, t, lis_id);
  map_state_number[lis_id].char_token = charToken;
  LOGV(lis_id) LCV(t);
  n_gotos++;
}

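// size_prop() grows a list to hold n more entries, estimating the final size
// by scaling the request with the ratio of states discovered so far (nits)
// to states processed (kits), capped at a factor of three.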
void *size_prop(unsigned *list, unsigned n) {
  unsigned m = nits + 1;
  unsigned k = kits + 1;
  n += list[0];
  if (m > 3*k) m = 3*k;
  return check_array_size(list, n, n*m/k);
}

#define resize(list, n) list = (unsigned *) size_prop(list, n)

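// lalr() builds the parser's state machine breadth-first: kits indexes the
// state being processed while nits counts the states discovered so far.  For
// each state the kernel items are read back from isht_dict, expanded, and
// grouped by their next token through the chain_tokens lists; cmpits3a()
// then either records a completion or creates/locates the successor state,
// filling in the gotos and completions lists of map_state_number.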
void lalr(void) {
  LOGSECTION("lalr");
  int f, n, s, g;
  int nt;
  item_map item;
  //int *chain_tokens = local_array(ntkns+1, int);
  LocalArray<int> chain_tokens(ntkns+1);

  nits = n_conflicts = 0;

  check_size(map_state_number, n_states_est, n_states_est);
  n = 4*n_states_est;
  check_size(completions_list,n, n);
  check_size(completed_forms_list, n_states_est, n_states_est);
  check_size(gotos_list, n_gotos, n_gotos);
  check_size(reductions_list, n_states_est, n_states_est);
  check_size(chain_completions_list, n, n);
  check_size(chain_gotos_list, n_gotos, n_gotos);
  MAP(item, 100);
  n_gotos = n_completions = 0;
  n_reductions = n_default_reductions = 0;
  //token_list = local_array(ntkns+1, int);
  AgStack<int> tokenStack;
  lcftp = spec_tsd(300, 2);
  lcft = spec_tsd(300, 2);
  lgt = spec_tsd(300, 2);
  nitems = 1;
  add_list_dict(zeros, 2, isht_dict);

  nits = 1;

  kits = 0;
  while (kits < nits) {
    LOGV(kits) LCV(nits);
    state_number_map *sp;
    //int *tnl;

    memset(chain_tokens, 0, (ntkns+1)*sizeof(*chain_tokens));
    sp = &map_state_number[nstates = kits];
    nitems = 1;

    items = (unsigned *) dict_str(isht_dict,kits);
    items_length = (*items++ - 1)/2;
    n = nitems + items_length + 1;
    check_size(map_item, n, n);

    ncssa = 0;
    tokenStack.reset();
    iws();
    while (items_length--) {
      Rule rule(item.form_number = f = *items++);
      item.form_index = n = *items++;
      LOGS("item") LS(f) LCS(n);
      if (n > 0 && Token(rule.token(n-1))->sticky) {
	sp->sticky = 1;
      }
      if ((unsigned) n >= rule->length()) {
        aws(f);
        continue;
      }
      Token t(rule.token(n));
      //token_list[ncssa++] = t;
      tokenStack.push(t);
      LOGS("token") LS(t);
      item.chain_item = chain_tokens[t];
      map_item[nitems] = item;
      chain_tokens[t]  = nitems++;

      nt = t->expansionRuleList.size();
      check_size(map_item, nitems+nt, nitems+nt);
      if (nt == 0) {
	continue;
      }
      for (int i = 0; i < nt; i++) {
        Rule expansionRule(item.form_number = g = t.expansionRule(i));
        if (expansionRule->length() == 0) {
          xws(expansionRule);
          continue;
        }
        item.form_index = 0;
        s = expansionRule.token(0);
        LOGV(expansionRule)LCS("token is") LS(s);
        item.chain_item = chain_tokens[s];
        //if (item.chain_item == 0) token_list[ncssa++] = s;
        if (item.chain_item == 0) {
	  tokenStack.push(s);
	}
        map_item[nitems] = item;
        chain_tokens[s] = nitems++;
      }
    }
    resize(completed_forms_list, tis());
    sp->completed_forms_index = store_list(completed_forms_list);
    sp->n_completed_forms = rws();

    reset_tsd(lgt);
    reset_tsd(lcft);
/*
    iws();
    n = 0;
    while (n<ncssa) {
      LOGV(n) LCV(token_list[n]);
      xws(token_list[n++]);
    }
    tnl = list_base;
    ncssa = tis();
*/
    AgStack<int> tnl;
    n = 0;
    ncssa = tokenStack.size();
    LOGV(ncssa);
    while (n < ncssa) {
      //int tokenNumber = token_list[n++];
      int tokenNumber = tokenStack[n++];
      int i = tnl.size();
      LOGV(tokenNumber);
      while (i--) {
	LOGV(i) LCV(tnl[i]);
	if (tokenNumber == tnl[i]) {
	  break;
	}
      }
      if (i >= 0) {
	continue;
      }
      tnl.push(tokenNumber);
      LOGV(i) LCV(tokenNumber) LCV(tnl.size());
      LOGV(tnl.top()) LCV(tnl[tnl.size()-1]);
    }
    ncssa = tnl.size();
    LOGS("Create") LS(ncssa) LS("proto-states");
#ifdef INCLUDE_LOGGING
    n = ncssa;
    if (n) {
      LOGV(tnl.top()) LCV(tnl[tnl.size()-1]);
    }
    while (n--) {
      LOGV(n) LCV(tnl[n]);
    }
#endif
    while (ncssa--) {
      LOGV(ncssa) LCV(tnl[ncssa]);
      Token token(tnl[ncssa]);
      if (token->non_terminal_flag && token->token_set_id && !token->junky) {
	continue;
      }
      iws();
      n = chain_tokens[token];
      LOGV(token) LCV(n);
      while (n) {
        item = map_item[n];
        Token pt(Rule(item.form_number)->prim_tkn);
        LOGV(item.form_number) LCV(pt) LCV(pt->token_set_id);
        LOGV(pt->junky) LCV(pt->pure);

        if (pt->non_terminal_flag && pt->token_set_id && !pt->junky) {
	  LOGV(n);
          int n = chain_tokens[pt];
          while (n) {
            item_map item = map_item[n];
            xps(item.form_number, item.form_index + 1);
            LOGV(item.form_number) LCV(item.form_index + 1);
            n = item.chain_item;
            LOGV(n);
          }
        }
        else {
          xps(item.form_number, item.form_index + 1);
        }
        nitems--;
        n = item.chain_item;
      }
      cmpits3a(token);
      sp = &map_state_number[kits];
      rps();
    }
    //rws();
    resize(completions_list, lcft->nt);
    sp->completions_index = store_tuples(lcft, completions_list);
    sp->n_completions = lcft->nt;
    resize(gotos_list, lgt->nt);
    sp->gotos_index = store_tuples(lgt,gotos_list);
    sp->n_gotos = lgt->nt;
    nits = isht_dict->nsx;
    check_size(map_state_number, nits, nits + nits/2);
    if (!traditional_engine && !rule_coverage) {
      cra();
    }
    kits++;
  }
  delete_tsd(lgt);
  delete_tsd(lcft);
  delete_tsd(lcftp);
  delete_array(map_item);
  map_state_number = (state_number_map *)
    set_array_size(map_state_number, nits);
  //close_list_dict(isht_dict);
  n_states_est = nits - 1;
  nstates = nits;
}

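// find_followers(f) collects, in the current working set, the terminal
// tokens that can follow a reduction by rule f: for each token that rule f
// reduces to, a terminal follower is added directly and a nonterminal
// follower contributes its leading tokens.  An empty follower list (or one
// containing only the null token) produces a "Nothing reduces" diagnostic.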
static void find_followers(int f) {
  LOGSECTION("find_followers");
  int nt, nq;
  unsigned *p;

  if (f == 0) {
    sws(0);
    return;
  }
  iws();
  p = (unsigned *) ibnfs + ibnfb[f];
  nt = ibnfn[f];
  LOGV(nt);
  while (nt--) {
    Token token = *p++;
    nq = token->followerList.size();
    LOGV(nq);
    if (nq == 0 || (nq == 1 && token.follower(0).isNull())) {
      int errorNumber = errorList.size();
      ssprintf("Nothing reduces T%03d -> ",(int)token);
      append_item(f, 1+map_form_number[f].length());
      log_error(map_form_number[f].line, map_form_number[f].col);

      for (int k = 0; k < errorNumber; k++) {
        if (errorList[k].message != errorList[errorNumber].message) {
	  continue;
	}
        errorList.pop();
        break;
      }
    }
    for (int k = 0; k < nq; k++) {
      int nlt;
      Token follower = token.follower(k);
      LOGV(follower);
      if(!follower->non_terminal_flag) {
        isws(follower);
        continue;
      }
      if ((nlt = follower->leadingTokenList.size()) == 0) {
	continue;
      }
      for (int i = 0; i < nlt; i++) {
	isws(follower.leadingToken(i));
      }
    }
  }
}

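// bsgt() gathers the current state's actions on terminal tokens into sgt:
// each goto on a terminal contributes the (rule, index-1) items of the
// target state, each completion on a terminal contributes the rule with its
// full length less one, and tut[] counts the entries made for each terminal.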
static void bsgt(void) {
  LOGSECTION("bsgt");
  int kn, rtk, s, n, g, k;
  int *p;
  unsigned *px;
  state_number_map *sp = &map_state_number[kits];

  px = lstptr(*sp,gotos);
  kn = sp->n_gotos;
  while (kn--) {
    rtk = *px++;
    s = *px++;
    if (map_token_number[rtk].non_terminal_flag) {
      continue;
    }
    p = dict_str(isht_dict, s);
    n = (*p++ - 1)/2;
    while (n--) {
      g = *p++;
      k = *p++;
      at(sgt, rtk, g, k-1);
      tut[rtk]++;
      LOGV(tut[rtk]) LCV(rtk) LCV(k);
    }
  }
  px = lstptr(*sp,completions);
  kn = sp->n_completions;
  while (kn--) {
    rtk = *px++;
    g = *px++;
    if (map_token_number[rtk].non_terminal_flag) {
      continue;
    }
    at(sgt, rtk, g, map_form_number[g].length() - 1);
    tut[rtk]++;
    LOGV(tut[rtk]) LCV(rtk);
  }
}

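// logcon() records conflicts for the listed tokens: each token is paired
// with every matching shift entry in sgt and every matching reduction in
// srt for the current state, and the tuples are written to rrc.  Anything
// other than res_con counts against max_conflicts as an unresolved conflict.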
static void logcon(tsd *rrc, int *p, int nt) {
  LOGSECTION("logcon");
  LOGV(kits) LCV(nits);
  int n;
  int t, f;
  int *q, nq,qt;

  while (nt--) {
    if (rrc != res_con) {
      if (n_conflicts >= (unsigned) max_conflicts) {
	return;
      }
      n_conflicts++;
    }
    t = *p++;
    q = sgt->sb;
    nq = sgt->nt;
    while (nq--) {
      if (*q++ != t) {
        q += 2;
        continue;
      }
      f = *q++;
      n = *q++;
      at(rrc, kits, t, f, n);
    }
    q = srt->sb;
    nq = srt->nt;
    while (nq--) {
      qt = *q++;
      f = *q++;
      if (qt != t) {
	continue;
      }
      at(rrc, kits, t, f, map_form_number[f].length());
    }
  }
}

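// log_prr() records the resolved conflicts in prr: the (token, rule) pairs
// in s with flag 0 (resolved as shifts) and those in r with flag 1
// (resolved as reductions).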
static void log_prr(tsd *s, tsd *r) {
  int *p, n;

  p = s->sb;
  n = s->nt;
  while (n--) {
    int t = *p++;
    int f = *p++;

    at(prr, kits, 0, t, f);
  }
  p = r->sb;
  n = r->nt;
  while (n--) {
    int t = *p++;
    int f = *p++;

    at(prr, kits, 1, t, f);
  }
}

static tsd *purge_ts = NULL;

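// purge_gotos(st) removes from the current state's goto, chain-goto,
// completion and chain-completion lists any entries whose token matches a
// pair in st, by wrapping each list in the scratch descriptor purge_ts and
// handing it to p1_tsd(), which presumably drops the matching tuples.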
static void purge_gotos(tsd *st) {
  tsd *t = purge_ts;
  int kn;
  state_number_map *sp = &map_state_number[kits];

  t->tw = 2;
  if ((kn = sp->n_gotos) != 0) {
    t->nt = t->na = kn;
    t->sb = (int *) lstptr(*sp,gotos);
    p1_tsd(t, 0, st, 0);
    sp->n_gotos = t->nt;
  }
  if ((kn = sp->n_chain_gotos) != 0) {
    t->nt = t->na = kn;
    t->sb = (int *) lstptr(*sp,chain_gotos);
    p1_tsd(t, 0, st, 0);
    sp->n_chain_gotos = t->nt;
  }
  if ((kn = sp->n_completions) != 0) {
    t->nt = t->na = kn;
    t->sb = (int *)lstptr(*sp,completions);
    p1_tsd(t, 0, st, 0);
    sp->n_completions = t->nt;
  }
  if ((kn = sp->n_chain_completions) != 0) {
    t->nt = t->na = kn;
    t->sb = (int *) lstptr(*sp,chain_completions);
    p1_tsd(t, 0, st, 0);
    sp->n_chain_completions = t->nt;
  }
}

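// find_terminals() adds to the current working set every terminal token
// appearing in the given state's gotos or completions.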
static void find_terminals(state_number_map *msn) {
  unsigned *p = lstptr(*msn, gotos);
  int n = msn->n_gotos;

  while (n--) {
    int t = *p++;
    p++;
    if (!map_token_number[t].non_terminal_flag) {
      xws(t);
    }
  }
  p = lstptr(*msn, completions);
  n = msn->n_completions;
  while (n--) {
    int t = *p++;
    p++;
    if (!map_token_number[t].non_terminal_flag) {
      xws(t);
    }
  }
}

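// disregardToken(t) returns 1 if t is one of the disregard tokens, or a
// leading token of one, and 0 otherwise.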
static int disregardToken(const Token &t) {
  int k = disregardList.size();
  while (k--) {
    Token disregardToken = disregardList[k];
    if (t == disregardToken) {
      return 1;
    }
    AgArray<Token> &list = disregardToken->leadingTokenList;
    int n = list.size();
    while (n--) {
      if (t == list[n]) {
	return 1;
      }
    }
  }
  return 0;
}

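// rlalr() makes the second pass over the states: for each state it gathers
// the shift actions on terminals (bsgt), computes reduction lookahead sets
// (find_followers), resolves shift-reduce conflicts using precedence,
// associativity, sticky tokens and the disregard (lexeme) machinery, logs
// resolved and unresolved conflicts, decides whether the state can take a
// default reduction, and stores the reduction and key-token lists.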
void rlalr(void) {
  LOGSECTION("rlalr");
  unsigned f, k, n, s;
  int flag;
  int nf;
  unsigned nt;
  int *q, nq;
  unsigned *p, *px;
  int no_default;
  tsd *spr = init_tsd(2);
  tsd *sps = init_tsd(2);
  tsd *discardedReductions = init_tsd(2);
  int crc;                             /* conflict resolution count */
  tsd *resolved_tokens = init_tsd(1);
  unsigned char *stf = (unsigned char *) alloca(ntkns+1);


  purge_ts = local_array(1,tsd);
  no_default = error_token != 0 ||
               !default_reductions ||
               traditional_engine;
  check_size(previous_states_list, n_gotos, n_gotos);
  ivgtt();

  if (key_list_dict != NULL) {
    reset_list_dict(key_list_dict);
  }
  else {
    key_list_dict = null_list_dict();
  }
  add_list_dict(zeros, 0, key_list_dict);

  for (kits = 0; kits < nits; kits++) {
    LOGV(kits) LCV(nits);
    state_number_map *sp = &map_state_number[kits];
    int sticky = sp->sticky;
    int error_state = 0;

    items = (unsigned *) dict_str(isht_dict, kits);
    f = items[1];
    n = items[2];
    if (n > 0 && Rule(f).token(n-1) == Token(error_token)) {
      error_state = 1;
    }
    crc = 0;
    reset_tsd(sgt);
    memset(tut, 0, sizeof(*tut) * (ntkns+1));
    LOGS("tut zeroed");
    bsgt();
    if (tut[error_token]) {
      error_state = 1;
    }
    if ((nf = sp->n_completed_forms) == 0) {
      {
        int n1, n2;
        n1 = sp->n_gotos;
        n2 = sp->n_chain_gotos;
        nt = max(n1,n2);
        n1 = sp->n_completions;
        n2 = sp->n_chain_completions;
        nt += max(n1,n2) + 1;
      }
      iws();
      find_key_tokens(sgt->sb, sgt->nt,3);
      k = tis();
      if (k) {
	k = add_list_dict(list_base, k, key_list_dict);
      }
      rws();
      sp->key_list = k;
      continue;
    }
    reset_tsd(srt);
    kfrs = nf;
    n = nf;
    px = lstptr(*sp, completed_forms);
    flag = 0;
    int noDefaultOnNullProduction = 0;
    while (n-- && (flag == 0)) {
      Rule rule(frs = f = *px++);
      RuleDescriptor &ruleDescriptor(rule);

      flag = traditional_engine || error_state || !default_reductions;
      if (flag) break;
      //flag = noDefaultOnNullProduction = (nf == 1) && rule->length() == 0;
      flag = noDefaultOnNullProduction = 
	(nf == 1) && ruleDescriptor.length() == 0;
      if (flag) {
	break;
      }
      //reduce_proc_flag = flag = rule->proc_name && !rule->immediate_proc;
      reduce_proc_flag = flag = 
	ruleDescriptor.proc_name && !ruleDescriptor.immediate_proc;
        /*|| noise_token; */
      if ((no_default && reduce_proc_flag)) {
	break;
      }

      find_followers(f);
      p = (unsigned *) list_base;
      nt = rws();
      while (nt--) {
        s = *p++;
        if ((int) s == error_token) {
	  continue;
	}
        at(srt, s, f);
        if (map_token_number[s].key || tut[s]) {
          flag++;
          break;
        }
        else {
	  tut[s] = (char) -1;
	  LOGV(tut[s]) LCV(s);
	}
      }
    }
    if (flag) {
      LOGSECTION("Detailed resolution");
      LOGV(noDefaultOnNullProduction);
      flag = crc = 0;
      memset(stf, 0, sizeof(*stf) * (ntkns+1));
      memset(tut,0,sizeof(*tut) * (ntkns+1));
      LOGS("tut zeroed") LCV(ntkns);
      LOGV(tut[16]);
      p = (unsigned *) sgt->sb;
      nt = sgt->nt;
      while (nt--){
        s = *p;
        assert(s <= ntkns);
        tut[s]++;
        LOGV(tut[s]) LCV(s);
        stf[s]++;
        p += 3;
      }
      LOGV(tut[16]);
      reset_tsd(srt);
      iws();               /* for list of tokens which have conflicts */
      unsigned *unsortedForms = lstptr(*sp, completed_forms);
      unsigned *sortedForms = local_array(nf, unsigned);
      memcpy(sortedForms, unsortedForms, nf*sizeof(unsigned));
      int i, j;
      for (i = 0; i < nf; i++) {
        for (j = i+1; j < nf; j++) {
          if (sortedForms[i] > sortedForms[j]) {
            unsigned temp = sortedForms[i];
            sortedForms[i] = sortedForms[j];
            sortedForms[j] = temp;
          }
        }
      }
      LOGV(tut[16]);
      px = sortedForms;
      while (nf--) {
        int fpl;
        int fra;
        Rule rule(frs = f = *px++);
        RuleDescriptor &ruleDescriptor(rule);
        completed_form_map *mcf;

        fpl = ruleDescriptor.precedence_level;
        fra = ruleDescriptor.right_associative;

        k = add_tuple_dict_new(completed_form_dict, kits, f);
        check_size(map_completed_form, k, k*(nits+1)/(kits+1));
        mcf = &map_completed_form[k];
        if (mcf->reduction_states_index == 0) {
          unsigned nrs;
          extern int external_reduction;

          frss12(kits, f, ruleDescriptor.length());
          mcf = &map_completed_form[k];
          mcf->external_reduction = external_reduction;
          resize(reduction_states_list, tis());
          mcf->reduction_states_index = store_list(reduction_states_list);
          nrs = rws();
          mcf->n_reduction_states = nrs;
        }
	LOGV(mcf->reduction_states_index);
        if (mcf->reduction_states_index == 0) {
          if(sit(srt, 0, f)) {
	    continue;
	  }
          s = 0;
          if (tut[s] > 0) {
            tut[s] = - tut[s];
	    LOGV(s) LCV(tut[s]);
            xws(s);
            flag++;
            continue;
          }
          else if (tut[s] < 0) {
	    continue;
	  }
          tut[s]++;
	  LOGV(s) LCV(tut[s]);
          continue;
        }
        p = lstptr(*mcf,reduction_states);
        nt = mcf->n_reduction_states;
        LOGV(f) LCV(nt);
        iws();
        while (nt--) {
          int sn = *p++;
          state_number_map *msn = &map_state_number[sn];
	  LOGV(sn);
          if (sn == 0) {
	    xws(0);
	  }
          else {
            find_terminals(msn);
            if (tis() == 0 && mcf->external_reduction) {
	      xws(0);
	    }
          }
        }
#ifdef INCLUDE_LOGGING
	LOGS("terminals");
	for (int i = 0; i < tis(); i++) {
	  LOGV(i) LCV(list_base[i]) LCV(tut[list_base[i]]);
	}
#endif
        {
          int *terminals;
          q = terminals = build_list();
          nq = fis();
          if (nq == 0) {
	    continue;
	  }
          while (nq--) {
            int sk;

            s = *q++;
            LOGV(s) LCV(tut[s]);
            if ((int) s == error_token) {
	      continue;
	    }
            sk = map_token_number[s].key;
            LOGV(sk) LCV(sticky) LCV(distinguish_lexemes) 
	      LCV(disregard_skip_rule);
            LOGV(f) LCV(map_form_number[f].lexical);
            if (sk && !map_token_number[s].lexical && (
	      sticky || (
		//!map_token_number[s].lexical &&
		distinguish_lexemes && (f == disregard_skip_rule
					|| map_form_number[f].lexical)
		//) // || map_form_number[f].lexical)
		)
	      )
              )
	    {
              int c = *(unsigned char *) Keyword(sk)->string.pointer();
              assert(c >= min_char_number);
              int t = map_char_number[c - min_char_number].token_number;
	      LOGV(c) LCV(t) LCV(stf[t]);
              if (stf[t]) continue;
            }
            if (sit(srt,s,f)) continue;
            LOGV(s) LCV(tut[s]);
            if (tut[s] > 0) {
              int spl = map_token_number[s].precedence_level;
	      //int sla = map_token_number[s].left_associative;
	      //int sra = map_token_number[s].right_associative;
              if (sticky || (fpl && spl)) {
                if (sticky || spl > fpl || (fra && spl == fpl)) {
                  /* shift */
                  LOGS("Shift") LCV(s) LCV(f);
                  at(sps, s, f);
                }
                else {
                  LOGS("Reduce") LCV(s) LCV(f);
                  /* reduce */
                  at(spr,s,f);
                }
                at(resolved_tokens, s);
                crc++;
                continue;
              }
/*
  // not clear whether this should be done
	      else if (sla) {
                LOGS("Shift -- left associative") LCV(s) LCV(f);
                at(sps, s, f);
                at(resolved_tokens, s);
                crc++;
                continue;
	      }
	      else if (sra) {
                LOGS("Reduce -- right associative") LCV(s) LCV(f);
                // reduce
                at(spr, s, f);
                at(resolved_tokens, s);
                crc++;
                continue;
	      }
*/
              else if (disregardToken(s)) {
                if (f == disregard_skip_rule) {
		  //LOGS("Reduce") LCV(s) LCV(f);
		  //at(spr, s, f);                  /* reduce */
                  LOGS("Shift") LCV(s) LCV(f);
                  at(sps, s, f);                    /*shift */
                  at(resolved_tokens, s);
                  crc++;
                  continue;
                }
                else if (f == disregard_cont_rule
			 || map_form_number[f].lexical) {
                  LOGS("Shift") LCV(s) LCV(f);
                  at(sps, s, f);                    /*shift */
                  at(resolved_tokens, s);
                  crc++;
                  continue;
                }
              }

              else if (distinguish_lexemes && //!map_token_number[s].lexical &&
		       (f == disregard_skip_rule
			|| map_form_number[f].lexical))
              {
                LOGV(s) LCV(f);
                LOGS("Shift") LCV(s) LCV(f);
                at(sps, s, f);                    /* shift */
                continue;
              }
              xws(s);
              at(discardedReductions, s, f);
              LOGS("Discarding reduction") LCV(s) LCV(f);
              flag++;
              continue;
            }
            else if (tut[s] < 0) {
              xws(s);
              at(discardedReductions, s, f);
              LOGS("Discarding reduction") LCV(s) LCV(f);
              flag++;
              continue;
            }
            else {
              tut[s] = -1;              // Assert this is a reducing token
	      LOGV(s) LCV(tut[s]);
            }
          }
          DEALLOCATE(terminals);
        }
      }
      if (flag) {
	logcon(unres_con, list_base, tis());
      }
      rws();
    }
    if (crc) {
      logcon(res_con, resolved_tokens->sb, resolved_tokens->nt);
      purge_tsd(srt, sps);
      purge_gotos(spr);
      log_prr(sps, spr);
      reset_tsd(spr);
      reset_tsd(sps);
    }
    if (sps->nt) {
      purge_tsd(srt, sps);
      reset_tsd(sps);
    }
    if (discardedReductions->nt) {
      purge_tsd(srt, discardedReductions);
      reset_tsd(discardedReductions);
    }
    reset_tsd(resolved_tokens);
    iws();
    find_key_tokens(sgt->sb, sgt->nt, 3);
    find_key_tokens(srt->sb, srt->nt, 2);
    k = tis();
    if (k) {
      k = add_list_dict(list_base, k, key_list_dict);
    }
    sp->key_list = k;
    rws();
    if (error_state || (no_default && reduce_proc_flag) ||
	!default_reductions ||
	noDefaultOnNullProduction ||
	traditional_engine ||
	kfrs != 1) {
      p = (unsigned *) srt->sb;
      nt = srt->nt;
      resize(reductions_list, nt);
      sp->reductions_index = store_tuples(srt,reductions_list);
      sp->n_reductions = nt;
      n_reductions += nt;
    }
    else {
      n_default_reductions++;
    }
    {
      int n1, n2;
      n1 = sp->n_gotos;
      n2 = sp->n_chain_gotos;
      nt = max(n1, n2);
      n1 = sp->n_completions;
      n2 = sp->n_chain_completions;
      nt += max(n1, n2) + 1;
    }
    nt += sp->n_reductions;
  }
  LOGS("rlalr state loop complete");
  LOGV(previous_states_list[0]);
  previous_states_list = reset_list(previous_states_list,
				    previous_states_list[0]);
  ivgtt();
  delete_tsd(resolved_tokens);
  delete_tsd(spr);
  delete_tsd(sps);
  delete_tsd(discardedReductions);
  //close_list_dict(key_list_dict);
  //LOGS("list dict closed");
}