view anagram/agcore/cd.cpp @ 0:13d2b8934445

Import AnaGram (near-)release tree into Mercurial.
author David A. Holland
date Sat, 22 Dec 2007 17:52:45 -0500

/*
 * AnaGram, A System for Syntax Directed Programming
 * Copyright 1993-1999 Parsifal Software. All Rights Reserved.
 * See the file COPYING for license and usage terms.
 *
 * cd.cpp - Conflict Derivation module
 */

#include "arrays.h"
#include "cd.h"
#include "data.h"
#include "dict.h"
#include "q1glbl.h"
#include "rule.h"
#include "stacks.h"
#include "tsd.h"
#include "token.h"

//#define INCLUDE_LOGGING
#include "log.h"


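/*
 * Module state used by x9x() below: conflict_token is the token whose
 * derivation is being traced (evidently set elsewhere before x9x() is
 * called), token_derivation accumulates the (rule, position) steps of
 * the derivation found, and tried[] flags tokens already visited so
 * the recursive search does not loop.
 */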
int conflict_token = 0;
tsd *token_derivation = NULL;

char *tried;

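/*
 * find_gotos() points *p at the goto list for state s -- the chain
 * gotos if the state has any, otherwise the ordinary gotos -- and
 * returns the number of entries. Callers below treat each entry as a
 * (token, next state) pair.
 */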
int find_gotos(int s, const unsigned **p) {
  state_number_map *sp = &map_state_number[s];
  int n = sp->n_chain_gotos;
  if (n == 0) {
    *p = lstptr(*sp, gotos);
    n = sp->n_gotos;
  }
  else {
    *p = lstptr(*sp, chain_gotos);
  }
  return n;
}

/*
 * x2 returns true if llt is an expansion token of hlt, false otherwise
 */

/*
int x2(unsigned hlt, unsigned llt) {
  //LOGSECTION("x2");
  //token_number_map *tp;
  unsigned *fl;
  int nf;

  Token token = hlt;
  Token candidate = llt;
  if (token == candidate) return 1;
  //LOGV(hlt);
  //LOGV(llt);
  //assert(hlt <= ntkns);
  //tp = &map_token_number[hlt];
  //fl = lstptr(*tp, expansion_forms);
  //nf = tp->n_expansion_forms;
  nf = token->n_expansion_forms;
  while (nf--) {
    //form_number_map *fp = &map_form_number[*fl++];
    Rule rule = *fl++;
    int k = rule->length();
    if (k == 0) {
      continue;
    }
    //if (llt == lstptr(*fp,tokens)[0]) return 1;
    if (candidate == rule->token(0)) {
      return 1;
    }
  }
  return 0;
}
*/

/*
static int x2x(unsigned hlt, unsigned llt) {
  //token_number_map *tp;
  //unsigned *tl;
  int nt;

  Token highLevel(hlt);
  Token lowLevel(llt);
  if (highLevel == lowLevel) return 1;
  //tp = &map_token_number[hlt];
  //tl = lstptr(*tp,leading_tokens);
  //nt = tp->n_leading_tokens;
  //while (nt--) if (llt == *tl++) return 1;
  while (nt--) if (lowLevel == highLevel->leadingToken(nt)) return 1;
  return 0;
}
*/

/*
 * x2d checks the characteristic rules of state sn and returns 1 if tn
 * is, or is a leading token of, the next token of some characteristic
 * rule. Otherwise it returns 0.
 */

int x2d(int sn, int tn) {
  int *items = dict_str(isht_dict, sn);
  int nitems = (*items++ - 1)/2;
  while (nitems--) {
    Rule rule = *items++;
    unsigned fx = *items++;
    if (rule->length() <= fx) {
      continue;
    }
    //if (x2x(lstptr(map_form_number[fn],tokens)[fx],tn)) return 1;
    //if (x2x(Rule(fn)->token(fx),tn)) return 1;
    if (rule.token(fx).isLeadingToken(tn)) {
      return 1;
    }
  }
  return 0;
}


/*
 * x4 returns true if fn is a left expansion rule of hlt, false otherwise
 */

/*
int x4(unsigned hlt, unsigned fn) {
  assert(hlt <= ntkns);
  token_number_map *tp = &map_token_number[hlt];
  unsigned *fl = lstptr(*tp, expansion_forms);
  unsigned nf =  tp->n_expansion_forms;

  assert(nf <= nforms);
  while (nf--) if (*fl++ == fn) return 1;
  return 0;
}
*/

#ifndef COMMAND_LINE
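/*
 * x3() scans the items recorded in isl for stack index sx + 1 and
 * returns 1 if one of them is either rule fn advanced one position
 * past fx, or an item at position 1 whose rule is an expansion rule of
 * the token fn has at position fx. It returns 0 once the scan reaches
 * items for a smaller stack index, or if nothing matches.
 */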
int x3(tsd *isl, int sx, int fn, int fx) {
  unsigned k;

  check_tsd(isl);
  assert((unsigned)fn <= nforms);
  sx++;
  for (k = 0; k < isl->nt; k++) {
    int fsx, fsn, ffn, ffx;
    xtxf(isl, k, &fsx, &fsn, &ffn, &ffx);
    if (fsx < sx) {
      return 0;
    }
    if (fsx > sx) {
      continue;
    }
    if (ffn == fn && ffx == fx + 1) {
      return 1;
    }
    if (ffx != 1) {
      continue;
    }
    //if (x4(lstptr(map_form_number[fn],tokens)[fx],ffn)) return 1;
    //if (x4(Rule(fn)->token(fx),ffn)) return 1;
    if (Rule(fn).token(fx).isExpansionRule(ffn)) {
      return 1;
    }
  }
  return 0;
}

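/*
 * x3a() is like x3() but examines the items recorded for stack index
 * sx itself: it returns 0 if the identical item (fn, fx) is already
 * present, 1 if some recorded item's rule is an expansion rule of the
 * token fn has at position fx, and 0 otherwise.
 */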
int x3a(tsd *isl, int sx, int fn, int fx) {
  unsigned k;

  check_tsd(isl);
  assert((unsigned)fn <= nforms);
  for (k = 0; k < isl->nt; k++) {
    int fsx, fsn, ffn, ffx;
    xtxf(isl, k, &fsx, &fsn, &ffn, &ffx);
    if (fsx < sx) {
      return 0;
    }
    if (fsx > sx) {
      continue;
    }
    if (ffn == fn && ffx == fx) {
      return 0;
    }
    //if (x4(lstptr(map_form_number[fn],tokens)[fx],ffn)) return 1;
    //if (x4(Rule(fn)->token(fx),ffn)) return 1;
    if (Rule(fn).token(fx).isExpansionRule(ffn)) {
      return 1;
    }
  }
  return 0;
}


/* x1 builds a rule stack from a token stack */

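/*
 * xis() builds a two-field tuple dictionary of (rule, position) items
 * for state sn: the characteristic items from isht_dict plus their
 * closure. For each item whose marked position lies inside the rule,
 * every rule of the marked token is added at position 0; rules whose
 * first token is the marked token itself are inserted immediately,
 * while the rest are collected on the working stack and inserted
 * afterward.
 */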
tuple_dict *xis(unsigned sn) {
  tuple_dict *d = null_tuple_dict(2);
  int *items, nitems, length;
  unsigned k;

  assert(sn < isht_dict->nsx);
  items = dict_str(isht_dict, sn);
  length = *items++ - 1;
  nitems = length/2;
  while (nitems--) {
    int fn = *items++;
    int fx = *items++;
    insert_tuple_dict(d, fn, fx);
  }
  for (k = 0; k < d->nsx; k++) {
    int *t = d->text+2*k;
    int fn = *t++;
    unsigned fx = *t;
    if (fx < Rule(fn)->length()) {
      //unsigned tn = lstptr(map_form_number[fn],tokens)[fx];
      Token token(Rule(fn).token(fx));
      //token_number_map *tp = &map_token_number[tn];
      //int nf =  token->n_forms;
      int nf = token->ruleList.size();
      //unsigned *fl = lstptr(*tp,forms);
      //unsigned *fl = token->forms();
      iws();
      //while (nf--) {
      for (int i = 0; i < nf; i++) {
        //form_number_map *fp = &map_form_number[*fl];
        //Rule rule(*fl);
        Rule rule = token.rule(i);
        //if (fp->length && lstptr(*fp, tokens)[0] == tn)
        if (rule->length() && rule.token(0) == token) {
	  //insert_tuple_dict(d,*fl, 0);
	  insert_tuple_dict(d, (int) rule, 0);
	}
        else {
	  aws(rule);
	}
      }
      unsigned *fl = (unsigned *) list_base;
      nf = rws();
      while (nf--) {
	insert_tuple_dict(d, *fl++, 0);
      }
    }
  }
  return d;
}

#endif

/*
tsd *x1x(tsd *sl) {
  int n = sl->nt;
  int sx,sn,tn;
  unsigned fx;
  int *items;
  int nitems;
  tsd *isl = init_tsd(4);

  check_tsd(sl);
  sx = n-1;
  xtxf(sl,sx,&sn,&tn);
  if (!shift_token(tn,sn)) {
    tn = 0;
  }

  {
    tuple_dict *d;
    d = xis(sn);
    items = d->text;
    nitems = d->nsx;
    items += 2*nitems;
    while (nitems--) {
      fx = *--items;
      Rule rule = *--items;
      if (tn == 0 ||
          fx >= rule->non_vanishing_length ||
          rule.token(fx).isExpansionToken(tn)) {
        at(isl,sx,sn,(int) rule,fx);
      }
    }
    delete_tuple_dict(d);
  }
  while (sx-- > 0) {
    tuple_dict *d;
    unsigned psn = sn;
    const unsigned *p;
    int n;
    xtxf(sl,sx,&sn,&tn);
    n = find_gotos(sn, &p);
    p += 2*n;
    while (n-- && *--p != psn) {
      p--;
    }
    assert(*p == psn && n >= 0);
    d = xis(sn);
    items = d->text;
    nitems = d->nsx;
    items += 2*nitems;
    while (nitems--) {
      fx = *--items;
      Rule rule = *--items;
      if (fx >= rule->length()) {
        continue;
      }
      if (x3(isl, sx,rule,fx)) {
        at(isl, sx, sn, (int)rule, fx);
      }
    }
    delete_tuple_dict(d);
  }
  return isl;
}
*/

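/*
 * x9x() returns 1 if conflict_token can appear at the start of a
 * derivation of token tn (skipping over zero-length tokens), and 0
 * otherwise. Each derivation step found is recorded in
 * token_derivation as a (rule, position) pair; tried[] keeps the
 * recursion from revisiting a token.
 */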
int x9x(int tn) {
  Token token = tn;
  //token_number_map *tp = &map_token_number[tn];
  //unsigned *fl = lstptr(*tp,forms);
  //int nf = token->n_forms;
  int nf = token->ruleList.size();

  if (tn == conflict_token) {
    return 1;
  }
  if (tried[tn]) {
    return 0;
  }
  tried[tn] = 1;
  //while (nf--) {
  for (int i = 0; i < nf; i++) {
    //int fn = *fl++;
    Rule rule = token.rule(i);
    //form_number_map *fp = &map_form_number[fn];
    int length = rule->length();
    int fx = 0;
    while (fx < length) {
      //int t = lstptr(*fp,tokens)[fx];
      Token ruleToken = rule.token(fx);
      if ((int) ruleToken == conflict_token || x9x(ruleToken)) {
        at(token_derivation, (int) rule, fx);
        return 1;
      }
      //if (!map_token_number[t].zero_length_flag) break;
      if (!ruleToken->zero_length_flag) {
	break;
      }
      fx++;
    }
  }
  return 0;
}

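/*
 * new_next_state() looks up the state reached from state s on token t:
 * it scans the goto list for a goto on t itself or on a "junky" token
 * whose first rule begins with t. Failing that, if t (or, for a junky
 * token, its underlying pure token) has a character set split into
 * several partition elements, the token of each partition element is
 * tried against the goto list. Returns the next state, or 0 if no
 * transition is found.
 */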
int new_next_state(int s, unsigned t) {
  LOGSECTION("new_next_state");
  LOGV(s) LCV(t);
  Token token = t;
  //token_number_map *tp;
  int ts;
  int np;
  const unsigned *gl;
  int ng = find_gotos(s, &gl);
  const unsigned *glx = gl;
  int ngx = ng;

  //check_stack;
  while (ng--) {
    LOGV(gl[0]) LCV(gl[1]);
    Token token = *gl;
    if (*gl == t ||
	(token->junky && (unsigned) token.rule(0).token(0) == t)) {
      return gl[1]; 
    }
    else {
      gl += 2;
    }
  }
  LOGS("Didn't find a goto");
  //tp = &map_token_number[t];
  //ts = tp->token_set_id;
  ts = token->token_set_id;
  Token pureToken;
  LOGV(pureToken) LCV(ts);
  if (token->junky) {
    pureToken = token.rule(0).token(0);
  }
  if (ts == 0 && pureToken.isNotNull()) {
    ts = pureToken->token_set_id;
  }
  LOGV(pureToken) LCV(ts);
  np = map_char_set[ts].nparts;
  LOGV(np);
  if (ts && np > 1) {
    unsigned *fl = lstptr(map_char_set[ts], part);
    while (np--) {
      const unsigned *g = glx;
      int n = ngx;
      unsigned pn = fl[np];
      unsigned pt = map_part_number[pn].token_number;
      while (n--) {
        if (*g++ == pt) {
	  return *g;
	}
	else {
	  g++;
	}
      }
    }
  }
  LOGS("Didn't find a partition set either");
  return 0;
}

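/*
 * transitionToken() returns the token that labels the goto from
 * fromState to toState, or a null Token if there is no such goto.
 */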
Token transitionToken(unsigned fromState, unsigned toState) {
  LOGSECTION("transitionToken");
  LOGV(fromState) LCV(toState);
  const unsigned *gl;
  int nGotos = find_gotos(fromState,&gl);
  //const unsigned *glx = gl;
  int ngx = nGotos;

  //check_stack;
  while (ngx--) {
    LOGV(gl[0]) LCV(gl[1]);
    Token token = *gl;
    if (gl[1] == toState) {
      return gl[0];
    }
    gl += 2;
  }
  LOGS("Didn't find a goto");
  return Token();
}