Skip to content

Commit

Permalink
expanded tabs to spaces (otherwise code display will be
broken if indentation is set to the project's default of
2 characters)

Browse files Browse the repository at this point in the history

git-svn-id: https://pet.opendfki.de/repos/pet/main@331 4200e16c-5112-0410-ac55-d7fb557a720a
  • Loading branch information
pead01 committed Jun 1, 2007
1 parent 3c16179 commit 9cbcf9b
Show file tree
Hide file tree
Showing 61 changed files with 2,243 additions and 2,243 deletions.
2 changes: 1 addition & 1 deletion Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ SUBDIRS += fspp
endif
DIST_SUBDIRS = $(SUBDIRS) common
EXTRA_DIST = BUGS CHANGELOG LICENSE README TODO \
doxyconfig.cheap doxyconfig.flop
doxyconfig.cheap doxyconfig.flop

# For ebrowse (Emacs)
BROWSE:
Expand Down
4 changes: 2 additions & 2 deletions borland/l2lib.h
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
#include <vector>

/* The entire contents of this file, in printed or electronic form, are
(c) Copyright YY Technologies, Mountain View, CA 1999,2000,2001
Unpublished work -- all rights reserved -- proprietary, trade secret
(c) Copyright YY Technologies, Mountain View, CA 1999,2000,2001
Unpublished work -- all rights reserved -- proprietary, trade secret
*/

/*
Expand Down
24 changes: 12 additions & 12 deletions cheap/Makefile.am
Original file line number Diff line number Diff line change
Expand Up @@ -65,18 +65,18 @@ nodist_cheap_SOURCES = bitcode.cpp \
# The headers do not need to be listed for compilation, but they need
# to be found for making a distribution.

cheap_SOURCES = agenda.h chart.cpp chart.h cheap.cpp cheap.h \
fs.cpp fs.h grammar.cpp grammar.h \
input-modules.h item-printer.cpp item-printer.h item.cpp item.h \
lexicon.cpp lexicon.h lexparser.cpp lexparser.h \
lingo-tokenizer.cpp lingo-tokenizer.h \
morph.cpp morph.h options.cpp options.h parse.cpp parse.h \
paths.cpp paths.h position-mapper.h postags.cpp postags.h \
restrictor.h sm.cpp sm.h task.cpp task.h itsdb.h timer.h tsdb++.h \
tsdb++.cpp yy-tokenizer.cpp yy-tokenizer.h

TOMABECHI_UNIF_FILES = dag-tomabechi.cpp dag-tomabechi.h failure.h \
failure.cpp qc.h qc.cpp
cheap_SOURCES = agenda.h chart.cpp chart.h cheap.cpp cheap.h \
fs.cpp fs.h grammar.cpp grammar.h \
input-modules.h item-printer.cpp item-printer.h item.cpp item.h \
lexicon.cpp lexicon.h lexparser.cpp lexparser.h \
lingo-tokenizer.cpp lingo-tokenizer.h \
morph.cpp morph.h options.cpp options.h parse.cpp parse.h \
paths.cpp paths.h position-mapper.h postags.cpp postags.h \
restrictor.h sm.cpp sm.h task.cpp task.h itsdb.h timer.h tsdb++.h \
tsdb++.cpp yy-tokenizer.cpp yy-tokenizer.h

TOMABECHI_UNIF_FILES = dag-tomabechi.cpp dag-tomabechi.h failure.h \
failure.cpp qc.h qc.cpp
if TOMABECHI_UNIFIER
DEFS += -DDAG_TOMABECHI
cheap_SOURCES += $(TOMABECHI_UNIF_FILES)
Expand Down
8 changes: 4 additions & 4 deletions cheap/chart.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -141,18 +141,18 @@ void chart::get_statistics()
tItem *it = iter.current();

if(it->trait() == INFL_TRAIT)
{
{
stats.medges++;
}
}
else if(it -> passive())
{
{
stats.pedges++;
if(it -> result_contrib())
stats.rpedges++;

fs f = it -> get_fs();
totalsize += f.size();
}
}
else
{
stats.aedges++;
Expand Down
24 changes: 12 additions & 12 deletions cheap/dag-tomabechi.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -901,7 +901,7 @@ dag_node *dag_cyclic_copy(dag_node *src, list_int *del) {
if(!contains(del, arc->attr)) {
if((v = dag_cyclic_copy(arc->val, 0)) == FAIL)
return FAIL;
new_arcs = dag_cons_arc(arc->attr, v, new_arcs);
}
arc = arc->next;
Expand Down Expand Up @@ -1025,10 +1025,10 @@ dag_node *dag_copy(dag_node *src, list_int *del) {
{
if((v = dag_copy(arc->val, 0)) == FAIL)
return FAIL;
if(arc->val != v)
copy_p = true;
new_arcs = dag_cons_arc(arc->attr, v, new_arcs);
}
arc = arc->next;
Expand Down Expand Up @@ -1071,7 +1071,7 @@ inline bool arcs_contain(dag_arc *arc, attr_t attr) {
inline dag_arc *clone_arcs_del(dag_arc *src, dag_arc *dst, dag_arc *del) {
while(src) {
if(!arcs_contain(del, src->attr))
dst = dag_cons_arc(src->attr, src->val, dst);
dst = dag_cons_arc(src->attr, src->val, dst);
src = src->next;
}

Expand All @@ -1088,7 +1088,7 @@ clone_arcs_del_del(dag_arc *src, dag_arc *dst,
dag_arc *del_arcs, list_int *del_attrs) {
while(src) {
if(!contains(del_attrs,src->attr) && !arcs_contain(del_arcs, src->attr))
dst = dag_cons_arc(src->attr, src->val, dst);
dst = dag_cons_arc(src->attr, src->val, dst);
src = src->next;
}

Expand Down Expand Up @@ -1175,7 +1175,7 @@ dag_node *dag_copy(dag_node *src, list_int *del) {
if(!contains(del, arc->attr)) {
if((v = dag_copy(arc->val, 0)) == FAIL)
return FAIL;
if(arc->val != v) {
copy_p = true;
new_arcs = dag_cons_arc(arc->attr, v, new_arcs);
Expand Down Expand Up @@ -1233,7 +1233,7 @@ dag_node *dag_copy(dag_node *src, list_int *del) {
if(!contains(del, arc->attr)) {
if((v = dag_copy(arc->val, 0)) == FAIL)
return FAIL;
new_arcs = dag_cons_arc(arc->attr, v, new_arcs);
}
arc = arc->next;
Expand Down Expand Up @@ -1313,8 +1313,8 @@ void dag_get_qc_vector_temp(qc_node *path, dag_node *dag, type_t *qc_vector)

for(qc_arc *arc = path->arcs; arc != 0; arc = arc->next)
dag_get_qc_vector_temp(arc->val,
dag_get_attr_value_temp(dag, arc->attr),
qc_vector);
dag_get_attr_value_temp(dag, arc->attr),
qc_vector);
}


Expand Down Expand Up @@ -1813,14 +1813,14 @@ dag_partial_copy1(dag_node *dag, attr_t attr, const restrictor &del)
{
arc = dag->arcs;
while(arc != 0)
{
{
dag_add_arc(copy,
new_arc(arc->attr,dag_partial_copy1(arc->val,
arc->attr,
del)));
arc = arc->next;
}
}
}
}
else if(attr != -1)
{
copy->type = maxapp[attr];
Expand Down
4 changes: 2 additions & 2 deletions cheap/failure.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -160,10 +160,10 @@ unification_failure::print(FILE *f) const
fprintf(f, ":");
for(list<list_int *>::const_iterator iter = _cyclic_paths.begin();
iter != _cyclic_paths.end(); ++iter)
{
{
fprintf(f, "\n ");
::print_path(f, *iter);
}
}
}
}

Expand Down
6 changes: 3 additions & 3 deletions cheap/fs.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -389,7 +389,7 @@ record_failures(list<unification_failure *> fails, bool unification,
}
i++;
delete f;
// _fix_me_ may not delete f if opt_print_failure is on
// _fix_me_ may not delete f if opt_print_failure is on
}

delete[] value;
Expand All @@ -405,7 +405,7 @@ record_failures(list<unification_failure *> fails, bool unification,
(*iter)->print(ferr);
fprintf(ferr, "\n");
}
// _fix_me_ need to delete f here
// _fix_me_ need to delete f here
}
}

Expand Down Expand Up @@ -523,7 +523,7 @@ subsumes(const fs &a, const fs &b, bool &forward, bool &backward)
}
if(good)
filtered.push_back(*f);
else
else
delete *f;

}
Expand Down
22 changes: 11 additions & 11 deletions cheap/grammar.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ bool
predle_status(type_t t)
{
return cheap_settings->statusmember("predict-lexentry-status-values",
typestatus[t]);
typestatus[t]);
}

bool
Expand Down Expand Up @@ -471,10 +471,10 @@ tGrammar::tGrammar(const char * filename)
_generics = cons(i, _generics);
_lexicon[i] = new lex_stem(i);
}
else if (predle_status(i)) {
_predicts = cons(i, _predicts);
_lexicon[i] = new lex_stem(i);
}
else if (predle_status(i)) {
_predicts = cons(i, _predicts);
_lexicon[i] = new lex_stem(i);
}
}

/*
Expand Down Expand Up @@ -531,7 +531,7 @@ tGrammar::tGrammar(const char * filename)

if(r == 0)
continue;
if(t == -1)
_morph->add_global(string(r));
else
Expand All @@ -547,12 +547,12 @@ tGrammar::tGrammar(const char * filename)
fprintf(ferr, "warning: found syntax `%s' rule "
"with attached infl rule `%s'\n",
print_name(t), r);
iter.current()->trait(INFL_TRAIT);
found = true;
}
}
if(!found)
fprintf(ferr, "warning: rule `%s' with infl annotation "
"`%s' doesn't correspond to any of the parser's "
Expand Down Expand Up @@ -587,7 +587,7 @@ tGrammar::tGrammar(const char * filename)
delete[] form; delete[] infl; delete[] stem;
continue;
}
_morph->add_irreg(string(stem), inflr, string(form));
delete[] form; delete[] infl; delete[] stem;
}
Expand Down Expand Up @@ -642,8 +642,8 @@ tGrammar::tGrammar(const char * filename)
if ((lexsm_file = cheap_settings->value("lexsm")) != 0) {
try { _lexsm = new tMEM(this, lexsm_file, filename); }
catch(tError &e) {
fprintf(ferr, "\n%s", e.getMessage().c_str());
_lexsm = 0;
fprintf(ferr, "\n%s", e.getMessage().c_str());
_lexsm = 0;
}
}

Expand Down
Loading

0 comments on commit 9cbcf9b

Please sign in to comment.