remove support for unindexed tokens

This was a special feature of the legacy tokenizer, which would not
index very frequent tokens.
This commit is contained in:
Sarah Hoffmann
2024-09-22 10:39:10 +02:00
parent 290c22a153
commit a690605a96
6 changed files with 23 additions and 30 deletions

View File

@@ -20,7 +20,7 @@ class MyToken(Token):
def make_query(*args):
q = QueryStruct([Phrase(args[0][1], '')])
dummy = MyToken(penalty=3.0, token=45, count=1, addr_count=1,
lookup_word='foo', is_indexed=True)
lookup_word='foo')
for btype, ptype, _ in args[1:]:
q.add_node(btype, ptype)