require tokenizer for indexer

This commit is contained in:
Sarah Hoffmann
2021-04-24 11:25:47 +02:00
parent 1b1ed820c3
commit e1c5673ac3
11 changed files with 88 additions and 56 deletions

View File

@@ -109,7 +109,7 @@ def import_and_index_data_from_place_table(context):
# Call directly as the refresh function does not include postcodes.
indexer.LOG.setLevel(logging.ERROR)
indexer.Indexer(context.nominatim.get_libpq_dsn(), 1).index_full(analyse=False)
indexer.Indexer(context.nominatim.get_libpq_dsn(), tokenizer, 1).index_full(analyse=False)
check_database_integrity(context)
@@ -234,7 +234,7 @@ def check_search_name_contents(context, exclude):
if exclude:
assert not present, "Found term for {}/{}: {}".format(row['object'], name, wid[1])
else:
assert present, "Missing term for {}/{}: {}".fromat(row['object'], name, wid[1])
assert present, "Missing term for {}/{}: {}".format(row['object'], name, wid[1])
elif name != 'object':
assert db_row.contains(name, value), db_row.assert_msg(name, value)