require tokenizer for indexer

This commit is contained in:
Sarah Hoffmann
2021-04-24 11:25:47 +02:00
parent 1b1ed820c3
commit e1c5673ac3
11 changed files with 88 additions and 56 deletions

View File

@@ -24,7 +24,7 @@ Feature: Creation of search terms
When importing
Then search_name contains
| object | nameaddress_vector |
| N1 | Rose, Street, Walltown |
| N1 | #Rose Street, Walltown |
When searching for "23 Rose Street, Walltown"
Then results contain
| osm_type | osm_id | name |
@@ -248,7 +248,7 @@ Feature: Creation of search terms
When importing
Then search_name contains
| object | name_vector | nameaddress_vector |
| N1 | #Green Moss | Rose, Street, Walltown |
| N1 | #Green Moss | #Rose Street, Walltown |
When searching for "Green Moss, Rose Street, Walltown"
Then results contain
| osm_type | osm_id | name |
@@ -299,7 +299,7 @@ Feature: Creation of search terms
When importing
Then search_name contains
| object | name_vector | nameaddress_vector |
| N1 | foo | the road |
| N1 | foo | #the road |
Scenario: Some addr: tags are added to address
Given the scene roads-with-pois

View File

@@ -109,7 +109,7 @@ def import_and_index_data_from_place_table(context):
# Call directly as the refresh function does not include postcodes.
indexer.LOG.setLevel(logging.ERROR)
indexer.Indexer(context.nominatim.get_libpq_dsn(), 1).index_full(analyse=False)
indexer.Indexer(context.nominatim.get_libpq_dsn(), tokenizer, 1).index_full(analyse=False)
check_database_integrity(context)
@@ -234,7 +234,7 @@ def check_search_name_contents(context, exclude):
if exclude:
assert not present, "Found term for {}/{}: {}".format(row['object'], name, wid[1])
else:
assert present, "Missing term for {}/{}: {}".fromat(row['object'], name, wid[1])
assert present, "Missing term for {}/{}: {}".format(row['object'], name, wid[1])
elif name != 'object':
assert db_row.contains(name, value), db_row.assert_msg(name, value)