adapt tests for new word count mechanism

Author: Sarah Hoffmann
Date: 2021-10-19 12:03:48 +02:00
parent ec7184c533
commit 824562357b
2 changed files with 12 additions and 4 deletions
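
The change being tested: word counts are no longer recomputed by a standalone recompute_word_counts refresh function but by asking the tokenizer to update its own statistics. A minimal sketch of the dispatch that the new CLI test exercises is given below; get_tokenizer_for_db() and update_statistics() appear in the diff, while the handler name, its arguments and the exact factory signature are assumptions rather than actual Nominatim code.

# Illustrative sketch only, not the project's refresh handler.
from nominatim.tokenizer import factory as tokenizer_factory

def run_refresh_word_count(config):
    # Load the tokenizer that was used for the database import ...
    tokenizer = tokenizer_factory.get_tokenizer_for_db(config)
    # ... and let it recompute the word count statistics itself, instead of
    # calling a SQL-level recompute_word_counts function from the CLI.
    tokenizer.update_statistics()
    return 0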

Changed file 1:

@@ -144,6 +144,7 @@ class TestCliWithDb:
             def __init__(self, *args, **kwargs):
                 self.update_sql_functions_called = False
                 self.finalize_import_called = False
+                self.update_statistics_called = False
 
             def update_sql_functions(self, *args):
                 self.update_sql_functions_called = True
@@ -151,6 +152,10 @@ class TestCliWithDb:
             def finalize_import(self, *args):
                 self.finalize_import_called = True
 
+            def update_statistics(self):
+                self.update_statistics_called = True
+
+
         tok = DummyTokenizer()
         monkeypatch.setattr(nominatim.tokenizer.factory, 'get_tokenizer_for_db',
                             lambda *args: tok)
@@ -316,7 +321,6 @@ class TestCliWithDb:
         assert func.called == 1
 
     @pytest.mark.parametrize("command,func", [
-                             ('word-counts', 'recompute_word_counts'),
                              ('address-levels', 'load_address_levels_from_file'),
                              ('wiki-data', 'import_wikipedia_articles'),
                              ('importance', 'recompute_importance'),
@@ -329,6 +333,11 @@ class TestCliWithDb:
         assert func_mock.called == 1
 
+    def test_refresh_word_count(self):
+        assert self.call_nominatim('refresh', '--word-count') == 0
+        assert self.tokenizer_mock.update_statistics_called
+
+
     def test_refresh_postcodes(self, mock_func_factory, place_table):
         func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
         idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')
 
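
The CLI tests above only check delegation through the DummyTokenizer mock. The contract they assume is that every tokenizer now offers an update_statistics() method next to update_sql_functions() and finalize_import(). A hedged sketch of that interface follows; only the three method names are taken from the mock, while the base-class name, signatures and docstrings are assumptions.

from abc import ABC, abstractmethod

class AbstractTokenizer(ABC):
    # Assumed shape of the tokenizer interface that DummyTokenizer imitates.

    @abstractmethod
    def update_sql_functions(self, config):
        """Reinstall the SQL functions provided by the tokenizer."""

    @abstractmethod
    def finalize_import(self, config):
        """Run clean-up work after the initial import."""

    @abstractmethod
    def update_statistics(self):
        """Recompute word usage statistics; this takes over from the
        former recompute_word_counts refresh step."""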

Changed file 2:

@@ -160,7 +160,7 @@ def test_init_new(tokenizer_factory, test_config, monkeypatch, db_prop):
     assert db_prop(icu_tokenizer.DBCFG_TERM_NORMALIZATION) == ':: lower();'
 
 
-def test_init_word_table(tokenizer_factory, test_config, place_row, word_table):
+def test_init_word_table(tokenizer_factory, test_config, place_row, temp_db_cursor):
     place_row(names={'name' : 'Test Area', 'ref' : '52'})
     place_row(names={'name' : 'No Area'})
     place_row(names={'name' : 'Holzstrasse'})
@@ -168,8 +168,7 @@ def test_init_word_table(tokenizer_factory, test_config, place_row, word_table):
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
-    assert word_table.get_partial_words() == {('test', 1),
-                                              ('no', 1), ('area', 2)}
+    assert temp_db_cursor.table_exists('word')
 
 
 def test_init_from_project(monkeypatch, test_config, tokenizer_factory):
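
The rewritten assertion no longer inspects pre-computed partial word counts, which are presumably filled in later via the tokenizer's update_statistics(), and merely checks that init_new_db() created the word table. It relies on a table_exists() helper on the temp_db_cursor fixture; a minimal sketch of such a helper, assuming a psycopg2-style cursor against PostgreSQL and not the project's actual fixture code, could look like this:

def table_exists(cursor, table_name):
    # Look the table up in the PostgreSQL catalog; True if exactly one
    # table with that name exists in the current database.
    cursor.execute('SELECT count(*) FROM pg_tables WHERE tablename = %s',
                   (table_name,))
    return cursor.fetchone()[0] == 1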