Mirror of https://github.com/osm-search/Nominatim.git, synced 2026-02-15 02:47:59 +00:00
tests for legacy tokenizer
@@ -31,12 +31,11 @@ def test_config(def_config, tmp_path):
     def_config.lib_dir.sql = sqldir
     def_config.lib_dir.data = sqldir
 
-
     return def_config
 
 
 @pytest.fixture
-def tokenizer_factory(dsn, tmp_path, monkeypatch):
+def tokenizer_factory(dsn, tmp_path, monkeypatch, property_table):
 
     def _maker():
         return legacy_tokenizer.create(dsn, tmp_path / 'tokenizer')
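This first hunk moves the property_table dependency into the tokenizer_factory fixture itself, so every test that builds a tokenizer gets the table implicitly and the later hunks can drop it from the test signatures. A minimal sketch of that pytest pattern, with illustrative names that are not part of the Nominatim test suite:

import pytest

created = []

@pytest.fixture
def property_setup():
    # stand-in for property_table: set-up work shared by all tokenizer tests
    created.append('property table')

@pytest.fixture
def factory(property_setup):
    # depending on property_setup here means every user of the factory
    # gets the set-up implicitly and can drop it from its own signature
    def _maker():
        return 'tokenizer'
    return _maker

def test_uses_factory(factory):
    assert factory() == 'tokenizer'
    assert created == ['property table']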
@@ -44,14 +43,32 @@ def tokenizer_factory(dsn, tmp_path, monkeypatch):
     return _maker
 
 @pytest.fixture
-def tokenizer_setup(tokenizer_factory, test_config, property_table,
-                    monkeypatch, sql_preprocessor):
+def tokenizer_setup(tokenizer_factory, test_config, monkeypatch, sql_preprocessor):
     monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
 
-def test_init_new(tokenizer_factory, test_config, property_table, monkeypatch,
+@pytest.fixture
+def analyzer(tokenizer_factory, test_config, monkeypatch, sql_preprocessor,
+             word_table, temp_db_with_extensions, tmp_path):
+    sql = tmp_path / 'sql' / 'tokenizer' / 'legacy_tokenizer.sql'
+    sql.write_text("""
+        CREATE OR REPLACE FUNCTION getorcreate_housenumber_id(lookup_word TEXT)
+          RETURNS INTEGER AS $$ SELECT 342; $$ LANGUAGE SQL;
+        """)
+
+    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', ':: lower();')
+    tok = tokenizer_factory()
+    tok.init_new_db(test_config)
+    monkeypatch.undo()
+
+    with tok.name_analyzer() as analyzer:
+        yield analyzer
+
+
+def test_init_new(tokenizer_factory, test_config, monkeypatch,
                   temp_db_conn, sql_preprocessor):
     monkeypatch.setenv('NOMINATIM_TERM_NORMALIZATION', 'xxvv')
     monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
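The new analyzer fixture yields from inside the `with tok.name_analyzer()` block, so the analyzer is closed automatically during fixture teardown; monkeypatch.undo() rolls the setenv/setattr patches back right after init_new_db instead of at teardown, keeping them out of the yielded analyzer. A sketch of the yield-inside-context-manager pattern, with a made-up resource standing in for the real analyzer:

import contextlib
import pytest

@contextlib.contextmanager
def open_analyzer():
    # stand-in for tok.name_analyzer(): hands out a resource and
    # guarantees clean-up when the with-block exits
    resource = {'open': True}
    try:
        yield resource
    finally:
        resource['open'] = False

@pytest.fixture
def analyzer():
    with open_analyzer() as a:
        yield a   # teardown resumes here, closing the with-block

def test_analyzer_is_open(analyzer):
    assert analyzer['open']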
@@ -68,7 +85,7 @@ def test_init_new(tokenizer_factory, test_config, property_table, monkeypatch,
     assert outfile.stat().st_mode == 33261
 
 
-def test_init_module_load_failed(tokenizer_factory, test_config, property_table,
+def test_init_module_load_failed(tokenizer_factory, test_config,
                                  monkeypatch, temp_db_conn):
     tok = tokenizer_factory()
 
@@ -76,7 +93,7 @@ def test_init_module_load_failed(tokenizer_factory, test_config, property_table,
         tok.init_new_db(test_config)
 
 
-def test_init_module_custom(tokenizer_factory, test_config, property_table,
+def test_init_module_custom(tokenizer_factory, test_config,
                             monkeypatch, tmp_path, sql_preprocessor):
     module_dir = (tmp_path / 'custom').resolve()
     module_dir.mkdir()
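These two hunks only drop property_table from the test signatures; what lets the tests run at all is the stubbed `_check_module`, since the real check would try to load the PostgreSQL module. A sketch of that monkeypatch trick against a made-up stand-in module, not the real legacy_tokenizer:

import pytest

class legacy_stub:
    # stand-in for nominatim.tokenizer.legacy_tokenizer
    @staticmethod
    def _check_module(module_dir, conn):
        raise RuntimeError('would need a live PostgreSQL server')

def init_db():
    legacy_stub._check_module('module', None)
    return 'initialised'

def test_init_without_postgres(monkeypatch):
    # same trick as the diff: neutralise the module check
    monkeypatch.setattr(legacy_stub, '_check_module', lambda m, c: None)
    assert init_db() == 'initialised'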
@@ -97,3 +114,45 @@ def test_init_from_project(tokenizer_setup, tokenizer_factory):
     tok.init_from_project()
 
     assert tok.normalization is not None
+
+
+def test_update_sql_functions(sql_preprocessor, temp_db_conn,
+                              tokenizer_factory, test_config, table_factory,
+                              monkeypatch, temp_db_cursor):
+    monkeypatch.setenv('NOMINATIM_MAX_WORD_FREQUENCY', '1133')
+    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    tok = tokenizer_factory()
+    tok.init_new_db(test_config)
+    monkeypatch.undo()
+
+    assert properties.get_property(temp_db_conn, legacy_tokenizer.DBCFG_MAXWORDFREQ) == '1133'
+
+    table_factory('test', 'txt TEXT')
+
+    func_file = test_config.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer.sql'
+    func_file.write_text("""INSERT INTO test VALUES ('{{max_word_freq}}'),
+                                                    ('{{modulepath}}')""")
+
+    tok.update_sql_functions(test_config)
+
+    test_content = temp_db_cursor.row_set('SELECT * FROM test')
+    assert test_content == set((('1133', ), (str(test_config.project_dir / 'module'), )))
+
+
+def test_migrate_database(tokenizer_factory, test_config, temp_db_conn, monkeypatch):
+    monkeypatch.setattr(legacy_tokenizer, '_check_module' , lambda m, c: None)
+    tok = tokenizer_factory()
+    tok.migrate_database(test_config)
+
+    assert properties.get_property(temp_db_conn, legacy_tokenizer.DBCFG_MAXWORDFREQ) is not None
+    assert properties.get_property(temp_db_conn, legacy_tokenizer.DBCFG_NORMALIZATION) is not None
+
+    outfile = test_config.project_dir / 'module' / 'nominatim.so'
+
+    assert outfile.exists()
+    assert outfile.read_text() == 'TEST nomiantim.so'
+    assert outfile.stat().st_mode == 33261
+
+
+def test_normalize(analyzer):
+    assert analyzer.normalize('TEsT') == 'test'
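Two details in the added tests are worth spelling out. The `{{max_word_freq}}` and `{{modulepath}}` markers are template placeholders that update_sql_functions() expands before running the SQL, which is why the throwaway INSERT makes the substituted values observable in the test table; and the st_mode assertion reads more naturally in octal. A sketch of both, assuming Jinja2-style rendering as used by the project's SQL preprocessor, with hypothetical values in place of the real config:

import stat
import jinja2

# hypothetical values; the real ones come from the Nominatim config
sql = jinja2.Environment().from_string(
    "INSERT INTO test VALUES ('{{max_word_freq}}'), ('{{modulepath}}')")
print(sql.render(max_word_freq='1133', modulepath='/srv/project/module'))
# INSERT INTO test VALUES ('1133'), ('/srv/project/module')

# 33261 is 0o100755: a regular file with rwxr-xr-x permissions,
# i.e. the migrated nominatim.so must remain executable
assert 33261 == 0o100755
assert stat.filemode(33261) == '-rwxr-xr-x'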