Mirror of https://github.com/osm-search/Nominatim.git
Synced 2026-02-14 01:47:57 +00:00

Compare commits (6 commits): 5314e6c881 ... 2ddb19c0b0
| Author | SHA1 | Date |
|---|---|---|
| | 2ddb19c0b0 | |
| | 3f14f89bdf | |
| | 8ed7a3875a | |
| | 70b9140f13 | |
| | 3285948130 | |
| | 9d0732a941 | |
@@ -7,7 +7,7 @@
 DROP TABLE IF EXISTS search_name;
-{% if not db.reverse_only %}
+{% if not create_reverse_only %}
 CREATE TABLE search_name (
   place_id BIGINT NOT NULL,
@@ -152,12 +152,11 @@ def create_tables(conn: Connection, config: Configuration, reverse_only: bool =
     When `reverse_only` is True, then the main table for searching will
     be skipped and only reverse search is possible.
     """
-    sql = SQLPreprocessor(conn, config)
-    sql.env.globals['db']['reverse_only'] = reverse_only
-
-    sql.run_sql_file(conn, 'tables.sql')
-
-    sql.run_sql_file(conn, 'grants.sql')
+    SQLPreprocessor(conn, config).run_sql_file(conn, 'tables.sql',
+                                               create_reverse_only=reverse_only)
+
+    # reinitiate the preprocessor to get all the newly created tables
+    SQLPreprocessor(conn, config).run_sql_file(conn, 'grants.sql')


 def create_table_triggers(conn: Connection, config: Configuration) -> None:
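The two hunks above replace state stashed in the preprocessor's Jinja globals (`db.reverse_only`) with a parameter passed per `run_sql_file` call (`create_reverse_only`). A minimal jinja2 sketch of the same pattern; the template string below is illustrative only, not Nominatim's actual tables.sql:

```python
# Minimal sketch of the pattern (plain jinja2, not Nominatim's SQLPreprocessor):
# a render-time parameter replaces a value mutated into env.globals.
import jinja2

template = jinja2.Environment().from_string(
    "DROP TABLE IF EXISTS search_name;\n"
    "{% if not create_reverse_only %}"
    "CREATE TABLE search_name (place_id BIGINT NOT NULL);"
    "{% endif %}")

# The flag travels with each render call, so different calls can use
# different settings without touching shared environment state.
print(template.render(create_reverse_only=False))  # table is created
print(template.render(create_reverse_only=True))   # table is skipped
```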
@@ -280,6 +280,44 @@ def osmline_table(temp_db_with_extensions, load_sql):
     load_sql('tables/interpolation.sql')


+@pytest.fixture
+def osmline_row(osmline_table, temp_db_cursor):
+    idseq = itertools.count(20001)
+
+    def _add(osm_id=None, geom='LINESTRING(12.0 11.0, 12.003 11.0)'):
+        return temp_db_cursor.insert_row(
+            'location_property_osmline',
+            place_id=pysql.SQL("nextval('seq_place')"),
+            osm_id=osm_id or next(idseq),
+            geometry_sector=pysql.Literal(20),
+            partition=pysql.Literal(0),
+            indexed_status=1,
+            linegeo=_with_srid(geom))
+
+    return _add
+
+
+@pytest.fixture
+def postcode_table(temp_db_with_extensions, load_sql):
+    load_sql('tables/postcodes.sql')
+
+
+@pytest.fixture
+def postcode_row(postcode_table, temp_db_cursor):
+    def _add(country, postcode, x=34.5, y=-9.33):
+        geom = _with_srid(f"POINT({x} {y})")
+        return temp_db_cursor.insert_row(
+            'location_postcodes',
+            place_id=pysql.SQL("nextval('seq_place')"),
+            indexed_status=pysql.Literal(1),
+            country_code=country, postcode=postcode,
+            centroid=geom,
+            rank_search=pysql.Literal(16),
+            geometry=('ST_Expand(%s::geometry, 0.005)', geom))
+
+    return _add
+
+
 @pytest.fixture
 def sql_preprocessor_cfg(tmp_path, table_factory, temp_db_with_extensions, country_row):
     for part in range(3):
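For orientation, a hypothetical test consuming the new row factories; `table_rows` is the counting helper the later hunks use on `temp_db_cursor`. This test body is an illustration, not part of the diff:

```python
# Hypothetical usage of the fixtures added above; not part of the diff.
def test_row_factories(osmline_row, postcode_row, temp_db_cursor):
    osmline_row()                                 # default geometry, generated osm_id
    postcode_row(country='de', postcode='12345')  # point centroid, expanded geometry

    assert temp_db_cursor.table_rows('location_property_osmline') == 1
    assert temp_db_cursor.table_rows('location_postcodes') == 1
```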
@@ -1,13 +1,12 @@
 # SPDX-License-Identifier: GPL-3.0-or-later
 #
 # This file is part of Nominatim. (https://nominatim.org)
 #
-# Copyright (C) 2025 by the Nominatim developer community.
+# Copyright (C) 2026 by the Nominatim developer community.
 # For a full list of authors see the git log.
 """
 Tests for running the indexing.
 """
-import itertools

 import pytest
 import pytest_asyncio  # noqa
@@ -15,129 +14,57 @@ from nominatim_db.indexer import indexer
 from nominatim_db.tokenizer import factory


-class IndexerTestDB:
-
-    def __init__(self, conn):
-        self.placex_id = itertools.count(100000)
-        self.osmline_id = itertools.count(500000)
-        self.postcode_id = itertools.count(700000)
-
-        self.conn = conn
-        self.conn.autocommit = True
-        with self.conn.cursor() as cur:
-            cur.execute("""CREATE TABLE placex (place_id BIGINT,
-                                                name HSTORE,
-                                                class TEXT,
-                                                type TEXT,
-                                                linked_place_id BIGINT,
-                                                rank_address SMALLINT,
-                                                rank_search SMALLINT,
-                                                indexed_status SMALLINT,
-                                                indexed_date TIMESTAMP,
-                                                partition SMALLINT,
-                                                admin_level SMALLINT,
-                                                country_code TEXT,
-                                                address HSTORE,
-                                                token_info JSONB,
-                                                geometry_sector INTEGER)""")
-            cur.execute("""CREATE TABLE location_property_osmline (
-                               place_id BIGINT,
-                               osm_id BIGINT,
-                               address HSTORE,
-                               token_info JSONB,
-                               indexed_status SMALLINT,
-                               indexed_date TIMESTAMP,
-                               geometry_sector INTEGER)""")
-            cur.execute("""CREATE TABLE location_postcodes (
-                               place_id BIGINT,
-                               indexed_status SMALLINT,
-                               indexed_date TIMESTAMP,
-                               country_code varchar(2),
-                               postcode TEXT)""")
-            cur.execute("""CREATE OR REPLACE FUNCTION date_update() RETURNS TRIGGER
-                           AS $$
-                           BEGIN
-                             IF NEW.indexed_status = 0 and OLD.indexed_status != 0 THEN
-                               NEW.indexed_date = now();
-                             END IF;
-                             RETURN NEW;
-                           END; $$ LANGUAGE plpgsql;""")
-            cur.execute("DROP TYPE IF EXISTS prepare_update_info CASCADE")
-            cur.execute("""CREATE TYPE prepare_update_info AS (
-                             name HSTORE,
-                             address HSTORE,
-                             rank_address SMALLINT,
-                             country_code TEXT,
-                             class TEXT,
-                             type TEXT,
-                             linked_place_id BIGINT
-                           )""")
-            cur.execute("""CREATE OR REPLACE FUNCTION placex_indexing_prepare(p placex,
-                                                      OUT result prepare_update_info)
-                           AS $$
-                           BEGIN
-                             result.address := p.address;
-                             result.name := p.name;
-                             result.class := p.class;
-                             result.type := p.type;
-                             result.country_code := p.country_code;
-                             result.rank_address := p.rank_address;
-                           END;
-                           $$ LANGUAGE plpgsql STABLE;
-                        """)
-            cur.execute("""CREATE OR REPLACE FUNCTION
-                             get_interpolation_address(in_address HSTORE, wayid BIGINT)
-                           RETURNS HSTORE AS $$
-                           BEGIN
-                             RETURN in_address;
-                           END;
-                           $$ LANGUAGE plpgsql STABLE;
-                        """)
-
-            for table in ('placex', 'location_property_osmline', 'location_postcodes'):
-                cur.execute("""CREATE TRIGGER {0}_update BEFORE UPDATE ON {0}
-                               FOR EACH ROW EXECUTE PROCEDURE date_update()
-                            """.format(table))
+class TestIndexing:
+    @pytest.fixture(autouse=True)
+    def setup(self, temp_db_conn, project_env, tokenizer_mock,
+              placex_table, postcode_table, osmline_table):
+        self.conn = temp_db_conn
+        temp_db_conn.execute("""
+            CREATE OR REPLACE FUNCTION date_update() RETURNS TRIGGER AS $$
+            BEGIN
+              IF NEW.indexed_status = 0 and OLD.indexed_status != 0 THEN
+                NEW.indexed_date = now();
+              END IF;
+              RETURN NEW;
+            END; $$ LANGUAGE plpgsql;
+
+            DROP TYPE IF EXISTS prepare_update_info CASCADE;
+            CREATE TYPE prepare_update_info AS (
+              name HSTORE,
+              address HSTORE,
+              rank_address SMALLINT,
+              country_code TEXT,
+              class TEXT,
+              type TEXT,
+              linked_place_id BIGINT
+            );
+
+            CREATE OR REPLACE FUNCTION placex_indexing_prepare(p placex,
+                                                               OUT result prepare_update_info) AS $$
+            BEGIN
+              result.address := p.address;
+              result.name := p.name;
+              result.class := p.class;
+              result.type := p.type;
+              result.country_code := p.country_code;
+              result.rank_address := p.rank_address;
+            END; $$ LANGUAGE plpgsql STABLE;
+
+            CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT)
+            RETURNS HSTORE AS $$ SELECT in_address $$ LANGUAGE sql STABLE;
+        """)
+
+        for table in ('placex', 'location_property_osmline', 'location_postcodes'):
+            temp_db_conn.execute("""CREATE TRIGGER {0}_update BEFORE UPDATE ON {0}
+                                    FOR EACH ROW EXECUTE PROCEDURE date_update()
+                                 """.format(table))
+
+        self.tokenizer = factory.create_tokenizer(project_env)

     def scalar(self, query):
         with self.conn.cursor() as cur:
             cur.execute(query)
             return cur.fetchone()[0]

-    def add_place(self, cls='place', typ='locality',
-                  rank_search=30, rank_address=30, sector=20):
-        next_id = next(self.placex_id)
-        with self.conn.cursor() as cur:
-            cur.execute("""INSERT INTO placex
-                           (place_id, class, type, rank_search, rank_address,
-                            indexed_status, geometry_sector)
-                           VALUES (%s, %s, %s, %s, %s, 1, %s)""",
-                        (next_id, cls, typ, rank_search, rank_address, sector))
-        return next_id
-
-    def add_admin(self, **kwargs):
-        kwargs['cls'] = 'boundary'
-        kwargs['typ'] = 'administrative'
-        return self.add_place(**kwargs)
-
-    def add_osmline(self, sector=20):
-        next_id = next(self.osmline_id)
-        with self.conn.cursor() as cur:
-            cur.execute("""INSERT INTO location_property_osmline
-                           (place_id, osm_id, indexed_status, geometry_sector)
-                           VALUES (%s, %s, 1, %s)""",
-                        (next_id, next_id, sector))
-        return next_id
-
-    def add_postcode(self, country, postcode):
-        next_id = next(self.postcode_id)
-        with self.conn.cursor() as cur:
-            cur.execute("""INSERT INTO location_postcodes
-                           (place_id, indexed_status, country_code, postcode)
-                           VALUES (%s, 1, %s, %s)""",
-                        (next_id, country, postcode))
-        return next_id
-
     def placex_unindexed(self):
         return self.scalar('SELECT count(*) from placex where indexed_status > 0')
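The rewrite above folds the hand-rolled `IndexerTestDB` helper into a pytest class whose autouse fixture runs before every test method. A stripped-down sketch of that pattern with generic names (nothing below is Nominatim code):

```python
# Generic illustration of the autouse-setup pattern used above: the fixture
# runs automatically before each test method and may store state on `self`.
import pytest


class TestSomething:
    @pytest.fixture(autouse=True)
    def setup(self, tmp_path):
        self.workdir = tmp_path   # any other fixture can be requested here
        self.items = []

    def test_starts_empty(self):
        assert self.items == []
```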
@@ -145,148 +72,133 @@ class IndexerTestDB:
         return self.scalar("""SELECT count(*) from location_property_osmline
                               WHERE indexed_status > 0""")


-@pytest.fixture
-def test_db(temp_db_conn):
-    yield IndexerTestDB(temp_db_conn)
-
-
-@pytest.fixture
-def test_tokenizer(tokenizer_mock, project_env):
-    return factory.create_tokenizer(project_env)
-
-
-@pytest.mark.parametrize("threads", [1, 15])
-@pytest.mark.asyncio
-async def test_index_all_by_rank(test_db, threads, test_tokenizer):
-    for rank in range(31):
-        test_db.add_place(rank_address=rank, rank_search=rank)
-    test_db.add_osmline()
-
-    assert test_db.placex_unindexed() == 31
-    assert test_db.osmline_unindexed() == 1
-
-    idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
-    await idx.index_by_rank(0, 30)
-
-    assert test_db.placex_unindexed() == 0
-    assert test_db.osmline_unindexed() == 0
-
-    assert test_db.scalar("""SELECT count(*) from placex
-                             WHERE indexed_status = 0 and indexed_date is null""") == 0
-    # ranks come in order of rank address
-    assert test_db.scalar("""
-        SELECT count(*) FROM placex p WHERE rank_address > 0
-          AND indexed_date >= (SELECT min(indexed_date) FROM placex o
-                               WHERE p.rank_address < o.rank_address)""") == 0
-    # placex address ranked objects come before interpolations
-    assert test_db.scalar(
-        """SELECT count(*) FROM placex WHERE rank_address > 0
-             AND indexed_date >
-                 (SELECT min(indexed_date) FROM location_property_osmline)""") == 0
-    # rank 0 comes after all other placex objects
-    assert test_db.scalar(
-        """SELECT count(*) FROM placex WHERE rank_address > 0
-             AND indexed_date >
-                 (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
-
-
-@pytest.mark.parametrize("threads", [1, 15])
-@pytest.mark.asyncio
-async def test_index_partial_without_30(test_db, threads, test_tokenizer):
-    for rank in range(31):
-        test_db.add_place(rank_address=rank, rank_search=rank)
-    test_db.add_osmline()
-
-    assert test_db.placex_unindexed() == 31
-    assert test_db.osmline_unindexed() == 1
-
-    idx = indexer.Indexer('dbname=test_nominatim_python_unittest',
-                          test_tokenizer, threads)
-    await idx.index_by_rank(4, 15)
-
-    assert test_db.placex_unindexed() == 19
-    assert test_db.osmline_unindexed() == 1
-
-    assert test_db.scalar("""
-        SELECT count(*) FROM placex
-          WHERE indexed_status = 0 AND not rank_address between 4 and 15""") == 0
-
-
-@pytest.mark.parametrize("threads", [1, 15])
-@pytest.mark.asyncio
-async def test_index_partial_with_30(test_db, threads, test_tokenizer):
-    for rank in range(31):
-        test_db.add_place(rank_address=rank, rank_search=rank)
-    test_db.add_osmline()
-
-    assert test_db.placex_unindexed() == 31
-    assert test_db.osmline_unindexed() == 1
-
-    idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
-    await idx.index_by_rank(28, 30)
-
-    assert test_db.placex_unindexed() == 28
-    assert test_db.osmline_unindexed() == 0
-
-    assert test_db.scalar("""
-        SELECT count(*) FROM placex
-          WHERE indexed_status = 0 AND rank_address between 0 and 27""") == 0
-
-
-@pytest.mark.parametrize("threads", [1, 15])
-@pytest.mark.asyncio
-async def test_index_boundaries(test_db, threads, test_tokenizer):
-    for rank in range(4, 10):
-        test_db.add_admin(rank_address=rank, rank_search=rank)
-    for rank in range(31):
-        test_db.add_place(rank_address=rank, rank_search=rank)
-    test_db.add_osmline()
-
-    assert test_db.placex_unindexed() == 37
-    assert test_db.osmline_unindexed() == 1
-
-    idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
-    await idx.index_boundaries()
-
-    assert test_db.placex_unindexed() == 31
-    assert test_db.osmline_unindexed() == 1
-
-    assert test_db.scalar("""
-        SELECT count(*) FROM placex
-          WHERE indexed_status = 0 AND class != 'boundary'""") == 0
-
-
-@pytest.mark.parametrize("threads", [1, 15])
-@pytest.mark.asyncio
-async def test_index_postcodes(test_db, threads, test_tokenizer):
-    for postcode in range(1000):
-        test_db.add_postcode('de', postcode)
-    for postcode in range(32000, 33000):
-        test_db.add_postcode('us', postcode)
-
-    idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, threads)
-    await idx.index_postcodes()
-
-    assert test_db.scalar("""SELECT count(*) FROM location_postcodes
-                             WHERE indexed_status != 0""") == 0
-
-
-@pytest.mark.parametrize("analyse", [True, False])
-@pytest.mark.asyncio
-async def test_index_full(test_db, analyse, test_tokenizer):
-    for rank in range(4, 10):
-        test_db.add_admin(rank_address=rank, rank_search=rank)
-    for rank in range(31):
-        test_db.add_place(rank_address=rank, rank_search=rank)
-    test_db.add_osmline()
-    for postcode in range(1000):
-        test_db.add_postcode('de', postcode)
-
-    idx = indexer.Indexer('dbname=test_nominatim_python_unittest', test_tokenizer, 4)
-    await idx.index_full(analyse=analyse)
-
-    assert test_db.placex_unindexed() == 0
-    assert test_db.osmline_unindexed() == 0
-    assert test_db.scalar("""SELECT count(*) FROM location_postcodes
-                             WHERE indexed_status != 0""") == 0
+    @pytest.mark.parametrize("threads", [1, 15])
+    @pytest.mark.asyncio
+    async def test_index_all_by_rank(self, dsn, threads, placex_row, osmline_row):
+        for rank in range(31):
+            placex_row(rank_address=rank, rank_search=rank, indexed_status=1)
+        osmline_row()
+
+        assert self.placex_unindexed() == 31
+        assert self.osmline_unindexed() == 1
+
+        idx = indexer.Indexer(dsn, self.tokenizer, threads)
+        await idx.index_by_rank(0, 30)
+
+        assert self.placex_unindexed() == 0
+        assert self.osmline_unindexed() == 0
+
+        assert self.scalar("""SELECT count(*) from placex
+                              WHERE indexed_status = 0 and indexed_date is null""") == 0
+        # ranks come in order of rank address
+        assert self.scalar("""
+            SELECT count(*) FROM placex p WHERE rank_address > 0
+              AND indexed_date >= (SELECT min(indexed_date) FROM placex o
+                                   WHERE p.rank_address < o.rank_address)""") == 0
+        # placex address ranked objects come before interpolations
+        assert self.scalar(
+            """SELECT count(*) FROM placex WHERE rank_address > 0
+                 AND indexed_date >
+                     (SELECT min(indexed_date) FROM location_property_osmline)""") == 0
+        # rank 0 comes after all other placex objects
+        assert self.scalar(
+            """SELECT count(*) FROM placex WHERE rank_address > 0
+                 AND indexed_date >
+                     (SELECT min(indexed_date) FROM placex WHERE rank_address = 0)""") == 0
+
+    @pytest.mark.parametrize("threads", [1, 15])
+    @pytest.mark.asyncio
+    async def test_index_partial_without_30(self, dsn, threads, placex_row, osmline_row):
+        for rank in range(31):
+            placex_row(rank_address=rank, rank_search=rank, indexed_status=1)
+        osmline_row()
+
+        assert self.placex_unindexed() == 31
+        assert self.osmline_unindexed() == 1
+
+        idx = indexer.Indexer(dsn, self.tokenizer, threads)
+        await idx.index_by_rank(4, 15)
+
+        assert self.placex_unindexed() == 19
+        assert self.osmline_unindexed() == 1
+
+        assert self.scalar("""
+            SELECT count(*) FROM placex
+              WHERE indexed_status = 0 AND not rank_address between 4 and 15""") == 0
+
+    @pytest.mark.parametrize("threads", [1, 15])
+    @pytest.mark.asyncio
+    async def test_index_partial_with_30(self, dsn, threads, placex_row, osmline_row):
+        for rank in range(31):
+            placex_row(rank_address=rank, rank_search=rank, indexed_status=1)
+        osmline_row()
+
+        assert self.placex_unindexed() == 31
+        assert self.osmline_unindexed() == 1
+
+        idx = indexer.Indexer(dsn, self.tokenizer, threads)
+        await idx.index_by_rank(28, 30)
+
+        assert self.placex_unindexed() == 28
+        assert self.osmline_unindexed() == 0
+
+        assert self.scalar("""
+            SELECT count(*) FROM placex
+              WHERE indexed_status = 0 AND rank_address between 0 and 27""") == 0
+
+    @pytest.mark.parametrize("threads", [1, 15])
+    @pytest.mark.asyncio
+    async def test_index_boundaries(self, dsn, threads, placex_row, osmline_row):
+        for rank in range(4, 10):
+            placex_row(cls='boundary', typ='administrative',
+                       rank_address=rank, rank_search=rank, indexed_status=1)
+        for rank in range(31):
+            placex_row(rank_address=rank, rank_search=rank, indexed_status=1)
+        osmline_row()
+
+        assert self.placex_unindexed() == 37
+        assert self.osmline_unindexed() == 1
+
+        idx = indexer.Indexer(dsn, self.tokenizer, threads)
+        await idx.index_boundaries()
+
+        assert self.placex_unindexed() == 31
+        assert self.osmline_unindexed() == 1
+
+        assert self.scalar("""
+            SELECT count(*) FROM placex
+              WHERE indexed_status = 0 AND class != 'boundary'""") == 0
+
+    @pytest.mark.parametrize("threads", [1, 15])
+    @pytest.mark.asyncio
+    async def test_index_postcodes(self, dsn, threads, postcode_row):
+        for postcode in range(1000):
+            postcode_row(country='de', postcode=postcode)
+        for postcode in range(32000, 33000):
+            postcode_row(country='us', postcode=postcode)
+
+        idx = indexer.Indexer(dsn, self.tokenizer, threads)
+        await idx.index_postcodes()
+
+        assert self.scalar("""SELECT count(*) FROM location_postcodes
+                              WHERE indexed_status != 0""") == 0
+
+    @pytest.mark.parametrize("analyse", [True, False])
+    @pytest.mark.asyncio
+    async def test_index_full(self, dsn, analyse, placex_row, osmline_row, postcode_row):
+        for rank in range(4, 10):
+            placex_row(cls='boundary', typ='administrative',
+                       rank_address=rank, rank_search=rank, indexed_status=1)
+        for rank in range(31):
+            placex_row(rank_address=rank, rank_search=rank, indexed_status=1)
+        osmline_row()
+        for postcode in range(1000):
+            postcode_row(country='de', postcode=postcode)
+
+        idx = indexer.Indexer(dsn, self.tokenizer, 4)
+        await idx.index_full(analyse=analyse)
+
+        assert self.placex_unindexed() == 0
+        assert self.osmline_unindexed() == 0
+        assert self.scalar("""SELECT count(*) FROM location_postcodes
+                              WHERE indexed_status != 0""") == 0
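Note also that the hard-coded connection string `'dbname=test_nominatim_python_unittest'` is gone; the new tests take a `dsn` fixture instead. A hypothetical stand-in for that fixture (the real one presumably lives in the shared conftest and is tied to the temporary test database):

```python
import pytest


# Hypothetical stand-in for the shared `dsn` fixture used by the new tests.
@pytest.fixture
def dsn():
    return 'dbname=test_nominatim_python_unittest'
```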
@@ -2,7 +2,7 @@
 #
 # This file is part of Nominatim. (https://nominatim.org)
 #
-# Copyright (C) 2025 by the Nominatim developer community.
+# Copyright (C) 2026 by the Nominatim developer community.
 # For a full list of authors see the git log.
 """
 Test for tiger data function
@@ -13,59 +13,10 @@ from textwrap import dedent
 import pytest
 import pytest_asyncio  # noqa: F401

-from nominatim_db.db.connection import execute_scalar
-from nominatim_db.tools import tiger_data, freeze
+from nominatim_db.tools import tiger_data
 from nominatim_db.errors import UsageError


-class MockTigerTable:
-
-    def __init__(self, conn):
-        self.conn = conn
-        with conn.cursor() as cur:
-            cur.execute("""CREATE TABLE tiger (linegeo GEOMETRY,
-                                               start INTEGER,
-                                               stop INTEGER,
-                                               interpol TEXT,
-                                               token_info JSONB,
-                                               postcode TEXT)""")
-
-            # We need this table to determine if the database is frozen or not
-            cur.execute("CREATE TABLE place (number INTEGER)")
-            # We need this table to determine if the database is in reverse-only mode
-            cur.execute("CREATE TABLE search_name (place_id BIGINT)")
-
-    def count(self):
-        return execute_scalar(self.conn, "SELECT count(*) FROM tiger")
-
-    def row(self):
-        with self.conn.cursor() as cur:
-            cur.execute("SELECT * FROM tiger LIMIT 1")
-            return cur.fetchone()
-
-
-@pytest.fixture
-def tiger_table(def_config, temp_db_conn, sql_preprocessor,
-                temp_db_with_extensions, tmp_path):
-    def_config.lib_dir.sql = tmp_path / 'sql'
-    def_config.lib_dir.sql.mkdir()
-
-    (def_config.lib_dir.sql / 'tiger_import_start.sql').write_text(
-        """CREATE OR REPLACE FUNCTION tiger_line_import(linegeo GEOMETRY, start INTEGER,
-                                                        stop INTEGER, interpol TEXT,
-                                                        token_info JSONB, postcode TEXT)
-           RETURNS INTEGER AS $$
-            INSERT INTO tiger VALUES(linegeo, start, stop, interpol, token_info, postcode)
-            RETURNING 1
-           $$ LANGUAGE SQL;""", encoding='utf-8')
-    (def_config.lib_dir.sql / 'tiger_import_finish.sql').write_text(
-        """DROP FUNCTION tiger_line_import (linegeo GEOMETRY, in_startnumber INTEGER,
-                                            in_endnumber INTEGER, interpolationtype TEXT,
-                                            token_info JSONB, in_postcode TEXT);""", encoding='utf-8')
-
-    return MockTigerTable(temp_db_conn)
-
-
 @pytest.fixture
 def csv_factory(tmp_path):
     def _mk_file(fname, hnr_from=1, hnr_to=9, interpol='odd', street='Main St',
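The `MockTigerTable` helper and its hand-written `tiger_table` fixture are dropped; the class-based tests below load the real table definitions through a `load_sql` fixture instead. A sketch of what such a fixture might look like, assuming the `sql_preprocessor` fixture and the `run_sql_file` signature shown in the earlier hunk (the fixture body here is a guess, not the diff's code):

```python
import pytest


# Guessed shape of the `load_sql` fixture: render one of the project's SQL
# template files into the temporary test database, forwarding Jinja params.
@pytest.fixture
def load_sql(temp_db_conn, sql_preprocessor):
    def _load(name, **kwargs):
        sql_preprocessor.run_sql_file(temp_db_conn, name, **kwargs)
    return _load
```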
@@ -80,107 +31,110 @@ def csv_factory(tmp_path):
     return _mk_file


-@pytest.mark.parametrize("threads", (1, 5))
-@pytest.mark.asyncio
-async def test_add_tiger_data(def_config, src_dir, tiger_table, tokenizer_mock, threads):
-    await tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
-                                    def_config, threads, tokenizer_mock())
-
-    assert tiger_table.count() == 6213
-
-
-@pytest.mark.parametrize("threads", (1, 5))
-@pytest.mark.asyncio
-async def test_add_tiger_data_database_frozen(def_config, src_dir, temp_db_conn, tiger_table,
-                                              tokenizer_mock, threads):
-    freeze.drop_update_tables(temp_db_conn)
-
-    await tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
-                                    def_config, threads, tokenizer_mock())
-
-    assert tiger_table.count() == 6213
-
-
-@pytest.mark.asyncio
-async def test_add_tiger_data_reverse_only(def_config, src_dir, temp_db_conn, tiger_table,
-                                           tokenizer_mock):
-    with temp_db_conn.cursor() as cur:
-        cur.execute("DROP TABLE search_name")
-    temp_db_conn.commit()
-
-    with pytest.raises(UsageError,
-                       match="Cannot perform tiger import: required tables are missing. "
-                             "See https://github.com/osm-search/Nominatim/issues/2463 for details."):
-        await tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
-                                        def_config, 1, tokenizer_mock())
-
-    assert tiger_table.count() == 0
-
-
-@pytest.mark.asyncio
-async def test_add_tiger_data_no_files(def_config, tiger_table, tokenizer_mock,
-                                       tmp_path):
-    await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
-
-    assert tiger_table.count() == 0
-
-
-@pytest.mark.asyncio
-async def test_add_tiger_data_bad_file(def_config, tiger_table, tokenizer_mock,
-                                       tmp_path):
-    sqlfile = tmp_path / '1010.csv'
-    sqlfile.write_text("""Random text""", encoding='utf-8')
-
-    await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
-
-    assert tiger_table.count() == 0
-
-
-@pytest.mark.asyncio
-async def test_add_tiger_data_hnr_nan(def_config, tiger_table, tokenizer_mock,
-                                      csv_factory, tmp_path):
-    csv_factory('file1', hnr_from=99)
-    csv_factory('file2', hnr_from='L12')
-    csv_factory('file3', hnr_to='12.4')
-
-    await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
-
-    assert tiger_table.count() == 1
-    assert tiger_table.row().start == 99
-
-
-@pytest.mark.parametrize("threads", (1, 5))
-@pytest.mark.asyncio
-async def test_add_tiger_data_tarfile(def_config, tiger_table, tokenizer_mock,
-                                      tmp_path, src_dir, threads):
-    tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
-    tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
-    tar.close()
-
-    await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
-                                    tokenizer_mock())
-
-    assert tiger_table.count() == 6213
-
-
-@pytest.mark.asyncio
-async def test_add_tiger_data_bad_tarfile(def_config, tiger_table, tokenizer_mock,
-                                          tmp_path):
-    tarfile = tmp_path / 'sample.tar.gz'
-    tarfile.write_text("""Random text""", encoding='utf-8')
-
-    with pytest.raises(UsageError):
-        await tiger_data.add_tiger_data(str(tarfile), def_config, 1, tokenizer_mock())
-
-
-@pytest.mark.asyncio
-async def test_add_tiger_data_empty_tarfile(def_config, tiger_table, tokenizer_mock,
-                                            tmp_path):
-    tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
-    tar.add(__file__)
-    tar.close()
-
-    await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
-                                    tokenizer_mock())
-
-    assert tiger_table.count() == 0
+class TestTiger:
+
+    @pytest.fixture(autouse=True)
+    def setup(self, temp_db_conn, placex_row, load_sql):
+        load_sql('tables/search_name.sql', create_reverse_only=False)
+        load_sql('tables/tiger.sql')
+
+        # fake parent roads
+        for x in range(-870, -863):
+            for y in range(323, 328):
+                placex_row(rank_search=26, rank_address=26,
+                           geom=f"LINESTRING({x/10 - 0.1} {y/10}, {x/10 + 0.1} {y/10})")
+
+        temp_db_conn.execute("""
+            CREATE OR REPLACE FUNCTION get_partition(cc VARCHAR(10)) RETURNS INTEGER AS $$
+              SELECT 0;
+            $$ LANGUAGE sql;
+            CREATE OR REPLACE FUNCTION token_matches_street(i JSONB, s INT[]) RETURNS BOOLEAN AS $$
+              SELECT false
+            $$ LANGUAGE SQL IMMUTABLE STRICT PARALLEL SAFE;
+        """)
+
+    @pytest.mark.parametrize("threads", (1, 5))
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_database_frozen(self, def_config, src_dir, temp_db_cursor,
+                                                  tokenizer_mock, threads):
+        await tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
+                                        def_config, threads, tokenizer_mock())
+
+        assert temp_db_cursor.table_rows('location_property_tiger') == 6209
+
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_reverse_only(self, def_config, src_dir, temp_db_cursor,
+                                               tokenizer_mock):
+        temp_db_cursor.execute("DROP TABLE search_name")
+
+        with pytest.raises(UsageError,
+                           match="Cannot perform tiger import: required tables are missing. "
+                                 "See https://github.com/osm-search/Nominatim/issues/2463 for details."):
+            await tiger_data.add_tiger_data(str(src_dir / 'test' / 'testdb' / 'tiger'),
+                                            def_config, 1, tokenizer_mock())
+
+        assert temp_db_cursor.table_rows('location_property_tiger') == 0
+
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_no_files(self, def_config, temp_db_cursor, tokenizer_mock,
+                                           tmp_path):
+        await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
+
+        assert temp_db_cursor.table_rows('location_property_tiger') == 0
+
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_bad_file(self, def_config, temp_db_cursor, tokenizer_mock,
+                                           tmp_path):
+        sqlfile = tmp_path / '1010.csv'
+        sqlfile.write_text('Random text', encoding='utf-8')
+
+        await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
+
+        assert temp_db_cursor.table_rows('location_property_tiger') == 0
+
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_hnr_nan(self, def_config, temp_db_cursor, tokenizer_mock,
+                                          csv_factory, tmp_path):
+        csv_factory('file1', hnr_to=99)
+        csv_factory('file2', hnr_from='L12')
+        csv_factory('file3', hnr_to='12.4')
+
+        await tiger_data.add_tiger_data(str(tmp_path), def_config, 1, tokenizer_mock())
+
+        rows = temp_db_cursor.row_set("""
+            SELECT startnumber, endnumber FROM location_property_tiger""")
+
+        assert rows == {(1, 99)}
+
+    @pytest.mark.parametrize("threads", (1, 5))
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_tarfile(self, def_config, temp_db_cursor, tokenizer_mock,
+                                          tmp_path, src_dir, threads):
+        tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
+        tar.add(str(src_dir / 'test' / 'testdb' / 'tiger' / '01001.csv'))
+        tar.close()
+
+        await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, threads,
+                                        tokenizer_mock())
+
+        assert temp_db_cursor.table_rows('location_property_tiger') == 6209
+
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_bad_tarfile(self, def_config, tokenizer_mock, tmp_path):
+        tarfile = tmp_path / 'sample.tar.gz'
+        tarfile.write_text("""Random text""", encoding='utf-8')
+
+        with pytest.raises(UsageError):
+            await tiger_data.add_tiger_data(str(tarfile), def_config, 1, tokenizer_mock())
+
+    @pytest.mark.asyncio
+    async def test_add_tiger_data_empty_tarfile(self, def_config, temp_db_cursor, tokenizer_mock,
+                                                tmp_path):
+        tar = tarfile.open(str(tmp_path / 'sample.tar.gz'), "w:gz")
+        tar.add(__file__)
+        tar.close()
+
+        await tiger_data.add_tiger_data(str(tmp_path / 'sample.tar.gz'), def_config, 1,
+                                        tokenizer_mock())
+
+        assert temp_db_cursor.table_rows('location_property_tiger') == 0