make DB helper functions free functions

Also changes the drop function so that it can drop multiple tables
at once.
Author: Sarah Hoffmann
Date:   2024-07-02 15:15:50 +02:00
Parent: 71249bd94a
Commit: 3742fa2929
30 changed files with 347 additions and 364 deletions
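
For orientation, here is a minimal sketch of the calling convention introduced by this commit, as implied by the message above and the test changes below. It is not part of the diff; the DSN string and the table names are placeholders, and only calls that actually appear in the tests (connect, table_exists, execute_scalar, drop_tables) are used.

import nominatim_db.db.connection as nc

dsn = 'dbname=nominatim'  # placeholder libpq connection string

with nc.connect(dsn) as conn:
    # Helpers that used to be methods on the connection wrapper are now
    # free functions taking the connection as their first argument.
    if nc.table_exists(conn, 'placex'):
        total = nc.execute_scalar(conn, 'SELECT count(*) FROM placex')

    # drop_tables() replaces drop_table() and accepts any number of table
    # names; pass if_exists=False to raise an error for missing tables.
    nc.drop_tables(conn, 'tmp_table1', 'tmp_table2')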


@@ -33,13 +33,13 @@ def simple_conns(temp_db):
     conn2.close()
 
-def test_simple_query(conn, temp_db_conn):
+def test_simple_query(conn, temp_db_cursor):
     conn.connect()
     conn.perform('CREATE TABLE foo (id INT)')
     conn.wait()
 
-    temp_db_conn.table_exists('foo')
+    assert temp_db_cursor.table_exists('foo')
 
 def test_wait_for_query(conn):


@@ -10,61 +10,74 @@ Tests for specialised connection and cursor classes.
 import pytest
 import psycopg2
 
-from nominatim_db.db.connection import connect, get_pg_env
+import nominatim_db.db.connection as nc
 
 @pytest.fixture
 def db(dsn):
-    with connect(dsn) as conn:
+    with nc.connect(dsn) as conn:
         yield conn
 
 def test_connection_table_exists(db, table_factory):
-    assert not db.table_exists('foobar')
+    assert not nc.table_exists(db, 'foobar')
 
     table_factory('foobar')
 
-    assert db.table_exists('foobar')
+    assert nc.table_exists(db, 'foobar')
 
 def test_has_column_no_table(db):
-    assert not db.table_has_column('sometable', 'somecolumn')
+    assert not nc.table_has_column(db, 'sometable', 'somecolumn')
 
 @pytest.mark.parametrize('name,result', [('tram', True), ('car', False)])
 def test_has_column(db, table_factory, name, result):
     table_factory('stuff', 'tram TEXT')
 
-    assert db.table_has_column('stuff', name) == result
+    assert nc.table_has_column(db, 'stuff', name) == result
 
 def test_connection_index_exists(db, table_factory, temp_db_cursor):
-    assert not db.index_exists('some_index')
+    assert not nc.index_exists(db, 'some_index')
 
     table_factory('foobar')
     temp_db_cursor.execute('CREATE INDEX some_index ON foobar(id)')
 
-    assert db.index_exists('some_index')
-    assert db.index_exists('some_index', table='foobar')
-    assert not db.index_exists('some_index', table='bar')
+    assert nc.index_exists(db, 'some_index')
+    assert nc.index_exists(db, 'some_index', table='foobar')
+    assert not nc.index_exists(db, 'some_index', table='bar')
 
 def test_drop_table_existing(db, table_factory):
     table_factory('dummy')
-    assert db.table_exists('dummy')
+    assert nc.table_exists(db, 'dummy')
 
-    db.drop_table('dummy')
-    assert not db.table_exists('dummy')
+    nc.drop_tables(db, 'dummy')
+    assert not nc.table_exists(db, 'dummy')
 
-def test_drop_table_non_existsing(db):
-    db.drop_table('dfkjgjriogjigjgjrdghehtre')
+def test_drop_table_non_existing(db):
+    nc.drop_tables(db, 'dfkjgjriogjigjgjrdghehtre')
+
+def test_drop_many_tables(db, table_factory):
+    tables = [f'table{n}' for n in range(5)]
+
+    for t in tables:
+        table_factory(t)
+        assert nc.table_exists(db, t)
+
+    nc.drop_tables(db, *tables)
+
+    for t in tables:
+        assert not nc.table_exists(db, t)
 
 def test_drop_table_non_existing_force(db):
     with pytest.raises(psycopg2.ProgrammingError, match='.*does not exist.*'):
-        db.drop_table('dfkjgjriogjigjgjrdghehtre', if_exists=False)
+        nc.drop_tables(db, 'dfkjgjriogjigjgjrdghehtre', if_exists=False)
 
 def test_connection_server_version_tuple(db):
-    ver = db.server_version_tuple()
+    ver = nc.server_version_tuple(db)
 
     assert isinstance(ver, tuple)
     assert len(ver) == 2
@@ -72,7 +85,7 @@ def test_connection_server_version_tuple(db):
 
 def test_connection_postgis_version_tuple(db, temp_db_with_extensions):
-    ver = db.postgis_version_tuple()
+    ver = nc.postgis_version_tuple(db)
 
     assert isinstance(ver, tuple)
     assert len(ver) == 2
@@ -82,27 +95,24 @@ def test_connection_postgis_version_tuple(db, temp_db_with_extensions):
 
 def test_cursor_scalar(db, table_factory):
     table_factory('dummy')
 
-    with db.cursor() as cur:
-        assert cur.scalar('SELECT count(*) FROM dummy') == 0
+    assert nc.execute_scalar(db, 'SELECT count(*) FROM dummy') == 0
 
 def test_cursor_scalar_many_rows(db):
-    with db.cursor() as cur:
-        with pytest.raises(RuntimeError):
-            cur.scalar('SELECT * FROM pg_tables')
+    with pytest.raises(RuntimeError, match='Query did not return a single row.'):
+        nc.execute_scalar(db, 'SELECT * FROM pg_tables')
 
 def test_cursor_scalar_no_rows(db, table_factory):
     table_factory('dummy')
 
-    with db.cursor() as cur:
-        with pytest.raises(RuntimeError):
-            cur.scalar('SELECT id FROM dummy')
+    with pytest.raises(RuntimeError, match='Query did not return a single row.'):
+        nc.execute_scalar(db, 'SELECT id FROM dummy')
 
 def test_get_pg_env_add_variable(monkeypatch):
     monkeypatch.delenv('PGPASSWORD', raising=False)
-    env = get_pg_env('user=fooF')
+    env = nc.get_pg_env('user=fooF')
 
     assert env['PGUSER'] == 'fooF'
     assert 'PGPASSWORD' not in env
@@ -110,12 +120,12 @@ def test_get_pg_env_add_variable(monkeypatch):
 
 def test_get_pg_env_overwrite_variable(monkeypatch):
     monkeypatch.setenv('PGUSER', 'some default')
-    env = get_pg_env('user=overwriter')
+    env = nc.get_pg_env('user=overwriter')
 
     assert env['PGUSER'] == 'overwriter'
 
 def test_get_pg_env_ignore_unknown():
-    env = get_pg_env('client_encoding=stuff', base_env={})
+    env = nc.get_pg_env('client_encoding=stuff', base_env={})
 
     assert env == {}


@@ -8,6 +8,7 @@
 Legacy word table for testing with functions to prefil and test contents
 of the table.
 """
+from nominatim_db.db.connection import execute_scalar
 
 class MockIcuWordTable:
     """ A word table for testing using legacy word table structure.
@@ -77,18 +78,15 @@ class MockIcuWordTable:
 
     def count(self):
-        with self.conn.cursor() as cur:
-            return cur.scalar("SELECT count(*) FROM word")
+        return execute_scalar(self.conn, "SELECT count(*) FROM word")
 
     def count_special(self):
-        with self.conn.cursor() as cur:
-            return cur.scalar("SELECT count(*) FROM word WHERE type = 'S'")
+        return execute_scalar(self.conn, "SELECT count(*) FROM word WHERE type = 'S'")
 
     def count_housenumbers(self):
-        with self.conn.cursor() as cur:
-            return cur.scalar("SELECT count(*) FROM word WHERE type = 'H'")
+        return execute_scalar(self.conn, "SELECT count(*) FROM word WHERE type = 'H'")
 
     def get_special(self):


@@ -8,6 +8,7 @@
 Legacy word table for testing with functions to prefil and test contents
 of the table.
 """
+from nominatim_db.db.connection import execute_scalar
 
 class MockLegacyWordTable:
     """ A word table for testing using legacy word table structure.
@@ -58,13 +59,11 @@ class MockLegacyWordTable:
 
     def count(self):
-        with self.conn.cursor() as cur:
-            return cur.scalar("SELECT count(*) FROM word")
+        return execute_scalar(self.conn, "SELECT count(*) FROM word")
 
     def count_special(self):
-        with self.conn.cursor() as cur:
-            return cur.scalar("SELECT count(*) FROM word WHERE class != 'place'")
+        return execute_scalar(self.conn, "SELECT count(*) FROM word WHERE class != 'place'")
 
     def get_special(self):


@@ -199,16 +199,16 @@ def test_update_sql_functions(db_prop, temp_db_cursor,
 
     assert test_content == set((('1133', ), ))
 
-def test_finalize_import(tokenizer_factory, temp_db_conn,
-                         temp_db_cursor, test_config, sql_preprocessor_cfg):
+def test_finalize_import(tokenizer_factory, temp_db_cursor,
+                         test_config, sql_preprocessor_cfg):
     tok = tokenizer_factory()
     tok.init_new_db(test_config)
 
-    assert not temp_db_conn.index_exists('idx_word_word_id')
+    assert not temp_db_cursor.index_exists('word', 'idx_word_word_id')
 
     tok.finalize_import(test_config)
 
-    assert temp_db_conn.index_exists('idx_word_word_id')
+    assert temp_db_cursor.index_exists('word', 'idx_word_word_id')
 
 def test_check_database(test_config, tokenizer_factory,


@@ -132,7 +132,7 @@ def test_import_osm_data_simple_ignore_no_data(table_factory, osm2pgsql_options)
                                     ignore_errors=True)
 
-def test_import_osm_data_drop(table_factory, temp_db_conn, tmp_path, osm2pgsql_options):
+def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options):
     table_factory('place', content=((1, ), ))
     table_factory('planet_osm_nodes')
 
@@ -144,7 +144,7 @@ def test_import_osm_data_drop(table_factory, temp_db_conn, tmp_path, osm2pgsql_o
     database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, drop=True)
 
     assert not flatfile.exists()
-    assert not temp_db_conn.table_exists('planet_osm_nodes')
+    assert not temp_db_cursor.table_exists('planet_osm_nodes')
 
 def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd):


@@ -75,7 +75,8 @@ def test_load_white_and_black_lists(sp_importer):
     assert isinstance(black_list, dict) and isinstance(white_list, dict)
 
-def test_create_place_classtype_indexes(temp_db_with_extensions, temp_db_conn,
+def test_create_place_classtype_indexes(temp_db_with_extensions,
+                                        temp_db_conn, temp_db_cursor,
                                         table_factory, sp_importer):
     """
     Test that _create_place_classtype_indexes() create the
@@ -88,10 +89,11 @@ def test_create_place_classtype_indexes(temp_db_with_extensions, temp_db_conn,
     table_factory(table_name, 'place_id BIGINT, centroid GEOMETRY')
 
     sp_importer._create_place_classtype_indexes('', phrase_class, phrase_type)
+    temp_db_conn.commit()
 
-    assert check_placeid_and_centroid_indexes(temp_db_conn, phrase_class, phrase_type)
+    assert check_placeid_and_centroid_indexes(temp_db_cursor, phrase_class, phrase_type)
 
-def test_create_place_classtype_table(temp_db_conn, placex_table, sp_importer):
+def test_create_place_classtype_table(temp_db_conn, temp_db_cursor, placex_table, sp_importer):
     """
     Test that _create_place_classtype_table() create
     the right place_classtype table.
@@ -99,10 +101,12 @@ def test_create_place_classtype_table(temp_db_conn, placex_table, sp_importer):
     phrase_class = 'class'
     phrase_type = 'type'
     sp_importer._create_place_classtype_table('', phrase_class, phrase_type)
+    temp_db_conn.commit()
 
-    assert check_table_exist(temp_db_conn, phrase_class, phrase_type)
+    assert check_table_exist(temp_db_cursor, phrase_class, phrase_type)
 
-def test_grant_access_to_web_user(temp_db_conn, table_factory, def_config, sp_importer):
+def test_grant_access_to_web_user(temp_db_conn, temp_db_cursor, table_factory,
+                                  def_config, sp_importer):
     """
     Test that _grant_access_to_webuser() give
     right access to the web user.
@@ -114,12 +118,13 @@ def test_grant_access_to_web_user(temp_db_conn, table_factory, def_config, sp_im
     table_factory(table_name)
 
     sp_importer._grant_access_to_webuser(phrase_class, phrase_type)
+    temp_db_conn.commit()
 
-    assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, phrase_class, phrase_type)
+    assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, phrase_class, phrase_type)
 
 def test_create_place_classtype_table_and_indexes(
-        temp_db_conn, def_config, placex_table,
-        sp_importer):
+        temp_db_cursor, def_config, placex_table,
+        sp_importer, temp_db_conn):
     """
     Test that _create_place_classtype_table_and_indexes()
     create the right place_classtype tables and place_id indexes
@@ -129,14 +134,15 @@ def test_create_place_classtype_table_and_indexes(
     pairs = set([('class1', 'type1'), ('class2', 'type2')])
 
     sp_importer._create_classtype_table_and_indexes(pairs)
+    temp_db_conn.commit()
 
     for pair in pairs:
-        assert check_table_exist(temp_db_conn, pair[0], pair[1])
-        assert check_placeid_and_centroid_indexes(temp_db_conn, pair[0], pair[1])
-        assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, pair[0], pair[1])
+        assert check_table_exist(temp_db_cursor, pair[0], pair[1])
+        assert check_placeid_and_centroid_indexes(temp_db_cursor, pair[0], pair[1])
+        assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, pair[0], pair[1])
 
 def test_remove_non_existent_tables_from_db(sp_importer, default_phrases,
-                                            temp_db_conn):
+                                            temp_db_conn, temp_db_cursor):
     """
     Check for the remove_non_existent_phrases_from_db() method.
@@ -159,15 +165,14 @@ def test_remove_non_existent_tables_from_db(sp_importer, default_phrases,
     """
 
     sp_importer._remove_non_existent_tables_from_db()
+    temp_db_conn.commit()
 
-    # Changes are not committed yet. Use temp_db_conn for checking results.
-    with temp_db_conn.cursor(cursor_factory=CursorForTesting) as cur:
-        assert cur.row_set(query_tables) \
-               == {('place_classtype_testclasstypetable_to_keep', )}
+    assert temp_db_cursor.row_set(query_tables) \
+           == {('place_classtype_testclasstypetable_to_keep', )}
 
 @pytest.mark.parametrize("should_replace", [(True), (False)])
-def test_import_phrases(monkeypatch, temp_db_conn, def_config, sp_importer,
+def test_import_phrases(monkeypatch, temp_db_cursor, def_config, sp_importer,
                         placex_table, table_factory, tokenizer_mock,
                         xml_wiki_content, should_replace):
     """
@@ -193,49 +198,49 @@ def test_import_phrases(monkeypatch, temp_db_conn, def_config, sp_importer,
     class_test = 'aerialway'
     type_test = 'zip_line'
 
-    assert check_table_exist(temp_db_conn, class_test, type_test)
-    assert check_placeid_and_centroid_indexes(temp_db_conn, class_test, type_test)
-    assert check_grant_access(temp_db_conn, def_config.DATABASE_WEBUSER, class_test, type_test)
-    assert check_table_exist(temp_db_conn, 'amenity', 'animal_shelter')
+    assert check_table_exist(temp_db_cursor, class_test, type_test)
+    assert check_placeid_and_centroid_indexes(temp_db_cursor, class_test, type_test)
+    assert check_grant_access(temp_db_cursor, def_config.DATABASE_WEBUSER, class_test, type_test)
+    assert check_table_exist(temp_db_cursor, 'amenity', 'animal_shelter')
     if should_replace:
-        assert not check_table_exist(temp_db_conn, 'wrong_class', 'wrong_type')
+        assert not check_table_exist(temp_db_cursor, 'wrong_class', 'wrong_type')
 
-    assert temp_db_conn.table_exists('place_classtype_amenity_animal_shelter')
+    assert temp_db_cursor.table_exists('place_classtype_amenity_animal_shelter')
 
     if should_replace:
-        assert not temp_db_conn.table_exists('place_classtype_wrongclass_wrongtype')
+        assert not temp_db_cursor.table_exists('place_classtype_wrongclass_wrongtype')
 
-def check_table_exist(temp_db_conn, phrase_class, phrase_type):
+def check_table_exist(temp_db_cursor, phrase_class, phrase_type):
     """
     Verify that the place_classtype table exists for the given
     phrase_class and phrase_type.
     """
-    return temp_db_conn.table_exists('place_classtype_{}_{}'.format(phrase_class, phrase_type))
+    return temp_db_cursor.table_exists('place_classtype_{}_{}'.format(phrase_class, phrase_type))
 
-def check_grant_access(temp_db_conn, user, phrase_class, phrase_type):
+def check_grant_access(temp_db_cursor, user, phrase_class, phrase_type):
     """
     Check that the web user has been granted right access to the
     place_classtype table of the given phrase_class and phrase_type.
     """
     table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
-    with temp_db_conn.cursor() as temp_db_cursor:
-        temp_db_cursor.execute("""
-                SELECT * FROM information_schema.role_table_grants
-                WHERE table_name='{}'
-                AND grantee='{}'
-                AND privilege_type='SELECT'""".format(table_name, user))
-        return temp_db_cursor.fetchone()
+    temp_db_cursor.execute("""
+            SELECT * FROM information_schema.role_table_grants
+            WHERE table_name='{}'
+            AND grantee='{}'
+            AND privilege_type='SELECT'""".format(table_name, user))
+    return temp_db_cursor.fetchone()
 
-def check_placeid_and_centroid_indexes(temp_db_conn, phrase_class, phrase_type):
+def check_placeid_and_centroid_indexes(temp_db_cursor, phrase_class, phrase_type):
     """
     Check that the place_id index and centroid index exist for the
     place_classtype table of the given phrase_class and phrase_type.
    """
+    table_name = 'place_classtype_{}_{}'.format(phrase_class, phrase_type)
     index_prefix = 'idx_place_classtype_{}_{}_'.format(phrase_class, phrase_type)
 
     return (
-        temp_db_conn.index_exists(index_prefix + 'centroid')
+        temp_db_cursor.index_exists(table_name, index_prefix + 'centroid')
         and
-        temp_db_conn.index_exists(index_prefix + 'place_id')
+        temp_db_cursor.index_exists(table_name, index_prefix + 'place_id')
     )


@@ -12,6 +12,7 @@ import psycopg2.extras
 
 from nominatim_db.tools import migration
 from nominatim_db.errors import UsageError
+from nominatim_db.db.connection import server_version_tuple
 import nominatim_db.version
 
 from mock_legacy_word_table import MockLegacyWordTable
@@ -63,7 +64,7 @@ def test_set_up_migration_for_36(temp_db_with_extensions, temp_db_cursor,
                                  WHERE property = 'database_version'""")
 
-def test_already_at_version(def_config, property_table):
+def test_already_at_version(temp_db_with_extensions, def_config, property_table):
 
     property_table.set('database_version',
                        str(nominatim_db.version.NOMINATIM_VERSION))
@@ -71,8 +72,8 @@ def test_already_at_version(def_config, property_table):
 
     assert migration.migrate(def_config, {}) == 0
 
-def test_run_single_migration(def_config, temp_db_cursor, property_table,
-                              monkeypatch, postprocess_mock):
+def test_run_single_migration(temp_db_with_extensions, def_config, temp_db_cursor,
+                              property_table, monkeypatch, postprocess_mock):
     oldversion = [x for x in nominatim_db.version.NOMINATIM_VERSION]
     oldversion[0] -= 1
 
     property_table.set('database_version',
@@ -226,7 +227,7 @@ def test_create_tiger_housenumber_index(temp_db_conn, temp_db_cursor, table_fact
     migration.create_tiger_housenumber_index(temp_db_conn)
     temp_db_conn.commit()
 
-    if temp_db_conn.server_version_tuple() >= (11, 0, 0):
+    if server_version_tuple(temp_db_conn) >= (11, 0, 0):
         assert temp_db_cursor.index_exists('location_property_tiger',
                                            'idx_location_property_tiger_housenumber_migrated')


@@ -12,6 +12,7 @@ from pathlib import Path
 import pytest
 
 from nominatim_db.tools import refresh
+from nominatim_db.db.connection import postgis_version_tuple
 
 def test_refresh_import_wikipedia_not_existing(dsn):
     assert refresh.import_wikipedia_articles(dsn, Path('.')) == 1
@@ -23,13 +24,13 @@ def test_refresh_import_secondary_importance_non_existing(dsn):
 
 def test_refresh_import_secondary_importance_testdb(dsn, src_dir, temp_db_conn, temp_db_cursor):
     temp_db_cursor.execute('CREATE EXTENSION postgis')
 
-    if temp_db_conn.postgis_version_tuple()[0] < 3:
+    if postgis_version_tuple(temp_db_conn)[0] < 3:
         assert refresh.import_secondary_importance(dsn, src_dir / 'test' / 'testdb') > 0
     else:
         temp_db_cursor.execute('CREATE EXTENSION postgis_raster')
         assert refresh.import_secondary_importance(dsn, src_dir / 'test' / 'testdb') == 0
 
-        assert temp_db_conn.table_exists('secondary_importance')
+        assert temp_db_cursor.table_exists('secondary_importance')
 
 @pytest.mark.parametrize("replace", (True, False))


@@ -12,6 +12,7 @@ from textwrap import dedent
 
 import pytest
 
+from nominatim_db.db.connection import execute_scalar
 from nominatim_db.tools import tiger_data, freeze
 from nominatim_db.errors import UsageError
@@ -31,8 +32,7 @@ class MockTigerTable:
             cur.execute("CREATE TABLE place (number INTEGER)")
 
     def count(self):
-        with self.conn.cursor() as cur:
-            return cur.scalar("SELECT count(*) FROM tiger")
+        return execute_scalar(self.conn, "SELECT count(*) FROM tiger")
 
     def row(self):
         with self.conn.cursor() as cur: