forked from hans/Nominatim
port code to psycopg3
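The diffs below follow the usual psycopg2-to-psycopg 3 migration pattern: the package is now imported as psycopg, and the DBAPI exception classes such as ProgrammingError are exposed directly on that module. A minimal sketch of the new idiom (the DSN and query are placeholders, not taken from this commit):

    import psycopg

    # placeholder DSN; psycopg 3 connections and cursors are context managers
    with psycopg.connect('dbname=test_nominatim') as conn:
        with conn.cursor() as cur:
            try:
                cur.execute('SELECT version()')
                print(cur.fetchone()[0])
            except psycopg.ProgrammingError as err:
                # same DBAPI exception hierarchy as psycopg2, now under the psycopg module
                print('query failed:', err)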
@@ -1,113 +0,0 @@
-# SPDX-License-Identifier: GPL-3.0-or-later
-#
-# This file is part of Nominatim. (https://nominatim.org)
-#
-# Copyright (C) 2024 by the Nominatim developer community.
-# For a full list of authors see the git log.
-"""
-Tests for function providing a non-blocking query interface towards PostgreSQL.
-"""
-from contextlib import closing
-import concurrent.futures
-
-import pytest
-import psycopg2
-
-from nominatim_db.db.async_connection import DBConnection, DeadlockHandler
-
-
-@pytest.fixture
-def conn(temp_db):
-    with closing(DBConnection('dbname=' + temp_db)) as connection:
-        yield connection
-
-
-@pytest.fixture
-def simple_conns(temp_db):
-    conn1 = psycopg2.connect('dbname=' + temp_db)
-    conn2 = psycopg2.connect('dbname=' + temp_db)
-
-    yield conn1.cursor(), conn2.cursor()
-
-    conn1.close()
-    conn2.close()
-
-
-def test_simple_query(conn, temp_db_cursor):
-    conn.connect()
-
-    conn.perform('CREATE TABLE foo (id INT)')
-    conn.wait()
-
-    assert temp_db_cursor.table_exists('foo')
-
-
-def test_wait_for_query(conn):
-    conn.connect()
-
-    conn.perform('SELECT pg_sleep(1)')
-
-    assert not conn.is_done()
-
-    conn.wait()
-
-
-def test_bad_query(conn):
-    conn.connect()
-
-    conn.perform('SELECT efasfjsea')
-
-    with pytest.raises(psycopg2.ProgrammingError):
-        conn.wait()
-
-
-def test_bad_query_ignore(temp_db):
-    with closing(DBConnection('dbname=' + temp_db, ignore_sql_errors=True)) as conn:
-        conn.connect()
-
-        conn.perform('SELECT efasfjsea')
-
-        conn.wait()
-
-
-def exec_with_deadlock(cur, sql, detector):
-    with DeadlockHandler(lambda *args: detector.append(1)):
-        cur.execute(sql)
-
-
-def test_deadlock(simple_conns):
-    cur1, cur2 = simple_conns
-
-    cur1.execute("""CREATE TABLE t1 (id INT PRIMARY KEY, t TEXT);
-                    INSERT into t1 VALUES (1, 'a'), (2, 'b')""")
-    cur1.connection.commit()
-
-    cur1.execute("UPDATE t1 SET t = 'x' WHERE id = 1")
-    cur2.execute("UPDATE t1 SET t = 'x' WHERE id = 2")
-
-    # This is the tricky part of the test. The first SQL command runs into
-    # a lock and blocks, so we have to run it in a separate thread. When the
-    # second deadlocking SQL statement is issued, Postgresql will abort one of
-    # the two transactions that cause the deadlock. There is no way to tell
-    # which one of the two. Therefore wrap both in a DeadlockHandler and
-    # expect that exactly one of the two triggers.
-    with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
-        deadlock_check = []
-        try:
-            future = executor.submit(exec_with_deadlock, cur2,
-                                     "UPDATE t1 SET t = 'y' WHERE id = 1",
-                                     deadlock_check)
-
-            while not future.running():
-                pass
-
-
-            exec_with_deadlock(cur1, "UPDATE t1 SET t = 'y' WHERE id = 2",
-                               deadlock_check)
-        finally:
-            # Whatever happens, make sure the deadlock gets resolved.
-            cur1.connection.rollback()
-
-        future.result()
-
-        assert len(deadlock_check) == 1
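The non-blocking connection test module above is deleted outright rather than ported, presumably because psycopg 3 ships asyncio-native connections, making a hand-rolled perform/wait wrapper unnecessary. A hedged sketch of the built-in replacement idiom (the DSN and query are placeholders; whether Nominatim's new code uses AsyncConnection directly is an assumption):

    import asyncio
    import psycopg

    async def main():
        # AsyncConnection is psycopg 3's built-in asynchronous connection class
        async with await psycopg.AsyncConnection.connect('dbname=test_nominatim') as conn:
            async with conn.cursor() as cur:
                await cur.execute('SELECT pg_sleep(0.1)')

    asyncio.run(main())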
@@ -8,7 +8,7 @@
 Tests for specialised connection and cursor classes.
 """
 import pytest
-import psycopg2
+import psycopg
 
 import nominatim_db.db.connection as nc
 

@@ -73,7 +73,7 @@ def test_drop_many_tables(db, table_factory):
 
 
 def test_drop_table_non_existing_force(db):
-    with pytest.raises(psycopg2.ProgrammingError, match='.*does not exist.*'):
+    with pytest.raises(psycopg.ProgrammingError, match='.*does not exist.*'):
         nc.drop_tables(db, 'dfkjgjriogjigjgjrdghehtre', if_exists=False)
 
 def test_connection_server_version_tuple(db):
@@ -8,6 +8,7 @@
 Tests for SQL preprocessing.
 """
 import pytest
+import pytest_asyncio
 
 from nominatim_db.db.sql_preprocessor import SQLPreprocessor
 

@@ -54,3 +55,17 @@ def test_load_file_with_params(sql_preprocessor, sql_factory, temp_db_conn, temp
     sql_preprocessor.run_sql_file(temp_db_conn, sqlfile, bar='XX', foo='ZZ')
 
     assert temp_db_cursor.scalar('SELECT test()') == 'ZZ XX'
+
+
+@pytest.mark.asyncio
+async def test_load_parallel_file(dsn, sql_preprocessor, tmp_path, temp_db_cursor):
+    (tmp_path / 'test.sql').write_text("""
+        CREATE TABLE foo (a TEXT);
+        CREATE TABLE foo2(a TEXT);""" +
+        "\n---\nCREATE TABLE bar (b INT);")
+
+    await sql_preprocessor.run_parallel_sql_file(dsn, 'test.sql', num_threads=4)
+
+    assert temp_db_cursor.table_exists('foo')
+    assert temp_db_cursor.table_exists('foo2')
+    assert temp_db_cursor.table_exists('bar')
@@ -58,103 +58,3 @@ def test_execute_file_with_post_code(dsn, tmp_path, temp_db_cursor):
     db_utils.execute_file(dsn, tmpfile, post_code='INSERT INTO test VALUES(23)')
 
     assert temp_db_cursor.row_set('SELECT * FROM test') == {(23, )}
-
-
-class TestCopyBuffer:
-    TABLE_NAME = 'copytable'
-
-    @pytest.fixture(autouse=True)
-    def setup_test_table(self, table_factory):
-        table_factory(self.TABLE_NAME, 'col_a INT, col_b TEXT')
-
-
-    def table_rows(self, cursor):
-        return cursor.row_set('SELECT * FROM ' + self.TABLE_NAME)
-
-
-    def test_copybuffer_empty(self):
-        with db_utils.CopyBuffer() as buf:
-            buf.copy_out(None, "dummy")
-
-
-    def test_all_columns(self, temp_db_cursor):
-        with db_utils.CopyBuffer() as buf:
-            buf.add(3, 'hum')
-            buf.add(None, 'f\\t')
-
-            buf.copy_out(temp_db_cursor, self.TABLE_NAME)
-
-        assert self.table_rows(temp_db_cursor) == {(3, 'hum'), (None, 'f\\t')}
-
-
-    def test_selected_columns(self, temp_db_cursor):
-        with db_utils.CopyBuffer() as buf:
-            buf.add('foo')
-
-            buf.copy_out(temp_db_cursor, self.TABLE_NAME,
-                         columns=['col_b'])
-
-        assert self.table_rows(temp_db_cursor) == {(None, 'foo')}
-
-
-    def test_reordered_columns(self, temp_db_cursor):
-        with db_utils.CopyBuffer() as buf:
-            buf.add('one', 1)
-            buf.add(' two ', 2)
-
-            buf.copy_out(temp_db_cursor, self.TABLE_NAME,
-                         columns=['col_b', 'col_a'])
-
-        assert self.table_rows(temp_db_cursor) == {(1, 'one'), (2, ' two ')}
-
-
-    def test_special_characters(self, temp_db_cursor):
-        with db_utils.CopyBuffer() as buf:
-            buf.add('foo\tbar')
-            buf.add('sun\nson')
-            buf.add('\\N')
-
-            buf.copy_out(temp_db_cursor, self.TABLE_NAME,
-                         columns=['col_b'])
-
-        assert self.table_rows(temp_db_cursor) == {(None, 'foo\tbar'),
-                                                   (None, 'sun\nson'),
-                                                   (None, '\\N')}
-
-
-
-class TestCopyBufferJson:
-    TABLE_NAME = 'copytable'
-
-    @pytest.fixture(autouse=True)
-    def setup_test_table(self, table_factory):
-        table_factory(self.TABLE_NAME, 'col_a INT, col_b JSONB')
-
-
-    def table_rows(self, cursor):
-        cursor.execute('SELECT * FROM ' + self.TABLE_NAME)
-        results = {k: v for k,v in cursor}
-
-        assert len(results) == cursor.rowcount
-
-        return results
-
-
-    def test_json_object(self, temp_db_cursor):
-        with db_utils.CopyBuffer() as buf:
-            buf.add(1, json.dumps({'test': 'value', 'number': 1}))
-
-            buf.copy_out(temp_db_cursor, self.TABLE_NAME)
-
-        assert self.table_rows(temp_db_cursor) == \
-               {1: {'test': 'value', 'number': 1}}
-
-
-    def test_json_object_special_chras(self, temp_db_cursor):
-        with db_utils.CopyBuffer() as buf:
-            buf.add(1, json.dumps({'te\tst': 'va\nlue', 'nu"mber': None}))
-
-            buf.copy_out(temp_db_cursor, self.TABLE_NAME)
-
-        assert self.table_rows(temp_db_cursor) == \
-               {1: {'te\tst': 'va\nlue', 'nu"mber': None}}
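The CopyBuffer tests removed above exercised a helper that hand-built a tab-separated buffer for psycopg2's COPY, including manual \N escaping for NULLs. psycopg 3 provides a native COPY interface, so a rough equivalent of what these tests covered looks like the sketch below (the DSN is a placeholder, the table mirrors the deleted tests, and whether Nominatim's new code uses exactly this API is an assumption):

    import psycopg

    # assumed DSN; the deleted tests created 'copytable (col_a INT, col_b TEXT)'
    with psycopg.connect('dbname=test_nominatim') as conn:
        with conn.cursor() as cur:
            with cur.copy('COPY copytable (col_a, col_b) FROM STDIN') as copy:
                copy.write_row((3, 'hum'))
                copy.write_row((None, 'f\\t'))  # None becomes SQL NULL; no manual \N escaping needed
            cur.execute('SELECT * FROM copytable')
            print(cur.fetchall())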