tests: add fixture for making test project directory
@@ -5,10 +5,9 @@ from pathlib import Path
 import psycopg2
 import pytest
 
-SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
-
 # always test against the source
-sys.path.insert(0, str(SRC_DIR.resolve()))
+SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
+sys.path.insert(0, str(SRC_DIR))
 
 from nominatim.config import Configuration
 from nominatim.db import connection
@@ -20,6 +19,11 @@ import mocks
 from cursor import CursorForTesting
 
 
+@pytest.fixture
+def src_dir():
+    return SRC_DIR
+
+
 @pytest.fixture
 def temp_db(monkeypatch):
     """ Create an empty database for the test. The database name is also
@@ -97,18 +101,25 @@ def table_factory(temp_db_cursor):
 
 
 @pytest.fixture
-def def_config():
-    cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
+def def_config(src_dir):
+    cfg = Configuration(None, src_dir / 'settings')
     cfg.set_libdirs(module='.', osm2pgsql='.',
-                    php=SRC_DIR / 'lib-php',
-                    sql=SRC_DIR / 'lib-sql',
-                    data=SRC_DIR / 'data')
+                    php=src_dir / 'lib-php',
+                    sql=src_dir / 'lib-sql',
+                    data=src_dir / 'data')
     return cfg
 
 
 @pytest.fixture
-def src_dir():
-    return SRC_DIR.resolve()
+def project_env(src_dir, tmp_path):
+    projdir = tmp_path / 'project'
+    projdir.mkdir()
+    cfg = Configuration(projdir, src_dir / 'settings')
+    cfg.set_libdirs(module='.', osm2pgsql='.',
+                    php=src_dir / 'lib-php',
+                    sql=src_dir / 'lib-sql',
+                    data=src_dir / 'data')
+    return cfg
 
 
 @pytest.fixture
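
The new `project_env` fixture pairs a `Configuration` with a real per-test project directory, so tests no longer have to patch `project_dir` by hand. A minimal sketch of a test consuming it (the test name is illustrative; the fixture is the one defined above):

    # Hypothetical consumer of the project_env fixture from this conftest.
    def test_project_dir_is_created(project_env):
        # project_env.project_dir points at a fresh 'project' directory
        # created under pytest's tmp_path for this test alone.
        assert project_env.project_dir.is_dir()
        assert project_env.project_dir.name == 'project'
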
@@ -145,9 +145,8 @@ def test_db(temp_db_conn):
 
 
 @pytest.fixture
-def test_tokenizer(tokenizer_mock, def_config, tmp_path):
-    def_config.project_dir = tmp_path
-    return factory.create_tokenizer(def_config)
+def test_tokenizer(tokenizer_mock, project_env):
+    return factory.create_tokenizer(project_env)
 
 
 @pytest.mark.parametrize("threads", [1, 15])
@@ -8,68 +8,68 @@ from nominatim.tokenizer import factory
 from nominatim.errors import UsageError
 from dummy_tokenizer import DummyTokenizer
 
-@pytest.fixture
-def test_config(def_config, tmp_path, property_table, tokenizer_mock):
-    def_config.project_dir = tmp_path
-    return def_config
-
-
-def test_setup_dummy_tokenizer(temp_db_conn, test_config):
-    tokenizer = factory.create_tokenizer(test_config)
-
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "new"
-    assert (test_config.project_dir / 'tokenizer').is_dir()
-
-    assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
-
-
-def test_setup_tokenizer_dir_exists(test_config):
-    (test_config.project_dir / 'tokenizer').mkdir()
-
-    tokenizer = factory.create_tokenizer(test_config)
-
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "new"
-
-
-def test_setup_tokenizer_dir_failure(test_config):
-    (test_config.project_dir / 'tokenizer').write_text("foo")
-
-    with pytest.raises(UsageError):
-        factory.create_tokenizer(test_config)
-
-
-def test_setup_bad_tokenizer_name(def_config, tmp_path, monkeypatch):
-    def_config.project_dir = tmp_path
+def test_setup_bad_tokenizer_name(project_env, monkeypatch):
     monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')
 
     with pytest.raises(UsageError):
-        factory.create_tokenizer(def_config)
+        factory.create_tokenizer(project_env)
 
 
-def test_load_tokenizer(test_config):
-    factory.create_tokenizer(test_config)
-
-    tokenizer = factory.get_tokenizer_for_db(test_config)
-
-    assert isinstance(tokenizer, DummyTokenizer)
-    assert tokenizer.init_state == "loaded"
-
-
-def test_load_no_tokenizer_dir(test_config):
-    factory.create_tokenizer(test_config)
-
-    test_config.project_dir = test_config.project_dir / 'foo'
-
-    with pytest.raises(UsageError):
-        factory.get_tokenizer_for_db(test_config)
-
-
-def test_load_missing_propoerty(temp_db_cursor, test_config):
-    factory.create_tokenizer(test_config)
-
-    temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
-
-    with pytest.raises(UsageError):
-        factory.get_tokenizer_for_db(test_config)
+class TestFactory:
+    @pytest.fixture(autouse=True)
+    def init_env(self, project_env, property_table, tokenizer_mock):
+        self.config = project_env
+
+
+    def test_setup_dummy_tokenizer(self, temp_db_conn):
+        tokenizer = factory.create_tokenizer(self.config)
+
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "new"
+        assert (self.config.project_dir / 'tokenizer').is_dir()
+
+        assert properties.get_property(temp_db_conn, 'tokenizer') == 'dummy'
+
+
+    def test_setup_tokenizer_dir_exists(self):
+        (self.config.project_dir / 'tokenizer').mkdir()
+
+        tokenizer = factory.create_tokenizer(self.config)
+
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "new"
+
+
+    def test_setup_tokenizer_dir_failure(self):
+        (self.config.project_dir / 'tokenizer').write_text("foo")
+
+        with pytest.raises(UsageError):
+            factory.create_tokenizer(self.config)
+
+
+    def test_load_tokenizer(self):
+        factory.create_tokenizer(self.config)
+
+        tokenizer = factory.get_tokenizer_for_db(self.config)
+
+        assert isinstance(tokenizer, DummyTokenizer)
+        assert tokenizer.init_state == "loaded"
+
+
+    def test_load_no_tokenizer_dir(self):
+        factory.create_tokenizer(self.config)
+
+        self.config.project_dir = self.config.project_dir / 'foo'
+
+        with pytest.raises(UsageError):
+            factory.get_tokenizer_for_db(self.config)
+
+
+    def test_load_missing_property(self, temp_db_cursor):
+        factory.create_tokenizer(self.config)
+
+        temp_db_cursor.execute("TRUNCATE TABLE nominatim_properties")
+
+        with pytest.raises(UsageError):
+            factory.get_tokenizer_for_db(self.config)
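
The class-based rewrite leans on pytest's autouse-fixture idiom: setup runs before every test method in the class, and shared objects live on `self` instead of being threaded through each test's signature. A standalone sketch of the pattern, with illustrative names:

    import pytest

    class TestWithSharedSetup:
        @pytest.fixture(autouse=True)
        def init_env(self, tmp_path):
            # Runs automatically before each test method in this class;
            # attributes set on self replace per-test fixture parameters.
            self.workdir = tmp_path

        def test_workdir_exists(self):
            assert self.workdir.is_dir()
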
@@ -20,20 +20,17 @@ def word_table(temp_db_conn):
 
 
 @pytest.fixture
-def test_config(def_config, tmp_path):
-    def_config.project_dir = tmp_path / 'project'
-    def_config.project_dir.mkdir()
-
+def test_config(project_env, tmp_path):
     sqldir = tmp_path / 'sql'
     sqldir.mkdir()
     (sqldir / 'tokenizer').mkdir()
     (sqldir / 'tokenizer' / 'icu_tokenizer.sql').write_text("SELECT 'a'")
-    shutil.copy(str(def_config.lib_dir.sql / 'tokenizer' / 'icu_tokenizer_tables.sql'),
+    shutil.copy(str(project_env.lib_dir.sql / 'tokenizer' / 'icu_tokenizer_tables.sql'),
                 str(sqldir / 'tokenizer' / 'icu_tokenizer_tables.sql'))
 
-    def_config.lib_dir.sql = sqldir
+    project_env.lib_dir.sql = sqldir
 
-    return def_config
+    return project_env
 
 
 @pytest.fixture
@@ -11,18 +11,20 @@ from nominatim.errors import UsageError
 
 from icu import Transliterator
 
-@pytest.fixture
-def test_config(def_config, tmp_path):
-    project_dir = tmp_path / 'project_dir'
-    project_dir.mkdir()
-    def_config.project_dir = project_dir
+CONFIG_SECTIONS = ('normalization', 'transliteration', 'token-analysis')
 
-    return def_config
+class TestIcuRuleLoader:
 
+    @pytest.fixture(autouse=True)
+    def init_env(self, project_env):
+        self.project_env = project_env
 
-
-@pytest.fixture
-def cfgrules(test_config):
-    def _create_config(*variants, **kwargs):
+
+    def write_config(self, content):
+        (self.project_env.project_dir / 'icu_tokenizer.yaml').write_text(dedent(content))
+
+
+    def config_rules(self, *variants):
         content = dedent("""\
             normalization:
                 - ":: NFD ()"
@@ -33,122 +35,116 @@ def cfgrules(test_config):
             transliteration:
                 - ":: Latin ()"
                 - "[[:Punctuation:][:Space:]]+ > ' '"
-            """)
-        content += "token-analysis:\n  - analyzer: generic\n    variants:\n      - words:\n"
-        content += '\n'.join(("          - " + s for s in variants)) + '\n'
-        for k, v in kwargs:
-            content += "        {}: {}\n".format(k, v)
-        (test_config.project_dir / 'icu_tokenizer.yaml').write_text(content)
-
-        return test_config
-
-    return _create_config
-
-
-def test_empty_rule_set(test_config):
-    (test_config.project_dir / 'icu_tokenizer.yaml').write_text(dedent("""\
-        normalization:
-        transliteration:
-        token-analysis:
-            - analyzer: generic
-              variants:
-        """))
-
-    rules = ICURuleLoader(test_config)
-    assert rules.get_search_rules() == ''
-    assert rules.get_normalization_rules() == ''
-    assert rules.get_transliteration_rules() == ''
-
-CONFIG_SECTIONS = ('normalization', 'transliteration', 'token-analysis')
-
-@pytest.mark.parametrize("section", CONFIG_SECTIONS)
-def test_missing_section(section, test_config):
-    rule_cfg = { s: [] for s in CONFIG_SECTIONS if s != section}
-    (test_config.project_dir / 'icu_tokenizer.yaml').write_text(yaml.dump(rule_cfg))
-
-    with pytest.raises(UsageError):
-        ICURuleLoader(test_config)
-
-
-def test_get_search_rules(cfgrules):
-    loader = ICURuleLoader(cfgrules())
-
-    rules = loader.get_search_rules()
-    trans = Transliterator.createFromRules("test", rules)
-
-    assert trans.transliterate(" Baum straße ") == " baum straße "
-    assert trans.transliterate(" Baumstraße ") == " baumstraße "
-    assert trans.transliterate(" Baumstrasse ") == " baumstrasse "
-    assert trans.transliterate(" Baumstr ") == " baumstr "
-    assert trans.transliterate(" Baumwegstr ") == " baumwegstr "
-    assert trans.transliterate(" Αθήνα ") == " athēna "
-    assert trans.transliterate(" проспект ") == " prospekt "
-
-
-def test_get_normalization_rules(cfgrules):
-    loader = ICURuleLoader(cfgrules())
-    rules = loader.get_normalization_rules()
-    trans = Transliterator.createFromRules("test", rules)
-
-    assert trans.transliterate(" проспект-Prospekt ") == " проспект prospekt "
-
-
-def test_get_transliteration_rules(cfgrules):
-    loader = ICURuleLoader(cfgrules())
-    rules = loader.get_transliteration_rules()
-    trans = Transliterator.createFromRules("test", rules)
-
-    assert trans.transliterate(" проспект-Prospekt ") == " prospekt Prospekt "
-
-
-def test_transliteration_rules_from_file(test_config):
-    cfgpath = test_config.project_dir / ('icu_tokenizer.yaml')
-    cfgpath.write_text(dedent("""\
-        normalization:
-        transliteration:
-            - "'ax' > 'b'"
-            - !include transliteration.yaml
-        token-analysis:
-            - analyzer: generic
-              variants:
-        """))
-    transpath = test_config.project_dir / ('transliteration.yaml')
-    transpath.write_text('- "x > y"')
-
-    loader = ICURuleLoader(test_config)
-    rules = loader.get_transliteration_rules()
-    trans = Transliterator.createFromRules("test", rules)
-
-    assert trans.transliterate(" axxt ") == " byt "
-
-
-def test_search_rules(cfgrules):
-    config = cfgrules('~street => s,st', 'master => mstr')
-    proc = ICURuleLoader(config).make_token_analysis()
-
-    assert proc.search.transliterate('Master Street').strip() == 'master street'
-    assert proc.search.transliterate('Earnes St').strip() == 'earnes st'
-    assert proc.search.transliterate('Nostreet').strip() == 'nostreet'
-
-
-class TestGetReplacements:
-
-    @pytest.fixture(autouse=True)
-    def setup_cfg(self, cfgrules):
-        self.cfgrules = cfgrules
+            token-analysis:
+                - analyzer: generic
+                  variants:
+                      - words:
+            """)
+        content += '\n'.join(("                - " + s for s in variants)) + '\n'
+        self.write_config(content)
+
 
     def get_replacements(self, *variants):
-        loader = ICURuleLoader(self.cfgrules(*variants))
+        self.config_rules(*variants)
+        loader = ICURuleLoader(self.project_env)
         rules = loader.analysis[None].config['replacements']
 
         return sorted((k, sorted(v)) for k,v in rules)
 
 
+    def test_empty_rule_set(self):
+        self.write_config("""\
+            normalization:
+            transliteration:
+            token-analysis:
+                - analyzer: generic
+                  variants:
+            """)
+
+        rules = ICURuleLoader(self.project_env)
+        assert rules.get_search_rules() == ''
+        assert rules.get_normalization_rules() == ''
+        assert rules.get_transliteration_rules() == ''
+
+
+    @pytest.mark.parametrize("section", CONFIG_SECTIONS)
+    def test_missing_section(self, section):
+        rule_cfg = { s: [] for s in CONFIG_SECTIONS if s != section}
+        self.write_config(yaml.dump(rule_cfg))
+
+        with pytest.raises(UsageError):
+            ICURuleLoader(self.project_env)
+
+
+    def test_get_search_rules(self):
+        self.config_rules()
+        loader = ICURuleLoader(self.project_env)
+
+        rules = loader.get_search_rules()
+        trans = Transliterator.createFromRules("test", rules)
+
+        assert trans.transliterate(" Baum straße ") == " baum straße "
+        assert trans.transliterate(" Baumstraße ") == " baumstraße "
+        assert trans.transliterate(" Baumstrasse ") == " baumstrasse "
+        assert trans.transliterate(" Baumstr ") == " baumstr "
+        assert trans.transliterate(" Baumwegstr ") == " baumwegstr "
+        assert trans.transliterate(" Αθήνα ") == " athēna "
+        assert trans.transliterate(" проспект ") == " prospekt "
+
+
+    def test_get_normalization_rules(self):
+        self.config_rules()
+        loader = ICURuleLoader(self.project_env)
+        rules = loader.get_normalization_rules()
+        trans = Transliterator.createFromRules("test", rules)
+
+        assert trans.transliterate(" проспект-Prospekt ") == " проспект prospekt "
+
+
+    def test_get_transliteration_rules(self):
+        self.config_rules()
+        loader = ICURuleLoader(self.project_env)
+        rules = loader.get_transliteration_rules()
+        trans = Transliterator.createFromRules("test", rules)
+
+        assert trans.transliterate(" проспект-Prospekt ") == " prospekt Prospekt "
+
+
+    def test_transliteration_rules_from_file(self):
+        self.write_config("""\
+            normalization:
+            transliteration:
+                - "'ax' > 'b'"
+                - !include transliteration.yaml
+            token-analysis:
+                - analyzer: generic
+                  variants:
+            """)
+        transpath = self.project_env.project_dir / ('transliteration.yaml')
+        transpath.write_text('- "x > y"')
+
+        loader = ICURuleLoader(self.project_env)
+        rules = loader.get_transliteration_rules()
+        trans = Transliterator.createFromRules("test", rules)
+
+        assert trans.transliterate(" axxt ") == " byt "
+
+
+    def test_search_rules(self):
+        self.config_rules('~street => s,st', 'master => mstr')
+        proc = ICURuleLoader(self.project_env).make_token_analysis()
+
+        assert proc.search.transliterate('Master Street').strip() == 'master street'
+        assert proc.search.transliterate('Earnes St').strip() == 'earnes st'
+        assert proc.search.transliterate('Nostreet').strip() == 'nostreet'
+
+
     @pytest.mark.parametrize("variant", ['foo > bar', 'foo -> bar -> bar',
                                          '~foo~ -> bar', 'fo~ o -> bar'])
     def test_invalid_variant_description(self, variant):
+        self.config_rules(variant)
         with pytest.raises(UsageError):
-            ICURuleLoader(self.cfgrules(variant))
+            ICURuleLoader(self.project_env)
 
     def test_add_full(self):
         repl = self.get_replacements("foo -> bar")
@@ -11,28 +11,25 @@ from nominatim.db import properties
 from nominatim.errors import UsageError
 
 @pytest.fixture
-def test_config(def_config, tmp_path):
-    def_config.project_dir = tmp_path / 'project'
-    def_config.project_dir.mkdir()
-
+def test_config(project_env, tmp_path):
     module_dir = tmp_path / 'module_src'
    module_dir.mkdir()
     (module_dir / 'nominatim.so').write_text('TEST nomiantim.so')
 
-    def_config.lib_dir.module = module_dir
+    project_env.lib_dir.module = module_dir
 
     sqldir = tmp_path / 'sql'
     sqldir.mkdir()
     (sqldir / 'tokenizer').mkdir()
     (sqldir / 'tokenizer' / 'legacy_tokenizer.sql').write_text("SELECT 'a'")
     (sqldir / 'words.sql').write_text("SELECT 'a'")
-    shutil.copy(str(def_config.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer_tables.sql'),
+    shutil.copy(str(project_env.lib_dir.sql / 'tokenizer' / 'legacy_tokenizer_tables.sql'),
                 str(sqldir / 'tokenizer' / 'legacy_tokenizer_tables.sql'))
 
-    def_config.lib_dir.sql = sqldir
-    def_config.lib_dir.data = sqldir
+    project_env.lib_dir.sql = sqldir
+    project_env.lib_dir.data = sqldir
 
-    return def_config
+    return project_env
 
 
 @pytest.fixture
@@ -13,24 +13,21 @@ def test_load_ranks_def_config(temp_db_conn, temp_db_cursor, def_config):
 
     assert temp_db_cursor.table_rows('address_levels') > 0
 
-def test_load_ranks_from_project_dir(def_config, temp_db_conn, temp_db_cursor,
-                                     tmp_path):
-    test_file = tmp_path / 'address-levels.json'
+def test_load_ranks_from_project_dir(project_env, temp_db_conn, temp_db_cursor):
+    test_file = project_env.project_dir / 'address-levels.json'
     test_file.write_text('[{"tags":{"place":{"sea":2}}}]')
-    def_config.project_dir = tmp_path
 
-    load_address_levels_from_config(temp_db_conn, def_config)
+    load_address_levels_from_config(temp_db_conn, project_env)
 
     assert temp_db_cursor.table_rows('address_levels') == 1
 
 
-def test_load_ranks_from_broken_file(def_config, temp_db_conn, tmp_path):
-    test_file = tmp_path / 'address-levels.json'
+def test_load_ranks_from_broken_file(project_env, temp_db_conn):
+    test_file = project_env.project_dir / 'address-levels.json'
     test_file.write_text('[{"tags":"place":{"sea":2}}}]')
-    def_config.project_dir = tmp_path
 
     with pytest.raises(json.decoder.JSONDecodeError):
-        load_address_levels_from_config(temp_db_conn, def_config)
+        load_address_levels_from_config(temp_db_conn, project_env)
 
 
 def test_load_ranks_country(temp_db_conn, temp_db_cursor):
@@ -5,47 +5,47 @@ import pytest
 
 from nominatim.tools.refresh import create_functions
 
-@pytest.fixture
-def sql_tmp_path(tmp_path, def_config):
-    def_config.lib_dir.sql = tmp_path
-    return tmp_path
-
-@pytest.fixture
-def conn(sql_preprocessor, temp_db_conn):
-    return temp_db_conn
+class TestCreateFunctions:
+    @pytest.fixture(autouse=True)
+    def init_env(self, sql_preprocessor, temp_db_conn, def_config, tmp_path):
+        self.conn = temp_db_conn
+        self.config = def_config
+        def_config.lib_dir.sql = tmp_path
 
 
-def test_create_functions(temp_db_cursor, conn, def_config, sql_tmp_path):
-    sqlfile = sql_tmp_path / 'functions.sql'
-    sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
-                          AS $$
-                          BEGIN
-                            RETURN 43;
-                          END;
-                          $$ LANGUAGE plpgsql IMMUTABLE;
-                       """)
+    def write_functions(self, content):
+        sqlfile = self.config.lib_dir.sql / 'functions.sql'
+        sqlfile.write_text(content)
 
-    create_functions(conn, def_config)
 
-    assert temp_db_cursor.scalar('SELECT test()') == 43
+    def test_create_functions(self, temp_db_cursor):
+        self.write_functions("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
+                                AS $$
+                                BEGIN
+                                  RETURN 43;
+                                END;
+                                $$ LANGUAGE plpgsql IMMUTABLE;
+                             """)
 
+        create_functions(self.conn, self.config)
 
-@pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
-def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path,
-                                        dbg, ret):
-    sqlfile = sql_tmp_path / 'functions.sql'
-    sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
-                          AS $$
-                          BEGIN
-                            {% if debug %}
-                            RETURN 43;
-                            {% else %}
-                            RETURN 22;
-                            {% endif %}
-                          END;
-                          $$ LANGUAGE plpgsql IMMUTABLE;
-                       """)
+        assert temp_db_cursor.scalar('SELECT test()') == 43
 
-    create_functions(conn, def_config, enable_debug=dbg)
 
-    assert temp_db_cursor.scalar('SELECT test()') == ret
+    @pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
+    def test_create_functions_with_template(self, temp_db_cursor, dbg, ret):
+        self.write_functions("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
+                                AS $$
+                                BEGIN
+                                  {% if debug %}
+                                  RETURN 43;
+                                  {% else %}
+                                  RETURN 22;
+                                  {% endif %}
+                                END;
+                                $$ LANGUAGE plpgsql IMMUTABLE;
+                             """)
+
+        create_functions(self.conn, self.config, enable_debug=dbg)
+
+        assert temp_db_cursor.scalar('SELECT test()') == ret
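
The `{% if debug %}` blocks in the test SQL are template instructions evaluated while the functions are installed; `enable_debug` selects which branch survives. A rough sketch of the conditional rendering, assuming plain Jinja2 (Nominatim's SQL preprocessor wires in more context than shown here):

    import jinja2

    SQL = "BEGIN {% if debug %}RETURN 43;{% else %}RETURN 22;{% endif %} END;"

    print(jinja2.Template(SQL).render(debug=True))   # keeps the RETURN 43; branch
    print(jinja2.Template(SQL).render(debug=False))  # keeps the RETURN 22; branch
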
@@ -22,12 +22,11 @@ def test_script(tmp_path):
 
 
 @pytest.fixture
-def run_website_script(tmp_path, def_config, temp_db_conn):
-    def_config.lib_dir.php = tmp_path / 'php'
-    def_config.project_dir = tmp_path
+def run_website_script(tmp_path, project_env, temp_db_conn):
+    project_env.lib_dir.php = tmp_path / 'php'
 
     def _runner():
-        refresh.setup_website(tmp_path, def_config, temp_db_conn)
+        refresh.setup_website(tmp_path, project_env, temp_db_conn)
 
         proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
                                tmp_path / 'search.php'], check=False)