Merge pull request #2336 from lonvia/do-not-mask-error-when-loading-tokenizer

Do not hide errors when importing tokenizer
This commit is contained in:
Sarah Hoffmann
2021-05-18 23:00:10 +02:00
committed by GitHub
2 changed files with 8 additions and 5 deletions

View File

@@ -15,6 +15,7 @@ normalizer module is installed, when the tokenizer is created.
 """
 import logging
 import importlib
+from pathlib import Path

 from ..errors import UsageError
 from ..db import properties
@@ -25,12 +26,13 @@ LOG = logging.getLogger()

 def _import_tokenizer(name):
     """ Load the tokenizer.py module from project directory.
     """
-    try:
-        return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
-    except ModuleNotFoundError as exp:
-        LOG.fatal("No tokenizer named '%s' available. "
-                  "Check the setting of NOMINATIM_TOKENIZER.", name)
-        raise UsageError('Tokenizer not found') from exp
+    src_file = Path(__file__).parent / (name + '_tokenizer.py')
+
+    if not src_file.is_file():
+        LOG.fatal("No tokenizer named '%s' available. "
+                  "Check the setting of NOMINATIM_TOKENIZER.", name)
+        raise UsageError('Tokenizer not found')
+
+    return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')


 def create_tokenizer(config, init_db=True, module_name=None):

View File

@@ -17,6 +17,7 @@ from nominatim.config import Configuration
 from nominatim.db import connection
 from nominatim.db.sql_preprocessor import SQLPreprocessor
 from nominatim.db import properties
+import nominatim.tokenizer.factory

 import dummy_tokenizer
 import mocks
@@ -273,7 +274,7 @@ def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
     def _import_dummy(module, *args, **kwargs):
         return dummy_tokenizer

-    monkeypatch.setattr(importlib, "import_module", _import_dummy)
+    monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
     properties.set_property(temp_db_conn, 'tokenizer', 'dummy')

     def _create_tokenizer():