split up large setup function

This commit is contained in:
Sarah Hoffmann
2021-08-15 12:24:13 +02:00
parent 56d24085f9
commit 75a5c7013f
2 changed files with 75 additions and 38 deletions

View File

@@ -1,7 +1,12 @@
"""
Provides custom functions over command-line arguments.
"""
import logging
from pathlib import Path
from nominatim.errors import UsageError
LOG = logging.getLogger()
class NominatimArgs:
""" Customized namespace class for the nominatim command line tool
@@ -25,3 +30,20 @@ class NominatimArgs:
main_index=self.config.TABLESPACE_PLACE_INDEX
)
)
def get_osm_file_list(self):
    """ Return the --osm-file argument as a list of Paths or None
        if no argument was given. The function also checks if the files
        exist and raises a UsageError if one cannot be found.
    """
    if not self.osm_file:
        return None

    # Convert all arguments up front, then validate each one.
    paths = [Path(name) for name in self.osm_file]

    for osm_path in paths:
        if not osm_path.is_file():
            LOG.fatal("OSM file '%s' does not exist.", osm_path)
            raise UsageError('Cannot access file.')

    return paths

View File

@@ -9,7 +9,6 @@ import psutil
from nominatim.db.connection import connect
from nominatim.db import status, properties
from nominatim.version import NOMINATIM_VERSION
from nominatim.errors import UsageError
# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
@@ -52,19 +51,13 @@ class SetupAll:
@staticmethod
def run(args): # pylint: disable=too-many-statements
def run(args):
from ..tools import database_import, refresh, postcodes, freeze
from ..indexer.indexer import Indexer
from ..tokenizer import factory as tokenizer_factory
if args.osm_file:
files = [Path(f) for f in args.osm_file]
for fname in files:
if not fname.is_file():
LOG.fatal("OSM file '%s' does not exist.", fname)
raise UsageError('Cannot access file.')
if args.continue_at is None:
files = args.get_osm_file_list()
database_import.setup_database_skeleton(args.config.get_libpq_dsn(),
args.data_dir,
args.no_partitions,
@@ -76,21 +69,7 @@ class SetupAll:
drop=args.no_updates,
ignore_errors=args.ignore_errors)
with connect(args.config.get_libpq_dsn()) as conn:
LOG.warning('Create functions (1st pass)')
refresh.create_functions(conn, args.config, False, False)
LOG.warning('Create tables')
database_import.create_tables(conn, args.config,
reverse_only=args.reverse_only)
refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
LOG.warning('Create functions (2nd pass)')
refresh.create_functions(conn, args.config, False, False)
LOG.warning('Create table triggers')
database_import.create_table_triggers(conn, args.config)
LOG.warning('Create partition tables')
database_import.create_partition_tables(conn, args.config)
LOG.warning('Create functions (3rd pass)')
refresh.create_functions(conn, args.config, False, False)
SetupAll._setup_tables(args.config, args.reverse_only)
LOG.warning('Importing wikipedia importance data')
data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
@@ -109,12 +88,7 @@ class SetupAll:
args.threads or psutil.cpu_count() or 1)
LOG.warning("Setting up tokenizer")
if args.continue_at is None or args.continue_at == 'load-data':
# (re)initialise the tokenizer data
tokenizer = tokenizer_factory.create_tokenizer(args.config)
else:
# just load the tokenizer
tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
tokenizer = SetupAll._get_tokenizer(args.continue_at, args.config)
if args.continue_at is None or args.continue_at == 'load-data':
LOG.warning('Calculate postcodes')
@@ -149,19 +123,48 @@ class SetupAll:
refresh.setup_website(webdir, args.config, conn)
with connect(args.config.get_libpq_dsn()) as conn:
try:
dbdate = status.compute_database_date(conn)
status.set_status(conn, dbdate)
LOG.info('Database is at %s.', dbdate)
except Exception as exc: # pylint: disable=broad-except
LOG.error('Cannot determine date of database: %s', exc)
SetupAll._set_database_date(conn)
properties.set_property(conn, 'database_version',
'{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))
return 0
@staticmethod
def _setup_tables(config, reverse_only):
    """ Set up the basic database layout: tables, indexes and functions.

        The SQL functions must be created in multiple passes because the
        tables and triggers created in between depend on them.
    """
    from ..tools import database_import, refresh

    with connect(config.get_libpq_dsn()) as db:
        LOG.warning('Create functions (1st pass)')
        refresh.create_functions(db, config, False, False)
        LOG.warning('Create tables')
        database_import.create_tables(db, config,
                                      reverse_only=reverse_only)
        refresh.load_address_levels_from_file(db, Path(config.ADDRESS_LEVEL_CONFIG))
        LOG.warning('Create functions (2nd pass)')
        refresh.create_functions(db, config, False, False)
        LOG.warning('Create table triggers')
        database_import.create_table_triggers(db, config)
        LOG.warning('Create partition tables')
        database_import.create_partition_tables(db, config)
        LOG.warning('Create functions (3rd pass)')
        refresh.create_functions(db, config, False, False)
@staticmethod
def _get_tokenizer(continue_at, config):
    """ Set up a new tokenizer or load an already initialised one.
    """
    from ..tokenizer import factory as tokenizer_factory

    if continue_at not in (None, 'load-data'):
        # just load the tokenizer
        return tokenizer_factory.get_tokenizer_for_db(config)

    # (re)initialise the tokenizer data
    return tokenizer_factory.create_tokenizer(config)
@staticmethod
def _create_pending_index(conn, tablespace):
""" Add a supporting index for finding places still to be indexed.
@@ -182,3 +185,15 @@ class SetupAll:
{} WHERE indexed_status > 0
""".format(tablespace))
conn.commit()
@staticmethod
def _set_database_date(conn):
    """ Determine the database date and set the status accordingly.

        Failure to compute the date is not fatal: the error is logged
        and the import continues without a status entry.
    """
    try:
        import_date = status.compute_database_date(conn)
        status.set_status(conn, import_date)
        LOG.info('Database is at %s.', import_date)
    except Exception as exc: # pylint: disable=broad-except
        LOG.error('Cannot determine date of database: %s', exc)