Ported tiger-data-import to Python and added tarball support

Darkshredder
2021-03-08 21:57:56 +05:30
parent 9d103503f7
commit 2af82975cd
4 changed files with 120 additions and 8 deletions
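
To show how the ported importer is meant to be driven, here is a minimal sketch of calling the new Python entry point directly. It is illustrative only and not part of the commit: the dsn, config and sqllib_dir parameters stand in for what the Nominatim CLI normally supplies (args.config.get_libpq_dsn(), the loaded Configuration and the SQL library directory), and the archive name is made up. It assumes the package is importable as nominatim.

    # Illustrative only -- not part of the commit.
    from nominatim.tools import tiger_data

    def import_tiger(dsn, config, sqllib_dir):
        # The second argument may be a directory of *.sql files or, new with
        # this commit, a .tar.gz archive containing them. 'tiger-2020.tar.gz'
        # is a hypothetical file name; 4 is the number of threads.
        tiger_data.add_tiger_data(dsn, 'tiger-2020.tar.gz', 4, config, sqllib_dir)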


@@ -13,6 +13,7 @@ from .tools.exec_utils import run_legacy_script, run_php_server
from .errors import UsageError
from . import clicmd
from .clicmd.args import NominatimArgs
from .tools import tiger_data

LOG = logging.getLogger()
@@ -166,8 +167,11 @@ class UpdateAddData:
    @staticmethod
    def run(args):
        if args.tiger_data:
            os.environ['NOMINATIM_TIGER_DATA_PATH'] = args.tiger_data
            return run_legacy_script('setup.php', '--import-tiger-data', nominatim_env=args)
            return tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
                                             args.tiger_data,
                                             args.threads or 1,
                                             args.config,
                                             args.sqllib_dir)

        params = ['update.php']
        if args.file:

@@ -58,10 +58,12 @@ class AdminTransition:
                           help="Ignore certain errors on import.")
        group.add_argument('--reverse-only', action='store_true',
                           help='Do not create search tables and indexes')
        group.add_argument('--tiger-data', metavar='FILE',
                           help='File to import')

    @staticmethod
    def run(args):
        from ..tools import database_import
        from ..tools import database_import, tiger_data
        from ..tools import refresh

        if args.create_db:
@@ -127,3 +129,11 @@ class AdminTransition:
        LOG.warning('Create Search indices')
        with connect(args.config.get_libpq_dsn()) as conn:
            database_import.create_search_indices(conn, args.config, args.sqllib_dir, args.drop)

        if args.tiger_data:
            LOG.warning('Tiger data')
            tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
                                      args.tiger_data,
                                      args.threads or 1,
                                      args.config,
                                      args.sqllib_dir)
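
For the transition command, --tiger-data is a plain string option, so when it is omitted args.tiger_data stays None and the "if args.tiger_data:" guard above simply skips the Tiger step. A small stand-alone illustration of that behaviour (the parser here is a stand-in, not the project's real argument setup):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--tiger-data', metavar='FILE', help='File to import')

    print(parser.parse_args([]).tiger_data)                                  # None
    print(parser.parse_args(['--tiger-data', 'tiger.tar.gz']).tiger_data)    # tiger.tar.gz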


@@ -0,0 +1,96 @@
"""
Functions for setting up and importing a new Nominatim database.
"""
import logging
import os
import time
import tarfile
import selectors
from ..db.connection import connect
from ..db.async_connection import DBConnection
from ..db.sql_preprocessor import SQLPreprocessor
LOG = logging.getLogger()
def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
""" Import tiger data from directory or tar file
"""
# Handling directory or tarball file.
is_tarfile = False
if(data_dir.endswith('.tar.gz')):
is_tarfile = True
tar = tarfile.open(data_dir)
sql_files = [i for i in tar.getmembers() if i.name.endswith('.sql')]
LOG.warning(f'Found {len(sql_files)} SQL files in tarfile with path {data_dir}')
if(not len(sql_files)):
LOG.warning(f'Tiger data import selected but no files found in tarfile with path {data_dir}')
return
else:
files = os.listdir(data_dir)
sql_files = [i for i in files if i.endswith('.sql')]
LOG.warning(f'Found {len(sql_files)} SQL files in path {data_dir}')
if(not len(sql_files)):
LOG.warning(f'Tiger data import selected but no files found in path {data_dir}')
return
with connect(dsn) as conn:
sql = SQLPreprocessor(conn, config, sqllib_dir)
sql.run_sql_file(conn, 'tiger_import_start.sql')
# Reading sql_files and then for each file line handling
# sql_query in <threads - 1> chunks.
sel = selectors.DefaultSelector()
place_threads = max(1, threads - 1)
for sql_file in sql_files:
if(not is_tarfile):
file_path = os.path.join(data_dir, sql_file)
file = open(file_path)
else:
file = tar.extractfile(sql_file)
lines = 0
end_of_file = False
total_used_threads = place_threads
while(True):
if(end_of_file):
break
for imod in range(place_threads):
conn = DBConnection(dsn)
conn.connect()
sql_query = file.readline()
lines+=1
if(not sql_query):
end_of_file = True
total_used_threads = imod
break
conn.perform(sql_query)
sel.register(conn, selectors.EVENT_READ, conn)
if(lines==1000):
print('. ', end='', flush=True)
lines=0
todo = min(place_threads,total_used_threads)
while todo > 0:
for key, _ in sel.select(1):
try:
conn = key.data
sel.unregister(conn)
conn.wait()
conn.close()
todo -= 1
except:
todo -=1
if(is_tarfile):
tar.close()
print('\n')
LOG.warning("Creating indexes on Tiger data")
with connect(dsn) as conn:
sql = SQLPreprocessor(conn, config, sqllib_dir)
sql.run_sql_file(conn, 'tiger_import_finish.sql')
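
The heart of the new tarball support is that both branches above end up producing a set of .sql sources that the import loop reads line by line; only the way a file handle is obtained differs (os.listdir plus open() for a directory, tarfile.getmembers plus extractfile() for an archive, the latter returning binary file objects). A condensed, self-contained sketch of that selection logic, written independently of the Nominatim code and using illustrative names:

    import os
    import tarfile

    def iter_sql_sources(data_dir):
        """Yield (name, file object) for every .sql file in a directory or .tar.gz."""
        if data_dir.endswith('.tar.gz'):
            with tarfile.open(data_dir) as tar:
                for member in tar.getmembers():
                    if member.name.endswith('.sql'):
                        # extractfile() returns a binary file-like object
                        yield member.name, tar.extractfile(member)
        else:
            for name in os.listdir(data_dir):
                if name.endswith('.sql'):
                    yield name, open(os.path.join(data_dir, name))

Whichever branch is taken, the import loop above then hands individual lines of each file to separate DBConnection workers, registers every running statement with the selector, and drains the batch before reading the next chunk of lines.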