Mirror of https://github.com/osm-search/Nominatim.git
Linting fixes
@@ -168,10 +168,10 @@ class UpdateAddData:
     def run(args):
         if args.tiger_data:
             return tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
-                                      args.tiger_data,
-                                      args.threads or 1,
-                                      args.config,
-                                      args.sqllib_dir)
+                                             args.tiger_data,
+                                             args.threads or 1,
+                                             args.config,
+                                             args.sqllib_dir)

         params = ['update.php']
         if args.file:
@@ -129,7 +129,7 @@ class AdminTransition:
             LOG.warning('Create Search indices')
             with connect(args.config.get_libpq_dsn()) as conn:
                 database_import.create_search_indices(conn, args.config, args.sqllib_dir, args.drop)

         if args.tiger_data:
             LOG.warning('Tiger data')
             tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
@@ -3,7 +3,6 @@ Functions for setting up and importing a new Nominatim database.
 """
 import logging
 import os
-import time
 import tarfile
 import selectors

@@ -11,6 +10,9 @@ from ..db.connection import connect
 from ..db.async_connection import DBConnection
 from ..db.sql_preprocessor import SQLPreprocessor

+# pylint: disable=R0912
+# pylint: disable=R0914,R0915,W0702
+
 LOG = logging.getLogger()

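The added lines are module-wide pylint pragmas: R0912 (too-many-branches), R0914 (too-many-locals), R0915 (too-many-statements) and W0702 (bare-except) are exactly the checks the long add_tiger_data function below would otherwise trip. A minimal sketch of the two placements pylint accepts, using a hypothetical helper that is not part of Nominatim:

# Module-wide form, as in the commit: applies to the whole file.
# pylint: disable=R0912
# pylint: disable=R0914,R0915,W0702

def risky_cleanup(path):
    # Hypothetical helper, for illustration only. The narrower alternative
    # is to scope the disable to the single offending statement.
    try:
        open(path).close()
    except:  # pylint: disable=bare-except
        pass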
@@ -19,22 +21,22 @@ def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
     """
     # Handling directory or tarball file.
     is_tarfile = False
-    if(data_dir.endswith('.tar.gz')):
+    if data_dir.endswith('.tar.gz'):
         is_tarfile = True
         tar = tarfile.open(data_dir)
         sql_files = [i for i in tar.getmembers() if i.name.endswith('.sql')]
-        LOG.warning(f'Found {len(sql_files)} SQL files in tarfile with path {data_dir}')
-        if(not len(sql_files)):
-            LOG.warning(f'Tiger data import selected but no files found in tarfile with path {data_dir}')
+        LOG.warning("Found %d SQL files in tarfile with path %s", len(sql_files), data_dir)
+        if not sql_files:
+            LOG.warning("Tiger data import selected but no files in tarfile's path %s", data_dir)
             return
     else:
         files = os.listdir(data_dir)
         sql_files = [i for i in files if i.endswith('.sql')]
-        LOG.warning(f'Found {len(sql_files)} SQL files in path {data_dir}')
-        if(not len(sql_files)):
-            LOG.warning(f'Tiger data import selected but no files found in path {data_dir}')
+        LOG.warning("Found %d SQL files in path %s", len(sql_files), data_dir)
+        if not sql_files:
+            LOG.warning("Tiger data import selected but no files found in path %s", data_dir)
             return

     with connect(dsn) as conn:
         sql = SQLPreprocessor(conn, config, sqllib_dir)
         sql.run_sql_file(conn, 'tiger_import_start.sql')
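The LOG.warning rewrites above replace f-string interpolation with the logging module's lazy %-style arguments, which is what pylint's logging-fstring-interpolation check (W1203) asks for: the message is only rendered when the record is actually emitted. A self-contained sketch of the before/after pattern, with made-up values standing in for the real TIGER paths:

import logging

logging.basicConfig(level=logging.WARNING)
LOG = logging.getLogger()

sql_files = ['01_place.sql']   # stand-in data, not the real TIGER files
data_dir = '/tmp/tiger'

# Before: the f-string is built eagerly, even if the record is filtered out.
LOG.warning(f'Found {len(sql_files)} SQL files in path {data_dir}')

# After: arguments are passed separately and formatted lazily by logging.
LOG.warning("Found %d SQL files in path %s", len(sql_files), data_dir)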
@@ -44,7 +46,7 @@ def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
     sel = selectors.DefaultSelector()
     place_threads = max(1, threads - 1)
     for sql_file in sql_files:
-        if(not is_tarfile):
+        if not is_tarfile:
             file_path = os.path.join(data_dir, sql_file)
             file = open(file_path)
         else:
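For the tarball case the SQL statements come straight out of the archive members listed earlier with tar.getmembers(); the body of the else branch is elided in this hunk. A self-contained sketch of that tarfile pattern, building a tiny in-memory archive so it runs on its own (the member name is made up):

import io
import tarfile

# Build a tiny in-memory .tar.gz so the sketch needs no files on disk.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode='w:gz') as tar:
    data = b"SELECT 1;\n"
    info = tarfile.TarInfo(name='01_place.sql')
    info.size = len(data)
    tar.addfile(info, io.BytesIO(data))
buf.seek(0)

# Same pattern as the import code: open, list members, keep the .sql ones.
with tarfile.open(fileobj=buf, mode='r:gz') as tar:
    sql_members = [m for m in tar.getmembers() if m.name.endswith('.sql')]
    for member in sql_members:
        fobj = tar.extractfile(member)
        if fobj is not None:
            print(member.name, fobj.readline())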
@@ -52,17 +54,17 @@ def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
         lines = 0
         end_of_file = False
         total_used_threads = place_threads
-        while(True):
-            if(end_of_file):
+        while True :
+            if end_of_file:
                 break
             for imod in range(place_threads):
                 conn = DBConnection(dsn)
                 conn.connect()

                 sql_query = file.readline()
-                lines+=1
+                lines += 1

-                if(not sql_query):
+                if not sql_query:
                     end_of_file = True
                     total_used_threads = imod
                     break
@@ -70,11 +72,11 @@ def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
                 conn.perform(sql_query)
                 sel.register(conn, selectors.EVENT_READ, conn)

-                if(lines==1000):
+                if lines == 1000:
                     print('. ', end='', flush=True)
-                    lines=0
+                    lines = 0

-            todo = min(place_threads,total_used_threads)
+            todo = min(place_threads, total_used_threads)
             while todo > 0:
                 for key, _ in sel.select(1):
                     try:
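This loop hands one SQL statement to each of the place_threads asynchronous connections, registers them with a selectors.DefaultSelector, and then drains todo completions before starting the next batch. Below is a minimal standard-library sketch of the same register/select/unregister cycle; a socket pair stands in for Nominatim's DBConnection, which is an assumption made purely for illustration:

import selectors
import socket

sel = selectors.DefaultSelector()

# A socket pair stands in for an async DB connection: the "backend" end
# writes when its statement is done, making the "conn" end readable.
backend, conn = socket.socketpair()
sel.register(conn, selectors.EVENT_READ, data=conn)
backend.send(b'done')            # pretend the statement just finished

todo = 1                         # one registered connection to drain
while todo > 0:
    for key, _ in sel.select(timeout=1):
        ready = key.data
        ready.recv(16)           # consume the completion signal
        sel.unregister(ready)
        ready.close()
        todo -= 1

backend.close()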
@@ -84,9 +86,9 @@ def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
                         conn.close()
                         todo -= 1
                     except:
-                        todo -=1
+                        todo -= 1

-    if(is_tarfile):
+    if is_tarfile:
         tar.close()
     print('\n')
     LOG.warning("Creating indexes on Tiger data")