Compare commits

..

4 Commits

Author SHA1 Message Date
Sarah Hoffmann
f69890c86f adapt to release 4.3.2 2023-11-17 16:33:04 +01:00
Sarah Hoffmann
7ab73f5698 python deployment: add systemd service for the socket 2023-10-25 14:55:35 +02:00
Sarah Hoffmann
a2b89309ff adapt to release 4.3.1 2023-10-04 21:25:03 +02:00
Sarah Hoffmann
0a892133af adapt docs for release 2023-09-07 11:04:12 +02:00
26 changed files with 111 additions and 211 deletions

View File

@@ -20,7 +20,7 @@ project(nominatim)
set(NOMINATIM_VERSION_MAJOR 4) set(NOMINATIM_VERSION_MAJOR 4)
set(NOMINATIM_VERSION_MINOR 3) set(NOMINATIM_VERSION_MINOR 3)
set(NOMINATIM_VERSION_PATCH 2) set(NOMINATIM_VERSION_PATCH 0)
set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}") set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}")

View File

@@ -1,15 +1,3 @@
4.3.2
* fix potential SQL injection issue for 'nominatim admin --collect-os-info'
* PHP frontend: fix on-the-fly lookup of postcode areas near boundaries
* Python frontend: improve handling of viewbox
* Python frontend: correct deployment instructions
4.3.1
* reintroduce result rematching
* improve search of multi-part names
* fix accidentally switched meaning of --reverse-only and --search-only in
warm command
4.3.0 4.3.0
* fix failing importance recalculation command * fix failing importance recalculation command
* fix merging of linked names into unnamed boundaries * fix merging of linked names into unnamed boundaries

View File

@@ -1,4 +1,4 @@
site_name: Nominatim Manual site_name: Nominatim 4.3.2 Manual
theme: theme:
name: material name: material
features: features:

View File

@@ -261,7 +261,7 @@ BEGIN
-- If the place had a postcode assigned, take this one only -- If the place had a postcode assigned, take this one only
-- into consideration when it is an area and the place does not have -- into consideration when it is an area and the place does not have
-- a postcode itself. -- a postcode itself.
IF location.fromarea AND location_isaddress IF location.fromarea AND location.isaddress
AND (place.address is null or not place.address ? 'postcode') AND (place.address is null or not place.address ? 'postcode')
THEN THEN
place.postcode := null; -- remove the less exact postcode place.postcode := null; -- remove the less exact postcode

View File

@@ -292,6 +292,12 @@ class SearchResults(List[SearchResult]):
May be empty when no result was found. May be empty when no result was found.
""" """
def localize(self, locales: Locales) -> None:
""" Apply the given locales to all results.
"""
for result in self:
result.localize(locales)
def _filter_geometries(row: SaRow) -> Dict[str, str]: def _filter_geometries(row: SaRow) -> Dict[str, str]:
return {k[9:]: v for k, v in row._mapping.items() # pylint: disable=W0212 return {k[9:]: v for k, v in row._mapping.items() # pylint: disable=W0212
@@ -453,8 +459,6 @@ async def add_result_details(conn: SearchConnection, results: List[BaseResultT],
log().comment('Query keywords') log().comment('Query keywords')
for result in results: for result in results:
await complete_keywords(conn, result) await complete_keywords(conn, result)
for result in results:
result.localize(details.locales)
def _result_row_to_address_row(row: SaRow) -> AddressLine: def _result_row_to_address_row(row: SaRow) -> AddressLine:

View File

@@ -206,7 +206,7 @@ class SearchBuilder:
partials_indexed = all(t.is_indexed for t in name_partials) \ partials_indexed = all(t.is_indexed for t in name_partials) \
and all(t.is_indexed for t in addr_partials) and all(t.is_indexed for t in addr_partials)
exp_count = min(t.count for t in name_partials) / (2**(len(name_partials) - 1)) exp_count = min(t.count for t in name_partials)
if (len(name_partials) > 3 or exp_count < 3000) and partials_indexed: if (len(name_partials) > 3 or exp_count < 3000) and partials_indexed:
yield penalty, exp_count, dbf.lookup_by_names(name_tokens, addr_tokens) yield penalty, exp_count, dbf.lookup_by_names(name_tokens, addr_tokens)

View File

@@ -512,8 +512,8 @@ class PostcodeSearch(AbstractSearch):
sql = sql.where(t.c.geometry.intersects(VIEWBOX_PARAM)) sql = sql.where(t.c.geometry.intersects(VIEWBOX_PARAM))
else: else:
penalty += sa.case((t.c.geometry.intersects(VIEWBOX_PARAM), 0.0), penalty += sa.case((t.c.geometry.intersects(VIEWBOX_PARAM), 0.0),
(t.c.geometry.intersects(VIEWBOX2_PARAM), 0.5), (t.c.geometry.intersects(VIEWBOX2_PARAM), 1.0),
else_=1.0) else_=2.0)
if details.near is not None: if details.near is not None:
if details.near_radius is not None: if details.near_radius is not None:
@@ -634,8 +634,8 @@ class PlaceSearch(AbstractSearch):
sql = sql.where(tsearch.c.centroid.ST_Intersects_no_index(VIEWBOX2_PARAM)) sql = sql.where(tsearch.c.centroid.ST_Intersects_no_index(VIEWBOX2_PARAM))
else: else:
penalty += sa.case((t.c.geometry.intersects(VIEWBOX_PARAM), 0.0), penalty += sa.case((t.c.geometry.intersects(VIEWBOX_PARAM), 0.0),
(t.c.geometry.intersects(VIEWBOX2_PARAM), 0.5), (t.c.geometry.intersects(VIEWBOX2_PARAM), 1.0),
else_=1.0) else_=2.0)
if details.near is not None: if details.near is not None:
if details.near_radius is not None: if details.near_radius is not None:

View File

@@ -7,15 +7,13 @@
""" """
Public interface to the search code. Public interface to the search code.
""" """
from typing import List, Any, Optional, Iterator, Tuple, Dict from typing import List, Any, Optional, Iterator, Tuple
import itertools import itertools
import re
import datetime as dt import datetime as dt
import difflib
from nominatim.api.connection import SearchConnection from nominatim.api.connection import SearchConnection
from nominatim.api.types import SearchDetails from nominatim.api.types import SearchDetails
from nominatim.api.results import SearchResult, SearchResults, add_result_details from nominatim.api.results import SearchResults, add_result_details
from nominatim.api.search.token_assignment import yield_token_assignments from nominatim.api.search.token_assignment import yield_token_assignments
from nominatim.api.search.db_search_builder import SearchBuilder, build_poi_search, wrap_near_search from nominatim.api.search.db_search_builder import SearchBuilder, build_poi_search, wrap_near_search
from nominatim.api.search.db_searches import AbstractSearch from nominatim.api.search.db_searches import AbstractSearch
@@ -75,86 +73,42 @@ class ForwardGeocoder:
is found. is found.
""" """
log().section('Execute database searches') log().section('Execute database searches')
results: Dict[Any, SearchResult] = {} results = SearchResults()
end_time = dt.datetime.now() + self.timeout end_time = dt.datetime.now() + self.timeout
num_results = 0
min_ranking = 1000.0 min_ranking = 1000.0
prev_penalty = 0.0 prev_penalty = 0.0
for i, search in enumerate(searches): for i, search in enumerate(searches):
if search.penalty > prev_penalty and (search.penalty > min_ranking or i > 20): if search.penalty > prev_penalty and (search.penalty > min_ranking or i > 20):
break break
log().table_dump(f"{i + 1}. Search", _dump_searches([search], query)) log().table_dump(f"{i + 1}. Search", _dump_searches([search], query))
lookup_results = await search.lookup(self.conn, self.params) for result in await search.lookup(self.conn, self.params):
for result in lookup_results: results.append(result)
rhash = (result.source_table, result.place_id,
result.housenumber, result.country_code)
prevresult = results.get(rhash)
if prevresult:
prevresult.accuracy = min(prevresult.accuracy, result.accuracy)
else:
results[rhash] = result
min_ranking = min(min_ranking, result.ranking + 0.5, search.penalty + 0.3) min_ranking = min(min_ranking, result.ranking + 0.5, search.penalty + 0.3)
log().result_dump('Results', ((r.accuracy, r) for r in lookup_results)) log().result_dump('Results', ((r.accuracy, r) for r in results[num_results:]))
num_results = len(results)
prev_penalty = search.penalty prev_penalty = search.penalty
if dt.datetime.now() >= end_time: if dt.datetime.now() >= end_time:
break break
return SearchResults(results.values())
def sort_and_cut_results(self, results: SearchResults) -> SearchResults:
""" Remove badly matching results, sort by ranking and
limit to the configured number of results.
"""
if results: if results:
min_ranking = min(r.ranking for r in results) min_ranking = min(r.ranking for r in results)
results = SearchResults(r for r in results if r.ranking < min_ranking + 0.5) results = SearchResults(r for r in results if r.ranking < min_ranking + 0.5)
results.sort(key=lambda r: r.ranking)
if results: if results:
min_rank = results[0].rank_search min_rank = min(r.rank_search for r in results)
results = SearchResults(r for r in results results = SearchResults(r for r in results
if r.ranking + 0.05 * (r.rank_search - min_rank) if r.ranking + 0.05 * (r.rank_search - min_rank)
< min_ranking + 0.5) < min_ranking + 0.5)
results.sort(key=lambda r: r.accuracy - r.calculated_importance())
results = SearchResults(results[:self.limit]) results = SearchResults(results[:self.limit])
return results return results
def rerank_by_query(self, query: QueryStruct, results: SearchResults) -> None:
""" Adjust the accuracy of the localized result according to how well
they match the original query.
"""
assert self.query_analyzer is not None
qwords = [word for phrase in query.source
for word in re.split('[, ]+', phrase.text) if word]
if not qwords:
return
for result in results:
if not result.display_name:
continue
distance = 0.0
norm = self.query_analyzer.normalize_text(result.display_name)
words = set((w for w in norm.split(' ') if w))
if not words:
continue
for qword in qwords:
wdist = max(difflib.SequenceMatcher(a=qword, b=w).quick_ratio() for w in words)
if wdist < 0.5:
distance += len(qword)
else:
distance += (1.0 - wdist) * len(qword)
# Compensate for the fact that country names do not get a
# match penalty yet by the tokenizer.
# Temporary hack that needs to be removed!
if result.rank_address == 4:
distance *= 2
result.accuracy += distance * 0.4 / sum(len(w) for w in qwords)
async def lookup_pois(self, categories: List[Tuple[str, str]], async def lookup_pois(self, categories: List[Tuple[str, str]],
phrases: List[Phrase]) -> SearchResults: phrases: List[Phrase]) -> SearchResults:
""" Look up places by category. If phrase is given, a place search """ Look up places by category. If phrase is given, a place search
@@ -169,16 +123,13 @@ class ForwardGeocoder:
if query: if query:
searches = [wrap_near_search(categories, s) for s in searches[:50]] searches = [wrap_near_search(categories, s) for s in searches[:50]]
results = await self.execute_searches(query, searches) results = await self.execute_searches(query, searches)
await add_result_details(self.conn, results, self.params)
log().result_dump('Preliminary Results', ((r.accuracy, r) for r in results))
results = self.sort_and_cut_results(results)
else: else:
results = SearchResults() results = SearchResults()
else: else:
search = build_poi_search(categories, self.params.countries) search = build_poi_search(categories, self.params.countries)
results = await search.lookup(self.conn, self.params) results = await search.lookup(self.conn, self.params)
await add_result_details(self.conn, results, self.params)
await add_result_details(self.conn, results, self.params)
log().result_dump('Final Results', ((r.accuracy, r) for r in results)) log().result_dump('Final Results', ((r.accuracy, r) for r in results))
return results return results
@@ -199,10 +150,6 @@ class ForwardGeocoder:
# Execute SQL until an appropriate result is found. # Execute SQL until an appropriate result is found.
results = await self.execute_searches(query, searches[:50]) results = await self.execute_searches(query, searches[:50])
await add_result_details(self.conn, results, self.params) await add_result_details(self.conn, results, self.params)
log().result_dump('Preliminary Results', ((r.accuracy, r) for r in results))
self.rerank_by_query(query, results)
log().result_dump('Results after reranking', ((r.accuracy, r) for r in results))
results = self.sort_and_cut_results(results)
log().result_dump('Final Results', ((r.accuracy, r) for r in results)) log().result_dump('Final Results', ((r.accuracy, r) for r in results))
return results return results

View File

@@ -127,15 +127,6 @@ class LegacyQueryAnalyzer(AbstractQueryAnalyzer):
return query return query
def normalize_text(self, text: str) -> str:
""" Bring the given text into a normalized form.
This only removes case, so some difference with the normalization
in the phrase remains.
"""
return text.lower()
def split_query(self, query: qmod.QueryStruct) -> Tuple[List[str], def split_query(self, query: qmod.QueryStruct) -> Tuple[List[str],
Dict[str, List[qmod.TokenRange]]]: Dict[str, List[qmod.TokenRange]]]:
""" Transliterate the phrases and split them into tokens. """ Transliterate the phrases and split them into tokens.

View File

@@ -30,15 +30,6 @@ class AbstractQueryAnalyzer(ABC):
""" """
@abstractmethod
def normalize_text(self, text: str) -> str:
""" Bring the given text into a normalized form. That is the
standardized form search will work with. All information removed
at this stage is inevitably lost.
"""
async def make_query_analyzer(conn: SearchConnection) -> AbstractQueryAnalyzer: async def make_query_analyzer(conn: SearchConnection) -> AbstractQueryAnalyzer:
""" Create a query analyzer for the tokenizer used by the database. """ Create a query analyzer for the tokenizer used by the database.
""" """

View File

@@ -17,7 +17,6 @@ from struct import unpack
from binascii import unhexlify from binascii import unhexlify
from nominatim.errors import UsageError from nominatim.errors import UsageError
from nominatim.api.localization import Locales
# pylint: disable=no-member,too-many-boolean-expressions,too-many-instance-attributes # pylint: disable=no-member,too-many-boolean-expressions,too-many-instance-attributes
@@ -387,7 +386,7 @@ TParam = TypeVar('TParam', bound='LookupDetails') # pylint: disable=invalid-name
@dataclasses.dataclass @dataclasses.dataclass
class LookupDetails: class LookupDetails:
""" Collection of parameters that define which kind of details are """ Collection of parameters that define the amount of details
returned with a lookup or details result. returned with a lookup or details result.
""" """
geometry_output: GeometryFormat = GeometryFormat.NONE geometry_output: GeometryFormat = GeometryFormat.NONE
@@ -414,9 +413,6 @@ class LookupDetails:
0.0 means the original geometry is kept. The higher the value, the 0.0 means the original geometry is kept. The higher the value, the
more the geometry gets simplified. more the geometry gets simplified.
""" """
locales: Locales = Locales()
""" Preferred languages for localization of results.
"""
@classmethod @classmethod
def from_kwargs(cls: Type[TParam], kwargs: Dict[str, Any]) -> TParam: def from_kwargs(cls: Type[TParam], kwargs: Dict[str, Any]) -> TParam:

View File

@@ -37,7 +37,7 @@ def zoom_to_rank(zoom: int) -> int:
return REVERSE_MAX_RANKS[max(0, min(18, zoom))] return REVERSE_MAX_RANKS[max(0, min(18, zoom))]
FEATURE_TYPE_TO_RANK: Dict[Optional[str], Tuple[int, int]] = { FEATURE_TYPE_TO_RANK: Dict[Optional[str], Any] = {
'country': (4, 4), 'country': (4, 4),
'state': (8, 8), 'state': (8, 8),
'city': (14, 16), 'city': (14, 16),

View File

@@ -308,8 +308,7 @@ async def details_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) ->
keywords=params.get_bool('keywords', False), keywords=params.get_bool('keywords', False),
geometry_output = napi.GeometryFormat.GEOJSON geometry_output = napi.GeometryFormat.GEOJSON
if params.get_bool('polygon_geojson', False) if params.get_bool('polygon_geojson', False)
else napi.GeometryFormat.NONE, else napi.GeometryFormat.NONE
locales=locales
) )
if debug: if debug:
@@ -318,6 +317,8 @@ async def details_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) ->
if result is None: if result is None:
params.raise_error('No place with that OSM ID found.', status=404) params.raise_error('No place with that OSM ID found.', status=404)
result.localize(locales)
output = formatting.format_result(result, fmt, output = formatting.format_result(result, fmt,
{'locales': locales, {'locales': locales,
'group_hierarchy': params.get_bool('group_hierarchy', False), 'group_hierarchy': params.get_bool('group_hierarchy', False),
@@ -336,7 +337,6 @@ async def reverse_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) ->
details = params.parse_geometry_details(fmt) details = params.parse_geometry_details(fmt)
details['max_rank'] = helpers.zoom_to_rank(params.get_int('zoom', 18)) details['max_rank'] = helpers.zoom_to_rank(params.get_int('zoom', 18))
details['layers'] = params.get_layers() details['layers'] = params.get_layers()
details['locales'] = napi.Locales.from_accept_languages(params.get_accepted_languages())
result = await api.reverse(coord, **details) result = await api.reverse(coord, **details)
@@ -357,6 +357,9 @@ async def reverse_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) ->
'namedetails': params.get_bool('namedetails', False), 'namedetails': params.get_bool('namedetails', False),
'addressdetails': params.get_bool('addressdetails', True)} 'addressdetails': params.get_bool('addressdetails', True)}
if result:
result.localize(napi.Locales.from_accept_languages(params.get_accepted_languages()))
output = formatting.format_result(napi.ReverseResults([result] if result else []), output = formatting.format_result(napi.ReverseResults([result] if result else []),
fmt, fmt_options) fmt, fmt_options)
@@ -369,7 +372,6 @@ async def lookup_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) -> A
fmt = params.parse_format(napi.SearchResults, 'xml') fmt = params.parse_format(napi.SearchResults, 'xml')
debug = params.setup_debugging() debug = params.setup_debugging()
details = params.parse_geometry_details(fmt) details = params.parse_geometry_details(fmt)
details['locales'] = napi.Locales.from_accept_languages(params.get_accepted_languages())
places = [] places = []
for oid in (params.get('osm_ids') or '').split(','): for oid in (params.get('osm_ids') or '').split(','):
@@ -392,6 +394,8 @@ async def lookup_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) -> A
'namedetails': params.get_bool('namedetails', False), 'namedetails': params.get_bool('namedetails', False),
'addressdetails': params.get_bool('addressdetails', True)} 'addressdetails': params.get_bool('addressdetails', True)}
results.localize(napi.Locales.from_accept_languages(params.get_accepted_languages()))
output = formatting.format_result(results, fmt, fmt_options) output = formatting.format_result(results, fmt, fmt_options)
return params.build_response(output, num_results=len(results)) return params.build_response(output, num_results=len(results))
@@ -452,8 +456,6 @@ async def search_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) -> A
else: else:
details['layers'] = params.get_layers() details['layers'] = params.get_layers()
details['locales'] = napi.Locales.from_accept_languages(params.get_accepted_languages())
# unstructured query parameters # unstructured query parameters
query = params.get('q', None) query = params.get('q', None)
# structured query parameters # structured query parameters
@@ -478,6 +480,8 @@ async def search_endpoint(api: napi.NominatimAPIAsync, params: ASGIAdaptor) -> A
except UsageError as err: except UsageError as err:
params.raise_error(str(err)) params.raise_error(str(err))
results.localize(napi.Locales.from_accept_languages(params.get_accepted_languages()))
if details['dedupe'] and len(results) > 1: if details['dedupe'] and len(results) > 1:
results = helpers.deduplicate_results(results, max_results) results = helpers.deduplicate_results(results, max_results)

View File

@@ -90,20 +90,17 @@ class AdminFuncs:
api = napi.NominatimAPI(args.project_dir) api = napi.NominatimAPI(args.project_dir)
try: try:
if args.target != 'search': if args.target != 'reverse':
for _ in range(1000): for _ in range(1000):
api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)), api.reverse((random.uniform(-90, 90), random.uniform(-180, 180)),
address_details=True) address_details=True)
if args.target != 'reverse': if args.target != 'search':
from ..tokenizer import factory as tokenizer_factory from ..tokenizer import factory as tokenizer_factory
tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config) tokenizer = tokenizer_factory.get_tokenizer_for_db(args.config)
with connect(args.config.get_libpq_dsn()) as conn: with connect(args.config.get_libpq_dsn()) as conn:
if conn.table_exists('search_name'): words = tokenizer.most_frequent_words(conn, 1000)
words = tokenizer.most_frequent_words(conn, 1000)
else:
words = []
for word in words: for word in words:
api.search(word) api.search(word)

View File

@@ -109,8 +109,7 @@ class APISearch:
'countries': args.countrycodes, 'countries': args.countrycodes,
'excluded': args.exclude_place_ids, 'excluded': args.exclude_place_ids,
'viewbox': args.viewbox, 'viewbox': args.viewbox,
'bounded_viewbox': args.bounded, 'bounded_viewbox': args.bounded
'locales': args.get_locales(api.config.DEFAULT_LANGUAGE)
} }
if args.query: if args.query:
@@ -125,6 +124,9 @@ class APISearch:
country=args.country, country=args.country,
**params) **params)
for result in results:
result.localize(args.get_locales(api.config.DEFAULT_LANGUAGE))
if args.dedupe and len(results) > 1: if args.dedupe and len(results) > 1:
results = deduplicate_results(results, args.limit) results = deduplicate_results(results, args.limit)
@@ -185,14 +187,14 @@ class APIReverse:
layers=args.get_layers(napi.DataLayer.ADDRESS | napi.DataLayer.POI), layers=args.get_layers(napi.DataLayer.ADDRESS | napi.DataLayer.POI),
address_details=True, # needed for display name address_details=True, # needed for display name
geometry_output=args.get_geometry_output(), geometry_output=args.get_geometry_output(),
geometry_simplification=args.polygon_threshold, geometry_simplification=args.polygon_threshold)
locales=args.get_locales(api.config.DEFAULT_LANGUAGE))
if args.format == 'debug': if args.format == 'debug':
print(loglib.get_and_disable()) print(loglib.get_and_disable())
return 0 return 0
if result: if result:
result.localize(args.get_locales(api.config.DEFAULT_LANGUAGE))
output = api_output.format_result( output = api_output.format_result(
napi.ReverseResults([result]), napi.ReverseResults([result]),
args.format, args.format,
@@ -247,8 +249,10 @@ class APILookup:
results = api.lookup(places, results = api.lookup(places,
address_details=True, # needed for display name address_details=True, # needed for display name
geometry_output=args.get_geometry_output(), geometry_output=args.get_geometry_output(),
geometry_simplification=args.polygon_threshold or 0.0, geometry_simplification=args.polygon_threshold or 0.0)
locales=args.get_locales(api.config.DEFAULT_LANGUAGE))
for result in results:
result.localize(args.get_locales(api.config.DEFAULT_LANGUAGE))
output = api_output.format_result( output = api_output.format_result(
results, results,
@@ -322,7 +326,6 @@ class APIDetails:
api = napi.NominatimAPI(args.project_dir) api = napi.NominatimAPI(args.project_dir)
locales = args.get_locales(api.config.DEFAULT_LANGUAGE)
result = api.details(place, result = api.details(place,
address_details=args.addressdetails, address_details=args.addressdetails,
linked_places=args.linkedplaces, linked_places=args.linkedplaces,
@@ -330,11 +333,13 @@ class APIDetails:
keywords=args.keywords, keywords=args.keywords,
geometry_output=napi.GeometryFormat.GEOJSON geometry_output=napi.GeometryFormat.GEOJSON
if args.polygon_geojson if args.polygon_geojson
else napi.GeometryFormat.NONE, else napi.GeometryFormat.NONE)
locales=locales)
if result: if result:
locales = args.get_locales(api.config.DEFAULT_LANGUAGE)
result.localize(locales)
output = api_output.format_result( output = api_output.format_result(
result, result,
'json', 'json',

View File

@@ -69,8 +69,8 @@ class DBConnection:
self.current_params: Optional[Sequence[Any]] = None self.current_params: Optional[Sequence[Any]] = None
self.ignore_sql_errors = ignore_sql_errors self.ignore_sql_errors = ignore_sql_errors
self.conn: Optional['psycopg2._psycopg.connection'] = None self.conn: Optional['psycopg2.connection'] = None
self.cursor: Optional['psycopg2._psycopg.cursor'] = None self.cursor: Optional['psycopg2.cursor'] = None
self.connect(cursor_factory=cursor_factory) self.connect(cursor_factory=cursor_factory)
def close(self) -> None: def close(self) -> None:
@@ -78,7 +78,7 @@ class DBConnection:
""" """
if self.conn is not None: if self.conn is not None:
if self.cursor is not None: if self.cursor is not None:
self.cursor.close() self.cursor.close() # type: ignore[no-untyped-call]
self.cursor = None self.cursor = None
self.conn.close() self.conn.close()

View File

@@ -31,7 +31,7 @@ class Cursor(psycopg2.extras.DictCursor):
""" Query execution that logs the SQL query when debugging is enabled. """ Query execution that logs the SQL query when debugging is enabled.
""" """
if LOG.isEnabledFor(logging.DEBUG): if LOG.isEnabledFor(logging.DEBUG):
LOG.debug(self.mogrify(query, args).decode('utf-8')) LOG.debug(self.mogrify(query, args).decode('utf-8')) # type: ignore[no-untyped-call]
super().execute(query, args) super().execute(query, args)

View File

@@ -118,4 +118,4 @@ class CopyBuffer:
""" """
if self.buffer.tell() > 0: if self.buffer.tell() > 0:
self.buffer.seek(0) self.buffer.seek(0)
cur.copy_from(self.buffer, table, columns=columns) cur.copy_from(self.buffer, table, columns=columns) # type: ignore[no-untyped-call]

View File

@@ -12,13 +12,14 @@ import os
import subprocess import subprocess
import sys import sys
from pathlib import Path from pathlib import Path
from typing import List, Optional, Tuple, Union from typing import List, Optional, Tuple, Union, cast
import psutil import psutil
from psycopg2.extensions import make_dsn, parse_dsn from psycopg2.extensions import make_dsn, parse_dsn
from nominatim.config import Configuration from nominatim.config import Configuration
from nominatim.db.connection import connect from nominatim.db.connection import connect
from nominatim.typing import DictCursorResults
from nominatim.version import NOMINATIM_VERSION from nominatim.version import NOMINATIM_VERSION
@@ -106,15 +107,15 @@ def report_system_information(config: Configuration) -> None:
postgresql_ver: str = convert_version(conn.server_version_tuple()) postgresql_ver: str = convert_version(conn.server_version_tuple())
with conn.cursor() as cur: with conn.cursor() as cur:
num = cur.scalar("SELECT count(*) FROM pg_catalog.pg_database WHERE datname=%s", cur.execute(f"""
(parse_dsn(config.get_libpq_dsn())['dbname'], )) SELECT datname FROM pg_catalog.pg_database
nominatim_db_exists = num == 1 if isinstance(num, int) else False WHERE datname='{parse_dsn(config.get_libpq_dsn())['dbname']}'""")
nominatim_db_exists = cast(Optional[DictCursorResults], cur.fetchall())
if nominatim_db_exists: if nominatim_db_exists:
with connect(config.get_libpq_dsn()) as conn: with connect(config.get_libpq_dsn()) as conn:
postgis_ver: str = convert_version(conn.postgis_version_tuple()) postgis_ver: str = convert_version(conn.postgis_version_tuple())
else: else:
postgis_ver = "Unable to connect to database" postgis_ver = "Unable to connect to database"
postgresql_config: str = get_postgresql_config(int(float(postgresql_ver))) postgresql_config: str = get_postgresql_config(int(float(postgresql_ver)))

View File

@@ -34,7 +34,7 @@ class NominatimVersion(NamedTuple):
return f"{self.major}.{self.minor}.{self.patch_level}-{self.db_patch_level}" return f"{self.major}.{self.minor}.{self.patch_level}-{self.db_patch_level}"
NOMINATIM_VERSION = NominatimVersion(4, 3, 2, 0) NOMINATIM_VERSION = NominatimVersion(4, 3, 0, 0)
POSTGRESQL_REQUIRED_VERSION = (9, 6) POSTGRESQL_REQUIRED_VERSION = (9, 6)
POSTGIS_REQUIRED_VERSION = (2, 2) POSTGIS_REQUIRED_VERSION = (2, 2)

View File

@@ -56,7 +56,7 @@ Feature: Structured search queries
| Liechtenstein | | Liechtenstein |
And results contain And results contain
| class | type | | class | type |
| amenity | ^(pub)\|(bar)\|(restaurant) | | amenity | ^(pub)\|(bar) |
#176 #176
Scenario: Structured search restricts rank Scenario: Structured search restricts rank

View File

@@ -133,7 +133,7 @@ class TestNameOnlySearches:
@pytest.mark.parametrize('viewbox', ['5.0,4.0,6.0,5.0', '5.7,4.0,6.0,5.0']) @pytest.mark.parametrize('viewbox', ['5.0,4.0,6.0,5.0', '5.7,4.0,6.0,5.0'])
def test_prefer_viewbox(self, apiobj, viewbox): def test_prefer_viewbox(self, apiobj, viewbox):
lookup = FieldLookup('name_vector', [1, 2], 'lookup_all') lookup = FieldLookup('name_vector', [1, 2], 'lookup_all')
ranking = FieldRanking('name_vector', 0.2, [RankedTokens(0.0, [21])]) ranking = FieldRanking('name_vector', 0.9, [RankedTokens(0.0, [21])])
results = run_search(apiobj, 0.1, [lookup], [ranking]) results = run_search(apiobj, 0.1, [lookup], [ranking])
assert [r.place_id for r in results] == [101, 100] assert [r.place_id for r in results] == [101, 100]

View File

@@ -150,20 +150,17 @@ def test_lookup_placex_with_address_details(apiobj):
category=('highway', 'residential'), category=('highway', 'residential'),
names={'name': 'Street'}, extratags={}, names={'name': 'Street'}, extratags={},
admin_level=15, fromarea=True, isaddress=True, admin_level=15, fromarea=True, isaddress=True,
rank_address=26, distance=0.0, rank_address=26, distance=0.0),
local_name='Street'),
napi.AddressLine(place_id=1000, osm_object=('N', 3333), napi.AddressLine(place_id=1000, osm_object=('N', 3333),
category=('place', 'suburb'), category=('place', 'suburb'),
names={'name': 'Smallplace'}, extratags={}, names={'name': 'Smallplace'}, extratags={},
admin_level=13, fromarea=False, isaddress=True, admin_level=13, fromarea=False, isaddress=True,
rank_address=23, distance=0.0034, rank_address=23, distance=0.0034),
local_name='Smallplace'),
napi.AddressLine(place_id=1001, osm_object=('N', 3334), napi.AddressLine(place_id=1001, osm_object=('N', 3334),
category=('place', 'city'), category=('place', 'city'),
names={'name': 'Bigplace'}, extratags={}, names={'name': 'Bigplace'}, extratags={},
admin_level=15, fromarea=True, isaddress=True, admin_level=15, fromarea=True, isaddress=True,
rank_address=16, distance=0.0, rank_address=16, distance=0.0),
local_name='Bigplace'),
napi.AddressLine(place_id=None, osm_object=None, napi.AddressLine(place_id=None, osm_object=None,
category=('place', 'country_code'), category=('place', 'country_code'),
names={'ref': 'pl'}, extratags={}, names={'ref': 'pl'}, extratags={},
@@ -344,26 +341,22 @@ def test_lookup_osmline_with_address_details(apiobj):
category=('place', 'house_number'), category=('place', 'house_number'),
names={'ref': '2'}, extratags={}, names={'ref': '2'}, extratags={},
admin_level=None, fromarea=True, isaddress=True, admin_level=None, fromarea=True, isaddress=True,
rank_address=28, distance=0.0, rank_address=28, distance=0.0),
local_name='2'),
napi.AddressLine(place_id=332, osm_object=('W', 4), napi.AddressLine(place_id=332, osm_object=('W', 4),
category=('highway', 'residential'), category=('highway', 'residential'),
names={'name': 'Street'}, extratags={}, names={'name': 'Street'}, extratags={},
admin_level=15, fromarea=True, isaddress=True, admin_level=15, fromarea=True, isaddress=True,
rank_address=26, distance=0.0, rank_address=26, distance=0.0),
local_name='Street'),
napi.AddressLine(place_id=1000, osm_object=('N', 3333), napi.AddressLine(place_id=1000, osm_object=('N', 3333),
category=('place', 'suburb'), category=('place', 'suburb'),
names={'name': 'Smallplace'}, extratags={}, names={'name': 'Smallplace'}, extratags={},
admin_level=13, fromarea=False, isaddress=True, admin_level=13, fromarea=False, isaddress=True,
rank_address=23, distance=0.0034, rank_address=23, distance=0.0034),
local_name='Smallplace'),
napi.AddressLine(place_id=1001, osm_object=('N', 3334), napi.AddressLine(place_id=1001, osm_object=('N', 3334),
category=('place', 'city'), category=('place', 'city'),
names={'name': 'Bigplace'}, extratags={}, names={'name': 'Bigplace'}, extratags={},
admin_level=15, fromarea=True, isaddress=True, admin_level=15, fromarea=True, isaddress=True,
rank_address=16, distance=0.0, rank_address=16, distance=0.0),
local_name='Bigplace'),
napi.AddressLine(place_id=None, osm_object=None, napi.AddressLine(place_id=None, osm_object=None,
category=('place', 'country_code'), category=('place', 'country_code'),
names={'ref': 'pl'}, extratags={}, names={'ref': 'pl'}, extratags={},
@@ -448,26 +441,22 @@ def test_lookup_tiger_with_address_details(apiobj):
category=('place', 'house_number'), category=('place', 'house_number'),
names={'ref': '2'}, extratags={}, names={'ref': '2'}, extratags={},
admin_level=None, fromarea=True, isaddress=True, admin_level=None, fromarea=True, isaddress=True,
rank_address=28, distance=0.0, rank_address=28, distance=0.0),
local_name='2'),
napi.AddressLine(place_id=332, osm_object=('W', 4), napi.AddressLine(place_id=332, osm_object=('W', 4),
category=('highway', 'residential'), category=('highway', 'residential'),
names={'name': 'Street'}, extratags={}, names={'name': 'Street'}, extratags={},
admin_level=15, fromarea=True, isaddress=True, admin_level=15, fromarea=True, isaddress=True,
rank_address=26, distance=0.0, rank_address=26, distance=0.0),
local_name='Street'),
napi.AddressLine(place_id=1000, osm_object=('N', 3333), napi.AddressLine(place_id=1000, osm_object=('N', 3333),
category=('place', 'suburb'), category=('place', 'suburb'),
names={'name': 'Smallplace'}, extratags={}, names={'name': 'Smallplace'}, extratags={},
admin_level=13, fromarea=False, isaddress=True, admin_level=13, fromarea=False, isaddress=True,
rank_address=23, distance=0.0034, rank_address=23, distance=0.0034),
local_name='Smallplace'),
napi.AddressLine(place_id=1001, osm_object=('N', 3334), napi.AddressLine(place_id=1001, osm_object=('N', 3334),
category=('place', 'city'), category=('place', 'city'),
names={'name': 'Bigplace'}, extratags={}, names={'name': 'Bigplace'}, extratags={},
admin_level=15, fromarea=True, isaddress=True, admin_level=15, fromarea=True, isaddress=True,
rank_address=16, distance=0.0, rank_address=16, distance=0.0),
local_name='Bigplace'),
napi.AddressLine(place_id=None, osm_object=None, napi.AddressLine(place_id=None, osm_object=None,
category=('place', 'country_code'), category=('place', 'country_code'),
names={'ref': 'us'}, extratags={}, names={'ref': 'us'}, extratags={},
@@ -547,20 +536,17 @@ def test_lookup_postcode_with_address_details(apiobj):
category=('place', 'suburb'), category=('place', 'suburb'),
names={'name': 'Smallplace'}, extratags={}, names={'name': 'Smallplace'}, extratags={},
admin_level=13, fromarea=True, isaddress=True, admin_level=13, fromarea=True, isaddress=True,
rank_address=23, distance=0.0, rank_address=23, distance=0.0),
local_name='Smallplace'),
napi.AddressLine(place_id=1001, osm_object=('N', 3334), napi.AddressLine(place_id=1001, osm_object=('N', 3334),
category=('place', 'city'), category=('place', 'city'),
names={'name': 'Bigplace'}, extratags={}, names={'name': 'Bigplace'}, extratags={},
admin_level=15, fromarea=True, isaddress=True, admin_level=15, fromarea=True, isaddress=True,
rank_address=16, distance=0.0, rank_address=16, distance=0.0),
local_name='Bigplace'),
napi.AddressLine(place_id=None, osm_object=None, napi.AddressLine(place_id=None, osm_object=None,
category=('place', 'postcode'), category=('place', 'postcode'),
names={'ref': '34 425'}, extratags={}, names={'ref': '34 425'}, extratags={},
admin_level=None, fromarea=False, isaddress=True, admin_level=None, fromarea=False, isaddress=True,
rank_address=5, distance=0.0, rank_address=5, distance=0.0),
local_name='34 425'),
napi.AddressLine(place_id=None, osm_object=None, napi.AddressLine(place_id=None, osm_object=None,
category=('place', 'country_code'), category=('place', 'country_code'),
names={'ref': 'gb'}, extratags={}, names={'ref': 'gb'}, extratags={},

View File

@@ -67,9 +67,7 @@ class TestCliReverseCall:
result = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'), result = napi.ReverseResult(napi.SourceTable.PLACEX, ('place', 'thing'),
napi.Point(1.0, -3.0), napi.Point(1.0, -3.0),
names={'name':'Name', 'name:fr': 'Nom'}, names={'name':'Name', 'name:fr': 'Nom'},
extratags={'extra':'Extra'}, extratags={'extra':'Extra'})
locale_name='Name',
display_name='Name')
monkeypatch.setattr(napi.NominatimAPI, 'reverse', monkeypatch.setattr(napi.NominatimAPI, 'reverse',
lambda *args, **kwargs: result) lambda *args, **kwargs: result)
@@ -111,6 +109,16 @@ class TestCliReverseCall:
assert out['type'] == 'FeatureCollection' assert out['type'] == 'FeatureCollection'
def test_reverse_language(self, cli_call, tmp_path, capsys):
result = cli_call('reverse', '--project-dir', str(tmp_path),
'--lat', '34', '--lon', '34', '--lang', 'fr')
assert result == 0
out = json.loads(capsys.readouterr().out)
assert out['name'] == 'Nom'
class TestCliLookupCall: class TestCliLookupCall:
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@@ -118,9 +126,7 @@ class TestCliLookupCall:
result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'),
napi.Point(1.0, -3.0), napi.Point(1.0, -3.0),
names={'name':'Name', 'name:fr': 'Nom'}, names={'name':'Name', 'name:fr': 'Nom'},
extratags={'extra':'Extra'}, extratags={'extra':'Extra'})
locale_name='Name',
display_name='Name')
monkeypatch.setattr(napi.NominatimAPI, 'lookup', monkeypatch.setattr(napi.NominatimAPI, 'lookup',
lambda *args, **kwargs: napi.SearchResults([result])) lambda *args, **kwargs: napi.SearchResults([result]))
@@ -144,11 +150,9 @@ class TestCliLookupCall:
]) ])
def test_search(cli_call, tmp_path, capsys, monkeypatch, endpoint, params): def test_search(cli_call, tmp_path, capsys, monkeypatch, endpoint, params):
result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'), result = napi.SearchResult(napi.SourceTable.PLACEX, ('place', 'thing'),
napi.Point(1.0, -3.0), napi.Point(1.0, -3.0),
names={'name':'Name', 'name:fr': 'Nom'}, names={'name':'Name', 'name:fr': 'Nom'},
extratags={'extra':'Extra'}, extratags={'extra':'Extra'})
locale_name='Name',
display_name='Name')
monkeypatch.setattr(napi.NominatimAPI, endpoint, monkeypatch.setattr(napi.NominatimAPI, endpoint,
lambda *args, **kwargs: napi.SearchResults([result])) lambda *args, **kwargs: napi.SearchResults([result]))

View File

@@ -27,7 +27,7 @@ export DEBIAN_FRONTEND=noninteractive #DOCS:
postgresql-contrib-12 postgresql-12-postgis-3-scripts \ postgresql-contrib-12 postgresql-12-postgis-3-scripts \
php-cli php-pgsql php-intl libicu-dev python3-dotenv \ php-cli php-pgsql php-intl libicu-dev python3-dotenv \
python3-psycopg2 python3-psutil python3-jinja2 python3-pip \ python3-psycopg2 python3-psutil python3-jinja2 python3-pip \
python3-icu python3-datrie python3-yaml git python3-icu python3-datrie python3-yaml
# Some of the Python packages that come with Ubuntu 20.04 are too old, so # Some of the Python packages that come with Ubuntu 20.04 are too old, so
# install the latest version from pip: # install the latest version from pip:
@@ -109,25 +109,18 @@ fi #DOCS:
# #
if [ "x$1" == "xyes" ]; then #DOCS: :::sh if [ "x$1" == "xyes" ]; then #DOCS: :::sh
cd $USERHOME cd $USERHOME
git clone --recursive https://github.com/openstreetmap/Nominatim.git wget https://nominatim.org/release/Nominatim-4.3.2.tar.bz2
cd Nominatim tar xf Nominatim-4.3.2.tar.bz2
else #DOCS: else #DOCS:
cd $USERHOME/Nominatim #DOCS: cd $USERHOME/Nominatim #DOCS:
fi #DOCS: fi #DOCS:
# When installing the latest source from github, you also need to
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
wget -O data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory, # The code must be built in a separate directory. Create this directory,
# then configure and build Nominatim in there: # then configure and build Nominatim in there:
mkdir $USERHOME/build mkdir $USERHOME/build
cd $USERHOME/build cd $USERHOME/build
cmake $USERHOME/Nominatim cmake $USERHOME/Nominatim-4.3.2
make make
sudo make install sudo make install

View File

@@ -28,7 +28,7 @@ export DEBIAN_FRONTEND=noninteractive #DOCS:
php-cli php-pgsql php-intl libicu-dev python3-dotenv \ php-cli php-pgsql php-intl libicu-dev python3-dotenv \
python3-psycopg2 python3-psutil python3-jinja2 \ python3-psycopg2 python3-psutil python3-jinja2 \
python3-icu python3-datrie python3-sqlalchemy \ python3-icu python3-datrie python3-sqlalchemy \
python3-asyncpg python3-yaml git python3-asyncpg python3-yaml
# #
# System Configuration # System Configuration
@@ -104,25 +104,18 @@ fi #DOCS:
# #
if [ "x$1" == "xyes" ]; then #DOCS: :::sh if [ "x$1" == "xyes" ]; then #DOCS: :::sh
cd $USERHOME cd $USERHOME
git clone --recursive https://github.com/openstreetmap/Nominatim.git wget https://nominatim.org/release/Nominatim-4.3.2.tar.bz2
cd Nominatim tar xf Nominatim-4.3.2.tar.bz2
else #DOCS: else #DOCS:
cd $USERHOME/Nominatim #DOCS: cd $USERHOME/Nominatim #DOCS:
fi #DOCS: fi #DOCS:
# When installing the latest source from github, you also need to
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
wget -O data/country_osm_grid.sql.gz https://nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory, # The code must be built in a separate directory. Create this directory,
# then configure and build Nominatim in there: # then configure and build Nominatim in there:
mkdir $USERHOME/build mkdir $USERHOME/build
cd $USERHOME/build cd $USERHOME/build
cmake $USERHOME/Nominatim cmake $USERHOME/Nominatim-4.3.2
make make
sudo make install sudo make install