mirror of
https://github.com/osm-search/Nominatim.git
synced 2026-02-26 11:08:13 +00:00
split code into submodules
This commit is contained in:
21
src/nominatim_api/v1/__init__.py
Normal file
21
src/nominatim_api/v1/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# This file is part of Nominatim. (https://nominatim.org)
|
||||
#
|
||||
# Copyright (C) 2024 by the Nominatim developer community.
|
||||
# For a full list of authors see the git log.
|
||||
"""
|
||||
Implementation of API version v1 (aka the legacy version).
|
||||
"""
|
||||
|
||||
#pylint: disable=useless-import-alias
|
||||
|
||||
from .server_glue import (ASGIAdaptor as ASGIAdaptor,
|
||||
EndpointFunc as EndpointFunc,
|
||||
ROUTES as ROUTES)
|
||||
|
||||
from . import format as _format

# Convenience re-exports: expose the dispatcher functions of the v1
# formatter module directly at package level, so callers can use
# nominatim_api.v1.list_formats() etc. without touching the dispatcher.
list_formats = _format.dispatch.list_formats
supports_format = _format.dispatch.supports_format
format_result = _format.dispatch.format_result
|
||||
201
src/nominatim_api/v1/classtypes.py
Normal file
201
src/nominatim_api/v1/classtypes.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# This file is part of Nominatim. (https://nominatim.org)
|
||||
#
|
||||
# Copyright (C) 2024 by the Nominatim developer community.
|
||||
# For a full list of authors see the git log.
|
||||
"""
|
||||
Hard-coded information about tag categories.
|
||||
|
||||
These tables have been copied verbatim from the old PHP code. For future
|
||||
version a more flexible formatting is required.
|
||||
"""
|
||||
from typing import Tuple, Optional, Mapping, Union
|
||||
|
||||
from ..results import ReverseResult, SearchResult
|
||||
from ..types import Bbox
|
||||
|
||||
def get_label_tag(category: Tuple[str, str], extratags: Optional[Mapping[str, str]],
                  rank: int, country: Optional[str]) -> str:
    """ Create a label tag for the given place that can be used as an XML name.

        The label is derived from an explicit place/linked_place extratag
        if present, from the admin-boundary lookup table, or from the
        category itself, and is normalised to lower-case with underscores.
    """
    main, minor = category

    label = None
    if rank < 26 and extratags:
        # An explicit place tag wins; 'place' takes precedence over
        # 'linked_place'.
        for key in ('place', 'linked_place'):
            if key in extratags:
                label = extratags[key]
                break

    if label is None:
        if category == ('boundary', 'administrative'):
            # Country-specific label first, then the generic fallback.
            level = int(rank/2)
            label = (ADMIN_LABELS.get((country or '', level))
                     or ADMIN_LABELS.get(('', level))
                     or 'Administrative')
        elif minor == 'postal_code':
            label = 'postcode'
        elif rank < 26:
            label = minor if minor != 'yes' else main
        elif rank < 28:
            label = 'road'
        elif main == 'place' \
                and minor in ('house_number', 'house_name', 'country_code'):
            label = minor
        else:
            label = main

    return label.lower().replace(' ', '_')
|
||||
|
||||
|
||||
def bbox_from_result(result: Union[ReverseResult, SearchResult]) -> Bbox:
    """ Compute a bounding box for the result. For ways and relations
        a given boundingbox is used. For all other object, a box is computed
        around the centroid according to dimensions derived from the
        search rank.
    """
    is_node = result.osm_object and result.osm_object[0] == 'N'
    if result.bbox is not None and not is_node:
        return result.bbox

    # No stored box (or a plain node): synthesize one around the centroid.
    return Bbox.from_point(result.centroid,
                           NODE_EXTENT.get(result.category, 0.00005))
|
||||
|
||||
|
||||
# pylint: disable=line-too-long
# Attribution line added to every API response, as required by the ODbL
# licence of the OpenStreetMap data.
OSM_ATTRIBUTION = 'Data © OpenStreetMap contributors, ODbL 1.0. http://osm.org/copyright'


# Maps the one-letter OSM object type used internally to the full
# type name used in the API output.
OSM_TYPE_NAME = {
    'N': 'node',
    'W': 'way',
    'R': 'relation'
}


# Human-readable labels for administrative boundaries, keyed by
# (country code, admin level). The entries with an empty country code
# are the generic fallback; country-specific entries override them.
ADMIN_LABELS = {
    ('', 1): 'Continent',
    ('', 2): 'Country',
    ('', 3): 'Region',
    ('', 4): 'State',
    ('', 5): 'State District',
    ('', 6): 'County',
    ('', 7): 'Municipality',
    ('', 8): 'City',
    ('', 9): 'City District',
    ('', 10): 'Suburb',
    ('', 11): 'Neighbourhood',
    ('', 12): 'City Block',
    ('no', 3): 'State',
    ('no', 4): 'County',
    ('se', 3): 'State',
    ('se', 4): 'County'
}


# Map style icon names for well-known (class, type) categories. Used to
# build icon URLs in the json and xml output when an icon base URL is
# configured.
ICONS = {
    ('boundary', 'administrative'): 'poi_boundary_administrative',
    ('place', 'city'): 'poi_place_city',
    ('place', 'town'): 'poi_place_town',
    ('place', 'village'): 'poi_place_village',
    ('place', 'hamlet'): 'poi_place_village',
    ('place', 'suburb'): 'poi_place_village',
    ('place', 'locality'): 'poi_place_village',
    ('place', 'airport'): 'transport_airport2',
    ('aeroway', 'aerodrome'): 'transport_airport2',
    ('railway', 'station'): 'transport_train_station2',
    ('amenity', 'place_of_worship'): 'place_of_worship_unknown3',
    ('amenity', 'pub'): 'food_pub',
    ('amenity', 'bar'): 'food_bar',
    ('amenity', 'university'): 'education_university',
    ('tourism', 'museum'): 'tourist_museum',
    ('amenity', 'arts_centre'): 'tourist_art_gallery2',
    ('tourism', 'zoo'): 'tourist_zoo',
    ('tourism', 'theme_park'): 'poi_point_of_interest',
    ('tourism', 'attraction'): 'poi_point_of_interest',
    ('leisure', 'golf_course'): 'sport_golf',
    ('historic', 'castle'): 'tourist_castle',
    ('amenity', 'hospital'): 'health_hospital',
    ('amenity', 'school'): 'education_school',
    ('amenity', 'theatre'): 'tourist_theatre',
    ('amenity', 'library'): 'amenity_library',
    ('amenity', 'fire_station'): 'amenity_firestation3',
    ('amenity', 'police'): 'amenity_police2',
    ('amenity', 'bank'): 'money_bank2',
    ('amenity', 'post_office'): 'amenity_post_office',
    ('tourism', 'hotel'): 'accommodation_hotel2',
    ('amenity', 'cinema'): 'tourist_cinema',
    ('tourism', 'artwork'): 'tourist_art_gallery2',
    ('historic', 'archaeological_site'): 'tourist_archaeological2',
    ('amenity', 'doctors'): 'health_doctors',
    ('leisure', 'sports_centre'): 'sport_leisure_centre',
    ('leisure', 'swimming_pool'): 'sport_swimming_outdoor',
    ('shop', 'supermarket'): 'shopping_supermarket',
    ('shop', 'convenience'): 'shopping_convenience',
    ('amenity', 'restaurant'): 'food_restaurant',
    ('amenity', 'fast_food'): 'food_fastfood',
    ('amenity', 'cafe'): 'food_cafe',
    ('tourism', 'guest_house'): 'accommodation_bed_and_breakfast',
    ('amenity', 'pharmacy'): 'health_pharmacy_dispensing',
    ('amenity', 'fuel'): 'transport_fuel',
    ('natural', 'peak'): 'poi_peak',
    ('natural', 'wood'): 'landuse_coniferous_and_deciduous',
    ('shop', 'bicycle'): 'shopping_bicycle',
    ('shop', 'clothes'): 'shopping_clothes',
    ('shop', 'hairdresser'): 'shopping_hairdresser',
    ('shop', 'doityourself'): 'shopping_diy',
    ('shop', 'estate_agent'): 'shopping_estateagent2',
    ('shop', 'car'): 'shopping_car',
    ('shop', 'garden_centre'): 'shopping_garden_centre',
    ('shop', 'car_repair'): 'shopping_car_repair',
    ('shop', 'bakery'): 'shopping_bakery',
    ('shop', 'butcher'): 'shopping_butcher',
    ('shop', 'apparel'): 'shopping_clothes',
    ('shop', 'laundry'): 'shopping_laundrette',
    ('shop', 'beverages'): 'shopping_alcohol',
    ('shop', 'alcohol'): 'shopping_alcohol',
    ('shop', 'optician'): 'health_opticians',
    ('shop', 'chemist'): 'health_pharmacy',
    ('shop', 'gallery'): 'tourist_art_gallery2',
    ('shop', 'jewelry'): 'shopping_jewelry',
    ('tourism', 'information'): 'amenity_information',
    ('historic', 'ruins'): 'tourist_ruin',
    ('amenity', 'college'): 'education_school',
    ('historic', 'monument'): 'tourist_monument',
    ('historic', 'memorial'): 'tourist_monument',
    ('historic', 'mine'): 'poi_mine',
    ('tourism', 'caravan_site'): 'accommodation_caravan_park',
    ('amenity', 'bus_station'): 'transport_bus_station',
    ('amenity', 'atm'): 'money_atm2',
    ('tourism', 'viewpoint'): 'tourist_view_point',
    ('tourism', 'guesthouse'): 'accommodation_bed_and_breakfast',
    ('railway', 'tram'): 'transport_tram_stop',
    ('amenity', 'courthouse'): 'amenity_court',
    ('amenity', 'recycling'): 'amenity_recycling',
    ('amenity', 'dentist'): 'health_dentist',
    ('natural', 'beach'): 'tourist_beach',
    ('railway', 'tram_stop'): 'transport_tram_stop',
    ('amenity', 'prison'): 'amenity_prison',
    ('highway', 'bus_stop'): 'transport_bus_stop2'
}

# Half-width (in degrees) of the synthetic bounding box created around
# the centroid of node objects, keyed by (class, type). Used by
# bbox_from_result() when no stored bounding box is available.
NODE_EXTENT = {
    ('place', 'continent'): 25,
    ('place', 'country'): 7,
    ('place', 'state'): 2.6,
    ('place', 'province'): 2.6,
    ('place', 'region'): 1.0,
    ('place', 'county'): 0.7,
    ('place', 'city'): 0.16,
    ('place', 'municipality'): 0.16,
    ('place', 'island'): 0.32,
    ('place', 'postcode'): 0.16,
    ('place', 'town'): 0.04,
    ('place', 'village'): 0.02,
    ('place', 'hamlet'): 0.02,
    ('place', 'district'): 0.02,
    ('place', 'borough'): 0.02,
    ('place', 'suburb'): 0.02,
    ('place', 'locality'): 0.01,
    ('place', 'neighbourhood'): 0.01,
    ('place', 'quarter'): 0.01,
    ('place', 'city_block'): 0.01,
    ('landuse', 'farm'): 0.01,
    ('place', 'farm'): 0.01,
    ('place', 'airport'): 0.015,
    ('aeroway', 'aerodrome'): 0.015,
    ('railway', 'station'): 0.005
}
|
||||
259
src/nominatim_api/v1/format.py
Normal file
259
src/nominatim_api/v1/format.py
Normal file
@@ -0,0 +1,259 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# This file is part of Nominatim. (https://nominatim.org)
|
||||
#
|
||||
# Copyright (C) 2024 by the Nominatim developer community.
|
||||
# For a full list of authors see the git log.
|
||||
"""
|
||||
Output formatters for API version v1.
|
||||
"""
|
||||
from typing import List, Dict, Mapping, Any
|
||||
import collections
|
||||
import datetime as dt
|
||||
|
||||
from nominatim_core.utils.json_writer import JsonWriter
|
||||
from ..status import StatusResult
|
||||
from ..results import DetailedResult, ReverseResults, SearchResults, \
|
||||
AddressLines, AddressLine
|
||||
from ..localization import Locales
|
||||
from ..result_formatting import FormatDispatcher
|
||||
from .classtypes import ICONS
|
||||
from . import format_json, format_xml
|
||||
|
||||
class RawDataList(List[Dict[str, Any]]):
    """ Data type for formatting raw data lists 'as is' in json.
    """

# Central dispatcher instance for API v1; all formatter functions below
# register themselves on it via the @dispatch.format_func decorator.
dispatch = FormatDispatcher()
||||
@dispatch.format_func(StatusResult, 'text')
def _format_status_text(result: StatusResult, _: Mapping[str, Any]) -> str:
    """ Plain-text rendering of a status result: 'OK' for status 0,
        an error line with the message otherwise.
    """
    if not result.status:
        return 'OK'

    return f"ERROR: {result.message}"
||||
|
||||
|
||||
@dispatch.format_func(StatusResult, 'json')
def _format_status_json(result: StatusResult, _: Mapping[str, Any]) -> str:
    """ JSON rendering of a status result. Optional fields
        (data_updated, database_version) are omitted when unset.
    """
    writer = JsonWriter()

    writer.start_object()
    writer.keyval('status', result.status)
    writer.keyval('message', result.message)
    writer.keyval_not_none('data_updated', result.data_updated,
                           lambda v: v.isoformat())
    writer.keyval('software_version', str(result.software_version))
    writer.keyval_not_none('database_version', result.database_version, str)
    writer.end_object()

    return writer()
||||
|
||||
|
||||
def _add_address_row(writer: JsonWriter, row: AddressLine,
                     locales: Locales) -> None:
    """ Write a single address line as a JSON object, with localized
        name, OSM reference (if any) and classification details.
    """
    writer.start_object()
    writer.keyval('localname', locales.display_name(row.names))
    writer.keyval_not_none('place_id', row.place_id)

    if row.osm_object is not None:
        writer.keyval('osm_id', row.osm_object[1])
        writer.keyval('osm_type', row.osm_object[0])

    if row.extratags:
        writer.keyval_not_none('place_type', row.extratags.get('place_type'))

    writer.keyval('class', row.category[0])
    writer.keyval('type', row.category[1])
    writer.keyval_not_none('admin_level', row.admin_level)
    writer.keyval('rank_address', row.rank_address)
    writer.keyval('distance', row.distance)
    writer.keyval('isaddress', row.isaddress)
    writer.end_object()
||||
|
||||
|
||||
def _add_address_rows(writer: JsonWriter, section: str, rows: AddressLines,
                      locales: Locales) -> None:
    """ Write a list of address rows as a JSON array under the given key.
    """
    writer.key(section)
    writer.start_array()
    for line in rows:
        _add_address_row(writer, line, locales)
        writer.next()
    writer.end_array()
    writer.next()
||||
|
||||
|
||||
def _add_parent_rows_grouped(writer: JsonWriter, rows: AddressLines,
                             locales: Locales) -> None:
    """ Write parent rows as a 'hierarchy' object whose keys are the
        type part of the row category and whose values are arrays of
        the rows of that type.
    """
    # Render each row into its own JSON snippet and bucket by type.
    buckets = collections.defaultdict(list)
    for line in rows:
        snippet = JsonWriter()
        _add_address_row(snippet, line, locales)
        buckets[line.category[1]].append(snippet())

    writer.key('hierarchy').start_object()
    for group, members in buckets.items():
        writer.key(group).start_array()
        # Sorting the rendered snippets sorts alphabetically by local
        # name because 'localname' is the first key of each snippet.
        for member in sorted(members):
            writer.raw(member).next()
        writer.end_array().next()

    writer.end_object().next()
||||
|
||||
|
||||
@dispatch.format_func(DetailedResult, 'json')
def _format_details_json(result: DetailedResult, options: Mapping[str, Any]) -> str:
    """ JSON output for the details endpoint: a single object with the
        full set of place attributes, plus optional address, linked
        places, keyword and hierarchy sections depending on what the
        result carries and which options are set.
    """
    locales = options.get('locales', Locales())
    geom = result.geometry.get('geojson')
    centroid = result.centroid.to_geojson()

    out = JsonWriter()
    out.start_object()\
       .keyval_not_none('place_id', result.place_id)\
       .keyval_not_none('parent_place_id', result.parent_place_id)

    if result.osm_object is not None:
        out.keyval('osm_type', result.osm_object[0])\
           .keyval('osm_id', result.osm_object[1])

    # 'isarea' is derived from the geojson geometry when present,
    # falling back to the stored geometry type.
    out.keyval('category', result.category[0])\
       .keyval('type', result.category[1])\
       .keyval('admin_level', result.admin_level)\
       .keyval('localname', result.locale_name or '')\
       .keyval('names', result.names or {})\
       .keyval('addresstags', result.address or {})\
       .keyval_not_none('housenumber', result.housenumber)\
       .keyval_not_none('calculated_postcode', result.postcode)\
       .keyval_not_none('country_code', result.country_code)\
       .keyval_not_none('indexed_date', result.indexed_date, lambda v: v.isoformat())\
       .keyval_not_none('importance', result.importance)\
       .keyval('calculated_importance', result.calculated_importance())\
       .keyval('extratags', result.extratags or {})\
       .keyval_not_none('calculated_wikipedia', result.wikipedia)\
       .keyval('rank_address', result.rank_address)\
       .keyval('rank_search', result.rank_search)\
       .keyval('isarea', 'Polygon' in (geom or result.geometry.get('type') or ''))\
       .key('centroid').raw(centroid).next()\
       .key('geometry').raw(geom or centroid).next()

    if options.get('icon_base_url', None):
        icon = ICONS.get(result.category)
        if icon:
            out.keyval('icon', f"{options['icon_base_url']}/{icon}.p.20.png")

    if result.address_rows is not None:
        _add_address_rows(out, 'address', result.address_rows, locales)

    if result.linked_rows:
        _add_address_rows(out, 'linked_places', result.linked_rows, locales)

    # Keyword sections are only emitted when keyword data was requested.
    if result.name_keywords is not None or result.address_keywords is not None:
        out.key('keywords').start_object()

        for sec, klist in (('name', result.name_keywords), ('address', result.address_keywords)):
            out.key(sec).start_array()
            for word in (klist or []):
                out.start_object()\
                   .keyval('id', word.word_id)\
                   .keyval('token', word.word_token)\
                   .end_object().next()
            out.end_array().next()

        out.end_object().next()

    if result.parented_rows is not None:
        if options.get('group_hierarchy', False):
            _add_parent_rows_grouped(out, result.parented_rows, locales)
        else:
            _add_address_rows(out, 'hierarchy', result.parented_rows, locales)

    out.end_object()

    return out()
||||
|
||||
|
||||
@dispatch.format_func(ReverseResults, 'xml')
def _format_reverse_xml(results: ReverseResults, options: Mapping[str, Any]) -> str:
    """ XML output for reverse geocoding results. """
    extra = {'querystring': options.get('query', '')}
    return format_xml.format_base_xml(results, options, True,
                                      'reversegeocode', extra)
||||
|
||||
|
||||
@dispatch.format_func(ReverseResults, 'geojson')
def _format_reverse_geojson(results: ReverseResults,
                            options: Mapping[str, Any]) -> str:
    """ GeoJSON output for reverse geocoding results. """
    return format_json.format_base_geojson(results, options, simple=True)
||||
|
||||
|
||||
@dispatch.format_func(ReverseResults, 'geocodejson')
def _format_reverse_geocodejson(results: ReverseResults,
                                options: Mapping[str, Any]) -> str:
    """ Geocodejson output for reverse geocoding results. """
    return format_json.format_base_geocodejson(results, options, simple=True)
||||
|
||||
|
||||
@dispatch.format_func(ReverseResults, 'json')
def _format_reverse_json(results: ReverseResults,
                         options: Mapping[str, Any]) -> str:
    """ Classic JSON output for reverse results (uses the 'class' key). """
    return format_json.format_base_json(results, options, True,
                                        class_label='class')
||||
|
||||
|
||||
@dispatch.format_func(ReverseResults, 'jsonv2')
def _format_reverse_jsonv2(results: ReverseResults,
                           options: Mapping[str, Any]) -> str:
    """ jsonv2 output for reverse results (uses the 'category' key). """
    return format_json.format_base_json(results, options, True,
                                        class_label='category')
||||
|
||||
|
||||
@dispatch.format_func(SearchResults, 'xml')
def _format_search_xml(results: SearchResults, options: Mapping[str, Any]) -> str:
    """ XML output for forward search results. Optional search metadata
        (paging URL, exclusions, viewbox) is copied into the root
        attributes when present.
    """
    extra = {'querystring': options.get('query', '')}
    extra.update((attr, options[attr])
                 for attr in ('more_url', 'exclude_place_ids', 'viewbox')
                 if options.get(attr))
    return format_xml.format_base_xml(results, options, False,
                                      'searchresults', extra)
||||
|
||||
|
||||
|
||||
@dispatch.format_func(SearchResults, 'geojson')
def _format_search_geojson(results: SearchResults,
                           options: Mapping[str, Any]) -> str:
    """ GeoJSON output for forward search results. """
    return format_json.format_base_geojson(results, options, simple=False)
||||
|
||||
|
||||
@dispatch.format_func(SearchResults, 'geocodejson')
def _format_search_geocodejson(results: SearchResults,
                               options: Mapping[str, Any]) -> str:
    """ Geocodejson output for forward search results. """
    return format_json.format_base_geocodejson(results, options, simple=False)
||||
|
||||
|
||||
@dispatch.format_func(SearchResults, 'json')
def _format_search_json(results: SearchResults,
                        options: Mapping[str, Any]) -> str:
    """ Classic JSON output for search results (uses the 'class' key). """
    return format_json.format_base_json(results, options, False,
                                        class_label='class')
||||
|
||||
|
||||
@dispatch.format_func(SearchResults, 'jsonv2')
def _format_search_jsonv2(results: SearchResults,
                          options: Mapping[str, Any]) -> str:
    """ jsonv2 output for search results (uses the 'category' key). """
    return format_json.format_base_json(results, options, False,
                                        class_label='category')
||||
|
||||
@dispatch.format_func(RawDataList, 'json')
def _format_raw_data_json(results: RawDataList, _: Mapping[str, Any]) -> str:
    """ Dump a list of raw-data dictionaries as a JSON array of objects.
        datetime values are serialised as 'YYYY-MM-DD HH:MM:SS' strings;
        everything else is written as is.
    """
    out = JsonWriter()
    out.start_array()
    for item in results:
        out.start_object()
        for key, value in item.items():
            if isinstance(value, dt.datetime):
                value = value.isoformat(sep=' ', timespec='seconds')
            out.keyval(key, value)
        out.end_object().next()

    out.end_array()

    return out()
||||
275
src/nominatim_api/v1/format_json.py
Normal file
275
src/nominatim_api/v1/format_json.py
Normal file
@@ -0,0 +1,275 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# This file is part of Nominatim. (https://nominatim.org)
|
||||
#
|
||||
# Copyright (C) 2024 by the Nominatim developer community.
|
||||
# For a full list of authors see the git log.
|
||||
"""
|
||||
Helper functions for output of results in json formats.
|
||||
"""
|
||||
from typing import Mapping, Any, Optional, Tuple, Union
|
||||
|
||||
from nominatim_core.utils.json_writer import JsonWriter
|
||||
from ..results import AddressLines, ReverseResults, SearchResults
|
||||
from . import classtypes as cl
|
||||
|
||||
#pylint: disable=too-many-branches
|
||||
|
||||
def _write_osm_id(out: JsonWriter, osm_object: Optional[Tuple[str, int]]) -> None:
    """ Write the OSM type and id of the object, if one is present.
        Unknown type letters result in the 'osm_type' key being omitted.
    """
    if osm_object is None:
        return

    out.keyval_not_none('osm_type', cl.OSM_TYPE_NAME.get(osm_object[0], None))
    out.keyval('osm_id', osm_object[1])
||||
|
||||
|
||||
def _write_typed_address(out: JsonWriter, address: Optional[AddressLines],
                         country_code: Optional[str]) -> None:
    """ Write the address rows as a set of '<label>: <name>' keys.
        Only the first row for a given label is used; ISO3166-2 codes
        are emitted under per-level keys; the country code goes last.
    """
    parts = {}
    for line in (address or []):
        if not line.isaddress:
            continue
        if line.local_name:
            label = cl.get_label_tag(line.category, line.extratags,
                                     line.rank_address, country_code)
            # keep only the first (most specific) name per label
            parts.setdefault(label, line.local_name)
        if line.names and 'ISO3166-2' in line.names and line.admin_level:
            parts[f"ISO3166-2-lvl{line.admin_level}"] = line.names['ISO3166-2']

    for key, value in parts.items():
        out.keyval(key, value)

    if country_code:
        out.keyval('country_code', country_code)
||||
|
||||
|
||||
def _write_geocodejson_address(out: JsonWriter,
                               address: Optional[AddressLines],
                               obj_place_id: Optional[int],
                               country_code: Optional[str]) -> None:
    """ Write the address rows using geocodejson key names. Postcode and
        housenumber rows get dedicated keys; other rows are mapped via
        GEOCODEJSON_RANKS, skipping the object's own row and keeping
        only the first name per rank label.
    """
    extra = {}
    for line in (address or []):
        if not (line.isaddress and line.local_name):
            continue
        if line.category[1] in ('postcode', 'postal_code'):
            out.keyval('postcode', line.local_name)
        elif line.category[1] == 'house_number':
            out.keyval('housenumber', line.local_name)
        elif (obj_place_id is None or obj_place_id != line.place_id) \
                and 4 <= line.rank_address < 28:
            extra.setdefault(GEOCODEJSON_RANKS[line.rank_address],
                             line.local_name)

    for key, value in extra.items():
        out.keyval(key, value)

    if country_code:
        out.keyval('country_code', country_code)
||||
|
||||
|
||||
def format_base_json(results: Union[ReverseResults, SearchResults],
                     options: Mapping[str, Any], simple: bool,
                     class_label: str) -> str:
    """ Return the result list as a simple json string in custom Nominatim format.

        With 'simple' set, exactly one result object is emitted (reverse
        mode); otherwise a JSON array of results. 'class_label' selects
        the key name for the main category ('class' for v1 json,
        'category' for jsonv2).
    """
    out = JsonWriter()

    if simple:
        if not results:
            return '{"error":"Unable to geocode"}'
    else:
        out.start_array()

    for result in results:
        # Fix: removed a stray trailing line-continuation backslash after
        # the 'licence' keyval. It only parsed because a blank line
        # happened to follow and would have fused the statement with the
        # next call had the blank line been removed.
        out.start_object()\
           .keyval_not_none('place_id', result.place_id)\
           .keyval('licence', cl.OSM_ATTRIBUTION)

        _write_osm_id(out, result.osm_object)

        out.keyval('lat', f"{result.centroid.lat}")\
           .keyval('lon', f"{result.centroid.lon}")\
           .keyval(class_label, result.category[0])\
           .keyval('type', result.category[1])\
           .keyval('place_rank', result.rank_search)\
           .keyval('importance', result.calculated_importance())\
           .keyval('addresstype', cl.get_label_tag(result.category, result.extratags,
                                                   result.rank_address,
                                                   result.country_code))\
           .keyval('name', result.locale_name or '')\
           .keyval('display_name', result.display_name or '')

        if options.get('icon_base_url', None):
            icon = cl.ICONS.get(result.category)
            if icon:
                out.keyval('icon', f"{options['icon_base_url']}/{icon}.p.20.png")

        if options.get('addressdetails', False):
            out.key('address').start_object()
            _write_typed_address(out, result.address_rows, result.country_code)
            out.end_object().next()

        if options.get('extratags', False):
            out.keyval('extratags', result.extratags)

        if options.get('namedetails', False):
            out.keyval('namedetails', result.names)

        bbox = cl.bbox_from_result(result)
        out.key('boundingbox').start_array()\
           .value(f"{bbox.minlat:0.7f}").next()\
           .value(f"{bbox.maxlat:0.7f}").next()\
           .value(f"{bbox.minlon:0.7f}").next()\
           .value(f"{bbox.maxlon:0.7f}").next()\
           .end_array().next()

        if result.geometry:
            for key in ('text', 'kml'):
                out.keyval_not_none('geo' + key, result.geometry.get(key))
            if 'geojson' in result.geometry:
                out.key('geojson').raw(result.geometry['geojson']).next()
            out.keyval_not_none('svg', result.geometry.get('svg'))

        out.end_object()

        if simple:
            # reverse mode: only the first result is output
            return out()

        out.next()

    out.end_array()

    return out()
||||
|
||||
|
||||
def format_base_geojson(results: Union[ReverseResults, SearchResults],
                        options: Mapping[str, Any],
                        simple: bool) -> str:
    """ Return the result list as a geojson string.

        Produces a FeatureCollection with one Feature per result. With
        'simple' set and no results, a bare error object is returned
        instead.
    """
    if not results and simple:
        return '{"error":"Unable to geocode"}'

    out = JsonWriter()

    out.start_object()\
         .keyval('type', 'FeatureCollection')\
         .keyval('licence', cl.OSM_ATTRIBUTION)\
         .key('features').start_array()

    for result in results:
        out.start_object()\
             .keyval('type', 'Feature')\
             .key('properties').start_object()

        out.keyval_not_none('place_id', result.place_id)

        _write_osm_id(out, result.osm_object)

        out.keyval('place_rank', result.rank_search)\
           .keyval('category', result.category[0])\
           .keyval('type', result.category[1])\
           .keyval('importance', result.calculated_importance())\
           .keyval('addresstype', cl.get_label_tag(result.category, result.extratags,
                                                   result.rank_address,
                                                   result.country_code))\
           .keyval('name', result.locale_name or '')\
           .keyval('display_name', result.display_name or '')

        if options.get('addressdetails', False):
            out.key('address').start_object()
            _write_typed_address(out, result.address_rows, result.country_code)
            out.end_object().next()

        if options.get('extratags', False):
            out.keyval('extratags', result.extratags)

        if options.get('namedetails', False):
            out.keyval('namedetails', result.names)

        out.end_object().next()  # properties

        # bbox coordinates are rounded to 7 decimals
        out.key('bbox').start_array()
        for coord in cl.bbox_from_result(result).coords:
            out.float(coord, 7).next()
        out.end_array().next()

        # fall back to a point geometry at the centroid when no
        # geojson geometry was computed for the result
        out.key('geometry').raw(result.geometry.get('geojson')
                                or result.centroid.to_geojson()).next()

        out.end_object().next()

    out.end_array().next().end_object()

    return out()
|
||||
|
||||
|
||||
def format_base_geocodejson(results: Union[ReverseResults, SearchResults],
                            options: Mapping[str, Any], simple: bool) -> str:
    """ Return the result list as a geocodejson string.

        Produces a FeatureCollection with a 'geocoding' meta object and
        one Feature per result. With 'simple' set and no results, a bare
        error object is returned instead.
    """
    if not results and simple:
        return '{"error":"Unable to geocode"}'

    out = JsonWriter()

    out.start_object()\
         .keyval('type', 'FeatureCollection')\
         .key('geocoding').start_object()\
           .keyval('version', '0.1.0')\
           .keyval('attribution', cl.OSM_ATTRIBUTION)\
           .keyval('licence', 'ODbL')\
           .keyval_not_none('query', options.get('query'))\
           .end_object().next()\
         .key('features').start_array()

    for result in results:
        out.start_object()\
             .keyval('type', 'Feature')\
             .key('properties').start_object()\
               .key('geocoding').start_object()

        out.keyval_not_none('place_id', result.place_id)

        _write_osm_id(out, result.osm_object)

        # Fix: removed a stray trailing line-continuation backslash after
        # the 'name' keyval. It only parsed because a blank line happened
        # to follow and would have fused the statement with the next 'if'
        # had the blank line been removed.
        out.keyval('osm_key', result.category[0])\
           .keyval('osm_value', result.category[1])\
           .keyval('type', GEOCODEJSON_RANKS[max(3, min(28, result.rank_address))])\
           .keyval_not_none('accuracy', getattr(result, 'distance', None), transform=int)\
           .keyval('label', result.display_name or '')\
           .keyval_not_none('name', result.locale_name or None)

        if options.get('addressdetails', False):
            _write_geocodejson_address(out, result.address_rows, result.place_id,
                                       result.country_code)

            # administrative boundaries, keyed by admin level
            out.key('admin').start_object()
            if result.address_rows:
                for line in result.address_rows:
                    if line.isaddress and (line.admin_level or 15) < 15 and line.local_name \
                       and line.category[0] == 'boundary' and line.category[1] == 'administrative':
                        out.keyval(f"level{line.admin_level}", line.local_name)
            out.end_object().next()

        out.end_object().next().end_object().next()  # geocoding, properties

        # fall back to a point geometry at the centroid when no
        # geojson geometry was computed for the result
        out.key('geometry').raw(result.geometry.get('geojson')
                                or result.centroid.to_geojson()).next()

        out.end_object().next()

    out.end_array().next().end_object()

    return out()
|
||||
|
||||
|
||||
# Mapping from Nominatim address rank to the place-type names used in
# geocodejson output. Covers ranks 3-28; callers clamp or filter the
# rank into this range before lookup.
GEOCODEJSON_RANKS = {
    3: 'locality',
    4: 'country',
    5: 'state', 6: 'state', 7: 'state', 8: 'state', 9: 'state',
    10: 'county', 11: 'county', 12: 'county',
    13: 'city', 14: 'city', 15: 'city', 16: 'city',
    17: 'district', 18: 'district', 19: 'district', 20: 'district', 21: 'district',
    22: 'locality', 23: 'locality', 24: 'locality',
    25: 'street', 26: 'street', 27: 'street', 28: 'house'}
|
||||
126
src/nominatim_api/v1/format_xml.py
Normal file
126
src/nominatim_api/v1/format_xml.py
Normal file
@@ -0,0 +1,126 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# This file is part of Nominatim. (https://nominatim.org)
|
||||
#
|
||||
# Copyright (C) 2024 by the Nominatim developer community.
|
||||
# For a full list of authors see the git log.
|
||||
"""
|
||||
Helper functions for output of results in XML format.
|
||||
"""
|
||||
from typing import Mapping, Any, Optional, Union
|
||||
import datetime as dt
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
from ..results import AddressLines, ReverseResult, ReverseResults, \
|
||||
SearchResult, SearchResults
|
||||
from . import classtypes as cl
|
||||
|
||||
#pylint: disable=too-many-branches
|
||||
|
||||
def _write_xml_address(root: ET.Element, address: AddressLines,
                       country_code: Optional[str]) -> None:
    """ Append the address rows as sub-elements of 'root', one element
        per label. Only the first name per label is kept; ISO3166-2
        codes get per-level elements and the country code comes last.
    """
    parts = {}
    for line in address:
        if not line.isaddress:
            continue
        if line.local_name:
            tag = cl.get_label_tag(line.category, line.extratags,
                                   line.rank_address, country_code)
            # keep only the first (most specific) name per label
            parts.setdefault(tag, line.local_name)
        if line.names and 'ISO3166-2' in line.names and line.admin_level:
            parts[f"ISO3166-2-lvl{line.admin_level}"] = line.names['ISO3166-2']

    for tag, value in parts.items():
        ET.SubElement(root, tag).text = value

    if country_code:
        ET.SubElement(root, 'country_code').text = country_code
|
||||
|
||||
|
||||
def _create_base_entry(result: Union[ReverseResult, SearchResult],
                       root: ET.Element, simple: bool) -> ET.Element:
    """ Create the XML element for a single result and attach it to
        'root'. In 'simple' (reverse) mode the element is named
        'result' and carries the display name as text; otherwise it is
        named 'place' with the display name and category as attributes.

        Returns the newly created element.
    """
    place = ET.SubElement(root, 'result' if simple else 'place')
    if result.place_id is not None:
        place.set('place_id', str(result.place_id))
    if result.osm_object:
        # unknown type letters drop the osm_type attribute but keep osm_id
        osm_type = cl.OSM_TYPE_NAME.get(result.osm_object[0], None)
        if osm_type is not None:
            place.set('osm_type', osm_type)
        place.set('osm_id', str(result.osm_object[1]))
    if result.names and 'ref' in result.names:
        place.set('ref', result.names['ref'])
    elif result.locale_name:
        # bug reproduced from PHP
        place.set('ref', result.locale_name)
    place.set('lat', f"{result.centroid.lat:.7f}")
    place.set('lon', f"{result.centroid.lon:.7f}")

    bbox = cl.bbox_from_result(result)
    place.set('boundingbox',
              f"{bbox.minlat:.7f},{bbox.maxlat:.7f},{bbox.minlon:.7f},{bbox.maxlon:.7f}")

    place.set('place_rank', str(result.rank_search))
    place.set('address_rank', str(result.rank_address))

    if result.geometry:
        for key in ('text', 'svg'):
            if key in result.geometry:
                place.set('geo' + key, result.geometry[key])
        if 'kml' in result.geometry:
            # kml is stored as an XML snippet; re-parse it so it becomes
            # part of the tree instead of an escaped attribute
            ET.SubElement(root if simple else place, 'geokml')\
                .append(ET.fromstring(result.geometry['kml']))
        if 'geojson' in result.geometry:
            place.set('geojson', result.geometry['geojson'])

    if simple:
        place.text = result.display_name or ''
    else:
        place.set('display_name', result.display_name or '')
        place.set('class', result.category[0])
        place.set('type', result.category[1])
        place.set('importance', str(result.calculated_importance()))

    return place
|
||||
|
||||
|
||||
def format_base_xml(results: Union[ReverseResults, SearchResults],
                    options: Mapping[str, Any],
                    simple: bool, xml_root_tag: str,
                    xml_extra_info: Mapping[str, str]) -> str:
    """ Format the result into an XML response. With 'simple' exactly one
        result will be output, otherwise a list.

        'options' carries the output flags (addressdetails, extratags,
        namedetails, icon_base_url); 'xml_extra_info' is added verbatim
        as attributes of the root element.
    """
    root = ET.Element(xml_root_tag)
    # Use an aware UTC datetime: datetime.utcnow() is deprecated since
    # Python 3.12. The rendered timestamp string is unchanged.
    root.set('timestamp',
             dt.datetime.now(dt.timezone.utc).strftime('%a, %d %b %Y %H:%M:%S +00:00'))
    root.set('attribution', cl.OSM_ATTRIBUTION)
    for k, v in xml_extra_info.items():
        root.set(k, v)

    if simple and not results:
        ET.SubElement(root, 'error').text = 'Unable to geocode'

    for result in results:
        place = _create_base_entry(result, root, simple)

        if not simple and options.get('icon_base_url', None):
            icon = cl.ICONS.get(result.category)
            if icon:
                place.set('icon', icon)

        if options.get('addressdetails', False) and result.address_rows:
            # In simple mode the address goes into its own top-level node.
            _write_xml_address(ET.SubElement(root, 'addressparts') if simple else place,
                               result.address_rows, result.country_code)

        if options.get('extratags', False):
            eroot = ET.SubElement(root if simple else place, 'extratags')
            if result.extratags:
                for k, v in result.extratags.items():
                    ET.SubElement(eroot, 'tag', attrib={'key': k, 'value': v})

        if options.get('namedetails', False):
            eroot = ET.SubElement(root if simple else place, 'namedetails')
            if result.names:
                for k, v in result.names.items():
                    ET.SubElement(eroot, 'name', attrib={'desc': k}).text = v

    return '<?xml version="1.0" encoding="UTF-8" ?>\n' + ET.tostring(root, encoding='unicode')
|
||||
201
src/nominatim_api/v1/helpers.py
Normal file
201
src/nominatim_api/v1/helpers.py
Normal file
@@ -0,0 +1,201 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# This file is part of Nominatim. (https://nominatim.org)
|
||||
#
|
||||
# Copyright (C) 2024 by the Nominatim developer community.
|
||||
# For a full list of authors see the git log.
|
||||
"""
|
||||
Helper function for parsing parameters and and outputting data
|
||||
specifically for the v1 version of the API.
|
||||
"""
|
||||
from typing import Tuple, Optional, Any, Dict, Iterable
|
||||
from itertools import chain
|
||||
import re
|
||||
|
||||
from ..results import SearchResult, SearchResults, SourceTable
|
||||
from ..types import SearchDetails, GeometryFormat
|
||||
|
||||
# Lookup table translating the v1 'zoom' parameter (index 0-18) into the
# maximum address rank to use for reverse geocoding.
REVERSE_MAX_RANKS = [2, 2, 2,   # 0-2   Continent/Sea
                     4, 4,      # 3-4   Country
                     8,         # 5     State
                     10, 10,    # 6-7   Region
                     12, 12,    # 8-9   County
                     16, 17,    # 10-11 City
                     18,        # 12    Town
                     19,        # 13    Village/Suburb
                     22,        # 14    Hamlet/Neighbourhood
                     25,        # 15    Localities
                     26,        # 16    Major Streets
                     27,        # 17    Minor Streets
                     30         # 18    Building
                     ]


def zoom_to_rank(zoom: int) -> int:
    """ Convert a zoom parameter into a rank according to the v1 API spec.

        Out-of-range zoom values are clamped to [0, 18].
    """
    clamped_zoom = min(max(zoom, 0), 18)
    return REVERSE_MAX_RANKS[clamped_zoom]
|
||||
|
||||
|
||||
# Rank window (minimum, maximum) selected by each 'featureType' value.
FEATURE_TYPE_TO_RANK: Dict[Optional[str], Tuple[int, int]] = {
    'country': (4, 4),
    'state': (8, 8),
    'city': (14, 16),
    'settlement': (8, 20)
}


def feature_type_to_rank(feature_type: Optional[str]) -> Tuple[int, int]:
    """ Return the (minimum rank, maximum rank) tuple for the given
        feature type. Unknown or missing feature types select the
        full rank range.
    """
    try:
        return FEATURE_TYPE_TO_RANK[feature_type]
    except KeyError:
        return (0, 30)
|
||||
|
||||
|
||||
#pylint: disable=too-many-arguments,too-many-branches
def extend_query_parts(queryparts: Dict[str, Any], details: Dict[str, Any],
                       feature_type: Optional[str],
                       namedetails: bool, extratags: bool,
                       excluded: Iterable[str]) -> None:
    """ Add parameters from details dictionary to the query parts
        dictionary which is suitable as URL parameter dictionary.
    """
    parsed = SearchDetails.from_kwargs(details)

    # Re-emit one polygon_* flag per requested geometry format.
    if parsed.geometry_output != GeometryFormat.NONE:
        for geom_fmt, param_name in ((GeometryFormat.GEOJSON, 'polygon_geojson'),
                                     (GeometryFormat.KML, 'polygon_kml'),
                                     (GeometryFormat.SVG, 'polygon_svg'),
                                     (GeometryFormat.TEXT, 'polygon_text')):
            if geom_fmt in parsed.geometry_output:
                queryparts[param_name] = '1'

    for enabled, param_name in ((parsed.address_details, 'addressdetails'),
                                (namedetails, 'namedetails'),
                                (extratags, 'extratags')):
        if enabled:
            queryparts[param_name] = '1'

    if parsed.geometry_simplification > 0.0:
        queryparts['polygon_threshold'] = f"{parsed.geometry_simplification:.6g}"
    if parsed.max_results != 10:
        queryparts['limit'] = str(parsed.max_results)
    if parsed.countries:
        queryparts['countrycodes'] = ','.join(parsed.countries)

    # Always emitted, even when empty, mirroring the v1 behaviour.
    parsed_excludes = (str(eid) for eid in parsed.excluded if eid > 0)
    queryparts['exclude_place_ids'] = ','.join(chain(excluded, parsed_excludes))

    if parsed.viewbox:
        queryparts['viewbox'] = ','.join(f"{c:.7g}" for c in parsed.viewbox.coords)
    if parsed.bounded_viewbox:
        queryparts['bounded'] = '1'
    if not details['dedupe']:
        queryparts['dedupe'] = '0'
    if feature_type in FEATURE_TYPE_TO_RANK:
        queryparts['featureType'] = feature_type
|
||||
|
||||
|
||||
def deduplicate_results(results: SearchResults, max_results: int) -> SearchResults:
    """ Remove results that look like duplicates.

        Two results are considered the same if they have the same OSM ID
        or if they have the same category, display name and rank.
    """
    seen_osm_ids = set()
    seen_classifications = set()
    deduped = SearchResults()
    for result in results:
        # A postcode point is dropped when a postcode boundary relation
        # with the same reference appears anywhere in the result list.
        if result.source_table == SourceTable.POSTCODE:
            assert result.names and 'ref' in result.names
            ref = result.names['ref']
            if any(_is_postcode_relation_for(other, ref) for other in results):
                continue
        if result.source_table != SourceTable.PLACEX:
            deduped.append(result)
        else:
            classification = (result.osm_object[0] if result.osm_object else None,
                              result.category,
                              result.display_name,
                              result.rank_address)
            if result.osm_object not in seen_osm_ids \
               and classification not in seen_classifications:
                deduped.append(result)
            seen_osm_ids.add(result.osm_object)
            seen_classifications.add(classification)
        if len(deduped) >= max_results:
            break

    return deduped
|
||||
|
||||
|
||||
def _is_postcode_relation_for(result: SearchResult, postcode: str) -> bool:
    """ Check if 'result' is a postal-code boundary relation carrying
        exactly the given postcode as its reference.
    """
    if result.source_table != SourceTable.PLACEX or result.osm_object is None:
        return False
    if result.osm_object[0] != 'R':
        return False
    if result.category != ('boundary', 'postal_code'):
        return False
    return result.names is not None and result.names.get('ref') == postcode
|
||||
|
||||
|
||||
def _deg(axis: str) -> str:
    # Decimal degrees with an optional degree sign.
    return "(?P<" + axis + "_deg>\\d+\\.\\d+)°?"

def _deg_min(axis: str) -> str:
    # Whole degrees followed by decimal minutes.
    return "(?P<" + axis + "_deg>\\d+)[°\\s]+(?P<" + axis + "_min>[\\d.]+)[′']*"

def _deg_min_sec(axis: str) -> str:
    # Whole degrees, whole minutes and decimal seconds.
    return ("(?P<" + axis + "_deg>\\d+)[°\\s]+(?P<" + axis + "_min>\\d+)[′'\\s]+"
            "(?P<" + axis + "_sec>[\\d.]+)[\"″]*")

# All accepted coordinate spellings: hemisphere letter before or after
# the number, in degrees / degrees-minutes / degrees-minutes-seconds
# notation, plus a plain signed decimal pair.
_COORD_PATTERNS = (
    r"(?P<ns>[NS])\s*" + _deg('lat') + r"[\s,]+" + r"(?P<ew>[EW])\s*" + _deg('lon'),
    _deg('lat') + r"\s*(?P<ns>[NS])[\s,]+" + _deg('lon') + r"\s*(?P<ew>[EW])",
    r"(?P<ns>[NS])\s*" + _deg_min('lat') + r"[\s,]+" + r"(?P<ew>[EW])\s*" + _deg_min('lon'),
    _deg_min('lat') + r"\s*(?P<ns>[NS])[\s,]+" + _deg_min('lon') + r"\s*(?P<ew>[EW])",
    r"(?P<ns>[NS])\s*" + _deg_min_sec('lat') + r"[\s,]+" + r"(?P<ew>[EW])\s*" + _deg_min_sec('lon'),
    _deg_min_sec('lat') + r"\s*(?P<ns>[NS])[\s,]+" + _deg_min_sec('lon') + r"\s*(?P<ew>[EW])",
    r"\[?(?P<lat_deg>[+-]?\d+\.\d+)[\s,]+(?P<lon_deg>[+-]?\d+\.\d+)\]?"
)

COORD_REGEX = [re.compile(r'(?:(?P<pre>.*?)\s+)??' + pattern + r'(?:\s+(?P<post>.*))?')
               for pattern in _COORD_PATTERNS]


def extract_coords_from_query(query: str) -> Tuple[str, Optional[float], Optional[float]]:
    """ Look for something that is formatted like a coordinate at the
        beginning or end of the query. If found, extract the coordinate and
        return the remaining query (or the empty string if the query
        consisted of nothing but a coordinate).

        Only the first match will be returned.
    """
    for regex in COORD_REGEX:
        match = regex.fullmatch(query)
        if match is None:
            continue
        groups = match.groupdict()
        if groups['pre'] and groups['post']:
            # Text on both sides: not a leading or trailing coordinate.
            continue

        lon = float(groups['lon_deg']) \
              + float(groups.get('lon_min', 0.0)) / 60.0 \
              + float(groups.get('lon_sec', 0.0)) / 3600.0
        if groups.get('ew') == 'W':
            lon = -lon
        lat = float(groups['lat_deg']) \
              + float(groups.get('lat_min', 0.0)) / 60.0 \
              + float(groups.get('lat_sec', 0.0)) / 3600.0
        if groups.get('ns') == 'S':
            lat = -lat
        return groups['pre'] or groups['post'] or '', lon, lat

    return query, None, None
|
||||
|
||||
|
||||
# A category filter hidden in the query text: '[<class>=<type>]'.
CATEGORY_REGEX = re.compile(r'(?P<pre>.*?)\[(?P<cls>[a-zA-Z_]+)=(?P<typ>[a-zA-Z_]+)\](?P<post>.*)')

def extract_category_from_query(query: str) -> Tuple[str, Optional[str], Optional[str]]:
    """ Extract a hidden category specification of the form '[key=value]' from
        the query. If found, extract key and value and
        return the remaining query (or the empty string if the query
        consisted of nothing but a category).

        Only the first match will be returned.
    """
    match = CATEGORY_REGEX.search(query)
    if match is None:
        return query, None, None

    remainder = ' '.join((match.group('pre').strip(),
                          match.group('post').strip())).strip()
    return remainder, match.group('cls'), match.group('typ')
|
||||
577
src/nominatim_api/v1/server_glue.py
Normal file
577
src/nominatim_api/v1/server_glue.py
Normal file
@@ -0,0 +1,577 @@
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# This file is part of Nominatim. (https://nominatim.org)
|
||||
#
|
||||
# Copyright (C) 2024 by the Nominatim developer community.
|
||||
# For a full list of authors see the git log.
|
||||
"""
|
||||
Generic part of the server implementation of the v1 API.
|
||||
Combine with the scaffolding provided for the various Python ASGI frameworks.
|
||||
"""
|
||||
from typing import Optional, Any, Type, Callable, NoReturn, Dict, cast
|
||||
from functools import reduce
|
||||
import abc
|
||||
import dataclasses
|
||||
import math
|
||||
from urllib.parse import urlencode
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from nominatim_core.errors import UsageError
|
||||
from nominatim_core.config import Configuration
|
||||
from .. import logging as loglib
|
||||
from ..core import NominatimAPIAsync
|
||||
from .format import dispatch as formatting
|
||||
from .format import RawDataList
|
||||
from ..types import DataLayer, GeometryFormat, PlaceRef, PlaceID, OsmID, Point
|
||||
from ..status import StatusResult
|
||||
from ..results import DetailedResult, ReverseResults, SearchResult, SearchResults
|
||||
from ..localization import Locales
|
||||
from . import helpers
|
||||
|
||||
# Content-type header values for the supported response body formats.
CONTENT_TEXT = 'text/plain; charset=utf-8'
CONTENT_XML = 'text/xml; charset=utf-8'
CONTENT_HTML = 'text/html; charset=utf-8'
CONTENT_JSON = 'application/json; charset=utf-8'

# Maps the 'format' request parameter to the content type of the
# response; formats not listed here fall back to JSON.
CONTENT_TYPE = {'text': CONTENT_TEXT, 'xml': CONTENT_XML, 'debug': CONTENT_HTML}
|
||||
|
||||
class ASGIAdaptor(abc.ABC):
    """ Adapter class for the different ASGI frameworks.
        Wraps functionality over concrete requests and responses.
    """
    # Content type of the response; may be changed while the request
    # parameters are parsed (see parse_format() and setup_debugging()).
    content_type: str = CONTENT_TEXT

    @abc.abstractmethod
    def get(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Return an input parameter as a string. If the parameter was
            not provided, return the 'default' value.
        """

    @abc.abstractmethod
    def get_header(self, name: str, default: Optional[str] = None) -> Optional[str]:
        """ Return a HTTP header parameter as a string. If the parameter was
            not provided, return the 'default' value.
        """


    @abc.abstractmethod
    def error(self, msg: str, status: int = 400) -> Exception:
        """ Construct an appropriate exception from the given error message.
            The exception must result in a HTTP error with the given status.
        """


    @abc.abstractmethod
    def create_response(self, status: int, output: str, num_results: int) -> Any:
        """ Create a response from the given parameters. The result will
            be returned by the endpoint functions. The adaptor may also
            return None when the response is created internally with some
            different means.

            The response must return the HTTP given status code 'status', set
            the HTTP content-type headers to the string provided and the
            body of the response to 'output'.
        """

    @abc.abstractmethod
    def base_uri(self) -> str:
        """ Return the URI of the original request.
        """


    @abc.abstractmethod
    def config(self) -> Configuration:
        """ Return the current configuration object.
        """


    def build_response(self, output: str, status: int = 200, num_results: int = 0) -> Any:
        """ Create a response from the given output. Wraps a JSONP function
            around the response, if necessary.
        """
        if self.content_type == CONTENT_JSON and status == 200:
            jsonp = self.get('json_callback')
            if jsonp is not None:
                # Guard against script injection via the callback name:
                # every dotted part must be a plain identifier.
                if any(not part.isidentifier() for part in jsonp.split('.')):
                    self.raise_error('Invalid json_callback value')
                output = f"{jsonp}({output})"
                self.content_type = 'application/javascript; charset=utf-8'

        return self.create_response(status, output, num_results)


    def raise_error(self, msg: str, status: int = 400) -> NoReturn:
        """ Raise an exception resulting in the given HTTP status and
            message. The message will be formatted according to the
            output format chosen by the request.
        """
        if self.content_type == CONTENT_XML:
            msg = f"""<?xml version="1.0" encoding="UTF-8" ?>
                      <error>
                        <code>{status}</code>
                        <message>{msg}</message>
                      </error>
                   """
        elif self.content_type == CONTENT_JSON:
            msg = f"""{{"error":{{"code":{status},"message":"{msg}"}}}}"""
        elif self.content_type == CONTENT_HTML:
            # In debug mode the collected log becomes the error page.
            loglib.log().section('Execution error')
            loglib.log().var_dump('Status', status)
            loglib.log().var_dump('Message', msg)
            msg = loglib.get_and_disable()

        raise self.error(msg, status)


    def get_int(self, name: str, default: Optional[int] = None) -> int:
        """ Return an input parameter as an int. Raises an exception if
            the parameter is given but not in an integer format.

            If 'default' is given, then it will be returned when the parameter
            is missing completely. When 'default' is None, an error will be
            raised on a missing parameter.
        """
        value = self.get(name)

        if value is None:
            if default is not None:
                return default

            self.raise_error(f"Parameter '{name}' missing.")

        try:
            intval = int(value)
        except ValueError:
            self.raise_error(f"Parameter '{name}' must be a number.")

        return intval


    def get_float(self, name: str, default: Optional[float] = None) -> float:
        """ Return an input parameter as a floating-point number. Raises an
            exception if the parameter is given but not in a float format.

            If 'default' is given, then it will be returned when the parameter
            is missing completely. When 'default' is None, an error will be
            raised on a missing parameter.
        """
        value = self.get(name)

        if value is None:
            if default is not None:
                return default

            self.raise_error(f"Parameter '{name}' missing.")

        try:
            fval = float(value)
        except ValueError:
            self.raise_error(f"Parameter '{name}' must be a number.")

        # NaN and infinity parse as valid floats but are never valid input.
        if math.isnan(fval) or math.isinf(fval):
            self.raise_error(f"Parameter '{name}' must be a number.")

        return fval


    def get_bool(self, name: str, default: Optional[bool] = None) -> bool:
        """ Return an input parameter as bool. Only '0' is accepted as
            an input for 'false' all other inputs will be interpreted as 'true'.

            If 'default' is given, then it will be returned when the parameter
            is missing completely. When 'default' is None, an error will be
            raised on a missing parameter.
        """
        value = self.get(name)

        if value is None:
            if default is not None:
                return default

            self.raise_error(f"Parameter '{name}' missing.")

        return value != '0'


    def get_accepted_languages(self) -> str:
        """ Return the accepted languages.
        """
        # The explicit request parameter wins over the HTTP header,
        # which wins over the configured default.
        return self.get('accept-language')\
               or self.get_header('accept-language')\
               or self.config().DEFAULT_LANGUAGE


    def setup_debugging(self) -> bool:
        """ Set up collection of debug information if requested.

            Return True when debugging was requested.
        """
        if self.get_bool('debug', False):
            loglib.set_log_output('html')
            self.content_type = CONTENT_HTML
            return True

        return False


    def get_layers(self) -> Optional[DataLayer]:
        """ Return a parsed version of the layer parameter.
        """
        param = self.get('layer', None)
        if param is None:
            return None

        # Or-combine all comma-separated layer names into one flag value.
        return cast(DataLayer,
                    reduce(DataLayer.__or__,
                           (getattr(DataLayer, s.upper()) for s in param.split(','))))


    def parse_format(self, result_type: Type[Any], default: str) -> str:
        """ Get and check the 'format' parameter and prepare the formatter.
            `result_type` is the type of result to be returned by the function
            and `default` the format value to assume when no parameter is present.
        """
        fmt = self.get('format', default=default)
        assert fmt is not None

        if not formatting.supports_format(result_type, fmt):
            self.raise_error("Parameter 'format' must be one of: " +
                             ', '.join(formatting.list_formats(result_type)))

        self.content_type = CONTENT_TYPE.get(fmt, CONTENT_JSON)
        return fmt


    def parse_geometry_details(self, fmt: str) -> Dict[str, Any]:
        """ Create details structure from the supplied geometry parameters.
        """
        numgeoms = 0
        output = GeometryFormat.NONE
        if self.get_bool('polygon_geojson', False):
            output |= GeometryFormat.GEOJSON
            numgeoms += 1
        # The geojson-style output formats always render GeoJSON geometry,
        # so the other polygon flags are only honoured elsewhere.
        if fmt not in ('geojson', 'geocodejson'):
            if self.get_bool('polygon_text', False):
                output |= GeometryFormat.TEXT
                numgeoms += 1
            if self.get_bool('polygon_kml', False):
                output |= GeometryFormat.KML
                numgeoms += 1
            if self.get_bool('polygon_svg', False):
                output |= GeometryFormat.SVG
                numgeoms += 1

        if numgeoms > self.config().get_int('POLYGON_OUTPUT_MAX_TYPES'):
            self.raise_error('Too many polygon output options selected.')

        return {'address_details': True,
                'geometry_simplification': self.get_float('polygon_threshold', 0.0),
                'geometry_output': output
               }
|
||||
|
||||
|
||||
async def status_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
    """ Server glue for /status endpoint. See API docs for details.
    """
    result = await api.status()

    fmt = params.parse_format(StatusResult, 'text')

    # In text mode a failing status check is also signalled through the
    # HTTP status code.
    has_error = fmt == 'text' and bool(result.status)
    status_code = 500 if has_error else 200

    body = formatting.format_result(result, fmt, {})
    return params.build_response(body, status=status_code)
|
||||
|
||||
|
||||
async def details_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
    """ Server glue for /details endpoint. See API docs for details.
    """
    fmt = params.parse_format(DetailedResult, 'json')

    # A place may be addressed by internal place ID or by OSM type + id.
    place: PlaceRef
    place_id = params.get_int('place_id', 0)
    if place_id:
        place = PlaceID(place_id)
    else:
        osmtype = params.get('osmtype')
        if osmtype is None:
            params.raise_error("Missing ID parameter 'place_id' or 'osmtype'.")
        place = OsmID(osmtype, params.get_int('osmid'), params.get('class'))

    debug = params.setup_debugging()
    locales = Locales.from_accept_languages(params.get_accepted_languages())

    want_geometry = params.get_bool('polygon_geojson', False)
    geometry_output = GeometryFormat.GEOJSON if want_geometry else GeometryFormat.NONE

    result = await api.details(place,
                               address_details=params.get_bool('addressdetails', False),
                               linked_places=params.get_bool('linkedplaces', True),
                               parented_places=params.get_bool('hierarchy', False),
                               keywords=params.get_bool('keywords', False),
                               geometry_output=geometry_output,
                               locales=locales)

    if debug:
        return params.build_response(loglib.get_and_disable())

    if result is None:
        params.raise_error('No place with that OSM ID found.', status=404)

    fmt_options = {'locales': locales,
                   'group_hierarchy': params.get_bool('group_hierarchy', False),
                   'icon_base_url': params.config().MAPICON_URL}

    return params.build_response(formatting.format_result(result, fmt, fmt_options),
                                 num_results=1)
|
||||
|
||||
|
||||
async def reverse_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
    """ Server glue for /reverse endpoint. See API docs for details.
    """
    fmt = params.parse_format(ReverseResults, 'xml')
    debug = params.setup_debugging()
    coord = Point(params.get_float('lon'), params.get_float('lat'))

    details = params.parse_geometry_details(fmt)
    details['max_rank'] = helpers.zoom_to_rank(params.get_int('zoom', 18))
    details['layers'] = params.get_layers()
    details['locales'] = Locales.from_accept_languages(params.get_accepted_languages())

    result = await api.reverse(coord, **details)
    num_results = 1 if result else 0

    if debug:
        return params.build_response(loglib.get_and_disable(), num_results=num_results)

    # XML output echoes the effective query parameters back to the client.
    query = ''
    if fmt == 'xml':
        queryparts = {'lat': str(coord.lat), 'lon': str(coord.lon), 'format': 'xml'}
        zoom = params.get('zoom', None)
        if zoom:
            queryparts['zoom'] = zoom
        query = urlencode(queryparts)

    fmt_options = {'query': query,
                   'extratags': params.get_bool('extratags', False),
                   'namedetails': params.get_bool('namedetails', False),
                   'addressdetails': params.get_bool('addressdetails', True)}

    wrapped = ReverseResults([result] if result else [])
    output = formatting.format_result(wrapped, fmt, fmt_options)

    return params.build_response(output, num_results=num_results)
|
||||
|
||||
|
||||
async def lookup_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
    """ Server glue for /lookup endpoint. See API docs for details.
    """
    fmt = params.parse_format(SearchResults, 'xml')
    debug = params.setup_debugging()
    details = params.parse_geometry_details(fmt)
    details['locales'] = Locales.from_accept_languages(params.get_accepted_languages())

    # Accept IDs of the form [NWR]<number>; malformed entries are
    # silently dropped.
    raw_ids = (params.get('osm_ids') or '').split(',')
    places = [OsmID(oid[0].upper(), int(oid[1:]))
              for oid in map(str.strip, raw_ids)
              if len(oid) > 1 and oid[0] in 'RNWrnw' and oid[1:].isdigit()]

    if len(places) > params.config().get_int('LOOKUP_MAX_COUNT'):
        params.raise_error('Too many object IDs.')

    results = await api.lookup(places, **details) if places else SearchResults()

    if debug:
        return params.build_response(loglib.get_and_disable(), num_results=len(results))

    fmt_options = {'extratags': params.get_bool('extratags', False),
                   'namedetails': params.get_bool('namedetails', False),
                   'addressdetails': params.get_bool('addressdetails', True)}

    output = formatting.format_result(results, fmt, fmt_options)

    return params.build_response(output, num_results=len(results))
|
||||
|
||||
|
||||
async def _unstructured_search(query: str, api: NominatimAPIAsync,
                               details: Dict[str, Any]) -> SearchResults:
    """ Run a free-text search, handling the special query forms supported
        by the v1 API: embedded coordinates and '[class=type]' category
        filters. 'details' may be modified in place.
    """
    if not query:
        return SearchResults()

    # Extract special format for coordinates from query.
    query, x, y = helpers.extract_coords_from_query(query)
    if x is not None:
        assert y is not None
        details['near'] = Point(x, y)
        details['near_radius'] = 0.1

    # If no query is left, revert to reverse search.
    if x is not None and not query:
        result = await api.reverse(details['near'], **details)
        if not result:
            return SearchResults()

        # Copy the ReverseResult into a SearchResult field by field so
        # callers receive a uniform result type.
        return SearchResults(
                   [SearchResult(**{f.name: getattr(result, f.name)
                                    for f in dataclasses.fields(SearchResult)
                                    if hasattr(result, f.name)})])

    query, cls, typ = helpers.extract_category_from_query(query)
    if cls is not None:
        assert typ is not None
        return await api.search_category([(cls, typ)], near_query=query, **details)

    return await api.search(query, **details)
|
||||
|
||||
|
||||
async def search_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
    """ Server glue for /search endpoint. See API docs for details.

        Supports both a free-text query ('q') and structured query
        parameters; using both at once is an error.
    """
    fmt = params.parse_format(SearchResults, 'jsonv2')
    debug = params.setup_debugging()
    details = params.parse_geometry_details(fmt)

    details['countries'] = params.get('countrycodes', None)
    details['excluded'] = params.get('exclude_place_ids', None)
    details['viewbox'] = params.get('viewbox', None) or params.get('viewboxlbrt', None)
    details['bounded_viewbox'] = params.get_bool('bounded', False)
    details['dedupe'] = params.get_bool('dedupe', True)

    max_results = max(1, min(50, params.get_int('limit', 10)))
    # Request extra results when deduplication may remove some later.
    details['max_results'] = max_results + min(10, max_results) \
                             if details['dedupe'] else max_results

    details['min_rank'], details['max_rank'] = \
        helpers.feature_type_to_rank(params.get('featureType', ''))
    if params.get('featureType', None) is not None:
        details['layers'] = DataLayer.ADDRESS
    else:
        details['layers'] = params.get_layers()

    details['locales'] = Locales.from_accept_languages(params.get_accepted_languages())

    # unstructured query parameters
    query = params.get('q', None)
    # structured query parameters
    queryparts = {}
    for key in ('amenity', 'street', 'city', 'county', 'state', 'postalcode', 'country'):
        details[key] = params.get(key, None)
        if details[key]:
            queryparts[key] = details[key]

    try:
        if query is not None:
            if queryparts:
                # Fixed: the original message was missing the space between
                # 'parameters' and the parenthesised list.
                params.raise_error("Structured query parameters "
                                   "(amenity, street, city, county, state, postalcode, country)"
                                   " cannot be used together with 'q' parameter.")
            queryparts['q'] = query
            results = await _unstructured_search(query, api, details)
        else:
            query = ', '.join(queryparts.values())

            results = await api.search_address(**details)
    except UsageError as err:
        params.raise_error(str(err))

    if details['dedupe'] and len(results) > 1:
        results = helpers.deduplicate_results(results, max_results)

    if debug:
        return params.build_response(loglib.get_and_disable(), num_results=len(results))

    if fmt == 'xml':
        # Rebuild the full parameter set for the 'more results' URL.
        helpers.extend_query_parts(queryparts, details,
                                   params.get('featureType', ''),
                                   params.get_bool('namedetails', False),
                                   params.get_bool('extratags', False),
                                   (str(r.place_id) for r in results if r.place_id))
        queryparts['format'] = fmt

        moreurl = params.base_uri() + '/search?' + urlencode(queryparts)
    else:
        moreurl = ''

    fmt_options = {'query': query, 'more_url': moreurl,
                   'exclude_place_ids': queryparts.get('exclude_place_ids'),
                   'viewbox': queryparts.get('viewbox'),
                   'extratags': params.get_bool('extratags', False),
                   'namedetails': params.get_bool('namedetails', False),
                   'addressdetails': params.get_bool('addressdetails', False)}

    output = formatting.format_result(results, fmt, fmt_options)

    return params.build_response(output, num_results=len(results))
|
||||
|
||||
|
||||
async def deletable_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
    """ Server glue for /deletable endpoint.
        This is a special endpoint that shows polygons that have been
        deleted or are broken in the OSM data but are kept in the
        Nominatim database to minimize disruption.
    """
    fmt = params.parse_format(RawDataList, 'json')

    async with api.begin() as conn:
        # Join the kept places with the deletion log to report the rows
        # still present in 'placex'.
        sql = sa.text(""" SELECT p.place_id, country_code,
                                 name->'name' as name, i.*
                          FROM placex p, import_polygon_delete i
                          WHERE p.osm_id = i.osm_id AND p.osm_type = i.osm_type
                                AND p.class = i.class AND p.type = i.type
                      """)
        results = RawDataList(r._asdict() for r in await conn.execute(sql))

    return params.build_response(formatting.format_result(results, fmt, {}))
|
||||
|
||||
|
||||
async def polygons_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
    """ Server glue for /polygons endpoint.
        This is a special endpoint that shows polygons that have changed
        their size but are kept in the Nominatim database with their
        old area to minimize disruption.
    """
    fmt = params.parse_format(RawDataList, 'json')
    # Bind parameters for the SQL below; 'days' <= 0 disables the
    # time-based filter.
    sql_params: Dict[str, Any] = {
        'days': params.get_int('days', -1),
        'cls': params.get('class')
    }
    reduced = params.get_bool('reduced', False)

    async with api.begin() as conn:
        sql = sa.select(sa.text("""osm_type, osm_id, class, type,
                                   name->'name' as name,
                                   country_code, errormessage, updated"""))\
                .select_from(sa.text('import_polygon_error'))
        # WHERE clauses are added only for filters that were requested.
        if sql_params['days'] > 0:
            sql = sql.where(sa.text("updated > 'now'::timestamp - make_interval(days => :days)"))
        if reduced:
            sql = sql.where(sa.text("errormessage like 'Area reduced%'"))
        if sql_params['cls'] is not None:
            sql = sql.where(sa.text("class = :cls"))

        # Newest entries first, capped to keep the response bounded.
        sql = sql.order_by(sa.literal_column('updated').desc()).limit(1000)

        results = RawDataList(r._asdict() for r in await conn.execute(sql, sql_params))

    return params.build_response(formatting.format_result(results, fmt, {}))
|
||||
|
||||
|
||||
# Signature shared by all endpoint implementations in this module.
EndpointFunc = Callable[[NominatimAPIAsync, ASGIAdaptor], Any]

# (endpoint name, handler) pairs making up the v1 API; the framework
# scaffolding registers one route per entry.
ROUTES = [
    ('status', status_endpoint),
    ('details', details_endpoint),
    ('reverse', reverse_endpoint),
    ('lookup', lookup_endpoint),
    ('search', search_endpoint),
    ('deletable', deletable_endpoint),
    ('polygons', polygons_endpoint),
]
|
||||
Reference in New Issue
Block a user