Merge pull request #3233 from lonvia/support-for-sqlite

Add support for SQLite DBs in frontend: reverse
This commit is contained in:
Sarah Hoffmann
2023-10-24 11:54:35 +02:00
committed by GitHub
38 changed files with 967 additions and 222 deletions

View File

@@ -25,12 +25,12 @@ runs:
shell: bash shell: bash
- name: Install${{ matrix.flavour }} prerequisites - name: Install${{ matrix.flavour }} prerequisites
run: | run: |
sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson nlohmann-json3-dev sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev liblua${LUA_VERSION}-dev lua${LUA_VERSION} lua-dkjson nlohmann-json3-dev libspatialite7 libsqlite3-mod-spatialite
if [ "$FLAVOUR" == "oldstuff" ]; then if [ "$FLAVOUR" == "oldstuff" ]; then
pip3 install MarkupSafe==2.0.1 python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 sqlalchemy==1.4.31 datrie asyncpg pip3 install MarkupSafe==2.0.1 python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 sqlalchemy==1.4.31 datrie asyncpg aiosqlite
else else
sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml
pip3 install sqlalchemy psycopg pip3 install sqlalchemy psycopg aiosqlite
fi fi
shell: bash shell: bash
env: env:

View File

@@ -113,18 +113,21 @@ jobs:
if: matrix.flavour == 'oldstuff' if: matrix.flavour == 'oldstuff'
- name: Install Python webservers - name: Install Python webservers
run: pip3 install falcon starlette run: pip3 install falcon starlette asgi_lifespan
- name: Install latest pylint - name: Install latest pylint
run: pip3 install -U pylint asgi_lifespan run: pip3 install -U pylint
if: matrix.flavour != 'oldstuff'
- name: PHP linting - name: PHP linting
run: phpcs --report-width=120 . run: phpcs --report-width=120 .
working-directory: Nominatim working-directory: Nominatim
if: matrix.flavour != 'oldstuff'
- name: Python linting - name: Python linting
run: python3 -m pylint nominatim run: python3 -m pylint nominatim
working-directory: Nominatim working-directory: Nominatim
if: matrix.flavour != 'oldstuff'
- name: PHP unit tests - name: PHP unit tests
run: phpunit ./ run: phpunit ./

View File

@@ -81,21 +81,34 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
if self._engine: if self._engine:
return return
dsn = self.config.get_database_params() extra_args: Dict[str, Any] = {'future': True,
pool_size = self.config.get_int('API_POOL_SIZE') 'echo': self.config.get_bool('DEBUG_SQL')}
query = {k: v for k, v in dsn.items() is_sqlite = self.config.DATABASE_DSN.startswith('sqlite:')
if k not in ('user', 'password', 'dbname', 'host', 'port')}
dburl = sa.engine.URL.create( if is_sqlite:
f'postgresql+{PGCORE_LIB}', params = dict((p.split('=', 1)
database=dsn.get('dbname'), for p in self.config.DATABASE_DSN[7:].split(';')))
username=dsn.get('user'), password=dsn.get('password'), dburl = sa.engine.URL.create('sqlite+aiosqlite',
host=dsn.get('host'), port=int(dsn['port']) if 'port' in dsn else None, database=params.get('dbname'))
query=query)
engine = sa_asyncio.create_async_engine(dburl, future=True, else:
max_overflow=0, pool_size=pool_size, dsn = self.config.get_database_params()
echo=self.config.get_bool('DEBUG_SQL')) query = {k: v for k, v in dsn.items()
if k not in ('user', 'password', 'dbname', 'host', 'port')}
dburl = sa.engine.URL.create(
f'postgresql+{PGCORE_LIB}',
database=dsn.get('dbname'),
username=dsn.get('user'),
password=dsn.get('password'),
host=dsn.get('host'),
port=int(dsn['port']) if 'port' in dsn else None,
query=query)
extra_args['max_overflow'] = 0
extra_args['pool_size'] = self.config.get_int('API_POOL_SIZE')
engine = sa_asyncio.create_async_engine(dburl, **extra_args)
try: try:
async with engine.begin() as conn: async with engine.begin() as conn:
@@ -104,7 +117,7 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
except (PGCORE_ERROR, sa.exc.OperationalError): except (PGCORE_ERROR, sa.exc.OperationalError):
server_version = 0 server_version = 0
if server_version >= 110000: if server_version >= 110000 and not is_sqlite:
@sa.event.listens_for(engine.sync_engine, "connect") @sa.event.listens_for(engine.sync_engine, "connect")
def _on_connect(dbapi_con: Any, _: Any) -> None: def _on_connect(dbapi_con: Any, _: Any) -> None:
cursor = dbapi_con.cursor() cursor = dbapi_con.cursor()
@@ -113,6 +126,15 @@ class NominatimAPIAsync: #pylint: disable=too-many-instance-attributes
# Make sure that all connections get the new settings # Make sure that all connections get the new settings
await self.close() await self.close()
if is_sqlite:
@sa.event.listens_for(engine.sync_engine, "connect")
def _on_sqlite_connect(dbapi_con: Any, _: Any) -> None:
dbapi_con.run_async(lambda conn: conn.enable_load_extension(True))
cursor = dbapi_con.cursor()
cursor.execute("SELECT load_extension('mod_spatialite')")
cursor.execute('SELECT SetDecimalPrecision(7)')
dbapi_con.run_async(lambda conn: conn.enable_load_extension(False))
self._property_cache['DB:server_version'] = server_version self._property_cache['DB:server_version'] = server_version
self._tables = SearchTables(sa.MetaData(), engine.name) # pylint: disable=no-member self._tables = SearchTables(sa.MetaData(), engine.name) # pylint: disable=no-member

View File

@@ -77,8 +77,8 @@ async def find_in_osmline(conn: SearchConnection, place: ntyp.PlaceRef,
sql = sql.where(t.c.osm_id == place.osm_id).limit(1) sql = sql.where(t.c.osm_id == place.osm_id).limit(1)
if place.osm_class and place.osm_class.isdigit(): if place.osm_class and place.osm_class.isdigit():
sql = sql.order_by(sa.func.greatest(0, sql = sql.order_by(sa.func.greatest(0,
sa.func.least(int(place.osm_class) - t.c.endnumber), int(place.osm_class) - t.c.endnumber,
t.c.startnumber - int(place.osm_class))) t.c.startnumber - int(place.osm_class)))
else: else:
return None return None
@@ -163,11 +163,10 @@ async def get_detailed_place(conn: SearchConnection, place: ntyp.PlaceRef,
if details.geometry_output & ntyp.GeometryFormat.GEOJSON: if details.geometry_output & ntyp.GeometryFormat.GEOJSON:
def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect: def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect:
return sql.add_columns(sa.literal_column(f""" return sql.add_columns(sa.func.ST_AsGeoJSON(
ST_AsGeoJSON(CASE WHEN ST_NPoints({column.name}) > 5000 sa.case((sa.func.ST_NPoints(column) > 5000,
THEN ST_SimplifyPreserveTopology({column.name}, 0.0001) sa.func.ST_SimplifyPreserveTopology(column, 0.0001)),
ELSE {column.name} END) else_=column), 7).label('geometry_geojson'))
""").label('geometry_geojson'))
else: else:
def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect: def _add_geometry(sql: SaSelect, column: SaColumn) -> SaSelect:
return sql.add_columns(sa.func.ST_GeometryType(column).label('geometry_type')) return sql.add_columns(sa.func.ST_GeometryType(column).label('geometry_type'))
@@ -183,6 +182,9 @@ async def get_detailed_place(conn: SearchConnection, place: ntyp.PlaceRef,
# add missing details # add missing details
assert result is not None assert result is not None
if 'type' in result.geometry:
result.geometry['type'] = GEOMETRY_TYPE_MAP.get(result.geometry['type'],
result.geometry['type'])
indexed_date = getattr(row, 'indexed_date', None) indexed_date = getattr(row, 'indexed_date', None)
if indexed_date is not None: if indexed_date is not None:
result.indexed_date = indexed_date.replace(tzinfo=dt.timezone.utc) result.indexed_date = indexed_date.replace(tzinfo=dt.timezone.utc)
@@ -208,13 +210,13 @@ async def get_simple_place(conn: SearchConnection, place: ntyp.PlaceRef,
col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification) col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification)
if details.geometry_output & ntyp.GeometryFormat.GEOJSON: if details.geometry_output & ntyp.GeometryFormat.GEOJSON:
out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson')) out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
if details.geometry_output & ntyp.GeometryFormat.TEXT: if details.geometry_output & ntyp.GeometryFormat.TEXT:
out.append(sa.func.ST_AsText(col).label('geometry_text')) out.append(sa.func.ST_AsText(col).label('geometry_text'))
if details.geometry_output & ntyp.GeometryFormat.KML: if details.geometry_output & ntyp.GeometryFormat.KML:
out.append(sa.func.ST_AsKML(col).label('geometry_kml')) out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
if details.geometry_output & ntyp.GeometryFormat.SVG: if details.geometry_output & ntyp.GeometryFormat.SVG:
out.append(sa.func.ST_AsSVG(col).label('geometry_svg')) out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
return sql.add_columns(*out) return sql.add_columns(*out)
@@ -236,3 +238,14 @@ async def get_simple_place(conn: SearchConnection, place: ntyp.PlaceRef,
await nres.add_result_details(conn, [result], details) await nres.add_result_details(conn, [result], details)
return result return result
# Translation table from SpatiaLite geometry type names (as returned by
# ST_GeometryType) to the PostGIS-style 'ST_'-prefixed names that the
# rest of the API expects.
GEOMETRY_TYPE_MAP = {
    typ.upper(): f'ST_{typ}'
    for typ in ('Point', 'MultiPoint', 'LineString', 'MultiLineString',
                'Polygon', 'MultiPolygon', 'GeometryCollection')
}

View File

@@ -19,7 +19,7 @@ import datetime as dt
import sqlalchemy as sa import sqlalchemy as sa
from nominatim.typing import SaSelect, SaRow from nominatim.typing import SaSelect, SaRow
from nominatim.db.sqlalchemy_functions import CrosscheckNames from nominatim.db.sqlalchemy_types import Geometry
from nominatim.api.types import Point, Bbox, LookupDetails from nominatim.api.types import Point, Bbox, LookupDetails
from nominatim.api.connection import SearchConnection from nominatim.api.connection import SearchConnection
from nominatim.api.logging import log from nominatim.api.logging import log
@@ -589,7 +589,7 @@ async def complete_address_details(conn: SearchConnection, results: List[BaseRes
if not lookup_ids: if not lookup_ids:
return return
ltab = sa.func.json_array_elements(sa.type_coerce(lookup_ids, sa.JSON))\ ltab = sa.func.JsonArrayEach(sa.type_coerce(lookup_ids, sa.JSON))\
.table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call] .table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call]
t = conn.t.placex t = conn.t.placex
@@ -608,7 +608,7 @@ async def complete_address_details(conn: SearchConnection, results: List[BaseRes
.order_by('src_place_id')\ .order_by('src_place_id')\
.order_by(sa.column('rank_address').desc())\ .order_by(sa.column('rank_address').desc())\
.order_by((taddr.c.place_id == ltab.c.value['pid'].as_integer()).desc())\ .order_by((taddr.c.place_id == ltab.c.value['pid'].as_integer()).desc())\
.order_by(sa.case((CrosscheckNames(t.c.name, ltab.c.value['names']), 2), .order_by(sa.case((sa.func.CrosscheckNames(t.c.name, ltab.c.value['names']), 2),
(taddr.c.isaddress, 0), (taddr.c.isaddress, 0),
(sa.and_(taddr.c.fromarea, (sa.and_(taddr.c.fromarea,
t.c.geometry.ST_Contains( t.c.geometry.ST_Contains(
@@ -652,7 +652,7 @@ async def complete_address_details(conn: SearchConnection, results: List[BaseRes
parent_lookup_ids = list(filter(lambda e: e['pid'] != e['lid'], lookup_ids)) parent_lookup_ids = list(filter(lambda e: e['pid'] != e['lid'], lookup_ids))
if parent_lookup_ids: if parent_lookup_ids:
ltab = sa.func.json_array_elements(sa.type_coerce(parent_lookup_ids, sa.JSON))\ ltab = sa.func.JsonArrayEach(sa.type_coerce(parent_lookup_ids, sa.JSON))\
.table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call] .table_valued(sa.column('value', type_=sa.JSON)) # type: ignore[no-untyped-call]
sql = sa.select(ltab.c.value['pid'].as_integer().label('src_place_id'), sql = sa.select(ltab.c.value['pid'].as_integer().label('src_place_id'),
t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name, t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
@@ -687,14 +687,10 @@ def _placex_select_address_row(conn: SearchConnection,
return sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name, return sa.select(t.c.place_id, t.c.osm_type, t.c.osm_id, t.c.name,
t.c.class_.label('class'), t.c.type, t.c.class_.label('class'), t.c.type,
t.c.admin_level, t.c.housenumber, t.c.admin_level, t.c.housenumber,
sa.literal_column("""ST_GeometryType(geometry) in t.c.geometry.is_area().label('fromarea'),
('ST_Polygon','ST_MultiPolygon')""").label('fromarea'),
t.c.rank_address, t.c.rank_address,
sa.literal_column( t.c.geometry.distance_spheroid(
f"""ST_DistanceSpheroid(geometry, sa.bindparam('centroid', value=centroid, type_=Geometry)).label('distance'))
'SRID=4326;{centroid.to_wkt()}'::geometry,
'SPHEROID["WGS 84",6378137,298.257223563, AUTHORITY["EPSG","7030"]]')
""").label('distance'))
async def complete_linked_places(conn: SearchConnection, result: BaseResult) -> None: async def complete_linked_places(conn: SearchConnection, result: BaseResult) -> None:
@@ -728,10 +724,10 @@ async def complete_keywords(conn: SearchConnection, result: BaseResult) -> None:
sel = sa.select(t.c.word_id, t.c.word_token, t.c.word) sel = sa.select(t.c.word_id, t.c.word_token, t.c.word)
for name_tokens, address_tokens in await conn.execute(sql): for name_tokens, address_tokens in await conn.execute(sql):
for row in await conn.execute(sel.where(t.c.word_id == sa.any_(name_tokens))): for row in await conn.execute(sel.where(t.c.word_id.in_(name_tokens))):
result.name_keywords.append(WordInfo(*row)) result.name_keywords.append(WordInfo(*row))
for row in await conn.execute(sel.where(t.c.word_id == sa.any_(address_tokens))): for row in await conn.execute(sel.where(t.c.word_id.in_(address_tokens))):
result.address_keywords.append(WordInfo(*row)) result.address_keywords.append(WordInfo(*row))

View File

@@ -19,7 +19,6 @@ import nominatim.api.results as nres
from nominatim.api.logging import log from nominatim.api.logging import log
from nominatim.api.types import AnyPoint, DataLayer, ReverseDetails, GeometryFormat, Bbox from nominatim.api.types import AnyPoint, DataLayer, ReverseDetails, GeometryFormat, Bbox
from nominatim.db.sqlalchemy_types import Geometry from nominatim.db.sqlalchemy_types import Geometry
import nominatim.db.sqlalchemy_functions as snfn
# In SQLAlchemy expression which compare with NULL need to be expressed with # In SQLAlchemy expression which compare with NULL need to be expressed with
# the equal sign. # the equal sign.
@@ -85,12 +84,6 @@ def _locate_interpolation(table: SaFromClause) -> SaLabel:
else_=0).label('position') else_=0).label('position')
def _is_address_point(table: SaFromClause) -> SaColumn:
return sa.and_(table.c.rank_address == 30,
sa.or_(table.c.housenumber != None,
table.c.name.has_key('addr:housename')))
def _get_closest(*rows: Optional[SaRow]) -> Optional[SaRow]: def _get_closest(*rows: Optional[SaRow]) -> Optional[SaRow]:
return min(rows, key=lambda row: 1000 if row is None else row.distance) return min(rows, key=lambda row: 1000 if row is None else row.distance)
@@ -147,13 +140,13 @@ class ReverseGeocoder:
col = sa.func.ST_SimplifyPreserveTopology(col, self.params.geometry_simplification) col = sa.func.ST_SimplifyPreserveTopology(col, self.params.geometry_simplification)
if self.params.geometry_output & GeometryFormat.GEOJSON: if self.params.geometry_output & GeometryFormat.GEOJSON:
out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson')) out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
if self.params.geometry_output & GeometryFormat.TEXT: if self.params.geometry_output & GeometryFormat.TEXT:
out.append(sa.func.ST_AsText(col).label('geometry_text')) out.append(sa.func.ST_AsText(col).label('geometry_text'))
if self.params.geometry_output & GeometryFormat.KML: if self.params.geometry_output & GeometryFormat.KML:
out.append(sa.func.ST_AsKML(col).label('geometry_kml')) out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
if self.params.geometry_output & GeometryFormat.SVG: if self.params.geometry_output & GeometryFormat.SVG:
out.append(sa.func.ST_AsSVG(col).label('geometry_svg')) out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
return sql.add_columns(*out) return sql.add_columns(*out)
@@ -204,7 +197,7 @@ class ReverseGeocoder:
max_rank = min(29, self.max_rank) max_rank = min(29, self.max_rank)
restrict.append(lambda: no_index(t.c.rank_address).between(26, max_rank)) restrict.append(lambda: no_index(t.c.rank_address).between(26, max_rank))
if self.max_rank == 30: if self.max_rank == 30:
restrict.append(lambda: _is_address_point(t)) restrict.append(lambda: sa.func.IsAddressPoint(t))
if self.layer_enabled(DataLayer.POI) and self.max_rank == 30: if self.layer_enabled(DataLayer.POI) and self.max_rank == 30:
restrict.append(lambda: sa.and_(no_index(t.c.rank_search) == 30, restrict.append(lambda: sa.and_(no_index(t.c.rank_search) == 30,
t.c.class_.not_in(('place', 'building')), t.c.class_.not_in(('place', 'building')),
@@ -228,7 +221,7 @@ class ReverseGeocoder:
sql: SaLambdaSelect = sa.lambda_stmt(lambda: _select_from_placex(t) sql: SaLambdaSelect = sa.lambda_stmt(lambda: _select_from_placex(t)
.where(t.c.geometry.ST_DWithin(WKT_PARAM, 0.001)) .where(t.c.geometry.ST_DWithin(WKT_PARAM, 0.001))
.where(t.c.parent_place_id == parent_place_id) .where(t.c.parent_place_id == parent_place_id)
.where(_is_address_point(t)) .where(sa.func.IsAddressPoint(t))
.where(t.c.indexed_status == 0) .where(t.c.indexed_status == 0)
.where(t.c.linked_place_id == None) .where(t.c.linked_place_id == None)
.order_by('distance') .order_by('distance')
@@ -371,7 +364,7 @@ class ReverseGeocoder:
inner = sa.select(t, sa.literal(0.0).label('distance'))\ inner = sa.select(t, sa.literal(0.0).label('distance'))\
.where(t.c.rank_search.between(5, MAX_RANK_PARAM))\ .where(t.c.rank_search.between(5, MAX_RANK_PARAM))\
.where(t.c.geometry.intersects(WKT_PARAM))\ .where(t.c.geometry.intersects(WKT_PARAM))\
.where(snfn.select_index_placex_geometry_reverse_lookuppolygon('placex'))\ .where(sa.func.PlacexGeometryReverseLookuppolygon())\
.order_by(sa.desc(t.c.rank_search))\ .order_by(sa.desc(t.c.rank_search))\
.limit(50)\ .limit(50)\
.subquery('area') .subquery('area')
@@ -401,10 +394,7 @@ class ReverseGeocoder:
.where(t.c.rank_search > address_rank)\ .where(t.c.rank_search > address_rank)\
.where(t.c.rank_search <= MAX_RANK_PARAM)\ .where(t.c.rank_search <= MAX_RANK_PARAM)\
.where(t.c.indexed_status == 0)\ .where(t.c.indexed_status == 0)\
.where(snfn.select_index_placex_geometry_reverse_lookupplacenode('placex'))\ .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
.where(t.c.geometry
.ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
.intersects(WKT_PARAM))\
.order_by(sa.desc(t.c.rank_search))\ .order_by(sa.desc(t.c.rank_search))\
.limit(50)\ .limit(50)\
.subquery('places') .subquery('places')
@@ -413,7 +403,7 @@ class ReverseGeocoder:
return _select_from_placex(inner, False)\ return _select_from_placex(inner, False)\
.join(touter, touter.c.geometry.ST_Contains(inner.c.geometry))\ .join(touter, touter.c.geometry.ST_Contains(inner.c.geometry))\
.where(touter.c.place_id == address_id)\ .where(touter.c.place_id == address_id)\
.where(inner.c.distance < sa.func.reverse_place_diameter(inner.c.rank_search))\ .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
.order_by(sa.desc(inner.c.rank_search), inner.c.distance)\ .order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
.limit(1) .limit(1)
@@ -440,10 +430,9 @@ class ReverseGeocoder:
.where(t.c.indexed_status == 0)\ .where(t.c.indexed_status == 0)\
.where(t.c.linked_place_id == None)\ .where(t.c.linked_place_id == None)\
.where(self._filter_by_layer(t))\ .where(self._filter_by_layer(t))\
.where(t.c.geometry .where(t.c.geometry.intersects(sa.func.ST_Expand(WKT_PARAM, 0.007)))\
.ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
.intersects(WKT_PARAM))\
.order_by(sa.desc(t.c.rank_search))\ .order_by(sa.desc(t.c.rank_search))\
.order_by('distance')\
.limit(50)\ .limit(50)\
.subquery() .subquery()
@@ -514,16 +503,13 @@ class ReverseGeocoder:
.where(t.c.rank_search <= MAX_RANK_PARAM)\ .where(t.c.rank_search <= MAX_RANK_PARAM)\
.where(t.c.indexed_status == 0)\ .where(t.c.indexed_status == 0)\
.where(t.c.country_code.in_(ccodes))\ .where(t.c.country_code.in_(ccodes))\
.where(snfn.select_index_placex_geometry_reverse_lookupplacenode('placex'))\ .where(sa.func.IntersectsReverseDistance(t, WKT_PARAM))\
.where(t.c.geometry
.ST_Buffer(sa.func.reverse_place_diameter(t.c.rank_search))
.intersects(WKT_PARAM))\
.order_by(sa.desc(t.c.rank_search))\ .order_by(sa.desc(t.c.rank_search))\
.limit(50)\ .limit(50)\
.subquery('area') .subquery('area')
return _select_from_placex(inner, False)\ return _select_from_placex(inner, False)\
.where(inner.c.distance < sa.func.reverse_place_diameter(inner.c.rank_search))\ .where(sa.func.IsBelowReverseDistance(inner.c.distance, inner.c.rank_search))\
.order_by(sa.desc(inner.c.rank_search), inner.c.distance)\ .order_by(sa.desc(inner.c.rank_search), inner.c.distance)\
.limit(1) .limit(1)

View File

@@ -73,13 +73,13 @@ def _add_geometry_columns(sql: SaLambdaSelect, col: SaColumn, details: SearchDet
col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification) col = sa.func.ST_SimplifyPreserveTopology(col, details.geometry_simplification)
if details.geometry_output & GeometryFormat.GEOJSON: if details.geometry_output & GeometryFormat.GEOJSON:
out.append(sa.func.ST_AsGeoJSON(col).label('geometry_geojson')) out.append(sa.func.ST_AsGeoJSON(col, 7).label('geometry_geojson'))
if details.geometry_output & GeometryFormat.TEXT: if details.geometry_output & GeometryFormat.TEXT:
out.append(sa.func.ST_AsText(col).label('geometry_text')) out.append(sa.func.ST_AsText(col).label('geometry_text'))
if details.geometry_output & GeometryFormat.KML: if details.geometry_output & GeometryFormat.KML:
out.append(sa.func.ST_AsKML(col).label('geometry_kml')) out.append(sa.func.ST_AsKML(col, 7).label('geometry_kml'))
if details.geometry_output & GeometryFormat.SVG: if details.geometry_output & GeometryFormat.SVG:
out.append(sa.func.ST_AsSVG(col).label('geometry_svg')) out.append(sa.func.ST_AsSVG(col, 0, 7).label('geometry_svg'))
return sql.add_columns(*out) return sql.add_columns(*out)

View File

@@ -36,6 +36,9 @@ async def get_status(conn: SearchConnection) -> StatusResult:
sql = sa.select(conn.t.import_status.c.lastimportdate).limit(1) sql = sa.select(conn.t.import_status.c.lastimportdate).limit(1)
status.data_updated = await conn.scalar(sql) status.data_updated = await conn.scalar(sql)
if status.data_updated is not None:
status.data_updated = status.data_updated.replace(tzinfo=dt.timezone.utc)
# Database version # Database version
try: try:
verstr = await conn.get_property('database_version') verstr = await conn.get_property('database_version')

View File

@@ -206,6 +206,7 @@ def get_set_parser() -> CommandlineParser:
parser.add_subcommand('admin', clicmd.AdminFuncs()) parser.add_subcommand('admin', clicmd.AdminFuncs())
parser.add_subcommand('export', clicmd.QueryExport()) parser.add_subcommand('export', clicmd.QueryExport())
parser.add_subcommand('convert', clicmd.ConvertDB())
parser.add_subcommand('serve', AdminServe()) parser.add_subcommand('serve', AdminServe())
parser.add_subcommand('search', clicmd.APISearch()) parser.add_subcommand('search', clicmd.APISearch())

View File

@@ -25,3 +25,4 @@ from nominatim.clicmd.admin import AdminFuncs as AdminFuncs
from nominatim.clicmd.freeze import SetupFreeze as SetupFreeze from nominatim.clicmd.freeze import SetupFreeze as SetupFreeze
from nominatim.clicmd.special_phrases import ImportSpecialPhrases as ImportSpecialPhrases from nominatim.clicmd.special_phrases import ImportSpecialPhrases as ImportSpecialPhrases
from nominatim.clicmd.export import QueryExport as QueryExport from nominatim.clicmd.export import QueryExport as QueryExport
from nominatim.clicmd.convert import ConvertDB as ConvertDB

View File

@@ -101,6 +101,9 @@ class NominatimArgs:
language: Optional[str] language: Optional[str]
restrict_to_country: Optional[str] restrict_to_country: Optional[str]
# Arguments to 'convert'
output: Path
# Arguments to 'refresh' # Arguments to 'refresh'
postcodes: bool postcodes: bool
word_tokens: bool word_tokens: bool

View File

@@ -0,0 +1,95 @@
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2023 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Implementation of the 'convert' subcommand.
"""
from typing import Set, Any, Union, Optional, Sequence
import argparse
import asyncio
from pathlib import Path
from nominatim.clicmd.args import NominatimArgs
from nominatim.errors import UsageError
# Do not repeat documentation of subcommand classes.
# pylint: disable=C0111
# Using non-top-level imports to avoid eventually unused imports.
# pylint: disable=E0012,C0415
class WithAction(argparse.Action):
    """ Special action that saves a list of flags, given on the command-line
        as `--with-foo` or `--without-foo`.
    """
    def __init__(self, option_strings: Sequence[str], dest: Any,
                 default: bool = True, **kwargs: Any) -> None:
        if 'nargs' in kwargs:
            raise ValueError("nargs not allowed.")
        if option_strings is None:
            raise ValueError("Positional parameter not allowed.")

        # Shared set owned by the subcommand object; parsing mutates it
        # in place as --with-*/--without-* switches are seen.
        self.dest_set = kwargs.pop('dest_set')

        expanded = []
        for option in option_strings:
            if not option.startswith('--'):
                raise ValueError("short-form options not allowed")
            flag = option[2:]
            if default:
                self.dest_set.add(flag)
            expanded += [f"--with-{flag}", f"--without-{flag}"]

        # dest is SUPPRESS: the flag set is the only record of the choice.
        super().__init__(expanded, argparse.SUPPRESS, nargs=0, **kwargs)


    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
                 values: Union[str, Sequence[Any], None],
                 option_string: Optional[str] = None) -> None:
        assert option_string
        if option_string.startswith('--with-'):
            self.dest_set.add(option_string[7:])
        elif option_string.startswith('--without-'):
            self.dest_set.discard(option_string[10:])
class ConvertDB:
    """ Convert an existing database into a different format. (EXPERIMENTAL)

        Dump a read-only version of the database in a different format.
        At the moment only a SQLite database suitable for reverse lookup
        can be created.
    """

    def __init__(self) -> None:
        # Feature flags collected from --with-*/--without-* switches
        # (mutated in place by WithAction during parsing).
        self.options: Set[str] = set()


    def add_args(self, parser: argparse.ArgumentParser) -> None:
        parser.add_argument('--format', default='sqlite',
                            choices=('sqlite', ),
                            help='Format of the output database (must be sqlite currently)')
        parser.add_argument('--output', '-o', required=True, type=Path,
                            help='File to write the database to.')
        # Note the leading space in the second literal: without it the
        # group title rendered as "layout(currently no effect)".
        group = parser.add_argument_group('Switches to define database layout'
                                          ' (currently no effect)')
        group.add_argument('--reverse', action=WithAction, dest_set=self.options, default=True,
                           help='Enable/disable support for reverse and lookup API'
                                ' (default: enabled)')
        group.add_argument('--search', action=WithAction, dest_set=self.options, default=False,
                           help='Enable/disable support for search API (default: disabled)')
        group.add_argument('--details', action=WithAction, dest_set=self.options, default=True,
                           help='Enable/disable support for details API (default: enabled)')


    def run(self, args: NominatimArgs) -> int:
        """ Execute the conversion.

            Refuses to overwrite an existing output file. Returns 0 on
            success and 1 when the requested format is not supported.
        """
        if args.output.exists():
            raise UsageError(f"File '{args.output}' already exists. Refusing to overwrite.")

        if args.format != 'sqlite':
            return 1

        # Import lazily so the converter's dependencies are only needed
        # when this subcommand actually runs.
        from ..tools import convert_sqlite

        asyncio.run(convert_sqlite.convert(args.project_dir, args.output, self.options))
        return 0

View File

@@ -7,27 +7,115 @@
""" """
Custom functions and expressions for SQLAlchemy. Custom functions and expressions for SQLAlchemy.
""" """
from __future__ import annotations
from typing import Any from typing import Any
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy.sql.expression import FunctionElement
from sqlalchemy.ext.compiler import compiles from sqlalchemy.ext.compiler import compiles
from nominatim.typing import SaColumn from nominatim.typing import SaColumn
# pylint: disable=abstract-method,missing-function-docstring,consider-using-f-string # pylint: disable=all
def select_index_placex_geometry_reverse_lookuppolygon(table: str) -> 'sa.TextClause': class PlacexGeometryReverseLookuppolygon(sa.sql.functions.GenericFunction[Any]):
""" Create an expression with the necessary conditions over a placex """ Check for conditions that allow partial index use on
table that the index 'idx_placex_geometry_reverse_lookupPolygon' 'idx_placex_geometry_reverse_lookupPolygon'.
can be used.
Needs to be constant, so that the query planner picks them up correctly
in prepared statements.
""" """
return sa.text(f"ST_GeometryType({table}.geometry) in ('ST_Polygon', 'ST_MultiPolygon')" name = 'PlacexGeometryReverseLookuppolygon'
f" AND {table}.rank_address between 4 and 25" inherit_cache = True
f" AND {table}.type != 'postcode'"
f" AND {table}.name is not null"
f" AND {table}.indexed_status = 0" @compiles(PlacexGeometryReverseLookuppolygon) # type: ignore[no-untyped-call, misc]
f" AND {table}.linked_place_id is null") def _default_intersects(element: SaColumn,
compiler: 'sa.Compiled', **kw: Any) -> str:
return ("(ST_GeometryType(placex.geometry) in ('ST_Polygon', 'ST_MultiPolygon')"
" AND placex.rank_address between 4 and 25"
" AND placex.type != 'postcode'"
" AND placex.name is not null"
" AND placex.indexed_status = 0"
" AND placex.linked_place_id is null)")
# SQLite/SpatiaLite rendering of PlacexGeometryReverseLookuppolygon.
# Same filter as the default variant except that SpatiaLite's
# ST_GeometryType returns unprefixed names ('POLYGON' instead of
# 'ST_Polygon').
@compiles(PlacexGeometryReverseLookuppolygon, 'sqlite') # type: ignore[no-untyped-call, misc]
def _sqlite_intersects(element: SaColumn,
                       compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Render the reverse-lookup polygon filter for SQLite. """
    return ("(ST_GeometryType(placex.geometry) in ('POLYGON', 'MULTIPOLYGON')"
            " AND placex.rank_address between 4 and 25"
            " AND placex.type != 'postcode'"
            " AND placex.name is not null"
            " AND placex.indexed_status = 0"
            " AND placex.linked_place_id is null)")
class IntersectsReverseDistance(sa.sql.functions.GenericFunction[Any]):
    """ Check if a geometry is within the rank-dependent search diameter
        of rows of a place table. Rendering is dialect-specific; the
        table name is carried on the instance so compilers can emit
        literal column references.
    """
    name = 'IntersectsReverseDistance'
    inherit_cache = True

    def __init__(self, table: sa.Table, geom: SaColumn) -> None:
        # Clauses passed to the function: geometry column, search rank
        # column and the probe geometry, in that order.
        super().__init__(table.c.geometry, # type: ignore[no-untyped-call]
                         table.c.rank_search, geom)
        # Remembered for the compilers, which reference further columns
        # of the table (type, name, osm_type, ...) by name.
        self.tablename = table.name
@compiles(IntersectsReverseDistance) # type: ignore[no-untyped-call, misc]
def default_reverse_place_diameter(element: SaColumn,
                                   compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Default (PostgreSQL) rendering of IntersectsReverseDistance.

        Filters place nodes and tests the probe geometry against a
        buffer whose radius is computed server-side by the
        reverse_place_diameter() SQL function.
    """
    table = element.tablename
    # The three %s placeholders receive, in order: the geometry column,
    # the rank_search column and the probe geometry (see __init__).
    return f"({table}.rank_address between 4 and 25"\
           f" AND {table}.type != 'postcode'"\
           f" AND {table}.name is not null"\
           f" AND {table}.linked_place_id is null"\
           f" AND {table}.osm_type = 'N'" + \
           " AND ST_Buffer(%s, reverse_place_diameter(%s)) && %s)" % \
           tuple(map(lambda c: compiler.process(c, **kw), element.clauses))
@compiles(IntersectsReverseDistance, 'sqlite') # type: ignore[no-untyped-call, misc]
def sqlite_reverse_place_diameter(element: SaColumn,
                                  compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite/SpatiaLite rendering of IntersectsReverseDistance.

        Inlines the diameter formula (no server-side function available)
        and adds an explicit SpatialIndex subquery, because SpatiaLite
        does not use its R*tree index automatically.
    """
    geom1, rank, geom2 = list(element.clauses)
    table = element.tablename

    # '14.0 * exp(-0.2 * rank) - 0.03' mirrors the PostgreSQL
    # reverse_place_diameter() function -- keep the two in sync.
    return (f"({table}.rank_address between 4 and 25"\
            f" AND {table}.type != 'postcode'"\
            f" AND {table}.name is not null"\
            f" AND {table}.linked_place_id is null"\
            f" AND {table}.osm_type = 'N'"\
            " AND MbrIntersects(%s, ST_Expand(%s, 14.0 * exp(-0.2 * %s) - 0.03))"\
            f" AND {table}.place_id IN"\
            " (SELECT place_id FROM placex_place_node_areas"\
            " WHERE ROWID IN (SELECT ROWID FROM SpatialIndex"\
            " WHERE f_table_name = 'placex_place_node_areas'"\
            " AND search_frame = %s)))") % (
                compiler.process(geom1, **kw),
                compiler.process(geom2, **kw),
                compiler.process(rank, **kw),
                compiler.process(geom2, **kw))
class IsBelowReverseDistance(sa.sql.functions.GenericFunction[Any]):
    """ Check whether a distance lies within the reverse-lookup
        diameter for a given search rank.
    """
    name = 'IsBelowReverseDistance'
    inherit_cache = True


@compiles(IsBelowReverseDistance)  # type: ignore[no-untyped-call, misc]
def default_is_below_reverse_distance(element: SaColumn,
                                      compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: delegate the diameter computation to the
        reverse_place_diameter() database function.
    """
    dist, rank = element.clauses
    return f"{compiler.process(dist, **kw)}" \
           f" < reverse_place_diameter({compiler.process(rank, **kw)})"


@compiles(IsBelowReverseDistance, 'sqlite')  # type: ignore[no-untyped-call, misc]
def sqlite_is_below_reverse_distance(element: SaColumn,
                                     compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: inline the diameter formula because the
        database function is not available there.
    """
    dist, rank = element.clauses
    return f"{compiler.process(dist, **kw)}" \
           f" < 14.0 * exp(-0.2 * {compiler.process(rank, **kw)}) - 0.03"
def select_index_placex_geometry_reverse_lookupplacenode(table: str) -> 'sa.TextClause': def select_index_placex_geometry_reverse_lookupplacenode(table: str) -> 'sa.TextClause':
""" Create an expression with the necessary conditions over a placex """ Create an expression with the necessary conditions over a placex
@@ -41,7 +129,36 @@ def select_index_placex_geometry_reverse_lookupplacenode(table: str) -> 'sa.Text
f" AND {table}.osm_type = 'N'") f" AND {table}.osm_type = 'N'")
class CrosscheckNames(FunctionElement[Any]): class IsAddressPoint(sa.sql.functions.GenericFunction[Any]):
name = 'IsAddressPoint'
inherit_cache = True
def __init__(self, table: sa.Table) -> None:
    # Clause order is relied upon by the compilers:
    # rank_address, housenumber, then the name collection.
    super().__init__(table.c.rank_address,  # type: ignore[no-untyped-call]
                     table.c.housenumber, table.c.name)
@compiles(IsAddressPoint)  # type: ignore[no-untyped-call, misc]
def default_is_address_point(element: SaColumn,
                             compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: a rank-30 object counts as an address
        point when it has a house number or an 'addr:housename' entry
        (checked with the hstore '?' containment operator).
    """
    rank, hnr, name = element.clauses
    rank_sql = compiler.process(rank, **kw)
    hnr_sql = compiler.process(hnr, **kw)
    name_sql = compiler.process(name, **kw)
    return f"({rank_sql} = 30 AND ({hnr_sql} IS NOT NULL OR {name_sql} ? 'addr:housename'))"


@compiles(IsAddressPoint, 'sqlite')  # type: ignore[no-untyped-call, misc]
def sqlite_is_address_point(element: SaColumn,
                            compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: names are stored as JSON, so the housename
        lookup goes through json_extract() instead of hstore.
    """
    rank, hnr, name = element.clauses
    rank_sql = compiler.process(rank, **kw)
    hnr_sql = compiler.process(hnr, **kw)
    name_sql = compiler.process(name, **kw)
    return f"({rank_sql} = 30 AND coalesce({hnr_sql}," \
           f" json_extract({name_sql}, '$.addr:housename')) IS NOT NULL)"
class CrosscheckNames(sa.sql.functions.GenericFunction[Any]):
""" Check if in the given list of names in parameters 1 any of the names """ Check if in the given list of names in parameters 1 any of the names
from the JSON array in parameter 2 are contained. from the JSON array in parameter 2 are contained.
""" """
@@ -54,3 +171,42 @@ def compile_crosscheck_names(element: SaColumn,
arg1, arg2 = list(element.clauses) arg1, arg2 = list(element.clauses)
return "coalesce(avals(%s) && ARRAY(SELECT * FROM json_array_elements_text(%s)), false)" % ( return "coalesce(avals(%s) && ARRAY(SELECT * FROM json_array_elements_text(%s)), false)" % (
compiler.process(arg1, **kw), compiler.process(arg2, **kw)) compiler.process(arg1, **kw), compiler.process(arg2, **kw))
@compiles(CrosscheckNames, 'sqlite')  # type: ignore[no-untyped-call, misc]
def compile_sqlite_crosscheck_names(element: SaColumn,
                                    compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: both sides are JSON arrays here, so expand
        them with json_each() and test for any common value.
    """
    names, matches = element.clauses
    return ("EXISTS(SELECT *"
            " FROM json_each({}) as name, json_each({}) as match_name"
            " WHERE name.value = match_name.value)"
            .format(compiler.process(names, **kw),
                    compiler.process(matches, **kw)))
class JsonArrayEach(sa.sql.functions.GenericFunction[Any]):
    """ Return elements of a json array as a set.
    """
    name = 'JsonArrayEach'
    inherit_cache = True


@compiles(JsonArrayEach)  # type: ignore[no-untyped-call, misc]
def default_json_array_each(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostgreSQL rendering: json_array_elements() expands the array. """
    return f"json_array_elements({compiler.process(element.clauses, **kw)})"


@compiles(JsonArrayEach, 'sqlite')  # type: ignore[no-untyped-call, misc]
def sqlite_json_array_each(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite rendering: the built-in json_each() table function. """
    return f"json_each({compiler.process(element.clauses, **kw)})"
class Greatest(sa.sql.functions.GenericFunction[Any]):
    """ Function to compute maximum of all its input parameters.
    """
    name = 'greatest'
    inherit_cache = True


@compiles(Greatest, 'sqlite')  # type: ignore[no-untyped-call, misc]
def sqlite_greatest(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite has no greatest(); its variadic max() behaves the same. """
    return f"max({compiler.process(element.clauses, **kw)})"

View File

@@ -13,6 +13,7 @@ import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import HSTORE, ARRAY, JSONB, array from sqlalchemy.dialects.postgresql import HSTORE, ARRAY, JSONB, array
from sqlalchemy.dialects.sqlite import JSON as sqlite_json from sqlalchemy.dialects.sqlite import JSON as sqlite_json
import nominatim.db.sqlalchemy_functions #pylint: disable=unused-import
from nominatim.db.sqlalchemy_types import Geometry from nominatim.db.sqlalchemy_types import Geometry
class PostgresTypes: class PostgresTypes:
@@ -41,6 +42,9 @@ class SqliteTypes:
#pylint: disable=too-many-instance-attributes #pylint: disable=too-many-instance-attributes
class SearchTables: class SearchTables:
""" Data class that holds the tables of the Nominatim database. """ Data class that holds the tables of the Nominatim database.
This schema strictly reflects the read-access view of the database.
Any data used for updates only will not be visible.
""" """
def __init__(self, meta: sa.MetaData, engine_name: str) -> None: def __init__(self, meta: sa.MetaData, engine_name: str) -> None:
@@ -63,14 +67,13 @@ class SearchTables:
sa.Column('value', sa.Text)) sa.Column('value', sa.Text))
self.placex = sa.Table('placex', meta, self.placex = sa.Table('placex', meta,
sa.Column('place_id', sa.BigInteger, nullable=False, unique=True), sa.Column('place_id', sa.BigInteger, nullable=False),
sa.Column('parent_place_id', sa.BigInteger), sa.Column('parent_place_id', sa.BigInteger),
sa.Column('linked_place_id', sa.BigInteger), sa.Column('linked_place_id', sa.BigInteger),
sa.Column('importance', sa.Float), sa.Column('importance', sa.Float),
sa.Column('indexed_date', sa.DateTime), sa.Column('indexed_date', sa.DateTime),
sa.Column('rank_address', sa.SmallInteger), sa.Column('rank_address', sa.SmallInteger),
sa.Column('rank_search', sa.SmallInteger), sa.Column('rank_search', sa.SmallInteger),
sa.Column('partition', sa.SmallInteger),
sa.Column('indexed_status', sa.SmallInteger), sa.Column('indexed_status', sa.SmallInteger),
sa.Column('osm_type', sa.String(1), nullable=False), sa.Column('osm_type', sa.String(1), nullable=False),
sa.Column('osm_id', sa.BigInteger, nullable=False), sa.Column('osm_id', sa.BigInteger, nullable=False),
@@ -88,33 +91,31 @@ class SearchTables:
sa.Column('centroid', Geometry)) sa.Column('centroid', Geometry))
self.addressline = sa.Table('place_addressline', meta, self.addressline = sa.Table('place_addressline', meta,
sa.Column('place_id', sa.BigInteger, index=True), sa.Column('place_id', sa.BigInteger),
sa.Column('address_place_id', sa.BigInteger, index=True), sa.Column('address_place_id', sa.BigInteger),
sa.Column('distance', sa.Float), sa.Column('distance', sa.Float),
sa.Column('cached_rank_address', sa.SmallInteger),
sa.Column('fromarea', sa.Boolean), sa.Column('fromarea', sa.Boolean),
sa.Column('isaddress', sa.Boolean)) sa.Column('isaddress', sa.Boolean))
self.postcode = sa.Table('location_postcode', meta, self.postcode = sa.Table('location_postcode', meta,
sa.Column('place_id', sa.BigInteger, unique=True), sa.Column('place_id', sa.BigInteger),
sa.Column('parent_place_id', sa.BigInteger), sa.Column('parent_place_id', sa.BigInteger),
sa.Column('rank_search', sa.SmallInteger), sa.Column('rank_search', sa.SmallInteger),
sa.Column('rank_address', sa.SmallInteger), sa.Column('rank_address', sa.SmallInteger),
sa.Column('indexed_status', sa.SmallInteger), sa.Column('indexed_status', sa.SmallInteger),
sa.Column('indexed_date', sa.DateTime), sa.Column('indexed_date', sa.DateTime),
sa.Column('country_code', sa.String(2)), sa.Column('country_code', sa.String(2)),
sa.Column('postcode', sa.Text, index=True), sa.Column('postcode', sa.Text),
sa.Column('geometry', Geometry)) sa.Column('geometry', Geometry))
self.osmline = sa.Table('location_property_osmline', meta, self.osmline = sa.Table('location_property_osmline', meta,
sa.Column('place_id', sa.BigInteger, nullable=False, unique=True), sa.Column('place_id', sa.BigInteger, nullable=False),
sa.Column('osm_id', sa.BigInteger), sa.Column('osm_id', sa.BigInteger),
sa.Column('parent_place_id', sa.BigInteger), sa.Column('parent_place_id', sa.BigInteger),
sa.Column('indexed_date', sa.DateTime), sa.Column('indexed_date', sa.DateTime),
sa.Column('startnumber', sa.Integer), sa.Column('startnumber', sa.Integer),
sa.Column('endnumber', sa.Integer), sa.Column('endnumber', sa.Integer),
sa.Column('step', sa.SmallInteger), sa.Column('step', sa.SmallInteger),
sa.Column('partition', sa.SmallInteger),
sa.Column('indexed_status', sa.SmallInteger), sa.Column('indexed_status', sa.SmallInteger),
sa.Column('linegeo', Geometry), sa.Column('linegeo', Geometry),
sa.Column('address', self.types.Composite), sa.Column('address', self.types.Composite),
@@ -125,7 +126,6 @@ class SearchTables:
sa.Column('country_code', sa.String(2)), sa.Column('country_code', sa.String(2)),
sa.Column('name', self.types.Composite), sa.Column('name', self.types.Composite),
sa.Column('derived_name', self.types.Composite), sa.Column('derived_name', self.types.Composite),
sa.Column('country_default_language_code', sa.Text),
sa.Column('partition', sa.Integer)) sa.Column('partition', sa.Integer))
self.country_grid = sa.Table('country_osm_grid', meta, self.country_grid = sa.Table('country_osm_grid', meta,
@@ -135,12 +135,12 @@ class SearchTables:
# The following tables are not necessarily present. # The following tables are not necessarily present.
self.search_name = sa.Table('search_name', meta, self.search_name = sa.Table('search_name', meta,
sa.Column('place_id', sa.BigInteger, index=True), sa.Column('place_id', sa.BigInteger),
sa.Column('importance', sa.Float), sa.Column('importance', sa.Float),
sa.Column('search_rank', sa.SmallInteger), sa.Column('search_rank', sa.SmallInteger),
sa.Column('address_rank', sa.SmallInteger), sa.Column('address_rank', sa.SmallInteger),
sa.Column('name_vector', self.types.IntArray, index=True), sa.Column('name_vector', self.types.IntArray),
sa.Column('nameaddress_vector', self.types.IntArray, index=True), sa.Column('nameaddress_vector', self.types.IntArray),
sa.Column('country_code', sa.String(2)), sa.Column('country_code', sa.String(2)),
sa.Column('centroid', Geometry)) sa.Column('centroid', Geometry))
@@ -150,6 +150,5 @@ class SearchTables:
sa.Column('startnumber', sa.Integer), sa.Column('startnumber', sa.Integer),
sa.Column('endnumber', sa.Integer), sa.Column('endnumber', sa.Integer),
sa.Column('step', sa.SmallInteger), sa.Column('step', sa.SmallInteger),
sa.Column('partition', sa.SmallInteger),
sa.Column('linegeo', Geometry), sa.Column('linegeo', Geometry),
sa.Column('postcode', sa.Text)) sa.Column('postcode', sa.Text))

View File

@@ -7,16 +7,165 @@
""" """
Custom types for SQLAlchemy. Custom types for SQLAlchemy.
""" """
from __future__ import annotations
from typing import Callable, Any, cast from typing import Callable, Any, cast
import sys import sys
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy.ext.compiler import compiles
from sqlalchemy import types from sqlalchemy import types
from nominatim.typing import SaColumn, SaBind from nominatim.typing import SaColumn, SaBind
#pylint: disable=all #pylint: disable=all
class Geometry_DistanceSpheroid(sa.sql.expression.FunctionElement[float]):
    """ Function to compute the spherical distance in meters.
    """
    type = sa.Float()
    name = 'Geometry_DistanceSpheroid'
    inherit_cache = True


@compiles(Geometry_DistanceSpheroid)  # type: ignore[no-untyped-call, misc]
def _default_distance_spheroid(element: SaColumn,
                               compiler: 'sa.Compiled', **kw: Any) -> str:
    # PostGIS rendering: spell out the WGS84 spheroid explicitly.
    return "ST_DistanceSpheroid(%s,"\
           " 'SPHEROID[\"WGS 84\",6378137,298.257223563, AUTHORITY[\"EPSG\",\"7030\"]]')"\
           % compiler.process(element.clauses, **kw)


@compiles(Geometry_DistanceSpheroid, 'sqlite')  # type: ignore[no-untyped-call, misc]
def _spatialite_distance_spheroid(element: SaColumn,
                                  compiler: 'sa.Compiled', **kw: Any) -> str:
    # Spatialite rendering: Distance(..., true) — the flag presumably
    # selects ellipsoidal computation (confirm against Spatialite docs).
    # COALESCE maps a NULL result to 0.0.
    return "COALESCE(Distance(%s, true), 0.0)" % compiler.process(element.clauses, **kw)
class Geometry_IsLineLike(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is a line or multiline.
    """
    name = 'Geometry_IsLineLike'
    inherit_cache = True


@compiles(Geometry_IsLineLike)  # type: ignore[no-untyped-call, misc]
def _default_is_line_like(element: SaColumn,
                          compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: type names carry an 'ST_' prefix. """
    inner = compiler.process(element.clauses, **kw)
    return f"ST_GeometryType({inner}) IN ('ST_LineString', 'ST_MultiLineString')"


@compiles(Geometry_IsLineLike, 'sqlite')  # type: ignore[no-untyped-call, misc]
def _sqlite_is_line_like(element: SaColumn,
                         compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: upper-case type names without prefix. """
    inner = compiler.process(element.clauses, **kw)
    return f"ST_GeometryType({inner}) IN ('LINESTRING', 'MULTILINESTRING')"
class Geometry_IsAreaLike(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is a polygon or multipolygon.
    """
    # Fixed copy-paste error: the name was 'Geometry_IsLineLike'.
    # The attribute is only used for default rendering/debugging (both
    # dialect compilers below are registered), but it must match the class.
    name = 'Geometry_IsAreaLike'
    inherit_cache = True


@compiles(Geometry_IsAreaLike)  # type: ignore[no-untyped-call, misc]
def _default_is_area_like(element: SaColumn,
                          compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: type names carry an 'ST_' prefix. """
    return "ST_GeometryType(%s) IN ('ST_Polygon', 'ST_MultiPolygon')" % \
               compiler.process(element.clauses, **kw)


@compiles(Geometry_IsAreaLike, 'sqlite')  # type: ignore[no-untyped-call, misc]
def _sqlite_is_area_like(element: SaColumn,
                         compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: upper-case type names without prefix. """
    return "ST_GeometryType(%s) IN ('POLYGON', 'MULTIPOLYGON')" % \
               compiler.process(element.clauses, **kw)
class Geometry_IntersectsBbox(sa.sql.expression.FunctionElement[Any]):
    """ Check if the bounding boxes of the given geometries intersect.
    """
    name = 'Geometry_IntersectsBbox'
    inherit_cache = True


@compiles(Geometry_IntersectsBbox)  # type: ignore[no-untyped-call, misc]
def _default_intersects(element: SaColumn,
                        compiler: 'sa.Compiled', **kw: Any) -> str:
    """ PostGIS rendering: the && operator compares bounding boxes. """
    left, right = element.clauses
    return f"{compiler.process(left, **kw)} && {compiler.process(right, **kw)}"


@compiles(Geometry_IntersectsBbox, 'sqlite')  # type: ignore[no-untyped-call, misc]
def _sqlite_intersects(element: SaColumn,
                       compiler: 'sa.Compiled', **kw: Any) -> str:
    """ Spatialite rendering: MbrIntersects() returns 1 on overlap. """
    return f"MbrIntersects({compiler.process(element.clauses, **kw)}) = 1"
class Geometry_ColumnIntersectsBbox(sa.sql.expression.FunctionElement[Any]):
    """ Check if the bounding box of the geometry intersects with the
        given table column, using the spatial index for the column.

        The index must exist or the query may return nothing.
    """
    name = 'Geometry_ColumnIntersectsBbox'
    inherit_cache = True


@compiles(Geometry_ColumnIntersectsBbox)  # type: ignore[no-untyped-call, misc]
def default_intersects_column(element: SaColumn,
                              compiler: 'sa.Compiled', **kw: Any) -> str:
    # PostGIS: && uses the GiST index on the column automatically.
    arg1, arg2 = list(element.clauses)
    return "%s && %s" % (compiler.process(arg1, **kw), compiler.process(arg2, **kw))


@compiles(Geometry_ColumnIntersectsBbox, 'sqlite')  # type: ignore[no-untyped-call, misc]
def spatialite_intersects_column(element: SaColumn,
                                 compiler: 'sa.Compiled', **kw: Any) -> str:
    # Spatialite has no implicit index use: add an explicit subquery
    # against the SpatialIndex virtual table. arg1 must be a real table
    # column — its .table.name and .name are interpolated into the SQL.
    arg1, arg2 = list(element.clauses)
    return "MbrIntersects(%s, %s) = 1 and "\
           "%s.ROWID IN (SELECT ROWID FROM SpatialIndex "\
           "WHERE f_table_name = '%s' AND f_geometry_column = '%s' "\
           "AND search_frame = %s)" %(
              compiler.process(arg1, **kw),
              compiler.process(arg2, **kw),
              arg1.table.name, arg1.table.name, arg1.name,
              compiler.process(arg2, **kw))
class Geometry_ColumnDWithin(sa.sql.expression.FunctionElement[Any]):
    """ Check if the geometry is within the distance of the
        given table column, using the spatial index for the column.

        The index must exist or the query may return nothing.
    """
    name = 'Geometry_ColumnDWithin'
    inherit_cache = True


@compiles(Geometry_ColumnDWithin)  # type: ignore[no-untyped-call, misc]
def default_dwithin_column(element: SaColumn,
                           compiler: 'sa.Compiled', **kw: Any) -> str:
    # PostGIS: ST_DWithin is index-aware on its own.
    return "ST_DWithin(%s)" % compiler.process(element.clauses, **kw)


@compiles(Geometry_ColumnDWithin, 'sqlite')  # type: ignore[no-untyped-call, misc]
def spatialite_dwithin_column(element: SaColumn,
                              compiler: 'sa.Compiled', **kw: Any) -> str:
    # Spatialite: combine the exact ST_Distance check with an explicit
    # SpatialIndex lookup over the bbox expanded by the distance.
    # geom1 must be a real table column — its .table.name and .name
    # are interpolated into the SQL.
    geom1, geom2, dist = list(element.clauses)
    return "ST_Distance(%s, %s) < %s and "\
           "%s.ROWID IN (SELECT ROWID FROM SpatialIndex "\
           "WHERE f_table_name = '%s' AND f_geometry_column = '%s' "\
           "AND search_frame = ST_Expand(%s, %s))" %(
              compiler.process(geom1, **kw),
              compiler.process(geom2, **kw),
              compiler.process(dist, **kw),
              geom1.table.name, geom1.table.name, geom1.name,
              compiler.process(geom2, **kw),
              compiler.process(dist, **kw))
class Geometry(types.UserDefinedType): # type: ignore[type-arg] class Geometry(types.UserDefinedType): # type: ignore[type-arg]
""" Simplified type decorator for PostGIS geometry. This type """ Simplified type decorator for PostGIS geometry. This type
only supports geometries in 4326 projection. only supports geometries in 4326 projection.
@@ -47,6 +196,10 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
return process return process
def column_expression(self, col: SaColumn) -> SaColumn:
return sa.func.ST_AsEWKB(col)
def bind_expression(self, bindvalue: SaBind) -> SaColumn: def bind_expression(self, bindvalue: SaBind) -> SaColumn:
return sa.func.ST_GeomFromText(bindvalue, sa.text('4326'), type_=self) return sa.func.ST_GeomFromText(bindvalue, sa.text('4326'), type_=self)
@@ -54,28 +207,34 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
class comparator_factory(types.UserDefinedType.Comparator): # type: ignore[type-arg] class comparator_factory(types.UserDefinedType.Comparator): # type: ignore[type-arg]
def intersects(self, other: SaColumn) -> 'sa.Operators': def intersects(self, other: SaColumn) -> 'sa.Operators':
return self.op('&&')(other) if isinstance(self.expr, sa.Column):
return Geometry_ColumnIntersectsBbox(self.expr, other)
return Geometry_IntersectsBbox(self.expr, other)
def is_line_like(self) -> SaColumn: def is_line_like(self) -> SaColumn:
return sa.func.ST_GeometryType(self, type_=sa.String).in_(('ST_LineString', return Geometry_IsLineLike(self)
'ST_MultiLineString'))
def is_area(self) -> SaColumn: def is_area(self) -> SaColumn:
return sa.func.ST_GeometryType(self, type_=sa.String).in_(('ST_Polygon', return Geometry_IsAreaLike(self)
'ST_MultiPolygon'))
def ST_DWithin(self, other: SaColumn, distance: SaColumn) -> SaColumn: def ST_DWithin(self, other: SaColumn, distance: SaColumn) -> SaColumn:
return sa.func.ST_DWithin(self, other, distance, type_=sa.Boolean) if isinstance(self.expr, sa.Column):
return Geometry_ColumnDWithin(self.expr, other, distance)
return sa.func.ST_DWithin(self.expr, other, distance)
def ST_DWithin_no_index(self, other: SaColumn, distance: SaColumn) -> SaColumn: def ST_DWithin_no_index(self, other: SaColumn, distance: SaColumn) -> SaColumn:
return sa.func.ST_DWithin(sa.func.coalesce(sa.null(), self), return sa.func.ST_DWithin(sa.func.coalesce(sa.null(), self),
other, distance, type_=sa.Boolean) other, distance)
def ST_Intersects_no_index(self, other: SaColumn) -> 'sa.Operators': def ST_Intersects_no_index(self, other: SaColumn) -> 'sa.Operators':
return sa.func.coalesce(sa.null(), self).op('&&')(other) return Geometry_IntersectsBbox(sa.func.coalesce(sa.null(), self), other)
def ST_Distance(self, other: SaColumn) -> SaColumn: def ST_Distance(self, other: SaColumn) -> SaColumn:
@@ -91,7 +250,8 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
def ST_ClosestPoint(self, other: SaColumn) -> SaColumn: def ST_ClosestPoint(self, other: SaColumn) -> SaColumn:
return sa.func.ST_ClosestPoint(self, other, type_=Geometry) return sa.func.coalesce(sa.func.ST_ClosestPoint(self, other, type_=Geometry),
other)
def ST_Buffer(self, other: SaColumn) -> SaColumn: def ST_Buffer(self, other: SaColumn) -> SaColumn:
@@ -116,3 +276,55 @@ class Geometry(types.UserDefinedType): # type: ignore[type-arg]
def ST_LineLocatePoint(self, other: SaColumn) -> SaColumn: def ST_LineLocatePoint(self, other: SaColumn) -> SaColumn:
return sa.func.ST_LineLocatePoint(self, other, type_=sa.Float) return sa.func.ST_LineLocatePoint(self, other, type_=sa.Float)
def distance_spheroid(self, other: SaColumn) -> SaColumn:
return Geometry_DistanceSpheroid(self, other)
@compiles(Geometry, 'sqlite')  # type: ignore[no-untyped-call]
def get_col_spec(self, *args, **kwargs):  # type: ignore[no-untyped-def]
    # DDL type name for Geometry columns under SQLite/Spatialite:
    # CREATE TABLE renders the column as plain 'GEOMETRY'.
    # NOTE(review): registered via @compiles on the type itself, so
    # `self` here is the Geometry type instance — confirm intended.
    return 'GEOMETRY'
# (postgres name, return type, spatialite name) triples for functions
# that only need renaming when compiled for SQLite.
SQLITE_FUNCTION_ALIAS = (
    ('ST_AsEWKB', sa.Text, 'AsEWKB'),
    ('ST_GeomFromEWKT', Geometry, 'GeomFromEWKT'),
    ('ST_AsGeoJSON', sa.Text, 'AsGeoJSON'),
    ('ST_AsKML', sa.Text, 'AsKML'),
    ('ST_AsSVG', sa.Text, 'AsSVG'),
    ('ST_LineLocatePoint', sa.Float, 'ST_Line_Locate_Point'),
    ('ST_LineInterpolatePoint', sa.Float, 'ST_Line_Interpolate_Point'),
)


def _add_function_alias(func: str, ftype: type, alias: str) -> None:
    """ Register a GenericFunction `func` with return type `ftype`
        that renders under SQLite as a call to `alias`.
    """
    attributes = {"type": ftype(),
                  "name": func,
                  "identifier": func,
                  "inherit_cache": True}
    func_class = type(func, (sa.sql.functions.GenericFunction, ), attributes)

    def _render_sqlite(element: Any, compiler: Any, **kw: Any) -> Any:
        return f"{alias}({compiler.process(element.clauses, **kw)})"

    compiles(func_class, 'sqlite')(_render_sqlite)  # type: ignore[no-untyped-call]


for _alias_def in SQLITE_FUNCTION_ALIAS:
    _add_function_alias(*_alias_def)
class ST_DWithin(sa.sql.functions.GenericFunction[Any]):
    """ Distance check between two geometries. Renders as a plain
        ST_DWithin(...) call by default; SQLite gets the emulation below.
    """
    name = 'ST_DWithin'
    inherit_cache = True


@compiles(ST_DWithin, 'sqlite')  # type: ignore[no-untyped-call, misc]
def sqlite_st_dwithin(element: SaColumn, compiler: 'sa.Compiled', **kw: Any) -> str:
    """ SQLite emulation of ST_DWithin: a cheap bbox pre-filter via
        MbrIntersects/ST_Expand, followed by the exact ST_Distance test.

        Renamed from `default_json_array_each` — an apparent copy-paste
        slip from the JSON section; the hook compiles ST_DWithin. The
        name is never referenced directly, so the rename is safe.
    """
    geom1, geom2, dist = list(element.clauses)
    return "(MbrIntersects(%s, ST_Expand(%s, %s)) = 1 AND ST_Distance(%s, %s) <= %s)" % (
               compiler.process(geom1, **kw), compiler.process(geom2, **kw),
               compiler.process(dist, **kw),
               compiler.process(geom1, **kw), compiler.process(geom2, **kw),
               compiler.process(dist, **kw))

View File

@@ -0,0 +1,156 @@
# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2023 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Exporting a Nominatim database to SQlite.
"""
from typing import Set
import logging
from pathlib import Path
import sqlalchemy as sa
from nominatim.typing import SaSelect
from nominatim.db.sqlalchemy_types import Geometry
import nominatim.api as napi
LOG = logging.getLogger()
async def convert(project_dir: Path, outfile: Path, options: Set[str]) -> None:
    """ Export an existing database to sqlite. The resulting database
        will be usable against the Python frontend of Nominatim.

        Fix: the original only closed `api` in its finally clause;
        `outapi` (the sqlite-side engine) was never closed, leaking
        its connection pool. Both APIs are now closed reliably.
    """
    api = napi.NominatimAPIAsync(project_dir)

    try:
        outapi = napi.NominatimAPIAsync(project_dir,
                                        {'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={outfile}"})

        try:
            async with api.begin() as src, outapi.begin() as dest:
                writer = SqliteWriter(src, dest, options)
                await writer.write()
        finally:
            await outapi.close()
    finally:
        await api.close()
class SqliteWriter:
    """ Worker class which creates a new SQLite database.
    """

    def __init__(self, src: napi.SearchConnection,
                 dest: napi.SearchConnection, options: Set[str]) -> None:
        # src: connection to the existing PostgreSQL database.
        # dest: connection to the (empty) target SQLite database.
        # options: set of feature flags; currently only 'search' is used.
        self.src = src
        self.dest = dest
        self.options = options


    async def write(self) -> None:
        """ Create the database structure and copy the data from
            the source database to the destination.
        """
        # Must run before any geometry work: sets up Spatialite metadata.
        await self.dest.execute(sa.select(sa.func.InitSpatialMetaData(True, 'WGS84')))

        await self.create_tables()
        await self.copy_data()
        await self.create_indexes()


    async def create_tables(self) -> None:
        """ Set up the database tables.
        """
        if 'search' not in self.options:
            # search_name is only needed for the search API; drop it from
            # the metadata so create_all() skips it.
            self.dest.t.meta.remove(self.dest.t.search_name)

        await self.dest.connection.run_sync(self.dest.t.meta.create_all)

        # Convert all Geometry columns to Spatialite geometries
        for table in self.dest.t.meta.sorted_tables:
            for col in table.c:
                if isinstance(col.type, Geometry):
                    await self.dest.execute(sa.select(
                        sa.func.RecoverGeometryColumn(table.name, col.name, 4326,
                                                      col.type.subtype.upper(), 'XY')))


    async def copy_data(self) -> None:
        """ Copy data for all registered tables.
        """
        for table in self.dest.t.meta.sorted_tables:
            LOG.warning("Copying '%s'", table.name)
            async_result = await self.src.connection.stream(self.select_from(table.name))

            # Stream rows in chunks of 10000 to bound memory usage.
            async for partition in async_result.partitions(10000):
                # 'class' is a reserved word for SQLAlchemy row access;
                # the column is mapped to 'class_' on insert.
                data = [{('class_' if k == 'class' else k): getattr(r, k) for k in r._fields}
                        for r in partition]
                await self.dest.execute(table.insert(), data)


    async def create_indexes(self) -> None:
        """ Add indexes necessary for the frontend.
        """
        # reverse place node lookup needs an extra table to simulate a
        # partial index with adaptive buffering.
        await self.dest.execute(sa.text(
            """ CREATE TABLE placex_place_node_areas AS
                  SELECT place_id, ST_Expand(geometry,
                                             14.0 * exp(-0.2 * rank_search) - 0.03) as geometry
                  FROM placex
                  WHERE rank_address between 5 and 25
                        and osm_type = 'N'
                        and linked_place_id is NULL """))
        # Register the new geometry column and index it spatially.
        await self.dest.execute(sa.select(
            sa.func.RecoverGeometryColumn('placex_place_node_areas', 'geometry',
                                          4326, 'GEOMETRY', 'XY')))
        await self.dest.execute(sa.select(sa.func.CreateSpatialIndex(
            'placex_place_node_areas', 'geometry')))

        # Remaining indexes.
        await self.create_spatial_index('country_grid', 'geometry')
        await self.create_spatial_index('placex', 'geometry')
        await self.create_spatial_index('osmline', 'linegeo')
        await self.create_spatial_index('tiger', 'linegeo')
        await self.create_index('placex', 'place_id')
        await self.create_index('placex', 'parent_place_id')
        await self.create_index('placex', 'rank_address')
        await self.create_index('addressline', 'place_id')


    async def create_spatial_index(self, table: str, column: str) -> None:
        """ Create a spatial index on the given table and column.
        """
        await self.dest.execute(sa.select(
            sa.func.CreateSpatialIndex(getattr(self.dest.t, table).name, column)))


    async def create_index(self, table_name: str, column: str) -> None:
        """ Create a simple index on the given table and column.
        """
        table = getattr(self.dest.t, table_name)
        # str(table) yields the table name, so the index is idx_<table>_<column>.
        await self.dest.connection.run_sync(
            sa.Index(f"idx_{table}_{column}", getattr(table.c, column)).create)


    def select_from(self, table: str) -> SaSelect:
        """ Create the SQL statement to select the source columns and rows.
        """
        columns = self.src.t.meta.tables[table].c

        if table == 'placex':
            # SQLite struggles with Geometries that are larger than 5MB,
            # so simplify those.
            return sa.select(*(c for c in columns if not isinstance(c.type, Geometry)),
                             sa.func.ST_AsText(columns.centroid).label('centroid'),
                             sa.func.ST_AsText(
                               sa.case((sa.func.ST_MemSize(columns.geometry) < 5000000,
                                        columns.geometry),
                                       else_=sa.func.ST_SimplifyPreserveTopology(
                                                columns.geometry, 0.0001)
                                       )).label('geometry'))

        # All other tables: geometries are transferred as WKT text.
        sql = sa.select(*(sa.func.ST_AsText(c).label(c.name)
                             if isinstance(c.type, Geometry) else c for c in columns))

        return sql

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Localization of search results Feature: Localization of search results

View File

@@ -2,6 +2,7 @@
Feature: Object details Feature: Object details
Testing different parameter options for details API. Testing different parameter options for details API.
@SQLITE
Scenario: JSON Details Scenario: JSON Details
When sending json details query for W297699560 When sending json details query for W297699560
Then the result is valid json Then the result is valid json
@@ -11,6 +12,7 @@ Feature: Object details
| type | | type |
| Point | | Point |
@SQLITE
Scenario: JSON Details with pretty printing Scenario: JSON Details with pretty printing
When sending json details query for W297699560 When sending json details query for W297699560
| pretty | | pretty |
@@ -19,6 +21,7 @@ Feature: Object details
And result has attributes geometry And result has attributes geometry
And result has not attributes keywords,address,linked_places,parentof And result has not attributes keywords,address,linked_places,parentof
@SQLITE
Scenario: JSON Details with addressdetails Scenario: JSON Details with addressdetails
When sending json details query for W297699560 When sending json details query for W297699560
| addressdetails | | addressdetails |
@@ -26,6 +29,7 @@ Feature: Object details
Then the result is valid json Then the result is valid json
And result has attributes address And result has attributes address
@SQLITE
Scenario: JSON Details with linkedplaces Scenario: JSON Details with linkedplaces
When sending json details query for R123924 When sending json details query for R123924
| linkedplaces | | linkedplaces |
@@ -33,6 +37,7 @@ Feature: Object details
Then the result is valid json Then the result is valid json
And result has attributes linked_places And result has attributes linked_places
@SQLITE
Scenario: JSON Details with hierarchy Scenario: JSON Details with hierarchy
When sending json details query for W297699560 When sending json details query for W297699560
| hierarchy | | hierarchy |
@@ -40,6 +45,7 @@ Feature: Object details
Then the result is valid json Then the result is valid json
And result has attributes hierarchy And result has attributes hierarchy
@SQLITE
Scenario: JSON Details with grouped hierarchy Scenario: JSON Details with grouped hierarchy
When sending json details query for W297699560 When sending json details query for W297699560
| hierarchy | group_hierarchy | | hierarchy | group_hierarchy |
@@ -69,6 +75,7 @@ Feature: Object details
Then the result is valid json Then the result is valid json
And result has attributes keywords And result has attributes keywords
@SQLITE
Scenario Outline: JSON details with full geometry Scenario Outline: JSON details with full geometry
When sending json details query for <osmid> When sending json details query for <osmid>
| polygon_geojson | | polygon_geojson |

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Object details Feature: Object details
Check details page for correctness Check details page for correctness

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Places by osm_type and osm_id Tests Feature: Places by osm_type and osm_id Tests
Simple tests for errors in various response formats. Simple tests for errors in various response formats.

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Places by osm_type and osm_id Tests Feature: Places by osm_type and osm_id Tests
Simple tests for response format. Simple tests for response format.

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Geometries for reverse geocoding Feature: Geometries for reverse geocoding
Tests for returning geometries with reverse Tests for returning geometries with reverse
@@ -9,7 +10,7 @@ Feature: Geometries for reverse geocoding
| 1 | | 1 |
Then results contain Then results contain
| geotext | | geotext |
| POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5226142 47.1379294,9.5226143 47.1379257,9.522615 47.137917,9.5226225 47.1379098,9.5226334 47.1379052,9.5226461 47.1379037,9.5226588 47.1379056,9.5226693 47.1379107,9.5226762 47.1379181,9.5226762 47.1379268,9.5226761 47.1379308,9.5227366 47.1379317,9.5227352 47.1379753,9.5227608 47.1379757,9.5227595 47.1380148,9.5227355 47.1380145,9.5227337 47.1380692,9.5225302 47.138066)) | | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226143 47.1379257, ?9.522615 47.137917, ?9.5226225 47.1379098, ?9.5226334 47.1379052, ?9.5226461 47.1379037, ?9.5226588 47.1379056, ?9.5226693 47.1379107, ?9.5226762 47.1379181, ?9.5226762 47.1379268, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
Scenario: Polygons can be slightly simplified Scenario: Polygons can be slightly simplified
@@ -18,7 +19,7 @@ Feature: Geometries for reverse geocoding
| 1 | 0.00001 | | 1 | 0.00001 |
Then results contain Then results contain
| geotext | | geotext |
| POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5226142 47.1379294,9.5226225 47.1379098,9.5226588 47.1379056,9.5226761 47.1379308,9.5227366 47.1379317,9.5227352 47.1379753,9.5227608 47.1379757,9.5227595 47.1380148,9.5227355 47.1380145,9.5227337 47.1380692,9.5225302 47.138066)) | | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5226142 47.1379294, ?9.5226225 47.1379098, ?9.5226588 47.1379056, ?9.5226761 47.1379308, ?9.5227366 47.1379317, ?9.5227352 47.1379753, ?9.5227608 47.1379757, ?9.5227595 47.1380148, ?9.5227355 47.1380145, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
Scenario: Polygons can be much simplified Scenario: Polygons can be much simplified
@@ -27,7 +28,7 @@ Feature: Geometries for reverse geocoding
| 1 | 0.9 | | 1 | 0.9 |
Then results contain Then results contain
| geotext | | geotext |
| POLYGON((9.5225302 47.138066,9.5225348 47.1379282,9.5227608 47.1379757,9.5227337 47.1380692,9.5225302 47.138066)) | | ^POLYGON\(\(9.5225302 47.138066, ?9.5225348 47.1379282, ?9.5227608 47.1379757, ?9.5227337 47.1380692, ?9.5225302 47.138066\)\) |
Scenario: For polygons return the centroid as center point Scenario: For polygons return the centroid as center point

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Localization of reverse search results Feature: Localization of reverse search results

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Layer parameter in reverse geocoding Feature: Layer parameter in reverse geocoding
Testing correct function of layer selection while reverse geocoding Testing correct function of layer selection while reverse geocoding
@@ -57,7 +58,7 @@ Feature: Layer parameter in reverse geocoding
@v1-api-python-only @v1-api-python-only
Scenario Outline: Search for mountain peaks begins at level 12 Scenario Outline: Search for mountain peaks begins at level 12
When sending v1/reverse at 47.08221,9.56769 When sending v1/reverse at 47.08293,9.57109
| layer | zoom | | layer | zoom |
| natural | <zoom> | | natural | <zoom> |
Then results contain Then results contain
@@ -71,7 +72,7 @@ Feature: Layer parameter in reverse geocoding
@v1-api-python-only @v1-api-python-only
Scenario Outline: Reverse serach with manmade layers Scenario Outline: Reverse search with manmade layers
When sending v1/reverse at 32.46904,-86.44439 When sending v1/reverse at 32.46904,-86.44439
| layer | | layer |
| <layer> | | <layer> |
@@ -84,5 +85,5 @@ Feature: Layer parameter in reverse geocoding
| manmade | leisure | park | | manmade | leisure | park |
| address | highway | residential | | address | highway | residential |
| poi | leisure | pitch | | poi | leisure | pitch |
| natural | waterway | stream | | natural | waterway | river |
| natural,manmade | leisure | park | | natural,manmade | leisure | park |

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Reverse geocoding Feature: Reverse geocoding
Testing the reverse function Testing the reverse function

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Geocodejson for Reverse API Feature: Geocodejson for Reverse API
Testing correctness of geocodejson output (API version v1). Testing correctness of geocodejson output (API version v1).

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Geojson for Reverse API Feature: Geojson for Reverse API
Testing correctness of geojson output (API version v1). Testing correctness of geojson output (API version v1).

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Json output for Reverse API Feature: Json output for Reverse API
Testing correctness of json and jsonv2 output (API version v1). Testing correctness of json and jsonv2 output (API version v1).
@@ -93,7 +94,7 @@ Feature: Json output for Reverse API
| polygon_text | 1 | | polygon_text | 1 |
Then results contain Then results contain
| geotext | | geotext |
| LINESTRING(9.5039353 47.0657546,9.5040437 47.0657781,9.5040808 47.065787,9.5054298 47.0661407) | | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
Examples: Examples:
| format | | format |

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: v1/reverse Parameter Tests Feature: v1/reverse Parameter Tests
Tests for parameter inputs for the v1 reverse endpoint. Tests for parameter inputs for the v1 reverse endpoint.

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: XML output for Reverse API Feature: XML output for Reverse API
Testing correctness of xml output (API version v1). Testing correctness of xml output (API version v1).
@@ -66,7 +67,7 @@ Feature: XML output for Reverse API
| polygon_text | 1 | | polygon_text | 1 |
Then results contain Then results contain
| geotext | | geotext |
| LINESTRING(9.5039353 47.0657546,9.5040437 47.0657781,9.5040808 47.065787,9.5054298 47.0661407) | | ^LINESTRING\(9.5039353 47.0657546, ?9.5040437 47.0657781, ?9.5040808 47.065787, ?9.5054298 47.0661407\) |
Scenario: Output of SVG Scenario: Output of SVG

View File

@@ -1,3 +1,4 @@
@SQLITE
@APIDB @APIDB
Feature: Status queries Feature: Status queries
Testing status query Testing status query

View File

@@ -46,7 +46,10 @@ def before_all(context):
def before_scenario(context, scenario): def before_scenario(context, scenario):
if 'DB' in context.tags: if not 'SQLITE' in context.tags \
and context.config.userdata['API_TEST_DB'].startswith('sqlite:'):
context.scenario.skip("Not usable with Sqlite database.")
elif 'DB' in context.tags:
context.nominatim.setup_db(context) context.nominatim.setup_db(context)
elif 'APIDB' in context.tags: elif 'APIDB' in context.tags:
context.nominatim.setup_api_db() context.nominatim.setup_api_db()

View File

@@ -86,7 +86,10 @@ class NominatimEnvironment:
be picked up by dotenv and creates a project directory with the be picked up by dotenv and creates a project directory with the
appropriate website scripts. appropriate website scripts.
""" """
dsn = 'pgsql:dbname={}'.format(dbname) if dbname.startswith('sqlite:'):
dsn = 'sqlite:dbname={}'.format(dbname[7:])
else:
dsn = 'pgsql:dbname={}'.format(dbname)
if self.db_host: if self.db_host:
dsn += ';host=' + self.db_host dsn += ';host=' + self.db_host
if self.db_port: if self.db_port:
@@ -197,6 +200,9 @@ class NominatimEnvironment:
""" """
self.write_nominatim_config(self.api_test_db) self.write_nominatim_config(self.api_test_db)
if self.api_test_db.startswith('sqlite:'):
return
if not self.api_db_done: if not self.api_db_done:
self.api_db_done = True self.api_db_done = True

View File

@@ -16,6 +16,7 @@ import sqlalchemy as sa
import nominatim.api as napi import nominatim.api as napi
from nominatim.db.sql_preprocessor import SQLPreprocessor from nominatim.db.sql_preprocessor import SQLPreprocessor
from nominatim.tools import convert_sqlite
import nominatim.api.logging as loglib import nominatim.api.logging as loglib
class APITester: class APITester:
@@ -178,7 +179,6 @@ def apiobj(temp_db_with_extensions, temp_db_conn, monkeypatch):
testapi.async_to_sync(testapi.create_tables()) testapi.async_to_sync(testapi.create_tables())
proc = SQLPreprocessor(temp_db_conn, testapi.api.config) proc = SQLPreprocessor(temp_db_conn, testapi.api.config)
proc.run_sql_file(temp_db_conn, 'functions/address_lookup.sql')
proc.run_sql_file(temp_db_conn, 'functions/ranking.sql') proc.run_sql_file(temp_db_conn, 'functions/ranking.sql')
loglib.set_log_output('text') loglib.set_log_output('text')
@@ -186,3 +186,21 @@ def apiobj(temp_db_with_extensions, temp_db_conn, monkeypatch):
print(loglib.get_and_disable()) print(loglib.get_and_disable())
testapi.api.close() testapi.api.close()
@pytest.fixture(params=['postgres_db', 'sqlite_db'])
def frontend(request, event_loop, tmp_path):
if request.param == 'sqlite_db':
db = str(tmp_path / 'test_nominatim_python_unittest.sqlite')
def mkapi(apiobj, options={'reverse'}):
event_loop.run_until_complete(convert_sqlite.convert(Path('/invalid'),
db, options))
return napi.NominatimAPI(Path('/invalid'),
{'NOMINATIM_DATABASE_DSN': f"sqlite:dbname={db}",
'NOMINATIM_USE_US_TIGER_DATA': 'yes'})
elif request.param == 'postgres_db':
def mkapi(apiobj, options=None):
return apiobj.api
return mkapi

View File

@@ -15,7 +15,7 @@ import nominatim.api as napi
@pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4), @pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
napi.OsmID('W', 4, 'highway'))) napi.OsmID('W', 4, 'highway')))
def test_lookup_in_placex(apiobj, idobj): def test_lookup_in_placex(apiobj, frontend, idobj):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', class_='highway', type='residential',
@@ -31,7 +31,8 @@ def test_lookup_in_placex(apiobj, idobj):
indexed_date=import_date, indexed_date=import_date,
geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)') geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
result = apiobj.api.details(idobj) api = frontend(apiobj, options={'details'})
result = api.details(idobj)
assert result is not None assert result is not None
@@ -69,7 +70,7 @@ def test_lookup_in_placex(apiobj, idobj):
assert result.geometry == {'type': 'ST_LineString'} assert result.geometry == {'type': 'ST_LineString'}
def test_lookup_in_placex_minimal_info(apiobj): def test_lookup_in_placex_minimal_info(apiobj, frontend):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', class_='highway', type='residential',
@@ -79,7 +80,8 @@ def test_lookup_in_placex_minimal_info(apiobj):
indexed_date=import_date, indexed_date=import_date,
geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)') geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
result = apiobj.api.details(napi.PlaceID(332)) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(332))
assert result is not None assert result is not None
@@ -117,16 +119,17 @@ def test_lookup_in_placex_minimal_info(apiobj):
assert result.geometry == {'type': 'ST_LineString'} assert result.geometry == {'type': 'ST_LineString'}
def test_lookup_in_placex_with_geometry(apiobj): def test_lookup_in_placex_with_geometry(apiobj, frontend):
apiobj.add_placex(place_id=332, apiobj.add_placex(place_id=332,
geometry='LINESTRING(23 34, 23.1 34)') geometry='LINESTRING(23 34, 23.1 34)')
result = apiobj.api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(332), geometry_output=napi.GeometryFormat.GEOJSON)
assert result.geometry == {'geojson': '{"type":"LineString","coordinates":[[23,34],[23.1,34]]}'} assert result.geometry == {'geojson': '{"type":"LineString","coordinates":[[23,34],[23.1,34]]}'}
def test_lookup_placex_with_address_details(apiobj): def test_lookup_placex_with_address_details(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street', class_='highway', type='residential', name='Street',
country_code='pl', country_code='pl',
@@ -143,7 +146,8 @@ def test_lookup_placex_with_address_details(apiobj):
country_code='pl', country_code='pl',
rank_search=17, rank_address=16) rank_search=17, rank_address=16)
result = apiobj.api.details(napi.PlaceID(332), address_details=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(332), address_details=True)
assert result.address_rows == [ assert result.address_rows == [
napi.AddressLine(place_id=332, osm_object=('W', 4), napi.AddressLine(place_id=332, osm_object=('W', 4),
@@ -172,18 +176,19 @@ def test_lookup_placex_with_address_details(apiobj):
] ]
def test_lookup_place_with_linked_places_none_existing(apiobj): def test_lookup_place_with_linked_places_none_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street', class_='highway', type='residential', name='Street',
country_code='pl', linked_place_id=45, country_code='pl', linked_place_id=45,
rank_search=27, rank_address=26) rank_search=27, rank_address=26)
result = apiobj.api.details(napi.PlaceID(332), linked_places=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(332), linked_places=True)
assert result.linked_rows == [] assert result.linked_rows == []
def test_lookup_place_with_linked_places_existing(apiobj): def test_lookup_place_with_linked_places_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street', class_='highway', type='residential', name='Street',
country_code='pl', linked_place_id=45, country_code='pl', linked_place_id=45,
@@ -197,7 +202,8 @@ def test_lookup_place_with_linked_places_existing(apiobj):
country_code='pl', linked_place_id=332, country_code='pl', linked_place_id=332,
rank_search=27, rank_address=26) rank_search=27, rank_address=26)
result = apiobj.api.details(napi.PlaceID(332), linked_places=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(332), linked_places=True)
assert result.linked_rows == [ assert result.linked_rows == [
napi.AddressLine(place_id=1001, osm_object=('W', 5), napi.AddressLine(place_id=1001, osm_object=('W', 5),
@@ -213,18 +219,19 @@ def test_lookup_place_with_linked_places_existing(apiobj):
] ]
def test_lookup_place_with_parented_places_not_existing(apiobj): def test_lookup_place_with_parented_places_not_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street', class_='highway', type='residential', name='Street',
country_code='pl', parent_place_id=45, country_code='pl', parent_place_id=45,
rank_search=27, rank_address=26) rank_search=27, rank_address=26)
result = apiobj.api.details(napi.PlaceID(332), parented_places=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(332), parented_places=True)
assert result.parented_rows == [] assert result.parented_rows == []
def test_lookup_place_with_parented_places_existing(apiobj): def test_lookup_place_with_parented_places_existing(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', name='Street', class_='highway', type='residential', name='Street',
country_code='pl', parent_place_id=45, country_code='pl', parent_place_id=45,
@@ -238,7 +245,8 @@ def test_lookup_place_with_parented_places_existing(apiobj):
country_code='pl', parent_place_id=332, country_code='pl', parent_place_id=332,
rank_search=27, rank_address=26) rank_search=27, rank_address=26)
result = apiobj.api.details(napi.PlaceID(332), parented_places=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(332), parented_places=True)
assert result.parented_rows == [ assert result.parented_rows == [
napi.AddressLine(place_id=1001, osm_object=('N', 5), napi.AddressLine(place_id=1001, osm_object=('N', 5),
@@ -250,7 +258,7 @@ def test_lookup_place_with_parented_places_existing(apiobj):
@pytest.mark.parametrize('idobj', (napi.PlaceID(4924), napi.OsmID('W', 9928))) @pytest.mark.parametrize('idobj', (napi.PlaceID(4924), napi.OsmID('W', 9928)))
def test_lookup_in_osmline(apiobj, idobj): def test_lookup_in_osmline(apiobj, frontend, idobj):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_osmline(place_id=4924, osm_id=9928, apiobj.add_osmline(place_id=4924, osm_id=9928,
parent_place_id=12, parent_place_id=12,
@@ -260,7 +268,8 @@ def test_lookup_in_osmline(apiobj, idobj):
indexed_date=import_date, indexed_date=import_date,
geometry='LINESTRING(23 34, 23 35)') geometry='LINESTRING(23 34, 23 35)')
result = apiobj.api.details(idobj) api = frontend(apiobj, options={'details'})
result = api.details(idobj)
assert result is not None assert result is not None
@@ -298,7 +307,7 @@ def test_lookup_in_osmline(apiobj, idobj):
assert result.geometry == {'type': 'ST_LineString'} assert result.geometry == {'type': 'ST_LineString'}
def test_lookup_in_osmline_split_interpolation(apiobj): def test_lookup_in_osmline_split_interpolation(apiobj, frontend):
apiobj.add_osmline(place_id=1000, osm_id=9, apiobj.add_osmline(place_id=1000, osm_id=9,
startnumber=2, endnumber=4, step=1) startnumber=2, endnumber=4, step=1)
apiobj.add_osmline(place_id=1001, osm_id=9, apiobj.add_osmline(place_id=1001, osm_id=9,
@@ -306,18 +315,19 @@ def test_lookup_in_osmline_split_interpolation(apiobj):
apiobj.add_osmline(place_id=1002, osm_id=9, apiobj.add_osmline(place_id=1002, osm_id=9,
startnumber=11, endnumber=20, step=1) startnumber=11, endnumber=20, step=1)
api = frontend(apiobj, options={'details'})
for i in range(1, 6): for i in range(1, 6):
result = apiobj.api.details(napi.OsmID('W', 9, str(i))) result = api.details(napi.OsmID('W', 9, str(i)))
assert result.place_id == 1000 assert result.place_id == 1000
for i in range(7, 11): for i in range(7, 11):
result = apiobj.api.details(napi.OsmID('W', 9, str(i))) result = api.details(napi.OsmID('W', 9, str(i)))
assert result.place_id == 1001 assert result.place_id == 1001
for i in range(12, 22): for i in range(12, 22):
result = apiobj.api.details(napi.OsmID('W', 9, str(i))) result = api.details(napi.OsmID('W', 9, str(i)))
assert result.place_id == 1002 assert result.place_id == 1002
def test_lookup_osmline_with_address_details(apiobj): def test_lookup_osmline_with_address_details(apiobj, frontend):
apiobj.add_osmline(place_id=9000, osm_id=9, apiobj.add_osmline(place_id=9000, osm_id=9,
startnumber=2, endnumber=4, step=1, startnumber=2, endnumber=4, step=1,
parent_place_id=332) parent_place_id=332)
@@ -337,7 +347,8 @@ def test_lookup_osmline_with_address_details(apiobj):
country_code='pl', country_code='pl',
rank_search=17, rank_address=16) rank_search=17, rank_address=16)
result = apiobj.api.details(napi.PlaceID(9000), address_details=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(9000), address_details=True)
assert result.address_rows == [ assert result.address_rows == [
napi.AddressLine(place_id=332, osm_object=('W', 4), napi.AddressLine(place_id=332, osm_object=('W', 4),
@@ -366,7 +377,7 @@ def test_lookup_osmline_with_address_details(apiobj):
] ]
def test_lookup_in_tiger(apiobj): def test_lookup_in_tiger(apiobj, frontend):
apiobj.add_tiger(place_id=4924, apiobj.add_tiger(place_id=4924,
parent_place_id=12, parent_place_id=12,
startnumber=1, endnumber=4, step=1, startnumber=1, endnumber=4, step=1,
@@ -377,7 +388,8 @@ def test_lookup_in_tiger(apiobj):
osm_type='W', osm_id=6601223, osm_type='W', osm_id=6601223,
geometry='LINESTRING(23 34, 23 35)') geometry='LINESTRING(23 34, 23 35)')
result = apiobj.api.details(napi.PlaceID(4924)) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(4924))
assert result is not None assert result is not None
@@ -415,7 +427,7 @@ def test_lookup_in_tiger(apiobj):
assert result.geometry == {'type': 'ST_LineString'} assert result.geometry == {'type': 'ST_LineString'}
def test_lookup_tiger_with_address_details(apiobj): def test_lookup_tiger_with_address_details(apiobj, frontend):
apiobj.add_tiger(place_id=9000, apiobj.add_tiger(place_id=9000,
startnumber=2, endnumber=4, step=1, startnumber=2, endnumber=4, step=1,
parent_place_id=332) parent_place_id=332)
@@ -435,7 +447,8 @@ def test_lookup_tiger_with_address_details(apiobj):
country_code='us', country_code='us',
rank_search=17, rank_address=16) rank_search=17, rank_address=16)
result = apiobj.api.details(napi.PlaceID(9000), address_details=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(9000), address_details=True)
assert result.address_rows == [ assert result.address_rows == [
napi.AddressLine(place_id=332, osm_object=('W', 4), napi.AddressLine(place_id=332, osm_object=('W', 4),
@@ -464,7 +477,7 @@ def test_lookup_tiger_with_address_details(apiobj):
] ]
def test_lookup_in_postcode(apiobj): def test_lookup_in_postcode(apiobj, frontend):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0) import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0)
apiobj.add_postcode(place_id=554, apiobj.add_postcode(place_id=554,
parent_place_id=152, parent_place_id=152,
@@ -474,7 +487,8 @@ def test_lookup_in_postcode(apiobj):
indexed_date=import_date, indexed_date=import_date,
geometry='POINT(-9.45 5.6)') geometry='POINT(-9.45 5.6)')
result = apiobj.api.details(napi.PlaceID(554)) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(554))
assert result is not None assert result is not None
@@ -512,7 +526,7 @@ def test_lookup_in_postcode(apiobj):
assert result.geometry == {'type': 'ST_Point'} assert result.geometry == {'type': 'ST_Point'}
def test_lookup_postcode_with_address_details(apiobj): def test_lookup_postcode_with_address_details(apiobj, frontend):
apiobj.add_postcode(place_id=9000, apiobj.add_postcode(place_id=9000,
parent_place_id=332, parent_place_id=332,
postcode='34 425', postcode='34 425',
@@ -528,7 +542,8 @@ def test_lookup_postcode_with_address_details(apiobj):
country_code='gb', country_code='gb',
rank_search=17, rank_address=16) rank_search=17, rank_address=16)
result = apiobj.api.details(napi.PlaceID(9000), address_details=True) api = frontend(apiobj, options={'details'})
result = api.details(napi.PlaceID(9000), address_details=True)
assert result.address_rows == [ assert result.address_rows == [
napi.AddressLine(place_id=9000, osm_object=None, napi.AddressLine(place_id=9000, osm_object=None,
@@ -559,18 +574,20 @@ def test_lookup_postcode_with_address_details(apiobj):
@pytest.mark.parametrize('objid', [napi.PlaceID(1736), @pytest.mark.parametrize('objid', [napi.PlaceID(1736),
napi.OsmID('W', 55), napi.OsmID('W', 55),
napi.OsmID('N', 55, 'amenity')]) napi.OsmID('N', 55, 'amenity')])
def test_lookup_missing_object(apiobj, objid): def test_lookup_missing_object(apiobj, frontend, objid):
apiobj.add_placex(place_id=1, osm_type='N', osm_id=55, apiobj.add_placex(place_id=1, osm_type='N', osm_id=55,
class_='place', type='suburb') class_='place', type='suburb')
assert apiobj.api.details(objid) is None api = frontend(apiobj, options={'details'})
assert api.details(objid) is None
@pytest.mark.parametrize('gtype', (napi.GeometryFormat.KML, @pytest.mark.parametrize('gtype', (napi.GeometryFormat.KML,
napi.GeometryFormat.SVG, napi.GeometryFormat.SVG,
napi.GeometryFormat.TEXT)) napi.GeometryFormat.TEXT))
def test_lookup_unsupported_geometry(apiobj, gtype): def test_lookup_unsupported_geometry(apiobj, frontend, gtype):
apiobj.add_placex(place_id=332) apiobj.add_placex(place_id=332)
api = frontend(apiobj, options={'details'})
with pytest.raises(ValueError): with pytest.raises(ValueError):
apiobj.api.details(napi.PlaceID(332), geometry_output=gtype) api.details(napi.PlaceID(332), geometry_output=gtype)

View File

@@ -7,22 +7,26 @@
""" """
Tests for lookup API call. Tests for lookup API call.
""" """
import json
import pytest import pytest
import nominatim.api as napi import nominatim.api as napi
def test_lookup_empty_list(apiobj): def test_lookup_empty_list(apiobj, frontend):
assert apiobj.api.lookup([]) == [] api = frontend(apiobj, options={'details'})
assert api.lookup([]) == []
def test_lookup_non_existing(apiobj): def test_lookup_non_existing(apiobj, frontend):
assert apiobj.api.lookup((napi.PlaceID(332), napi.OsmID('W', 4), api = frontend(apiobj, options={'details'})
napi.OsmID('W', 4, 'highway'))) == [] assert api.lookup((napi.PlaceID(332), napi.OsmID('W', 4),
napi.OsmID('W', 4, 'highway'))) == []
@pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4), @pytest.mark.parametrize('idobj', (napi.PlaceID(332), napi.OsmID('W', 4),
napi.OsmID('W', 4, 'highway'))) napi.OsmID('W', 4, 'highway')))
def test_lookup_single_placex(apiobj, idobj): def test_lookup_single_placex(apiobj, frontend, idobj):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'}, name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -36,7 +40,8 @@ def test_lookup_single_placex(apiobj, idobj):
centroid=(23, 34), centroid=(23, 34),
geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)') geometry='LINESTRING(23 34, 23.1 34, 23.1 34.1, 23 34)')
result = apiobj.api.lookup([idobj]) api = frontend(apiobj, options={'details'})
result = api.lookup([idobj])
assert len(result) == 1 assert len(result) == 1
@@ -72,7 +77,7 @@ def test_lookup_single_placex(apiobj, idobj):
assert result.geometry == {} assert result.geometry == {}
def test_lookup_multiple_places(apiobj): def test_lookup_multiple_places(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'}, name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -93,9 +98,10 @@ def test_lookup_multiple_places(apiobj):
geometry='LINESTRING(23 34, 23 35)') geometry='LINESTRING(23 34, 23 35)')
result = apiobj.api.lookup((napi.OsmID('W', 1), api = frontend(apiobj, options={'details'})
napi.OsmID('W', 4), result = api.lookup((napi.OsmID('W', 1),
napi.OsmID('W', 9928))) napi.OsmID('W', 4),
napi.OsmID('W', 9928)))
assert len(result) == 2 assert len(result) == 2
@@ -103,7 +109,7 @@ def test_lookup_multiple_places(apiobj):
@pytest.mark.parametrize('gtype', list(napi.GeometryFormat)) @pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
def test_simple_place_with_geometry(apiobj, gtype): def test_simple_place_with_geometry(apiobj, frontend, gtype):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'}, name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -117,8 +123,8 @@ def test_simple_place_with_geometry(apiobj, gtype):
centroid=(23, 34), centroid=(23, 34),
geometry='POLYGON((23 34, 23.1 34, 23.1 34.1, 23 34))') geometry='POLYGON((23 34, 23.1 34, 23.1 34.1, 23 34))')
result = apiobj.api.lookup([napi.OsmID('W', 4)], api = frontend(apiobj, options={'details'})
geometry_output=gtype) result = api.lookup([napi.OsmID('W', 4)], geometry_output=gtype)
assert len(result) == 1 assert len(result) == 1
assert result[0].place_id == 332 assert result[0].place_id == 332
@@ -129,7 +135,7 @@ def test_simple_place_with_geometry(apiobj, gtype):
assert list(result[0].geometry.keys()) == [gtype.name.lower()] assert list(result[0].geometry.keys()) == [gtype.name.lower()]
def test_simple_place_with_geometry_simplified(apiobj): def test_simple_place_with_geometry_simplified(apiobj, frontend):
apiobj.add_placex(place_id=332, osm_type='W', osm_id=4, apiobj.add_placex(place_id=332, osm_type='W', osm_id=4,
class_='highway', type='residential', class_='highway', type='residential',
name={'name': 'Road'}, address={'city': 'Barrow'}, name={'name': 'Road'}, address={'city': 'Barrow'},
@@ -143,11 +149,15 @@ def test_simple_place_with_geometry_simplified(apiobj):
centroid=(23, 34), centroid=(23, 34),
geometry='POLYGON((23 34, 22.999 34, 23.1 34, 23.1 34.1, 23 34))') geometry='POLYGON((23 34, 22.999 34, 23.1 34, 23.1 34.1, 23 34))')
result = apiobj.api.lookup([napi.OsmID('W', 4)], api = frontend(apiobj, options={'details'})
geometry_output=napi.GeometryFormat.TEXT, result = api.lookup([napi.OsmID('W', 4)],
geometry_simplification=0.1) geometry_output=napi.GeometryFormat.GEOJSON,
geometry_simplification=0.1)
assert len(result) == 1 assert len(result) == 1
assert result[0].place_id == 332 assert result[0].place_id == 332
assert result[0].geometry == {'text': 'POLYGON((23 34,23.1 34,23.1 34.1,23 34))'}
geom = json.loads(result[0].geometry['geojson'])
assert geom['type'] == 'Polygon'
assert geom['coordinates'] == [[[23, 34], [23.1, 34], [23.1, 34.1], [23, 34]]]

View File

@@ -16,20 +16,23 @@ import pytest
import nominatim.api as napi import nominatim.api as napi
def test_reverse_rank_30(apiobj): API_OPTIONS = {'reverse'}
def test_reverse_rank_30(apiobj, frontend):
apiobj.add_placex(place_id=223, class_='place', type='house', apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1', housenumber='1',
centroid=(1.3, 0.7), centroid=(1.3, 0.7),
geometry='POINT(1.3 0.7)') geometry='POINT(1.3 0.7)')
result = apiobj.api.reverse((1.3, 0.7)) api = frontend(apiobj, options=API_OPTIONS)
result = api.reverse((1.3, 0.7))
assert result is not None assert result is not None
assert result.place_id == 223 assert result.place_id == 223
@pytest.mark.parametrize('country', ['de', 'us']) @pytest.mark.parametrize('country', ['de', 'us'])
def test_reverse_street(apiobj, country): def test_reverse_street(apiobj, frontend, country):
apiobj.add_placex(place_id=990, class_='highway', type='service', apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27, rank_search=27, rank_address=27,
name = {'name': 'My Street'}, name = {'name': 'My Street'},
@@ -37,17 +40,19 @@ def test_reverse_street(apiobj, country):
country_code=country, country_code=country,
geometry='LINESTRING(9.995 10, 10.005 10)') geometry='LINESTRING(9.995 10, 10.005 10)')
assert apiobj.api.reverse((9.995, 10)).place_id == 990 api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((9.995, 10)).place_id == 990
def test_reverse_ignore_unindexed(apiobj): def test_reverse_ignore_unindexed(apiobj, frontend):
apiobj.add_placex(place_id=223, class_='place', type='house', apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1', housenumber='1',
indexed_status=2, indexed_status=2,
centroid=(1.3, 0.7), centroid=(1.3, 0.7),
geometry='POINT(1.3 0.7)') geometry='POINT(1.3 0.7)')
result = apiobj.api.reverse((1.3, 0.7)) api = frontend(apiobj, options=API_OPTIONS)
result = api.reverse((1.3, 0.7))
assert result is None assert result is None
@@ -62,7 +67,7 @@ def test_reverse_ignore_unindexed(apiobj):
(0.70003, napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225), (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
(0.70003, napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225), (0.70003, napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225),
(5, napi.DataLayer.ADDRESS, 229)]) (5, napi.DataLayer.ADDRESS, 229)])
def test_reverse_rank_30_layers(apiobj, y, layer, place_id): def test_reverse_rank_30_layers(apiobj, frontend, y, layer, place_id):
apiobj.add_placex(place_id=223, class_='place', type='house', apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1', housenumber='1',
rank_address=30, rank_address=30,
@@ -90,21 +95,23 @@ def test_reverse_rank_30_layers(apiobj, y, layer, place_id):
rank_search=30, rank_search=30,
centroid=(1.3, 5)) centroid=(1.3, 5))
assert apiobj.api.reverse((1.3, y), layers=layer).place_id == place_id api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((1.3, y), layers=layer).place_id == place_id
def test_reverse_poi_layer_with_no_pois(apiobj): def test_reverse_poi_layer_with_no_pois(apiobj, frontend):
apiobj.add_placex(place_id=223, class_='place', type='house', apiobj.add_placex(place_id=223, class_='place', type='house',
housenumber='1', housenumber='1',
rank_address=30, rank_address=30,
rank_search=30, rank_search=30,
centroid=(1.3, 0.70001)) centroid=(1.3, 0.70001))
assert apiobj.api.reverse((1.3, 0.70001), max_rank=29, api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((1.3, 0.70001), max_rank=29,
layers=napi.DataLayer.POI) is None layers=napi.DataLayer.POI) is None
def test_reverse_housenumber_on_street(apiobj): def test_reverse_housenumber_on_street(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service', apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27, rank_search=27, rank_address=27,
name = {'name': 'My Street'}, name = {'name': 'My Street'},
@@ -116,12 +123,13 @@ def test_reverse_housenumber_on_street(apiobj):
housenumber='23', housenumber='23',
centroid=(10.0, 10.00001)) centroid=(10.0, 10.00001))
assert apiobj.api.reverse((10.0, 10.0), max_rank=30).place_id == 991 api = frontend(apiobj, options=API_OPTIONS)
assert apiobj.api.reverse((10.0, 10.0), max_rank=27).place_id == 990 assert api.reverse((10.0, 10.0), max_rank=30).place_id == 991
assert apiobj.api.reverse((10.0, 10.00001), max_rank=30).place_id == 991 assert api.reverse((10.0, 10.0), max_rank=27).place_id == 990
assert api.reverse((10.0, 10.00001), max_rank=30).place_id == 991
def test_reverse_housenumber_interpolation(apiobj): def test_reverse_housenumber_interpolation(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service', apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27, rank_search=27, rank_address=27,
name = {'name': 'My Street'}, name = {'name': 'My Street'},
@@ -138,10 +146,11 @@ def test_reverse_housenumber_interpolation(apiobj):
centroid=(10.0, 10.00001), centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
assert apiobj.api.reverse((10.0, 10.0)).place_id == 992 api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((10.0, 10.0)).place_id == 992
def test_reverse_housenumber_point_interpolation(apiobj): def test_reverse_housenumber_point_interpolation(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service', apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27, rank_search=27, rank_address=27,
name = {'name': 'My Street'}, name = {'name': 'My Street'},
@@ -153,12 +162,13 @@ def test_reverse_housenumber_point_interpolation(apiobj):
centroid=(10.0, 10.00001), centroid=(10.0, 10.00001),
geometry='POINT(10.0 10.00001)') geometry='POINT(10.0 10.00001)')
res = apiobj.api.reverse((10.0, 10.0)) api = frontend(apiobj, options=API_OPTIONS)
res = api.reverse((10.0, 10.0))
assert res.place_id == 992 assert res.place_id == 992
assert res.housenumber == '42' assert res.housenumber == '42'
def test_reverse_tiger_number(apiobj): def test_reverse_tiger_number(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service', apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27, rank_search=27, rank_address=27,
name = {'name': 'My Street'}, name = {'name': 'My Street'},
@@ -171,11 +181,12 @@ def test_reverse_tiger_number(apiobj):
centroid=(10.0, 10.00001), centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
assert apiobj.api.reverse((10.0, 10.0)).place_id == 992 api = frontend(apiobj, options=API_OPTIONS)
assert apiobj.api.reverse((10.0, 10.00001)).place_id == 992 assert api.reverse((10.0, 10.0)).place_id == 992
assert api.reverse((10.0, 10.00001)).place_id == 992
def test_reverse_point_tiger(apiobj): def test_reverse_point_tiger(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service', apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27, rank_search=27, rank_address=27,
name = {'name': 'My Street'}, name = {'name': 'My Street'},
@@ -188,12 +199,13 @@ def test_reverse_point_tiger(apiobj):
centroid=(10.0, 10.00001), centroid=(10.0, 10.00001),
geometry='POINT(10.0 10.00001)') geometry='POINT(10.0 10.00001)')
res = apiobj.api.reverse((10.0, 10.0)) api = frontend(apiobj, options=API_OPTIONS)
res = api.reverse((10.0, 10.0))
assert res.place_id == 992 assert res.place_id == 992
assert res.housenumber == '1' assert res.housenumber == '1'
def test_reverse_low_zoom_address(apiobj): def test_reverse_low_zoom_address(apiobj, frontend):
apiobj.add_placex(place_id=1001, class_='place', type='house', apiobj.add_placex(place_id=1001, class_='place', type='house',
housenumber='1', housenumber='1',
rank_address=30, rank_address=30,
@@ -207,11 +219,12 @@ def test_reverse_low_zoom_address(apiobj):
geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001, geometry="""POLYGON((59.3 80.70001, 59.3001 80.70001,
59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""") 59.3001 80.70101, 59.3 80.70101, 59.3 80.70001))""")
assert apiobj.api.reverse((59.30005, 80.7005)).place_id == 1001 api = frontend(apiobj, options=API_OPTIONS)
assert apiobj.api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002 assert api.reverse((59.30005, 80.7005)).place_id == 1001
assert api.reverse((59.30005, 80.7005), max_rank=18).place_id == 1002
def test_reverse_place_node_in_area(apiobj): def test_reverse_place_node_in_area(apiobj, frontend):
apiobj.add_placex(place_id=1002, class_='place', type='town', apiobj.add_placex(place_id=1002, class_='place', type='town',
name={'name': 'Town Area'}, name={'name': 'Town Area'},
rank_address=16, rank_address=16,
@@ -226,7 +239,8 @@ def test_reverse_place_node_in_area(apiobj):
rank_search=18, rank_search=18,
centroid=(59.30004, 80.70055)) centroid=(59.30004, 80.70055))
assert apiobj.api.reverse((59.30004, 80.70055)).place_id == 1003 api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((59.30004, 80.70055)).place_id == 1003
@pytest.mark.parametrize('layer,place_id', [(napi.DataLayer.MANMADE, 225), @pytest.mark.parametrize('layer,place_id', [(napi.DataLayer.MANMADE, 225),
@@ -234,7 +248,7 @@ def test_reverse_place_node_in_area(apiobj):
(napi.DataLayer.NATURAL, 227), (napi.DataLayer.NATURAL, 227),
(napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225), (napi.DataLayer.MANMADE | napi.DataLayer.RAILWAY, 225),
(napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225)]) (napi.DataLayer.MANMADE | napi.DataLayer.NATURAL, 225)])
def test_reverse_larger_area_layers(apiobj, layer, place_id): def test_reverse_larger_area_layers(apiobj, frontend, layer, place_id):
apiobj.add_placex(place_id=225, class_='man_made', type='dam', apiobj.add_placex(place_id=225, class_='man_made', type='dam',
name={'name': 'Dam'}, name={'name': 'Dam'},
rank_address=0, rank_address=0,
@@ -251,17 +265,19 @@ def test_reverse_larger_area_layers(apiobj, layer, place_id):
rank_search=16, rank_search=16,
centroid=(1.3, 0.70005)) centroid=(1.3, 0.70005))
assert apiobj.api.reverse((1.3, 0.7), layers=layer).place_id == place_id api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((1.3, 0.7), layers=layer).place_id == place_id
def test_reverse_country_lookup_no_objects(apiobj): def test_reverse_country_lookup_no_objects(apiobj, frontend):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
assert apiobj.api.reverse((0.5, 0.5)) is None api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((0.5, 0.5)) is None
@pytest.mark.parametrize('rank', [4, 30]) @pytest.mark.parametrize('rank', [4, 30])
def test_reverse_country_lookup_country_only(apiobj, rank): def test_reverse_country_lookup_country_only(apiobj, frontend, rank):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
apiobj.add_placex(place_id=225, class_='place', type='country', apiobj.add_placex(place_id=225, class_='place', type='country',
name={'name': 'My Country'}, name={'name': 'My Country'},
@@ -270,10 +286,11 @@ def test_reverse_country_lookup_country_only(apiobj, rank):
country_code='xx', country_code='xx',
centroid=(0.7, 0.7)) centroid=(0.7, 0.7))
assert apiobj.api.reverse((0.5, 0.5), max_rank=rank).place_id == 225 api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((0.5, 0.5), max_rank=rank).place_id == 225
def test_reverse_country_lookup_place_node_inside(apiobj): def test_reverse_country_lookup_place_node_inside(apiobj, frontend):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
apiobj.add_placex(place_id=225, class_='place', type='state', apiobj.add_placex(place_id=225, class_='place', type='state',
osm_type='N', osm_type='N',
@@ -283,11 +300,12 @@ def test_reverse_country_lookup_place_node_inside(apiobj):
country_code='xx', country_code='xx',
centroid=(0.5, 0.505)) centroid=(0.5, 0.505))
assert apiobj.api.reverse((0.5, 0.5)).place_id == 225 api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((0.5, 0.5)).place_id == 225
@pytest.mark.parametrize('gtype', list(napi.GeometryFormat)) @pytest.mark.parametrize('gtype', list(napi.GeometryFormat))
def test_reverse_geometry_output_placex(apiobj, gtype): def test_reverse_geometry_output_placex(apiobj, frontend, gtype):
apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))') apiobj.add_country('xx', 'POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))')
apiobj.add_placex(place_id=1001, class_='place', type='house', apiobj.add_placex(place_id=1001, class_='place', type='house',
housenumber='1', housenumber='1',
@@ -302,34 +320,37 @@ def test_reverse_geometry_output_placex(apiobj, gtype):
country_code='xx', country_code='xx',
centroid=(0.5, 0.5)) centroid=(0.5, 0.5))
assert apiobj.api.reverse((59.3, 80.70001), geometry_output=gtype).place_id == 1001 api = frontend(apiobj, options=API_OPTIONS)
assert apiobj.api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003 assert api.reverse((59.3, 80.70001), geometry_output=gtype).place_id == 1001
assert api.reverse((0.5, 0.5), geometry_output=gtype).place_id == 1003
def test_reverse_simplified_geometry(apiobj): def test_reverse_simplified_geometry(apiobj, frontend):
apiobj.add_placex(place_id=1001, class_='place', type='house', apiobj.add_placex(place_id=1001, class_='place', type='house',
housenumber='1', housenumber='1',
rank_address=30, rank_address=30,
rank_search=30, rank_search=30,
centroid=(59.3, 80.70001)) centroid=(59.3, 80.70001))
api = frontend(apiobj, options=API_OPTIONS)
details = dict(geometry_output=napi.GeometryFormat.GEOJSON, details = dict(geometry_output=napi.GeometryFormat.GEOJSON,
geometry_simplification=0.1) geometry_simplification=0.1)
assert apiobj.api.reverse((59.3, 80.70001), **details).place_id == 1001 assert api.reverse((59.3, 80.70001), **details).place_id == 1001
def test_reverse_interpolation_geometry(apiobj): def test_reverse_interpolation_geometry(apiobj, frontend):
apiobj.add_osmline(place_id=992, apiobj.add_osmline(place_id=992,
parent_place_id=990, parent_place_id=990,
startnumber=1, endnumber=3, step=1, startnumber=1, endnumber=3, step=1,
centroid=(10.0, 10.00001), centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
assert apiobj.api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\ api = frontend(apiobj, options=API_OPTIONS)
assert api.reverse((10.0, 10.0), geometry_output=napi.GeometryFormat.TEXT)\
.geometry['text'] == 'POINT(10 10.00001)' .geometry['text'] == 'POINT(10 10.00001)'
def test_reverse_tiger_geometry(apiobj): def test_reverse_tiger_geometry(apiobj, frontend):
apiobj.add_placex(place_id=990, class_='highway', type='service', apiobj.add_placex(place_id=990, class_='highway', type='service',
rank_search=27, rank_address=27, rank_search=27, rank_address=27,
name = {'name': 'My Street'}, name = {'name': 'My Street'},
@@ -342,7 +363,8 @@ def test_reverse_tiger_geometry(apiobj):
centroid=(10.0, 10.00001), centroid=(10.0, 10.00001),
geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)') geometry='LINESTRING(9.995 10.00001, 10.005 10.00001)')
output = apiobj.api.reverse((10.0, 10.0), api = frontend(apiobj, options=API_OPTIONS)
output = api.reverse((10.0, 10.0),
geometry_output=napi.GeometryFormat.GEOJSON).geometry['geojson'] geometry_output=napi.GeometryFormat.GEOJSON).geometry['geojson']
assert json.loads(output) == {'coordinates': [10, 10.00001], 'type': 'Point'} assert json.loads(output) == {'coordinates': [10, 10.00001], 'type': 'Point'}

View File

@@ -14,8 +14,9 @@ import pytest
from nominatim.version import NOMINATIM_VERSION, NominatimVersion from nominatim.version import NOMINATIM_VERSION, NominatimVersion
import nominatim.api as napi import nominatim.api as napi
def test_status_no_extra_info(apiobj): def test_status_no_extra_info(apiobj, frontend):
result = apiobj.api.status() api = frontend(apiobj)
result = api.status()
assert result.status == 0 assert result.status == 0
assert result.message == 'OK' assert result.message == 'OK'
@@ -24,14 +25,15 @@ def test_status_no_extra_info(apiobj):
assert result.data_updated is None assert result.data_updated is None
def test_status_full(apiobj): def test_status_full(apiobj, frontend):
import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0, tzinfo=dt.timezone.utc) import_date = dt.datetime(2022, 12, 7, 14, 14, 46, 0, tzinfo=dt.timezone.utc)
apiobj.add_data('import_status', apiobj.add_data('import_status',
[{'lastimportdate': import_date}]) [{'lastimportdate': import_date}])
apiobj.add_data('properties', apiobj.add_data('properties',
[{'property': 'database_version', 'value': '99.5.4-2'}]) [{'property': 'database_version', 'value': '99.5.4-2'}])
result = apiobj.api.status() api = frontend(apiobj)
result = api.status()
assert result.status == 0 assert result.status == 0
assert result.message == 'OK' assert result.message == 'OK'