Compare commits

..

55 Commits

Author SHA1 Message Date
Sarah Hoffmann
c44cac4c22 Merge pull request #4020 from kad-link/fix/add-admin-level-in-extratags
fix: add admin_level in extratags for boundary=administrative
2026-03-10 22:42:45 +01:00
Sri Charan Chittineni
b71389274b add BDD scenario for admin_level when boundary=administrative 2026-03-10 06:49:19 +00:00
Sri Charan Chittineni
c24bc292ea add unit tests for admin_level when boundary=administrative 2026-03-09 20:00:32 +00:00
Sarah Hoffmann
f11b21b0ba Merge pull request #4024 from lonvia/remove-fallback-importance
Clean up importance computation
2026-03-09 17:05:48 +01:00
Sarah Hoffmann
cd84386004 add migration to backfill missing importance values 2026-03-09 10:17:04 +01:00
Sarah Hoffmann
7a8a09ca78 streamline selected columns in search CTEs 2026-03-09 10:05:31 +01:00
Sarah Hoffmann
7bbfb4b972 drop search_rank column from search_name
This is no longer needed for the initial lookup.
2026-03-09 10:05:31 +01:00
Sarah Hoffmann
46dfb12844 remove importance fallbacks in search queries 2026-03-09 10:05:31 +01:00
Sarah Hoffmann
a16c698fb2 enforce presence of importance value in placex 2026-03-09 10:05:31 +01:00
Sri Charan Chittineni
2b0c18d333 modify import paths to helpers.py 2026-03-09 05:08:05 +00:00
Sri Charan Chittineni
b195550c07 add _add_admin_level helper function to helpers.py 2026-03-09 04:59:20 +00:00
Sri CHaRan
11e458ca47 add admin_level to extratags for XML and details API endpoint 2026-03-08 19:32:01 +05:30
Sri CHaRan
047e9bc4ad add admin_level to extatags in JSON format 2026-03-08 19:29:33 +05:30
Sarah Hoffmann
d43e95f177 Merge pull request #4012 from lonvia/interpolation-separation
Move interpolations into a separate table
2026-03-04 17:27:02 +01:00
Sarah Hoffmann
e22dda2a86 Merge pull request #3995 from Itz-Agasta/exp
Add Language-aware country penalty in forward geocoding
2026-03-04 17:24:46 +01:00
Sarah Hoffmann
639f05fecc docs: update database layout in developer section 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
d759b6ed00 add migration for interpolation tables 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
abd5cbada6 revert importing of housenumber interpolations 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
b71543b03b add test for new interpolation variants 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
c25204ce31 adapt BDD tests to separate interpolation table 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
b43116ff52 BDD tests: factor out insert sql code 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
c0f1aeea4d adapt unit tests to use separate interpolation table 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
c2d6821f2f adapt interpolation handling to use separate place_interpolation table 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
a115eeeb40 copy interpolation data from new place_interpolation table 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
e93c6809a9 import interpolation into separate place table 2026-03-04 16:29:24 +01:00
Sarah Hoffmann
e8836a91bb Merge pull request #4011 from lonvia/fix-expected-count-hnr-search-by-addr
Adjust expected count when doing housenumber search by address
2026-03-04 16:23:39 +01:00
Sarah Hoffmann
fd3dc5aeab adjust expected count when doing housenumber search by address 2026-03-04 11:26:17 +01:00
Itz-Agasta
36a364ec25 Adds test for locale-sensitive country name matching
Introduces a scenario to verify that a country's alternate-language name
does not dominate search results when the requested locale differs.
Ensures correct result selection for locale-aware geocoding.

Relates to #3210
2026-03-03 18:08:05 +05:30
Itz-Agasta
9c2d4f4285 Adds language-aware country penalty in forward geocoding
Threads the caller's Accept-Language preference into ForwardGeocoder
and uses it in rerank_by_query to check whether the query matches the
localized name of a country result.

If the caller's locale renders the country name differently (e.g. pt-BR
gives "Brasil" ≠ "Brasilia"), the country's importance is added as an
accuracy penalty, neutralising its dominance over lower-ranked places.

If the locale matches (e.g. Finnish gives "Brasilia" = "Brasilia"),
no penalty is applied and the country correctly wins.
2026-03-03 18:06:22 +05:30
Sarah Hoffmann
c81fb58b63 Merge pull request #3971 from jayaddison/issue-3969/indexer-avoid-addressrank-loop
Indexer: allow 'has_pending' to consider address-rank subsets
2026-03-02 22:02:06 +01:00
Sarah Hoffmann
d7249a135b Merge pull request #4009 from jonas-endter-optimax-energy/fix-documentation-wikipedia-data-link
fix link to importance data in documentation
2026-03-02 19:34:49 +01:00
Jonas Endter
757a2a6cd8 fix link to importance data in documentation 2026-03-02 16:38:11 +01:00
Sarah Hoffmann
6c00169666 Merge pull request #3997 from lonvia/fix-postcode-index
Reenable index on centroid column for location_postcodes
2026-02-22 17:20:15 +01:00
Sarah Hoffmann
f0d32501e4 location_postcodes does geometry lookups on centroid 2026-02-22 15:51:38 +01:00
Sarah Hoffmann
3e35d7fe26 Merge pull request #3996 from lonvia/improved-postcode-import
Avoid updates on initial filling of postcode table
2026-02-22 13:12:49 +01:00
Sarah Hoffmann
fff5858b53 add option to force a postcode reimport 2026-02-21 13:03:04 +01:00
Sarah Hoffmann
2507d5a298 avoid updates on initial filling of postcode table 2026-02-20 18:53:48 +01:00
Sarah Hoffmann
af9458a601 Merge pull request #3981 from Itz-Agasta/test
Implement Lazy Loading Search Endpoint
2026-02-18 19:38:05 +01:00
Itz-Agasta
855f451a5f Adds lazy loading for search endpoint availability
Introduces a mechanism to defer the search endpoint's availability check until the first request, improving startup robustness. If the search table is unavailable due to DB issues, the endpoint now responds with a 503 or 404 as appropriate, and retries the check on subsequent requests. This ensures that downtime or partial DB failures no longer prevent the API from initializing or serving reverse-only mode.
2026-02-18 21:46:55 +05:30
Sarah Hoffmann
bf17f1d01a Merge pull request #3991 from lonvia/interpolation-on-addresses
Add support for addr:interpolation on housenumbers
2026-02-18 14:25:38 +01:00
Sarah Hoffmann
9ac56c2078 add support for expanding interpolations on housenumbers 2026-02-18 11:52:21 +01:00
Sarah Hoffmann
fbe0be9301 Merge pull request #3923 from kad-link/ci/windows-smoke
CI: add dev-only Windows smoke workflow
2026-02-16 09:27:23 +01:00
Sarah Hoffmann
0249cd54da Merge pull request #3989 from lonvia/rework-misc-tests
More production SQL use in unit tests
2026-02-16 09:20:37 +01:00
Sarah Hoffmann
52b5337f36 Merge pull request #3988 from jayaddison/pr-3957-followup/empty-name-field
Indexing: fixup: add presence check for hstore 'name' field
2026-02-16 09:17:36 +01:00
James Addison
53e8334206 Indexing: fixup: add presence check for hstore name field 2026-02-16 00:01:57 +00:00
James Addison
8c3c1f0a15 Indexer: allow has_pending to consider address-rank subsets 2026-02-15 23:40:26 +00:00
Sarah Hoffmann
c31abf58d0 make database import unit tests against real SQL 2026-02-15 21:43:17 +01:00
Sarah Hoffmann
d0bd42298e use original tables for database check tests 2026-02-15 21:43:17 +01:00
Sarah Hoffmann
d1b0bcaea7 Merge pull request #3987 from lonvia/rework-postcode-tests
Rework postcode unit tests
2026-02-15 21:42:54 +01:00
Sarah Hoffmann
c3e8fa8c43 replace postcode mock with standard postcode table fixtures 2026-02-15 16:48:31 +01:00
Sri CHaRan
24ba9651ba ci/windows: install osm2pgsql binary and enable full unit tests suite 2026-02-13 22:01:39 +05:30
Sri CHaRan
bf5ef0140a ci/windows: enable full python unit test setup for windows 2026-02-13 21:47:14 +05:30
Sri CHaRan
238f3dd1d9 ci/windows: add Postgresql setup action to tests 2026-02-13 21:47:14 +05:30
Sri Charan Chittineni
abd7c302f8 implement stage 1 : python unit tests 2026-02-13 21:47:14 +05:30
Sri CHaRan
2197236872 Add experimental Windows CI workflow 2026-02-13 21:47:14 +05:30
67 changed files with 1770 additions and 1051 deletions

View File

@@ -0,0 +1,95 @@
name: 'Setup Postgresql and Postgis on Windows'
description: 'Installs PostgreSQL and PostGIS for Windows and configures it for CI tests'
inputs:
postgresql-version:
description: 'Version of PostgreSQL to install'
required: true
runs:
using: "composite"
steps:
- name: Set up PostgreSQL variables
shell: pwsh
run: |
$version = "${{ inputs.postgresql-version }}"
$root = "C:\Program Files\PostgreSQL\$version"
$bin = "$root\bin"
echo "PGROOT=$root" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
echo "PGBIN=$bin" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
echo "$bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- name: Decide Postgis version (Windows)
id: postgis-ver
shell: pwsh
run: |
echo "PowerShell version: ${PSVersionTable.PSVersion}"
$PG_VERSION = Split-Path $env:PGROOT -Leaf
$postgis_page = "https://download.osgeo.org/postgis/windows/pg$PG_VERSION"
echo "Detecting PostGIS version from $postgis_page for PostgreSQL $PG_VERSION"
$pgis_bundle = (Invoke-WebRequest -Uri $postgis_page -ErrorAction Stop).Links.Where({$_.href -match "^postgis.*zip$"}).href
if (!$pgis_bundle) {
Write-Error "Could not find latest PostGIS version in $postgis_page that would match ^postgis.*zip$ pattern"
exit 1
}
$pgis_bundle = [IO.Path]::ChangeExtension($pgis_bundle, [NullString]::Value)
$pgis_bundle_url = "$postgis_page/$pgis_bundle.zip"
Add-Content $env:GITHUB_OUTPUT "postgis_file=$pgis_bundle"
Add-Content $env:GITHUB_OUTPUT "postgis_bundle_url=$pgis_bundle_url"
- uses: actions/cache@v4
with:
path: |
C:/postgis.zip
key: postgis-cache-${{ steps.postgis-ver.outputs.postgis_file }}
- name: Download postgis
shell: pwsh
run: |
if (!(Test-Path "C:\postgis.zip")){(new-object net.webclient).DownloadFile($env:PGIS_BUNDLE_URL, "c:\postgis.zip")}
if (Test-path "c:\postgis_archive"){Remove-Item "c:\postgis_archive" -Recurse -Force}
7z x c:\postgis.zip -oc:\postgis_archive
env:
PGIS_BUNDLE_URL: ${{ steps.postgis-ver.outputs.postgis_bundle_url }}
- name: Install postgis
shell: bash
run: |
echo "Root: $PGROOT, Bin: $PGBIN"
cp -r c:/postgis_archive/postgis-bundle-*/* "$PGROOT"
- name: Start PostgreSQL on Windows
run: |
$pgService = Get-Service -Name postgresql*
Set-Service -InputObject $pgService -Status running -StartupType automatic
Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
shell: pwsh
- name: Adapt postgresql configuration
shell: pwsh
env:
PGPASSWORD: root
run: |
& "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET fsync = 'off';"
& "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET synchronous_commit = 'off';"
& "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET full_page_writes = 'off';"
& "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET shared_buffers = '1GB';"
& "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET port = 5432;"
Restart-Service -Name postgresql*
Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
- name: Setup database users
shell: pwsh
env:
PGPASSWORD: root
run: |
& "$env:PGBIN\createuser" -U postgres -S www-data
& "$env:PGBIN\createuser" -U postgres -s runner

View File

@@ -1,5 +1,7 @@
name: 'Setup Postgresql and Postgis' name: 'Setup Postgresql and Postgis'
description: 'Installs PostgreSQL and PostGIS and configures it for CI tests'
inputs: inputs:
postgresql-version: postgresql-version:
description: 'Version of PostgreSQL to install' description: 'Version of PostgreSQL to install'

View File

@@ -140,6 +140,65 @@ jobs:
../venv/bin/python -m pytest test/bdd --nominatim-purge ../venv/bin/python -m pytest test/bdd --nominatim-purge
working-directory: Nominatim working-directory: Nominatim
tests-windows:
needs: create-archive
runs-on: windows-latest
steps:
- uses: actions/download-artifact@v4
with:
name: full-source
- name: Unpack Nominatim
run: tar xf nominatim-src.tar.bz2
- uses: ./Nominatim/.github/actions/setup-postgresql-windows
with:
postgresql-version: 17
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.14'
- name: Install Spatialite
run: |
Invoke-WebRequest -Uri "https://www.gaia-gis.it/gaia-sins/windows-bin-amd64/mod_spatialite-5.1.0-win-amd64.7z" -OutFile "spatialite.7z"
7z x spatialite.7z -o"C:\spatialite"
echo "C:\spatialite\mod_spatialite-5.1.0-win-amd64" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
- name: Install osm2pgsql
run: |
Invoke-WebRequest -Uri "https://osm2pgsql.org/download/windows/osm2pgsql-latest-x64.zip" -OutFile "osm2pgsql.zip"
Expand-Archive -Path "osm2pgsql.zip" -DestinationPath "C:\osm2pgsql"
$BinDir = Get-ChildItem -Path "C:\osm2pgsql" -Recurse -Filter "osm2pgsql.exe" | Select-Object -ExpandProperty DirectoryName | Select-Object -First 1
if (-not $BinDir) {
Write-Error "Could not find osm2pgsql.exe"
exit 1
}
echo "$BinDir" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
$FullExePath = Join-Path $BinDir "osm2pgsql.exe"
echo "NOMINATIM_OSM2PGSQL_BINARY=$FullExePath" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
- name: Set UTF-8 encoding
run: |
echo "PYTHONUTF8=1" >> $env:GITHUB_ENV
[System.Console]::OutputEncoding = [System.Text.Encoding]::UTF8
- name: Install PyICU from wheel
run: |
python -m pip install https://github.com/cgohlke/pyicu-build/releases/download/v2.16.0/pyicu-2.16-cp314-cp314-win_amd64.whl
- name: Install test prerequisites
run: |
python -m pip install -U pip
python -m pip install pytest pytest-asyncio "psycopg[binary]!=3.3.0" python-dotenv pyyaml jinja2 psutil sqlalchemy pytest-bdd falcon starlette uvicorn asgi_lifespan aiosqlite osmium mwparserfromhell
- name: Python unit tests
run: |
python -m pytest test/python -k "not (import_osm or run_osm2pgsql)"
working-directory: Nominatim
install: install:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: create-archive needs: create-archive

View File

@@ -20,7 +20,7 @@ functions as a tie-breaker between places with very similar primary
importance values. importance values.
nominatim.org has preprocessed importance tables for the nominatim.org has preprocessed importance tables for the
[primary Wikipedia rankings](https://nominatim.org/data/wikimedia-importance.sql.gz) [primary Wikipedia rankings](https://nominatim.org/data/wikimedia-importance.csv.gz)
and for [secondary importance](https://nominatim.org/data/wikimedia-secondary-importance.sql.gz) and for [secondary importance](https://nominatim.org/data/wikimedia-secondary-importance.sql.gz)
based on Wikipedia importance of the administrative areas. based on Wikipedia importance of the administrative areas.

View File

@@ -11,10 +11,38 @@ The import process creates the following tables:
The `planet_osm_*` tables are the usual backing tables for OSM data. Note The `planet_osm_*` tables are the usual backing tables for OSM data. Note
that Nominatim uses them to look up special relations and to find nodes on that Nominatim uses them to look up special relations and to find nodes on
ways. ways. Apart from those the osm2pgsql import produces three tables as output.
The osm2pgsql import produces a single table `place` as output with the following The **place_postcode** table collects postcode information that is not
columns: already present on an object in the place table. That is for one thing
[postcode area relations](https://wiki.openstreetmap.org/wiki/Tag:boundary%3Dpostal_code)
and for another objects with a postcode tag but no other tagging that
qualifies them for inclusion into the geocoding database.
The table has the following fields:
* `osm_type` - kind of OSM object (**N** - node, **W** - way, **R** - relation)
* `osm_id` - original OSM ID
* `postcode` - postcode as extracted from the `postal_code` tag
* `country_code` - computed country code for this postcode. This field
functions as a cache and is only computed when the table is used for
the computation of the final postcodes.
* `centroid` - centroid of the object
* `geometry` - the full geometry of the area for postcode areas only
The **place_interpolation** table holds all
[address interpolation lines](https://wiki.openstreetmap.org/wiki/Addresses#Interpolation)
and has the following fields:
* `osm_id` - original OSM ID
* `type` - type of interpolation as extracted from the `addr:interpolation` tag
* `address` - any other `addr:*` tags
* `nodes` - list of OSM nodes contained in this interpolation,
needed to compute the involved housenumbers later
* `geometry` - the linestring for the interpolation (in WGS84)
The **place** table holds all other OSM objects that are interesting and
has the following fields:
* `osm_type` - kind of OSM object (**N** - node, **W** - way, **R** - relation) * `osm_type` - kind of OSM object (**N** - node, **W** - way, **R** - relation)
* `osm_id` - original OSM ID * `osm_id` - original OSM ID
@@ -65,23 +93,32 @@ additional columns:
* `indexed_status` - processing status of the place (0 - ready, 1 - freshly inserted, 2 - needs updating, 100 - needs deletion) * `indexed_status` - processing status of the place (0 - ready, 1 - freshly inserted, 2 - needs updating, 100 - needs deletion)
* `indexed_date` - timestamp when the place was processed last * `indexed_date` - timestamp when the place was processed last
* `centroid` - a point feature for the place * `centroid` - a point feature for the place
* `token_info` - a dummy field used to inject information from the tokenizer
into the indexing process
The **location_property_osmline** table is a special table for The **location_property_osmline** table is a special table for
[address interpolations](https://wiki.openstreetmap.org/wiki/Addresses#Using_interpolation). [address interpolations](https://wiki.openstreetmap.org/wiki/Addresses#Using_interpolation).
The columns have the same meaning and use as the columns with the same name in The columns have the same meaning and use as the columns with the same name in
the placex table. Only three columns are special: the placex table. Only the following columns are special:
* `startnumber` and `endnumber` - beginning and end of the number range * `startnumber`, `endnumber` and `step` - beginning and end of the number range
for the interpolation for the interpolation and the increment steps
* `interpolationtype` - a string `odd`, `even` or `all` to indicate * `type` - a string to indicate the interval between the numbers as imported
the interval between the numbers from the OSM `addr:interpolation` tag; valid values are `odd`, `even`, `all`
or a single digit number; interpolations with other values are silently
dropped
Address interpolations are always ways in OSM, which is why there is no column Address interpolations are always ways in OSM, which is why there is no column
`osm_type`. `osm_type`.
The **location_postcodes** table holds computed centroids of all postcodes that The **location_postcodes** table holds computed postcode assembled from the
can be found in the OSM data. The meaning of the columns is again the same postcode information available in OSM. When a postcode has a postcode area
as that of the placex table. relation, then the table stores its full geometry. For all other postcodes
the centroid is computed using the position of all OSM objects that reference
the same postcode. The `osm_id` field can be used to distinguish the two.
When set, it refers to the OSM relation with the postcode area.
The meaning of the columns in the table is again the same as that of the
placex table.
Every place needs an address, a set of surrounding places that describe the Every place needs an address, a set of surrounding places that describe the
location of the place. The set of address places is made up of OSM places location of the place. The set of address places is made up of OSM places

View File

@@ -35,10 +35,31 @@ map place {
geometry => GEOMETRY geometry => GEOMETRY
} }
map place_postcode {
osm_type => CHAR(1)
osm_id => BIGINT
postcode => TEXT
country_code => TEXT
centroid => GEOMETRY
geometry => GEOMETRY
}
map place_interpolation {
osm_id => BIGINT
type => TEXT
address => HSTORE
nodes => BIGINT[]
geometry => GEOMETRY
}
planet_osm_nodes -[hidden]> planet_osm_ways planet_osm_nodes -[hidden]> planet_osm_ways
planet_osm_ways -[hidden]> planet_osm_rels planet_osm_ways -[hidden]> planet_osm_rels
planet_osm_ways -[hidden]-> place planet_osm_ways -[hidden]-> place
place -[hidden]-> place_postcode
place -[hidden]-> place_interpolation
planet_osm_nodes::id <- planet_osm_ways::nodes planet_osm_nodes::id <- planet_osm_ways::nodes
planet_osm_nodes::id <- place_interpolation::nodes
@enduml @enduml

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 13 KiB

After

Width:  |  Height:  |  Size: 19 KiB

View File

@@ -29,6 +29,7 @@ map placex {
indexed_date => TIMESTAMP indexed_date => TIMESTAMP
centroid => GEOMETRY centroid => GEOMETRY
geometry => GEOMETRY geometry => GEOMETRY
token_info JSONB
} }
map search_name { map search_name {
@@ -51,11 +52,11 @@ map word {
map location_property_osmline { map location_property_osmline {
place_id => BIGINT place_id => BIGINT
osm_id => BIGINT osm_id => BIGINT
type => TEXT
startnumber => INT startnumber => INT
endnumber => INT endnumber => INT
interpolationtype => TEXT step => int
address => HSTORE address => HSTORE
partition => SMALLINT
geometry_sector => INT geometry_sector => INT
parent_place_id => BIGINT parent_place_id => BIGINT
country_code => VARCHAR(2) country_code => VARCHAR(2)
@@ -63,6 +64,7 @@ map location_property_osmline {
indexed_status => SMALLINT indexed_status => SMALLINT
indexed_date => TIMESTAMP indexed_date => TIMESTAMP
linegeo => GEOMETRY linegeo => GEOMETRY
token_info JSONB
} }
map place_addressline { map place_addressline {
@@ -78,6 +80,7 @@ map location_postcodes {
place_id => BIGINT place_id => BIGINT
osm_id => BIGINT osm_id => BIGINT
postcode => TEXT postcode => TEXT
country_code => TEXT
parent_place_id => BIGINT parent_place_id => BIGINT
rank_search => SMALLINT rank_search => SMALLINT
indexed_status => SMALLINT indexed_status => SMALLINT

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 35 KiB

After

Width:  |  Height:  |  Size: 36 KiB

View File

@@ -77,6 +77,18 @@ local table_definitions = {
indexes = { indexes = {
{ column = 'postcode', method = 'btree' } { column = 'postcode', method = 'btree' }
} }
},
place_interpolation = {
ids = { type = 'way', id_column = 'osm_id' },
columns = {
{ column = 'type', type = 'text', not_null = true },
{ column = 'address', type = 'hstore' },
{ column = 'nodes', type = 'text', sql_type = 'bigint[]', not_null = true },
{ column = 'geometry', type = 'linestring', projection = 'WGS84', not_null = true },
},
indexes = {
{ column = 'nodes', method = 'gin' }
}
} }
} }
@@ -703,9 +715,24 @@ function module.process_tags(o)
o.address['country'] = nil o.address['country'] = nil
end end
if o.address.interpolation ~= nil then if o.address.interpolation ~= nil and o.address.housenumber == nil
o:write_place('place', 'houses', PlaceTransform.always) and o.object.type == 'way' and o.object.nodes ~= nil then
return local extra_addr = nil
for k, v in pairs(o.address) do
if k ~= 'interpolation' then
if extra_addr == nil then
extra_addr = {}
end
extra_addr[k] = v
end
end
insert_row.place_interpolation{
type = o.address.interpolation,
address = extra_addr,
nodes = '{' .. table.concat(o.object.nodes, ',') .. '}',
geometry = o.object:as_linestring()
}
end end
-- collect main keys -- collect main keys
@@ -728,7 +755,7 @@ function module.process_tags(o)
} }
end end
elseif ktype == 'fallback' and o.has_name then elseif ktype == 'fallback' and o.has_name then
fallback = {k, v, PlaceTransform.named} fallback = {k, v, PlaceTransform.always}
end end
end end
end end

View File

@@ -2,11 +2,99 @@
-- --
-- This file is part of Nominatim. (https://nominatim.org) -- This file is part of Nominatim. (https://nominatim.org)
-- --
-- Copyright (C) 2022 by the Nominatim developer community. -- Copyright (C) 2026 by the Nominatim developer community.
-- For a full list of authors see the git log. -- For a full list of authors see the git log.
-- Functions for address interpolation objects in location_property_osmline. -- Functions for address interpolation objects in location_property_osmline.
CREATE OR REPLACE FUNCTION place_interpolation_insert()
RETURNS TRIGGER
AS $$
DECLARE
existing RECORD;
existingplacex BIGINT[];
BEGIN
IF NOT (NEW.type in ('odd', 'even', 'all') OR NEW.type similar to '[1-9]') THEN
-- the new interpolation is illegal, simply remove existing entries
DELETE FROM location_property_osmline o WHERE o.osm_id = NEW.osm_id;
RETURN NULL;
END IF;
-- Remove the place from the list of places to be deleted
DELETE FROM place_interpolation_to_be_deleted pdel WHERE pdel.osm_id = NEW.osm_id;
SELECT * INTO existing FROM place_interpolation p WHERE p.osm_id = NEW.osm_id;
-- Get the existing entry from the interpolation table.
SELECT array_agg(place_id) INTO existingplacex
FROM location_property_osmline o WHERE o.osm_id = NEW.osm_id;
IF array_length(existingplacex, 1) is NULL THEN
INSERT INTO location_property_osmline (osm_id, type, address, linegeo)
VALUES (NEW.osm_id, NEW.type, NEW.address, NEW.geometry);
ELSE
-- Update the interpolation table:
-- The first entry gets the original data, all other entries
-- are removed and will be recreated on indexing.
-- (An interpolation can be split up, if it has more than 2 address nodes)
-- Update unconditionally here as the changes might be coming from the
-- nodes on the interpolation.
UPDATE location_property_osmline
SET type = NEW.type,
address = NEW.address,
linegeo = NEW.geometry,
startnumber = null,
indexed_status = 1
WHERE place_id = existingplacex[1];
IF array_length(existingplacex, 1) > 1 THEN
DELETE FROM location_property_osmline WHERE place_id = any(existingplacex[2:]);
END IF;
END IF;
-- need to invalidate nodes because they might copy address info
IF NEW.address is not NULL
AND (existing.osm_id is NULL
OR coalesce(existing.address, ''::hstore) != NEW.address)
THEN
UPDATE placex SET indexed_status = 2
WHERE osm_type = 'N' AND osm_id = ANY(NEW.nodes) AND indexed_status = 0;
END IF;
-- finally update/insert place_interpolation itself
IF existing.osm_id is not NULL THEN
-- Always updates as the nodes with the housenumber might be the reason
-- for the change.
UPDATE place_interpolation p
SET type = NEW.type,
address = NEW.address,
nodes = NEW.nodes,
geometry = NEW.geometry
WHERE p.osm_id = NEW.osm_id;
RETURN NULL;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION place_interpolation_delete()
RETURNS TRIGGER
AS $$
DECLARE
deferred BOOLEAN;
BEGIN
{% if debug %}RAISE WARNING 'Delete for interpolation %', OLD.osm_id;{% endif %}
INSERT INTO place_interpolation_to_be_deleted (osm_id) VALUES(OLD.osm_id);
RETURN NULL;
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT) CREATE OR REPLACE FUNCTION get_interpolation_address(in_address HSTORE, wayid BIGINT)
RETURNS HSTORE RETURNS HSTORE
@@ -19,7 +107,9 @@ BEGIN
RETURN in_address; RETURN in_address;
END IF; END IF;
SELECT nodes INTO waynodes FROM planet_osm_ways WHERE id = wayid; SELECT nodes INTO waynodes FROM place_interpolation WHERE osm_id = wayid;
IF array_upper(waynodes, 1) IS NOT NULL THEN
FOR location IN FOR location IN
SELECT placex.address, placex.osm_id FROM placex SELECT placex.address, placex.osm_id FROM placex
WHERE osm_type = 'N' and osm_id = ANY(waynodes) WHERE osm_type = 'N' and osm_id = ANY(waynodes)
@@ -28,8 +118,9 @@ BEGIN
and indexed_status < 100 and indexed_status < 100
LOOP LOOP
-- mark it as a derived address -- mark it as a derived address
RETURN location.address || in_address || hstore('_inherited', ''); RETURN location.address || coalesce(in_address, ''::hstore) || hstore('_inherited', '');
END LOOP; END LOOP;
END IF;
RETURN in_address; RETURN in_address;
END; END;
@@ -73,51 +164,6 @@ $$
LANGUAGE plpgsql STABLE PARALLEL SAFE; LANGUAGE plpgsql STABLE PARALLEL SAFE;
CREATE OR REPLACE FUNCTION reinsert_interpolation(way_id BIGINT, addr HSTORE,
geom GEOMETRY)
RETURNS INT
AS $$
DECLARE
existing BIGINT[];
BEGIN
IF addr is NULL OR NOT addr ? 'interpolation'
OR NOT (addr->'interpolation' in ('odd', 'even', 'all')
or addr->'interpolation' similar to '[1-9]')
THEN
-- the new interpolation is illegal, simply remove existing entries
DELETE FROM location_property_osmline WHERE osm_id = way_id;
ELSE
-- Get the existing entry from the interpolation table.
SELECT array_agg(place_id) INTO existing
FROM location_property_osmline WHERE osm_id = way_id;
IF existing IS NULL or array_length(existing, 1) = 0 THEN
INSERT INTO location_property_osmline (osm_id, address, linegeo)
VALUES (way_id, addr, geom);
ELSE
-- Update the interpolation table:
-- The first entry gets the original data, all other entries
-- are removed and will be recreated on indexing.
-- (An interpolation can be split up, if it has more than 2 address nodes)
UPDATE location_property_osmline
SET address = addr,
linegeo = geom,
startnumber = null,
indexed_status = 1
WHERE place_id = existing[1];
IF array_length(existing, 1) > 1 THEN
DELETE FROM location_property_osmline
WHERE place_id = any(existing[2:]);
END IF;
END IF;
END IF;
RETURN 1;
END;
$$
LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION osmline_insert() CREATE OR REPLACE FUNCTION osmline_insert()
RETURNS TRIGGER RETURNS TRIGGER
AS $$ AS $$
@@ -128,16 +174,13 @@ BEGIN
NEW.indexed_date := now(); NEW.indexed_date := now();
IF NEW.indexed_status IS NULL THEN IF NEW.indexed_status IS NULL THEN
IF NEW.address is NULL OR NOT NEW.address ? 'interpolation' IF NOT(NEW.type in ('odd', 'even', 'all') OR NEW.type similar to '[1-9]') THEN
OR NOT (NEW.address->'interpolation' in ('odd', 'even', 'all')
or NEW.address->'interpolation' similar to '[1-9]')
THEN
-- alphabetic interpolation is not supported -- alphabetic interpolation is not supported
RETURN NULL; RETURN NULL;
END IF; END IF;
NEW.indexed_status := 1; --STATUS_NEW
centroid := get_center_point(NEW.linegeo); centroid := get_center_point(NEW.linegeo);
NEW.indexed_status := 1; --STATUS_NEW
NEW.country_code := lower(get_country_code(centroid)); NEW.country_code := lower(get_country_code(centroid));
NEW.partition := get_partition(NEW.country_code); NEW.partition := get_partition(NEW.country_code);
@@ -182,32 +225,22 @@ BEGIN
get_center_point(NEW.linegeo), get_center_point(NEW.linegeo),
NEW.linegeo); NEW.linegeo);
-- Cannot find a parent street. We will not be able to display a reliable
-- address, so drop entire interpolation.
IF NEW.parent_place_id is NULL THEN
DELETE FROM location_property_osmline where place_id = OLD.place_id;
RETURN NULL;
END IF;
NEW.token_info := token_strip_info(NEW.token_info); NEW.token_info := token_strip_info(NEW.token_info);
IF NEW.address ? '_inherited' THEN IF NEW.address ? '_inherited' THEN
NEW.address := hstore('interpolation', NEW.address->'interpolation'); NEW.address := NULL;
END IF; END IF;
-- If the line was newly inserted, split the line as necessary. -- If the line was newly inserted, split the line as necessary.
IF OLD.indexed_status = 1 THEN IF NEW.parent_place_id is not NULL AND NEW.startnumber is NULL THEN
IF NEW.address->'interpolation' in ('odd', 'even') THEN IF NEW.type in ('odd', 'even') THEN
NEW.step := 2; NEW.step := 2;
stepmod := CASE WHEN NEW.address->'interpolation' = 'odd' THEN 1 ELSE 0 END; stepmod := CASE WHEN NEW.type = 'odd' THEN 1 ELSE 0 END;
ELSE ELSE
NEW.step := CASE WHEN NEW.address->'interpolation' = 'all' NEW.step := CASE WHEN NEW.type = 'all' THEN 1 ELSE (NEW.type)::SMALLINT END;
THEN 1
ELSE (NEW.address->'interpolation')::SMALLINT END;
stepmod := NULL; stepmod := NULL;
END IF; END IF;
SELECT nodes INTO waynodes SELECT nodes INTO waynodes FROM place_interpolation WHERE osm_id = NEW.osm_id;
FROM planet_osm_ways WHERE id = NEW.osm_id;
IF array_upper(waynodes, 1) IS NULL THEN IF array_upper(waynodes, 1) IS NULL THEN
RETURN NEW; RETURN NEW;
@@ -314,12 +347,12 @@ BEGIN
ELSE ELSE
INSERT INTO location_property_osmline INSERT INTO location_property_osmline
(linegeo, partition, osm_id, parent_place_id, (linegeo, partition, osm_id, parent_place_id,
startnumber, endnumber, step, startnumber, endnumber, step, type,
address, postcode, country_code, address, postcode, country_code,
geometry_sector, indexed_status) geometry_sector, indexed_status)
VALUES (ST_ReducePrecision(sectiongeo, 0.0000001), VALUES (ST_ReducePrecision(sectiongeo, 0.0000001),
NEW.partition, NEW.osm_id, NEW.parent_place_id, NEW.partition, NEW.osm_id, NEW.parent_place_id,
startnumber, endnumber, NEW.step, startnumber, endnumber, NEW.step, NEW.type,
NEW.address, postcode, NEW.address, postcode,
NEW.country_code, NEW.geometry_sector, 0); NEW.country_code, NEW.geometry_sector, 0);
END IF; END IF;

View File

@@ -14,7 +14,6 @@ DECLARE
existing RECORD; existing RECORD;
existingplacex RECORD; existingplacex RECORD;
existingline BIGINT[]; existingline BIGINT[];
interpol RECORD;
BEGIN BEGIN
{% if debug %} {% if debug %}
RAISE WARNING 'place_insert: % % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type,st_area(NEW.geometry); RAISE WARNING 'place_insert: % % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type,st_area(NEW.geometry);
@@ -55,41 +54,6 @@ BEGIN
DELETE from import_polygon_error where osm_type = NEW.osm_type and osm_id = NEW.osm_id; DELETE from import_polygon_error where osm_type = NEW.osm_type and osm_id = NEW.osm_id;
DELETE from import_polygon_delete where osm_type = NEW.osm_type and osm_id = NEW.osm_id; DELETE from import_polygon_delete where osm_type = NEW.osm_type and osm_id = NEW.osm_id;
-- ---- Interpolation Lines
IF NEW.class='place' and NEW.type='houses'
and NEW.osm_type='W' and ST_GeometryType(NEW.geometry) = 'ST_LineString'
THEN
PERFORM reinsert_interpolation(NEW.osm_id, NEW.address, NEW.geometry);
-- Now invalidate all address nodes on the line.
-- They get their parent from the interpolation.
UPDATE placex p SET indexed_status = 2
FROM planet_osm_ways w
WHERE w.id = NEW.osm_id and p.osm_type = 'N' and p.osm_id = any(w.nodes)
and indexed_status = 0;
-- If there is already an entry in place, just update that, if necessary.
IF existing.osm_type is not null THEN
IF coalesce(existing.address, ''::hstore) != coalesce(NEW.address, ''::hstore)
OR existing.geometry::text != NEW.geometry::text
THEN
UPDATE place
SET name = NEW.name,
address = NEW.address,
extratags = NEW.extratags,
admin_level = NEW.admin_level,
geometry = NEW.geometry
WHERE osm_type = NEW.osm_type and osm_id = NEW.osm_id
and class = NEW.class and type = NEW.type;
END IF;
RETURN NULL;
END IF;
RETURN NEW;
END IF;
-- ---- All other place types. -- ---- All other place types.
-- When an area is changed from large to small: log and discard change -- When an area is changed from large to small: log and discard change
@@ -109,29 +73,6 @@ BEGIN
RETURN null; RETURN null;
END IF; END IF;
-- If an address node is part of a interpolation line and changes or is
-- newly inserted (happens when the node already existed but now gets address
-- information), then mark the interpolation line for reparenting.
-- (Already here, because interpolation lines are reindexed before nodes,
-- so in the second call it would be too late.)
IF NEW.osm_type='N'
and coalesce(existing.address, ''::hstore) != coalesce(NEW.address, ''::hstore)
THEN
FOR interpol IN
SELECT DISTINCT osm_id, address, geometry FROM place, planet_osm_ways w
WHERE NEW.geometry && place.geometry
and place.osm_type = 'W'
and place.address ? 'interpolation'
and exists (SELECT * FROM location_property_osmline
WHERE osm_id = place.osm_id
and indexed_status in (0, 2))
and w.id = place.osm_id and NEW.osm_id = any (w.nodes)
LOOP
PERFORM reinsert_interpolation(interpol.osm_id, interpol.address,
interpol.geometry);
END LOOP;
END IF;
-- Get the existing placex entry. -- Get the existing placex entry.
SELECT * INTO existingplacex SELECT * INTO existingplacex
FROM placex FROM placex

View File

@@ -53,12 +53,8 @@ BEGIN
-- See if we can inherit additional address tags from an interpolation. -- See if we can inherit additional address tags from an interpolation.
-- These will become permanent. -- These will become permanent.
FOR location IN FOR location IN
SELECT (address - 'interpolation'::text - 'housenumber'::text) as address SELECT address FROM place_interpolation
FROM place, planet_osm_ways w WHERE ARRAY[p.osm_id] && place_interpolation.nodes AND address is not NULL
WHERE place.osm_type = 'W' and place.address ? 'interpolation'
and place.geometry && p.geometry
and place.osm_id = w.id
and p.osm_id = any(w.nodes)
LOOP LOOP
result.address := location.address || result.address; result.address := location.address || result.address;
END LOOP; END LOOP;
@@ -89,7 +85,7 @@ BEGIN
-- Add the linked-place (e.g. city) name as a searchable placename in the default language (if any) -- Add the linked-place (e.g. city) name as a searchable placename in the default language (if any)
default_language := get_country_language_code(location.country_code); default_language := get_country_language_code(location.country_code);
IF default_language is not NULL AND NOT location.name ? ('name:' || default_language) THEN IF default_language is not NULL AND location.name ? 'name' AND NOT location.name ? ('name:' || default_language) THEN
location.name := location.name || hstore('name:' || default_language, location.name->'name'); location.name := location.name || hstore('name:' || default_language, location.name->'name');
END IF; END IF;
@@ -735,6 +731,10 @@ BEGIN
END IF; END IF;
IF NEW.importance IS NULL THEN
NEW.importance := 0.40001 - (NEW.rank_search::float / 75);
END IF;
{% if debug %}RAISE WARNING 'placex_insert:END: % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type;{% endif %} {% if debug %}RAISE WARNING 'placex_insert:END: % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type;{% endif %}
{% if not disable_diff_updates %} {% if not disable_diff_updates %}
@@ -1080,10 +1080,8 @@ BEGIN
{% if debug %}RAISE WARNING 'Waterway processed';{% endif %} {% if debug %}RAISE WARNING 'Waterway processed';{% endif %}
END IF; END IF;
NEW.importance := null; SELECT wikipedia, importance INTO NEW.wikipedia, NEW.importance
SELECT wikipedia, importance FROM compute_importance(NEW.extratags, NEW.country_code, NEW.rank_search, NEW.centroid);
FROM compute_importance(NEW.extratags, NEW.country_code, NEW.rank_search, NEW.centroid)
INTO NEW.wikipedia,NEW.importance;
{% if debug %}RAISE WARNING 'Importance computed from wikipedia: %', NEW.importance;{% endif %} {% if debug %}RAISE WARNING 'Importance computed from wikipedia: %', NEW.importance;{% endif %}
@@ -1140,10 +1138,10 @@ BEGIN
NEW.token_info, NEW.centroid); NEW.token_info, NEW.centroid);
IF array_length(name_vector, 1) is not NULL THEN IF array_length(name_vector, 1) is not NULL THEN
INSERT INTO search_name (place_id, search_rank, address_rank, INSERT INTO search_name (place_id, address_rank,
importance, country_code, name_vector, importance, country_code, name_vector,
nameaddress_vector, centroid) nameaddress_vector, centroid)
VALUES (NEW.place_id, NEW.rank_search, NEW.rank_address, VALUES (NEW.place_id, NEW.rank_address,
NEW.importance, NEW.country_code, name_vector, NEW.importance, NEW.country_code, name_vector,
nameaddress_vector, NEW.centroid); nameaddress_vector, NEW.centroid);
{% if debug %}RAISE WARNING 'Place added to search table';{% endif %} {% if debug %}RAISE WARNING 'Place added to search table';{% endif %}
@@ -1313,10 +1311,10 @@ BEGIN
{% if debug %}RAISE WARNING 'added to search name (full)';{% endif %} {% if debug %}RAISE WARNING 'added to search name (full)';{% endif %}
{% if not db.reverse_only %} {% if not db.reverse_only %}
INSERT INTO search_name (place_id, search_rank, address_rank, INSERT INTO search_name (place_id, address_rank,
importance, country_code, name_vector, importance, country_code, name_vector,
nameaddress_vector, centroid) nameaddress_vector, centroid)
VALUES (NEW.place_id, NEW.rank_search, NEW.rank_address, VALUES (NEW.place_id, NEW.rank_address,
NEW.importance, NEW.country_code, name_vector, NEW.importance, NEW.country_code, name_vector,
nameaddress_vector, NEW.centroid); nameaddress_vector, NEW.centroid);
{% endif %} {% endif %}

View File

@@ -153,8 +153,7 @@ BEGIN
IF ST_GeometryType(geom) in ('ST_Polygon','ST_MultiPolygon') THEN IF ST_GeometryType(geom) in ('ST_Polygon','ST_MultiPolygon') THEN
SELECT min(postcode), count(*) FROM SELECT min(postcode), count(*) FROM
(SELECT postcode FROM location_postcodes (SELECT postcode FROM location_postcodes
WHERE geom && location_postcodes.geometry -- want to use the index WHERE ST_Contains(geom, location_postcodes.centroid)
AND ST_Contains(geom, location_postcodes.centroid)
AND country_code = country AND country_code = country
LIMIT 2) sub LIMIT 2) sub
INTO outcode, cnt; INTO outcode, cnt;
@@ -625,17 +624,21 @@ BEGIN
and placex.type = place_to_be_deleted.type and placex.type = place_to_be_deleted.type
and not deferred; and not deferred;
-- Mark for delete in interpolations
UPDATE location_property_osmline SET indexed_status = 100 FROM place_to_be_deleted
WHERE place_to_be_deleted.osm_type = 'W'
and place_to_be_deleted.class = 'place'
and place_to_be_deleted.type = 'houses'
and location_property_osmline.osm_id = place_to_be_deleted.osm_id
and not deferred;
-- Clear todo list. -- Clear todo list.
TRUNCATE TABLE place_to_be_deleted; TRUNCATE TABLE place_to_be_deleted;
-- delete from place_interpolation table
ALTER TABLE place_interpolation DISABLE TRIGGER place_interpolation_before_delete;
DELETE FROM place_interpolation p USING place_interpolation_to_be_deleted d
WHERE p.osm_id = d.osm_id;
ALTER TABLE place_interpolation ENABLE TRIGGER place_interpolation_before_delete;
UPDATE location_property_osmline o SET indexed_status = 100
FROM place_interpolation_to_be_deleted d
WHERE o.osm_id = d.osm_id;
TRUNCATE TABLE place_interpolation_to_be_deleted;
RETURN NULL; RETURN NULL;
END; END;
$$ LANGUAGE plpgsql; $$ LANGUAGE plpgsql;

View File

@@ -2,7 +2,7 @@
-- --
-- This file is part of Nominatim. (https://nominatim.org) -- This file is part of Nominatim. (https://nominatim.org)
-- --
-- Copyright (C) 2025 by the Nominatim developer community. -- Copyright (C) 2026 by the Nominatim developer community.
-- For a full list of authors see the git log. -- For a full list of authors see the git log.
-- Indices used only during search and update. -- Indices used only during search and update.
@@ -67,11 +67,15 @@ CREATE INDEX IF NOT EXISTS idx_osmline_parent_osm_id
--- ---
-- Table needed for running updates with osm2pgsql on place. -- Table needed for running updates with osm2pgsql on place.
CREATE TABLE IF NOT EXISTS place_to_be_deleted ( CREATE TABLE IF NOT EXISTS place_to_be_deleted (
osm_type CHAR(1), osm_type CHAR(1) NOT NULL,
osm_id BIGINT, osm_id BIGINT NOT NULL,
class TEXT, class TEXT NOT NULL,
type TEXT, type TEXT NOT NULL,
deferred BOOLEAN deferred BOOLEAN NOT NULL
);
CREATE TABLE IF NOT EXISTS place_interpolation_to_be_deleted (
osm_id BIGINT NOT NULL
); );
--- ---
CREATE INDEX IF NOT EXISTS idx_location_postcodes_parent_place_id CREATE INDEX IF NOT EXISTS idx_location_postcodes_parent_place_id

View File

@@ -2,7 +2,7 @@
-- --
-- This file is part of Nominatim. (https://nominatim.org) -- This file is part of Nominatim. (https://nominatim.org)
-- --
-- Copyright (C) 2025 by the Nominatim developer community. -- Copyright (C) 2026 by the Nominatim developer community.
-- For a full list of authors see the git log. -- For a full list of authors see the git log.
-- insert creates the location tables, creates location indexes if indexed == true -- insert creates the location tables, creates location indexes if indexed == true
@@ -31,3 +31,8 @@ CREATE TRIGGER location_postcodes_before_delete BEFORE DELETE ON location_postco
FOR EACH ROW EXECUTE PROCEDURE postcodes_delete(); FOR EACH ROW EXECUTE PROCEDURE postcodes_delete();
CREATE TRIGGER location_postcodes_before_insert BEFORE INSERT ON location_postcodes CREATE TRIGGER location_postcodes_before_insert BEFORE INSERT ON location_postcodes
FOR EACH ROW EXECUTE PROCEDURE postcodes_insert(); FOR EACH ROW EXECUTE PROCEDURE postcodes_insert();
CREATE TRIGGER place_interpolation_before_insert BEFORE INSERT ON place_interpolation
FOR EACH ROW EXECUTE PROCEDURE place_interpolation_insert();
CREATE TRIGGER place_interpolation_before_delete BEFORE DELETE ON place_interpolation
FOR EACH ROW EXECUTE PROCEDURE place_interpolation_delete();

View File

@@ -32,8 +32,3 @@ CREATE INDEX planet_osm_rels_relation_members_idx ON planet_osm_rels USING gin(p
WITH (fastupdate=off) WITH (fastupdate=off)
{{db.tablespace.address_index}}; {{db.tablespace.address_index}};
{% endif %} {% endif %}
-- Needed for lookups if a node is part of an interpolation.
CREATE INDEX IF NOT EXISTS idx_place_interpolations
ON place USING gist(geometry) {{db.tablespace.address_index}}
WHERE osm_type = 'W' and address ? 'interpolation';

View File

@@ -12,12 +12,13 @@ CREATE TABLE location_property_osmline (
parent_place_id BIGINT, parent_place_id BIGINT,
geometry_sector INTEGER NOT NULL, geometry_sector INTEGER NOT NULL,
indexed_date TIMESTAMP, indexed_date TIMESTAMP,
type TEXT,
startnumber INTEGER, startnumber INTEGER,
endnumber INTEGER, endnumber INTEGER,
step SMALLINT, step SMALLINT,
partition SMALLINT NOT NULL, partition SMALLINT NOT NULL,
indexed_status SMALLINT NOT NULL, indexed_status SMALLINT NOT NULL,
linegeo GEOMETRY NOT NULL, linegeo GEOMETRY(Geometry, 4326) NOT NULL,
address HSTORE, address HSTORE,
token_info JSONB, -- custom column for tokenizer use only token_info JSONB, -- custom column for tokenizer use only
postcode TEXT, postcode TEXT,

View File

@@ -12,7 +12,7 @@ CREATE TABLE placex (
place_id BIGINT NOT NULL, place_id BIGINT NOT NULL,
parent_place_id BIGINT, parent_place_id BIGINT,
linked_place_id BIGINT, linked_place_id BIGINT,
importance FLOAT, importance FLOAT NOT NULL,
indexed_date TIMESTAMP, indexed_date TIMESTAMP,
geometry_sector INTEGER NOT NULL, geometry_sector INTEGER NOT NULL,
rank_address SMALLINT NOT NULL, rank_address SMALLINT NOT NULL,

View File

@@ -23,6 +23,8 @@ CREATE UNIQUE INDEX idx_location_postcodes_id ON location_postcodes
USING BTREE (place_id) {{db.tablespace.search_index}}; USING BTREE (place_id) {{db.tablespace.search_index}};
CREATE INDEX idx_location_postcodes_geometry ON location_postcodes CREATE INDEX idx_location_postcodes_geometry ON location_postcodes
USING GIST (geometry) {{db.tablespace.search_index}}; USING GIST (geometry) {{db.tablespace.search_index}};
CREATE INDEX idx_location_postcodes_centroid ON location_postcodes
USING GIST (centroid) {{db.tablespace.search_index}};
CREATE INDEX IF NOT EXISTS idx_location_postcodes_postcode ON location_postcodes CREATE INDEX IF NOT EXISTS idx_location_postcodes_postcode ON location_postcodes
USING BTREE (postcode, country_code) {{db.tablespace.search_index}}; USING BTREE (postcode, country_code) {{db.tablespace.search_index}};
CREATE INDEX IF NOT EXISTS idx_location_postcodes_osmid ON location_postcodes CREATE INDEX IF NOT EXISTS idx_location_postcodes_osmid ON location_postcodes

View File

@@ -12,7 +12,6 @@ DROP TABLE IF EXISTS search_name;
CREATE TABLE search_name ( CREATE TABLE search_name (
place_id BIGINT NOT NULL, place_id BIGINT NOT NULL,
importance FLOAT NOT NULL, importance FLOAT NOT NULL,
search_rank SMALLINT NOT NULL,
address_rank SMALLINT NOT NULL, address_rank SMALLINT NOT NULL,
name_vector integer[] NOT NULL, name_vector integer[] NOT NULL,
nameaddress_vector integer[] NOT NULL, nameaddress_vector integer[] NOT NULL,

View File

@@ -177,6 +177,7 @@ class SearchBuilder:
sdata.lookups = partials.split_lookup(split, 'nameaddress_vector') sdata.lookups = partials.split_lookup(split, 'nameaddress_vector')
sdata.lookups.append( sdata.lookups.append(
dbf.FieldLookup('name_vector', hnr_tokens, lookups.Restrict)) dbf.FieldLookup('name_vector', hnr_tokens, lookups.Restrict))
expected_count = partials.min_count() / (5**(split - 1))
else: else:
addr_fulls = [t.token for t in addr_fulls = [t.token for t in
self.query.get_tokens(address[0], qmod.TOKEN_WORD)] self.query.get_tokens(address[0], qmod.TOKEN_WORD)]

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Implementation of search for an address (search with housenumber). Implementation of search for an address (search with housenumber).
@@ -155,13 +155,7 @@ class AddressSearch(base.AbstractSearch):
for ranking in self.rankings: for ranking in self.rankings:
penalty += ranking.sql_penalty(t) penalty += ranking.sql_penalty(t)
sql = sa.select(t.c.place_id, t.c.search_rank, t.c.address_rank, sql = sa.select(t.c.place_id, penalty.label('penalty'))
t.c.country_code, t.c.centroid,
t.c.name_vector, t.c.nameaddress_vector,
sa.case((t.c.importance > 0, t.c.importance),
else_=0.40001-(sa.cast(t.c.search_rank, sa.Float())/75))
.label('importance'),
penalty.label('penalty'))
for lookup in self.lookups: for lookup in self.lookups:
sql = sql.where(lookup.sql_condition(t)) sql = sql.where(lookup.sql_condition(t))
@@ -202,9 +196,7 @@ class AddressSearch(base.AbstractSearch):
inner = sql.limit(10000).order_by(sa.desc(sa.text('importance'))).subquery() inner = sql.limit(10000).order_by(sa.desc(sa.text('importance'))).subquery()
sql = sa.select(inner.c.place_id, inner.c.search_rank, inner.c.address_rank, sql = sa.select(inner.c.place_id, inner.c.penalty)
inner.c.country_code, inner.c.centroid, inner.c.importance,
inner.c.penalty)
return sql.cte('searches') return sql.cte('searches')
@@ -240,12 +232,12 @@ class AddressSearch(base.AbstractSearch):
else_=1.0) else_=1.0)
if details.near is not None: if details.near is not None:
sql = sql.add_columns((-tsearch.c.centroid.ST_Distance(NEAR_PARAM)) sql = sql.add_columns((-t.c.centroid.ST_Distance(NEAR_PARAM))
.label('importance')) .label('importance'))
sql = sql.order_by(sa.desc(sa.text('importance'))) sql = sql.order_by(sa.desc(sa.text('importance')))
else: else:
sql = sql.order_by(penalty - tsearch.c.importance) sql = sql.order_by(penalty - t.c.importance)
sql = sql.add_columns(tsearch.c.importance) sql = sql.add_columns(t.c.importance)
sql = sql.add_columns(penalty.label('accuracy'))\ sql = sql.add_columns(penalty.label('accuracy'))\
.order_by(sa.text('accuracy')) .order_by(sa.text('accuracy'))
@@ -253,7 +245,7 @@ class AddressSearch(base.AbstractSearch):
hnr_list = '|'.join(self.housenumbers.values) hnr_list = '|'.join(self.housenumbers.values)
if self.has_address_terms: if self.has_address_terms:
sql = sql.where(sa.or_(tsearch.c.address_rank < 30, sql = sql.where(sa.or_(t.c.rank_address < 30,
sa.func.RegexpWord(hnr_list, t.c.housenumber))) sa.func.RegexpWord(hnr_list, t.c.housenumber)))
inner = sql.subquery() inner = sql.subquery()

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Implementation of search for a named place (without housenumber). Implementation of search for a named place (without housenumber).
@@ -58,12 +58,7 @@ class PlaceSearch(base.AbstractSearch):
for ranking in self.rankings: for ranking in self.rankings:
penalty += ranking.sql_penalty(t) penalty += ranking.sql_penalty(t)
sql = sa.select(t.c.place_id, t.c.search_rank, t.c.address_rank, sql = sa.select(t.c.place_id, t.c.importance)
t.c.country_code, t.c.centroid,
t.c.name_vector, t.c.nameaddress_vector,
sa.case((t.c.importance > 0, t.c.importance),
else_=0.40001-(sa.cast(t.c.search_rank, sa.Float())/75))
.label('importance'))
for lookup in self.lookups: for lookup in self.lookups:
sql = sql.where(lookup.sql_condition(t)) sql = sql.where(lookup.sql_condition(t))
@@ -103,12 +98,13 @@ class PlaceSearch(base.AbstractSearch):
if details.excluded: if details.excluded:
sql = sql.where(base.exclude_places(t)) sql = sql.where(base.exclude_places(t))
if details.min_rank > 0: # Do not restrict ranks too much yet because rank restriction
sql = sql.where(sa.or_(t.c.address_rank >= MIN_RANK_PARAM, # currently also depends on search_rank to account for state-cities
t.c.search_rank >= MIN_RANK_PARAM)) # like Berlin.
if details.max_rank < 30: if details.max_rank < 26:
sql = sql.where(sa.or_(t.c.address_rank <= MAX_RANK_PARAM, sql = sql.where(t.c.address_rank < 26)
t.c.search_rank <= MAX_RANK_PARAM)) elif details.max_rank < 30:
sql = sql.where(t.c.address_rank < MAX_RANK_PARAM)
sql = sql.add_columns(penalty.label('penalty')) sql = sql.add_columns(penalty.label('penalty'))
@@ -116,11 +112,9 @@ class PlaceSearch(base.AbstractSearch):
.order_by(sa.desc(sa.text('importance')))\ .order_by(sa.desc(sa.text('importance')))\
.subquery() .subquery()
sql = sa.select(inner.c.place_id, inner.c.search_rank, inner.c.address_rank, sql = sa.select(inner.c.place_id, inner.c.importance, inner.c.penalty)
inner.c.country_code, inner.c.centroid, inner.c.importance,
inner.c.penalty)
# If the query is not an address search or has a geographic preference, # If the query has no geographic preference,
# preselect most important items to restrict the number of places # preselect most important items to restrict the number of places
# that need to be looked up in placex. # that need to be looked up in placex.
if (details.viewbox is None or not details.bounded_viewbox)\ if (details.viewbox is None or not details.bounded_viewbox)\
@@ -132,9 +126,7 @@ class PlaceSearch(base.AbstractSearch):
inner = sql.subquery() inner = sql.subquery()
sql = sa.select(inner.c.place_id, inner.c.search_rank, inner.c.address_rank, sql = sa.select(inner.c.place_id, inner.c.penalty)\
inner.c.country_code, inner.c.centroid, inner.c.importance,
inner.c.penalty)\
.where(inner.c.penalty - inner.c.importance < inner.c.min_penalty + 0.5) .where(inner.c.penalty - inner.c.importance < inner.c.min_penalty + 0.5)
return sql.cte('searches') return sql.cte('searches')
@@ -169,12 +161,19 @@ class PlaceSearch(base.AbstractSearch):
penalty += sa.case((t.c.postcode.in_(self.postcodes.values), 0.0), else_=1.0) penalty += sa.case((t.c.postcode.in_(self.postcodes.values), 0.0), else_=1.0)
if details.near is not None: if details.near is not None:
sql = sql.add_columns((-tsearch.c.centroid.ST_Distance(NEAR_PARAM)) sql = sql.add_columns((-t.c.centroid.ST_Distance(NEAR_PARAM))
.label('importance')) .label('importance'))
sql = sql.order_by(sa.desc(sa.text('importance'))) sql = sql.order_by(sa.desc(sa.text('importance')))
else: else:
sql = sql.order_by(penalty - tsearch.c.importance) sql = sql.order_by(penalty - t.c.importance)
sql = sql.add_columns(tsearch.c.importance) sql = sql.add_columns(t.c.importance)
if details.min_rank > 0:
sql = sql.where(sa.or_(t.c.rank_address >= MIN_RANK_PARAM,
t.c.rank_search >= MIN_RANK_PARAM))
if details.max_rank < 30:
sql = sql.where(sa.or_(t.c.rank_address <= MAX_RANK_PARAM,
t.c.rank_search <= MAX_RANK_PARAM))
sql = sql.add_columns(penalty.label('accuracy'))\ sql = sql.add_columns(penalty.label('accuracy'))\
.order_by(sa.text('accuracy')) .order_by(sa.text('accuracy'))

View File

@@ -170,10 +170,19 @@ class ForwardGeocoder:
if qword not in words: if qword not in words:
wdist = max(difflib.SequenceMatcher(a=qword, b=w).quick_ratio() for w in words) wdist = max(difflib.SequenceMatcher(a=qword, b=w).quick_ratio() for w in words)
distance += len(qword) if wdist < 0.4 else 1 distance += len(qword) if wdist < 0.4 else 1
# Compensate for the fact that country names do not get a # Countries with high importance can dominate results when matched
# match penalty yet by the tokenizer. # via an alternate-language name. Apply a language-aware penalty
# Temporary hack that needs to be removed! # to offset this.
if result.rank_address == 4: if result.rank_address == 4:
if self.params.locales and result.names:
loc_names = [result.names[t] for t in self.params.locales.name_tags
if t in result.names]
if loc_names:
norm_loc = self.query_analyzer.normalize_text(' '.join(loc_names))
loc_words = set(w for w in re.split('[-,: ]+', norm_loc) if w)
if loc_words and loc_words.isdisjoint(qwords):
result.accuracy += result.calculated_importance() * 0.5
else:
distance *= 2 distance *= 2
result.accuracy += distance * 0.3 / sum(len(w) for w in qwords) result.accuracy += distance * 0.3 / sum(len(w) for w in qwords)

View File

@@ -184,6 +184,10 @@ class APIMiddleware:
formatter = load_format_dispatcher('v1', self.api.config.project_dir) formatter = load_format_dispatcher('v1', self.api.config.project_dir)
for name, func in await api_impl.get_routes(self.api): for name, func in await api_impl.get_routes(self.api):
endpoint = EndpointWrapper(name, func, self.api, formatter) endpoint = EndpointWrapper(name, func, self.api, formatter)
# If func is a LazySearchEndpoint, give it a reference to wrapper
# so it can replace wrapper.func dynamically
if hasattr(func, 'set_wrapper'):
func.set_wrapper(endpoint)
self.app.add_route(f"/{name}", endpoint) self.app.add_route(f"/{name}", endpoint)
if legacy_urls: if legacy_urls:
self.app.add_route(f"/{name}.php", endpoint) self.app.add_route(f"/{name}.php", endpoint)

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
SQLAlchemy definitions for all tables used by the frontend. SQLAlchemy definitions for all tables used by the frontend.
@@ -112,7 +112,6 @@ class SearchTables:
'search_name', meta, 'search_name', meta,
sa.Column('place_id', sa.BigInteger), sa.Column('place_id', sa.BigInteger),
sa.Column('importance', sa.Float), sa.Column('importance', sa.Float),
sa.Column('search_rank', sa.SmallInteger),
sa.Column('address_rank', sa.SmallInteger), sa.Column('address_rank', sa.SmallInteger),
sa.Column('name_vector', IntArray), sa.Column('name_vector', IntArray),
sa.Column('nameaddress_vector', IntArray), sa.Column('nameaddress_vector', IntArray),

View File

@@ -8,7 +8,7 @@
Complex datatypes used by the Nominatim API. Complex datatypes used by the Nominatim API.
""" """
from typing import Optional, Union, Tuple, NamedTuple, TypeVar, Type, Dict, \ from typing import Optional, Union, Tuple, NamedTuple, TypeVar, Type, Dict, \
Any, List, Sequence Any, List, Sequence, TYPE_CHECKING
from collections import abc from collections import abc
import dataclasses import dataclasses
import datetime as dt import datetime as dt
@@ -17,6 +17,8 @@ import math
from struct import unpack from struct import unpack
from binascii import unhexlify from binascii import unhexlify
if TYPE_CHECKING:
from .localization import Locales
from .errors import UsageError from .errors import UsageError
@@ -573,6 +575,13 @@ class SearchDetails(LookupDetails):
viewbox_x2: Optional[Bbox] = None viewbox_x2: Optional[Bbox] = None
locales: Optional['Locales'] = dataclasses.field(
default=None, metadata={'transform': lambda v: v})
""" Locale preferences of the caller.
Used during result re-ranking to prefer results that match the
caller's locale over results that only match in an alternate language.
"""
def __post_init__(self) -> None: def __post_init__(self) -> None:
if self.viewbox is not None: if self.viewbox is not None:
xext = (self.viewbox.maxlon - self.viewbox.minlon)/2 xext = (self.viewbox.maxlon - self.viewbox.minlon)/2

View File

@@ -19,6 +19,7 @@ from ..localization import Locales
from ..result_formatting import FormatDispatcher from ..result_formatting import FormatDispatcher
from .classtypes import ICONS from .classtypes import ICONS
from . import format_json, format_xml from . import format_json, format_xml
from .helpers import _add_admin_level
from .. import logging as loglib from .. import logging as loglib
from ..server import content_types as ct from ..server import content_types as ct
@@ -157,7 +158,7 @@ def _format_details_json(result: DetailedResult, options: Mapping[str, Any]) ->
.keyval_not_none('indexed_date', result.indexed_date, lambda v: v.isoformat())\ .keyval_not_none('indexed_date', result.indexed_date, lambda v: v.isoformat())\
.keyval_not_none('importance', result.importance)\ .keyval_not_none('importance', result.importance)\
.keyval('calculated_importance', result.calculated_importance())\ .keyval('calculated_importance', result.calculated_importance())\
.keyval('extratags', result.extratags or {})\ .keyval('extratags', _add_admin_level(result) or {})\
.keyval_not_none('calculated_wikipedia', result.wikipedia)\ .keyval_not_none('calculated_wikipedia', result.wikipedia)\
.keyval('rank_address', result.rank_address)\ .keyval('rank_address', result.rank_address)\
.keyval('rank_search', result.rank_search)\ .keyval('rank_search', result.rank_search)\

View File

@@ -12,6 +12,7 @@ from typing import Mapping, Any, Optional, Tuple, Union, List
from ..utils.json_writer import JsonWriter from ..utils.json_writer import JsonWriter
from ..results import AddressLines, ReverseResults, SearchResults from ..results import AddressLines, ReverseResults, SearchResults
from . import classtypes as cl from . import classtypes as cl
from .helpers import _add_admin_level
from ..types import EntranceDetails from ..types import EntranceDetails
@@ -134,7 +135,7 @@ def format_base_json(results: Union[ReverseResults, SearchResults],
write_entrances(out, result.entrances) write_entrances(out, result.entrances)
if options.get('extratags', False): if options.get('extratags', False):
out.keyval('extratags', result.extratags) out.keyval('extratags', _add_admin_level(result))
if options.get('namedetails', False): if options.get('namedetails', False):
out.keyval('namedetails', result.names) out.keyval('namedetails', result.names)
@@ -210,7 +211,7 @@ def format_base_geojson(results: Union[ReverseResults, SearchResults],
write_entrances(out, result.entrances) write_entrances(out, result.entrances)
if options.get('extratags', False): if options.get('extratags', False):
out.keyval('extratags', result.extratags) out.keyval('extratags', _add_admin_level(result))
if options.get('namedetails', False): if options.get('namedetails', False):
out.keyval('namedetails', result.names) out.keyval('namedetails', result.names)
@@ -284,7 +285,7 @@ def format_base_geocodejson(results: Union[ReverseResults, SearchResults],
write_entrances(out, result.entrances) write_entrances(out, result.entrances)
if options.get('extratags', False): if options.get('extratags', False):
out.keyval('extra', result.extratags) out.keyval('extra', _add_admin_level(result))
out.end_object().next().end_object().next() out.end_object().next().end_object().next()

View File

@@ -14,6 +14,7 @@ import xml.etree.ElementTree as ET
from ..results import AddressLines, ReverseResult, ReverseResults, \ from ..results import AddressLines, ReverseResult, ReverseResults, \
SearchResult, SearchResults SearchResult, SearchResults
from . import classtypes as cl from . import classtypes as cl
from .helpers import _add_admin_level
from ..types import EntranceDetails from ..types import EntranceDetails
@@ -125,8 +126,9 @@ def format_base_xml(results: Union[ReverseResults, SearchResults],
if options.get('extratags', False): if options.get('extratags', False):
eroot = ET.SubElement(root if simple else place, 'extratags') eroot = ET.SubElement(root if simple else place, 'extratags')
if result.extratags: tags = _add_admin_level(result)
for k, v in result.extratags.items(): if tags:
for k, v in tags.items():
ET.SubElement(eroot, 'tag', attrib={'key': k, 'value': v}) ET.SubElement(eroot, 'tag', attrib={'key': k, 'value': v})
if options.get('namedetails', False): if options.get('namedetails', False):

View File

@@ -12,10 +12,20 @@ from typing import Tuple, Optional, Any, Dict, Iterable
from itertools import chain from itertools import chain
import re import re
from ..results import SearchResults, SourceTable from ..results import SearchResults, SourceTable, BaseResult
from ..types import SearchDetails, GeometryFormat from ..types import SearchDetails, GeometryFormat
def _add_admin_level(result: BaseResult) -> Optional[Dict[str, str]]:
""" Inject admin_level into extratags for boundary=administrative results.
"""
tags = result.extratags
if result.category == ('boundary', 'administrative') and result.admin_level < 15:
tags = dict(tags) if tags else {}
tags['admin_level'] = str(result.admin_level)
return tags
REVERSE_MAX_RANKS = [2, 2, 2, # 0-2 Continent/Sea REVERSE_MAX_RANKS = [2, 2, 2, # 0-2 Continent/Sea
4, 4, # 3-4 Country 4, 4, # 3-4 Country
8, # 5 State 8, # 5 State

View File

@@ -12,6 +12,7 @@ from typing import Optional, Any, Type, Dict, cast, Sequence, Tuple
from functools import reduce from functools import reduce
import dataclasses import dataclasses
from urllib.parse import urlencode from urllib.parse import urlencode
import asyncio
import sqlalchemy as sa import sqlalchemy as sa
@@ -124,6 +125,12 @@ def parse_geometry_details(adaptor: ASGIAdaptor, fmt: str) -> Dict[str, Any]:
} }
def has_search_name(conn: sa.engine.Connection) -> bool:
""" Check if the search_name table exists in the database.
"""
return sa.inspect(conn).has_table('search_name')
async def status_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any: async def status_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
""" Server glue for /status endpoint. See API docs for details. """ Server glue for /status endpoint. See API docs for details.
""" """
@@ -327,6 +334,8 @@ async def search_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
details['layers'] = DataLayer.ADDRESS details['layers'] = DataLayer.ADDRESS
else: else:
details['layers'] = get_layers(params) details['layers'] = get_layers(params)
details['locales'] = Locales.from_accept_languages(get_accepted_languages(params),
params.config().OUTPUT_NAMES)
# unstructured query parameters # unstructured query parameters
query = params.get('q', None) query = params.get('q', None)
@@ -352,8 +361,7 @@ async def search_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
except UsageError as err: except UsageError as err:
params.raise_error(str(err)) params.raise_error(str(err))
Locales.from_accept_languages(get_accepted_languages(params), details['locales'].localize_results(results)
params.config().OUTPUT_NAMES).localize_results(results)
if details['dedupe'] and len(results) > 1: if details['dedupe'] and len(results) > 1:
results = helpers.deduplicate_results(results, max_results) results = helpers.deduplicate_results(results, max_results)
@@ -441,6 +449,61 @@ async def polygons_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
return build_response(params, params.formatting().format_result(results, fmt, {})) return build_response(params, params.formatting().format_result(results, fmt, {}))
async def search_unavailable_endpoint(api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
""" Server glue for /search endpoint in reverse-only mode.
Returns 404 when search functionality is not available.
"""
params.raise_error('Search not available (reverse-only mode)', 404)
class LazySearchEndpoint:
"""
Lazy-loading search endpoint that replaces itself after first successful check.
- Falcon: EndpointWrapper stores this instance in wrapper.func
On first request, replace wrapper.func directly with real endpoint
- Starlette: _wrap_endpoint wraps this instance in a callback
store a delegate function and call it on subsequent requests
"""
def __init__(self, api: NominatimAPIAsync, real_endpoint: EndpointFunc):
self.api = api
self.real_endpoint = real_endpoint
self._lock = asyncio.Lock()
self._wrapper: Any = None # Store reference to Falcon's EndpointWrapper
self._delegate: Optional[EndpointFunc] = None
def set_wrapper(self, wrapper: Any) -> None:
self._wrapper = wrapper
async def __call__(self, api: NominatimAPIAsync, params: ASGIAdaptor) -> Any:
if self._delegate is None:
async with self._lock:
# Double-check after acquiring lock (thread safety)
if self._delegate is None:
try:
async with api.begin() as conn:
has_table = await conn.connection.run_sync(
has_search_name)
if has_table:
# For Starlette
self._delegate = self.real_endpoint
# For Falcon
if self._wrapper is not None:
self._wrapper.func = self.real_endpoint
else:
self._delegate = search_unavailable_endpoint
if self._wrapper is not None:
self._wrapper.func = search_unavailable_endpoint
except (PGCORE_ERROR, sa.exc.OperationalError, OSError):
# No _delegate set, so retry on next request
params.raise_error('Search temporarily unavailable', 503)
return await self._delegate(api, params)
async def get_routes(api: NominatimAPIAsync) -> Sequence[Tuple[str, EndpointFunc]]: async def get_routes(api: NominatimAPIAsync) -> Sequence[Tuple[str, EndpointFunc]]:
routes = [ routes = [
('status', status_endpoint), ('status', status_endpoint),
@@ -451,15 +514,13 @@ async def get_routes(api: NominatimAPIAsync) -> Sequence[Tuple[str, EndpointFunc
('polygons', polygons_endpoint), ('polygons', polygons_endpoint),
] ]
def has_search_name(conn: sa.engine.Connection) -> bool:
insp = sa.inspect(conn)
return insp.has_table('search_name')
try: try:
async with api.begin() as conn: async with api.begin() as conn:
if await conn.connection.run_sync(has_search_name): if await conn.connection.run_sync(has_search_name):
routes.append(('search', search_endpoint)) routes.append(('search', search_endpoint))
except (PGCORE_ERROR, sa.exc.OperationalError): else:
pass # ignored routes.append(('search', search_unavailable_endpoint))
except (PGCORE_ERROR, sa.exc.OperationalError, OSError):
routes.append(('search', LazySearchEndpoint(api, search_endpoint)))
return routes return routes

View File

@@ -120,6 +120,7 @@ class NominatimArgs:
data_object: Sequence[Tuple[str, int]] data_object: Sequence[Tuple[str, int]]
data_area: Sequence[Tuple[str, int]] data_area: Sequence[Tuple[str, int]]
ro_access: bool ro_access: bool
postcode_force_reimport: bool
# Arguments to 'replication' # Arguments to 'replication'
init: bool init: bool

View File

@@ -64,4 +64,4 @@ class UpdateIndex:
if not args.boundaries_only: if not args.boundaries_only:
await indexer.index_by_rank(args.minrank, args.maxrank) await indexer.index_by_rank(args.minrank, args.maxrank)
await indexer.index_postcodes() await indexer.index_postcodes()
has_pending = indexer.has_pending() has_pending = indexer.has_pending(args.minrank, args.maxrank)

View File

@@ -84,6 +84,10 @@ class UpdateRefresh:
help='Do not enable code for propagating updates') help='Do not enable code for propagating updates')
group.add_argument('--enable-debug-statements', action='store_true', group.add_argument('--enable-debug-statements', action='store_true',
help='Enable debug warning statements in functions') help='Enable debug warning statements in functions')
group = parser.add_argument_group('Arguments for postcode refresh')
group.add_argument('--force-reimport', action='store_true',
dest='postcode_force_reimport',
help='Recompute the postcodes from scratch instead of updating')
def run(self, args: NominatimArgs) -> int: def run(self, args: NominatimArgs) -> int:
from ..tools import refresh, postcodes from ..tools import refresh, postcodes
@@ -96,7 +100,8 @@ class UpdateRefresh:
LOG.warning("Update postcodes centroid") LOG.warning("Update postcodes centroid")
tokenizer = self._get_tokenizer(args.config) tokenizer = self._get_tokenizer(args.config)
postcodes.update_postcodes(args.config.get_libpq_dsn(), postcodes.update_postcodes(args.config.get_libpq_dsn(),
args.project_dir, tokenizer) args.project_dir, tokenizer,
force_reimport=args.postcode_force_reimport)
indexer = Indexer(args.config.get_libpq_dsn(), tokenizer, indexer = Indexer(args.config.get_libpq_dsn(), tokenizer,
args.threads or 1) args.threads or 1)
asyncio.run(indexer.index_postcodes()) asyncio.run(indexer.index_postcodes())

View File

@@ -31,14 +31,19 @@ class Indexer:
self.tokenizer = tokenizer self.tokenizer = tokenizer
self.num_threads = num_threads self.num_threads = num_threads
def has_pending(self) -> bool: def has_pending(self, minrank: int = 0, maxrank: int = 30) -> bool:
""" Check if any data still needs indexing. """ Check if any data still needs indexing.
This function must only be used after the import has finished. This function must only be used after the import has finished.
Otherwise it will be very expensive. Otherwise it will be very expensive.
""" """
with connect(self.dsn) as conn: with connect(self.dsn) as conn:
with conn.cursor() as cur: with conn.cursor() as cur:
cur.execute("SELECT 'a' FROM placex WHERE indexed_status > 0 LIMIT 1") cur.execute(""" SELECT 'a'
FROM placex
WHERE rank_address BETWEEN %s AND %s
AND indexed_status > 0
LIMIT 1""",
(minrank, maxrank))
return cur.rowcount > 0 return cur.rowcount > 0
async def index_full(self, analyse: bool = True) -> None: async def index_full(self, analyse: bool = True) -> None:

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2024 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Sanitizer that preprocesses address tags for house numbers. The sanitizer Sanitizer that preprocesses address tags for house numbers. The sanitizer
@@ -10,6 +10,7 @@ allows to
* define which tags are to be considered house numbers (see 'filter-kind') * define which tags are to be considered house numbers (see 'filter-kind')
* split house number lists into individual numbers (see 'delimiters') * split house number lists into individual numbers (see 'delimiters')
* expand interpolated house numbers
Arguments: Arguments:
delimiters: Define the set of characters to be used for delimiters: Define the set of characters to be used for
@@ -23,13 +24,19 @@ Arguments:
instead of a house number. Either takes a single string instead of a house number. Either takes a single string
or a list of strings, where each string is a regular or a list of strings, where each string is a regular
expression that must match the full house number value. expression that must match the full house number value.
expand-interpolations: When true, expand house number ranges to separate numbers
when an 'interpolation' is present. (default: true)
""" """
from typing import Callable, Iterator, List from typing import Callable, Iterator, Iterable, Union
import re
from ...data.place_name import PlaceName from ...data.place_name import PlaceName
from .base import ProcessInfo from .base import ProcessInfo
from .config import SanitizerConfig from .config import SanitizerConfig
RANGE_REGEX = re.compile(r'\d+-\d+')
class _HousenumberSanitizer: class _HousenumberSanitizer:
@@ -38,21 +45,40 @@ class _HousenumberSanitizer:
self.split_regexp = config.get_delimiter() self.split_regexp = config.get_delimiter()
self.filter_name = config.get_filter('convert-to-name', 'FAIL_ALL') self.filter_name = config.get_filter('convert-to-name', 'FAIL_ALL')
self.expand_interpolations = config.get_bool('expand-interpolations', True)
def __call__(self, obj: ProcessInfo) -> None: def __call__(self, obj: ProcessInfo) -> None:
if not obj.address: if not obj.address:
return return
new_address: List[PlaceName] = [] itype: Union[int, str, None] = None
if self.expand_interpolations:
itype = next((i.name for i in obj.address if i.kind == 'interpolation'), None)
if itype is not None:
if itype == 'all':
itype = 1
elif len(itype) == 1 and itype.isdigit():
itype = int(itype)
elif itype not in ('odd', 'even'):
itype = None
new_address: list[PlaceName] = []
for item in obj.address: for item in obj.address:
if self.filter_kind(item.kind): if self.filter_kind(item.kind):
if itype is not None and RANGE_REGEX.fullmatch(item.name):
hnrs = self._expand_range(itype, item.name)
if hnrs:
new_address.extend(item.clone(kind='housenumber', name=str(hnr))
for hnr in hnrs)
continue
if self.filter_name(item.name): if self.filter_name(item.name):
obj.names.append(item.clone(kind='housenumber')) obj.names.append(item.clone(kind='housenumber'))
else: else:
new_address.extend(item.clone(kind='housenumber', name=n) new_address.extend(item.clone(kind='housenumber', name=n)
for n in self.sanitize(item.name)) for n in self.sanitize(item.name))
else: elif item.kind != 'interpolation':
# Don't touch other address items. # Ignore interpolation, otherwise don't touch other address items.
new_address.append(item) new_address.append(item)
obj.address = new_address obj.address = new_address
@@ -70,6 +96,22 @@ class _HousenumberSanitizer:
def _regularize(self, hnr: str) -> Iterator[str]: def _regularize(self, hnr: str) -> Iterator[str]:
yield hnr yield hnr
def _expand_range(self, itype: Union[str, int], hnr: str) -> Iterable[int]:
first, last = (int(i) for i in hnr.split('-'))
if isinstance(itype, int):
step = itype
else:
step = 2
if (itype == 'even' and first % 2 == 1)\
or (itype == 'odd' and first % 2 == 0):
first += 1
if (last + 1 - first) / step < 10:
return range(first, last + 1, step)
return []
def create(config: SanitizerConfig) -> Callable[[ProcessInfo], None]: def create(config: SanitizerConfig) -> Callable[[ProcessInfo], None]:
""" Create a housenumber processing function. """ Create a housenumber processing function.

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Functions for setting up and importing a new Nominatim database. Functions for setting up and importing a new Nominatim database.
@@ -219,19 +219,16 @@ async def load_data(dsn: str, threads: int) -> None:
pysql.SQL("""INSERT INTO placex ({columns}) pysql.SQL("""INSERT INTO placex ({columns})
SELECT {columns} FROM place SELECT {columns} FROM place
WHERE osm_id % {total} = {mod} WHERE osm_id % {total} = {mod}
AND NOT (class='place'
and (type='houses' or type='postcode'))
AND ST_IsValid(geometry)
""").format(columns=_COPY_COLUMNS, """).format(columns=_COPY_COLUMNS,
total=pysql.Literal(placex_threads), total=pysql.Literal(placex_threads),
mod=pysql.Literal(imod)), None) mod=pysql.Literal(imod)), None)
# Interpolations need to be copied separately # Interpolations need to be copied separately
await pool.put_query(""" await pool.put_query("""
INSERT INTO location_property_osmline (osm_id, address, linegeo) INSERT INTO location_property_osmline (osm_id, type, address, linegeo)
SELECT osm_id, address, geometry FROM place SELECT osm_id, type, address, geometry
WHERE class='place' and type='houses' and osm_type='W' FROM place_interpolation
and ST_GeometryType(geometry) = 'ST_LineString' """, None) """, None)
progress.cancel() progress.cancel()

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Functions for database migration to newer software versions. Functions for database migration to newer software versions.
@@ -14,7 +14,7 @@ from ..errors import UsageError
from ..config import Configuration from ..config import Configuration
from ..db import properties from ..db import properties
from ..db.connection import connect, Connection, \ from ..db.connection import connect, Connection, \
table_exists, register_hstore table_exists, register_hstore, table_has_column
from ..db.sql_preprocessor import SQLPreprocessor from ..db.sql_preprocessor import SQLPreprocessor
from ..version import NominatimVersion, NOMINATIM_VERSION, parse_version from ..version import NominatimVersion, NOMINATIM_VERSION, parse_version
from ..tokenizer import factory as tokenizer_factory from ..tokenizer import factory as tokenizer_factory
@@ -350,3 +350,90 @@ def create_place_postcode_table(conn: Connection, config: Configuration, **_: An
WHERE osm_type = 'N' and rank_search < 26 and class = 'place'; WHERE osm_type = 'N' and rank_search < 26 and class = 'place';
ANALYSE; ANALYSE;
""") """)
@_migration(5, 2, 99, 3)
def create_place_interpolation_table(conn: Connection, config: Configuration, **_: Any) -> None:
""" Create place_interpolation table
"""
sqlp = SQLPreprocessor(conn, config)
mutable = not is_frozen(conn)
has_place_table = table_exists(conn, 'place_interpolation')
if mutable and not has_place_table:
# create tables
conn.execute("""
CREATE TABLE place_interpolation (
osm_id BIGINT NOT NULL,
type TEXT NOT NULL,
address HSTORE,
nodes BIGINT[] NOT NULL,
geometry GEOMETRY(LineString, 4326)
);
CREATE TABLE IF NOT EXISTS place_interpolation_to_be_deleted (
osm_id BIGINT NOT NULL
);
""")
# copy data over
conn.execute("""
ALTER TABLE place DISABLE TRIGGER ALL;
WITH deleted AS (
DELETE FROM place
WHERE class='place' and type = 'houses'
RETURNING osm_type, osm_id,
address->'interpolation' as itype,
address - 'interpolation'::TEXT as address,
geometry)
INSERT INTO place_interpolation (osm_id, type, address, nodes, geometry)
(SELECT d.osm_id, d.itype, d.address, p.nodes, d.geometry
FROM deleted d, planet_osm_ways p
WHERE osm_type = 'W'
AND d.osm_id = p.id
AND itype is not null
AND ST_GeometryType(geometry) = 'ST_LineString');
ALTER TABLE place ENABLE TRIGGER ALL;
""")
# create indices
conn.execute("""
CREATE INDEX place_interpolation_nodes_idx ON place_interpolation
USING gin(nodes);
CREATE INDEX place_interpolation_osm_id_idx ON place_interpolation
USING btree(osm_id);
""")
# create triggers
sqlp.run_sql_file(conn, 'functions/interpolation.sql')
conn.execute("""
CREATE TRIGGER place_interpolation_before_insert BEFORE INSERT ON place_interpolation
FOR EACH ROW EXECUTE PROCEDURE place_interpolation_insert();
CREATE TRIGGER place_interpolation_before_delete BEFORE DELETE ON place_interpolation
FOR EACH ROW EXECUTE PROCEDURE place_interpolation_delete();
""")
# mutate location_property_osmline table
conn.execute("""
ALTER TABLE location_property_osmline ADD COLUMN type TEXT;
UPDATE location_property_osmline
SET type = coalesce(address->'interpolation', 'all'),
address = address - 'interpolation'::TEXT;
""")
@_migration(5, 2, 99, 4)
def backfill_importance(conn: Connection, **_: Any) -> None:
""" Backfill missing importance values.
"""
conn.execute("""UPDATE placex
SET importance = 0.40001 - (rank_search::float / 75)
WHERE importance is NULL OR importance <= 0
""")
if table_exists(conn, 'search_name')\
and table_has_column(conn, 'search_name', 'search_rank'):
conn.execute("""UPDATE search_name
SET importance = 0.40001 - (search_rank::float / 75)
WHERE importance is NULL OR importance <= 0
""")
conn.execute("ALTER TABLE search_name DROP COLUMN search_rank")

View File

@@ -78,7 +78,7 @@ class _PostcodeCollector:
self.collected[normalized] += (x, y) self.collected[normalized] += (x, y)
def commit(self, conn: Connection, analyzer: AbstractAnalyzer, def commit(self, conn: Connection, analyzer: AbstractAnalyzer,
project_dir: Optional[Path]) -> None: project_dir: Optional[Path], is_initial: bool) -> None:
""" Update postcodes for the country from the postcodes selected so far. """ Update postcodes for the country from the postcodes selected so far.
When 'project_dir' is set, then any postcode files found in this When 'project_dir' is set, then any postcode files found in this
@@ -87,6 +87,9 @@ class _PostcodeCollector:
if project_dir is not None: if project_dir is not None:
self._update_from_external(analyzer, project_dir) self._update_from_external(analyzer, project_dir)
if is_initial:
to_delete = []
else:
with conn.cursor() as cur: with conn.cursor() as cur:
cur.execute("""SELECT postcode FROM location_postcodes cur.execute("""SELECT postcode FROM location_postcodes
WHERE country_code = %s AND osm_id is null""", WHERE country_code = %s AND osm_id is null""",
@@ -102,22 +105,32 @@ class _PostcodeCollector:
with conn.cursor() as cur: with conn.cursor() as cur:
if to_add: if to_add:
cur.executemany(pysql.SQL( columns = ['country_code',
"""INSERT INTO location_postcodes 'rank_search',
(country_code, rank_search, postcode, centroid, geometry) 'postcode',
VALUES ({}, {}, %(pc)s, 'centroid',
ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326), 'geometry']
expand_by_meters(ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326), {})) values = [pysql.Literal(self.country),
""").format(pysql.Literal(self.country),
pysql.Literal(_extent_to_rank(self.extent)), pysql.Literal(_extent_to_rank(self.extent)),
pysql.Literal(self.extent)), pysql.Placeholder('pc'),
pysql.SQL('ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326)'),
pysql.SQL("""expand_by_meters(
ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326), {})""")
.format(pysql.Literal(self.extent))]
if is_initial:
columns.extend(('place_id', 'indexed_status'))
values.extend((pysql.SQL("nextval('seq_place')"), pysql.Literal(1)))
cur.executemany(pysql.SQL("INSERT INTO location_postcodes ({}) VALUES ({})")
.format(pysql.SQL(',')
.join(pysql.Identifier(c) for c in columns),
pysql.SQL(',').join(values)),
to_add) to_add)
if to_delete: if to_delete:
cur.execute("""DELETE FROM location_postcodes cur.execute("""DELETE FROM location_postcodes
WHERE country_code = %s and postcode = any(%s) WHERE country_code = %s and postcode = any(%s)
AND osm_id is null AND osm_id is null
""", (self.country, to_delete)) """, (self.country, to_delete))
cur.execute("ANALYSE location_postcodes")
def _update_from_external(self, analyzer: AbstractAnalyzer, project_dir: Path) -> None: def _update_from_external(self, analyzer: AbstractAnalyzer, project_dir: Path) -> None:
""" Look for an external postcode file for the active country in """ Look for an external postcode file for the active country in
@@ -164,7 +177,8 @@ class _PostcodeCollector:
return None return None
def update_postcodes(dsn: str, project_dir: Optional[Path], tokenizer: AbstractTokenizer) -> None: def update_postcodes(dsn: str, project_dir: Optional[Path],
tokenizer: AbstractTokenizer, force_reimport: bool = False) -> None:
""" Update the table of postcodes from the input tables """ Update the table of postcodes from the input tables
placex and place_postcode. placex and place_postcode.
""" """
@@ -176,38 +190,69 @@ def update_postcodes(dsn: str, project_dir: Optional[Path], tokenizer: AbstractT
SET country_code = get_country_code(centroid) SET country_code = get_country_code(centroid)
WHERE country_code is null WHERE country_code is null
""") """)
if force_reimport:
conn.execute("TRUNCATE location_postcodes")
is_initial = True
else:
is_initial = _is_postcode_table_empty(conn)
if is_initial:
conn.execute("""ALTER TABLE location_postcodes
DISABLE TRIGGER location_postcodes_before_insert""")
# Now update first postcode areas # Now update first postcode areas
_update_postcode_areas(conn, analyzer, matcher) _update_postcode_areas(conn, analyzer, matcher, is_initial)
# Then fill with estimated postcode centroids from other info # Then fill with estimated postcode centroids from other info
_update_guessed_postcode(conn, analyzer, matcher, project_dir) _update_guessed_postcode(conn, analyzer, matcher, project_dir, is_initial)
if is_initial:
conn.execute("""ALTER TABLE location_postcodes
ENABLE TRIGGER location_postcodes_before_insert""")
conn.commit() conn.commit()
analyzer.update_postcodes_from_db() analyzer.update_postcodes_from_db()
def _is_postcode_table_empty(conn: Connection) -> bool:
""" Check if there are any entries in the location_postcodes table yet.
"""
with conn.cursor() as cur:
cur.execute("SELECT place_id FROM location_postcodes LIMIT 1")
return cur.fetchone() is None
def _insert_postcode_areas(conn: Connection, country_code: str, def _insert_postcode_areas(conn: Connection, country_code: str,
extent: int, pcs: list[dict[str, str]]) -> None: extent: int, pcs: list[dict[str, str]],
is_initial: bool) -> None:
if pcs: if pcs:
with conn.cursor() as cur: with conn.cursor() as cur:
columns = ['osm_id', 'country_code',
'rank_search', 'postcode',
'centroid', 'geometry']
values = [pysql.Identifier('osm_id'), pysql.Identifier('country_code'),
pysql.Literal(_extent_to_rank(extent)), pysql.Placeholder('out'),
pysql.Identifier('centroid'), pysql.Identifier('geometry')]
if is_initial:
columns.extend(('place_id', 'indexed_status'))
values.extend((pysql.SQL("nextval('seq_place')"), pysql.Literal(1)))
cur.executemany( cur.executemany(
pysql.SQL( pysql.SQL(
""" INSERT INTO location_postcodes """ INSERT INTO location_postcodes ({})
(osm_id, country_code, rank_search, postcode, centroid, geometry) SELECT {} FROM place_postcode
SELECT osm_id, country_code, {}, %(out)s, centroid, geometry
FROM place_postcode
WHERE osm_type = 'R' WHERE osm_type = 'R'
and country_code = {} and postcode = %(in)s and country_code = {} and postcode = %(in)s
and geometry is not null and geometry is not null
""").format(pysql.Literal(_extent_to_rank(extent)), """).format(pysql.SQL(',')
.join(pysql.Identifier(c) for c in columns),
pysql.SQL(',').join(values),
pysql.Literal(country_code)), pysql.Literal(country_code)),
pcs) pcs)
def _update_postcode_areas(conn: Connection, analyzer: AbstractAnalyzer, def _update_postcode_areas(conn: Connection, analyzer: AbstractAnalyzer,
matcher: PostcodeFormatter) -> None: matcher: PostcodeFormatter, is_initial: bool) -> None:
""" Update the postcode areas made from postcode boundaries. """ Update the postcode areas made from postcode boundaries.
""" """
# first delete all areas that have gone # first delete all areas that have gone
if not is_initial:
conn.execute(""" DELETE FROM location_postcodes pc conn.execute(""" DELETE FROM location_postcodes pc
WHERE pc.osm_id is not null WHERE pc.osm_id is not null
AND NOT EXISTS( AND NOT EXISTS(
@@ -230,7 +275,8 @@ def _update_postcode_areas(conn: Connection, analyzer: AbstractAnalyzer,
fmt = matcher.get_matcher(country_code) fmt = matcher.get_matcher(country_code)
elif country_code != cc: elif country_code != cc:
_insert_postcode_areas(conn, country_code, _insert_postcode_areas(conn, country_code,
matcher.get_postcode_extent(country_code), pcs) matcher.get_postcode_extent(country_code), pcs,
is_initial)
country_code = cc country_code = cc
fmt = matcher.get_matcher(country_code) fmt = matcher.get_matcher(country_code)
pcs = [] pcs = []
@@ -241,17 +287,22 @@ def _update_postcode_areas(conn: Connection, analyzer: AbstractAnalyzer,
if country_code is not None and pcs: if country_code is not None and pcs:
_insert_postcode_areas(conn, country_code, _insert_postcode_areas(conn, country_code,
matcher.get_postcode_extent(country_code), pcs) matcher.get_postcode_extent(country_code), pcs,
is_initial)
def _update_guessed_postcode(conn: Connection, analyzer: AbstractAnalyzer, def _update_guessed_postcode(conn: Connection, analyzer: AbstractAnalyzer,
matcher: PostcodeFormatter, project_dir: Optional[Path]) -> None: matcher: PostcodeFormatter, project_dir: Optional[Path],
is_initial: bool) -> None:
""" Computes artificial postcode centroids from the placex table, """ Computes artificial postcode centroids from the placex table,
potentially enhances it with external data and then updates the potentially enhances it with external data and then updates the
postcodes in the table 'location_postcodes'. postcodes in the table 'location_postcodes'.
""" """
# First get the list of countries that currently have postcodes. # First get the list of countries that currently have postcodes.
# (Doing this before starting to insert, so it is fast on import.) # (Doing this before starting to insert, so it is fast on import.)
if is_initial:
todo_countries: set[str] = set()
else:
with conn.cursor() as cur: with conn.cursor() as cur:
cur.execute("""SELECT DISTINCT country_code FROM location_postcodes cur.execute("""SELECT DISTINCT country_code FROM location_postcodes
WHERE osm_id is null""") WHERE osm_id is null""")
@@ -275,6 +326,7 @@ def _update_guessed_postcode(conn: Connection, analyzer: AbstractAnalyzer,
FROM place_postcode WHERE geometry is not null) FROM place_postcode WHERE geometry is not null)
""") """)
cur.execute("CREATE INDEX ON _global_postcode_area USING gist(geometry)") cur.execute("CREATE INDEX ON _global_postcode_area USING gist(geometry)")
# Recompute the list of valid postcodes from placex. # Recompute the list of valid postcodes from placex.
with conn.cursor(name="placex_postcodes") as cur: with conn.cursor(name="placex_postcodes") as cur:
cur.execute(""" cur.execute("""
@@ -296,7 +348,7 @@ def _update_guessed_postcode(conn: Connection, analyzer: AbstractAnalyzer,
for country, postcode, x, y in cur: for country, postcode, x, y in cur:
if collector is None or country != collector.country: if collector is None or country != collector.country:
if collector is not None: if collector is not None:
collector.commit(conn, analyzer, project_dir) collector.commit(conn, analyzer, project_dir, is_initial)
collector = _PostcodeCollector(country, matcher.get_matcher(country), collector = _PostcodeCollector(country, matcher.get_matcher(country),
matcher.get_postcode_extent(country), matcher.get_postcode_extent(country),
exclude=area_pcs[country]) exclude=area_pcs[country])
@@ -304,14 +356,14 @@ def _update_guessed_postcode(conn: Connection, analyzer: AbstractAnalyzer,
collector.add(postcode, x, y) collector.add(postcode, x, y)
if collector is not None: if collector is not None:
collector.commit(conn, analyzer, project_dir) collector.commit(conn, analyzer, project_dir, is_initial)
# Now handle any countries that are only in the postcode table. # Now handle any countries that are only in the postcode table.
for country in todo_countries: for country in todo_countries:
fmt = matcher.get_matcher(country) fmt = matcher.get_matcher(country)
ext = matcher.get_postcode_extent(country) ext = matcher.get_postcode_extent(country)
_PostcodeCollector(country, fmt, ext, _PostcodeCollector(country, fmt, ext,
exclude=area_pcs[country]).commit(conn, analyzer, project_dir) exclude=area_pcs[country]).commit(conn, analyzer, project_dir, False)
conn.execute("DROP TABLE IF EXISTS _global_postcode_area") conn.execute("DROP TABLE IF EXISTS _global_postcode_area")

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Version information for Nominatim. Version information for Nominatim.
@@ -55,7 +55,7 @@ def parse_version(version: str) -> NominatimVersion:
return NominatimVersion(*[int(x) for x in parts[:2] + parts[2].split('-')]) return NominatimVersion(*[int(x) for x in parts[:2] + parts[2].split('-')])
NOMINATIM_VERSION = parse_version('5.2.99-2') NOMINATIM_VERSION = parse_version('5.2.99-4')
POSTGRESQL_REQUIRED_VERSION = (12, 0) POSTGRESQL_REQUIRED_VERSION = (12, 0)
POSTGIS_REQUIRED_VERSION = (3, 0) POSTGIS_REQUIRED_VERSION = (3, 0)

View File

@@ -318,6 +318,28 @@ Feature: Search queries
| jsonv2 | json | | jsonv2 | json |
| geojson | geojson | | geojson | geojson |
Scenario Outline: Search boundary=administrative with extratags=1 returns admin_level
When sending v1/search with format <format>
| q | featureType | extratags |
| Triesenberg | city | 1 |
Then a HTTP 200 is returned
And the result is valid <outformat>
And more than 0 results are returned
And result 0 contains
| <cname> | <tname> |
| boundary | administrative |
And result 0 contains in field <ename>
| param | value |
| admin_level | 8 |
Examples:
| format | outformat | cname | tname | ename |
| xml | xml | class | type | extratags |
| json | json | class | type | extratags |
| jsonv2 | json | category | type | extratags |
| geojson | geojson | category | type | extratags |
| geocodejson | geocodejson | osm_key | osm_value | extra |
Scenario Outline: Search with namedetails Scenario Outline: Search with namedetails
When sending v1/search with format <format> When sending v1/search with format <format>
| q | namedetails | | q | namedetails |

View File

@@ -241,8 +241,8 @@ Feature: Address computation
Scenario: buildings with only addr:postcodes do not appear in the address of a way Scenario: buildings with only addr:postcodes do not appear in the address of a way
Given the grid with origin DE Given the grid with origin DE
| 1 | | | | | 8 | | 6 | | 2 | | 1 | | | | | 8 | | 6 | | 2 |
| |10 |11 | | | | | | | | | | | | | | | | | | |
| |13 |12 | | | | | | | | | |13 | | | | | | | | |
| 20| | | 21| | | | | | | | 20| | | 21| | | | | | |
| | | | | | | | | | | | | | | | | | | | | |
| | | | | | 9 | | | | | | | | | | | 9 | | | | |
@@ -255,9 +255,9 @@ Feature: Address computation
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W93 | highway | residential | 20,21 | | W93 | highway | residential | 20,21 |
And the places And the postcodes
| osm | class | type | addr+postcode | geometry | | osm | postcode | centroid |
| W22 | place | postcode | 11234 | (10,11,12,13,10) | | W22 | 11234 | 13 |
When importing When importing
Then place_addressline contains exactly Then place_addressline contains exactly
| object | address | | object | address |

View File

@@ -8,12 +8,9 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,2 | | W1 | even | 1,2 | 1,2 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing When importing
Then W1 expands to no interpolation Then W1 expands to no interpolation
@@ -25,15 +22,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,2 | | W1 | even | 1,2 | 1,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -47,15 +41,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 8 | | N2 | place | house | 8 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 2,1 | | W1 | even | 2,1 | 2,1 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 2,1 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -69,15 +60,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 1 | | N1 | place | house | 1 |
| N2 | place | house | 11 | | N2 | place | house | 11 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | odd | 1,2 | | W1 | odd | 1,2 | 1,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -91,15 +79,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 1 | | N1 | place | house | 1 |
| N2 | place | house | 4 | | N2 | place | house | 4 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | all | 1,2 | | W1 | all | 1,2 | 1,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -113,15 +98,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 12 | | N2 | place | house | 12 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,3,2 | | W1 | even | 1,3,2 | 1,3,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -135,15 +117,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 10 | | N2 | place | house | 10 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,3,2 | | W1 | even | 1,3,2 | 1,3,3,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,3,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -158,15 +137,12 @@ Feature: Import of address interpolations
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 14 | | N2 | place | house | 14 |
| N3 | place | house | 10 | | N3 | place | house | 10 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,3,2 | | W1 | even | 1,3,2 | 1,3,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -184,15 +160,12 @@ Feature: Import of address interpolations
| N2 | place | house | 14 | | N2 | place | house | 14 |
| N3 | place | house | 10 | | N3 | place | house | 10 |
| N4 | place | house | 18 | | N4 | place | house | 18 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,3,2,4 | | W1 | even | 1,3,2,4 | 1,3,2,4 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 1,3,2,4 | | W10 | highway | residential | 1,3,2,4 |
And the ways
| id | nodes |
| 1 | 1,3,2,4 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -209,15 +182,12 @@ Feature: Import of address interpolations
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 14 | | N2 | place | house | 14 |
| N3 | place | house | 10 | | N3 | place | house | 10 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 2,3,1 | | W1 | even | 2,3,1 | 2,3,1 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 2,3,1 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -233,15 +203,12 @@ Feature: Import of address interpolations
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 8 | | N2 | place | house | 8 |
| N3 | place | house | 7 | | N3 | place | house | 7 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,3,2 | | W1 | even | 1,3,2 | 1,3,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,3,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -257,15 +224,12 @@ Feature: Import of address interpolations
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
| N3 | place | house | 10 | | N3 | place | house | 10 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,2,3,2 | | W1 | even | 1,2,3,2 | 1,2,3,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 1,2,3 | | W10 | highway | residential | 1,2,3 |
And the ways
| id | nodes |
| 1 | 1,2,3,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -281,15 +245,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,2,3,2 | | W1 | even | 1,2,3,2 | 1,2,3,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 1,2,3 | | W10 | highway | residential | 1,2,3 |
And the ways
| id | nodes |
| 1 | 1,2,3,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -306,18 +267,14 @@ Feature: Import of address interpolations
| N2 | place | house | 6 | 2 | | N2 | place | house | 6 | 2 |
| N3 | place | house | 12 | 1 | | N3 | place | house | 12 | 1 |
| N4 | place | house | 16 | 2 | | N4 | place | house | 16 | 2 |
And the places And the interpolations
| osm | class | type | addr+interpolation | street | geometry | | osm | type | street | nodes | geometry | nodes |
| W10 | place | houses | even | | 1,2 | | W10 | even | | 1,2 | 1,2 | 1,2 |
| W11 | place | houses | even | Cloud Street | 1,2 | | W11 | even | Cloud Street | 3,4 | 1,2 | 3,4 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | tertiary | Sun Way | 10,11 | | W2 | highway | tertiary | Sun Way | 10,11 |
| W3 | highway | tertiary | Cloud Street | 20,21 | | W3 | highway | tertiary | Cloud Street | 20,21 |
And the ways
| id | nodes |
| 10 | 1,2 |
| 11 | 3,4 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -351,18 +308,14 @@ Feature: Import of address interpolations
| N2 | place | house | 6 | | 2 | | N2 | place | house | 6 | | 2 |
| N3 | place | house | 12 | Cloud Street | 1 | | N3 | place | house | 12 | Cloud Street | 1 |
| N4 | place | house | 16 | Cloud Street | 2 | | N4 | place | house | 16 | Cloud Street | 2 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W10 | place | houses | even | 1,2 | | W10 | even | 1,2 | 1,2 |
| W11 | place | houses | even | 1,2 | | W11 | even | 1,2 | 3,4 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | tertiary | Sun Way | 10,11 | | W2 | highway | tertiary | Sun Way | 10,11 |
| W3 | highway | tertiary | Cloud Street | 20,21 | | W3 | highway | tertiary | Cloud Street | 20,21 |
And the ways
| id | nodes |
| 10 | 1,2 |
| 11 | 3,4 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -391,15 +344,12 @@ Feature: Import of address interpolations
| N1 | place | house | 10 | 144.9632341 -37.76163 | | N1 | place | house | 10 | 144.9632341 -37.76163 |
| N2 | place | house | 6 | 144.9630541 -37.7628174 | | N2 | place | house | 6 | 144.9630541 -37.7628174 |
| N3 | shop | supermarket | 2 | 144.9629794 -37.7630755 | | N3 | shop | supermarket | 2 | 144.9629794 -37.7630755 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 144.9632341 -37.76163,144.9630541 -37.7628172,144.9629794 -37.7630755 | | W1 | even | 144.9632341 -37.76163,144.9630541 -37.7628172,144.9629794 -37.7630755 | 1,2,3 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 144.9632341 -37.76163,144.9629794 -37.7630755 | | W10 | highway | residential | 144.9632341 -37.76163,144.9629794 -37.7630755 |
And the ways
| id | nodes |
| 1 | 1,2,3 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -415,24 +365,21 @@ Feature: Import of address interpolations
| N1 | place | house | 23 | | N1 | place | house | 23 |
| N2 | amenity | school | | | N2 | amenity | school | |
| N3 | place | house | 29 | | N3 | place | house | 29 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | odd | 1,2,3 | | W1 | odd | 1,2,3 | 1,2,3 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2,3 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
| 25 | 27 | 0.0000166 0,0.00002 0,0.0000333 0 | | 25 | 27 | 0.0000166 0,0.00002 0,0.0000333 0 |
Scenario: Ways without node entries are ignored Scenario: Ways without node entries are ignored
Given the places Given the interpolations
| osm | class | type | housenr | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1 1, 1 1.001 | | W1 | even | 1 1, 1 1.001 | 34,45 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 1 1, 1 1.001 | | W10 | highway | residential | 1 1, 1 1.001 |
@@ -447,9 +394,9 @@ Feature: Import of address interpolations
| osm | class | type | | osm | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
Given the places Given the interpolations
| osm | class | type | housenr | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,2 | | W1 | even | 1,2 | 1,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
@@ -464,15 +411,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 0 | | N1 | place | house | 0 |
| N2 | place | house | 10 | | N2 | place | house | 10 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,2 | | W1 | even | 1,2 | 1,2 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W10 | highway | residential | London Road |4,5 | | W10 | highway | residential | London Road |4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -497,12 +441,9 @@ Feature: Import of address interpolations
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W1 | highway | residential | Vert St | 1,2 | | W1 | highway | residential | Vert St | 1,2 |
| W2 | highway | residential | Horiz St | 2,3 | | W2 | highway | residential | Horiz St | 2,3 |
And the places And the interpolations
| osm | class | type | addr+interpolation | addr+inclusion | geometry | | osm | type | addr+inclusion | geometry | nodes |
| W10 | place | houses | even | actual | 8,9 | | W10 | even | actual | 8,9 | 8,9 |
And the ways
| id | nodes |
| 10 | 8,9 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -521,15 +462,12 @@ Feature: Import of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | <value> | 1,2 | | W1 | <value> | 1,2 | 1,2 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing When importing
Then W1 expands to no interpolation Then W1 expands to no interpolation
@@ -549,15 +487,12 @@ Feature: Import of address interpolations
| N2 | place | house | 18 | 3 | | N2 | place | house | 18 | 3 |
| N3 | place | house | 24 | 9 | | N3 | place | house | 24 | 9 |
| N4 | place | house | 42 | 4 | | N4 | place | house | 42 | 4 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,2,3,4 | | W1 | even | 1,2,3,4 | 1,2,3,4 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 1,4 | | W10 | highway | residential | 1,4 |
And the ways
| id | nodes |
| 1 | 1,2,3,4 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | | start | end |
@@ -576,15 +511,12 @@ Feature: Import of address interpolations
| N2 | place | house | 6 | 8 | | N2 | place | house | 6 | 8 |
| N3 | place | house | 10 | 8 | | N3 | place | house | 10 | 8 |
| N4 | place | house | 14 | 9 | | N4 | place | house | 14 | 9 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 7,8,8,9 | | W1 | even | 7,8,8,9 | 1,2,3,4 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 4,5 | | W10 | highway | residential | 4,5 |
And the ways
| id | nodes |
| 1 | 1,2,3,4 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |
@@ -601,15 +533,12 @@ Feature: Import of address interpolations
| N2 | place | house | 8 | | N2 | place | house | 8 |
| N3 | place | house | 12 | | N3 | place | house | 12 |
| N4 | place | house | 14 | | N4 | place | house | 14 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 8,9 | | W1 | even | 8,9 | 1,8,9,2,3,4 |
And the named places And the named places
| osm | class | type | geometry | | osm | class | type | geometry |
| W10 | highway | residential | 1,4 | | W10 | highway | residential | 1,4 |
And the ways
| id | nodes |
| 1 | 1,8,9,2,3,4 |
When importing When importing
Then W1 expands to interpolation Then W1 expands to interpolation
| start | end | geometry | | start | end | geometry |

View File

@@ -297,8 +297,9 @@ Feature: Linking of places
| R1 | LabelPlace | | R1 | LabelPlace |
@skip
Scenario: Linked places expand default language names Scenario: Linked places expand default language names
Given the grid with origin CO Given the grid
| 1 | | 2 | | 1 | | 2 |
| | 9 | | | | 9 | |
| 4 | | 3 | | 4 | | 3 |

View File

@@ -287,34 +287,62 @@ Feature: Searching of house numbers
| N1 | | N1 |
Scenario: Interpolations are found according to their type Scenario: A housenumber with interpolation is found
Given the grid Given the places
| 10 | | 11 | | osm | class | type | housenr | addr+interpolation | geometry |
| 100 | | 101 | | N1 | building | yes | 1-5 | odd | 9 |
| 20 | | 21 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W100 | highway | residential | Ringstr | 100, 101 | | W10 | highway | path | Rue Paris | 1,2,3 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W10 | place | houses | even | 10, 11 |
| W20 | place | houses | odd | 20, 21 |
And the places
| osm | class | type | housenr | geometry |
| N10 | place | house | 10 | 10 |
| N11 | place | house | 20 | 11 |
| N20 | place | house | 11 | 20 |
| N21 | place | house | 21 | 21 |
And the ways
| id | nodes |
| 10 | 10, 11 |
| 20 | 20, 21 |
When importing When importing
When geocoding "Ringstr 12" When geocoding "Rue Paris 1"
Then the result set contains
| object | address+house_number |
| N1 | 1-5 |
When geocoding "Rue Paris 3"
Then the result set contains
| object | address+house_number |
| N1 | 1-5 |
When geocoding "Rue Paris 5"
Then the result set contains
| object | address+house_number |
| N1 | 1-5 |
When geocoding "Rue Paris 2"
Then the result set contains Then the result set contains
| object | | object |
| W10 | | W10 |
When geocoding "Ringstr 13"
Scenario: A housenumber with bad interpolation is ignored
Given the places
| osm | class | type | housenr | addr+interpolation | geometry |
| N1 | building | yes | 1-5 | bad | 9 |
And the places
| osm | class | type | name | geometry |
| W10 | highway | path | Rue Paris | 1,2,3 |
When importing
When geocoding "Rue Paris 1-5"
Then the result set contains
| object | address+house_number |
| N1 | 1-5 |
When geocoding "Rue Paris 3"
Then the result set contains Then the result set contains
| object | | object |
| W20 | | W10 |
Scenario: A bad housenumber with a good interpolation is just a housenumber
Given the places
| osm | class | type | housenr | addr+interpolation | geometry |
| N1 | building | yes | 1-100 | all | 9 |
And the places
| osm | class | type | name | geometry |
| W10 | highway | path | Rue Paris | 1,2,3 |
When importing
When geocoding "Rue Paris 1-100"
Then the result set contains
| object | address+house_number |
| N1 | 1-100 |
When geocoding "Rue Paris 3"
Then the result set contains
| object |
| W10 |

View File

@@ -11,16 +11,13 @@ Feature: Query of address interpolations
Given the places Given the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W10 | highway | primary | Nickway | 10,12,13 | | W10 | highway | primary | Nickway | 10,12,13 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | odd | 1,3 | | W1 | odd | 1,3 | 1,3 |
And the places And the places
| osm | class | type | housenr | geometry | | osm | class | type | housenr | geometry |
| N1 | place | house | 1 | 1 | | N1 | place | house | 1 | 1 |
| N3 | place | house | 5 | 3 | | N3 | place | house | 5 | 3 |
And the ways
| id | nodes |
| 1 | 1,3 |
When importing When importing
When reverse geocoding at node 2 When reverse geocoding at node 2
Then the result contains Then the result contains
@@ -36,16 +33,13 @@ Feature: Query of address interpolations
Given the places Given the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W10 | highway | primary | Nickway | 10,12,13 | | W10 | highway | primary | Nickway | 10,12,13 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W1 | place | houses | even | 1,3 | | W1 | even | 1,3 | 1,3 |
And the places And the places
| osm | class | type | housenr | geometry | | osm | class | type | housenr | geometry |
| N1 | place | house | 2 | 1 | | N1 | place | house | 2 | 1 |
| N3 | place | house | 18 | 3 | | N3 | place | house | 18 | 3 |
And the ways
| id | nodes |
| 1 | 1,3 |
When importing When importing
When reverse geocoding at node 2 When reverse geocoding at node 2
Then the result contains Then the result contains
@@ -55,3 +49,32 @@ Feature: Query of address interpolations
Then all results contain Then all results contain
| object | display_name | centroid!wkt | | object | display_name | centroid!wkt |
| W1 | 10, Nickway | 2 | | W1 | 10, Nickway | 2 |
Scenario: Interpolations are found according to their type
Given the grid
| 10 | | 11 |
| 100 | | 101 |
| 20 | | 21 |
And the places
| osm | class | type | name | geometry |
| W100 | highway | residential | Ringstr | 100, 101 |
And the interpolations
| osm | type | geometry | nodes |
| W10 | even | 10, 11 | 10, 11 |
| W20 | odd | 20, 21 | 20, 21 |
And the places
| osm | class | type | housenr | geometry |
| N10 | place | house | 10 | 10 |
| N11 | place | house | 20 | 11 |
| N20 | place | house | 11 | 20 |
| N21 | place | house | 21 | 21 |
When importing
When geocoding "Ringstr 12"
Then the result set contains
| object |
| W10 |
When geocoding "Ringstr 13"
Then the result set contains
| object |
| W20 |

View File

@@ -80,3 +80,23 @@ Feature: Searching of simple objects
| Chicago | Illinois | IL | | Chicago | Illinois | IL |
| Auburn | Alabama | AL | | Auburn | Alabama | AL |
| New Orleans | Louisiana | LA | | New Orleans | Louisiana | LA |
# github #3210
Scenario: Country with alternate-language name does not dominate when locale differs
Given the 1.0 grid with origin DE
| 1 | | 2 |
| | 10 | |
| 4 | | 3 |
Given the places
| osm | class | type | admin | name+name | name+name:fi | name+name:de | country | geometry |
| R1 | boundary | administrative | 2 | Turgei | Turgi | Testland | de | (1,2,3,4,1) |
Given the places
| osm | class | type | name+name | geometry |
| N10 | place | village | Turgi | 10 |
When importing
And geocoding "Turgi"
| accept-language |
| de |
Then result 0 contains
| object |
| N10 |

View File

@@ -11,18 +11,15 @@ Feature: Update of address interpolations
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | unclassified | Sun Way | 10,11 | | W2 | highway | unclassified | Sun Way | 10,11 |
| W3 | highway | unclassified | Cloud Street | 20,21 | | W3 | highway | unclassified | Cloud Street | 20,21 |
And the ways
| id | nodes |
| 10 | 1,2 |
When importing When importing
Then W10 expands to no interpolation Then W10 expands to no interpolation
When updating places When updating places
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And updating places And updating interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W10 | place | houses | even | 1,2 | | W10 | even | 1,2 | 1,2 |
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
| N1 | W2 | | N1 | W2 |
@@ -41,16 +38,13 @@ Feature: Update of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W10 | place | houses | even | 1,2 | | W10 | even | 1,2 | 1,2 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | unclassified | Sun Way | 10,11 | | W2 | highway | unclassified | Sun Way | 10,11 |
| W3 | highway | unclassified | Cloud Street | 20,21 | | W3 | highway | unclassified | Cloud Street | 20,21 |
And the ways
| id | nodes |
| 10 | 1,2 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -59,9 +53,9 @@ Feature: Update of address interpolations
And W10 expands to interpolation And W10 expands to interpolation
| parent_place_id | start | end | | parent_place_id | start | end |
| W2 | 4 | 4 | | W2 | 4 | 4 |
When updating places When updating interpolations
| osm | class | type | addr+interpolation | street | geometry | | osm | type | street | nodes | geometry |
| W10 | place | houses | even | Cloud Street | 1,2 | | W10 | even | Cloud Street | 1,2 | 1,2 |
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
| N1 | W3 | | N1 | W3 |
@@ -80,16 +74,13 @@ Feature: Update of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W10 | place | houses | even | 1,2 | | W10 | even | 1,2 | 1,2 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | unclassified | Sun Way | 10,11 | | W2 | highway | unclassified | Sun Way | 10,11 |
| W3 | highway | unclassified | Cloud Street | 20,21 | | W3 | highway | unclassified | Cloud Street | 20,21 |
And the ways
| id | nodes |
| 10 | 1,2 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -120,16 +111,13 @@ Feature: Update of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | geometry | | osm | type | geometry | nodes |
| W10 | place | houses | even | 1,2 | | W10 | even | 1,2 | 1,2 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | unclassified | Sun Way | 10,11 | | W2 | highway | unclassified | Sun Way | 10,11 |
| W3 | highway | unclassified | Cloud Street | 20,21 | | W3 | highway | unclassified | Cloud Street | 20,21 |
And the ways
| id | nodes |
| 10 | 1,2 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -155,15 +143,12 @@ Feature: Update of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | street | geometry | | osm | type | street | geometry | nodes |
| W10 | place | houses | even | Cloud Street| 1,2 | | W10 | even | Cloud Street| 1,2 | 1,2 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | unclassified | Sun Way | 10,11 | | W2 | highway | unclassified | Sun Way | 10,11 |
And the ways
| id | nodes |
| 10 | 1,2 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -193,16 +178,13 @@ Feature: Update of address interpolations
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And the places And the interpolations
| osm | class | type | addr+interpolation | street | geometry | | osm | type | street | geometry | nodes |
| W10 | place | houses | even | Cloud Street| 1,2 | | W10 | even | Cloud Street| 1,2 | 1,2 |
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | unclassified | Sun Way | 10,11 | | W2 | highway | unclassified | Sun Way | 10,11 |
| W3 | highway | unclassified | Cloud Street | 20,21 | | W3 | highway | unclassified | Cloud Street | 20,21 |
And the ways
| id | nodes |
| 10 | 1,2 |
When importing When importing
Then placex contains Then placex contains
| object | parent_place_id | | object | parent_place_id |
@@ -220,67 +202,6 @@ Feature: Update of address interpolations
| parent_place_id | start | end | | parent_place_id | start | end |
| W2 | 4 | 4 | | W2 | 4 | 4 |
Scenario: building becomes interpolation
Given the grid
| 10 | | | | 11 |
| | 1 | | 2 | |
| | 4 | | 3 | |
And the places
| osm | class | type | housenr | geometry |
| W1 | place | house | 3 | (1,2,3,4,1) |
And the places
| osm | class | type | name | geometry |
| W2 | highway | unclassified | Cloud Street | 10,11 |
When importing
Then placex contains
| object | parent_place_id |
| W1 | W2 |
Given the ways
| id | nodes |
| 1 | 1,2 |
When updating places
| osm | class | type | housenr |
| N1 | place | house | 2 |
| N2 | place | house | 6 |
And updating places
| osm | class | type | addr+interpolation | street | geometry |
| W1 | place | houses | even | Cloud Street| 1,2 |
Then placex has no entry for W1
And W1 expands to interpolation
| parent_place_id | start | end |
| W2 | 4 | 4 |
Scenario: interpolation becomes building
Given the grid
| 10 | | | | 11 |
| | 1 | | 2 | |
| | 4 | | 3 | |
And the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
| N2 | place | house | 6 |
And the places
| osm | class | type | name | geometry |
| W2 | highway | unclassified | Cloud Street | 10,11 |
And the ways
| id | nodes |
| 1 | 1,2 |
And the places
| osm | class | type | addr+interpolation | street | geometry |
| W1 | place | houses | even | Cloud Street| 1,2 |
When importing
Then placex has no entry for W1
And W1 expands to interpolation
| parent_place_id | start | end |
| W2 | 4 | 4 |
When updating places
| osm | class | type | housenr | geometry |
| W1 | place | house | 3 | (1,2,3,4,1) |
Then placex contains
| object | parent_place_id |
| W1 | W2 |
And W1 expands to no interpolation
Scenario: housenumbers added to interpolation Scenario: housenumbers added to interpolation
Given the grid Given the grid
| 10 | | | | 11 | | 10 | | | | 11 |
@@ -288,18 +209,18 @@ Feature: Update of address interpolations
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W2 | highway | unclassified | Cloud Street | 10,11 | | W2 | highway | unclassified | Cloud Street | 10,11 |
And the ways And the interpolations
| id | nodes | | osm | type | geometry | nodes |
| 1 | 1,2 | | W1 | even | 1,2 | 1,2 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | even | 1,2 |
When importing When importing
Then W1 expands to no interpolation Then W1 expands to no interpolation
When updating places When updating places
| osm | class | type | housenr | | osm | class | type | housenr |
| N1 | place | house | 2 | | N1 | place | house | 2 |
| N2 | place | house | 6 | | N2 | place | house | 6 |
And updating interpolations
| osm | type | geometry | nodes |
| W1 | even | 1,2 | 1,2 |
Then W1 expands to interpolation Then W1 expands to interpolation
| parent_place_id | start | end | | parent_place_id | start | end |
| W2 | 4 | 4 | | W2 | 4 | 4 |
@@ -311,12 +232,9 @@ Feature: Update of address interpolations
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W1 | highway | unclassified | Cloud Street | 1, 2 | | W1 | highway | unclassified | Cloud Street | 1, 2 |
And the ways And the interpolations
| id | nodes | | osm | type | geometry | nodes |
| 2 | 3,4,5 | | W2 | even | 3,4,5 | 3,4,5 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | even | 3,4,5 |
And the places And the places
| osm | class | type | housenr | | osm | class | type | housenr |
| N3 | place | house | 2 | | N3 | place | house | 2 |
@@ -328,12 +246,14 @@ Feature: Update of address interpolations
When updating places When updating places
| osm | class | type | housenr | | osm | class | type | housenr |
| N4 | place | house | 6 | | N4 | place | house | 6 |
And updating interpolations
| osm | type | geometry | nodes |
| W2 | even | 3,4,5 | 3,4,5 |
Then W2 expands to interpolation Then W2 expands to interpolation
| parent_place_id | start | end | | parent_place_id | start | end |
| W1 | 4 | 4 | | W1 | 4 | 4 |
| W1 | 8 | 8 | | W1 | 8 | 8 |
@skip
Scenario: housenumber removed in middle of interpolation Scenario: housenumber removed in middle of interpolation
Given the grid Given the grid
| 1 | | | | | 2 | | 1 | | | | | 2 |
@@ -341,12 +261,9 @@ Feature: Update of address interpolations
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W1 | highway | unclassified | Cloud Street | 1, 2 | | W1 | highway | unclassified | Cloud Street | 1, 2 |
And the ways And the interpolations
| id | nodes | | osm | type | geometry | nodes |
| 2 | 3,4,5 | | W2 | even | 3,4,5 | 3,4,5 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | even | 3,4,5 |
And the places And the places
| osm | class | type | housenr | | osm | class | type | housenr |
| N3 | place | house | 2 | | N3 | place | house | 2 |
@@ -358,6 +275,9 @@ Feature: Update of address interpolations
| W1 | 4 | 4 | | W1 | 4 | 4 |
| W1 | 8 | 8 | | W1 | 8 | 8 |
When marking for delete N4 When marking for delete N4
And updating interpolations
| osm | type | geometry | nodes |
| W2 | even | 3,4,5 | 3,4,5 |
Then W2 expands to interpolation Then W2 expands to interpolation
| parent_place_id | start | end | | parent_place_id | start | end |
| W1 | 4 | 8 | | W1 | 4 | 8 |
@@ -369,12 +289,9 @@ Feature: Update of address interpolations
And the places And the places
| osm | class | type | name | geometry | | osm | class | type | name | geometry |
| W1 | highway | unclassified | Cloud Street | 1, 2 | | W1 | highway | unclassified | Cloud Street | 1, 2 |
And the ways And the interpolations
| id | nodes | | osm | type | geometry | nodes |
| 2 | 3,4 | | W2 | even | 3,4 | 3,4 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | even | 3,4 |
And the places And the places
| osm | class | type | housenr | | osm | class | type | housenr |
| N3 | place | house | 2 | | N3 | place | house | 2 |
@@ -386,33 +303,9 @@ Feature: Update of address interpolations
When updating places When updating places
| osm | class | type | housenr | | osm | class | type | housenr |
| N4 | place | house | 8 | | N4 | place | house | 8 |
And updating interpolations
| osm | type | geometry | nodes |
| W2 | even | 3,4 | 3,4 |
Then W2 expands to interpolation Then W2 expands to interpolation
| parent_place_id | start | end | | parent_place_id | start | end |
| W1 | 4 | 6 | | W1 | 4 | 6 |
Scenario: Legal interpolation type changed to illegal one
Given the grid
| 1 | | 2 |
| 3 | | 4 |
And the places
| osm | class | type | name | geometry |
| W1 | highway | unclassified | Cloud Street | 1, 2 |
And the ways
| id | nodes |
| 2 | 3,4 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | even | 3,4 |
And the places
| osm | class | type | housenr |
| N3 | place | house | 2 |
| N4 | place | house | 6 |
When importing
Then W2 expands to interpolation
| parent_place_id | start | end |
| W1 | 4 | 4 |
When updating places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | 12-2 | 3,4 |
Then W2 expands to no interpolation

View File

@@ -68,19 +68,6 @@ Feature: Update of simple objects
| object | class | type | centroid!wkt | | object | class | type | centroid!wkt |
| N3 | shop | grocery | 1 -1 | | N3 | shop | grocery | 1 -1 |
Scenario: remove postcode place when house number is added
Given the places
| osm | class | type | postcode | geometry |
| N3 | place | postcode | 12345 | country:de |
When importing
Then placex has no entry for N3
When updating places
| osm | class | type | postcode | housenr | geometry |
| N3 | place | house | 12345 | 13 | country:de |
Then placex contains
| object | class | type |
| N3 | place | house |
Scenario: remove boundary when changing from polygon to way Scenario: remove boundary when changing from polygon to way
Given the grid Given the grid
| 1 | 2 | | 1 | 2 |

View File

@@ -0,0 +1,42 @@
Feature: Import of interpolations
Test if interpolation objects are correctly imported into the
place_interpolation table
Background:
Given the grid
| 1 | 2 |
| 4 | 3 |
Scenario: Simple address interpolations
When loading osm data
"""
n1
n2
w13001 Taddr:interpolation=odd,addr:street=Blumenstrasse Nn1,n2
w13002 Taddr:interpolation=even,place=city Nn1,n2
w13003 Taddr:interpolation=odd Nn1,n1
"""
Then place contains exactly
| object | class | type |
| W13002 | place | city |
And place_interpolation contains exactly
| osm_id | type | address!dict | nodes!ints | geometry!wkt |
| 13001 | odd | "street": "Blumenstrasse" | 1,2 | 1,2 |
| 13002 | even | - | 1,2 | 1,2 |
Scenario: Address interpolation with housenumber
When loading osm data
"""
n1
n2
n3
n4
w34 Taddr:interpolation=all,addr:housenumber=2-4,building=yes Nn1,n2,n3,n4,n1
w35 Taddr:interpolation=all,addr:housenumber=5,building=yes Nn1,n2,n3,n4,n1
w36 Taddr:interpolation=all,addr:housenumber=2a-c Nn1,n2,n3,n4,n1
"""
Then place contains exactly
| object | class | type | address!dict |
| W35 | building | yes | "housenumber": "5", "interpolation": "all" |
| W34 | building | yes | "housenumber": "2-4", "interpolation": "all" |
| W36 | place | house | "housenumber": "2a-c", "interpolation": "all" |

View File

@@ -205,18 +205,6 @@ Feature: Tag evaluation
| N12005 | 12345 | - | | N12005 | 12345 | - |
Scenario: Address interpolations
When loading osm data
"""
n13001 Taddr:interpolation=odd
n13002 Taddr:interpolation=even,place=city
"""
Then place contains exactly
| object | class | type | address!dict |
| N13001 | place | houses | 'interpolation': 'odd' |
| N13002 | place | houses | 'interpolation': 'even' |
Scenario: Footways Scenario: Footways
When loading osm data When loading osm data
""" """

View File

@@ -14,20 +14,24 @@ Feature: Updates of address interpolation objects
n2 Taddr:housenumber=17 n2 Taddr:housenumber=17
w33 Thighway=residential,name=Tao Nn1,n2 w33 Thighway=residential,name=Tao Nn1,n2
""" """
Then place contains Then place contains exactly
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W33 | highway | residential |
When updating osm data When updating osm data
""" """
w99 Taddr:interpolation=odd Nn1,n2 w99 Taddr:interpolation=odd Nn1,n2
""" """
Then place contains Then place contains exactly
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W99 | place | houses | | W33 | highway | residential |
And place_interpolation contains exactly
| osm_id | type |
| 99 | odd |
When indexing When indexing
Then placex contains exactly Then placex contains exactly
| object | class | type | | object | class | type |
@@ -46,11 +50,13 @@ Feature: Updates of address interpolation objects
n2 Taddr:housenumber=7 n2 Taddr:housenumber=7
w99 Taddr:interpolation=odd Nn1,n2 w99 Taddr:interpolation=odd Nn1,n2
""" """
Then place contains Then place contains exactly
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W99 | place | houses | And place_interpolation contains exactly
| osm_id | type |
| 99 | odd |
When updating osm data When updating osm data
""" """
@@ -60,6 +66,8 @@ Feature: Updates of address interpolation objects
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
And place_interpolation contains exactly
| osm_id |
When indexing When indexing
Then placex contains exactly Then placex contains exactly
| object | class | type | | object | class | type |
@@ -77,21 +85,27 @@ Feature: Updates of address interpolation objects
w33 Thighway=residential Nn1,n2 w33 Thighway=residential Nn1,n2
w99 Thighway=residential Nn1,n2 w99 Thighway=residential Nn1,n2
""" """
Then place contains Then place contains exactly
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W33 | highway | residential |
| W99 | highway | residential | | W99 | highway | residential |
And place_interpolation contains exactly
| osm_id |
When updating osm data When updating osm data
""" """
w99 Taddr:interpolation=odd Nn1,n2 w99 Taddr:interpolation=odd Nn1,n2
""" """
Then place contains Then place contains exactly
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W99 | place | houses | | W33 | highway | residential |
And place_interpolation contains exactly
| osm_id | type |
| 99 | odd |
When indexing When indexing
Then placex contains exactly Then placex contains exactly
| object | class | type | | object | class | type |
@@ -110,11 +124,13 @@ Feature: Updates of address interpolation objects
n2 Taddr:housenumber=17 n2 Taddr:housenumber=17
w99 Taddr:interpolation=odd Nn1,n2 w99 Taddr:interpolation=odd Nn1,n2
""" """
Then place contains Then place contains exactly
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W99 | place | houses | And place_interpolation contains exactly
| osm_id | type |
| 99 | odd |
When updating osm data When updating osm data
""" """
@@ -125,6 +141,8 @@ Feature: Updates of address interpolation objects
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W99 | highway | residential | | W99 | highway | residential |
And place_interpolation contains exactly
| osm_id |
When indexing When indexing
Then placex contains exactly Then placex contains exactly
| object | class | type | | object | class | type |

View File

@@ -112,7 +112,9 @@ Feature: Update of postcode only objects
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W34 | place | houses | And place_interpolation contains exactly
| osm_id | type |
| 34 | odd |
When updating osm data When updating osm data
""" """
@@ -122,9 +124,11 @@ Feature: Update of postcode only objects
| object | class | type | | object | class | type |
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
Then place_postcode contains exactly And place_postcode contains exactly
| object | postcode | | object | postcode |
| W34 | 4456 | | W34 | 4456 |
And place_interpolation contains exactly
| osm_id |
When indexing When indexing
Then location_property_osmline contains exactly Then location_property_osmline contains exactly
| osm_id | | osm_id |
@@ -158,7 +162,9 @@ Feature: Update of postcode only objects
| N1 | place | house | | N1 | place | house |
| N2 | place | house | | N2 | place | house |
| W33 | highway | residential | | W33 | highway | residential |
| W34 | place | houses | And place_interpolation contains exactly
| osm_id | type |
| 34 | odd |
And place_postcode contains exactly And place_postcode contains exactly
| object | | object |
When indexing When indexing

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Collector for BDD import acceptance tests. Collector for BDD import acceptance tests.
@@ -14,6 +14,7 @@ import re
from collections import defaultdict from collections import defaultdict
import psycopg import psycopg
import psycopg.sql as pysql
import pytest import pytest
from pytest_bdd import when, then, given from pytest_bdd import when, then, given
@@ -50,6 +51,34 @@ def _collect_place_ids(conn):
return pids return pids
@pytest.fixture
def row_factory(db_conn):
def _insert_row(table, **data):
columns = []
placeholders = []
values = []
for k, v in data.items():
columns.append(pysql.Identifier(k))
if isinstance(v, tuple):
placeholders.append(pysql.SQL(v[0]))
values.append(v[1])
elif isinstance(v, (pysql.Literal, pysql.SQL)):
placeholders.append(v)
else:
placeholders.append(pysql.Placeholder())
values.append(v)
sql = pysql.SQL("INSERT INTO {table} ({columns}) VALUES({values})")\
.format(table=pysql.Identifier(table),
columns=pysql.SQL(',').join(columns),
values=pysql.SQL(',').join(placeholders))
db_conn.execute(sql, values)
db_conn.commit()
return _insert_row
@pytest.fixture @pytest.fixture
def test_config_env(pytestconfig): def test_config_env(pytestconfig):
dbname = pytestconfig.getini('nominatim_test_db') dbname = pytestconfig.getini('nominatim_test_db')
@@ -85,18 +114,36 @@ def import_places(db_conn, named, datatable, node_grid):
@given(step_parse('the entrances'), target_fixture=None) @given(step_parse('the entrances'), target_fixture=None)
def import_place_entrances(db_conn, datatable, node_grid): def import_place_entrances(row_factory, datatable, node_grid):
""" Insert todo rows into the place_entrance table. """ Insert todo rows into the place_entrance table.
""" """
with db_conn.cursor() as cur:
for row in datatable[1:]: for row in datatable[1:]:
data = PlaceColumn(node_grid).add_row(datatable[0], row, False) data = PlaceColumn(node_grid).add_row(datatable[0], row, False)
assert data.columns['osm_type'] == 'N' assert data.columns['osm_type'] == 'N'
cur.execute("""INSERT INTO place_entrance (osm_id, type, extratags, geometry) params = {'osm_id': data.columns['osm_id'],
VALUES (%s, %s, %s, {})""".format(data.get_wkt()), 'type': data.columns['type'],
(data.columns['osm_id'], data.columns['type'], 'extratags': data.columns.get('extratags'),
data.columns.get('extratags'))) 'geometry': pysql.SQL(data.get_wkt())}
row_factory('place_entrance', **params)
@given(step_parse('the interpolations'), target_fixture=None)
def import_place_interpolations(row_factory, datatable, node_grid):
""" Insert todo rows into the place_entrance table.
"""
for row in datatable[1:]:
data = PlaceColumn(node_grid).add_row(datatable[0], row, False)
assert data.columns['osm_type'] == 'W'
params = {'osm_id': data.columns['osm_id'],
'type': data.columns['type'],
'address': data.columns.get('address'),
'nodes': [int(x) for x in data.columns['nodes'].split(',')],
'geometry': pysql.SQL(data.get_wkt())}
row_factory('place_interpolation', **params)
@given(step_parse('the postcodes'), target_fixture=None) @given(step_parse('the postcodes'), target_fixture=None)
@@ -135,27 +182,24 @@ def import_place_postcode(db_conn, datatable, node_grid):
@given('the ways', target_fixture=None) @given('the ways', target_fixture=None)
def import_ways(db_conn, datatable): def import_ways(row_factory, datatable):
""" Import raw ways into the osm2pgsql way middle table. """ Import raw ways into the osm2pgsql way middle table.
""" """
with db_conn.cursor() as cur:
id_idx = datatable[0].index('id') id_idx = datatable[0].index('id')
node_idx = datatable[0].index('nodes') node_idx = datatable[0].index('nodes')
for line in datatable[1:]: for line in datatable[1:]:
row_factory('planet_osm_ways',
id=line[id_idx],
nodes=[int(x) for x in line[node_idx].split(',')],
tags=psycopg.types.json.Json( tags=psycopg.types.json.Json(
{k[5:]: v for k, v in zip(datatable[0], line) {k[5:]: v for k, v in zip(datatable[0], line)
if k.startswith("tags+")}) if k.startswith("tags+")}))
nodes = [int(x) for x in line[node_idx].split(',')]
cur.execute("INSERT INTO planet_osm_ways (id, nodes, tags) VALUES (%s, %s, %s)",
(line[id_idx], nodes, tags))
@given('the relations', target_fixture=None) @given('the relations', target_fixture=None)
def import_rels(db_conn, datatable): def import_rels(row_factory, datatable):
""" Import raw relations into the osm2pgsql relation middle table. """ Import raw relations into the osm2pgsql relation middle table.
""" """
with db_conn.cursor() as cur:
id_idx = datatable[0].index('id') id_idx = datatable[0].index('id')
memb_idx = datatable[0].index('members') memb_idx = datatable[0].index('members')
for line in datatable[1:]: for line in datatable[1:]:
@@ -170,8 +214,9 @@ def import_rels(db_conn, datatable):
raise ValueError(f'Illegal member {member}.') raise ValueError(f'Illegal member {member}.')
members.append({'ref': int(m[2]), 'role': m[3] or '', 'type': m[1]}) members.append({'ref': int(m[2]), 'role': m[3] or '', 'type': m[1]})
cur.execute('INSERT INTO planet_osm_rels (id, tags, members) VALUES (%s, %s, %s)', row_factory('planet_osm_rels',
(int(line[id_idx]), tags, psycopg.types.json.Json(members))) id=int(line[id_idx]), tags=tags,
members=psycopg.types.json.Json(members))
@when('importing', target_fixture='place_ids') @when('importing', target_fixture='place_ids')
@@ -221,6 +266,28 @@ def update_place_entrances(db_conn, datatable, node_grid):
db_conn.commit() db_conn.commit()
@when('updating interpolations', target_fixture=None)
def update_place_interpolations(db_conn, row_factory, update_config, datatable, node_grid):
""" Update rows in the place_entrance table.
"""
for row in datatable[1:]:
data = PlaceColumn(node_grid).add_row(datatable[0], row, False)
assert data.columns['osm_type'] == 'W'
params = {'osm_id': data.columns['osm_id'],
'type': data.columns['type'],
'address': data.columns.get('address'),
'nodes': [int(x) for x in data.columns['nodes'].split(',')],
'geometry': pysql.SQL(data.get_wkt())}
row_factory('place_interpolation', **params)
db_conn.execute('SELECT flush_deleted_places()')
db_conn.commit()
cli.nominatim(['index', '-q', '--minrank', '30'], update_config.environ)
@when('refreshing postcodes') @when('refreshing postcodes')
def do_postcode_update(update_config): def do_postcode_update(update_config):
""" Recompute the postcode centroids. """ Recompute the postcode centroids.
@@ -237,6 +304,8 @@ def do_delete_place(db_conn, update_config, node_grid, otype, oid):
cur.execute('TRUNCATE place_to_be_deleted') cur.execute('TRUNCATE place_to_be_deleted')
cur.execute('DELETE FROM place WHERE osm_type = %s and osm_id = %s', cur.execute('DELETE FROM place WHERE osm_type = %s and osm_id = %s',
(otype, oid)) (otype, oid))
cur.execute('DELETE FROM place_interpolation WHERE osm_id = %s',
(oid, ))
cur.execute('SELECT flush_deleted_places()') cur.execute('SELECT flush_deleted_places()')
if otype == 'N': if otype == 'N':
cur.execute('DELETE FROM place_entrance WHERE osm_id = %s', cur.execute('DELETE FROM place_entrance WHERE osm_id = %s',

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Helper functions to compare expected values. Helper functions to compare expected values.
@@ -61,6 +61,8 @@ COMPARISON_FUNCS = {
'fm': lambda val, exp: re.fullmatch(exp, val) is not None, 'fm': lambda val, exp: re.fullmatch(exp, val) is not None,
'dict': lambda val, exp: (val is None if exp == '-' 'dict': lambda val, exp: (val is None if exp == '-'
else (val == ast.literal_eval('{' + exp + '}'))), else (val == ast.literal_eval('{' + exp + '}'))),
'ints': lambda val, exp: (val is None if exp == '-'
else (val == [int(i) for i in exp.split(',')])),
'in_box': within_box 'in_box': within_box
} }
@@ -84,6 +86,8 @@ class ResultAttr:
!fm - consider comparison string a regular expression and match full value !fm - consider comparison string a regular expression and match full value
!wkt - convert the expected value to a WKT string before comparing !wkt - convert the expected value to a WKT string before comparing
!in_box - the expected value is a comma-separated bbox description !in_box - the expected value is a comma-separated bbox description
!dict - compare as a dictitionary, member order does not matter
!ints - compare as integer array
""" """
def __init__(self, obj, key, grid=None): def __init__(self, obj, key, grid=None):

View File

@@ -54,7 +54,6 @@ class PlaceColumn:
elif key in ('name', 'address', 'extratags'): elif key in ('name', 'address', 'extratags'):
self.columns[key] = ast.literal_eval('{' + value + '}') self.columns[key] = ast.literal_eval('{' + value + '}')
else: else:
assert key in ('class', 'type'), "Unknown column '{}'.".format(key)
self.columns[key] = None if value == '' else value self.columns[key] = None if value == '' else value
def _set_key_name(self, value): def _set_key_name(self, value):

View File

@@ -12,6 +12,7 @@ For functional tests see BDD test suite.
""" """
import datetime as dt import datetime as dt
import json import json
import xml.etree.ElementTree as ET
import pytest import pytest
@@ -332,3 +333,98 @@ def test_search_details_keywords_address():
assert js['keywords'] == {'address': [{'id': 23, 'token': 'foo'}, assert js['keywords'] == {'address': [{'id': 23, 'token': 'foo'},
{'id': 24, 'token': 'foo'}], {'id': 24, 'token': 'foo'}],
'name': []} 'name': []}
# admin_level injection into extratags
SEARCH_FORMATS = ['json', 'jsonv2', 'geojson', 'geocodejson', 'xml']
@pytest.mark.parametrize('fmt', SEARCH_FORMATS)
def test_search_extratags_boundary_administrative_injects_admin_level(fmt):
search = napi.SearchResult(napi.SourceTable.PLACEX,
('boundary', 'administrative'),
napi.Point(1.0, 2.0),
admin_level=6,
extratags={'place': 'city'})
raw = v1_format.format_result(napi.SearchResults([search]), fmt,
{'extratags': True})
if fmt == 'xml':
root = ET.fromstring(raw)
tags = {tag.attrib['key']: tag.attrib['value']
for tag in root.find('.//extratags').findall('tag')}
assert tags['admin_level'] == '6'
assert tags['place'] == 'city'
else:
result = json.loads(raw)
if fmt == 'geocodejson':
extra = result['features'][0]['properties']['geocoding']['extra']
elif fmt == 'geojson':
extra = result['features'][0]['properties']['extratags']
else:
extra = result[0]['extratags']
assert extra['admin_level'] == '6'
assert extra['place'] == 'city'
@pytest.mark.parametrize('fmt', SEARCH_FORMATS)
def test_search_extratags_non_boundary_no_admin_level_injection(fmt):
search = napi.SearchResult(napi.SourceTable.PLACEX,
('place', 'city'),
napi.Point(1.0, 2.0),
admin_level=8,
extratags={'place': 'city'})
raw = v1_format.format_result(napi.SearchResults([search]), fmt,
{'extratags': True})
if fmt == 'xml':
root = ET.fromstring(raw)
tags = {tag.attrib['key']: tag.attrib['value']
for tag in root.find('.//extratags').findall('tag')}
assert 'admin_level' not in tags
assert tags['place'] == 'city'
else:
result = json.loads(raw)
if fmt == 'geocodejson':
extra = result['features'][0]['properties']['geocoding']['extra']
elif fmt == 'geojson':
extra = result['features'][0]['properties']['extratags']
else:
extra = result[0]['extratags']
assert 'admin_level' not in extra
assert extra['place'] == 'city'
@pytest.mark.parametrize('fmt', SEARCH_FORMATS)
def test_search_extratags_boundary_admin_level_15_no_injection(fmt):
search = napi.SearchResult(napi.SourceTable.PLACEX,
('boundary', 'administrative'),
napi.Point(1.0, 2.0),
admin_level=15,
extratags={'place': 'city'})
raw = v1_format.format_result(napi.SearchResults([search]), fmt,
{'extratags': True})
if fmt == 'xml':
root = ET.fromstring(raw)
tags = {tag.attrib['key']: tag.attrib['value']
for tag in root.find('.//extratags').findall('tag')}
assert 'admin_level' not in tags
assert tags['place'] == 'city'
else:
result = json.loads(raw)
if fmt == 'geocodejson':
extra = result['features'][0]['properties']['geocoding']['extra']
elif fmt == 'geojson':
extra = result['features'][0]['properties']['extratags']
else:
extra = result[0]['extratags']
assert 'admin_level' not in extra
assert extra['place'] == 'city'

View File

@@ -0,0 +1,42 @@
# SPDX-License-Identifier: GPL-2.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2025 by the Nominatim developer community.
# For a full list of authors see the git log.
"""
Tests for index command of the command-line interface wrapper.
"""
import pytest
import nominatim_db.indexer.indexer
class TestCliIndexWithDb:
    """ Tests for the 'index' CLI subcommand running against a database. """

    @pytest.fixture(autouse=True)
    def setup_cli_call(self, cli_call, cli_tokenizer_mock):
        # Stash the CLI entry point and the tokenizer mock on the instance
        # so the tests below can use them.
        self.call_nominatim = cli_call
        self.tokenizer_mock = cli_tokenizer_mock

    def test_index_empty_subset(self, monkeypatch, async_mock_func_factory, placex_row):
        # Two pending rows, both outside the requested rank range 5..10.
        placex_row(rank_address=1, indexed_status=1)
        placex_row(rank_address=20, indexed_status=1)

        indexer_cls = nominatim_db.indexer.indexer.Indexer
        mocks = [async_mock_func_factory(indexer_cls, name)
                 for name in ('index_boundaries', 'index_by_rank', 'index_postcodes')]

        def _fail_on_repeat(*args, **kwargs):
            assert False, "Did not expect multiple Indexer.has_pending invocations"

        # First call goes through to the real has_pending; a second call
        # trips the failure handler above.
        pending_handlers = [indexer_cls.has_pending, _fail_on_repeat]
        monkeypatch.setattr(indexer_cls, 'has_pending',
                            lambda *args, **kwargs: pending_handlers.pop(0)(*args, **kwargs))

        assert self.call_nominatim('index', '--minrank', '5', '--maxrank', '10') == 0

        for mock in mocks:
            assert mock.called == 1, f"Mock '{mock.func_name}' not called"

View File

@@ -200,14 +200,15 @@ def test_get_path_empty(make_config):
assert not config.get_path('TOKENIZER_CONFIG') assert not config.get_path('TOKENIZER_CONFIG')
def test_get_path_absolute(make_config, monkeypatch): def test_get_path_absolute(make_config, monkeypatch, tmp_path):
config = make_config() config = make_config()
monkeypatch.setenv('NOMINATIM_FOOBAR', '/dont/care') p = (tmp_path / "does_not_exist").resolve()
monkeypatch.setenv('NOMINATIM_FOOBAR', str(p))
result = config.get_path('FOOBAR') result = config.get_path('FOOBAR')
assert isinstance(result, Path) assert isinstance(result, Path)
assert str(result) == '/dont/care' assert str(result) == str(p)
def test_get_path_relative(make_config, monkeypatch, tmp_path): def test_get_path_relative(make_config, monkeypatch, tmp_path):

View File

@@ -6,8 +6,12 @@
# For a full list of authors see the git log. # For a full list of authors see the git log.
import itertools import itertools
import sys import sys
import asyncio
from pathlib import Path from pathlib import Path
if sys.platform == 'win32':
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
import psycopg import psycopg
from psycopg import sql as pysql from psycopg import sql as pysql
import pytest import pytest
@@ -145,11 +149,12 @@ def country_row(country_table, temp_db_cursor):
@pytest.fixture @pytest.fixture
def load_sql(temp_db_conn, country_row): def load_sql(temp_db_conn, country_table):
proc = SQLPreprocessor(temp_db_conn, Configuration(None)) conf = Configuration(None)
def _run(filename, **kwargs): def _run(*filename, **kwargs):
proc.run_sql_file(temp_db_conn, filename, **kwargs) for fn in filename:
SQLPreprocessor(temp_db_conn, conf).run_sql_file(temp_db_conn, fn, **kwargs)
return _run return _run
@@ -187,9 +192,9 @@ def place_table(temp_db_with_extensions, table_factory):
type text NOT NULL, type text NOT NULL,
name hstore, name hstore,
admin_level smallint, admin_level smallint,
address hstore, address HSTORE,
extratags hstore, extratags HSTORE,
geometry Geometry(Geometry,4326) NOT NULL""") geometry GEOMETRY(Geometry,4326) NOT NULL""")
@pytest.fixture @pytest.fixture
@@ -218,9 +223,9 @@ def place_postcode_table(temp_db_with_extensions, table_factory):
"""osm_type char(1) NOT NULL, """osm_type char(1) NOT NULL,
osm_id bigint NOT NULL, osm_id bigint NOT NULL,
postcode text NOT NULL, postcode text NOT NULL,
country_code text, country_code TEXT,
centroid Geometry(Point, 4326) NOT NULL, centroid GEOMETRY(Point, 4326) NOT NULL,
geometry Geometry(Geometry, 4326)""") geometry GEOMETRY(Geometry, 4326)""")
@pytest.fixture @pytest.fixture
@@ -241,6 +246,35 @@ def place_postcode_row(place_postcode_table, temp_db_cursor):
return _insert return _insert
@pytest.fixture
def place_interpolation_table(temp_db_with_extensions, table_factory):
""" Create an empty version of the place_interpolation table.
"""
table_factory('place_interpolation',
"""osm_id bigint NOT NULL,
type TEXT,
address HSTORE,
nodes BIGINT[],
geometry GEOMETRY(Geometry, 4326)""")
@pytest.fixture
def place_interpolation_row(place_interpolation_table, temp_db_cursor):
""" A factory for rows in the place_interpolation table. The table is created as a
prerequisite to the fixture.
"""
idseq = itertools.count(30001)
def _insert(osm_id=None, typ='odd', address=None,
nodes=None, geom='LINESTRING(0.1 0.21, 0.1 0.2)'):
params = {'osm_id': osm_id or next(idseq),
'type': typ, 'address': address, 'nodes': nodes,
'geometry': _with_srid(geom)}
temp_db_cursor.insert_row('place_interpolation', **params)
return _insert
@pytest.fixture @pytest.fixture
def placex_table(temp_db_with_extensions, temp_db_conn, load_sql, place_table): def placex_table(temp_db_with_extensions, temp_db_conn, load_sql, place_table):
""" Create an empty version of the placex table. """ Create an empty version of the placex table.
@@ -259,13 +293,14 @@ def placex_row(placex_table, temp_db_cursor):
def _add(osm_type='N', osm_id=None, cls='amenity', typ='cafe', names=None, def _add(osm_type='N', osm_id=None, cls='amenity', typ='cafe', names=None,
admin_level=None, address=None, extratags=None, geom='POINT(10 4)', admin_level=None, address=None, extratags=None, geom='POINT(10 4)',
country=None, housenumber=None, rank_search=30, rank_address=30, country=None, housenumber=None, rank_search=30, rank_address=30,
centroid='POINT(10 4)', indexed_status=0, indexed_date=None): centroid='POINT(10 4)', indexed_status=0, indexed_date=None,
importance=0.00001):
args = {'place_id': pysql.SQL("nextval('seq_place')"), args = {'place_id': pysql.SQL("nextval('seq_place')"),
'osm_type': osm_type, 'osm_id': osm_id or next(idseq), 'osm_type': osm_type, 'osm_id': osm_id or next(idseq),
'class': cls, 'type': typ, 'name': names, 'admin_level': admin_level, 'class': cls, 'type': typ, 'name': names, 'admin_level': admin_level,
'address': address, 'housenumber': housenumber, 'address': address, 'housenumber': housenumber,
'rank_search': rank_search, 'rank_address': rank_address, 'rank_search': rank_search, 'rank_address': rank_address,
'extratags': extratags, 'extratags': extratags, 'importance': importance,
'centroid': _with_srid(centroid), 'geometry': _with_srid(geom), 'centroid': _with_srid(centroid), 'geometry': _with_srid(geom),
'country_code': country, 'country_code': country,
'indexed_status': indexed_status, 'indexed_date': indexed_date, 'indexed_status': indexed_status, 'indexed_date': indexed_date,

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Tests for the sanitizer that normalizes housenumbers. Tests for the sanitizer that normalizes housenumbers.
@@ -67,3 +67,25 @@ def test_convert_to_name_unconverted(def_config, number):
assert 'housenumber' not in set(p.kind for p in names) assert 'housenumber' not in set(p.kind for p in names)
assert ('housenumber', number) in set((p.kind, p.name) for p in address) assert ('housenumber', number) in set((p.kind, p.name) for p in address)
@pytest.mark.parametrize('hnr,itype,out', [
('1-5', 'all', (1, 2, 3, 4, 5)),
('1-5', 'odd', (1, 3, 5)),
('1-5', 'even', (2, 4)),
('6-9', '1', (6, 7, 8, 9)),
('6-9', '2', (6, 8)),
('6-9', '3', (6, 9)),
('6-9', '5', (6,)),
('6-9', 'odd', (7, 9)),
('6-9', 'even', (6, 8)),
('6-22', 'even', (6, 8, 10, 12, 14, 16, 18, 20, 22))
])
def test_convert_interpolations(sanitize, hnr, itype, out):
assert set(sanitize(housenumber=hnr, interpolation=itype)) \
== {('housenumber', str(i)) for i in out}
@pytest.mark.parametrize('hnr', ('23', '23-', '3z-f', '1-10', '5-1', '1-4-5'))
def test_ignore_interpolation_with_bad_housenumber(sanitize, hnr):
assert sanitize(housenumber=hnr, interpolation='all') == [('housenumber', hnr)]

View File

@@ -76,8 +76,8 @@ def test_analyse_indexing_with_osm_id(project_env, placex_row):
class TestAdminCleanDeleted: class TestAdminCleanDeleted:
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def setup_polygon_delete(self, project_env, table_factory, place_table, placex_row, def setup_polygon_delete(self, project_env, table_factory, place_interpolation_table,
osmline_table, temp_db_cursor, load_sql): placex_row, osmline_table, temp_db_cursor, load_sql):
""" Set up place_force_delete function and related tables """ Set up place_force_delete function and related tables
""" """
self.project_env = project_env self.project_env = project_env
@@ -106,16 +106,24 @@ class TestAdminCleanDeleted:
class TEXT NOT NULL, class TEXT NOT NULL,
type TEXT NOT NULL, type TEXT NOT NULL,
deferred BOOLEAN""") deferred BOOLEAN""")
table_factory('place_interpolation_to_be_deleted',
"""osm_id BIGINT,
osm_type CHAR(1)""")
table_factory('import_polygon_error', """osm_id BIGINT, table_factory('import_polygon_error', """osm_id BIGINT,
osm_type CHAR(1), osm_type CHAR(1),
class TEXT NOT NULL, class TEXT NOT NULL,
type TEXT NOT NULL""") type TEXT NOT NULL""")
temp_db_cursor.execute("""CREATE OR REPLACE FUNCTION place_delete() temp_db_cursor.execute("""
RETURNS TRIGGER AS $$ CREATE OR REPLACE FUNCTION place_delete() RETURNS TRIGGER AS $$
BEGIN RETURN NULL; END; BEGIN RETURN NULL; END;
$$ LANGUAGE plpgsql;""") $$ LANGUAGE plpgsql;
temp_db_cursor.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place
FOR EACH ROW EXECUTE PROCEDURE place_delete();""") CREATE TRIGGER place_before_delete BEFORE DELETE ON place
FOR EACH ROW EXECUTE PROCEDURE place_delete();
CREATE TRIGGER place_interpolation_before_delete BEFORE DELETE ON place_interpolation
FOR EACH ROW EXECUTE PROCEDURE place_delete();
""")
load_sql('functions/utils.sql') load_sql('functions/utils.sql')
def test_admin_clean_deleted_no_records(self): def test_admin_clean_deleted_no_records(self):

View File

@@ -2,7 +2,7 @@
# #
# This file is part of Nominatim. (https://nominatim.org) # This file is part of Nominatim. (https://nominatim.org)
# #
# Copyright (C) 2025 by the Nominatim developer community. # Copyright (C) 2026 by the Nominatim developer community.
# For a full list of authors see the git log. # For a full list of authors see the git log.
""" """
Tests for database integrity checks. Tests for database integrity checks.
@@ -46,8 +46,7 @@ def test_check_database_version_bad(property_table, temp_db_conn, def_config):
assert chkdb.check_database_version(temp_db_conn, def_config) == chkdb.CheckState.FATAL assert chkdb.check_database_version(temp_db_conn, def_config) == chkdb.CheckState.FATAL
def test_check_placex_table_good(table_factory, temp_db_conn, def_config): def test_check_placex_table_good(placex_table, temp_db_conn, def_config):
table_factory('placex')
assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.OK assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.OK
@@ -55,13 +54,13 @@ def test_check_placex_table_bad(temp_db_conn, def_config):
assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.FATAL assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.FATAL
def test_check_placex_table_size_good(table_factory, temp_db_conn, def_config): def test_check_placex_table_size_good(placex_row, temp_db_conn, def_config):
table_factory('placex', content=((1, ), (2, ))) for _ in range(2):
placex_row()
assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.OK assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.OK
def test_check_placex_table_size_bad(table_factory, temp_db_conn, def_config): def test_check_placex_table_size_bad(placex_table, temp_db_conn, def_config):
table_factory('placex')
assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.FATAL assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.FATAL
@@ -84,15 +83,22 @@ def test_check_tokenizer(temp_db_conn, def_config, monkeypatch,
assert chkdb.check_tokenizer(temp_db_conn, def_config) == state assert chkdb.check_tokenizer(temp_db_conn, def_config) == state
def test_check_indexing_good(table_factory, temp_db_conn, def_config): def test_check_indexing_good(placex_row, temp_db_conn, def_config):
table_factory('placex', 'place_id int, indexed_status smallint', for _ in range(2):
content=((1, 0), (2, 0))) placex_row(indexed_status=0)
assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.OK assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.OK
def test_check_indexing_bad(table_factory, temp_db_conn, def_config): def test_check_indexing_bad(placex_row, temp_db_conn, def_config):
table_factory('placex', 'place_id int, indexed_status smallint', for status in (0, 2):
content=((1, 0), (2, 2))) placex_row(indexed_status=status)
assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.FAIL
def test_check_indexing_bad_frozen(placex_row, temp_db_conn, def_config):
for status in (0, 2):
placex_row(indexed_status=status)
temp_db_conn.execute('DROP TABLE place')
assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.WARN assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.WARN

View File

@@ -78,8 +78,8 @@ def test_setup_skeleton_already_exists(temp_db):
database_import.setup_database_skeleton(f'dbname={temp_db}') database_import.setup_database_skeleton(f'dbname={temp_db}')
def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd): def test_import_osm_data_simple(place_row, osm2pgsql_options, capfd):
table_factory('place', content=((1, ), )) place_row()
database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options) database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
captured = capfd.readouterr() captured = capfd.readouterr()
@@ -92,8 +92,8 @@ def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd):
assert 'file.pbf' in captured.out assert 'file.pbf' in captured.out
def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, capfd): def test_import_osm_data_multifile(place_row, tmp_path, osm2pgsql_options, capfd):
table_factory('place', content=((1, ), )) place_row()
osm2pgsql_options['osm2pgsql_cache'] = 0 osm2pgsql_options['osm2pgsql_cache'] = 0
files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm'] files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm']
@@ -107,22 +107,19 @@ def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, c
assert 'file2.osm' in captured.out assert 'file2.osm' in captured.out
def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options): def test_import_osm_data_simple_no_data(place_row, osm2pgsql_options):
table_factory('place')
with pytest.raises(UsageError, match='No data imported'): with pytest.raises(UsageError, match='No data imported'):
database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options) database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
def test_import_osm_data_simple_ignore_no_data(table_factory, osm2pgsql_options): def test_import_osm_data_simple_ignore_no_data(place_table, osm2pgsql_options):
table_factory('place')
database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options, database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options,
ignore_errors=True) ignore_errors=True)
def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options): def test_import_osm_data_drop(place_row, table_factory, temp_db_cursor,
table_factory('place', content=((1, ), )) tmp_path, osm2pgsql_options):
place_row()
table_factory('planet_osm_nodes') table_factory('planet_osm_nodes')
flatfile = tmp_path / 'flatfile' flatfile = tmp_path / 'flatfile'
@@ -136,8 +133,8 @@ def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql
assert not temp_db_cursor.table_exists('planet_osm_nodes') assert not temp_db_cursor.table_exists('planet_osm_nodes')
def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd): def test_import_osm_data_default_cache(place_row, osm2pgsql_options, capfd):
table_factory('place', content=((1, ), )) place_row()
osm2pgsql_options['osm2pgsql_cache'] = 0 osm2pgsql_options['osm2pgsql_cache'] = 0
@@ -168,18 +165,18 @@ def test_truncate_database_tables(temp_db_conn, temp_db_cursor, table_factory, w
@pytest.mark.parametrize("threads", (1, 5)) @pytest.mark.parametrize("threads", (1, 5))
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_load_data(dsn, place_row, placex_table, osmline_table, async def test_load_data(dsn, place_row, place_interpolation_row, placex_table, osmline_table,
temp_db_cursor, threads): temp_db_cursor, threads):
for oid in range(100, 130): for oid in range(100, 130):
place_row(osm_id=oid) place_row(osm_id=oid)
place_row(osm_type='W', osm_id=342, cls='place', typ='houses', place_interpolation_row(osm_id=342, typ='odd', geom='LINESTRING(0 0, 10 10)')
geom='LINESTRING(0 0, 10 10)')
temp_db_cursor.execute(""" temp_db_cursor.execute("""
CREATE OR REPLACE FUNCTION placex_insert() RETURNS TRIGGER AS $$ CREATE OR REPLACE FUNCTION placex_insert() RETURNS TRIGGER AS $$
BEGIN BEGIN
NEW.place_id := nextval('seq_place'); NEW.place_id := nextval('seq_place');
NEW.indexed_status := 1; NEW.indexed_status := 1;
NEW.importance := 0.00001;
NEW.centroid := ST_Centroid(NEW.geometry); NEW.centroid := ST_Centroid(NEW.geometry);
NEW.partition := 0; NEW.partition := 0;
NEW.geometry_sector := 2424; NEW.geometry_sector := 2424;
@@ -215,52 +212,53 @@ async def test_load_data(dsn, place_row, placex_table, osmline_table,
class TestSetupSQL: class TestSetupSQL:
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def init_env(self, temp_db, tmp_path, def_config, sql_preprocessor_cfg): def osm2ppsql_skel(self, def_config, temp_db_with_extensions, place_row,
def_config.lib_dir.sql = tmp_path / 'sql' country_table, table_factory, temp_db_conn):
def_config.lib_dir.sql.mkdir()
self.config = def_config self.config = def_config
place_row()
table_factory('osm2pgsql_properties', 'property TEXT, value TEXT',
(('db_format', 2),))
def write_sql(self, fname, content): table_factory('planet_osm_rels', 'id BIGINT, members JSONB, tags JSONB')
(self.config.lib_dir.sql / fname).write_text(content, encoding='utf-8') temp_db_conn.execute("""
CREATE OR REPLACE FUNCTION planet_osm_member_ids(jsonb, character)
RETURNS bigint[] AS $$
SELECT array_agg((el->>'ref')::int8)
FROM jsonb_array_elements($1) AS el WHERE el->>'type' = $2
$$ LANGUAGE sql IMMUTABLE;
""")
@pytest.mark.parametrize("reverse", [True, False]) @pytest.mark.parametrize("reverse", [True, False])
def test_create_tables(self, temp_db_conn, temp_db_cursor, reverse): def test_create_tables(self, table_factory, temp_db_conn, temp_db_cursor, reverse):
self.write_sql('tables.sql', table_factory('country_osm_grid')
"""CREATE FUNCTION test() RETURNS bool
AS $$ SELECT {{db.reverse_only}} $$ LANGUAGE SQL""")
self.write_sql('grants.sql', "-- Mock grants file for testing\n")
database_import.create_tables(temp_db_conn, self.config, reverse) database_import.create_tables(temp_db_conn, self.config, reverse)
temp_db_cursor.scalar('SELECT test()') == reverse assert temp_db_cursor.table_exists('placex')
assert not reverse == temp_db_cursor.table_exists('search_name')
def test_create_table_triggers(self, temp_db_conn, temp_db_cursor): def test_create_table_triggers(self, temp_db_conn, placex_table, osmline_table,
self.write_sql('table-triggers.sql', place_interpolation_table, postcode_table, load_sql):
"""CREATE FUNCTION test() RETURNS TEXT load_sql('functions.sql')
AS $$ SELECT 'a'::text $$ LANGUAGE SQL""")
database_import.create_table_triggers(temp_db_conn, self.config) database_import.create_table_triggers(temp_db_conn, self.config)
temp_db_cursor.scalar('SELECT test()') == 'a' def test_create_partition_tables(self, country_row, temp_db_conn, temp_db_cursor, load_sql):
for i in range(3):
def test_create_partition_tables(self, temp_db_conn, temp_db_cursor): country_row(partition=i)
self.write_sql('partition-tables.src.sql', load_sql('tables/location_area.sql')
"""CREATE FUNCTION test() RETURNS TEXT
AS $$ SELECT 'b'::text $$ LANGUAGE SQL""")
database_import.create_partition_tables(temp_db_conn, self.config) database_import.create_partition_tables(temp_db_conn, self.config)
temp_db_cursor.scalar('SELECT test()') == 'b' for i in range(3):
assert temp_db_cursor.table_exists(f"location_area_large_{i}")
assert temp_db_cursor.table_exists(f"search_name_{i}")
@pytest.mark.parametrize("drop", [True, False]) @pytest.mark.parametrize("drop", [True, False])
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop): async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop, load_sql):
self.write_sql('indices.sql', load_sql('tables.sql', 'functions/ranking.sql')
"""CREATE FUNCTION test() RETURNS bool
AS $$ SELECT {{drop}} $$ LANGUAGE SQL""")
await database_import.create_search_indices(temp_db_conn, self.config, drop) await database_import.create_search_indices(temp_db_conn, self.config, drop)
temp_db_cursor.scalar('SELECT test()') == drop assert temp_db_cursor.index_exists('placex', 'idx_placex_geometry')
assert not drop == temp_db_cursor.index_exists('placex', 'idx_placex_geometry_buildings')

View File

@@ -11,73 +11,14 @@ import subprocess
import pytest import pytest
from psycopg.rows import tuple_row
from nominatim_db.tools import postcodes from nominatim_db.tools import postcodes
from nominatim_db.data import country_info from nominatim_db.data import country_info
from nominatim_db.db.sql_preprocessor import SQLPreprocessor
import dummy_tokenizer import dummy_tokenizer
class MockPostcodeTable:
""" A location_postcodes table for testing.
"""
def __init__(self, conn, config):
self.conn = conn
SQLPreprocessor(conn, config).run_sql_file(conn, 'functions/postcode_triggers.sql')
with conn.cursor() as cur:
cur.execute("""CREATE TABLE location_postcodes (
place_id BIGINT,
osm_id BIGINT,
parent_place_id BIGINT,
rank_search SMALLINT,
indexed_status SMALLINT,
indexed_date TIMESTAMP,
country_code varchar(2),
postcode TEXT,
geometry GEOMETRY(Geometry, 4326),
centroid GEOMETRY(Point, 4326))""")
cur.execute("""CREATE OR REPLACE FUNCTION token_normalized_postcode(postcode TEXT)
RETURNS TEXT AS $$ BEGIN RETURN postcode; END; $$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION get_country_code(place geometry)
RETURNS TEXT AS $$ BEGIN
RETURN null;
END; $$ LANGUAGE plpgsql;
""")
cur.execute("""CREATE OR REPLACE FUNCTION expand_by_meters(geom GEOMETRY, meters FLOAT)
RETURNS GEOMETRY AS $$
SELECT ST_Envelope(ST_Buffer(geom::geography, meters, 1)::geometry)
$$ LANGUAGE sql;""")
conn.commit()
def add(self, country, postcode, x, y):
with self.conn.cursor() as cur:
cur.execute(
"""INSERT INTO location_postcodes
(place_id, indexed_status, country_code, postcode, centroid, geometry)
VALUES (nextval('seq_place'), 1, %(cc)s, %(pc)s,
ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326),
ST_Expand(ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326), 0.005))""",
{'cc': country, 'pc': postcode, 'x': x, 'y': y})
self.conn.commit()
@property
def row_set(self):
with self.conn.cursor() as cur:
cur.execute("""SELECT osm_id, country_code, postcode,
ST_X(centroid), ST_Y(centroid)
FROM location_postcodes""")
return set((tuple(row) for row in cur))
@pytest.fixture
def postcode_table(def_config, temp_db_conn, placex_table, table_factory):
country_info.setup_country_config(def_config)
return MockPostcodeTable(temp_db_conn, def_config)
@pytest.fixture @pytest.fixture
def insert_implicit_postcode(placex_row, place_postcode_row): def insert_implicit_postcode(placex_row, place_postcode_row):
""" Insert data into the placex and place table """ Insert data into the placex and place table
@@ -86,11 +27,11 @@ def insert_implicit_postcode(placex_row, place_postcode_row):
def _insert_implicit_postcode(osm_id, country, geometry, postcode, in_placex=False): def _insert_implicit_postcode(osm_id, country, geometry, postcode, in_placex=False):
if in_placex: if in_placex:
placex_row(osm_id=osm_id, country=country, geom=geometry, placex_row(osm_id=osm_id, country=country, geom=geometry,
centroid=geometry, address={'postcode': postcode}) centroid=geometry,
address={'postcode': postcode})
else: else:
place_postcode_row(osm_id=osm_id, centroid=geometry, place_postcode_row(osm_id=osm_id, centroid=geometry,
country=country, postcode=postcode) country=country, postcode=postcode)
return _insert_implicit_postcode return _insert_implicit_postcode
@@ -103,7 +44,6 @@ def insert_postcode_area(place_postcode_row):
place_postcode_row(osm_type='R', osm_id=osm_id, postcode=postcode, country=country, place_postcode_row(osm_type='R', osm_id=osm_id, postcode=postcode, country=country,
centroid=f"POINT({x} {y})", centroid=f"POINT({x} {y})",
geom=f"POLYGON(({x1} {y1}, {x1} {y2}, {x2} {y2}, {x2} {y1}, {x1} {y1}))") geom=f"POLYGON(({x1} {y1}, {x1} {y2}, {x2} {y2}, {x2} {y1}, {x1} {y1}))")
return _do return _do
@@ -123,106 +63,126 @@ def postcode_update(dsn, temp_db_conn):
BEFORE INSERT ON location_postcodes BEFORE INSERT ON location_postcodes
FOR EACH ROW EXECUTE PROCEDURE postcodes_insert()""") FOR EACH ROW EXECUTE PROCEDURE postcodes_insert()""")
temp_db_conn.commit() temp_db_conn.commit()
postcodes.update_postcodes(dsn, data_path, tokenizer) postcodes.update_postcodes(dsn, data_path, tokenizer)
return _do return _do
def test_postcodes_empty(postcode_update, postcode_table, place_postcode_table): class TestPostcodes:
@pytest.fixture(autouse=True)
def setup(self, def_config, postcode_table, placex_table, place_postcode_table,
load_sql, temp_db_conn):
self.conn = temp_db_conn
country_info.setup_country_config(def_config)
load_sql('functions/postcode_triggers.sql')
temp_db_conn.execute("""
CREATE OR REPLACE FUNCTION token_normalized_postcode(postcode TEXT)
RETURNS TEXT AS $$
SELECT postcode
$$ LANGUAGE sql;
CREATE OR REPLACE FUNCTION get_country_code(place geometry)
RETURNS TEXT AS $$
SELECT NULL
$$ LANGUAGE sql;
CREATE OR REPLACE FUNCTION expand_by_meters(geom GEOMETRY, meters FLOAT)
RETURNS GEOMETRY AS $$
SELECT ST_Envelope(ST_Buffer(geom::geography, meters, 1)::geometry)
$$ LANGUAGE sql;
""")
@property
def row_set(self):
with self.conn.cursor(row_factory=tuple_row) as cur:
cur.execute("""SELECT osm_id, country_code, postcode,
ST_X(centroid), ST_Y(centroid)
FROM location_postcodes""")
return {r for r in cur}
def test_postcodes_empty(self, postcode_update):
postcode_update() postcode_update()
assert not postcode_table.row_set assert not self.row_set
@pytest.mark.parametrize('in_placex', [True, False]) @pytest.mark.parametrize('in_placex', [True, False])
def test_postcodes_add_new_point(postcode_update, postcode_table, def test_postcodes_add_new_point(self, postcode_update, postcode_row,
insert_implicit_postcode, in_placex): insert_implicit_postcode, in_placex):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '9486', in_placex) insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '9486', in_placex)
postcode_table.add('yy', '9486', 99, 34) postcode_row('yy', '9486', 99, 34)
postcode_update() postcode_update()
assert postcode_table.row_set == {(None, 'xx', '9486', 10, 12), } assert self.row_set == {(None, 'xx', '9486', 10, 12), }
def test_postcodes_add_new_area(self, postcode_update, insert_postcode_area):
def test_postcodes_add_new_area(postcode_update, insert_postcode_area, postcode_table):
insert_postcode_area(345, 'de', '10445', 23.5, 46.2) insert_postcode_area(345, 'de', '10445', 23.5, 46.2)
postcode_update() postcode_update()
assert postcode_table.row_set == {(345, 'de', '10445', 23.5, 46.2)} assert self.row_set == {(345, 'de', '10445', 23.5, 46.2)}
@pytest.mark.parametrize('in_placex', [True, False]) @pytest.mark.parametrize('in_placex', [True, False])
def test_postcodes_add_area_and_point(postcode_update, insert_postcode_area, def test_postcodes_add_area_and_point(self, postcode_update, insert_postcode_area,
insert_implicit_postcode, postcode_table, in_placex): insert_implicit_postcode, in_placex):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '10445', in_placex) insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '10445', in_placex)
insert_postcode_area(345, 'xx', '10445', 23.5, 46.2) insert_postcode_area(345, 'xx', '10445', 23.5, 46.2)
postcode_update() postcode_update()
assert postcode_table.row_set == {(345, 'xx', '10445', 23.5, 46.2)} assert self.row_set == {(345, 'xx', '10445', 23.5, 46.2)}
@pytest.mark.parametrize('in_placex', [True, False]) @pytest.mark.parametrize('in_placex', [True, False])
def test_postcodes_add_point_within_area(postcode_update, insert_postcode_area, def test_postcodes_add_point_within_area(self, postcode_update, insert_postcode_area,
insert_implicit_postcode, postcode_table, in_placex): insert_implicit_postcode, in_placex):
insert_implicit_postcode(1, 'xx', 'POINT(23.5 46.2)', '10446', in_placex) insert_implicit_postcode(1, 'xx', 'POINT(23.5 46.2)', '10446', in_placex)
insert_postcode_area(345, 'xx', '10445', 23.5, 46.2) insert_postcode_area(345, 'xx', '10445', 23.5, 46.2)
postcode_update() postcode_update()
assert postcode_table.row_set == {(345, 'xx', '10445', 23.5, 46.2)} assert self.row_set == {(345, 'xx', '10445', 23.5, 46.2)}
@pytest.mark.parametrize('coords', [(99, 34), (10, 34), (99, 12), @pytest.mark.parametrize('coords', [(99, 34), (10, 34), (99, 12),
(9, 34), (9, 11), (23, 11)]) (9, 34), (9, 11), (23, 11)])
def test_postcodes_replace_coordinates(postcode_update, postcode_table, tmp_path, def test_postcodes_replace_coordinates(self, postcode_update, postcode_row, tmp_path,
insert_implicit_postcode, coords): insert_implicit_postcode, coords):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511') insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
postcode_table.add('xx', 'AB 4511', *coords) postcode_row('xx', 'AB 4511', *coords)
postcode_update(tmp_path) postcode_update(tmp_path)
assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12)} assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12)}
def test_postcodes_replace_coordinates_close(self, postcode_update, postcode_row,
def test_postcodes_replace_coordinates_close(postcode_update, postcode_table,
insert_implicit_postcode): insert_implicit_postcode):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511') insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
postcode_table.add('xx', 'AB 4511', 10, 11.99999999) postcode_row('xx', 'AB 4511', 10, 11.99999999)
postcode_update() postcode_update()
assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 11.99999999)} assert self.row_set == {(None, 'xx', 'AB 4511', 10, 11.99999999)}
def test_postcodes_remove_point(self, postcode_update, postcode_row,
def test_postcodes_remove_point(postcode_update, postcode_table,
insert_implicit_postcode): insert_implicit_postcode):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511') insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
postcode_table.add('xx', 'badname', 10, 12) postcode_row('xx', 'badname', 10, 12)
postcode_update() postcode_update()
assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12)} assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12)}
def test_postcodes_ignore_empty_country(self, postcode_update, insert_implicit_postcode):
def test_postcodes_ignore_empty_country(postcode_update, postcode_table,
insert_implicit_postcode):
insert_implicit_postcode(1, None, 'POINT(10 12)', 'AB 4511') insert_implicit_postcode(1, None, 'POINT(10 12)', 'AB 4511')
postcode_update() postcode_update()
assert not postcode_table.row_set assert not self.row_set
def test_postcodes_remove_all(self, postcode_update, postcode_row, place_postcode_table):
def test_postcodes_remove_all(postcode_update, postcode_table, place_postcode_table): postcode_row('ch', '5613', 10, 12)
postcode_table.add('ch', '5613', 10, 12)
postcode_update() postcode_update()
assert not postcode_table.row_set assert not self.row_set
def test_postcodes_multi_country(self, postcode_update,
def test_postcodes_multi_country(postcode_update, postcode_table,
insert_implicit_postcode): insert_implicit_postcode):
insert_implicit_postcode(1, 'de', 'POINT(10 12)', '54451') insert_implicit_postcode(1, 'de', 'POINT(10 12)', '54451')
insert_implicit_postcode(2, 'cc', 'POINT(100 56)', 'DD23 T') insert_implicit_postcode(2, 'cc', 'POINT(100 56)', 'DD23 T')
@@ -231,14 +191,13 @@ def test_postcodes_multi_country(postcode_update, postcode_table,
postcode_update() postcode_update()
assert postcode_table.row_set == {(None, 'de', '54451', 10, 12), assert self.row_set == {(None, 'de', '54451', 10, 12),
(None, 'de', '54452', 10.3, 11.0), (None, 'de', '54452', 10.3, 11.0),
(None, 'cc', '54452', 10.3, 11.0), (None, 'cc', '54452', 10.3, 11.0),
(None, 'cc', 'DD23 T', 100, 56)} (None, 'cc', 'DD23 T', 100, 56)}
@pytest.mark.parametrize("gzipped", [True, False]) @pytest.mark.parametrize("gzipped", [True, False])
def test_postcodes_extern(postcode_update, postcode_table, tmp_path, def test_postcodes_extern(self, postcode_update, tmp_path,
insert_implicit_postcode, gzipped): insert_implicit_postcode, gzipped):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511') insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
@@ -251,11 +210,10 @@ def test_postcodes_extern(postcode_update, postcode_table, tmp_path,
postcode_update(tmp_path) postcode_update(tmp_path)
assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12), assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12),
(None, 'xx', 'CD 4511', -10, -5)} (None, 'xx', 'CD 4511', -10, -5)}
def test_postcodes_extern_bad_column(self, postcode_update, tmp_path,
def test_postcodes_extern_bad_column(postcode_update, postcode_table, tmp_path,
insert_implicit_postcode): insert_implicit_postcode):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511') insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
@@ -264,11 +222,10 @@ def test_postcodes_extern_bad_column(postcode_update, postcode_table, tmp_path,
postcode_update(tmp_path) postcode_update(tmp_path)
assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12)} assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12)}
def test_postcodes_extern_bad_number(self, postcode_update, insert_implicit_postcode,
def test_postcodes_extern_bad_number(postcode_update, insert_implicit_postcode, tmp_path):
postcode_table, tmp_path):
insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511') insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
extfile = tmp_path / 'xx_postcodes.csv' extfile = tmp_path / 'xx_postcodes.csv'
@@ -277,32 +234,27 @@ def test_postcodes_extern_bad_number(postcode_update, insert_implicit_postcode,
postcode_update(tmp_path) postcode_update(tmp_path)
assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12), assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12),
(None, 'xx', 'CD 4511', -10, -5)} (None, 'xx', 'CD 4511', -10, -5)}
def test_no_placex_entry(self, postcode_update, temp_db_cursor, place_postcode_row):
# Rewrite the get_country_code function to verify its execution.
temp_db_cursor.execute("""
CREATE OR REPLACE FUNCTION get_country_code(place geometry) RETURNS TEXT AS $$
SELECT 'yy' $$ LANGUAGE sql""")
place_postcode_row(centroid='POINT(10 12)', postcode='AB 4511')
postcode_update()
assert self.row_set == {(None, 'yy', 'AB 4511', 10, 12)}
def test_discard_badly_formatted_postcodes(self, postcode_update, place_postcode_row):
place_postcode_row(centroid='POINT(10 12)', country='fr', postcode='AB 4511')
postcode_update()
assert not self.row_set
def test_can_compute(dsn, table_factory): def test_can_compute(dsn, table_factory):
assert not postcodes.can_compute(dsn) assert not postcodes.can_compute(dsn)
table_factory('place_postcode') table_factory('place_postcode')
assert postcodes.can_compute(dsn) assert postcodes.can_compute(dsn)
def test_no_placex_entry(postcode_update, temp_db_cursor, place_postcode_row, postcode_table):
# Rewrite the get_country_code function to verify its execution.
temp_db_cursor.execute("""
CREATE OR REPLACE FUNCTION get_country_code(place geometry)
RETURNS TEXT AS $$ BEGIN
RETURN 'yy';
END; $$ LANGUAGE plpgsql;
""")
place_postcode_row(centroid='POINT(10 12)', postcode='AB 4511')
postcode_update()
assert postcode_table.row_set == {(None, 'yy', 'AB 4511', 10, 12)}
def test_discard_badly_formatted_postcodes(postcode_update, place_postcode_row, postcode_table):
place_postcode_row(centroid='POINT(10 12)', country='fr', postcode='AB 4511')
postcode_update()
assert not postcode_table.row_set