Mirror of https://github.com/osm-search/Nominatim.git (synced 2026-02-16 15:47:58 +00:00)
Compare commits: 2ddb19c0b0 ... master (13 commits)

| SHA1 |
|---|
| fbe0be9301 |
| 0249cd54da |
| 52b5337f36 |
| 53e8334206 |
| c31abf58d0 |
| d0bd42298e |
| d1b0bcaea7 |
| c3e8fa8c43 |
| 24ba9651ba |
| bf5ef0140a |
| 238f3dd1d9 |
| abd7c302f8 |
| 2197236872 |
.github/actions/setup-postgresql-windows/action.yml (vendored, new file, 95 lines)

@@ -0,0 +1,95 @@
```yaml
name: 'Setup Postgresql and Postgis on Windows'

description: 'Installs PostgreSQL and PostGIS for Windows and configures it for CI tests'

inputs:
  postgresql-version:
    description: 'Version of PostgreSQL to install'
    required: true

runs:
  using: "composite"

  steps:
    - name: Set up PostgreSQL variables
      shell: pwsh
      run: |
        $version = "${{ inputs.postgresql-version }}"
        $root = "C:\Program Files\PostgreSQL\$version"
        $bin = "$root\bin"

        echo "PGROOT=$root" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
        echo "PGBIN=$bin" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append

        echo "$bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append

    - name: Decide Postgis version (Windows)
      id: postgis-ver
      shell: pwsh
      run: |
        # Use a subexpression so the version is actually interpolated into the string.
        echo "PowerShell version: $($PSVersionTable.PSVersion)"
        $PG_VERSION = Split-Path $env:PGROOT -Leaf
        $postgis_page = "https://download.osgeo.org/postgis/windows/pg$PG_VERSION"
        echo "Detecting PostGIS version from $postgis_page for PostgreSQL $PG_VERSION"
        $pgis_bundle = (Invoke-WebRequest -Uri $postgis_page -ErrorAction Stop).Links.Where({$_.href -match "^postgis.*zip$"}).href
        if (!$pgis_bundle) {
          Write-Error "Could not find latest PostGIS version in $postgis_page that would match ^postgis.*zip$ pattern"
          exit 1
        }
        $pgis_bundle = [IO.Path]::ChangeExtension($pgis_bundle, [NullString]::Value)
        $pgis_bundle_url = "$postgis_page/$pgis_bundle.zip"
        Add-Content $env:GITHUB_OUTPUT "postgis_file=$pgis_bundle"
        Add-Content $env:GITHUB_OUTPUT "postgis_bundle_url=$pgis_bundle_url"

    - uses: actions/cache@v4
      with:
        path: |
          C:/postgis.zip
        key: postgis-cache-${{ steps.postgis-ver.outputs.postgis_file }}

    - name: Download postgis
      shell: pwsh
      run: |
        if (!(Test-Path "C:\postgis.zip")){(new-object net.webclient).DownloadFile($env:PGIS_BUNDLE_URL, "c:\postgis.zip")}
        if (Test-path "c:\postgis_archive"){Remove-Item "c:\postgis_archive" -Recurse -Force}
        7z x c:\postgis.zip -oc:\postgis_archive
      env:
        PGIS_BUNDLE_URL: ${{ steps.postgis-ver.outputs.postgis_bundle_url }}

    - name: Install postgis
      shell: bash
      run: |
        echo "Root: $PGROOT, Bin: $PGBIN"
        cp -r c:/postgis_archive/postgis-bundle-*/* "$PGROOT"

    - name: Start PostgreSQL on Windows
      run: |
        $pgService = Get-Service -Name postgresql*
        Set-Service -InputObject $pgService -Status running -StartupType automatic
        Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru
      shell: pwsh

    - name: Adapt postgresql configuration
      shell: pwsh
      env:
        PGPASSWORD: root
      run: |
        & "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET fsync = 'off';"
        & "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET synchronous_commit = 'off';"
        & "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET full_page_writes = 'off';"
        & "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET shared_buffers = '1GB';"
        & "$env:PGBIN\psql" -U postgres -d postgres -c "ALTER SYSTEM SET port = 5432;"

        Restart-Service -Name postgresql*
        Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru

    - name: Setup database users
      shell: pwsh
      env:
        PGPASSWORD: root
      run: |
        & "$env:PGBIN\createuser" -U postgres -S www-data
        & "$env:PGBIN\createuser" -U postgres -s runner
```
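The action only copies the PostGIS bundle into the PostgreSQL root and starts the service; it never loads the extension itself. A quick way to confirm the install from a later CI step is a small Python check like the sketch below. The DSN, the `password=root` credential and the step placement are assumptions for illustration, not part of the action.

```python
# Hypothetical sanity check, run after the composite action has started PostgreSQL.
# Assumes psycopg 3 is installed and the default 'postgres' superuser is reachable
# (the Windows runner images typically use password 'root').
import psycopg

def check_postgis(dsn: str = "dbname=postgres user=postgres password=root "
                             "host=localhost port=5432") -> str:
    with psycopg.connect(dsn) as conn, conn.cursor() as cur:
        # Creating the extension only succeeds if the bundle was copied into PGROOT.
        cur.execute("CREATE EXTENSION IF NOT EXISTS postgis")
        cur.execute("SELECT postgis_full_version()")
        return cur.fetchone()[0]

if __name__ == "__main__":
    print(check_postgis())
```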
.github/actions/setup-postgresql/action.yml (vendored, 2 lines changed)

@@ -1,5 +1,7 @@
```yaml
name: 'Setup Postgresql and Postgis'

description: 'Installs PostgreSQL and PostGIS and configures it for CI tests'

inputs:
  postgresql-version:
    description: 'Version of PostgreSQL to install'
```
.github/workflows/ci-tests.yml (vendored, 59 lines changed)

@@ -140,6 +140,65 @@ jobs:
```yaml
          ../venv/bin/python -m pytest test/bdd --nominatim-purge
        working-directory: Nominatim

  tests-windows:
    needs: create-archive
    runs-on: windows-latest

    steps:
      - uses: actions/download-artifact@v4
        with:
          name: full-source

      - name: Unpack Nominatim
        run: tar xf nominatim-src.tar.bz2

      - uses: ./Nominatim/.github/actions/setup-postgresql-windows
        with:
          postgresql-version: 17

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.14'

      - name: Install Spatialite
        run: |
          Invoke-WebRequest -Uri "https://www.gaia-gis.it/gaia-sins/windows-bin-amd64/mod_spatialite-5.1.0-win-amd64.7z" -OutFile "spatialite.7z"
          7z x spatialite.7z -o"C:\spatialite"
          echo "C:\spatialite\mod_spatialite-5.1.0-win-amd64" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append

      - name: Install osm2pgsql
        run: |
          Invoke-WebRequest -Uri "https://osm2pgsql.org/download/windows/osm2pgsql-latest-x64.zip" -OutFile "osm2pgsql.zip"
          Expand-Archive -Path "osm2pgsql.zip" -DestinationPath "C:\osm2pgsql"
          $BinDir = Get-ChildItem -Path "C:\osm2pgsql" -Recurse -Filter "osm2pgsql.exe" | Select-Object -ExpandProperty DirectoryName | Select-Object -First 1
          if (-not $BinDir) {
              Write-Error "Could not find osm2pgsql.exe"
              exit 1
          }
          echo "$BinDir" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
          $FullExePath = Join-Path $BinDir "osm2pgsql.exe"
          echo "NOMINATIM_OSM2PGSQL_BINARY=$FullExePath" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append

      - name: Set UTF-8 encoding
        run: |
          echo "PYTHONUTF8=1" >> $env:GITHUB_ENV
          [System.Console]::OutputEncoding = [System.Text.Encoding]::UTF8

      - name: Install PyICU from wheel
        run: |
          python -m pip install https://github.com/cgohlke/pyicu-build/releases/download/v2.16.0/pyicu-2.16-cp314-cp314-win_amd64.whl

      - name: Install test prerequisites
        run: |
          python -m pip install -U pip
          python -m pip install pytest pytest-asyncio "psycopg[binary]!=3.3.0" python-dotenv pyyaml jinja2 psutil sqlalchemy pytest-bdd falcon starlette uvicorn asgi_lifespan aiosqlite osmium mwparserfromhell

      - name: Python unit tests
        run: |
          python -m pytest test/python -k "not (import_osm or run_osm2pgsql)"
        working-directory: Nominatim

  install:
    runs-on: ubuntu-latest
    needs: create-archive
```
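The "Set UTF-8 encoding" step relies on Python's UTF-8 mode being picked up from `PYTHONUTF8=1` in later steps. If the Windows job still hits encoding problems, a throwaway debug snippet along these lines (illustrative only, not part of the workflow) shows whether the flag actually reached the interpreter:

```python
# Illustration: verify that PYTHONUTF8=1 was inherited by the test process.
import sys
import locale

print("utf8_mode:", sys.flags.utf8_mode)                  # 1 when UTF-8 mode is on
print("preferred:", locale.getpreferredencoding(False))   # 'utf-8' in UTF-8 mode
```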
```diff
@@ -89,7 +89,7 @@ BEGIN
 
     -- Add the linked-place (e.g. city) name as a searchable placename in the default language (if any)
     default_language := get_country_language_code(location.country_code);
-    IF default_language is not NULL AND NOT location.name ? ('name:' || default_language) THEN
+    IF default_language is not NULL AND location.name ? 'name' AND NOT location.name ? ('name:' || default_language) THEN
       location.name := location.name || hstore('name:' || default_language, location.name->'name');
     END IF;
```
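The extra `location.name ? 'name'` check matters because `location.name->'name'` is NULL for places that only carry language-specific names; concatenating that into the hstore would add a `name:<lang>` key with a NULL value. A rough Python analogue of the guarded update, with a dict standing in for the hstore (illustration only, not the trigger code):

```python
# Dict-based sketch of the guarded hstore update in the trigger (illustration only).
def expand_default_language(name: dict, default_language):
    if (default_language is not None
            and 'name' in name
            and f'name:{default_language}' not in name):
        name = {**name, f'name:{default_language}': name['name']}
    return name

print(expand_default_language({'name': 'Popayán'}, 'es'))   # adds 'name:es'
print(expand_default_language({'name:en': 'Open'}, 'es'))   # unchanged: no plain 'name'
```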
```
@@ -299,18 +299,23 @@ Feature: Linking of places

    Scenario: Linked places expand default language names
        Given the grid with origin CO
         | 1 |   | 2 |
         |   | 9 |   |
         | 4 |   | 3 |
        Given the places
         | 1 |   | 2 |   | 5 |    | 6 |
         |   | 9 |   |   |   | 10 |   |
         | 4 |   | 3 |   | 8 |    | 7 |
        And the places
         | osm | class | type | name+name | geometry |
         | N9  | place | city | Popayán   | 9        |
        Given the places
        And the places
         | osm | class | type | name+name:en | geometry |
         | N10 | place | city | Open         | 10       |
        And the places
         | osm | class    | type           | name+name                | geometry    | admin |
         | R1  | boundary | administrative | Perímetro Urbano Popayán | (1,2,3,4,1) | 8     |
         | R2  | boundary | administrative | Abre                     | (5,6,7,8,5) | 8     |
        And the relations
         | id | members  |
         | 1  | N9:label |
         | id | members   |
         | 1  | N9:label  |
         | 2  | N10:label |
        When importing
        Then placex contains
         | object | linked_place_id |
```
```diff
@@ -200,14 +200,15 @@ def test_get_path_empty(make_config):
     assert not config.get_path('TOKENIZER_CONFIG')
 
 
-def test_get_path_absolute(make_config, monkeypatch):
+def test_get_path_absolute(make_config, monkeypatch, tmp_path):
     config = make_config()
 
-    monkeypatch.setenv('NOMINATIM_FOOBAR', '/dont/care')
+    p = (tmp_path / "does_not_exist").resolve()
+    monkeypatch.setenv('NOMINATIM_FOOBAR', str(p))
     result = config.get_path('FOOBAR')
 
     assert isinstance(result, Path)
-    assert str(result) == '/dont/care'
+    assert str(result) == str(p)
 
 
 def test_get_path_relative(make_config, monkeypatch, tmp_path):
```
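Dropping the hard-coded '/dont/care' is a Windows portability fix: the same POSIX string does not round-trip through pathlib on Windows, so the old equality check could never hold there. A small illustration (not part of the test suite):

```python
# Illustration: why comparing against a literal POSIX path fails on Windows.
from pathlib import Path

p = Path('/dont/care')
print(str(p))   # '/dont/care' on POSIX, but '\\dont\\care' on Windows
# Deriving the expected value from tmp_path keeps both sides of the assertion
# in the same platform-native representation.
```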
```diff
@@ -6,8 +6,12 @@
 # For a full list of authors see the git log.
 import itertools
+import sys
+import asyncio
 from pathlib import Path
 
+if sys.platform == 'win32':
+    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
+
 import psycopg
 from psycopg import sql as pysql
 import pytest
```

```diff
@@ -145,11 +149,12 @@ def country_row(country_table, temp_db_cursor):
 
 
 @pytest.fixture
-def load_sql(temp_db_conn, country_row):
-    proc = SQLPreprocessor(temp_db_conn, Configuration(None))
+def load_sql(temp_db_conn, country_table):
+    conf = Configuration(None)
 
-    def _run(filename, **kwargs):
-        proc.run_sql_file(temp_db_conn, filename, **kwargs)
+    def _run(*filename, **kwargs):
+        for fn in filename:
+            SQLPreprocessor(temp_db_conn, conf).run_sql_file(temp_db_conn, fn, **kwargs)
 
     return _run
```
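The conftest change pins the selector event loop on Windows before psycopg is imported. This is needed because asyncio's default ProactorEventLoop on Windows does not implement `loop.add_reader()`, which psycopg's async connections rely on. A standalone illustration of the effect (the psycopg dependency is stated as an assumption in the comments):

```python
# Illustration of the policy switch; run on Windows to see the difference.
import sys
import asyncio

if sys.platform == 'win32':
    # Assumption for this sketch: without this line the default policy creates a
    # ProactorEventLoop, whose missing add_reader() support breaks async psycopg.
    asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

async def show_loop() -> None:
    print(type(asyncio.get_running_loop()).__name__)   # SelectorEventLoop

asyncio.run(show_loop())
```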
```diff
@@ -2,7 +2,7 @@
 #
 # This file is part of Nominatim. (https://nominatim.org)
 #
-# Copyright (C) 2025 by the Nominatim developer community.
+# Copyright (C) 2026 by the Nominatim developer community.
 # For a full list of authors see the git log.
 """
 Tests for database integrity checks.
```

```diff
@@ -46,8 +46,7 @@ def test_check_database_version_bad(property_table, temp_db_conn, def_config):
     assert chkdb.check_database_version(temp_db_conn, def_config) == chkdb.CheckState.FATAL
 
 
-def test_check_placex_table_good(table_factory, temp_db_conn, def_config):
-    table_factory('placex')
+def test_check_placex_table_good(placex_table, temp_db_conn, def_config):
     assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.OK
```

```diff
@@ -55,13 +54,13 @@ def test_check_placex_table_bad(temp_db_conn, def_config):
     assert chkdb.check_placex_table(temp_db_conn, def_config) == chkdb.CheckState.FATAL
 
 
-def test_check_placex_table_size_good(table_factory, temp_db_conn, def_config):
-    table_factory('placex', content=((1, ), (2, )))
+def test_check_placex_table_size_good(placex_row, temp_db_conn, def_config):
+    for _ in range(2):
+        placex_row()
     assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.OK
 
 
-def test_check_placex_table_size_bad(table_factory, temp_db_conn, def_config):
-    table_factory('placex')
+def test_check_placex_table_size_bad(placex_table, temp_db_conn, def_config):
     assert chkdb.check_placex_size(temp_db_conn, def_config) == chkdb.CheckState.FATAL
```

```diff
@@ -84,15 +83,22 @@ def test_check_tokenizer(temp_db_conn, def_config, monkeypatch,
     assert chkdb.check_tokenizer(temp_db_conn, def_config) == state
 
 
-def test_check_indexing_good(table_factory, temp_db_conn, def_config):
-    table_factory('placex', 'place_id int, indexed_status smallint',
-                  content=((1, 0), (2, 0)))
+def test_check_indexing_good(placex_row, temp_db_conn, def_config):
+    for _ in range(2):
+        placex_row(indexed_status=0)
     assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.OK
 
 
-def test_check_indexing_bad(table_factory, temp_db_conn, def_config):
-    table_factory('placex', 'place_id int, indexed_status smallint',
-                  content=((1, 0), (2, 2)))
+def test_check_indexing_bad(placex_row, temp_db_conn, def_config):
+    for status in (0, 2):
+        placex_row(indexed_status=status)
     assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.FAIL
+
+
+def test_check_indexing_bad_frozen(placex_row, temp_db_conn, def_config):
+    for status in (0, 2):
+        placex_row(indexed_status=status)
+    temp_db_conn.execute('DROP TABLE place')
+    assert chkdb.check_indexing(temp_db_conn, def_config) == chkdb.CheckState.WARN
```
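The rewritten checks swap ad-hoc `table_factory` calls for `placex_row`, a per-row factory fixture from the shared conftest. The fixture itself is not part of this diff; the sketch below only illustrates the general pattern, with a hypothetical name and table layout that may differ from the real conftest:

```python
# Hypothetical sketch of a row-factory fixture; the real placex_row lives in the
# project's conftest and may use different columns and defaults.
import itertools
import pytest

@pytest.fixture
def placex_row_sketch(temp_db_cursor):
    ids = itertools.count(1)

    def _insert(indexed_status=0, **columns):
        # Each call inserts exactly one row, so tests create only the data they assert on.
        columns.setdefault('place_id', next(ids))
        columns['indexed_status'] = indexed_status
        cols = ', '.join(columns)
        vals = ', '.join(['%s'] * len(columns))
        temp_db_cursor.execute(f"INSERT INTO placex ({cols}) VALUES ({vals})",
                               list(columns.values()))

    return _insert
```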
```diff
@@ -78,8 +78,8 @@ def test_setup_skeleton_already_exists(temp_db):
     database_import.setup_database_skeleton(f'dbname={temp_db}')
 
 
-def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd):
-    table_factory('place', content=((1, ), ))
+def test_import_osm_data_simple(place_row, osm2pgsql_options, capfd):
+    place_row()
 
     database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
     captured = capfd.readouterr()
```

```diff
@@ -92,8 +92,8 @@ def test_import_osm_data_simple(table_factory, osm2pgsql_options, capfd):
     assert 'file.pbf' in captured.out
 
 
-def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, capfd):
-    table_factory('place', content=((1, ), ))
+def test_import_osm_data_multifile(place_row, tmp_path, osm2pgsql_options, capfd):
+    place_row()
     osm2pgsql_options['osm2pgsql_cache'] = 0
 
     files = [tmp_path / 'file1.osm', tmp_path / 'file2.osm']
```

```diff
@@ -107,22 +107,19 @@ def test_import_osm_data_multifile(table_factory, tmp_path, osm2pgsql_options, c
     assert 'file2.osm' in captured.out
 
 
-def test_import_osm_data_simple_no_data(table_factory, osm2pgsql_options):
-    table_factory('place')
-
+def test_import_osm_data_simple_no_data(place_row, osm2pgsql_options):
     with pytest.raises(UsageError, match='No data imported'):
         database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options)
 
 
-def test_import_osm_data_simple_ignore_no_data(table_factory, osm2pgsql_options):
-    table_factory('place')
-
+def test_import_osm_data_simple_ignore_no_data(place_table, osm2pgsql_options):
     database_import.import_osm_data(Path('file.pbf'), osm2pgsql_options,
                                     ignore_errors=True)
 
 
-def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql_options):
-    table_factory('place', content=((1, ), ))
+def test_import_osm_data_drop(place_row, table_factory, temp_db_cursor,
+                              tmp_path, osm2pgsql_options):
+    place_row()
     table_factory('planet_osm_nodes')
 
     flatfile = tmp_path / 'flatfile'
```

```diff
@@ -136,8 +133,8 @@ def test_import_osm_data_drop(table_factory, temp_db_cursor, tmp_path, osm2pgsql
     assert not temp_db_cursor.table_exists('planet_osm_nodes')
 
 
-def test_import_osm_data_default_cache(table_factory, osm2pgsql_options, capfd):
-    table_factory('place', content=((1, ), ))
+def test_import_osm_data_default_cache(place_row, osm2pgsql_options, capfd):
+    place_row()
 
     osm2pgsql_options['osm2pgsql_cache'] = 0
```

```diff
@@ -215,52 +212,53 @@ async def test_load_data(dsn, place_row, placex_table, osmline_table,
 class TestSetupSQL:
 
     @pytest.fixture(autouse=True)
-    def init_env(self, temp_db, tmp_path, def_config, sql_preprocessor_cfg):
-        def_config.lib_dir.sql = tmp_path / 'sql'
-        def_config.lib_dir.sql.mkdir()
+    def osm2ppsql_skel(self, def_config, temp_db_with_extensions, place_row,
+                       country_table, table_factory, temp_db_conn):
         self.config = def_config
+        place_row()
+        table_factory('osm2pgsql_properties', 'property TEXT, value TEXT',
+                      (('db_format', 2),))
 
-    def write_sql(self, fname, content):
-        (self.config.lib_dir.sql / fname).write_text(content, encoding='utf-8')
+        table_factory('planet_osm_rels', 'id BIGINT, members JSONB, tags JSONB')
+        temp_db_conn.execute("""
+            CREATE OR REPLACE FUNCTION planet_osm_member_ids(jsonb, character)
+            RETURNS bigint[] AS $$
+                SELECT array_agg((el->>'ref')::int8)
+                  FROM jsonb_array_elements($1) AS el WHERE el->>'type' = $2
+            $$ LANGUAGE sql IMMUTABLE;
+        """)
 
     @pytest.mark.parametrize("reverse", [True, False])
-    def test_create_tables(self, temp_db_conn, temp_db_cursor, reverse):
-        self.write_sql('tables.sql',
-                       """CREATE FUNCTION test() RETURNS bool
-                          AS $$ SELECT {{db.reverse_only}} $$ LANGUAGE SQL""")
-        self.write_sql('grants.sql', "-- Mock grants file for testing\n")
+    def test_create_tables(self, table_factory, temp_db_conn, temp_db_cursor, reverse):
+        table_factory('country_osm_grid')
 
         database_import.create_tables(temp_db_conn, self.config, reverse)
 
-        temp_db_cursor.scalar('SELECT test()') == reverse
+        assert temp_db_cursor.table_exists('placex')
+        assert not reverse == temp_db_cursor.table_exists('search_name')
 
-    def test_create_table_triggers(self, temp_db_conn, temp_db_cursor):
-        self.write_sql('table-triggers.sql',
-                       """CREATE FUNCTION test() RETURNS TEXT
-                          AS $$ SELECT 'a'::text $$ LANGUAGE SQL""")
+    def test_create_table_triggers(self, temp_db_conn, placex_table, osmline_table,
+                                   postcode_table, load_sql):
+        load_sql('functions.sql')
 
         database_import.create_table_triggers(temp_db_conn, self.config)
 
-        temp_db_cursor.scalar('SELECT test()') == 'a'
-
-    def test_create_partition_tables(self, temp_db_conn, temp_db_cursor):
-        self.write_sql('partition-tables.src.sql',
-                       """CREATE FUNCTION test() RETURNS TEXT
-                          AS $$ SELECT 'b'::text $$ LANGUAGE SQL""")
+    def test_create_partition_tables(self, country_row, temp_db_conn, temp_db_cursor, load_sql):
+        for i in range(3):
+            country_row(partition=i)
+        load_sql('tables/location_area.sql')
 
         database_import.create_partition_tables(temp_db_conn, self.config)
 
-        temp_db_cursor.scalar('SELECT test()') == 'b'
+        for i in range(3):
+            assert temp_db_cursor.table_exists(f"location_area_large_{i}")
+            assert temp_db_cursor.table_exists(f"search_name_{i}")
 
     @pytest.mark.parametrize("drop", [True, False])
     @pytest.mark.asyncio
-    async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop):
-        self.write_sql('indices.sql',
-                       """CREATE FUNCTION test() RETURNS bool
-                          AS $$ SELECT {{drop}} $$ LANGUAGE SQL""")
+    async def test_create_search_indices(self, temp_db_conn, temp_db_cursor, drop, load_sql):
+        load_sql('tables.sql', 'functions/ranking.sql')
 
         await database_import.create_search_indices(temp_db_conn, self.config, drop)
 
-        temp_db_cursor.scalar('SELECT test()') == drop
+        assert temp_db_cursor.index_exists('placex', 'idx_placex_geometry')
+        assert not drop == temp_db_cursor.index_exists('placex', 'idx_placex_geometry_buildings')
```
```diff
@@ -11,73 +11,14 @@ import subprocess
 
 import pytest
 
+from psycopg.rows import tuple_row
+
 from nominatim_db.tools import postcodes
 from nominatim_db.data import country_info
 from nominatim_db.db.sql_preprocessor import SQLPreprocessor
 
 import dummy_tokenizer
 
 
-class MockPostcodeTable:
-    """ A location_postcodes table for testing.
-    """
-    def __init__(self, conn, config):
-        self.conn = conn
-        SQLPreprocessor(conn, config).run_sql_file(conn, 'functions/postcode_triggers.sql')
-        with conn.cursor() as cur:
-            cur.execute("""CREATE TABLE location_postcodes (
-                               place_id BIGINT,
-                               osm_id BIGINT,
-                               parent_place_id BIGINT,
-                               rank_search SMALLINT,
-                               indexed_status SMALLINT,
-                               indexed_date TIMESTAMP,
-                               country_code varchar(2),
-                               postcode TEXT,
-                               geometry GEOMETRY(Geometry, 4326),
-                               centroid GEOMETRY(Point, 4326))""")
-            cur.execute("""CREATE OR REPLACE FUNCTION token_normalized_postcode(postcode TEXT)
-                           RETURNS TEXT AS $$ BEGIN RETURN postcode; END; $$ LANGUAGE plpgsql;
-
-                           CREATE OR REPLACE FUNCTION get_country_code(place geometry)
-                           RETURNS TEXT AS $$ BEGIN
-                               RETURN null;
-                           END; $$ LANGUAGE plpgsql;
-                        """)
-            cur.execute("""CREATE OR REPLACE FUNCTION expand_by_meters(geom GEOMETRY, meters FLOAT)
-                           RETURNS GEOMETRY AS $$
-                               SELECT ST_Envelope(ST_Buffer(geom::geography, meters, 1)::geometry)
-                           $$ LANGUAGE sql;""")
-
-        conn.commit()
-
-    def add(self, country, postcode, x, y):
-        with self.conn.cursor() as cur:
-            cur.execute(
-                """INSERT INTO location_postcodes
-                       (place_id, indexed_status, country_code, postcode, centroid, geometry)
-                   VALUES (nextval('seq_place'), 1, %(cc)s, %(pc)s,
-                           ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326),
-                           ST_Expand(ST_SetSRID(ST_MakePoint(%(x)s, %(y)s), 4326), 0.005))""",
-                {'cc': country, 'pc': postcode, 'x': x, 'y': y})
-
-        self.conn.commit()
-
-    @property
-    def row_set(self):
-        with self.conn.cursor() as cur:
-            cur.execute("""SELECT osm_id, country_code, postcode,
-                                  ST_X(centroid), ST_Y(centroid)
-                             FROM location_postcodes""")
-            return set((tuple(row) for row in cur))
-
-
-@pytest.fixture
-def postcode_table(def_config, temp_db_conn, placex_table, table_factory):
-    country_info.setup_country_config(def_config)
-    return MockPostcodeTable(temp_db_conn, def_config)
-
-
 @pytest.fixture
 def insert_implicit_postcode(placex_row, place_postcode_row):
     """ Insert data into the placex and place table
```
```diff
@@ -86,11 +27,11 @@ def insert_implicit_postcode(placex_row, place_postcode_row):
     def _insert_implicit_postcode(osm_id, country, geometry, postcode, in_placex=False):
         if in_placex:
             placex_row(osm_id=osm_id, country=country, geom=geometry,
-                       centroid=geometry, address={'postcode': postcode})
+                       centroid=geometry,
+                       address={'postcode': postcode})
         else:
             place_postcode_row(osm_id=osm_id, centroid=geometry,
                                country=country, postcode=postcode)
 
     return _insert_implicit_postcode
```
```diff
@@ -103,7 +44,6 @@ def insert_postcode_area(place_postcode_row):
     place_postcode_row(osm_type='R', osm_id=osm_id, postcode=postcode, country=country,
                        centroid=f"POINT({x} {y})",
                        geom=f"POLYGON(({x1} {y1}, {x1} {y2}, {x2} {y2}, {x2} {y1}, {x1} {y1}))")
 
     return _do
```
```
@@ -123,186 +63,198 @@ def postcode_update(dsn, temp_db_conn):
                               BEFORE INSERT ON location_postcodes
                               FOR EACH ROW EXECUTE PROCEDURE postcodes_insert()""")
        temp_db_conn.commit()

        postcodes.update_postcodes(dsn, data_path, tokenizer)

    return _do


def test_postcodes_empty(postcode_update, postcode_table, place_postcode_table):
    postcode_update()
class TestPostcodes:
    @pytest.fixture(autouse=True)
    def setup(self, def_config, postcode_table, placex_table, place_postcode_table,
              load_sql, temp_db_conn):
        self.conn = temp_db_conn
        country_info.setup_country_config(def_config)
        load_sql('functions/postcode_triggers.sql')

    assert not postcode_table.row_set
        temp_db_conn.execute("""
            CREATE OR REPLACE FUNCTION token_normalized_postcode(postcode TEXT)
            RETURNS TEXT AS $$
                SELECT postcode
            $$ LANGUAGE sql;

            CREATE OR REPLACE FUNCTION get_country_code(place geometry)
            RETURNS TEXT AS $$
                SELECT NULL
            $$ LANGUAGE sql;

@pytest.mark.parametrize('in_placex', [True, False])
def test_postcodes_add_new_point(postcode_update, postcode_table,
                                 insert_implicit_postcode, in_placex):
    insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '9486', in_placex)
    postcode_table.add('yy', '9486', 99, 34)
            CREATE OR REPLACE FUNCTION expand_by_meters(geom GEOMETRY, meters FLOAT)
            RETURNS GEOMETRY AS $$
                SELECT ST_Envelope(ST_Buffer(geom::geography, meters, 1)::geometry)
            $$ LANGUAGE sql;
        """)

    postcode_update()
    @property
    def row_set(self):
        with self.conn.cursor(row_factory=tuple_row) as cur:
            cur.execute("""SELECT osm_id, country_code, postcode,
                                  ST_X(centroid), ST_Y(centroid)
                             FROM location_postcodes""")
            return {r for r in cur}

    assert postcode_table.row_set == {(None, 'xx', '9486', 10, 12), }
    def test_postcodes_empty(self, postcode_update):
        postcode_update()

        assert not self.row_set

def test_postcodes_add_new_area(postcode_update, insert_postcode_area, postcode_table):
    insert_postcode_area(345, 'de', '10445', 23.5, 46.2)
    @pytest.mark.parametrize('in_placex', [True, False])
    def test_postcodes_add_new_point(self, postcode_update, postcode_row,
                                     insert_implicit_postcode, in_placex):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '9486', in_placex)
        postcode_row('yy', '9486', 99, 34)

    postcode_update()
        postcode_update()

    assert postcode_table.row_set == {(345, 'de', '10445', 23.5, 46.2)}
        assert self.row_set == {(None, 'xx', '9486', 10, 12), }

    def test_postcodes_add_new_area(self, postcode_update, insert_postcode_area):
        insert_postcode_area(345, 'de', '10445', 23.5, 46.2)

@pytest.mark.parametrize('in_placex', [True, False])
def test_postcodes_add_area_and_point(postcode_update, insert_postcode_area,
                                      insert_implicit_postcode, postcode_table, in_placex):
    insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '10445', in_placex)
    insert_postcode_area(345, 'xx', '10445', 23.5, 46.2)
        postcode_update()

    postcode_update()
        assert self.row_set == {(345, 'de', '10445', 23.5, 46.2)}

    assert postcode_table.row_set == {(345, 'xx', '10445', 23.5, 46.2)}
    @pytest.mark.parametrize('in_placex', [True, False])
    def test_postcodes_add_area_and_point(self, postcode_update, insert_postcode_area,
                                          insert_implicit_postcode, in_placex):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', '10445', in_placex)
        insert_postcode_area(345, 'xx', '10445', 23.5, 46.2)

        postcode_update()

@pytest.mark.parametrize('in_placex', [True, False])
def test_postcodes_add_point_within_area(postcode_update, insert_postcode_area,
                                         insert_implicit_postcode, postcode_table, in_placex):
    insert_implicit_postcode(1, 'xx', 'POINT(23.5 46.2)', '10446', in_placex)
    insert_postcode_area(345, 'xx', '10445', 23.5, 46.2)
        assert self.row_set == {(345, 'xx', '10445', 23.5, 46.2)}

    postcode_update()
    @pytest.mark.parametrize('in_placex', [True, False])
    def test_postcodes_add_point_within_area(self, postcode_update, insert_postcode_area,
                                             insert_implicit_postcode, in_placex):
        insert_implicit_postcode(1, 'xx', 'POINT(23.5 46.2)', '10446', in_placex)
        insert_postcode_area(345, 'xx', '10445', 23.5, 46.2)

    assert postcode_table.row_set == {(345, 'xx', '10445', 23.5, 46.2)}
        postcode_update()

        assert self.row_set == {(345, 'xx', '10445', 23.5, 46.2)}

@pytest.mark.parametrize('coords', [(99, 34), (10, 34), (99, 12),
                                    (9, 34), (9, 11), (23, 11)])
def test_postcodes_replace_coordinates(postcode_update, postcode_table, tmp_path,
                                       insert_implicit_postcode, coords):
    insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
    postcode_table.add('xx', 'AB 4511', *coords)
    @pytest.mark.parametrize('coords', [(99, 34), (10, 34), (99, 12),
                                        (9, 34), (9, 11), (23, 11)])
    def test_postcodes_replace_coordinates(self, postcode_update, postcode_row, tmp_path,
                                           insert_implicit_postcode, coords):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
        postcode_row('xx', 'AB 4511', *coords)

    postcode_update(tmp_path)
        postcode_update(tmp_path)

    assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12)}
        assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12)}

    def test_postcodes_replace_coordinates_close(self, postcode_update, postcode_row,
                                                 insert_implicit_postcode):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
        postcode_row('xx', 'AB 4511', 10, 11.99999999)

def test_postcodes_replace_coordinates_close(postcode_update, postcode_table,
                                             insert_implicit_postcode):
    insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
    postcode_table.add('xx', 'AB 4511', 10, 11.99999999)
        postcode_update()

    postcode_update()
        assert self.row_set == {(None, 'xx', 'AB 4511', 10, 11.99999999)}

    assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 11.99999999)}
    def test_postcodes_remove_point(self, postcode_update, postcode_row,
                                    insert_implicit_postcode):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
        postcode_row('xx', 'badname', 10, 12)

        postcode_update()

def test_postcodes_remove_point(postcode_update, postcode_table,
                                insert_implicit_postcode):
    insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
    postcode_table.add('xx', 'badname', 10, 12)
        assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12)}

    postcode_update()
    def test_postcodes_ignore_empty_country(self, postcode_update, insert_implicit_postcode):
        insert_implicit_postcode(1, None, 'POINT(10 12)', 'AB 4511')
        postcode_update()
        assert not self.row_set

    assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12)}
    def test_postcodes_remove_all(self, postcode_update, postcode_row, place_postcode_table):
        postcode_row('ch', '5613', 10, 12)
        postcode_update()

        assert not self.row_set

def test_postcodes_ignore_empty_country(postcode_update, postcode_table,
                                        insert_implicit_postcode):
    insert_implicit_postcode(1, None, 'POINT(10 12)', 'AB 4511')
    postcode_update()
    assert not postcode_table.row_set


def test_postcodes_remove_all(postcode_update, postcode_table, place_postcode_table):
    postcode_table.add('ch', '5613', 10, 12)
    postcode_update()

    assert not postcode_table.row_set


def test_postcodes_multi_country(postcode_update, postcode_table,
                                 insert_implicit_postcode):
    insert_implicit_postcode(1, 'de', 'POINT(10 12)', '54451')
    insert_implicit_postcode(2, 'cc', 'POINT(100 56)', 'DD23 T')
    insert_implicit_postcode(3, 'de', 'POINT(10.3 11.0)', '54452')
    insert_implicit_postcode(4, 'cc', 'POINT(10.3 11.0)', '54452')

    postcode_update()

    assert postcode_table.row_set == {(None, 'de', '54451', 10, 12),
                                      (None, 'de', '54452', 10.3, 11.0),
                                      (None, 'cc', '54452', 10.3, 11.0),
                                      (None, 'cc', 'DD23 T', 100, 56)}


@pytest.mark.parametrize("gzipped", [True, False])
def test_postcodes_extern(postcode_update, postcode_table, tmp_path,
                          insert_implicit_postcode, gzipped):
    insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')

    extfile = tmp_path / 'xx_postcodes.csv'
    extfile.write_text("postcode,lat,lon\nAB 4511,-4,-1\nCD 4511,-5, -10", encoding='utf-8')

    if gzipped:
        subprocess.run(['gzip', str(extfile)])
        assert not extfile.is_file()

    postcode_update(tmp_path)

    assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12),
                                      (None, 'xx', 'CD 4511', -10, -5)}


def test_postcodes_extern_bad_column(postcode_update, postcode_table, tmp_path,
    def test_postcodes_multi_country(self, postcode_update,
                                     insert_implicit_postcode):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
        insert_implicit_postcode(1, 'de', 'POINT(10 12)', '54451')
        insert_implicit_postcode(2, 'cc', 'POINT(100 56)', 'DD23 T')
        insert_implicit_postcode(3, 'de', 'POINT(10.3 11.0)', '54452')
        insert_implicit_postcode(4, 'cc', 'POINT(10.3 11.0)', '54452')

    extfile = tmp_path / 'xx_postcodes.csv'
    extfile.write_text("postode,lat,lon\nAB 4511,-4,-1\nCD 4511,-5, -10", encoding='utf-8')
        postcode_update()

    postcode_update(tmp_path)
        assert self.row_set == {(None, 'de', '54451', 10, 12),
                                (None, 'de', '54452', 10.3, 11.0),
                                (None, 'cc', '54452', 10.3, 11.0),
                                (None, 'cc', 'DD23 T', 100, 56)}

    assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12)}
    @pytest.mark.parametrize("gzipped", [True, False])
    def test_postcodes_extern(self, postcode_update, tmp_path,
                              insert_implicit_postcode, gzipped):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')

        extfile = tmp_path / 'xx_postcodes.csv'
        extfile.write_text("postcode,lat,lon\nAB 4511,-4,-1\nCD 4511,-5, -10", encoding='utf-8')

def test_postcodes_extern_bad_number(postcode_update, insert_implicit_postcode,
                                     postcode_table, tmp_path):
    insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')
        if gzipped:
            subprocess.run(['gzip', str(extfile)])
            assert not extfile.is_file()

    extfile = tmp_path / 'xx_postcodes.csv'
    extfile.write_text(
        "postcode,lat,lon\nXX 4511,-4,NaN\nCD 4511,-5, -10\n34,200,0", encoding='utf-8')
        postcode_update(tmp_path)

    postcode_update(tmp_path)
        assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12),
                                (None, 'xx', 'CD 4511', -10, -5)}

    assert postcode_table.row_set == {(None, 'xx', 'AB 4511', 10, 12),
                                      (None, 'xx', 'CD 4511', -10, -5)}
    def test_postcodes_extern_bad_column(self, postcode_update, tmp_path,
                                         insert_implicit_postcode):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')

        extfile = tmp_path / 'xx_postcodes.csv'
        extfile.write_text("postode,lat,lon\nAB 4511,-4,-1\nCD 4511,-5, -10", encoding='utf-8')

        postcode_update(tmp_path)

        assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12)}

    def test_postcodes_extern_bad_number(self, postcode_update, insert_implicit_postcode,
                                         tmp_path):
        insert_implicit_postcode(1, 'xx', 'POINT(10 12)', 'AB 4511')

        extfile = tmp_path / 'xx_postcodes.csv'
        extfile.write_text(
            "postcode,lat,lon\nXX 4511,-4,NaN\nCD 4511,-5, -10\n34,200,0", encoding='utf-8')

        postcode_update(tmp_path)

        assert self.row_set == {(None, 'xx', 'AB 4511', 10, 12),
                                (None, 'xx', 'CD 4511', -10, -5)}

    def test_no_placex_entry(self, postcode_update, temp_db_cursor, place_postcode_row):
        # Rewrite the get_country_code function to verify its execution.
        temp_db_cursor.execute("""
            CREATE OR REPLACE FUNCTION get_country_code(place geometry) RETURNS TEXT AS $$
            SELECT 'yy' $$ LANGUAGE sql""")
        place_postcode_row(centroid='POINT(10 12)', postcode='AB 4511')
        postcode_update()

        assert self.row_set == {(None, 'yy', 'AB 4511', 10, 12)}

    def test_discard_badly_formatted_postcodes(self, postcode_update, place_postcode_row):
        place_postcode_row(centroid='POINT(10 12)', country='fr', postcode='AB 4511')
        postcode_update()

        assert not self.row_set


def test_can_compute(dsn, table_factory):
    assert not postcodes.can_compute(dsn)
    table_factory('place_postcode')
    assert postcodes.can_compute(dsn)


def test_no_placex_entry(postcode_update, temp_db_cursor, place_postcode_row, postcode_table):
    # Rewrite the get_country_code function to verify its execution.
    temp_db_cursor.execute("""
        CREATE OR REPLACE FUNCTION get_country_code(place geometry)
        RETURNS TEXT AS $$ BEGIN
            RETURN 'yy';
        END; $$ LANGUAGE plpgsql;
        """)
    place_postcode_row(centroid='POINT(10 12)', postcode='AB 4511')
    postcode_update()

    assert postcode_table.row_set == {(None, 'yy', 'AB 4511', 10, 12)}


def test_discard_badly_formatted_postcodes(postcode_update, place_postcode_row, postcode_table):
    place_postcode_row(centroid='POINT(10 12)', country='fr', postcode='AB 4511')
    postcode_update()

    assert not postcode_table.row_set
```
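The new `row_set` property collects the table contents as a set of plain tuples so assertions can compare against set literals. A self-contained sketch of the same pattern with psycopg 3, with the connection string as a placeholder:

```python
# Minimal sketch of the row_set pattern used in TestPostcodes (illustrative DSN).
import psycopg
from psycopg.rows import tuple_row

def location_postcode_rows(dsn: str = "dbname=test_nominatim") -> set:
    with psycopg.connect(dsn) as conn:
        # tuple_row makes every fetched row a plain tuple, so a set comprehension
        # over the cursor yields hashable, directly comparable values.
        with conn.cursor(row_factory=tuple_row) as cur:
            cur.execute("""SELECT osm_id, country_code, postcode,
                                  ST_X(centroid), ST_Y(centroid)
                             FROM location_postcodes""")
            return {row for row in cur}
```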