Compare commits

..

9 Commits

Author  SHA1  Message  Date
Sarah Hoffmann  b3796b9e25  remove download instructions for country grid  2022-11-19 17:07:18 +01:00
Sarah Hoffmann  4efd5f6444  adapt to 4.1.1 release  2022-11-19 16:21:26 +01:00
Marc Tobias  75f2efaf49  Tiger install doc: add -refresh website- step  2022-11-19 16:18:35 +01:00
marc tobias  4f82a9a789  Documentation: remove year from TIGER filename  2022-11-19 16:18:15 +01:00
Mauricio Scheffer  ebfdee62d7  docs: fix links to rank docs  2022-11-19 16:18:06 +01:00
Sarah Hoffmann  912db1cbc4  docs: add types-psutil requirement  2022-11-19 16:17:47 +01:00
Sarah Hoffmann  456520750e  Merge pull request #2827 from mtmail/docs-4.1.x (Documentation: Add missing wget to Ubuntu install instructions)  2022-09-25 12:42:51 +02:00
marc tobias  2950fa6ad5  Documentation: Add missing wget to Ubuntu install instructions  2022-09-23 20:49:14 +02:00
Sarah Hoffmann  d259f01d62  adapt to 4.1.0 release  2022-08-05 15:30:06 +02:00
32 changed files with 141 additions and 369 deletions

View File

@@ -23,7 +23,7 @@ runs:
run: |
sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev
if [ "x$UBUNTUVER" == "x18" ]; then
pip3 install python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 datrie
pip3 install python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu osmium PyYAML==5.1 datrie
else
sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml
fi

View File

@@ -7,11 +7,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/checkout@v2
with:
submodules: true
- uses: actions/cache@v3
- uses: actions/cache@v2
with:
path: |
data/country_osm_grid.sql.gz
@@ -27,7 +27,7 @@ jobs:
mv nominatim-src.tar.bz2 Nominatim
- name: 'Upload Artifact'
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v2
with:
name: full-source
path: nominatim-src.tar.bz2
@@ -58,7 +58,7 @@ jobs:
runs-on: ubuntu-${{ matrix.ubuntu }}.04
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v2
with:
name: full-source
@@ -72,7 +72,7 @@ jobs:
tools: phpunit, phpcs, composer
ini-values: opcache.jit=disable
- uses: actions/setup-python@v4
- uses: actions/setup-python@v2
with:
python-version: 3.6
if: matrix.ubuntu == 18
@@ -99,7 +99,7 @@ jobs:
if: matrix.ubuntu == 22
- name: Install latest pylint/mypy
run: pip3 install -U pylint mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil types-requests typing-extensions
run: pip3 install -U pylint mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil typing-extensions
- name: PHP linting
run: phpcs --report-width=120 .
@@ -136,7 +136,7 @@ jobs:
runs-on: ubuntu-20.04
steps:
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v2
with:
name: full-source
@@ -231,7 +231,7 @@ jobs:
OS: ${{ matrix.name }}
INSTALL_MODE: ${{ matrix.install_mode }}
- uses: actions/download-artifact@v3
- uses: actions/download-artifact@v2
with:
name: full-source
path: /home/nominatim

View File

@@ -20,7 +20,7 @@ project(nominatim)
set(NOMINATIM_VERSION_MAJOR 4)
set(NOMINATIM_VERSION_MINOR 1)
set(NOMINATIM_VERSION_PATCH 2)
set(NOMINATIM_VERSION_PATCH 0)
set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}")

View File

@@ -1,21 +1,3 @@
4.1.2
* fix XSS vulnerability in debug view
4.1.1
* fix crash on update when addr:interpolation receives an illegal value
* fix minimum number of retrieved results to be at least 10
* fix search for combinations of special term + name (e.g. Hotel Bellevue)
* do not return interpolations without a parent street on reverse search
* improve invalidation of linked places on updates
* fix address parsing for interpolation lines
* make sure socket timeouts are respected during replication
(working around a bug in some versions of pyosmium)
* update bundled osm2pgsql to 1.7.1
* typing fixes to work with latest type annotations from typeshed
* smaller improvements to documentation (thanks to @mausch)
4.1.0
* switch to ICU tokenizer as default
@@ -52,10 +34,6 @@
* add setup instructions for updates and systemd
* drop support for PostgreSQL 9.5
4.0.2
* fix XSS vulnerability in debug view
4.0.1
* fix initialisation error in replication script
@@ -94,10 +72,6 @@
* add testing of installation scripts via CI
* drop support for Python < 3.6 and Postgresql < 9.5
3.7.3
* fix XSS vulnerability in debug view
3.7.2
* fix database check for reverse-only imports

View File

@@ -99,7 +99,7 @@ Unix socket instead, change the pool configuration
``` ini
; Replace the tcp listener and add the unix socket
listen = /var/run/php-fpm-nominatim.sock
listen = /var/run/php-fpm.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
@@ -121,7 +121,7 @@ location @php {
fastcgi_param SCRIPT_FILENAME "$document_root$uri.php";
fastcgi_param PATH_TRANSLATED "$document_root$uri.php";
fastcgi_param QUERY_STRING $args;
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php-fpm.sock;
fastcgi_index index.php;
include fastcgi_params;
}
@@ -131,7 +131,7 @@ location ~ [^/]\.php(/|$) {
if (!-f $document_root$fastcgi_script_name) {
return 404;
}
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php-fpm.sock;
fastcgi_index search.php;
include fastcgi.conf;
}

View File

@@ -1,4 +1,4 @@
site_name: Nominatim Documentation
site_name: Nominatim 4.1.1
theme: readthedocs
docs_dir: ${CMAKE_CURRENT_BINARY_DIR}
site_url: https://nominatim.org

View File

@@ -135,7 +135,7 @@ class Debug
public static function printSQL($sSQL)
{
echo '<p><tt><font color="#aaa">'.htmlspecialchars($sSQL, ENT_QUOTES | ENT_SUBSTITUTE | ENT_HTML401).'</font></tt></p>'."\n";
echo '<p><tt><font color="#aaa">'.$sSQL.'</font></tt></p>'."\n";
}
private static function outputVar($mVar, $sPreNL)
@@ -178,12 +178,11 @@ class Debug
}
if (is_string($mVar)) {
$sOut = "'$mVar'";
} else {
$sOut = (string)$mVar;
echo "'$mVar'";
return strlen($mVar) + 2;
}
echo htmlspecialchars($sOut, ENT_QUOTES | ENT_SUBSTITUTE | ENT_HTML401);
return strlen($sOut);
echo (string)$mVar;
return strlen((string)$mVar);
}
}
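
The hunk above pairs an echo wrapped in htmlspecialchars() with a raw echo of the SQL string; the "fix XSS vulnerability in debug view" changelog entries concern exactly this kind of escaping of debug output before it is written into HTML. A rough, hedged Python analogue of the escaped variant (function name is illustrative, not part of Nominatim):

```python
# Rough Python analogue of the escaping in Debug::printSQL(): any string that
# ends up inside debug HTML is escaped first. Illustration only.
import html

def print_sql(sql: str) -> str:
    # html.escape with quote=True covers <, >, &, " and ' - comparable to
    # ENT_QUOTES | ENT_SUBSTITUTE in the PHP version.
    return f'<p><tt><font color="#aaa">{html.escape(sql, quote=True)}</font></tt></p>\n'

print(print_sql("SELECT * FROM placex WHERE name = '<script>alert(1)</script>'"))
```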

View File

@@ -103,7 +103,7 @@ class Geocode
}
$this->iFinalLimit = $iLimit;
$this->iLimit = $iLimit + max($iLimit, 10);
$this->iLimit = $iLimit + min($iLimit, 10);
}
public function setFeatureType($sFeatureType)
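
The paired lines above compute the internal result limit once with max($iLimit, 10) and once with min($iLimit, 10); the changelog entry about retrieving at least 10 results refers to this spot. An illustration-only Python sketch of the arithmetic difference (not Nominatim code):

```python
# Illustration only: mirrors the two PHP variants of setLimit() shown in the
# hunk above, to make the arithmetic difference visible.

def internal_limit_max(limit: int) -> int:
    # variant using max(): always fetches at least 10 extra candidate rows
    return limit + max(limit, 10)

def internal_limit_min(limit: int) -> int:
    # variant using min(): the extra headroom is capped at 10 and can shrink
    # to almost nothing for small limits
    return limit + min(limit, 10)

for requested in (1, 5, 10, 50):
    print(requested, internal_limit_max(requested), internal_limit_min(requested))
# requested=1: the max variant fetches 11 candidates, the min variant only 2
```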

View File

@@ -71,8 +71,7 @@ class ReverseGeocode
$sSQL .= ' ST_Distance(linegeo,'.$sPointSQL.') as distance';
$sSQL .= ' FROM location_property_osmline';
$sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', linegeo, '.$fSearchDiam.')';
$sSQL .= ' and indexed_status = 0 and startnumber is not NULL ';
$sSQL .= ' and parent_place_id != 0';
$sSQL .= ' and indexed_status = 0 and startnumber is not NULL ';
$sSQL .= ' ORDER BY distance ASC limit 1';
Debug::printSQL($sSQL);

View File

@@ -69,31 +69,19 @@ class SpecialTerm
*/
public function extendSearch($oSearch, $oPosition)
{
$iSearchCost = 0;
$iSearchCost = 2;
$iOp = $this->iOperator;
if ($iOp == \Nominatim\Operator::NONE) {
if ($oPosition->isFirstToken()
|| $oSearch->hasName()
|| $oSearch->getContext()->isBoundedSearch()
) {
if ($oSearch->hasName() || $oSearch->getContext()->isBoundedSearch()) {
$iOp = \Nominatim\Operator::NAME;
$iSearchCost += 3;
} else {
$iOp = \Nominatim\Operator::NEAR;
$iSearchCost += 4;
if (!$oPosition->isFirstToken()) {
$iSearchCost += 3;
}
$iSearchCost += 2;
}
} elseif ($oPosition->isFirstToken()) {
} elseif (!$oPosition->isFirstToken() && !$oPosition->isLastToken()) {
$iSearchCost += 2;
} elseif ($oPosition->isLastToken()) {
$iSearchCost += 4;
} else {
$iSearchCost += 6;
}
if ($oSearch->hasHousenumber()) {
$iSearchCost ++;
}

View File

@@ -15,7 +15,7 @@ DECLARE
location RECORD;
waynodes BIGINT[];
BEGIN
IF in_address ? 'street' or in_address ? 'place' THEN
IF akeys(in_address) != ARRAY['interpolation'] THEN
RETURN in_address;
END IF;
@@ -82,35 +82,27 @@ CREATE OR REPLACE FUNCTION reinsert_interpolation(way_id BIGINT, addr HSTORE,
DECLARE
existing BIGINT[];
BEGIN
IF addr is NULL OR NOT addr ? 'interpolation'
OR NOT (addr->'interpolation' in ('odd', 'even', 'all')
or addr->'interpolation' similar to '[1-9]')
THEN
-- the new interpolation is illegal, simply remove existing entries
DELETE FROM location_property_osmline WHERE osm_id = way_id;
ELSE
-- Get the existing entry from the interpolation table.
SELECT array_agg(place_id) INTO existing
FROM location_property_osmline WHERE osm_id = way_id;
-- Get the existing entry from the interpolation table.
SELECT array_agg(place_id) INTO existing
FROM location_property_osmline WHERE osm_id = way_id;
IF existing IS NULL or array_length(existing, 1) = 0 THEN
INSERT INTO location_property_osmline (osm_id, address, linegeo)
VALUES (way_id, addr, geom);
ELSE
-- Update the interpolation table:
-- The first entry gets the original data, all other entries
-- are removed and will be recreated on indexing.
-- (An interpolation can be split up, if it has more than 2 address nodes)
UPDATE location_property_osmline
SET address = addr,
linegeo = geom,
startnumber = null,
indexed_status = 1
WHERE place_id = existing[1];
IF array_length(existing, 1) > 1 THEN
DELETE FROM location_property_osmline
WHERE place_id = any(existing[2:]);
END IF;
IF existing IS NULL or array_length(existing, 1) = 0 THEN
INSERT INTO location_property_osmline (osm_id, address, linegeo)
VALUES (way_id, addr, geom);
ELSE
-- Update the interpolation table:
-- The first entry gets the original data, all other entries
-- are removed and will be recreated on indexing.
-- (An interpolation can be split up, if it has more than 2 address nodes)
UPDATE location_property_osmline
SET address = addr,
linegeo = geom,
startnumber = null,
indexed_status = 1
WHERE place_id = existing[1];
IF array_length(existing, 1) > 1 THEN
DELETE FROM location_property_osmline
WHERE place_id = any(existing[2:]);
END IF;
END IF;
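
The first hunk of this file checks whether addr->'interpolation' carries a legal value (odd, even, all, or a single digit 1-9) before re-inserting an interpolation line; anything else causes the existing rows to be deleted instead. A hedged Python restatement of that predicate (helper name is illustrative):

```python
# Python restatement of the validity check on addr->'interpolation' from the
# SQL hunk above ('odd', 'even', 'all', or a single digit, as in the
# SIMILAR TO '[1-9]' pattern). Helper name is illustrative, not Nominatim API.
import re
from typing import Mapping, Optional

def is_legal_interpolation(addr: Optional[Mapping[str, str]]) -> bool:
    if not addr or 'interpolation' not in addr:
        return False
    value = addr['interpolation']
    return value in ('odd', 'even', 'all') or re.fullmatch('[1-9]', value) is not None

assert is_legal_interpolation({'interpolation': 'even'})
assert not is_legal_interpolation({'interpolation': '12-2'})  # the bad value used in the BDD scenarios later in this compare
```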

View File

@@ -916,8 +916,7 @@ BEGIN
LATERAL compute_place_rank(country_code, 'A', class, type,
admin_level, False, null) prank
WHERE osm_type = 'R'
and ((class = 'place' and prank.address_rank = NEW.rank_address)
or (class = 'boundary' and rank_address = NEW.rank_address))
and prank.address_rank = NEW.rank_address
and geometry && NEW.centroid and _ST_Covers(geometry, NEW.centroid)
LIMIT 1
LOOP
@@ -1102,15 +1101,6 @@ BEGIN
END IF;
END IF;
{% if not disable_diff_updates %}
IF OLD.rank_address != NEW.rank_address THEN
-- After a rank shift all addresses containing us must be updated.
UPDATE placex p SET indexed_status = 2 FROM place_addressline pa
WHERE pa.address_place_id = NEW.place_id and p.place_id = pa.place_id
and p.indexed_status = 0 and p.rank_address between 4 and 25;
END IF;
{% endif %}
IF NEW.admin_level = 2
AND NEW.class = 'boundary' AND NEW.type = 'administrative'
AND NEW.country_code IS NOT NULL AND NEW.osm_type = 'R'

View File

@@ -76,8 +76,7 @@ class UpdateReplication:
LOG.warning("Initialising replication updates")
with connect(args.config.get_libpq_dsn()) as conn:
replication.init_replication(conn, base_url=args.config.REPLICATION_URL,
socket_timeout=args.socket_timeout)
replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
if args.update_functions:
LOG.warning("Create functions")
refresh.create_functions(conn, args.config, True, False)
@@ -88,8 +87,7 @@ class UpdateReplication:
from ..tools import replication
with connect(args.config.get_libpq_dsn()) as conn:
return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL,
socket_timeout=args.socket_timeout)
return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL)
def _report_update(self, batchdate: dt.datetime,
@@ -150,7 +148,7 @@ class UpdateReplication:
while True:
with connect(args.config.get_libpq_dsn()) as conn:
start = dt.datetime.now(dt.timezone.utc)
state = replication.update(conn, params, socket_timeout=args.socket_timeout)
state = replication.update(conn, params)
if state is not replication.UpdateState.NO_CHANGES:
status.log_status(conn, start, 'import')
batchdate, _, _ = status.get_status(conn)

View File

@@ -94,8 +94,7 @@ class DBConnection:
# Use a dict to hand in the parameters because async is a reserved
# word in Python3.
self.conn = psycopg2.connect(**{'dsn': self.dsn, 'async': True}) # type: ignore
assert self.conn
self.conn = psycopg2.connect(**{'dsn': self.dsn, 'async': True})
self.wait()
if cursor_factory is not None:
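
The hunk above opens the asynchronous psycopg2 connection with a parameter dict because async is a reserved word in Python 3. A minimal, hedged sketch of that handshake; the wait loop is the generic pattern from the psycopg2 documentation, not Nominatim's DBConnection.wait():

```python
# Minimal sketch of psycopg2's asynchronous connection handshake, assuming a
# reachable DSN. The dict trick mirrors the hunk above; the poll loop is the
# standard psycopg2 recipe, not Nominatim's implementation.
import select
import psycopg2
import psycopg2.extensions as ext

def wait(conn: ext.connection) -> None:
    # Poll until the connection (or a running query) is ready.
    while True:
        state = conn.poll()
        if state == ext.POLL_OK:
            return
        if state == ext.POLL_READ:
            select.select([conn.fileno()], [], [])
        elif state == ext.POLL_WRITE:
            select.select([], [conn.fileno()], [])
        else:
            raise psycopg2.OperationalError(f"bad poll state: {state}")

conn = psycopg2.connect(**{'dsn': 'dbname=nominatim', 'async': True})
wait(conn)
```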

View File

@@ -55,7 +55,7 @@ class Cursor(psycopg2.extras.DictCursor):
if self.rowcount != 1:
raise RuntimeError("Query did not return a single row.")
result = self.fetchone()
result = self.fetchone() # type: ignore[no-untyped-call]
assert result is not None
return result[0]
@@ -131,7 +131,7 @@ class Connection(psycopg2.extensions.connection):
return False
if table is not None:
row = cur.fetchone()
row = cur.fetchone() # type: ignore[no-untyped-call]
if row is None or not isinstance(row[0], str):
return False
return row[0] == table
@@ -189,7 +189,7 @@ def connect(dsn: str) -> ConnectionContext:
try:
conn = psycopg2.connect(dsn, connection_factory=Connection)
ctxmgr = cast(ConnectionContext, contextlib.closing(conn))
ctxmgr.connection = conn
ctxmgr.connection = cast(Connection, conn)
return ctxmgr
except psycopg2.OperationalError as err:
raise UsageError(f"Cannot connect to database: {err}") from err
@@ -236,7 +236,7 @@ def get_pg_env(dsn: str,
"""
env = dict(base_env if base_env is not None else os.environ)
for param, value in psycopg2.extensions.parse_dsn(dsn).items():
for param, value in psycopg2.extensions.parse_dsn(dsn).items(): # type: ignore
if param in _PG_CONNECTION_STRINGS:
env[_PG_CONNECTION_STRINGS[param]] = value
else:
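
The last hunk touches get_pg_env(), which translates libpq DSN parameters into PG* environment variables via psycopg2.extensions.parse_dsn(). A hedged sketch of that idea; the mapping below is a small illustrative subset, not Nominatim's full _PG_CONNECTION_STRINGS table:

```python
# Sketch of the DSN-to-environment translation done by get_pg_env(), assuming
# psycopg2 is installed. The variable mapping is an illustrative subset.
import os
from typing import Dict, Optional
import psycopg2.extensions

_PG_VARS = {'dbname': 'PGDATABASE', 'host': 'PGHOST',
            'port': 'PGPORT', 'user': 'PGUSER', 'password': 'PGPASSWORD'}

def pg_env(dsn: str, base_env: Optional[Dict[str, str]] = None) -> Dict[str, str]:
    env = dict(base_env if base_env is not None else os.environ)
    for param, value in psycopg2.extensions.parse_dsn(dsn).items():
        if param in _PG_VARS:
            env[_PG_VARS[param]] = str(value)
    return env

print(pg_env('dbname=nominatim host=localhost port=5433', base_env={}))
```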

View File

@@ -41,7 +41,4 @@ def get_property(conn: Connection, name: str) -> Optional[str]:
if cur.rowcount == 0:
return None
result = cur.fetchone()
assert result is not None
return cast(Optional[str], result[0])
return cast(Optional[str], cur.fetchone()[0]) # type: ignore[no-untyped-call]

View File

@@ -90,7 +90,7 @@ def get_status(conn: Connection) -> Tuple[Optional[dt.datetime], Optional[int],
if cur.rowcount < 1:
return None, None, None
row = cast(StatusRow, cur.fetchone())
row = cast(StatusRow, cur.fetchone()) # type: ignore[no-untyped-call]
return row['lastimportdate'], row['sequence_id'], row['indexed']

View File

@@ -566,9 +566,8 @@ class ICUNameAnalyzer(AbstractAnalyzer):
result = self._cache.housenumbers.get(norm_name, result)
if result[0] is None:
with self.conn.cursor() as cur:
hid = cur.scalar("SELECT getorcreate_hnr_id(%s)", (norm_name, ))
result = hid, norm_name
cur.execute("SELECT getorcreate_hnr_id(%s)", (norm_name, ))
result = cur.fetchone()[0], norm_name # type: ignore[no-untyped-call]
self._cache.housenumbers[norm_name] = result
else:
# Otherwise use the analyzer to determine the canonical name.
@@ -581,9 +580,9 @@ class ICUNameAnalyzer(AbstractAnalyzer):
variants = analyzer.compute_variants(word_id)
if variants:
with self.conn.cursor() as cur:
hid = cur.scalar("SELECT create_analyzed_hnr_id(%s, %s)",
(word_id, list(variants)))
result = hid, variants[0]
cur.execute("SELECT create_analyzed_hnr_id(%s, %s)",
(word_id, list(variants)))
result = cur.fetchone()[0], variants[0] # type: ignore[no-untyped-call]
self._cache.housenumbers[word_id] = result
return result
@@ -666,7 +665,8 @@ class ICUNameAnalyzer(AbstractAnalyzer):
with self.conn.cursor() as cur:
cur.execute("SELECT * FROM getorcreate_full_word(%s, %s)",
(token_id, variants))
full, part = cast(Tuple[int, List[int]], cur.fetchone())
full, part = cast(Tuple[int, List[int]],
cur.fetchone()) # type: ignore[no-untyped-call]
self._cache.names[token_id] = (full, part)
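
Several hunks in this file replace a cur.scalar(...) helper with a plain execute()/fetchone()[0] pair. A hedged sketch of what such a scalar helper amounts to, based on the Cursor hunk earlier in this compare (raise unless exactly one row comes back, return its first column); it is not a drop-in for Nominatim's Cursor class:

```python
# Sketch of a scalar() convenience on top of psycopg2's DictCursor, as implied
# by the paired lines above. Illustration only.
from typing import Any
import psycopg2.extras

class ScalarCursor(psycopg2.extras.DictCursor):
    def scalar(self, sql: str, args: Any = None) -> Any:
        self.execute(sql, args)
        if self.rowcount != 1:
            raise RuntimeError("Query did not return a single row.")
        row = self.fetchone()
        assert row is not None
        return row[0]

# usage (assuming an open psycopg2 connection `conn`):
#   with conn.cursor(cursor_factory=ScalarCursor) as cur:
#       hid = cur.scalar("SELECT getorcreate_hnr_id(%s)", ('12a',))
```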

View File

@@ -544,9 +544,8 @@ class _TokenInfo:
with conn.cursor() as cur:
cur.execute("SELECT * FROM create_housenumbers(%s)", (simple_list, ))
result = cur.fetchone()
assert result is not None
self.data['hnr_tokens'], self.data['hnr'] = result
self.data['hnr_tokens'], self.data['hnr'] = \
cur.fetchone() # type: ignore[no-untyped-call]
def set_postcode(self, postcode: str) -> None:
@@ -575,7 +574,8 @@ class _TokenInfo:
cur.execute("""SELECT make_keywords(hstore('name' , %s))::text,
word_ids_from_name(%s)::text""",
(name, name))
return cast(Tuple[List[int], List[int]], cur.fetchone())
return cast(Tuple[List[int], List[int]],
cur.fetchone()) # type: ignore[no-untyped-call]
self.data['place_search'], self.data['place_match'] = \
self.cache.places.get(place, _get_place)
@@ -589,7 +589,8 @@ class _TokenInfo:
cur.execute("""SELECT addr_ids_from_name(%s)::text,
word_ids_from_name(%s)::text""",
(name, name))
return cast(Tuple[List[int], List[int]], cur.fetchone())
return cast(Tuple[List[int], List[int]],
cur.fetchone()) # type: ignore[no-untyped-call]
tokens = {}
for key, value in terms:

View File

@@ -49,7 +49,7 @@ def _get_place_info(cursor: Cursor, osm_id: Optional[str],
LOG.fatal("OSM object %s not found in database.", osm_id)
raise UsageError("OSM object not found")
return cast(DictCursorResult, cursor.fetchone())
return cast(DictCursorResult, cursor.fetchone()) # type: ignore[no-untyped-call]
def analyse_indexing(config: Configuration, osm_id: Optional[str] = None,

View File

@@ -268,7 +268,7 @@ def check_database_index_valid(conn: Connection, _: Configuration) -> CheckResul
WHERE pg_index.indisvalid = false
AND pg_index.indexrelid = pg_class.oid""")
broken = [c[0] for c in cur]
broken = list(cur)
if broken:
return CheckState.FAIL, dict(indexes='\n '.join(broken))
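
The paired lines above collect the invalid-index rows either as plain names ([c[0] for c in cur]) or as whole row tuples (list(cur)); only the former joins cleanly into the failure message. A small hedged sketch of that query loop (the SELECT list is assumed, since the hunk only shows the WHERE clause):

```python
# Hedged sketch of collecting the names of invalid indexes as plain strings.
# Assumes an open psycopg2 connection `conn`; the selected column (relname)
# is an assumption based on how the result is used.
def broken_indexes(conn) -> list:
    with conn.cursor() as cur:
        cur.execute("""SELECT relname FROM pg_class, pg_index
                       WHERE pg_index.indisvalid = false
                         AND pg_index.indexrelid = pg_class.oid""")
        # c[0] extracts the index name from each row tuple; list(cur) would
        # keep whole tuples and break the later '\n '.join(...).
        return [c[0] for c in cur]
```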

View File

@@ -95,7 +95,7 @@ def import_osm_data(osm_files: Union[Path, Sequence[Path]],
if not options['flatnode_file'] and options['osm2pgsql_cache'] == 0:
# Make some educated guesses about cache size based on the size
# of the import file and the available memory.
mem = psutil.virtual_memory()
mem = psutil.virtual_memory() # type: ignore[no-untyped-call]
fsize = 0
if isinstance(osm_files, list):
for fname in osm_files:
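
The hunk touches the heuristic that sizes the osm2pgsql cache from available memory and the size of the OSM input files. A hedged sketch of that kind of estimate; the fractions used here are assumptions, not Nominatim's exact formula:

```python
# Illustrative osm2pgsql cache-size heuristic in the spirit of the hunk above,
# based on available RAM (psutil) and the input file sizes. The constants are
# assumptions, not Nominatim's formula.
from pathlib import Path
from typing import Sequence, Union
import psutil

def guess_cache_mb(osm_files: Union[Path, Sequence[Path]]) -> int:
    mem = psutil.virtual_memory()
    if isinstance(osm_files, Path):
        osm_files = [osm_files]
    fsize = sum(f.stat().st_size for f in osm_files)
    # never claim more than half of the currently available memory,
    # and no more than roughly the size of the input data
    return min(mem.available // 2, fsize) // (1024 * 1024)
```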

View File

@@ -7,16 +7,13 @@
"""
Functions for updating a database from a replication source.
"""
from typing import ContextManager, MutableMapping, Any, Generator, cast, Iterator
from typing import ContextManager, MutableMapping, Any, Generator, cast
from contextlib import contextmanager
import datetime as dt
from enum import Enum
import logging
import time
import types
import urllib.request as urlrequest
import requests
from nominatim.db import status
from nominatim.db.connection import Connection
from nominatim.tools.exec_utils import run_osm2pgsql
@@ -25,7 +22,6 @@ from nominatim.errors import UsageError
try:
from osmium.replication.server import ReplicationServer
from osmium import WriteHandler
from osmium import version as pyo_version
except ImportError as exc:
logging.getLogger().critical("pyosmium not installed. Replication functions not available.\n"
"To install pyosmium via pip: pip3 install osmium")
@@ -33,8 +29,7 @@ except ImportError as exc:
LOG = logging.getLogger()
def init_replication(conn: Connection, base_url: str,
socket_timeout: int = 60) -> None:
def init_replication(conn: Connection, base_url: str) -> None:
""" Set up replication for the server at the given base URL.
"""
LOG.info("Using replication source: %s", base_url)
@@ -43,8 +38,9 @@ def init_replication(conn: Connection, base_url: str,
# margin of error to make sure we get all data
date -= dt.timedelta(hours=3)
with _make_replication_server(base_url, socket_timeout) as repl:
seq = repl.timestamp_to_sequence(date)
repl = ReplicationServer(base_url)
seq = repl.timestamp_to_sequence(date)
if seq is None:
LOG.fatal("Cannot reach the configured replication service '%s'.\n"
@@ -57,8 +53,7 @@ def init_replication(conn: Connection, base_url: str,
LOG.warning("Updates initialised at sequence %s (%s)", seq, date)
def check_for_updates(conn: Connection, base_url: str,
socket_timeout: int = 60) -> int:
def check_for_updates(conn: Connection, base_url: str) -> int:
""" Check if new data is available from the replication service at the
given base URL.
"""
@@ -69,8 +64,7 @@ def check_for_updates(conn: Connection, base_url: str,
"Please run 'nominatim replication --init' first.")
return 254
with _make_replication_server(base_url, socket_timeout) as repl:
state = repl.get_state_info()
state = ReplicationServer(base_url).get_state_info()
if state is None:
LOG.error("Cannot get state for URL %s.", base_url)
@@ -92,8 +86,7 @@ class UpdateState(Enum):
NO_CHANGES = 3
def update(conn: Connection, options: MutableMapping[str, Any],
socket_timeout: int = 60) -> UpdateState:
def update(conn: Connection, options: MutableMapping[str, Any]) -> UpdateState:
""" Update database from the next batch of data. Returns the state of
updates according to `UpdateState`.
"""
@@ -121,7 +114,7 @@ def update(conn: Connection, options: MutableMapping[str, Any],
options['import_file'].unlink()
# Read updates into file.
with _make_replication_server(options['base_url'], socket_timeout) as repl:
with _make_replication_server(options['base_url']) as repl:
outhandler = WriteHandler(str(options['import_file']))
endseq = repl.apply_diffs(outhandler, startseq + 1,
max_size=options['max_diff_size'] * 1024)
@@ -143,40 +136,14 @@ def update(conn: Connection, options: MutableMapping[str, Any],
return UpdateState.UP_TO_DATE
def _make_replication_server(url: str, timeout: int) -> ContextManager[ReplicationServer]:
def _make_replication_server(url: str) -> ContextManager[ReplicationServer]:
""" Returns a ReplicationServer in form of a context manager.
Creates a light wrapper around older versions of pyosmium that did
not support the context manager interface.
"""
if hasattr(ReplicationServer, '__enter__'):
# Patches the open_url function for pyosmium >= 3.2
# where the socket timeout is no longer respected.
def patched_open_url(self: ReplicationServer, url: urlrequest.Request) -> Any:
""" Download a resource from the given URL and return a byte sequence
of the content.
"""
headers = {"User-Agent" : f"Nominatim (pyosmium/{pyo_version.pyosmium_release})"}
if self.session is not None:
return self.session.get(url.get_full_url(),
headers=headers, timeout=timeout or None,
stream=True)
@contextmanager
def _get_url_with_session() -> Iterator[requests.Response]:
with requests.Session() as session:
request = session.get(url.get_full_url(),
headers=headers, timeout=timeout or None,
stream=True)
yield request
return _get_url_with_session()
repl = ReplicationServer(url)
setattr(repl, 'open_url', types.MethodType(patched_open_url, repl))
return cast(ContextManager[ReplicationServer], repl)
return cast(ContextManager[ReplicationServer], ReplicationServer(url))
@contextmanager
def get_cm() -> Generator[ReplicationServer, None, None]:
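
The large hunk above removes a wrapper that patched ReplicationServer.open_url() so downloads go through requests with an explicit timeout (working around pyosmium versions that ignore socket timeouts, per the changelog). Without claiming anything about pyosmium's API beyond what the removed code shows, a generic sketch of the pattern, monkeypatching a bound method so HTTP requests always carry a timeout:

```python
# Generic sketch of the approach visible in the removed code: replace an
# object's URL-opening method at runtime so every download uses requests with
# an explicit timeout. Downloader is a stand-in, not pyosmium's ReplicationServer.
import types
import requests

class Downloader:
    def open_url(self, url: str) -> bytes:
        # default implementation without any timeout
        return requests.get(url).content

def patched_open_url(self: Downloader, url: str, timeout: float = 60) -> bytes:
    # same behaviour, but the socket timeout is always respected
    return requests.get(url, timeout=timeout).content

dl = Downloader()
setattr(dl, 'open_url', types.MethodType(patched_open_url, dl))
```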

View File

@@ -25,7 +25,7 @@ from typing import Optional, Tuple
# patch level when cherry-picking the commit with the migration.
#
# Released versions always have a database patch level of 0.
NOMINATIM_VERSION = (4, 1, 2, 0)
NOMINATIM_VERSION = (4, 1, 0, 0)
POSTGRESQL_REQUIRED_VERSION = (9, 6)
POSTGIS_REQUIRED_VERSION = (2, 2)

View File

@@ -403,56 +403,3 @@ Feature: Import of address interpolations
Then results contain
| ID | osm_type | osm_id | type | display_name |
| 0 | node | 1 | house | 0 |
Scenario: Parenting of interpolation with additional tags
Given the grid
| 1 | | | | | |
| | | | | | |
| | 8 | | | 9 | |
| | | | | | |
| 2 | | | | | 3 |
Given the places
| osm | class | type | housenr | addr+street |
| N8 | place | house | 10 | Horiz St |
| N9 | place | house | 16 | Horiz St |
And the places
| osm | class | type | name | geometry |
| W1 | highway | residential | Vert St | 1,2 |
| W2 | highway | residential | Horiz St | 2,3 |
And the places
| osm | class | type | addr+interpolation | addr+inclusion | geometry |
| W10 | place | houses | even | actual | 8,9 |
And the ways
| id | nodes |
| 10 | 8,9 |
When importing
Then placex contains
| object | parent_place_id |
| N8 | W2 |
| N9 | W2 |
And W10 expands to interpolation
| start | end | parent_place_id |
| 12 | 14 | W2 |
Scenario Outline: Bad interpolation values are ignored
Given the grid with origin 1,1
| 1 | | 9 | | 2 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
| N2 | place | house | 6 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | <value> | 1,2 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing
Then W1 expands to no interpolation
Examples:
| value |
| foo |
| x |
| 12-2 |

View File

@@ -391,29 +391,3 @@ Feature: Update of address interpolations
| parent_place_id | start | end |
| W1 | 4 | 6 |
Scenario: Legal interpolation type changed to illegal one
Given the grid
| 1 | | 2 |
| 3 | | 4 |
And the places
| osm | class | type | name | geometry |
| W1 | highway | unclassified | Cloud Street | 1, 2 |
And the ways
| id | nodes |
| 2 | 3,4 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | even | 3,4 |
And the places
| osm | class | type | housenr |
| N3 | place | house | 2 |
| N4 | place | house | 6 |
When importing
Then W2 expands to interpolation
| parent_place_id | start | end |
| W1 | 4 | 4 |
When updating places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | 12-2 | 3,4 |
Then W2 expands to no interpolation

View File

@@ -307,35 +307,3 @@ Feature: Updates of linked places
| object | linked_place_id | rank_address |
| N1 | R1 | 16 |
| R1 | - | 16 |
Scenario: Invalidate surrounding place nodes when place type changes
Given the grid
| 1 | | | 2 |
| | 8 | 9 | |
| 4 | | | 3 |
And the places
| osm | class | type | name | admin | geometry |
| R1 | boundary | administrative | foo | 8 | (1,2,3,4,1) |
And the places
| osm | class | type | name | geometry |
| N1 | place | town | foo | 9 |
| N2 | place | city | bar | 8 |
And the relations
| id | members |
| 1 | N1:label |
When importing
Then placex contains
| object | linked_place_id | rank_address |
| N1 | R1 | 16 |
| R1 | - | 16 |
| N2 | - | 18 |
When updating places
| osm | class | type | name | geometry |
| N1 | place | suburb | foo | 9 |
Then placex contains
| object | linked_place_id | rank_address |
| N1 | R1 | 20 |
| R1 | - | 20 |
| N2 | - | 16 |

View File

@@ -370,6 +370,6 @@ def check_location_property_osmline(context, oid, neg):
DBRow(oid, res, context).assert_row(row, ('start', 'end'))
assert not todo, f"Unmatched lines in table: {list(context.table[i] for i in todo)}"
assert not todo

View File

@@ -37,14 +37,14 @@ class DebugTest extends \PHPUnit\Framework\TestCase
<pre><b>Var1:</b> <i>True</i></pre>
<pre><b>Var2:</b> <i>False</i></pre>
<pre><b>Var3:</b> 0</pre>
<pre><b>Var4:</b> &#039;String&#039;</pre>
<pre><b>Var5:</b> 0 => &#039;one&#039;
1 => &#039;two&#039;
2 => &#039;three&#039;</pre>
<pre><b>Var6:</b> &#039;key&#039; => &#039;value&#039;
&#039;key2&#039; => &#039;value2&#039;</pre>
<pre><b>Var4:</b> 'String'</pre>
<pre><b>Var5:</b> 0 => 'one'
1 => 'two'
2 => 'three'</pre>
<pre><b>Var6:</b> 'key' => 'value'
'key2' => 'value2'</pre>
<pre><b>Var7:</b> me as string</pre>
<pre><b>Var8:</b> &#039;value&#039;, &#039;value2&#039;</pre>
<pre><b>Var8:</b> 'value', 'value2'</pre>
EOT
);
@@ -64,10 +64,10 @@ EOT
public function testDebugArray()
{
$this->expectOutputString(<<<EOT
<pre><b>Arr0:</b> &#039;null&#039;</pre>
<pre><b>Arr1:</b> &#039;key1&#039; => &#039;val1&#039;
&#039;key2&#039; => &#039;val2&#039;
&#039;key3&#039; => &#039;val3&#039;</pre>
<pre><b>Arr0:</b> 'null'</pre>
<pre><b>Arr1:</b> 'key1' => 'val1'
'key2' => 'val2'
'key3' => 'val3'</pre>
EOT
);
@@ -93,12 +93,12 @@ EOT
<th><small>1</small></th>
</tr>
<tr>
<td><pre>&#039;one&#039;</pre></td>
<td><pre>&#039;two&#039;</pre></td>
<td><pre>'one'</pre></td>
<td><pre>'two'</pre></td>
</tr>
<tr>
<td><pre>&#039;three&#039;</pre></td>
<td><pre>&#039;four&#039;</pre></td>
<td><pre>'three'</pre></td>
<td><pre>'four'</pre></td>
</tr>
</table>
<b>Table4:</b>
@@ -109,9 +109,9 @@ EOT
<th><small>key3</small></th>
</tr>
<tr>
<td><pre>&#039;val1&#039;</pre></td>
<td><pre>&#039;val2&#039;</pre></td>
<td><pre>&#039;val3&#039;</pre></td>
<td><pre>'val1'</pre></td>
<td><pre>'val2'</pre></td>
<td><pre>'val3'</pre></td>
</tr>
</table>
@@ -147,18 +147,18 @@ EOT
</tr>
<tr>
<td><pre>group1</pre></td>
<td><pre>&#039;val1&#039;</pre></td>
<td><pre>&#039;val2&#039;</pre></td>
<td><pre>'val1'</pre></td>
<td><pre>'val2'</pre></td>
</tr>
<tr>
<td><pre>group1</pre></td>
<td><pre>&#039;one&#039;</pre></td>
<td><pre>&#039;two&#039;</pre></td>
<td><pre>'one'</pre></td>
<td><pre>'two'</pre></td>
</tr>
<tr>
<td><pre>group2</pre></td>
<td><pre>&#039;val1&#039;</pre></td>
<td><pre>&#039;val2&#039;</pre></td>
<td><pre>'val1'</pre></td>
<td><pre>'val2'</pre></td>
</tr>
</table>
<b>Table4:</b>
@@ -171,15 +171,15 @@ EOT
</tr>
<tr>
<td><pre>group1</pre></td>
<td><pre>&#039;val1&#039;</pre></td>
<td><pre>&#039;val2&#039;</pre></td>
<td><pre>&#039;val3&#039;</pre></td>
<td><pre>'val1'</pre></td>
<td><pre>'val2'</pre></td>
<td><pre>'val3'</pre></td>
</tr>
<tr>
<td><pre>group1</pre></td>
<td><pre>&#039;val1&#039;</pre></td>
<td><pre>&#039;val2&#039;</pre></td>
<td><pre>&#039;val3&#039;</pre></td>
<td><pre>'val1'</pre></td>
<td><pre>'val2'</pre></td>
<td><pre>'val3'</pre></td>
</tr>
</table>

View File

@@ -28,7 +28,7 @@ export DEBIAN_FRONTEND=noninteractive #DOCS:
postgresql-10-postgis-2.4 \
postgresql-contrib-10 postgresql-10-postgis-scripts \
php-cli php-pgsql php-intl libicu-dev python3-pip \
python3-psutil python3-jinja2 python3-yaml python3-icu git
python3-psutil python3-jinja2 python3-yaml python3-icu
# Some of the Python packages that come with Ubuntu 18.04 are too old, so
# install the latest version from pip:
@@ -105,25 +105,18 @@ fi #DOCS:
#
if [ "x$1" == "xyes" ]; then #DOCS: :::sh
cd $USERHOME
git clone --recursive https://github.com/openstreetmap/Nominatim.git
cd Nominatim
wget https://nominatim.org/release/Nominatim-4.1.1.tar.bz2
tar xf Nominatim-4.1.1.tar.bz2
else #DOCS:
cd $USERHOME/Nominatim #DOCS:
fi #DOCS:
# When installing the latest source from github, you also need to
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory,
# then configure and build Nominatim in there:
mkdir $USERHOME/build
cd $USERHOME/build
cmake $USERHOME/Nominatim
cmake $USERHOME/Nominatim-4.1.1
make
sudo make install
@@ -206,7 +199,7 @@ if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
sudo tee /etc/php/7.2/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www]
; Replace the tcp listener and add the unix socket
listen = /var/run/php-fpm-nominatim.sock
listen = /var/run/php7.2-fpm.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
@@ -241,7 +234,7 @@ server {
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args;
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php7.2-fpm.sock;
fastcgi_index index.php;
include fastcgi_params;
}
@@ -251,7 +244,7 @@ server {
if (!-f \$document_root\$fastcgi_script_name) {
return 404;
}
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php7.2-fpm.sock;
fastcgi_index search.php;
include fastcgi.conf;
}

View File

@@ -28,7 +28,7 @@ export DEBIAN_FRONTEND=noninteractive #DOCS:
postgresql-contrib-12 postgresql-12-postgis-3-scripts \
php-cli php-pgsql php-intl libicu-dev python3-dotenv \
python3-psycopg2 python3-psutil python3-jinja2 \
python3-icu python3-datrie python3-yaml git
python3-icu python3-datrie python3-yaml
#
# System Configuration
@@ -99,25 +99,18 @@ fi #DOCS:
#
if [ "x$1" == "xyes" ]; then #DOCS: :::sh
cd $USERHOME
git clone --recursive https://github.com/openstreetmap/Nominatim.git
cd Nominatim
wget https://nominatim.org/release/Nominatim-4.1.1.tar.bz2
tar xf Nominatim-4.1.1.tar.bz2
else #DOCS:
cd $USERHOME/Nominatim #DOCS:
fi #DOCS:
# When installing the latest source from github, you also need to
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory,
# then configure and build Nominatim in there:
mkdir $USERHOME/build
cd $USERHOME/build
cmake $USERHOME/Nominatim
cmake $USERHOME/Nominatim-4.1.1
make
sudo make install
@@ -197,7 +190,7 @@ if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
sudo tee /etc/php/7.4/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www]
; Replace the tcp listener and add the unix socket
listen = /var/run/php-fpm-nominatim.sock
listen = /var/run/php7.4-fpm.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
@@ -232,7 +225,7 @@ server {
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args;
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php7.4-fpm.sock;
fastcgi_index index.php;
include fastcgi_params;
}
@@ -242,7 +235,7 @@ server {
if (!-f \$document_root\$fastcgi_script_name) {
return 404;
}
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php7.4-fpm.sock;
fastcgi_index search.php;
include fastcgi.conf;
}
@@ -250,9 +243,9 @@ server {
EOF_NGINX_CONF
#DOCS:```
# If you have some errors, make sure that php-fpm-nominatim.sock is well under
# If you have some errors, make sure that php7.4-fpm.sock is well under
# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
# to /var/run/php/php-fpm-nominatim.sock.
# to /var/run/php/php7.4-fpm.sock.
#
# Enable the configuration and restart Nginx
#

View File

@@ -28,7 +28,7 @@ export DEBIAN_FRONTEND=noninteractive #DOCS:
postgresql-contrib-14 postgresql-14-postgis-3-scripts \
php-cli php-pgsql php-intl libicu-dev python3-dotenv \
python3-psycopg2 python3-psutil python3-jinja2 \
python3-icu python3-datrie git
python3-icu python3-datrie
#
# System Configuration
@@ -99,25 +99,18 @@ fi #DOCS:
#
if [ "x$1" == "xyes" ]; then #DOCS: :::sh
cd $USERHOME
git clone --recursive https://github.com/openstreetmap/Nominatim.git
cd Nominatim
wget https://nominatim.org/release/Nominatim-4.1.1.tar.bz2
tar xf Nominatim-4.1.1.tar.bz2
else #DOCS:
cd $USERHOME/Nominatim #DOCS:
fi #DOCS:
# When installing the latest source from github, you also need to
# download the country grid:
if [ ! -f data/country_osm_grid.sql.gz ]; then #DOCS: :::sh
wget -O data/country_osm_grid.sql.gz https://www.nominatim.org/data/country_grid.sql.gz
fi #DOCS:
# The code must be built in a separate directory. Create this directory,
# then configure and build Nominatim in there:
mkdir $USERHOME/build
cd $USERHOME/build
cmake $USERHOME/Nominatim
cmake $USERHOME/Nominatim-4.1.1
make
sudo make install
@@ -197,7 +190,7 @@ if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
sudo tee /etc/php/8.1/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www]
; Replace the tcp listener and add the unix socket
listen = /var/run/php-fpm-nominatim.sock
listen = /var/run/php8.1-fpm.sock
; Ensure that the daemon runs as the correct user
listen.owner = www-data
@@ -232,7 +225,7 @@ server {
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args;
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php8.1-fpm.sock;
fastcgi_index index.php;
include fastcgi_params;
}
@@ -242,7 +235,7 @@ server {
if (!-f \$document_root\$fastcgi_script_name) {
return 404;
}
fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_pass unix:/var/run/php7.4-fpm.sock;
fastcgi_index search.php;
include fastcgi.conf;
}
@@ -250,9 +243,9 @@ server {
EOF_NGINX_CONF
#DOCS:```
# If you have some errors, make sure that php-fpm-nominatim.sock is well under
# If you have some errors, make sure that php8.1-fpm.sock is well under
# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
# to /var/run/php/php-fpm-nominatim.sock.
# to /var/run/php/php8.1-fpm.sock.
#
# Enable the configuration and restart Nginx
#