Compare commits

...

26 Commits

Author SHA1 Message Date
Sarah Hoffmann
63852d2252 prepare release 4.1.2 2023-02-20 17:52:00 +01:00
Sarah Hoffmann
5c3691fb64 harmonize flags for PHP's htmlspecialchars 2023-02-20 17:44:59 +01:00
Sarah Hoffmann
6d94af3b5a adapt PHP tests for debug output 2023-02-20 17:44:32 +01:00
Sarah Hoffmann
a1592faf5f properly encode special HTML characters in debug mode 2023-02-20 17:44:29 +01:00
Sarah Hoffmann
ec533f6a1a prepare release 4.1.1 2022-11-19 16:15:47 +01:00
Sarah Hoffmann
9f5adabd12 update osm2pgsql to 1.7.1 2022-11-19 15:54:27 +01:00
Sarah Hoffmann
3d9c33192b drop illegal values for addr:interpolation on update 2022-11-19 15:53:29 +01:00
Sarah Hoffmann
05863ae5ca correctly handle special term + name combination
Special terms with operator name usually appear in combination with the
name. The current penalties only took name + special term into account
not special term + name.

Fixes #2876.
2022-11-19 15:52:19 +01:00
Sarah Hoffmann
a856c56450 fix type issues with calls to pyosmium 2022-11-19 15:51:09 +01:00
Marc Tobias
aa2e4e411b Tiger install doc: add -refresh website- step 2022-11-19 15:51:02 +01:00
Sarah Hoffmann
5cdeaac967 add types-requests dependency 2022-11-19 15:50:45 +01:00
Sarah Hoffmann
6a7b2b823a respect socket timeout also in other replication functions 2022-11-19 15:50:38 +01:00
Sarah Hoffmann
2dd8433ab6 fix timeout use for replication timeout
The timeout parameter is no longer taken into account since
pyosmium switched to the requests library. This adds the parameter
back.
2022-11-19 15:50:30 +01:00
Marc Tobias
951f92f665 update those github action packages still using node12 2022-11-19 15:49:58 +01:00
Sarah Hoffmann
9d009c7967 ignore interpolations without parent on reverse search
If no parent can be found for an interpolation, there is most
likely a data error involved. So don't show these interpolations
in reverse search results.
2022-11-19 15:49:17 +01:00
marc tobias
442e8fb411 Install scripts: remove version from /var/run/php-fpm filenames 2022-11-19 15:48:52 +01:00
Sarah Hoffmann
6a5bbdfae0 actions: pin pyicu to 2.9 2022-11-19 15:48:30 +01:00
marc tobias
6bac238760 Documentation: remove year from TIGER filename 2022-11-19 15:47:05 +01:00
Sarah Hoffmann
185c3cf7a8 mypy: fix new warnings due to external type updates 2022-11-19 15:45:20 +01:00
Mauricio Scheffer
ae5687539a docs: fix links to rank docs 2022-11-19 15:44:58 +01:00
Sarah Hoffmann
d71be2b60a ignore irrelevant extra tags on address interpolations
When deciding if an address interpolation has address information, only
look for addr:street and addr:place. If they are not there go looking
for the address on the address nodes. Ignores irrelevant tags like
addr:inclusion.

Fixes #2797.
2022-11-19 15:44:20 +01:00
Sarah Hoffmann
d910f52221 more invalidations when boundary changes rank
When a boundary or place changes its address rank, all places where
it participates as address need to be potentially reindexed.
Also use the computed rank when testing place nodes against
boundaries. Boundaries are computed earlier.

Fixes #2794.
2022-11-19 15:43:08 +01:00
Sarah Hoffmann
f48a37deea fix base number of returned results
The intent was to always search for at least 10 results.

Improves on #882.
2022-11-19 15:40:43 +01:00
Sarah Hoffmann
c08e3849b8 adapt to new type annotations from typeshed
Some more functions from psycopg are now properly annotated.
No ignoring necessary anymore.
2022-11-19 15:40:01 +01:00
Sarah Hoffmann
ec92167514 docs: add types-psutil requirement 2022-11-19 15:39:47 +01:00
Sarah Hoffmann
5a05608b34 remove mypy ignore for psutil.virtual_memory()
Now available in typeshed.
2022-11-19 15:39:09 +01:00
35 changed files with 345 additions and 138 deletions

View File

@@ -23,7 +23,7 @@ runs:
run: | run: |
sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev sudo apt-get install -y -qq libboost-system-dev libboost-filesystem-dev libexpat1-dev zlib1g-dev libbz2-dev libpq-dev libproj-dev libicu-dev
if [ "x$UBUNTUVER" == "x18" ]; then if [ "x$UBUNTUVER" == "x18" ]; then
pip3 install python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu osmium PyYAML==5.1 datrie pip3 install python-dotenv psycopg2==2.7.7 jinja2==2.8 psutil==5.4.2 pyicu==2.9 osmium PyYAML==5.1 datrie
else else
sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml sudo apt-get install -y -qq python3-icu python3-datrie python3-pyosmium python3-jinja2 python3-psutil python3-psycopg2 python3-dotenv python3-yaml
fi fi

View File

@@ -7,11 +7,11 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v3
with: with:
submodules: true submodules: true
- uses: actions/cache@v2 - uses: actions/cache@v3
with: with:
path: | path: |
data/country_osm_grid.sql.gz data/country_osm_grid.sql.gz
@@ -27,7 +27,7 @@ jobs:
mv nominatim-src.tar.bz2 Nominatim mv nominatim-src.tar.bz2 Nominatim
- name: 'Upload Artifact' - name: 'Upload Artifact'
uses: actions/upload-artifact@v2 uses: actions/upload-artifact@v3
with: with:
name: full-source name: full-source
path: nominatim-src.tar.bz2 path: nominatim-src.tar.bz2
@@ -58,7 +58,7 @@ jobs:
runs-on: ubuntu-${{ matrix.ubuntu }}.04 runs-on: ubuntu-${{ matrix.ubuntu }}.04
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v3
with: with:
name: full-source name: full-source
@@ -72,7 +72,7 @@ jobs:
tools: phpunit, phpcs, composer tools: phpunit, phpcs, composer
ini-values: opcache.jit=disable ini-values: opcache.jit=disable
- uses: actions/setup-python@v2 - uses: actions/setup-python@v4
with: with:
python-version: 3.6 python-version: 3.6
if: matrix.ubuntu == 18 if: matrix.ubuntu == 18
@@ -99,7 +99,7 @@ jobs:
if: matrix.ubuntu == 22 if: matrix.ubuntu == 22
- name: Install latest pylint/mypy - name: Install latest pylint/mypy
run: pip3 install -U pylint mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil typing-extensions run: pip3 install -U pylint mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil types-requests typing-extensions
- name: PHP linting - name: PHP linting
run: phpcs --report-width=120 . run: phpcs --report-width=120 .
@@ -136,7 +136,7 @@ jobs:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
steps: steps:
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v3
with: with:
name: full-source name: full-source
@@ -231,7 +231,7 @@ jobs:
OS: ${{ matrix.name }} OS: ${{ matrix.name }}
INSTALL_MODE: ${{ matrix.install_mode }} INSTALL_MODE: ${{ matrix.install_mode }}
- uses: actions/download-artifact@v2 - uses: actions/download-artifact@v3
with: with:
name: full-source name: full-source
path: /home/nominatim path: /home/nominatim

View File

@@ -20,7 +20,7 @@ project(nominatim)
set(NOMINATIM_VERSION_MAJOR 4) set(NOMINATIM_VERSION_MAJOR 4)
set(NOMINATIM_VERSION_MINOR 1) set(NOMINATIM_VERSION_MINOR 1)
set(NOMINATIM_VERSION_PATCH 0) set(NOMINATIM_VERSION_PATCH 2)
set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}") set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}")

View File

@@ -1,3 +1,21 @@
4.1.2
* fix XSS vulnerability in debug view
4.1.1
* fix crash on update when addr:interpolation receives an illegal value
* fix minimum number of retrieved results to be at least 10
* fix search for combinations of special term + name (e.g Hotel Bellevue)
* do not return interpolations without a parent street on reverse search
* improve invalidation of linked places on updates
* fix address parsing for interpolation lines
* make sure socket timeouts are respected during replication
(working around a bug in some versions of pyosmium)
* update bundled osm2pgsql to 1.7.1
* typing fixes to work with latest type annotations from typeshed
* smaller improvements to documentation (thanks to @mausch)
4.1.0 4.1.0
* switch to ICU tokenizer as default * switch to ICU tokenizer as default
@@ -34,6 +52,10 @@
* add setup instructions for updates and systemd * add setup instructions for updates and systemd
* drop support for PostgreSQL 9.5 * drop support for PostgreSQL 9.5
4.0.2
* fix XSS vulnerability in debug view
4.0.1 4.0.1
* fix initialisation error in replication script * fix initialisation error in replication script
@@ -72,6 +94,10 @@
* add testing of installation scripts via CI * add testing of installation scripts via CI
* drop support for Python < 3.6 and Postgresql < 9.5 * drop support for Python < 3.6 and Postgresql < 9.5
3.7.3
* fix XSS vulnerability in debug view
3.7.2 3.7.2
* fix database check for reverse-only imports * fix database check for reverse-only imports

View File

@@ -99,7 +99,7 @@ Unix socket instead, change the pool configuration
``` ini ``` ini
; Replace the tcp listener and add the unix socket ; Replace the tcp listener and add the unix socket
listen = /var/run/php-fpm.sock listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user ; Ensure that the daemon runs as the correct user
listen.owner = www-data listen.owner = www-data
@@ -121,7 +121,7 @@ location @php {
fastcgi_param SCRIPT_FILENAME "$document_root$uri.php"; fastcgi_param SCRIPT_FILENAME "$document_root$uri.php";
fastcgi_param PATH_TRANSLATED "$document_root$uri.php"; fastcgi_param PATH_TRANSLATED "$document_root$uri.php";
fastcgi_param QUERY_STRING $args; fastcgi_param QUERY_STRING $args;
fastcgi_pass unix:/var/run/php-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php; fastcgi_index index.php;
include fastcgi_params; include fastcgi_params;
} }
@@ -131,7 +131,7 @@ location ~ [^/]\.php(/|$) {
if (!-f $document_root$fastcgi_script_name) { if (!-f $document_root$fastcgi_script_name) {
return 404; return 404;
} }
fastcgi_pass unix:/var/run/php-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php; fastcgi_index search.php;
include fastcgi.conf; include fastcgi.conf;
} }

View File

@@ -211,8 +211,8 @@ be more than one. The attributes of that element contain:
* `ref` - content of `ref` tag if it exists * `ref` - content of `ref` tag if it exists
* `lat`, `lon` - latitude and longitude of the centroid of the object * `lat`, `lon` - latitude and longitude of the centroid of the object
* `boundingbox` - comma-separated list of corner coordinates ([see notes](#boundingbox)) * `boundingbox` - comma-separated list of corner coordinates ([see notes](#boundingbox))
* `place_rank` - class [search rank](../develop/Ranking#search-rank) * `place_rank` - class [search rank](../customize/Ranking#search-rank)
* `address_rank` - place [address rank](../develop/Ranking#address-rank) * `address_rank` - place [address rank](../customize/Ranking#address-rank)
* `display_name` - full comma-separated address * `display_name` - full comma-separated address
* `class`, `type` - key and value of the main OSM tag * `class`, `type` - key and value of the main OSM tag
* `importance` - computed importance rank * `importance` - computed importance rank

View File

@@ -5,22 +5,22 @@ address set to complement the OSM house number data in the US. You can add
TIGER data to your own Nominatim instance by following these steps. The TIGER data to your own Nominatim instance by following these steps. The
entire US adds about 10GB to your database. entire US adds about 10GB to your database.
1. Get preprocessed TIGER 2021 data: 1. Get preprocessed TIGER data:
cd $PROJECT_DIR cd $PROJECT_DIR
wget https://nominatim.org/data/tiger2021-nominatim-preprocessed.csv.tar.gz wget https://nominatim.org/data/tiger-nominatim-preprocessed-latest.csv.tar.gz
2. Import the data into your Nominatim database: 2. Import the data into your Nominatim database:
nominatim add-data --tiger-data tiger2021-nominatim-preprocessed.csv.tar.gz nominatim add-data --tiger-data tiger-nominatim-preprocessed-latest.csv.tar.gz
3. Enable use of the Tiger data in your `.env` by adding: 3. Enable use of the Tiger data in your existing `.env` file by adding:
echo NOMINATIM_USE_US_TIGER_DATA=yes >> .env echo NOMINATIM_USE_US_TIGER_DATA=yes >> .env
4. Apply the new settings: 4. Apply the new settings:
nominatim refresh --functions nominatim refresh --functions --website
See the [TIGER-data project](https://github.com/osm-search/TIGER-data) for more See the [TIGER-data project](https://github.com/osm-search/TIGER-data) for more

View File

@@ -55,8 +55,8 @@ To install all necessary packages run:
sudo apt install php-cgi phpunit php-codesniffer \ sudo apt install php-cgi phpunit php-codesniffer \
python3-pip python3-setuptools python3-dev python3-pip python3-setuptools python3-dev
pip3 install --user behave mkdocs mkdocstrings pytest \ pip3 install --user behave mkdocs mkdocstrings pytest pylint \
pylint mypy types-PyYAML types-jinja2 types-psycopg2 mypy types-PyYAML types-jinja2 types-psycopg2 types-psutil
``` ```
The `mkdocs` executable will be located in `.local/bin`. You may have to add The `mkdocs` executable will be located in `.local/bin`. You may have to add

View File

@@ -135,7 +135,7 @@ class Debug
public static function printSQL($sSQL) public static function printSQL($sSQL)
{ {
echo '<p><tt><font color="#aaa">'.$sSQL.'</font></tt></p>'."\n"; echo '<p><tt><font color="#aaa">'.htmlspecialchars($sSQL, ENT_QUOTES | ENT_SUBSTITUTE | ENT_HTML401).'</font></tt></p>'."\n";
} }
private static function outputVar($mVar, $sPreNL) private static function outputVar($mVar, $sPreNL)
@@ -178,11 +178,12 @@ class Debug
} }
if (is_string($mVar)) { if (is_string($mVar)) {
echo "'$mVar'"; $sOut = "'$mVar'";
return strlen($mVar) + 2; } else {
$sOut = (string)$mVar;
} }
echo (string)$mVar; echo htmlspecialchars($sOut, ENT_QUOTES | ENT_SUBSTITUTE | ENT_HTML401);
return strlen((string)$mVar); return strlen($sOut);
} }
} }

View File

@@ -103,7 +103,7 @@ class Geocode
} }
$this->iFinalLimit = $iLimit; $this->iFinalLimit = $iLimit;
$this->iLimit = $iLimit + min($iLimit, 10); $this->iLimit = $iLimit + max($iLimit, 10);
} }
public function setFeatureType($sFeatureType) public function setFeatureType($sFeatureType)

View File

@@ -71,7 +71,8 @@ class ReverseGeocode
$sSQL .= ' ST_Distance(linegeo,'.$sPointSQL.') as distance'; $sSQL .= ' ST_Distance(linegeo,'.$sPointSQL.') as distance';
$sSQL .= ' FROM location_property_osmline'; $sSQL .= ' FROM location_property_osmline';
$sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', linegeo, '.$fSearchDiam.')'; $sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', linegeo, '.$fSearchDiam.')';
$sSQL .= ' and indexed_status = 0 and startnumber is not NULL '; $sSQL .= ' and indexed_status = 0 and startnumber is not NULL ';
$sSQL .= ' and parent_place_id != 0';
$sSQL .= ' ORDER BY distance ASC limit 1'; $sSQL .= ' ORDER BY distance ASC limit 1';
Debug::printSQL($sSQL); Debug::printSQL($sSQL);

View File

@@ -69,19 +69,31 @@ class SpecialTerm
*/ */
public function extendSearch($oSearch, $oPosition) public function extendSearch($oSearch, $oPosition)
{ {
$iSearchCost = 2; $iSearchCost = 0;
$iOp = $this->iOperator; $iOp = $this->iOperator;
if ($iOp == \Nominatim\Operator::NONE) { if ($iOp == \Nominatim\Operator::NONE) {
if ($oSearch->hasName() || $oSearch->getContext()->isBoundedSearch()) { if ($oPosition->isFirstToken()
|| $oSearch->hasName()
|| $oSearch->getContext()->isBoundedSearch()
) {
$iOp = \Nominatim\Operator::NAME; $iOp = \Nominatim\Operator::NAME;
$iSearchCost += 3;
} else { } else {
$iOp = \Nominatim\Operator::NEAR; $iOp = \Nominatim\Operator::NEAR;
$iSearchCost += 2; $iSearchCost += 4;
if (!$oPosition->isFirstToken()) {
$iSearchCost += 3;
}
} }
} elseif (!$oPosition->isFirstToken() && !$oPosition->isLastToken()) { } elseif ($oPosition->isFirstToken()) {
$iSearchCost += 2; $iSearchCost += 2;
} elseif ($oPosition->isLastToken()) {
$iSearchCost += 4;
} else {
$iSearchCost += 6;
} }
if ($oSearch->hasHousenumber()) { if ($oSearch->hasHousenumber()) {
$iSearchCost ++; $iSearchCost ++;
} }

View File

@@ -15,7 +15,7 @@ DECLARE
location RECORD; location RECORD;
waynodes BIGINT[]; waynodes BIGINT[];
BEGIN BEGIN
IF akeys(in_address) != ARRAY['interpolation'] THEN IF in_address ? 'street' or in_address ? 'place' THEN
RETURN in_address; RETURN in_address;
END IF; END IF;
@@ -82,27 +82,35 @@ CREATE OR REPLACE FUNCTION reinsert_interpolation(way_id BIGINT, addr HSTORE,
DECLARE DECLARE
existing BIGINT[]; existing BIGINT[];
BEGIN BEGIN
-- Get the existing entry from the interpolation table. IF addr is NULL OR NOT addr ? 'interpolation'
SELECT array_agg(place_id) INTO existing OR NOT (addr->'interpolation' in ('odd', 'even', 'all')
FROM location_property_osmline WHERE osm_id = way_id; or addr->'interpolation' similar to '[1-9]')
THEN
IF existing IS NULL or array_length(existing, 1) = 0 THEN -- the new interpolation is illegal, simply remove existing entries
INSERT INTO location_property_osmline (osm_id, address, linegeo) DELETE FROM location_property_osmline WHERE osm_id = way_id;
VALUES (way_id, addr, geom);
ELSE ELSE
-- Update the interpolation table: -- Get the existing entry from the interpolation table.
-- The first entry gets the original data, all other entries SELECT array_agg(place_id) INTO existing
-- are removed and will be recreated on indexing. FROM location_property_osmline WHERE osm_id = way_id;
-- (An interpolation can be split up, if it has more than 2 address nodes)
UPDATE location_property_osmline IF existing IS NULL or array_length(existing, 1) = 0 THEN
SET address = addr, INSERT INTO location_property_osmline (osm_id, address, linegeo)
linegeo = geom, VALUES (way_id, addr, geom);
startnumber = null, ELSE
indexed_status = 1 -- Update the interpolation table:
WHERE place_id = existing[1]; -- The first entry gets the original data, all other entries
IF array_length(existing, 1) > 1 THEN -- are removed and will be recreated on indexing.
DELETE FROM location_property_osmline -- (An interpolation can be split up, if it has more than 2 address nodes)
WHERE place_id = any(existing[2:]); UPDATE location_property_osmline
SET address = addr,
linegeo = geom,
startnumber = null,
indexed_status = 1
WHERE place_id = existing[1];
IF array_length(existing, 1) > 1 THEN
DELETE FROM location_property_osmline
WHERE place_id = any(existing[2:]);
END IF;
END IF; END IF;
END IF; END IF;

View File

@@ -916,7 +916,8 @@ BEGIN
LATERAL compute_place_rank(country_code, 'A', class, type, LATERAL compute_place_rank(country_code, 'A', class, type,
admin_level, False, null) prank admin_level, False, null) prank
WHERE osm_type = 'R' WHERE osm_type = 'R'
and prank.address_rank = NEW.rank_address and ((class = 'place' and prank.address_rank = NEW.rank_address)
or (class = 'boundary' and rank_address = NEW.rank_address))
and geometry && NEW.centroid and _ST_Covers(geometry, NEW.centroid) and geometry && NEW.centroid and _ST_Covers(geometry, NEW.centroid)
LIMIT 1 LIMIT 1
LOOP LOOP
@@ -1101,6 +1102,15 @@ BEGIN
END IF; END IF;
END IF; END IF;
{% if not disable_diff_updates %}
IF OLD.rank_address != NEW.rank_address THEN
-- After a rank shift all addresses containing us must be updated.
UPDATE placex p SET indexed_status = 2 FROM place_addressline pa
WHERE pa.address_place_id = NEW.place_id and p.place_id = pa.place_id
and p.indexed_status = 0 and p.rank_address between 4 and 25;
END IF;
{% endif %}
IF NEW.admin_level = 2 IF NEW.admin_level = 2
AND NEW.class = 'boundary' AND NEW.type = 'administrative' AND NEW.class = 'boundary' AND NEW.type = 'administrative'
AND NEW.country_code IS NOT NULL AND NEW.osm_type = 'R' AND NEW.country_code IS NOT NULL AND NEW.osm_type = 'R'

View File

@@ -76,7 +76,8 @@ class UpdateReplication:
LOG.warning("Initialising replication updates") LOG.warning("Initialising replication updates")
with connect(args.config.get_libpq_dsn()) as conn: with connect(args.config.get_libpq_dsn()) as conn:
replication.init_replication(conn, base_url=args.config.REPLICATION_URL) replication.init_replication(conn, base_url=args.config.REPLICATION_URL,
socket_timeout=args.socket_timeout)
if args.update_functions: if args.update_functions:
LOG.warning("Create functions") LOG.warning("Create functions")
refresh.create_functions(conn, args.config, True, False) refresh.create_functions(conn, args.config, True, False)
@@ -87,7 +88,8 @@ class UpdateReplication:
from ..tools import replication from ..tools import replication
with connect(args.config.get_libpq_dsn()) as conn: with connect(args.config.get_libpq_dsn()) as conn:
return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL) return replication.check_for_updates(conn, base_url=args.config.REPLICATION_URL,
socket_timeout=args.socket_timeout)
def _report_update(self, batchdate: dt.datetime, def _report_update(self, batchdate: dt.datetime,
@@ -148,7 +150,7 @@ class UpdateReplication:
while True: while True:
with connect(args.config.get_libpq_dsn()) as conn: with connect(args.config.get_libpq_dsn()) as conn:
start = dt.datetime.now(dt.timezone.utc) start = dt.datetime.now(dt.timezone.utc)
state = replication.update(conn, params) state = replication.update(conn, params, socket_timeout=args.socket_timeout)
if state is not replication.UpdateState.NO_CHANGES: if state is not replication.UpdateState.NO_CHANGES:
status.log_status(conn, start, 'import') status.log_status(conn, start, 'import')
batchdate, _, _ = status.get_status(conn) batchdate, _, _ = status.get_status(conn)

View File

@@ -94,7 +94,8 @@ class DBConnection:
# Use a dict to hand in the parameters because async is a reserved # Use a dict to hand in the parameters because async is a reserved
# word in Python3. # word in Python3.
self.conn = psycopg2.connect(**{'dsn': self.dsn, 'async': True}) self.conn = psycopg2.connect(**{'dsn': self.dsn, 'async': True}) # type: ignore
assert self.conn
self.wait() self.wait()
if cursor_factory is not None: if cursor_factory is not None:

View File

@@ -55,7 +55,7 @@ class Cursor(psycopg2.extras.DictCursor):
if self.rowcount != 1: if self.rowcount != 1:
raise RuntimeError("Query did not return a single row.") raise RuntimeError("Query did not return a single row.")
result = self.fetchone() # type: ignore[no-untyped-call] result = self.fetchone()
assert result is not None assert result is not None
return result[0] return result[0]
@@ -131,7 +131,7 @@ class Connection(psycopg2.extensions.connection):
return False return False
if table is not None: if table is not None:
row = cur.fetchone() # type: ignore[no-untyped-call] row = cur.fetchone()
if row is None or not isinstance(row[0], str): if row is None or not isinstance(row[0], str):
return False return False
return row[0] == table return row[0] == table
@@ -189,7 +189,7 @@ def connect(dsn: str) -> ConnectionContext:
try: try:
conn = psycopg2.connect(dsn, connection_factory=Connection) conn = psycopg2.connect(dsn, connection_factory=Connection)
ctxmgr = cast(ConnectionContext, contextlib.closing(conn)) ctxmgr = cast(ConnectionContext, contextlib.closing(conn))
ctxmgr.connection = cast(Connection, conn) ctxmgr.connection = conn
return ctxmgr return ctxmgr
except psycopg2.OperationalError as err: except psycopg2.OperationalError as err:
raise UsageError(f"Cannot connect to database: {err}") from err raise UsageError(f"Cannot connect to database: {err}") from err
@@ -236,7 +236,7 @@ def get_pg_env(dsn: str,
""" """
env = dict(base_env if base_env is not None else os.environ) env = dict(base_env if base_env is not None else os.environ)
for param, value in psycopg2.extensions.parse_dsn(dsn).items(): # type: ignore for param, value in psycopg2.extensions.parse_dsn(dsn).items():
if param in _PG_CONNECTION_STRINGS: if param in _PG_CONNECTION_STRINGS:
env[_PG_CONNECTION_STRINGS[param]] = value env[_PG_CONNECTION_STRINGS[param]] = value
else: else:

View File

@@ -41,4 +41,7 @@ def get_property(conn: Connection, name: str) -> Optional[str]:
if cur.rowcount == 0: if cur.rowcount == 0:
return None return None
return cast(Optional[str], cur.fetchone()[0]) # type: ignore[no-untyped-call] result = cur.fetchone()
assert result is not None
return cast(Optional[str], result[0])

View File

@@ -90,7 +90,7 @@ def get_status(conn: Connection) -> Tuple[Optional[dt.datetime], Optional[int],
if cur.rowcount < 1: if cur.rowcount < 1:
return None, None, None return None, None, None
row = cast(StatusRow, cur.fetchone()) # type: ignore[no-untyped-call] row = cast(StatusRow, cur.fetchone())
return row['lastimportdate'], row['sequence_id'], row['indexed'] return row['lastimportdate'], row['sequence_id'], row['indexed']

View File

@@ -566,8 +566,9 @@ class ICUNameAnalyzer(AbstractAnalyzer):
result = self._cache.housenumbers.get(norm_name, result) result = self._cache.housenumbers.get(norm_name, result)
if result[0] is None: if result[0] is None:
with self.conn.cursor() as cur: with self.conn.cursor() as cur:
cur.execute("SELECT getorcreate_hnr_id(%s)", (norm_name, )) hid = cur.scalar("SELECT getorcreate_hnr_id(%s)", (norm_name, ))
result = cur.fetchone()[0], norm_name # type: ignore[no-untyped-call]
result = hid, norm_name
self._cache.housenumbers[norm_name] = result self._cache.housenumbers[norm_name] = result
else: else:
# Otherwise use the analyzer to determine the canonical name. # Otherwise use the analyzer to determine the canonical name.
@@ -580,9 +581,9 @@ class ICUNameAnalyzer(AbstractAnalyzer):
variants = analyzer.compute_variants(word_id) variants = analyzer.compute_variants(word_id)
if variants: if variants:
with self.conn.cursor() as cur: with self.conn.cursor() as cur:
cur.execute("SELECT create_analyzed_hnr_id(%s, %s)", hid = cur.scalar("SELECT create_analyzed_hnr_id(%s, %s)",
(word_id, list(variants))) (word_id, list(variants)))
result = cur.fetchone()[0], variants[0] # type: ignore[no-untyped-call] result = hid, variants[0]
self._cache.housenumbers[word_id] = result self._cache.housenumbers[word_id] = result
return result return result
@@ -665,8 +666,7 @@ class ICUNameAnalyzer(AbstractAnalyzer):
with self.conn.cursor() as cur: with self.conn.cursor() as cur:
cur.execute("SELECT * FROM getorcreate_full_word(%s, %s)", cur.execute("SELECT * FROM getorcreate_full_word(%s, %s)",
(token_id, variants)) (token_id, variants))
full, part = cast(Tuple[int, List[int]], full, part = cast(Tuple[int, List[int]], cur.fetchone())
cur.fetchone()) # type: ignore[no-untyped-call]
self._cache.names[token_id] = (full, part) self._cache.names[token_id] = (full, part)

View File

@@ -544,8 +544,9 @@ class _TokenInfo:
with conn.cursor() as cur: with conn.cursor() as cur:
cur.execute("SELECT * FROM create_housenumbers(%s)", (simple_list, )) cur.execute("SELECT * FROM create_housenumbers(%s)", (simple_list, ))
self.data['hnr_tokens'], self.data['hnr'] = \ result = cur.fetchone()
cur.fetchone() # type: ignore[no-untyped-call] assert result is not None
self.data['hnr_tokens'], self.data['hnr'] = result
def set_postcode(self, postcode: str) -> None: def set_postcode(self, postcode: str) -> None:
@@ -574,8 +575,7 @@ class _TokenInfo:
cur.execute("""SELECT make_keywords(hstore('name' , %s))::text, cur.execute("""SELECT make_keywords(hstore('name' , %s))::text,
word_ids_from_name(%s)::text""", word_ids_from_name(%s)::text""",
(name, name)) (name, name))
return cast(Tuple[List[int], List[int]], return cast(Tuple[List[int], List[int]], cur.fetchone())
cur.fetchone()) # type: ignore[no-untyped-call]
self.data['place_search'], self.data['place_match'] = \ self.data['place_search'], self.data['place_match'] = \
self.cache.places.get(place, _get_place) self.cache.places.get(place, _get_place)
@@ -589,8 +589,7 @@ class _TokenInfo:
cur.execute("""SELECT addr_ids_from_name(%s)::text, cur.execute("""SELECT addr_ids_from_name(%s)::text,
word_ids_from_name(%s)::text""", word_ids_from_name(%s)::text""",
(name, name)) (name, name))
return cast(Tuple[List[int], List[int]], return cast(Tuple[List[int], List[int]], cur.fetchone())
cur.fetchone()) # type: ignore[no-untyped-call]
tokens = {} tokens = {}
for key, value in terms: for key, value in terms:

View File

@@ -49,7 +49,7 @@ def _get_place_info(cursor: Cursor, osm_id: Optional[str],
LOG.fatal("OSM object %s not found in database.", osm_id) LOG.fatal("OSM object %s not found in database.", osm_id)
raise UsageError("OSM object not found") raise UsageError("OSM object not found")
return cast(DictCursorResult, cursor.fetchone()) # type: ignore[no-untyped-call] return cast(DictCursorResult, cursor.fetchone())
def analyse_indexing(config: Configuration, osm_id: Optional[str] = None, def analyse_indexing(config: Configuration, osm_id: Optional[str] = None,

View File

@@ -268,7 +268,7 @@ def check_database_index_valid(conn: Connection, _: Configuration) -> CheckResul
WHERE pg_index.indisvalid = false WHERE pg_index.indisvalid = false
AND pg_index.indexrelid = pg_class.oid""") AND pg_index.indexrelid = pg_class.oid""")
broken = list(cur) broken = [c[0] for c in cur]
if broken: if broken:
return CheckState.FAIL, dict(indexes='\n '.join(broken)) return CheckState.FAIL, dict(indexes='\n '.join(broken))

View File

@@ -95,7 +95,7 @@ def import_osm_data(osm_files: Union[Path, Sequence[Path]],
if not options['flatnode_file'] and options['osm2pgsql_cache'] == 0: if not options['flatnode_file'] and options['osm2pgsql_cache'] == 0:
# Make some educated guesses about cache size based on the size # Make some educated guesses about cache size based on the size
# of the import file and the available memory. # of the import file and the available memory.
mem = psutil.virtual_memory() # type: ignore[no-untyped-call] mem = psutil.virtual_memory()
fsize = 0 fsize = 0
if isinstance(osm_files, list): if isinstance(osm_files, list):
for fname in osm_files: for fname in osm_files:

View File

@@ -7,13 +7,16 @@
""" """
Functions for updating a database from a replication source. Functions for updating a database from a replication source.
""" """
from typing import ContextManager, MutableMapping, Any, Generator, cast from typing import ContextManager, MutableMapping, Any, Generator, cast, Iterator
from contextlib import contextmanager from contextlib import contextmanager
import datetime as dt import datetime as dt
from enum import Enum from enum import Enum
import logging import logging
import time import time
import types
import urllib.request as urlrequest
import requests
from nominatim.db import status from nominatim.db import status
from nominatim.db.connection import Connection from nominatim.db.connection import Connection
from nominatim.tools.exec_utils import run_osm2pgsql from nominatim.tools.exec_utils import run_osm2pgsql
@@ -22,6 +25,7 @@ from nominatim.errors import UsageError
try: try:
from osmium.replication.server import ReplicationServer from osmium.replication.server import ReplicationServer
from osmium import WriteHandler from osmium import WriteHandler
from osmium import version as pyo_version
except ImportError as exc: except ImportError as exc:
logging.getLogger().critical("pyosmium not installed. Replication functions not available.\n" logging.getLogger().critical("pyosmium not installed. Replication functions not available.\n"
"To install pyosmium via pip: pip3 install osmium") "To install pyosmium via pip: pip3 install osmium")
@@ -29,7 +33,8 @@ except ImportError as exc:
LOG = logging.getLogger() LOG = logging.getLogger()
def init_replication(conn: Connection, base_url: str) -> None: def init_replication(conn: Connection, base_url: str,
socket_timeout: int = 60) -> None:
""" Set up replication for the server at the given base URL. """ Set up replication for the server at the given base URL.
""" """
LOG.info("Using replication source: %s", base_url) LOG.info("Using replication source: %s", base_url)
@@ -38,9 +43,8 @@ def init_replication(conn: Connection, base_url: str) -> None:
# margin of error to make sure we get all data # margin of error to make sure we get all data
date -= dt.timedelta(hours=3) date -= dt.timedelta(hours=3)
repl = ReplicationServer(base_url) with _make_replication_server(base_url, socket_timeout) as repl:
seq = repl.timestamp_to_sequence(date)
seq = repl.timestamp_to_sequence(date)
if seq is None: if seq is None:
LOG.fatal("Cannot reach the configured replication service '%s'.\n" LOG.fatal("Cannot reach the configured replication service '%s'.\n"
@@ -53,7 +57,8 @@ def init_replication(conn: Connection, base_url: str) -> None:
LOG.warning("Updates initialised at sequence %s (%s)", seq, date) LOG.warning("Updates initialised at sequence %s (%s)", seq, date)
def check_for_updates(conn: Connection, base_url: str) -> int: def check_for_updates(conn: Connection, base_url: str,
socket_timeout: int = 60) -> int:
""" Check if new data is available from the replication service at the """ Check if new data is available from the replication service at the
given base URL. given base URL.
""" """
@@ -64,7 +69,8 @@ def check_for_updates(conn: Connection, base_url: str) -> int:
"Please run 'nominatim replication --init' first.") "Please run 'nominatim replication --init' first.")
return 254 return 254
state = ReplicationServer(base_url).get_state_info() with _make_replication_server(base_url, socket_timeout) as repl:
state = repl.get_state_info()
if state is None: if state is None:
LOG.error("Cannot get state for URL %s.", base_url) LOG.error("Cannot get state for URL %s.", base_url)
@@ -86,7 +92,8 @@ class UpdateState(Enum):
NO_CHANGES = 3 NO_CHANGES = 3
def update(conn: Connection, options: MutableMapping[str, Any]) -> UpdateState: def update(conn: Connection, options: MutableMapping[str, Any],
socket_timeout: int = 60) -> UpdateState:
""" Update database from the next batch of data. Returns the state of """ Update database from the next batch of data. Returns the state of
updates according to `UpdateState`. updates according to `UpdateState`.
""" """
@@ -114,7 +121,7 @@ def update(conn: Connection, options: MutableMapping[str, Any]) -> UpdateState:
options['import_file'].unlink() options['import_file'].unlink()
# Read updates into file. # Read updates into file.
with _make_replication_server(options['base_url']) as repl: with _make_replication_server(options['base_url'], socket_timeout) as repl:
outhandler = WriteHandler(str(options['import_file'])) outhandler = WriteHandler(str(options['import_file']))
endseq = repl.apply_diffs(outhandler, startseq + 1, endseq = repl.apply_diffs(outhandler, startseq + 1,
max_size=options['max_diff_size'] * 1024) max_size=options['max_diff_size'] * 1024)
@@ -136,14 +143,40 @@ def update(conn: Connection, options: MutableMapping[str, Any]) -> UpdateState:
return UpdateState.UP_TO_DATE return UpdateState.UP_TO_DATE
def _make_replication_server(url: str) -> ContextManager[ReplicationServer]: def _make_replication_server(url: str, timeout: int) -> ContextManager[ReplicationServer]:
""" Returns a ReplicationServer in form of a context manager. """ Returns a ReplicationServer in form of a context manager.
Creates a light wrapper around older versions of pyosmium that did Creates a light wrapper around older versions of pyosmium that did
not support the context manager interface. not support the context manager interface.
""" """
if hasattr(ReplicationServer, '__enter__'): if hasattr(ReplicationServer, '__enter__'):
return cast(ContextManager[ReplicationServer], ReplicationServer(url)) # Patches the open_url function for pyosmium >= 3.2
# where the socket timeout is no longer respected.
def patched_open_url(self: ReplicationServer, url: urlrequest.Request) -> Any:
""" Download a resource from the given URL and return a byte sequence
of the content.
"""
headers = {"User-Agent" : f"Nominatim (pyosmium/{pyo_version.pyosmium_release})"}
if self.session is not None:
return self.session.get(url.get_full_url(),
headers=headers, timeout=timeout or None,
stream=True)
@contextmanager
def _get_url_with_session() -> Iterator[requests.Response]:
with requests.Session() as session:
request = session.get(url.get_full_url(),
headers=headers, timeout=timeout or None,
stream=True)
yield request
return _get_url_with_session()
repl = ReplicationServer(url)
setattr(repl, 'open_url', types.MethodType(patched_open_url, repl))
return cast(ContextManager[ReplicationServer], repl)
@contextmanager @contextmanager
def get_cm() -> Generator[ReplicationServer, None, None]: def get_cm() -> Generator[ReplicationServer, None, None]:

View File

@@ -25,7 +25,7 @@ from typing import Optional, Tuple
# patch level when cherry-picking the commit with the migration. # patch level when cherry-picking the commit with the migration.
# #
# Released versions always have a database patch level of 0. # Released versions always have a database patch level of 0.
NOMINATIM_VERSION = (4, 1, 0, 0) NOMINATIM_VERSION = (4, 1, 2, 0)
POSTGRESQL_REQUIRED_VERSION = (9, 6) POSTGRESQL_REQUIRED_VERSION = (9, 6)
POSTGIS_REQUIRED_VERSION = (2, 2) POSTGIS_REQUIRED_VERSION = (2, 2)

View File

@@ -403,3 +403,56 @@ Feature: Import of address interpolations
Then results contain Then results contain
| ID | osm_type | osm_id | type | display_name | | ID | osm_type | osm_id | type | display_name |
| 0 | node | 1 | house | 0 | | 0 | node | 1 | house | 0 |
Scenario: Parenting of interpolation with additional tags
Given the grid
| 1 | | | | | |
| | | | | | |
| | 8 | | | 9 | |
| | | | | | |
| 2 | | | | | 3 |
Given the places
| osm | class | type | housenr | addr+street |
| N8 | place | house | 10 | Horiz St |
| N9 | place | house | 16 | Horiz St |
And the places
| osm | class | type | name | geometry |
| W1 | highway | residential | Vert St | 1,2 |
| W2 | highway | residential | Horiz St | 2,3 |
And the places
| osm | class | type | addr+interpolation | addr+inclusion | geometry |
| W10 | place | houses | even | actual | 8,9 |
And the ways
| id | nodes |
| 10 | 8,9 |
When importing
Then placex contains
| object | parent_place_id |
| N8 | W2 |
| N9 | W2 |
And W10 expands to interpolation
| start | end | parent_place_id |
| 12 | 14 | W2 |
Scenario Outline: Bad interpolation values are ignored
Given the grid with origin 1,1
| 1 | | 9 | | 2 |
Given the places
| osm | class | type | housenr |
| N1 | place | house | 2 |
| N2 | place | house | 6 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W1 | place | houses | <value> | 1,2 |
And the ways
| id | nodes |
| 1 | 1,2 |
When importing
Then W1 expands to no interpolation
Examples:
| value |
| foo |
| x |
| 12-2 |

View File

@@ -391,3 +391,29 @@ Feature: Update of address interpolations
| parent_place_id | start | end | | parent_place_id | start | end |
| W1 | 4 | 6 | | W1 | 4 | 6 |
Scenario: Legal interpolation type changed to illegal one
Given the grid
| 1 | | 2 |
| 3 | | 4 |
And the places
| osm | class | type | name | geometry |
| W1 | highway | unclassified | Cloud Street | 1, 2 |
And the ways
| id | nodes |
| 2 | 3,4 |
And the places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | even | 3,4 |
And the places
| osm | class | type | housenr |
| N3 | place | house | 2 |
| N4 | place | house | 6 |
When importing
Then W2 expands to interpolation
| parent_place_id | start | end |
| W1 | 4 | 4 |
When updating places
| osm | class | type | addr+interpolation | geometry |
| W2 | place | houses | 12-2 | 3,4 |
Then W2 expands to no interpolation

View File

@@ -307,3 +307,35 @@ Feature: Updates of linked places
| object | linked_place_id | rank_address | | object | linked_place_id | rank_address |
| N1 | R1 | 16 | | N1 | R1 | 16 |
| R1 | - | 16 | | R1 | - | 16 |
Scenario: Invalidate surrounding place nodes when place type changes
Given the grid
| 1 | | | 2 |
| | 8 | 9 | |
| 4 | | | 3 |
And the places
| osm | class | type | name | admin | geometry |
| R1 | boundary | administrative | foo | 8 | (1,2,3,4,1) |
And the places
| osm | class | type | name | geometry |
| N1 | place | town | foo | 9 |
| N2 | place | city | bar | 8 |
And the relations
| id | members |
| 1 | N1:label |
When importing
Then placex contains
| object | linked_place_id | rank_address |
| N1 | R1 | 16 |
| R1 | - | 16 |
| N2 | - | 18 |
When updating places
| osm | class | type | name | geometry |
| N1 | place | suburb | foo | 9 |
Then placex contains
| object | linked_place_id | rank_address |
| N1 | R1 | 20 |
| R1 | - | 20 |
| N2 | - | 16 |

View File

@@ -370,6 +370,6 @@ def check_location_property_osmline(context, oid, neg):
DBRow(oid, res, context).assert_row(row, ('start', 'end')) DBRow(oid, res, context).assert_row(row, ('start', 'end'))
assert not todo assert not todo, f"Unmatched lines in table: {list(context.table[i] for i in todo)}"

View File

@@ -37,14 +37,14 @@ class DebugTest extends \PHPUnit\Framework\TestCase
<pre><b>Var1:</b> <i>True</i></pre> <pre><b>Var1:</b> <i>True</i></pre>
<pre><b>Var2:</b> <i>False</i></pre> <pre><b>Var2:</b> <i>False</i></pre>
<pre><b>Var3:</b> 0</pre> <pre><b>Var3:</b> 0</pre>
<pre><b>Var4:</b> 'String'</pre> <pre><b>Var4:</b> &#039;String&#039;</pre>
<pre><b>Var5:</b> 0 => 'one' <pre><b>Var5:</b> 0 => &#039;one&#039;
1 => 'two' 1 => &#039;two&#039;
2 => 'three'</pre> 2 => &#039;three&#039;</pre>
<pre><b>Var6:</b> 'key' => 'value' <pre><b>Var6:</b> &#039;key&#039; => &#039;value&#039;
'key2' => 'value2'</pre> &#039;key2&#039; => &#039;value2&#039;</pre>
<pre><b>Var7:</b> me as string</pre> <pre><b>Var7:</b> me as string</pre>
<pre><b>Var8:</b> 'value', 'value2'</pre> <pre><b>Var8:</b> &#039;value&#039;, &#039;value2&#039;</pre>
EOT EOT
); );
@@ -64,10 +64,10 @@ EOT
public function testDebugArray() public function testDebugArray()
{ {
$this->expectOutputString(<<<EOT $this->expectOutputString(<<<EOT
<pre><b>Arr0:</b> 'null'</pre> <pre><b>Arr0:</b> &#039;null&#039;</pre>
<pre><b>Arr1:</b> 'key1' => 'val1' <pre><b>Arr1:</b> &#039;key1&#039; => &#039;val1&#039;
'key2' => 'val2' &#039;key2&#039; => &#039;val2&#039;
'key3' => 'val3'</pre> &#039;key3&#039; => &#039;val3&#039;</pre>
EOT EOT
); );
@@ -93,12 +93,12 @@ EOT
<th><small>1</small></th> <th><small>1</small></th>
</tr> </tr>
<tr> <tr>
<td><pre>'one'</pre></td> <td><pre>&#039;one&#039;</pre></td>
<td><pre>'two'</pre></td> <td><pre>&#039;two&#039;</pre></td>
</tr> </tr>
<tr> <tr>
<td><pre>'three'</pre></td> <td><pre>&#039;three&#039;</pre></td>
<td><pre>'four'</pre></td> <td><pre>&#039;four&#039;</pre></td>
</tr> </tr>
</table> </table>
<b>Table4:</b> <b>Table4:</b>
@@ -109,9 +109,9 @@ EOT
<th><small>key3</small></th> <th><small>key3</small></th>
</tr> </tr>
<tr> <tr>
<td><pre>'val1'</pre></td> <td><pre>&#039;val1&#039;</pre></td>
<td><pre>'val2'</pre></td> <td><pre>&#039;val2&#039;</pre></td>
<td><pre>'val3'</pre></td> <td><pre>&#039;val3&#039;</pre></td>
</tr> </tr>
</table> </table>
@@ -147,18 +147,18 @@ EOT
</tr> </tr>
<tr> <tr>
<td><pre>group1</pre></td> <td><pre>group1</pre></td>
<td><pre>'val1'</pre></td> <td><pre>&#039;val1&#039;</pre></td>
<td><pre>'val2'</pre></td> <td><pre>&#039;val2&#039;</pre></td>
</tr> </tr>
<tr> <tr>
<td><pre>group1</pre></td> <td><pre>group1</pre></td>
<td><pre>'one'</pre></td> <td><pre>&#039;one&#039;</pre></td>
<td><pre>'two'</pre></td> <td><pre>&#039;two&#039;</pre></td>
</tr> </tr>
<tr> <tr>
<td><pre>group2</pre></td> <td><pre>group2</pre></td>
<td><pre>'val1'</pre></td> <td><pre>&#039;val1&#039;</pre></td>
<td><pre>'val2'</pre></td> <td><pre>&#039;val2&#039;</pre></td>
</tr> </tr>
</table> </table>
<b>Table4:</b> <b>Table4:</b>
@@ -171,15 +171,15 @@ EOT
</tr> </tr>
<tr> <tr>
<td><pre>group1</pre></td> <td><pre>group1</pre></td>
<td><pre>'val1'</pre></td> <td><pre>&#039;val1&#039;</pre></td>
<td><pre>'val2'</pre></td> <td><pre>&#039;val2&#039;</pre></td>
<td><pre>'val3'</pre></td> <td><pre>&#039;val3&#039;</pre></td>
</tr> </tr>
<tr> <tr>
<td><pre>group1</pre></td> <td><pre>group1</pre></td>
<td><pre>'val1'</pre></td> <td><pre>&#039;val1&#039;</pre></td>
<td><pre>'val2'</pre></td> <td><pre>&#039;val2&#039;</pre></td>
<td><pre>'val3'</pre></td> <td><pre>&#039;val3&#039;</pre></td>
</tr> </tr>
</table> </table>

View File

@@ -206,7 +206,7 @@ if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
sudo tee /etc/php/7.2/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF sudo tee /etc/php/7.2/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www] [www]
; Replace the tcp listener and add the unix socket ; Replace the tcp listener and add the unix socket
listen = /var/run/php7.2-fpm.sock listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user ; Ensure that the daemon runs as the correct user
listen.owner = www-data listen.owner = www-data
@@ -241,7 +241,7 @@ server {
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php"; fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php"; fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args; fastcgi_param QUERY_STRING \$args;
fastcgi_pass unix:/var/run/php7.2-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php; fastcgi_index index.php;
include fastcgi_params; include fastcgi_params;
} }
@@ -251,7 +251,7 @@ server {
if (!-f \$document_root\$fastcgi_script_name) { if (!-f \$document_root\$fastcgi_script_name) {
return 404; return 404;
} }
fastcgi_pass unix:/var/run/php7.2-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php; fastcgi_index search.php;
include fastcgi.conf; include fastcgi.conf;
} }

View File

@@ -197,7 +197,7 @@ if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
sudo tee /etc/php/7.4/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF sudo tee /etc/php/7.4/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www] [www]
; Replace the tcp listener and add the unix socket ; Replace the tcp listener and add the unix socket
listen = /var/run/php7.4-fpm.sock listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user ; Ensure that the daemon runs as the correct user
listen.owner = www-data listen.owner = www-data
@@ -232,7 +232,7 @@ server {
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php"; fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php"; fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args; fastcgi_param QUERY_STRING \$args;
fastcgi_pass unix:/var/run/php7.4-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php; fastcgi_index index.php;
include fastcgi_params; include fastcgi_params;
} }
@@ -242,7 +242,7 @@ server {
if (!-f \$document_root\$fastcgi_script_name) { if (!-f \$document_root\$fastcgi_script_name) {
return 404; return 404;
} }
fastcgi_pass unix:/var/run/php7.4-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php; fastcgi_index search.php;
include fastcgi.conf; include fastcgi.conf;
} }
@@ -250,9 +250,9 @@ server {
EOF_NGINX_CONF EOF_NGINX_CONF
#DOCS:``` #DOCS:```
# If you have some errors, make sure that php7.4-fpm.sock is well under # If you have some errors, make sure that php-fpm-nominatim.sock is well under
# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration # /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
# to /var/run/php/php7.4-fpm.sock. # to /var/run/php/php-fpm-nominatim.sock.
# #
# Enable the configuration and restart Nginx # Enable the configuration and restart Nginx
# #

View File

@@ -197,7 +197,7 @@ if [ "x$2" == "xinstall-nginx" ]; then #DOCS:
sudo tee /etc/php/8.1/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF sudo tee /etc/php/8.1/fpm/pool.d/www.conf << EOF_PHP_FPM_CONF
[www] [www]
; Replace the tcp listener and add the unix socket ; Replace the tcp listener and add the unix socket
listen = /var/run/php8.1-fpm.sock listen = /var/run/php-fpm-nominatim.sock
; Ensure that the daemon runs as the correct user ; Ensure that the daemon runs as the correct user
listen.owner = www-data listen.owner = www-data
@@ -232,7 +232,7 @@ server {
fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php"; fastcgi_param SCRIPT_FILENAME "\$document_root\$uri.php";
fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php"; fastcgi_param PATH_TRANSLATED "\$document_root\$uri.php";
fastcgi_param QUERY_STRING \$args; fastcgi_param QUERY_STRING \$args;
fastcgi_pass unix:/var/run/php8.1-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index index.php; fastcgi_index index.php;
include fastcgi_params; include fastcgi_params;
} }
@@ -242,7 +242,7 @@ server {
if (!-f \$document_root\$fastcgi_script_name) { if (!-f \$document_root\$fastcgi_script_name) {
return 404; return 404;
} }
fastcgi_pass unix:/var/run/php7.4-fpm.sock; fastcgi_pass unix:/var/run/php-fpm-nominatim.sock;
fastcgi_index search.php; fastcgi_index search.php;
include fastcgi.conf; include fastcgi.conf;
} }
@@ -250,9 +250,9 @@ server {
EOF_NGINX_CONF EOF_NGINX_CONF
#DOCS:``` #DOCS:```
# If you have some errors, make sure that php8.1-fpm.sock is well under # If you have some errors, make sure that php-fpm-nominatim.sock is well under
# /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration # /var/run/ and not under /var/run/php. Otherwise change the Nginx configuration
# to /var/run/php/php8.1-fpm.sock. # to /var/run/php/php-fpm-nominatim.sock.
# #
# Enable the configuration and restart Nginx # Enable the configuration and restart Nginx
# #