Mirror of https://github.com/osm-search/Nominatim.git (synced 2026-02-14 10:27:57 +00:00)
Compare commits
431 Commits
Commit list: 431 commits, newest 168c2e222e through oldest 043f9d8298.

.travis.yml
@@ -1,11 +1,15 @@
|
||||
---
|
||||
sudo: required
|
||||
dist: xenial
|
||||
os: linux
|
||||
dist: bionic
|
||||
language: python
|
||||
python:
|
||||
- "3.6"
|
||||
addons:
|
||||
postgresql: "9.6"
|
||||
apt:
|
||||
packages:
|
||||
postgresql-server-dev-9.6
|
||||
postgresql-client-9.6
|
||||
git:
|
||||
depth: 3
|
||||
env:
|
||||
@@ -30,5 +34,6 @@ script:
|
||||
- if [[ $TEST_SUITE == "monaco" ]]; then wget --no-verbose --output-document=../data/monaco.osm.pbf http://download.geofabrik.de/europe/monaco-latest.osm.pbf; fi
|
||||
- if [[ $TEST_SUITE == "monaco" ]]; then /usr/bin/env php ./utils/setup.php --osm-file ../data/monaco.osm.pbf --osm2pgsql-cache 1000 --all 2>&1 | grep -v 'ETA (seconds)'; fi
|
||||
- if [[ $TEST_SUITE == "monaco" ]]; then /usr/bin/env php ./utils/specialphrases.php --wiki-import | psql -d test_api_nominatim >/dev/null; fi
|
||||
- if [[ $TEST_SUITE == "monaco" ]]; then /usr/bin/env php ./utils/check_import_finished.php; fi
|
||||
notifications:
|
||||
email: false
|
||||
|
||||
213 CMakeLists.txt
@@ -19,7 +19,7 @@ list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake")
|
||||
project(nominatim)
|
||||
|
||||
set(NOMINATIM_VERSION_MAJOR 3)
|
||||
set(NOMINATIM_VERSION_MINOR 3)
|
||||
set(NOMINATIM_VERSION_MINOR 5)
|
||||
set(NOMINATIM_VERSION_PATCH 1)
|
||||
|
||||
set(NOMINATIM_VERSION "${NOMINATIM_VERSION_MAJOR}.${NOMINATIM_VERSION_MINOR}.${NOMINATIM_VERSION_PATCH}")
|
||||
@@ -28,56 +28,57 @@ add_definitions(-DNOMINATIM_VERSION="${NOMINATIM_VERSION}")
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# Find external dependencies
|
||||
#
|
||||
# Configuration
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
set(BUILD_TESTS off CACHE BOOL "Build test suite" FORCE)
|
||||
set(WITH_LUA off CACHE BOOL "Build with lua support" FORCE)
|
||||
set(BUILD_IMPORTER on CACHE BOOL "Build everything for importing/updating the database")
|
||||
set(BUILD_API on CACHE BOOL "Build everything for the API server")
|
||||
set(BUILD_MODULE on CACHE BOOL "Build PostgreSQL module")
|
||||
set(BUILD_TESTS on CACHE BOOL "Build test suite")
|
||||
set(BUILD_DOCS on CACHE BOOL "Build documentation")
|
||||
set(BUILD_OSM2PGSQL on CACHE BOOL "Build osm2pgsql (expert only)")
|
||||
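The cache options above let the importer, API frontend, PostgreSQL module, bundled osm2pgsql, tests and documentation be switched on and off individually. As a minimal sketch (not taken from this diff), an API-only machine might configure the build like this:

```
# Hypothetical configuration for a machine that only serves the API:
# skip the importer, the PostgreSQL module, the bundled osm2pgsql, tests and docs.
mkdir build && cd build
cmake -DBUILD_IMPORTER=off -DBUILD_MODULE=off -DBUILD_OSM2PGSQL=off \
      -DBUILD_TESTS=off -DBUILD_DOCS=off ..
make
```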
|
||||
if (NOT EXISTS "${CMAKE_SOURCE_DIR}/osm2pgsql/CMakeLists.txt")
|
||||
message(FATAL_ERROR "The osm2pgsql directory is empty.\
|
||||
Did you forget to check out Nominatim recursively?\
|
||||
\nTry updating submodules with: git submodule update --init")
|
||||
endif()
|
||||
add_subdirectory(osm2pgsql)
|
||||
#-----------------------------------------------------------------------------
|
||||
# osm2pgsql (imports/updates only)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
find_package(Threads REQUIRED)
|
||||
|
||||
unset(PostgreSQL_TYPE_INCLUDE_DIR CACHE)
|
||||
set(PostgreSQL_TYPE_INCLUDE_DIR "/usr/include/")
|
||||
find_package(PostgreSQL REQUIRED)
|
||||
include_directories(${PostgreSQL_INCLUDE_DIRS})
|
||||
link_directories(${PostgreSQL_LIBRARY_DIRS})
|
||||
|
||||
find_program(PYOSMIUM pyosmium-get-changes)
|
||||
if (NOT EXISTS "${PYOSMIUM}")
|
||||
set(PYOSMIUM_PATH "")
|
||||
message(WARNING "pyosmium-get-changes not found (required for updates)")
|
||||
else()
|
||||
set(PYOSMIUM_PATH "${PYOSMIUM}")
|
||||
message(STATUS "Using pyosmium-get-changes at ${PYOSMIUM_PATH}")
|
||||
if (BUILD_IMPORTER AND BUILD_OSM2PGSQL)
|
||||
if (NOT EXISTS "${CMAKE_SOURCE_DIR}/osm2pgsql/CMakeLists.txt")
|
||||
message(FATAL_ERROR "The osm2pgsql directory is empty.\
|
||||
Did you forget to check out Nominatim recursively?\
|
||||
\nTry updating submodules with: git submodule update --init")
|
||||
endif()
|
||||
set(BUILD_TESTS_SAVED "${BUILD_TESTS}")
|
||||
set(BUILD_TESTS off)
|
||||
set(WITH_LUA off CACHE BOOL "")
|
||||
add_subdirectory(osm2pgsql)
|
||||
set(BUILD_TESTS ${BUILD_TESTS_SAVED})
|
||||
endif()
|
||||
|
||||
|
||||
find_program(PG_CONFIG pg_config)
|
||||
execute_process(COMMAND ${PG_CONFIG} --pgxs
|
||||
OUTPUT_VARIABLE PGXS
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
#-----------------------------------------------------------------------------
|
||||
# python and pyosmium (imports/updates only)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
if (NOT EXISTS "${PGXS}")
|
||||
message(FATAL_ERROR "Postgresql server package not found.")
|
||||
if (BUILD_IMPORTER)
|
||||
find_package(PythonInterp 3)
|
||||
|
||||
find_program(PYOSMIUM pyosmium-get-changes)
|
||||
if (NOT EXISTS "${PYOSMIUM}")
|
||||
set(PYOSMIUM_PATH "")
|
||||
message(WARNING "pyosmium-get-changes not found (required for updates)")
|
||||
else()
|
||||
set(PYOSMIUM_PATH "${PYOSMIUM}")
|
||||
message(STATUS "Using pyosmium-get-changes at ${PYOSMIUM_PATH}")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
find_package(ZLIB REQUIRED)
|
||||
|
||||
find_package(BZip2 REQUIRED)
|
||||
|
||||
find_package(LibXml2 REQUIRED)
|
||||
include_directories(${LIBXML2_INCLUDE_DIR})
|
||||
#-----------------------------------------------------------------------------
|
||||
# PHP
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
# Set the PHP binary variable from the command line (which takes precedence) or auto-detect it
|
||||
|
||||
if (NOT PHP_BIN)
|
||||
find_program (PHP_BIN php)
|
||||
endif()
|
||||
@@ -88,81 +89,103 @@ endif()
|
||||
message (STATUS "Using PHP binary " ${PHP_BIN})
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# Setup settings and paths
|
||||
#
|
||||
# import scripts and utilities (importer only)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
set(WEBSITESCRIPTS
|
||||
website/deletable.php
|
||||
website/details.php
|
||||
website/hierarchy.php
|
||||
website/lookup.php
|
||||
website/polygons.php
|
||||
website/reverse.php
|
||||
website/search.php
|
||||
website/status.php
|
||||
)
|
||||
if (BUILD_IMPORTER)
|
||||
set(CUSTOMSCRIPTS
|
||||
utils/check_import_finished.php
|
||||
utils/country_languages.php
|
||||
utils/importWikipedia.php
|
||||
utils/export.php
|
||||
utils/query.php
|
||||
utils/setup.php
|
||||
utils/specialphrases.php
|
||||
utils/update.php
|
||||
utils/warm.php
|
||||
)
|
||||
|
||||
set(CUSTOMSCRIPTS
|
||||
utils/country_languages.php
|
||||
utils/importWikipedia.php
|
||||
utils/export.php
|
||||
utils/query.php
|
||||
utils/setup.php
|
||||
utils/specialphrases.php
|
||||
utils/update.php
|
||||
utils/warm.php
|
||||
foreach (script_source ${CUSTOMSCRIPTS})
|
||||
configure_file(${PROJECT_SOURCE_DIR}/cmake/script.tmpl
|
||||
${PROJECT_BINARY_DIR}/${script_source})
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# webserver scripts (API only)
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
if (BUILD_API)
|
||||
set(WEBSITESCRIPTS
|
||||
website/deletable.php
|
||||
website/details.php
|
||||
website/hierarchy.php
|
||||
website/lookup.php
|
||||
website/polygons.php
|
||||
website/reverse.php
|
||||
website/search.php
|
||||
website/status.php
|
||||
)
|
||||
|
||||
foreach (script_source ${CUSTOMSCRIPTS})
|
||||
configure_file(${PROJECT_SOURCE_DIR}/cmake/script.tmpl
|
||||
${PROJECT_BINARY_DIR}/${script_source})
|
||||
endforeach()
|
||||
foreach (script_source ${WEBSITESCRIPTS})
|
||||
configure_file(${PROJECT_SOURCE_DIR}/cmake/website.tmpl
|
||||
${PROJECT_BINARY_DIR}/${script_source})
|
||||
endforeach()
|
||||
|
||||
foreach (script_source ${WEBSITESCRIPTS})
|
||||
configure_file(${PROJECT_SOURCE_DIR}/cmake/website.tmpl
|
||||
${PROJECT_BINARY_DIR}/${script_source})
|
||||
endforeach()
|
||||
set(WEBPATHS css images js)
|
||||
|
||||
foreach (wp ${WEBPATHS})
|
||||
execute_process(
|
||||
COMMAND ln -sf ${PROJECT_SOURCE_DIR}/website/${wp} ${PROJECT_BINARY_DIR}/website/
|
||||
)
|
||||
endforeach()
|
||||
endif()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# default settings
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
configure_file(${PROJECT_SOURCE_DIR}/settings/defaults.php
|
||||
${PROJECT_BINARY_DIR}/settings/settings.php)
|
||||
|
||||
set(WEBPATHS css images js)
|
||||
|
||||
foreach (wp ${WEBPATHS})
|
||||
execute_process(
|
||||
COMMAND ln -sf ${PROJECT_SOURCE_DIR}/website/${wp} ${PROJECT_BINARY_DIR}/website/
|
||||
)
|
||||
endforeach()
|
||||
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# Tests
|
||||
#
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
include(CTest)
|
||||
if (BUILD_TESTS)
|
||||
include(CTest)
|
||||
|
||||
set(TEST_BDD db osm2pgsql api)
|
||||
set(TEST_BDD db osm2pgsql api)
|
||||
|
||||
foreach (test ${TEST_BDD})
|
||||
add_test(NAME bdd_${test}
|
||||
COMMAND lettuce features/${test}
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/tests)
|
||||
set_tests_properties(bdd_${test}
|
||||
PROPERTIES ENVIRONMENT "NOMINATIM_DIR=${PROJECT_BINARY_DIR}")
|
||||
endforeach()
|
||||
foreach (test ${TEST_BDD})
|
||||
add_test(NAME bdd_${test}
|
||||
COMMAND behave ${test}
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/test/bdd)
|
||||
set_tests_properties(bdd_${test}
|
||||
PROPERTIES ENVIRONMENT "NOMINATIM_DIR=${PROJECT_BINARY_DIR}")
|
||||
endforeach()
|
||||
|
||||
add_test(NAME php
|
||||
COMMAND phpunit ./
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/tests-php)
|
||||
add_test(NAME php
|
||||
COMMAND phpunit ./
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}/test/php)
|
||||
|
||||
add_test(NAME phpcs
|
||||
COMMAND phpcs --report-width=120 --colors lib website utils
|
||||
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})
|
||||
endif()
|
||||
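With BUILD_TESTS enabled, the suites registered above (the behave BDD tests, phpunit and phpcs) are plain CTest targets. A minimal sketch of running them from the build directory, assuming behave, phpunit and phpcs are installed:

```
# Run all registered test suites and show the output of failing ones
cd build
ctest --output-on-failure
# or run a single suite, e.g. the BDD API tests
ctest -R bdd_api --output-on-failure
```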
|
||||
#-----------------------------------------------------------------------------
|
||||
# Postgres module
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
add_subdirectory(module)
|
||||
add_subdirectory(nominatim)
|
||||
add_subdirectory(docs)
|
||||
if (BUILD_MODULE)
|
||||
add_subdirectory(module)
|
||||
endif()
|
||||
|
||||
#-----------------------------------------------------------------------------
|
||||
# Documentation
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
if (BUILD_DOCS)
|
||||
add_subdirectory(docs)
|
||||
endif()
|
||||
|
||||
@@ -15,9 +15,9 @@ Please make sure to add the following information:
|
||||
* the result you are getting
|
||||
* the expected result, preferably a link to the OSM object you want to find,
|
||||
otherwise an address that is as precise as possible
|
||||
|
||||
To get the link to the OSM object, you can try the following:
|
||||
|
||||
|
||||
To get the link to the OSM object, you can try the following:
|
||||
|
||||
* go to https://openstreetmap.org
|
||||
* zoom to the area of the map where you expect the result and
|
||||
zoom in as much as possible
|
||||
@@ -26,7 +26,7 @@ Please make sure to add the following information:
|
||||
* find the object of interest in the list that appears on the left side
|
||||
* click on the object and report the URL back that the browser shows
|
||||
|
||||
### When Reporting Problems with your Installation...
|
||||
### When Reporting Bugs...
|
||||
|
||||
Please add the following information to your issue:
|
||||
|
||||
@@ -38,6 +38,9 @@ Please add the following information to your issue:
|
||||
if you run from the git repo, the output of `git rev-parse HEAD`)
|
||||
* (if applicable) exact command line of the command that was causing the issue
|
||||
|
||||
Bug reports that do not include extensive information about your system,
|
||||
about the problem and about what you have already tried in order to debug it
|
||||
will be closed.
|
||||
|
||||
## Workflow for Pull Requests
|
||||
|
||||
|
||||
65 ChangeLog
@@ -1,5 +1,66 @@
|
||||
3.3.1
|
||||
* security fix: fix possible SQL injection via details API
|
||||
3.5.1
|
||||
|
||||
* disable jit and parallel processing in PostgreSQL for osm2pgsql
|
||||
* update libosmium to 2.15.6 (fixes an issue with processing hanging
|
||||
on large multipolygons)
|
||||
|
||||
3.5.0
|
||||
|
||||
* structured select on HTML search page
|
||||
* new PHP Nominatim\Shell class to wrap shell escaping
|
||||
* remove polygon parameter from all API calls
|
||||
* improve handling of postcode areas
|
||||
* reorganise place linking algorithm, now using wikidata tag as well
|
||||
* remove linkees from search_name and larger_area tables
|
||||
* introduce country-specific address ranks
|
||||
* reorganise rank address computation
|
||||
* cleanup of partition function
|
||||
* improve parenting for large POIs
|
||||
* add support for PostgreSQL 12 and PostGIS 3
|
||||
* add earlier cleanup when --drop is given, to reduce memory usage
|
||||
* remove use of place_id in URLs
|
||||
* replace C nominatim indexer with a simpler Python implementation
|
||||
* split up the huge sql/functions.sql file
|
||||
* move osm2pgsql tests to osm2pgsql
|
||||
* add new extratags style which imports all tags from OSM
|
||||
* add new script for checking the import after completion
|
||||
* update osm2pgsql, reducing memory usage
|
||||
* use new wikipedia importance and add processing of wikidata tags
|
||||
* add search form for details page
|
||||
* use ExtraDataPath for country_grid table
|
||||
* remove short_name from list of names to be displayed
|
||||
* split up CMakeFile, so that all parts can be built separately
|
||||
* update installation instructions for CentOS and Ubuntu
|
||||
* add script for importing/updating multiple country extracts
|
||||
* various documentation improvements
|
||||
|
||||
3.4.2
|
||||
|
||||
* fix security bug in /details endpoint where user input was not
|
||||
properly sanitized
|
||||
|
||||
3.4.1
|
||||
|
||||
* update osm2pgsql to fix hangs during updates and lost address numbers
|
||||
during updates
|
||||
|
||||
3.4.0
|
||||
|
||||
* increase required version for PostgreSQL(9.3), PostGIS(2.2) and PHP(7.0)
|
||||
* better error reporting for out-of-memory errors
|
||||
* exclude postcode ranges separated by colon from centre point calculation
|
||||
* update osm2pgsql, better handling of imports without flatnode file
|
||||
* switch to more efficient algorithm for word set computation
|
||||
* use only boundaries for country and state parts of addresses
|
||||
* improve updates of addresses with housenumbers and interpolations
|
||||
* remove country from place_addressline table and use country_code instead
|
||||
* optimise indexes on search_name partition tables
|
||||
* improve searching of attached streets for large objects like airports
|
||||
* drop support for python 2
|
||||
* new scripts for importing Wikidata for importance
|
||||
* create and drop indexes concurrently to not clash with auto vacuum
|
||||
* various documentation improvements
|
||||
|
||||
|
||||
3.3.0
|
||||
|
||||
|
||||
12 README.md
@@ -1,4 +1,4 @@
|
||||
[](https://travis-ci.org/openstreetmap/Nominatim)
|
||||
[](https://travis-ci.org/osm-search/Nominatim)
|
||||
|
||||
Nominatim
|
||||
=========
|
||||
@@ -19,8 +19,16 @@ https://nominatim.org/release-docs/develop/ .
|
||||
Installation
|
||||
============
|
||||
|
||||
**Nominatim is a complex piece of software and runs in a complex environment.
|
||||
Installing and running Nominatim is something for experienced system
|
||||
administrators only who can do some trouble-shooting themselves. We are sorry,
|
||||
but we can not provide installation support. We are all doing this in our free
|
||||
time and there is just so much of that time to go around. Do not open issues in
|
||||
our bug tracker if you need help. You can ask questions on the mailing list
|
||||
(see below) or on [help.openstreetmap.org](https://help.openstreetmap.org/).**
|
||||
|
||||
The latest stable release can be downloaded from https://nominatim.org.
|
||||
There you can also find [installation instructions for the release](https://nominatim.org/release-docs/latest/admin/Installation).
|
||||
There you can also find [installation instructions for the release](https://nominatim.org/release-docs/latest/admin/Installation), as well as an extensive [Troubleshooting/FAQ section](https://nominatim.org/release-docs/latest/admin/Faq/).
|
||||
|
||||
Detailed installation instructions for the development version can be
|
||||
found at [nominatim.org](https://nominatim.org/release-docs/develop/admin/Installation)
|
||||
|
||||
@@ -141,7 +141,7 @@ No. Long running Nominatim installations will differ once new import features (o
|
||||
bug fixes) get added since those usually only get applied to new/changed data.
|
||||
|
||||
Also this document skips the optional Wikipedia data import which affects ranking
|
||||
of search results. See [Nominatim installation](http://nominatim.org/release-docs/latest/Installation) for details.
|
||||
of search results. See [Nominatim installation](https://nominatim.org/release-docs/latest/admin/Installation) for details.
|
||||
|
||||
##### Why Ubuntu? Can I test CentOS/Fedora/CoreOS/FreeBSD?
|
||||
|
||||
|
||||
21 Vagrantfile (vendored)
@@ -15,6 +15,15 @@ Vagrant.configure("2") do |config|
|
||||
end
|
||||
|
||||
config.vm.define "ubuntu", primary: true do |sub|
|
||||
sub.vm.box = "bento/ubuntu-20.04"
|
||||
sub.vm.provision :shell do |s|
|
||||
s.path = "vagrant/Install-on-Ubuntu-20.sh"
|
||||
s.privileged = false
|
||||
s.args = [checkout]
|
||||
end
|
||||
end
|
||||
|
||||
config.vm.define "ubuntu18", primary: true do |sub|
|
||||
sub.vm.box = "bento/ubuntu-18.04"
|
||||
sub.vm.provision :shell do |s|
|
||||
s.path = "vagrant/Install-on-Ubuntu-18.sh"
|
||||
@@ -61,6 +70,18 @@ Vagrant.configure("2") do |config|
|
||||
sub.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
end
|
||||
|
||||
config.vm.define "centos8" do |sub|
|
||||
sub.vm.box = "generic/centos8"
|
||||
sub.vm.provision :shell do |s|
|
||||
s.path = "vagrant/Install-on-Centos-8.sh"
|
||||
s.privileged = false
|
||||
s.args = "yes"
|
||||
end
|
||||
sub.vm.synced_folder ".", "/home/vagrant/Nominatim", disabled: true
|
||||
sub.vm.synced_folder ".", "/vagrant", disabled: true
|
||||
end
|
||||
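The machine definitions above can be provisioned individually by name; a minimal usage sketch (not part of this diff), assuming Vagrant and VirtualBox are installed:

```
# Bring up and provision the new Ubuntu 20.04 box defined above
vagrant up ubuntu
# or the new CentOS 8 box
vagrant up centos8
# log in once provisioning has finished
vagrant ssh ubuntu
```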
|
||||
|
||||
config.vm.provider "virtualbox" do |vb|
|
||||
vb.gui = false
|
||||
vb.memory = 2048
|
||||
|
||||
@@ -1,29 +1,26 @@
|
||||
# US TIGER address data
|
||||
|
||||
Convert [TIGER](https://www.census.gov/geo/maps-data/data/tiger.html)/Line dataset of the US Census Bureau to SQL files which can be imported by Nominatim. The created tables in the Nominatim database are separate from OpenStreetMap tables and get queried at search time separately.
|
||||
Convert [TIGER](https://www.census.gov/geographies/mapping-files/time-series/geo/tiger-line-file.html)/Line dataset of the US Census Bureau to SQL files which can be imported by Nominatim. The created tables in the Nominatim database are separate from OpenStreetMap tables and get queried at search time separately.
|
||||
|
||||
The dataset gets updated once per year. Downloading is prown to be slow (can take a full day) and converting them can take hours as well.
|
||||
The dataset gets updated once per year. Downloading is prone to be slow (can take a full day) and converting them can take hours as well.
|
||||
|
||||
Replace '2018' with the current year throughout.
|
||||
Replace '2019' with the current year throughout.
|
||||
|
||||
1. Install the GDAL library and python bindings and the unzip tool
|
||||
|
||||
# Ubuntu:
|
||||
sudo apt-get install python-gdal unzip
|
||||
# CentOS:
|
||||
sudo yum install gdal-python unzip
|
||||
sudo apt-get install python3-gdal unzip
|
||||
|
||||
2. Get the TIGER 2018 data. You will need the EDGES files
|
||||
2. Get the TIGER 2019 data. You will need the EDGES files
|
||||
(3,233 zip files, 11GB total).
|
||||
|
||||
wget -r ftp://ftp2.census.gov/geo/tiger/TIGER2018/EDGES/
|
||||
wget -r ftp://ftp2.census.gov/geo/tiger/TIGER2019/EDGES/
|
||||
|
||||
3. Convert the data into SQL statements. Adjust the file paths in the scripts as needed
|
||||
|
||||
cd data-sources/us-tiger
|
||||
./convert.sh <input-path> <output-path>
|
||||
|
||||
|
||||
4. Maybe: package the created files
|
||||
|
||||
tar -czf tiger2018-nominatim-preprocessed.tar.gz tiger
|
||||
|
||||
tar -czf tiger2019-nominatim-preprocessed.tar.gz tiger
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
# Tiger road data to OSM conversion script
|
||||
# Creates Karlsruhe-style address ways beside the main way
|
||||
# based on the Massachusetts GIS script by christopher schmidt
|
||||
@@ -164,7 +164,7 @@ def parse_shp_for_geom_and_tags( filename ):
|
||||
if (statefp != None) and (countyfp != None):
|
||||
county_name = county_fips_data.get(statefp + '' + countyfp)
|
||||
if county_name:
|
||||
tags["tiger:county"] = county_name.encode("utf-8")
|
||||
tags["tiger:county"] = county_name
|
||||
|
||||
# tlid = poFeature.GetField("TLID")
|
||||
# if tlid != None:
|
||||
|
||||
58 data-sources/wikipedia-wikidata/README.md (new file)
@@ -0,0 +1,58 @@
|
||||
## Add Wikipedia and Wikidata to Nominatim
|
||||
|
||||
OSM contributors frequently tag items with links to Wikipedia and Wikidata. Nominatim can use the page ranking of Wikipedia pages to help indicate the relative importance of OSM features. This is done by calculating an importance score between 0 and 1 based on the number of inlinks to an article for a location. If two places have the same name and one is more important than the other, the Wikipedia score often points to the correct place.
|
||||
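For reference, the normalization that `import_wikipedia.sh` applies further below is `importance = LOG(totalcount) / LOG(MAX(totalcount))`, so the best-linked article scores 1 and everything else falls between 0 and 1. A minimal worked sketch against the processing database (the literal counts are made up for illustration):

```
# An article with 1,000 inlinks scores 0.5 when the most-linked article has 1,000,000
# (PostgreSQL's single-argument LOG() is base 10: 3 / 6 = 0.5).
echo "SELECT LOG(1000) / LOG(1000000) AS importance;" | psql --quiet wikiprocessingdb
```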
|
||||
These scripts extract and prepare both Wikipedia page rank and Wikidata links for use in Nominatim.
|
||||
|
||||
#### Create a new postgres DB for Processing
|
||||
|
||||
Due to the size of initial and intermediate tables, processing can be done in an external database:
|
||||
```
|
||||
CREATE DATABASE wikiprocessingdb;
|
||||
```
|
||||
---
|
||||
Wikipedia
|
||||
---
|
||||
|
||||
Processing these data requires a large amount of disk space (~1TB) and considerable time (>24 hours).
|
||||
|
||||
#### Import & Process Wikipedia tables
|
||||
|
||||
This step downloads and converts [Wikipedia](https://dumps.wikimedia.org/) page data SQL dumps to PostgreSQL files, which are then imported and combined with pagelink information from the Wikipedia language sites to calculate importance scores.
|
||||
|
||||
- The script will process data from whatever set of Wikipedia languages is specified in `languages.txt`, which is read into the initial languages array (see the sketch after this list)
|
||||
|
||||
- Note that processing the top 40 Wikipedia languages can take over a day, and will add nearly 1TB to the processing database. The final output tables will be approximately 11GB and 2GB in size
|
||||
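Both scripts read the language list from `languages.txt` in this directory (`readarray -t LANGUAGES < languages.txt`). A hedged sketch of restricting a test run to a few languages before starting the import:

```
# Process only English, German and French instead of the shipped 39-language list
printf '%s\n' en de fr > languages.txt
./import_wikipedia.sh
```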
|
||||
To download, convert, and import the data, then process summary statistics and compute importance scores, run:
|
||||
```
|
||||
./import_wikipedia.sh
|
||||
```
|
||||
---
|
||||
Wikidata
|
||||
---
|
||||
|
||||
This script downloads and processes Wikidata to enrich the previously created Wikipedia tables for use in Nominatim.
|
||||
|
||||
#### Import & Process Wikidata
|
||||
|
||||
This step downloads and converts [Wikidata](https://dumps.wikimedia.org/wikidatawiki/) page data SQL dumps to PostgreSQL files which can be processed and imported into the Nominatim database. It also utilizes the Wikidata Query Service API to discover and include place types.
|
||||
|
||||
- Script presumes that the user has already processed Wikipedia tables as specified above
|
||||
|
||||
- Script requires wikidata_place_types.txt and wikidata_place_type_levels.csv
|
||||
|
||||
- Script requires the [jq JSON parser](https://stedolan.github.io/jq/)
|
||||
|
||||
- Script processes data from whatever set of Wikipedia languages is specified in the initial languages array
|
||||
|
||||
- Script queries the Wikidata Query Service API and imports all instances of the place types listed in wikidata_place_types.txt (see the query sketch after this list)
|
||||
|
||||
- Script updates the wikipedia_article table with the extracted Wikidata
|
||||
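The per-type lookup the script performs against the Wikidata Query Service asks for every item that is an instance of the type or of one of its subclasses (`wdt:P31*/wdt:P279*`). A sketch of a single query, using Q515 (city) as an example type, mirroring the loop in import_wikidata.sh:

```
# Fetch all Wikidata items that are (transitively) instances of Q515 and
# extract the item IDs with jq, as the script does for each listed type
wget --quiet "https://query.wikidata.org/bigdata/namespace/wdq/sparql?format=json&query=SELECT ?item WHERE{?item wdt:P31*/wdt:P279*wd:Q515;}" -O Q515.json
jq -r '.results | .[] | .[] | [.item.value] | @csv' Q515.json
```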
|
||||
By including Wikidata in the wikipedia_article table, new connections can be made on the fly from the Nominatim placex table to wikipedia_article importance scores.
|
||||
|
||||
To download, convert, and import the data, then process required items, run:
|
||||
```
|
||||
./import_wikidata.sh
|
||||
```
|
||||
274 data-sources/wikipedia-wikidata/import_wikidata.sh (new executable file)
@@ -0,0 +1,274 @@
|
||||
#!/bin/bash
|
||||
|
||||
psqlcmd() {
|
||||
psql --quiet wikiprocessingdb
|
||||
}
|
||||
|
||||
mysql2pgsqlcmd() {
|
||||
./mysql2pgsql.perl /dev/stdin /dev/stdout
|
||||
}
|
||||
|
||||
download() {
|
||||
echo "Downloading $1"
|
||||
wget --quiet --no-clobber --tries 3 "$1"
|
||||
}
|
||||
|
||||
# languages to process (refer to List of Wikipedias here: https://en.wikipedia.org/wiki/List_of_Wikipedias)
|
||||
# requires Bash 4.0
|
||||
readarray -t LANGUAGES < languages.txt
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Download wikidata dump tables"
|
||||
echo "====================================================================="
|
||||
|
||||
# 114M wikidatawiki-latest-geo_tags.sql.gz
|
||||
# 1.7G wikidatawiki-latest-page.sql.gz
|
||||
# 1.2G wikidatawiki-latest-wb_items_per_site.sql.gz
|
||||
download https://dumps.wikimedia.org/wikidatawiki/latest/wikidatawiki-latest-geo_tags.sql.gz
|
||||
download https://dumps.wikimedia.org/wikidatawiki/latest/wikidatawiki-latest-page.sql.gz
|
||||
download https://dumps.wikimedia.org/wikidatawiki/latest/wikidatawiki-latest-wb_items_per_site.sql.gz
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Import wikidata dump tables"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "Importing wikidatawiki-latest-geo_tags"
|
||||
gzip -dc wikidatawiki-latest-geo_tags.sql.gz | mysql2pgsqlcmd | psqlcmd
|
||||
|
||||
echo "Importing wikidatawiki-latest-page"
|
||||
gzip -dc wikidatawiki-latest-page.sql.gz | mysql2pgsqlcmd | psqlcmd
|
||||
|
||||
echo "Importing wikidatawiki-latest-wb_items_per_site"
|
||||
gzip -dc wikidatawiki-latest-wb_items_per_site.sql.gz | mysql2pgsqlcmd | psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Get wikidata places from wikidata query API"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "Number of place types:"
|
||||
wc -l wikidata_place_types.txt
|
||||
|
||||
while read F ; do
|
||||
echo "Querying for place type $F..."
|
||||
wget --quiet "https://query.wikidata.org/bigdata/namespace/wdq/sparql?format=json&query=SELECT ?item WHERE{?item wdt:P31*/wdt:P279*wd:$F;}" -O $F.json
|
||||
jq -r '.results | .[] | .[] | [.item.value] | @csv' $F.json >> $F.txt
|
||||
awk -v qid=$F '{print $0 ","qid}' $F.txt | sed -e 's!"http://www.wikidata.org/entity/!!' | sed 's/"//g' >> $F.csv
|
||||
cat $F.csv >> wikidata_place_dump.csv
|
||||
rm $F.json $F.txt $F.csv
|
||||
done < wikidata_place_types.txt
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Import wikidata places"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "CREATE TABLE wikidata_place_dump (
|
||||
item text,
|
||||
instance_of text
|
||||
);" | psqlcmd
|
||||
|
||||
echo "COPY wikidata_place_dump (item, instance_of)
|
||||
FROM '/srv/nominatim/Nominatim/data-sources/wikipedia-wikidata/wikidata_place_dump.csv'
|
||||
DELIMITER ','
|
||||
CSV
|
||||
;" | psqlcmd
|
||||
|
||||
echo "CREATE TABLE wikidata_place_type_levels (
|
||||
place_type text,
|
||||
level integer
|
||||
);" | psqlcmd
|
||||
|
||||
echo "COPY wikidata_place_type_levels (place_type, level)
|
||||
FROM '/srv/nominatim/Nominatim/data-sources/wikipedia-wikidata/wikidata_place_type_levels.csv'
|
||||
DELIMITER ','
|
||||
CSV
|
||||
HEADER
|
||||
;" | psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Create derived tables"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "CREATE TABLE geo_earth_primary AS
|
||||
SELECT gt_page_id,
|
||||
gt_lat,
|
||||
gt_lon
|
||||
FROM geo_tags
|
||||
WHERE gt_globe = 'earth'
|
||||
AND gt_primary = 1
|
||||
AND NOT( gt_lat < -90
|
||||
OR gt_lat > 90
|
||||
OR gt_lon < -180
|
||||
OR gt_lon > 180
|
||||
OR gt_lat=0
|
||||
OR gt_lon=0)
|
||||
;" | psqlcmd
|
||||
|
||||
echo "CREATE TABLE geo_earth_wikidata AS
|
||||
SELECT DISTINCT geo_earth_primary.gt_page_id,
|
||||
geo_earth_primary.gt_lat,
|
||||
geo_earth_primary.gt_lon,
|
||||
page.page_title,
|
||||
page.page_namespace
|
||||
FROM geo_earth_primary
|
||||
LEFT OUTER JOIN page
|
||||
ON (geo_earth_primary.gt_page_id = page.page_id)
|
||||
ORDER BY geo_earth_primary.gt_page_id
|
||||
;" | psqlcmd
|
||||
|
||||
echo "ALTER TABLE wikidata_place_dump
|
||||
ADD COLUMN ont_level integer,
|
||||
ADD COLUMN lat numeric(11,8),
|
||||
ADD COLUMN lon numeric(11,8)
|
||||
;" | psqlcmd
|
||||
|
||||
echo "UPDATE wikidata_place_dump
|
||||
SET ont_level = wikidata_place_type_levels.level
|
||||
FROM wikidata_place_type_levels
|
||||
WHERE wikidata_place_dump.instance_of = wikidata_place_type_levels.place_type
|
||||
;" | psqlcmd
|
||||
|
||||
echo "CREATE TABLE wikidata_places
|
||||
AS
|
||||
SELECT DISTINCT ON (item) item,
|
||||
instance_of,
|
||||
MAX(ont_level) AS ont_level,
|
||||
lat,
|
||||
lon
|
||||
FROM wikidata_place_dump
|
||||
GROUP BY item,
|
||||
instance_of,
|
||||
ont_level,
|
||||
lat,
|
||||
lon
|
||||
ORDER BY item
|
||||
;" | psqlcmd
|
||||
|
||||
echo "UPDATE wikidata_places
|
||||
SET lat = geo_earth_wikidata.gt_lat,
|
||||
lon = geo_earth_wikidata.gt_lon
|
||||
FROM geo_earth_wikidata
|
||||
WHERE wikidata_places.item = geo_earth_wikidata.page_title
|
||||
;" | psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Process language pages"
|
||||
echo "====================================================================="
|
||||
|
||||
|
||||
echo "CREATE TABLE wikidata_pages (
|
||||
item text,
|
||||
instance_of text,
|
||||
lat numeric(11,8),
|
||||
lon numeric(11,8),
|
||||
ips_site_page text,
|
||||
language text
|
||||
);" | psqlcmd
|
||||
|
||||
for i in "${LANGUAGES[@]}"
|
||||
do
|
||||
echo "CREATE TABLE wikidata_${i}_pages AS
|
||||
SELECT wikidata_places.item,
|
||||
wikidata_places.instance_of,
|
||||
wikidata_places.lat,
|
||||
wikidata_places.lon,
|
||||
wb_items_per_site.ips_site_page
|
||||
FROM wikidata_places
|
||||
LEFT JOIN wb_items_per_site
|
||||
ON (CAST (( LTRIM(wikidata_places.item, 'Q')) AS INTEGER) = wb_items_per_site.ips_item_id)
|
||||
WHERE ips_site_id = '${i}wiki'
|
||||
AND LEFT(wikidata_places.item,1) = 'Q'
|
||||
ORDER BY wikidata_places.item
|
||||
;" | psqlcmd
|
||||
|
||||
echo "ALTER TABLE wikidata_${i}_pages
|
||||
ADD COLUMN language text
|
||||
;" | psqlcmd
|
||||
|
||||
echo "UPDATE wikidata_${i}_pages
|
||||
SET language = '${i}'
|
||||
;" | psqlcmd
|
||||
|
||||
echo "INSERT INTO wikidata_pages
|
||||
SELECT item,
|
||||
instance_of,
|
||||
lat,
|
||||
lon,
|
||||
ips_site_page,
|
||||
language
|
||||
FROM wikidata_${i}_pages
|
||||
;" | psqlcmd
|
||||
done
|
||||
|
||||
echo "ALTER TABLE wikidata_pages
|
||||
ADD COLUMN wp_page_title text
|
||||
;" | psqlcmd
|
||||
echo "UPDATE wikidata_pages
|
||||
SET wp_page_title = REPLACE(ips_site_page, ' ', '_')
|
||||
;" | psqlcmd
|
||||
echo "ALTER TABLE wikidata_pages
|
||||
DROP COLUMN ips_site_page
|
||||
;" | psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Add wikidata to wikipedia_article table"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "UPDATE wikipedia_article
|
||||
SET lat = wikidata_pages.lat,
|
||||
lon = wikidata_pages.lon,
|
||||
wd_page_title = wikidata_pages.item,
|
||||
instance_of = wikidata_pages.instance_of
|
||||
FROM wikidata_pages
|
||||
WHERE wikipedia_article.language = wikidata_pages.language
|
||||
AND wikipedia_article.title = wikidata_pages.wp_page_title
|
||||
;" | psqlcmd
|
||||
|
||||
echo "CREATE TABLE wikipedia_article_slim
|
||||
AS
|
||||
SELECT * FROM wikipedia_article
|
||||
WHERE wikidata_id IS NOT NULL
|
||||
;" | psqlcmd
|
||||
|
||||
echo "ALTER TABLE wikipedia_article
|
||||
RENAME TO wikipedia_article_full
|
||||
;" | psqlcmd
|
||||
|
||||
echo "ALTER TABLE wikipedia_article_slim
|
||||
RENAME TO wikipedia_article
|
||||
;" | psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Dropping intermediate tables"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "DROP TABLE wikidata_place_dump;" | psqlcmd
|
||||
echo "DROP TABLE geo_earth_primary;" | psqlcmd
|
||||
for i in "${LANGUAGES[@]}"
|
||||
do
|
||||
echo "DROP TABLE wikidata_${i}_pages;" | psqlcmd
|
||||
done
|
||||
297 data-sources/wikipedia-wikidata/import_wikipedia.sh (new executable file)
@@ -0,0 +1,297 @@
|
||||
#!/bin/bash
|
||||
|
||||
psqlcmd() {
|
||||
psql --quiet wikiprocessingdb |& \
|
||||
grep -v 'does not exist, skipping' |& \
|
||||
grep -v 'violates check constraint' |& \
|
||||
grep -vi 'Failing row contains'
|
||||
}
|
||||
|
||||
mysql2pgsqlcmd() {
|
||||
./mysql2pgsql.perl --nodrop /dev/stdin /dev/stdout
|
||||
}
|
||||
|
||||
download() {
|
||||
echo "Downloading $1"
|
||||
wget --quiet --no-clobber --tries=3 "$1"
|
||||
}
|
||||
|
||||
|
||||
# languages to process (refer to List of Wikipedias here: https://en.wikipedia.org/wiki/List_of_Wikipedias)
|
||||
# requires Bash 4.0
|
||||
readarray -t LANGUAGES < languages.txt
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Create wikipedia calculation tables"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "CREATE TABLE linkcounts (
|
||||
language text,
|
||||
title text,
|
||||
count integer,
|
||||
sumcount integer,
|
||||
lat double precision,
|
||||
lon double precision
|
||||
);" | psqlcmd
|
||||
|
||||
echo "CREATE TABLE wikipedia_article (
|
||||
language text NOT NULL,
|
||||
title text NOT NULL,
|
||||
langcount integer,
|
||||
othercount integer,
|
||||
totalcount integer,
|
||||
lat double precision,
|
||||
lon double precision,
|
||||
importance double precision,
|
||||
title_en text,
|
||||
osm_type character(1),
|
||||
osm_id bigint
|
||||
);" | psqlcmd
|
||||
|
||||
echo "CREATE TABLE wikipedia_redirect (
|
||||
language text,
|
||||
from_title text,
|
||||
to_title text
|
||||
);" | psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Download individual wikipedia language tables"
|
||||
echo "====================================================================="
|
||||
|
||||
|
||||
for i in "${LANGUAGES[@]}"
|
||||
do
|
||||
echo "Language: $i"
|
||||
|
||||
# english is the largest
|
||||
# 1.7G enwiki-latest-page.sql.gz
|
||||
# 6.2G enwiki-latest-pagelinks.sql.gz
|
||||
# 355M enwiki-latest-langlinks.sql.gz
|
||||
# 128M enwiki-latest-redirect.sql.gz
|
||||
|
||||
# example of a smaller language, Turkish
|
||||
# 53M trwiki-latest-page.sql.gz
|
||||
# 176M trwiki-latest-pagelinks.sql.gz
|
||||
# 106M trwiki-latest-langlinks.sql.gz
|
||||
# 3.2M trwiki-latest-redirect.sql.gz
|
||||
|
||||
download https://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-page.sql.gz
|
||||
download https://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-pagelinks.sql.gz
|
||||
download https://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-langlinks.sql.gz
|
||||
download https://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-redirect.sql.gz
|
||||
done
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Import individual wikipedia language tables"
|
||||
echo "====================================================================="
|
||||
|
||||
for i in "${LANGUAGES[@]}"
|
||||
do
|
||||
echo "Language: $i"
|
||||
|
||||
# We pre-create the table schema. This allows us to
|
||||
# 1. Skip index creation. Most queries we do are full table scans
|
||||
# 2. Add a constraint to only import namespace=0 (Wikipedia articles)
|
||||
# Both cut down the data size considerably (50%+)
|
||||
|
||||
echo "Importing ${i}wiki-latest-pagelinks"
|
||||
|
||||
echo "DROP TABLE IF EXISTS ${i}pagelinks;" | psqlcmd
|
||||
echo "CREATE TABLE ${i}pagelinks (
|
||||
pl_from int NOT NULL DEFAULT '0',
|
||||
pl_namespace int NOT NULL DEFAULT '0',
|
||||
pl_title text NOT NULL DEFAULT '',
|
||||
pl_from_namespace int NOT NULL DEFAULT '0'
|
||||
);" | psqlcmd
|
||||
|
||||
time \
|
||||
gzip -dc ${i}wiki-latest-pagelinks.sql.gz | \
|
||||
sed "s/\`pagelinks\`/\`${i}pagelinks\`/g" | \
|
||||
mysql2pgsqlcmd | \
|
||||
grep -v '^CREATE INDEX ' | \
|
||||
psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
echo "Importing ${i}wiki-latest-page"
|
||||
|
||||
# autoincrement serial8 4byte
|
||||
echo "DROP TABLE IF EXISTS ${i}page;" | psqlcmd
|
||||
echo "CREATE TABLE ${i}page (
|
||||
page_id int NOT NULL,
|
||||
page_namespace int NOT NULL DEFAULT '0',
|
||||
page_title text NOT NULL DEFAULT '',
|
||||
page_restrictions text NOT NULL,
|
||||
page_is_redirect smallint NOT NULL DEFAULT '0',
|
||||
page_is_new smallint NOT NULL DEFAULT '0',
|
||||
page_random double precision NOT NULL DEFAULT '0',
|
||||
page_touched text NOT NULL DEFAULT '',
|
||||
page_links_updated text DEFAULT NULL,
|
||||
page_latest int NOT NULL DEFAULT '0',
|
||||
page_len int NOT NULL DEFAULT '0',
|
||||
page_content_model text DEFAULT NULL,
|
||||
page_lang text DEFAULT NULL
|
||||
);" | psqlcmd
|
||||
|
||||
time \
|
||||
gzip -dc ${i}wiki-latest-page.sql.gz | \
|
||||
sed "s/\`page\`/\`${i}page\`/g" | \
|
||||
mysql2pgsqlcmd | \
|
||||
grep -v '^CREATE INDEX ' | \
|
||||
psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
echo "Importing ${i}wiki-latest-langlinks"
|
||||
|
||||
echo "DROP TABLE IF EXISTS ${i}langlinks;" | psqlcmd
|
||||
echo "CREATE TABLE ${i}langlinks (
|
||||
ll_from int NOT NULL DEFAULT '0',
|
||||
ll_lang text NOT NULL DEFAULT '',
|
||||
ll_title text NOT NULL DEFAULT ''
|
||||
);" | psqlcmd
|
||||
|
||||
time \
|
||||
gzip -dc ${i}wiki-latest-langlinks.sql.gz | \
|
||||
sed "s/\`langlinks\`/\`${i}langlinks\`/g" | \
|
||||
mysql2pgsqlcmd | \
|
||||
grep -v '^CREATE INDEX ' | \
|
||||
psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo "Importing ${i}wiki-latest-redirect"
|
||||
|
||||
echo "DROP TABLE IF EXISTS ${i}redirect;" | psqlcmd
|
||||
echo "CREATE TABLE ${i}redirect (
|
||||
rd_from int NOT NULL DEFAULT '0',
|
||||
rd_namespace int NOT NULL DEFAULT '0',
|
||||
rd_title text NOT NULL DEFAULT '',
|
||||
rd_interwiki text DEFAULT NULL,
|
||||
rd_fragment text DEFAULT NULL
|
||||
);" | psqlcmd
|
||||
|
||||
time \
|
||||
gzip -dc ${i}wiki-latest-redirect.sql.gz | \
|
||||
sed "s/\`redirect\`/\`${i}redirect\`/g" | \
|
||||
mysql2pgsqlcmd | \
|
||||
grep -v '^CREATE INDEX ' | \
|
||||
psqlcmd
|
||||
done
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Process language tables and associated pagelink counts"
|
||||
echo "====================================================================="
|
||||
|
||||
|
||||
for i in "${LANGUAGES[@]}"
|
||||
do
|
||||
echo "Language: $i"
|
||||
|
||||
echo "CREATE TABLE ${i}pagelinkcount
|
||||
AS
|
||||
SELECT pl_title AS title,
|
||||
COUNT(*) AS count,
|
||||
0::bigint as othercount
|
||||
FROM ${i}pagelinks
|
||||
WHERE pl_namespace = 0
|
||||
GROUP BY pl_title
|
||||
;" | psqlcmd
|
||||
|
||||
echo "INSERT INTO linkcounts
|
||||
SELECT '${i}',
|
||||
pl_title,
|
||||
COUNT(*)
|
||||
FROM ${i}pagelinks
|
||||
WHERE pl_namespace = 0
|
||||
GROUP BY pl_title
|
||||
;" | psqlcmd
|
||||
|
||||
echo "INSERT INTO wikipedia_redirect
|
||||
SELECT '${i}',
|
||||
page_title,
|
||||
rd_title
|
||||
FROM ${i}redirect
|
||||
JOIN ${i}page ON (rd_from = page_id)
|
||||
WHERE page_namespace = 0
|
||||
AND rd_namespace = 0
|
||||
;" | psqlcmd
|
||||
|
||||
done
|
||||
|
||||
|
||||
for i in "${LANGUAGES[@]}"
|
||||
do
|
||||
for j in "${LANGUAGES[@]}"
|
||||
do
|
||||
echo "UPDATE ${i}pagelinkcount
|
||||
SET othercount = ${i}pagelinkcount.othercount + x.count
|
||||
FROM (
|
||||
SELECT page_title AS title,
|
||||
count
|
||||
FROM ${i}langlinks
|
||||
JOIN ${i}page ON (ll_from = page_id)
|
||||
JOIN ${j}pagelinkcount ON (ll_lang = '${j}' AND ll_title = title)
|
||||
) AS x
|
||||
WHERE x.title = ${i}pagelinkcount.title
|
||||
;" | psqlcmd
|
||||
done
|
||||
|
||||
echo "INSERT INTO wikipedia_article
|
||||
SELECT '${i}',
|
||||
title,
|
||||
count,
|
||||
othercount,
|
||||
count + othercount
|
||||
FROM ${i}pagelinkcount
|
||||
;" | psqlcmd
|
||||
done
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Calculate importance score for each wikipedia page"
|
||||
echo "====================================================================="
|
||||
|
||||
echo "UPDATE wikipedia_article
|
||||
SET importance = LOG(totalcount)/LOG((SELECT MAX(totalcount) FROM wikipedia_article))
|
||||
;" | psqlcmd
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
echo "====================================================================="
|
||||
echo "Clean up intermediate tables to conserve space"
|
||||
echo "====================================================================="
|
||||
|
||||
for i in "${LANGUAGES[@]}"
|
||||
do
|
||||
echo "DROP TABLE ${i}pagelinks;" | psqlcmd
|
||||
echo "DROP TABLE ${i}page;" | psqlcmd
|
||||
echo "DROP TABLE ${i}langlinks;" | psqlcmd
|
||||
echo "DROP TABLE ${i}redirect;" | psqlcmd
|
||||
echo "DROP TABLE ${i}pagelinkcount;" | psqlcmd
|
||||
done
|
||||
|
||||
echo "all done."
|
||||
39 data-sources/wikipedia-wikidata/languages.txt (new file)
@@ -0,0 +1,39 @@
|
||||
ar
|
||||
bg
|
||||
ca
|
||||
cs
|
||||
da
|
||||
de
|
||||
en
|
||||
es
|
||||
eo
|
||||
eu
|
||||
fa
|
||||
fr
|
||||
ko
|
||||
hi
|
||||
hr
|
||||
id
|
||||
it
|
||||
he
|
||||
lt
|
||||
hu
|
||||
ms
|
||||
nl
|
||||
ja
|
||||
no
|
||||
pl
|
||||
pt
|
||||
kk
|
||||
ro
|
||||
ru
|
||||
sk
|
||||
sl
|
||||
sr
|
||||
fi
|
||||
sv
|
||||
tr
|
||||
uk
|
||||
vi
|
||||
war
|
||||
zh
|
||||
@@ -341,7 +341,7 @@ if (/(create\s+table\s+)([-_\w]+)\s/i) { # example: CREATE TABLE `english_engli
|
||||
# in the foreign-key case it will only remove the foreign-key constraint, not the other table entirely.)
|
||||
# (source: 8.1.3 docs, section "drop table")
|
||||
warn "table $table will be dropped CASCADE\n";
|
||||
$pre_create_sql .= "DROP TABLE $table CASCADE\\g\n"; # custom dumps may be missing the 'dump' commands
|
||||
$pre_create_sql .= "DROP TABLE $table CASCADE;\n"; # custom dumps may be missing the 'dump' commands
|
||||
}
|
||||
|
||||
s/(create\s+table\s+)([-_\w]+)\s/$1 $table /i;
|
||||
@@ -367,6 +367,7 @@ if ($create_sql ne "") { # we are inside create table statement so lets
|
||||
s/INSERT METHOD[=\s+][^;\s]+//i;
|
||||
s/PASSWORD=[^;\s]+//i;
|
||||
s/ROW_FORMAT=(?:DEFAULT|DYNAMIC|FIXED|COMPRESSED|REDUNDANT|COMPACT)+//i;
|
||||
s/KEY_BLOCK_SIZE=8//i;
|
||||
s/DELAY KEY WRITE=[^;\s]+//i;
|
||||
s/INDEX DIRECTORY[=\s+][^;\s]+//i;
|
||||
s/DATA DIRECTORY=[^;\s]+//i;
|
||||
@@ -389,6 +390,7 @@ if ($create_sql ne "") { # we are inside create table statement so lets
|
||||
s/DEFAULT CHARSET=[^;\s]+//i; # my mysql version is 4.1.11
|
||||
s/ENGINE\s*=\s*[^;\s]+//i; # my mysql version is 4.1.11
|
||||
s/ROW_FORMAT=[^;\s]+//i; # my mysql version is 5.0.22
|
||||
s/KEY_BLOCK_SIZE=8//i;
|
||||
s/MIN_ROWS=[^;\s]+//i;
|
||||
s/MAX_ROWS=[^;\s]+//i;
|
||||
s/AVG_ROW_LENGTH=[^;\s]+//i;
|
||||
199 data-sources/wikipedia-wikidata/wikidata_place_type_levels.csv (new file)
@@ -0,0 +1,199 @@
|
||||
place_type,level
|
||||
Q9842,4
|
||||
Q9430,3
|
||||
Q928830,4
|
||||
Q9259,1
|
||||
Q91028,5
|
||||
Q8514,2
|
||||
Q8502,2
|
||||
Q83405,3
|
||||
Q82794,2
|
||||
Q820477,1
|
||||
Q811979,1
|
||||
Q8072,2
|
||||
Q79007,2
|
||||
Q786014,3
|
||||
Q75848,2
|
||||
Q75520,2
|
||||
Q728937,4
|
||||
Q7275,2
|
||||
Q719456,3
|
||||
Q7075,3
|
||||
Q697295,4
|
||||
Q6852233,2
|
||||
Q682943,3
|
||||
Q665487,5
|
||||
Q655686,3
|
||||
Q643589,5
|
||||
Q641226,2
|
||||
Q631305,2
|
||||
Q6256,2
|
||||
Q6023295,2
|
||||
Q5773747,5
|
||||
Q56061,1
|
||||
Q55659167,4
|
||||
Q55488,4
|
||||
Q55465477,3
|
||||
Q54050,2
|
||||
Q532,3
|
||||
Q53060,2
|
||||
Q52177058,4
|
||||
Q515716,5
|
||||
Q5153984,4
|
||||
Q515,3
|
||||
Q5144960,5
|
||||
Q5119,4
|
||||
Q5119,4
|
||||
Q5107,2
|
||||
Q5084,4
|
||||
Q5031071,4
|
||||
Q5003624,2
|
||||
Q4989906,1
|
||||
Q4976993,3
|
||||
Q486972,1
|
||||
Q486972,2
|
||||
Q483110,3
|
||||
Q4830453,4
|
||||
Q47521,3
|
||||
Q473972,1
|
||||
Q46831,2
|
||||
Q46614560,5
|
||||
Q44782,3
|
||||
Q44613,4
|
||||
Q44539,4
|
||||
Q44494,2
|
||||
Q44377,2
|
||||
Q4421,2
|
||||
Q43501,2
|
||||
Q4286337,3
|
||||
Q42523,3
|
||||
Q41176,2
|
||||
Q40357,3
|
||||
Q4022,4
|
||||
Q40080,2
|
||||
Q39816,2
|
||||
Q39715,3
|
||||
Q39614,1
|
||||
Q3957,3
|
||||
Q3947,4
|
||||
Q3914,3
|
||||
Q38723,2
|
||||
Q38720,3
|
||||
Q3623867,5
|
||||
Q35666,2
|
||||
Q355304,3
|
||||
Q35509,2
|
||||
Q35112127,3
|
||||
Q34985575,4
|
||||
Q34876,5
|
||||
Q34763,2
|
||||
Q34627,4
|
||||
Q3455524,3
|
||||
Q34442,4
|
||||
Q33837,2
|
||||
Q33506,3
|
||||
Q32815,4
|
||||
Q3257686,2
|
||||
Q3240715,2
|
||||
Q3191695,5
|
||||
Q3153117,2
|
||||
Q30198,2
|
||||
Q30139652,3
|
||||
Q294422,3
|
||||
Q2870166,3
|
||||
Q27686,3
|
||||
Q274153,3
|
||||
Q271669,1
|
||||
Q2659904,2
|
||||
Q24529780,2
|
||||
Q24354,3
|
||||
Q2354973,4
|
||||
Q23442,2
|
||||
Q23413,3
|
||||
Q23397,3
|
||||
Q2327515,4
|
||||
Q2311958,5
|
||||
Q22927291,6
|
||||
Q22698,1
|
||||
Q2175765,4
|
||||
Q205495,4
|
||||
Q204832,3
|
||||
Q2042028,2
|
||||
Q202216,6
|
||||
Q1970725,3
|
||||
Q194203,5
|
||||
Q194195,2
|
||||
Q190429,2
|
||||
Q185187,3
|
||||
Q185113,2
|
||||
Q183366,2
|
||||
Q1799794,1
|
||||
Q1788454,4
|
||||
Q1785071,3
|
||||
Q1777138,3
|
||||
Q177634,2
|
||||
Q177380,2
|
||||
Q174814,4
|
||||
Q174782,2
|
||||
Q17350442,2
|
||||
Q17343829,3
|
||||
Q17334923,0
|
||||
Q17018380,3
|
||||
Q16970,4
|
||||
Q16917,3
|
||||
Q16831714,4
|
||||
Q165,3
|
||||
Q160742,4
|
||||
Q159719,3
|
||||
Q159334,4
|
||||
Q15640612,5
|
||||
Q15324,2
|
||||
Q15284,5
|
||||
Q15243209,6
|
||||
Q152081,1
|
||||
Q15195406,4
|
||||
Q1500350,5
|
||||
Q149621,5
|
||||
Q14757767,4
|
||||
Q14350,3
|
||||
Q1410668,3
|
||||
Q1394476,3
|
||||
Q1377575,2
|
||||
Q1353183,3
|
||||
Q134447,4
|
||||
Q133215,3
|
||||
Q133056,2
|
||||
Q13221722,3
|
||||
Q13220204,2
|
||||
Q1311958,4
|
||||
Q1303167,3
|
||||
Q130003,3
|
||||
Q12518,2
|
||||
Q12516,3
|
||||
Q1248784,3
|
||||
Q123705,3
|
||||
Q12323,3
|
||||
Q12284,4
|
||||
Q12280,4
|
||||
Q121359,2
|
||||
Q1210950,2
|
||||
Q11755880,3
|
||||
Q11707,3
|
||||
Q11315,3
|
||||
Q11303,3
|
||||
Q1115575,4
|
||||
Q1107656,1
|
||||
Q10864048,1
|
||||
Q1076486,2
|
||||
Q105731,3
|
||||
Q105190,3
|
||||
Q1048525,3
|
||||
Q102496,5
|
||||
Q28872924,1
|
||||
Q15617994,1
|
||||
Q159313,2
|
||||
Q24398318,3
|
||||
Q327333,2
|
||||
Q43229,1
|
||||
Q860861,1
|
||||
Q4989906,1
|
||||
|
195
data-sources/wikipedia-wikidata/wikidata_place_types.txt
Normal file
@@ -0,0 +1,195 @@
Q9842
|
||||
Q9430
|
||||
Q928830
|
||||
Q9259
|
||||
Q91028
|
||||
Q8514
|
||||
Q8502
|
||||
Q83405
|
||||
Q82794
|
||||
Q820477
|
||||
Q811979
|
||||
Q8072
|
||||
Q79007
|
||||
Q786014
|
||||
Q75848
|
||||
Q75520
|
||||
Q728937
|
||||
Q7275
|
||||
Q719456
|
||||
Q7075
|
||||
Q697295
|
||||
Q6852233
|
||||
Q682943
|
||||
Q665487
|
||||
Q655686
|
||||
Q643589
|
||||
Q641226
|
||||
Q631305
|
||||
Q6256
|
||||
Q6023295
|
||||
Q5773747
|
||||
Q56061
|
||||
Q55659167
|
||||
Q55488
|
||||
Q55465477
|
||||
Q54050
|
||||
Q532
|
||||
Q53060
|
||||
Q52177058
|
||||
Q515716
|
||||
Q5153984
|
||||
Q515
|
||||
Q5144960
|
||||
Q5119
|
||||
Q5107
|
||||
Q5084
|
||||
Q5031071
|
||||
Q5003624
|
||||
Q4989906
|
||||
Q4976993
|
||||
Q486972
|
||||
Q483110
|
||||
Q4830453
|
||||
Q47521
|
||||
Q473972
|
||||
Q46831
|
||||
Q46614560
|
||||
Q44782
|
||||
Q44613
|
||||
Q44539
|
||||
Q44494
|
||||
Q44377
|
||||
Q4421
|
||||
Q43501
|
||||
Q4286337
|
||||
Q42523
|
||||
Q41176
|
||||
Q40357
|
||||
Q4022
|
||||
Q40080
|
||||
Q39816
|
||||
Q39715
|
||||
Q39614
|
||||
Q3957
|
||||
Q3947
|
||||
Q3914
|
||||
Q38723
|
||||
Q38720
|
||||
Q3623867
|
||||
Q35666
|
||||
Q355304
|
||||
Q35509
|
||||
Q35112127
|
||||
Q34985575
|
||||
Q34876
|
||||
Q34763
|
||||
Q34627
|
||||
Q3455524
|
||||
Q34442
|
||||
Q33837
|
||||
Q33506
|
||||
Q32815
|
||||
Q3257686
|
||||
Q3240715
|
||||
Q3191695
|
||||
Q3153117
|
||||
Q30198
|
||||
Q30139652
|
||||
Q294422
|
||||
Q2870166
|
||||
Q27686
|
||||
Q274153
|
||||
Q271669
|
||||
Q2659904
|
||||
Q24529780
|
||||
Q24354
|
||||
Q2354973
|
||||
Q23442
|
||||
Q23413
|
||||
Q23397
|
||||
Q2327515
|
||||
Q2311958
|
||||
Q22927291
|
||||
Q22698
|
||||
Q2175765
|
||||
Q205495
|
||||
Q204832
|
||||
Q2042028
|
||||
Q202216
|
||||
Q1970725
|
||||
Q194203
|
||||
Q194195
|
||||
Q190429
|
||||
Q185187
|
||||
Q185113
|
||||
Q183366
|
||||
Q1799794
|
||||
Q1788454
|
||||
Q1785071
|
||||
Q1777138
|
||||
Q177634
|
||||
Q177380
|
||||
Q174814
|
||||
Q174782
|
||||
Q17350442
|
||||
Q17343829
|
||||
Q17334923
|
||||
Q17018380
|
||||
Q16970
|
||||
Q16917
|
||||
Q16831714
|
||||
Q165
|
||||
Q160742
|
||||
Q159719
|
||||
Q159334
|
||||
Q15640612
|
||||
Q15324
|
||||
Q15284
|
||||
Q15243209
|
||||
Q152081
|
||||
Q15195406
|
||||
Q1500350
|
||||
Q149621
|
||||
Q14757767
|
||||
Q14350
|
||||
Q1410668
|
||||
Q1394476
|
||||
Q1377575
|
||||
Q1353183
|
||||
Q134447
|
||||
Q133215
|
||||
Q133056
|
||||
Q13221722
|
||||
Q13220204
|
||||
Q1311958
|
||||
Q1303167
|
||||
Q130003
|
||||
Q12518
|
||||
Q12516
|
||||
Q1248784
|
||||
Q123705
|
||||
Q12323
|
||||
Q12284
|
||||
Q12280
|
||||
Q121359
|
||||
Q1210950
|
||||
Q11755880
|
||||
Q11707
|
||||
Q11315
|
||||
Q11303
|
||||
Q1115575
|
||||
Q1107656
|
||||
Q10864048
|
||||
Q1076486
|
||||
Q105731
|
||||
Q105190
|
||||
Q1048525
|
||||
Q102496
|
||||
Q28872924
|
||||
Q15617994
|
||||
Q159313
|
||||
Q24398318
|
||||
Q327333
|
||||
Q43229
|
||||
Q860861
|
||||
200
data-sources/wikipedia-wikidata/wikidata_places.md
Normal file
@@ -0,0 +1,200 @@

## Wikidata place types and related OSM Tags

Wikidata does not have any official ontologies, but the [DBpedia project](https://wiki.dbpedia.org/) has created an [ontology](https://wiki.dbpedia.org/services-resources/ontology) that covers [place types](http://mappings.dbpedia.org/server/ontology/classes/#Place). The table below uses the DBpedia place ontology as a starting point and is provided as a cross-reference to the relevant OSM tags.

The Wikidata place types listed in the table below can be used in conjunction with the [Wikidata Query Service](https://query.wikidata.org/) to retrieve instances of those place types from the Wikidata knowledge base.

```
SELECT ?item ?lat ?lon
WHERE {
  ?item wdt:P31*/wdt:P279* wd:Q9430; wdt:P625 ?pt.
  ?item p:P625 ?loc.
  ?loc psv:P625 ?cnode.
  ?cnode wikibase:geoLatitude ?lat.
  ?cnode wikibase:geoLongitude ?lon.
}
```

An example JSON return for all instances of the Wikidata item "Q9430" (Ocean) can be seen at [json](https://query.wikidata.org/bigdata/namespace/wdq/sparql?format=json&query=SELECT?item?lat?lon%20WHERE{?item%20wdt:P31*/wdt:P279*wd:Q9430;wdt:P625?pt.?item%20p:P625?loc.?loc%20psv:P625?cnode.?cnode%20wikibase:geoLatitude?lat.?cnode%20wikibase:geoLongitude?lon.})

**NOTE:** The OSM tags listed are those given in the Wikidata entries, not all possible matching tags within OSM.
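As a rough sketch, the same query can also be run from the command line with `curl` against the public Wikidata Query Service SPARQL endpoint; the output file name and user-agent string below are illustrative assumptions only:

```sh
# Fetch all instances/subclasses of Q9430 (ocean) that carry coordinates, as JSON.
curl -G 'https://query.wikidata.org/sparql' \
     -H 'Accept: application/sparql-results+json' \
     -H 'User-Agent: nominatim-doc-example/0.1 (you@example.com)' \
     --data-urlencode query='
SELECT ?item ?lat ?lon WHERE {
  ?item wdt:P31*/wdt:P279* wd:Q9430; wdt:P625 ?pt.
  ?item p:P625 ?loc. ?loc psv:P625 ?cnode.
  ?cnode wikibase:geoLatitude ?lat.
  ?cnode wikibase:geoLongitude ?lon.
}' \
     -o ocean_places.json
```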

title | concept | OSM Tag |
-----------|---------------------------------------|------------------|
[Q17334923](https://www.wikidata.org/entity/Q17334923) | Location | |
|
||||
[Q811979](https://www.wikidata.org/entity/Q811979) | Architectural Structure | |
|
||||
[Q194195](https://www.wikidata.org/entity/Q194195) | Amusement park |
|
||||
[Q204832](https://www.wikidata.org/entity/Q204832) | Roller coaster | [attraction=roller_coaster](https://wiki.openstreetmap.org/wiki/Tag:attraction=roller_coaster) |
|
||||
[Q2870166](https://www.wikidata.org/entity/Q2870166) | Water ride | |
|
||||
[Q641226](https://www.wikidata.org/entity/Q641226) | Arena | [amenity=events_centre](https://wiki.openstreetmap.org/wiki/Tag:amenity=events_centre) |
|
||||
[Q41176](https://www.wikidata.org/entity/Q41176) | Building | [building=yes](https://wiki.openstreetmap.org/wiki/Key:building) |
|
||||
[Q1303167](https://www.wikidata.org/entity/Q1303167) | Barn | [building=barn](https://wiki.openstreetmap.org/wiki/Tag:building=barn) |
|
||||
[Q655686](https://www.wikidata.org/entity/Q655686) | Commercial building | [building=commercial](https://wiki.openstreetmap.org/wiki/Tag:building=commercial) |
|
||||
[Q4830453](https://www.wikidata.org/entity/Q4830453) | Business | |
|
||||
[Q7075](https://www.wikidata.org/entity/Q7075) | Library | [amenity=library](https://wiki.openstreetmap.org/wiki/Tag:amenity=library) |
|
||||
[Q133215](https://www.wikidata.org/entity/Q133215) | Casino | [amenity=casino](https://wiki.openstreetmap.org/wiki/Tag:amenity=casino) |
|
||||
[Q23413](https://www.wikidata.org/entity/Q23413) | Castle | [historic=castle](https://wiki.openstreetmap.org/wiki/Tag:historic=castle) |
|
||||
[Q83405](https://www.wikidata.org/entity/Q83405) | Factory | |
|
||||
[Q53060](https://www.wikidata.org/entity/Q53060) | Gate | [barrier=gate](https://wiki.openstreetmap.org/wiki/Tag:barrier=gate) |
|
||||
[Q11755880](https://www.wikidata.org/entity/Q11755880) | Residential Building | [building=residential](https://wiki.openstreetmap.org/wiki/Tag:building=residential) |
|
||||
[Q3947](https://www.wikidata.org/entity/Q3947) | House | [building=house](https://wiki.openstreetmap.org/wiki/Tag:building=house) |
|
||||
[Q35112127](https://www.wikidata.org/entity/Q35112127) | Historic Building | |
|
||||
[Q5773747](https://www.wikidata.org/entity/Q5773747) | Historic house | |
|
||||
[Q38723](https://www.wikidata.org/entity/Q38723) | Higher Education Institution |
|
||||
[Q3914](https://www.wikidata.org/entity/Q3914) | School | [amenity=school](https://wiki.openstreetmap.org/wiki/Tag:amenity=school) |
|
||||
[Q9842](https://www.wikidata.org/entity/Q9842) | Primary school | |
|
||||
[Q159334](https://www.wikidata.org/entity/Q159334) | Secondary school | |
|
||||
[Q16917](https://www.wikidata.org/entity/Q16917) | Hospital | [amenity=hospital](https://wiki.openstreetmap.org/wiki/Tag:amenity=hospital), [healthcare=hospital](https://wiki.openstreetmap.org/wiki/Tag:healthcare=hospital), [building=hospital](https://wiki.openstreetmap.org/wiki/Tag:building=hospital) |
|
||||
[Q27686](https://www.wikidata.org/entity/Q27686) | Hotel | [tourism=hotel](https://wiki.openstreetmap.org/wiki/Tag:tourism=hotel), [building=hotel](https://wiki.openstreetmap.org/wiki/Tag:building=hotel) |
|
||||
[Q33506](https://www.wikidata.org/entity/Q33506) | Museum | [tourism=museum](https://wiki.openstreetmap.org/wiki/Tag:tourism=museum) |
|
||||
[Q40357](https://www.wikidata.org/entity/Q40357) | Prison | [amenity=prison](https://wiki.openstreetmap.org/wiki/Tag:amenity=prison) |
|
||||
[Q24398318](https://www.wikidata.org/entity/Q24398318) | Religious Building | |
|
||||
[Q160742](https://www.wikidata.org/entity/Q160742) | Abbey | |
|
||||
[Q16970](https://www.wikidata.org/entity/Q16970) | Church (building) | [building=church](https://wiki.openstreetmap.org/wiki/Tag:building=church) |
|
||||
[Q44613](https://www.wikidata.org/entity/Q44613) | Monastery | [amenity=monastery](https://wiki.openstreetmap.org/wiki/Tag:amenity=monastery) |
|
||||
[Q32815](https://www.wikidata.org/entity/Q32815) | Mosque | [building=mosque](https://wiki.openstreetmap.org/wiki/Tag:building=mosque) |
|
||||
[Q697295](https://www.wikidata.org/entity/Q697295) | Shrine | [building=shrine](https://wiki.openstreetmap.org/wiki/Tag:building=shrine) |
|
||||
[Q34627](https://www.wikidata.org/entity/Q34627) | Synagogue | [building=synagogue](https://wiki.openstreetmap.org/wiki/Tag:building=synagogue) |
|
||||
[Q44539](https://www.wikidata.org/entity/Q44539) | Temple | [building=temple](https://wiki.openstreetmap.org/wiki/Tag:building=temple) |
|
||||
[Q11707](https://www.wikidata.org/entity/Q11707) | Restaurant | [amenity=restaurant](https://wiki.openstreetmap.org/wiki/Tag:amenity=restaurant) |
|
||||
[Q11315](https://www.wikidata.org/entity/Q11315) | Shopping mall | [shop=mall](https://wiki.openstreetmap.org/wiki/Tag:shop=mall), [shop=shopping_centre](https://wiki.openstreetmap.org/wiki/Tag:shop=shopping_centre) |
|
||||
[Q11303](https://www.wikidata.org/entity/Q11303) | Skyscraper | |
|
||||
[Q17350442](https://www.wikidata.org/entity/Q17350442) | Venue | |
|
||||
[Q41253](https://www.wikidata.org/entity/Q41253) | Movie Theater | [amenity=cinema](https://wiki.openstreetmap.org/wiki/Tag:amenity=cinema) |
|
||||
[Q483110](https://www.wikidata.org/entity/Q483110) | Stadium | [leisure=stadium](https://wiki.openstreetmap.org/wiki/Tag:leisure=stadium), [building=stadium](https://wiki.openstreetmap.org/wiki/Tag:building=stadium) |
|
||||
[Q24354](https://www.wikidata.org/entity/Q24354) | Theater (structure) | [amenity=theatre](https://wiki.openstreetmap.org/wiki/Tag:amenity=theatre) |
|
||||
[Q121359](https://www.wikidata.org/entity/Q121359) | Infrastructure | |
|
||||
[Q1248784](https://www.wikidata.org/entity/Q1248784) | Airport | |
|
||||
[Q12323](https://www.wikidata.org/entity/Q12323) | Dam | [waterway=dam](https://wiki.openstreetmap.org/wiki/Tag:waterway=dam) |
|
||||
[Q1353183](https://www.wikidata.org/entity/Q1353183) | Launch pad | |
|
||||
[Q105190](https://www.wikidata.org/entity/Q105190) | Levee | [man_made=dyke](https://wiki.openstreetmap.org/wiki/Tag:man_made=dyke) |
|
||||
[Q105731](https://www.wikidata.org/entity/Q105731) | Lock (water navigation) | [lock=yes](https://wiki.openstreetmap.org/wiki/Key:lock) |
|
||||
[Q44782](https://www.wikidata.org/entity/Q44782) | Port | |
|
||||
[Q159719](https://www.wikidata.org/entity/Q159719) | Power station | [power=plant](https://wiki.openstreetmap.org/wiki/Tag:power=plant) |
|
||||
[Q174814](https://www.wikidata.org/entity/Q174814) | Electrical substation | |
|
||||
[Q134447](https://www.wikidata.org/entity/Q134447) | Nuclear power plant | [plant:source=nuclear](https://wiki.openstreetmap.org/wiki/Tag:plant:source=nuclear) |
|
||||
[Q786014](https://www.wikidata.org/entity/Q786014) | Rest area | [highway=rest_area](https://wiki.openstreetmap.org/wiki/Tag:highway=rest_area), [highway=services](https://wiki.openstreetmap.org/wiki/Tag:highway=services) |
|
||||
[Q12280](https://www.wikidata.org/entity/Q12280) | Bridge | [bridge=* ](https://wiki.openstreetmap.org/wiki/Key:bridge), [man_made=bridge](https://wiki.openstreetmap.org/wiki/Tag:man_made=bridge) |
|
||||
[Q728937](https://www.wikidata.org/entity/Q728937) | Railroad Line | [railway=rail](https://wiki.openstreetmap.org/wiki/Tag:railway=rail) |
|
||||
[Q1311958](https://www.wikidata.org/entity/Q1311958) | Railway Tunnel | |
|
||||
[Q34442](https://www.wikidata.org/entity/Q34442) | Road | [highway=* ](https://wiki.openstreetmap.org/wiki/Key:highway), [route=road](https://wiki.openstreetmap.org/wiki/Tag:route=road) |
|
||||
[Q1788454](https://www.wikidata.org/entity/Q1788454) | Road junction | |
|
||||
[Q44377](https://www.wikidata.org/entity/Q44377) | Tunnel | [tunnel=* ](https://wiki.openstreetmap.org/wiki/Key:tunnel) |
|
||||
[Q5031071](https://www.wikidata.org/entity/Q5031071) | Canal tunnel | |
|
||||
[Q719456](https://www.wikidata.org/entity/Q719456) | Station | [public_transport=station](https://wiki.openstreetmap.org/wiki/Tag:public_transport=station) |
|
||||
[Q205495](https://www.wikidata.org/entity/Q205495) | Filling station | [amenity=fuel](https://wiki.openstreetmap.org/wiki/Tag:amenity=fuel) |
|
||||
[Q928830](https://www.wikidata.org/entity/Q928830) | Metro station | [station=subway](https://wiki.openstreetmap.org/wiki/Tag:station=subway) |
|
||||
[Q55488](https://www.wikidata.org/entity/Q55488) | Train station | [railway=station](https://wiki.openstreetmap.org/wiki/Tag:railway=station) |
|
||||
[Q2175765](https://www.wikidata.org/entity/Q2175765) | Tram stop | [railway=tram_stop](https://wiki.openstreetmap.org/wiki/Tag:railway=tram_stop), [public_transport=stop_position](https://wiki.openstreetmap.org/wiki/Tag:public_transport=stop_position) |
|
||||
[Q6852233](https://www.wikidata.org/entity/Q6852233) | Military building | |
|
||||
[Q44494](https://www.wikidata.org/entity/Q44494) | Mill (grinding) | |
|
||||
[Q185187](https://www.wikidata.org/entity/Q185187) | Watermill | [man_made=watermill](https://wiki.openstreetmap.org/wiki/Tag:man_made=watermill) |
|
||||
[Q38720](https://www.wikidata.org/entity/Q38720) | Windmill | [man_made=windmill](https://wiki.openstreetmap.org/wiki/Tag:man_made=windmill) |
|
||||
[Q4989906](https://www.wikidata.org/entity/Q4989906) | Monument | [historic=monument](https://wiki.openstreetmap.org/wiki/Tag:historic=monument) |
|
||||
[Q5003624](https://www.wikidata.org/entity/Q5003624) | Memorial | [historic=memorial](https://wiki.openstreetmap.org/wiki/Tag:historic=memorial) |
|
||||
[Q271669](https://www.wikidata.org/entity/Q271669) | Landform | |
|
||||
[Q190429](https://www.wikidata.org/entity/Q190429) | Depression (geology) | |
|
||||
[Q17018380](https://www.wikidata.org/entity/Q17018380) | Bight (geography) | |
|
||||
[Q54050](https://www.wikidata.org/entity/Q54050) | Hill | |
|
||||
[Q1210950](https://www.wikidata.org/entity/Q1210950) | Channel (geography) | |
|
||||
[Q23442](https://www.wikidata.org/entity/Q23442) | Island | [place=island](https://wiki.openstreetmap.org/wiki/Tag:place=island) |
|
||||
[Q42523](https://www.wikidata.org/entity/Q42523) | Atoll | |
|
||||
[Q34763](https://www.wikidata.org/entity/Q34763) | Peninsula | |
|
||||
[Q355304](https://www.wikidata.org/entity/Q355304) | Watercourse | |
|
||||
[Q30198](https://www.wikidata.org/entity/Q30198) | Marsh | [wetland=marsh](https://wiki.openstreetmap.org/wiki/Tag:wetland=marsh) |
|
||||
[Q75520](https://www.wikidata.org/entity/Q75520) | Plateau | |
|
||||
[Q2042028](https://www.wikidata.org/entity/Q2042028) | Ravine | |
|
||||
[Q631305](https://www.wikidata.org/entity/Q631305) | Rock formation | |
|
||||
[Q12516](https://www.wikidata.org/entity/Q12516) | Pyramid | |
|
||||
[Q1076486](https://www.wikidata.org/entity/Q1076486) | Sports venue | |
|
||||
[Q682943](https://www.wikidata.org/entity/Q682943) | Cricket field | [sport=cricket](https://wiki.openstreetmap.org/wiki/Tag:sport=cricket) |
|
||||
[Q1048525](https://www.wikidata.org/entity/Q1048525) | Golf course | [leisure=golf_course](https://wiki.openstreetmap.org/wiki/Tag:leisure=golf_course) |
|
||||
[Q1777138](https://www.wikidata.org/entity/Q1777138) | Race track | [highway=raceway](https://wiki.openstreetmap.org/wiki/Tag:highway=raceway) |
|
||||
[Q130003](https://www.wikidata.org/entity/Q130003) | Ski resort | |
|
||||
[Q174782](https://www.wikidata.org/entity/Q174782) | Town square | [place=square](https://wiki.openstreetmap.org/wiki/Tag:place=square) |
|
||||
[Q12518](https://www.wikidata.org/entity/Q12518) | Tower | [building=tower](https://wiki.openstreetmap.org/wiki/Tag:building=tower), [man_made=tower](https://wiki.openstreetmap.org/wiki/Tag:man_made=tower) |
|
||||
[Q39715](https://www.wikidata.org/entity/Q39715) | Lighthouse | [man_made=lighthouse](https://wiki.openstreetmap.org/wiki/Tag:man_made=lighthouse) |
|
||||
[Q274153](https://www.wikidata.org/entity/Q274153) | Water tower | [building=water_tower](https://wiki.openstreetmap.org/wiki/Tag:building=water_tower), [man_made=water_tower](https://wiki.openstreetmap.org/wiki/Tag:man_made=water_tower) |
|
||||
[Q43501](https://www.wikidata.org/entity/Q43501) | Zoo | [tourism=zoo](https://wiki.openstreetmap.org/wiki/Tag:tourism=zoo) |
|
||||
[Q39614](https://www.wikidata.org/entity/Q39614) | Cemetery | [amenity=grave_yard](https://wiki.openstreetmap.org/wiki/Tag:amenity=grave_yard), [landuse=cemetery](https://wiki.openstreetmap.org/wiki/Tag:landuse=cemetery) |
|
||||
[Q152081](https://www.wikidata.org/entity/Q152081) | Concentration camp | |
|
||||
[Q1107656](https://www.wikidata.org/entity/Q1107656) | Garden | [leisure=garden](https://wiki.openstreetmap.org/wiki/Tag:leisure=garden) |
|
||||
[Q820477](https://www.wikidata.org/entity/Q820477) | Mine | |
|
||||
[Q33837](https://www.wikidata.org/entity/Q33837) | Archipelago | [place=archipelago](https://wiki.openstreetmap.org/wiki/Tag:place=archipelago) |
|
||||
[Q40080](https://www.wikidata.org/entity/Q40080) | Beach | [natural=beach](https://wiki.openstreetmap.org/wiki/Tag:natural=beach) |
|
||||
[Q15324](https://www.wikidata.org/entity/Q15324) | Body of water | [natural=water](https://wiki.openstreetmap.org/wiki/Tag:natural=water) |
|
||||
[Q23397](https://www.wikidata.org/entity/Q23397) | Lake | [water=lake](https://wiki.openstreetmap.org/wiki/Tag:water=lake) |
|
||||
[Q9430](https://www.wikidata.org/entity/Q9430) | Ocean | |
|
||||
[Q165](https://www.wikidata.org/entity/Q165) | Sea | |
|
||||
[Q47521](https://www.wikidata.org/entity/Q47521) | Stream | |
|
||||
[Q12284](https://www.wikidata.org/entity/Q12284) | Canal | [waterway=canal](https://wiki.openstreetmap.org/wiki/Tag:waterway=canal) |
|
||||
[Q4022](https://www.wikidata.org/entity/Q4022) | River | [waterway=river](https://wiki.openstreetmap.org/wiki/Tag:waterway=river), [type=waterway](https://wiki.openstreetmap.org/wiki/Relation:waterway) |
|
||||
[Q185113](https://www.wikidata.org/entity/Q185113) | Cape | [natural=cape](https://wiki.openstreetmap.org/wiki/Tag:natural=cape) |
|
||||
[Q35509](https://www.wikidata.org/entity/Q35509) | Cave | [natural=cave_entrance](https://wiki.openstreetmap.org/wiki/Tag:natural=cave_entrance) |
|
||||
[Q8514](https://www.wikidata.org/entity/Q8514) | Desert | |
|
||||
[Q4421](https://www.wikidata.org/entity/Q4421) | Forest | [natural=wood](https://wiki.openstreetmap.org/wiki/Tag:natural=wood) |
|
||||
[Q35666](https://www.wikidata.org/entity/Q35666) | Glacier | [natural=glacier](https://wiki.openstreetmap.org/wiki/Tag:natural=glacier) |
|
||||
[Q177380](https://www.wikidata.org/entity/Q177380) | Hot spring | |
|
||||
[Q8502](https://www.wikidata.org/entity/Q8502) | Mountain | [natural=peak](https://wiki.openstreetmap.org/wiki/Tag:natural=peak) |
|
||||
[Q133056](https://www.wikidata.org/entity/Q133056) | Mountain pass | |
|
||||
[Q46831](https://www.wikidata.org/entity/Q46831) | Mountain range | |
|
||||
[Q39816](https://www.wikidata.org/entity/Q39816) | Valley | [natural=valley](https://wiki.openstreetmap.org/wiki/Tag:natural=valley) |
|
||||
[Q8072](https://www.wikidata.org/entity/Q8072) | Volcano | [natural=volcano](https://wiki.openstreetmap.org/wiki/Tag:natural=volcano) |
|
||||
[Q43229](https://www.wikidata.org/entity/Q43229) | Organization | |
|
||||
[Q327333](https://www.wikidata.org/entity/Q327333) | Government agency | [office=government](https://wiki.openstreetmap.org/wiki/Tag:office=government)|
|
||||
[Q22698](https://www.wikidata.org/entity/Q22698) | Park | [leisure=park](https://wiki.openstreetmap.org/wiki/Tag:leisure=park) |
|
||||
[Q159313](https://www.wikidata.org/entity/Q159313) | Urban agglomeration | |
|
||||
[Q177634](https://www.wikidata.org/entity/Q177634) | Community | |
|
||||
[Q5107](https://www.wikidata.org/entity/Q5107) | Continent | [place=continent](https://wiki.openstreetmap.org/wiki/Tag:place=continent) |
|
||||
[Q6256](https://www.wikidata.org/entity/Q6256) | Country | [place=country](https://wiki.openstreetmap.org/wiki/Tag:place=country) |
|
||||
[Q75848](https://www.wikidata.org/entity/Q75848) | Gated community | |
|
||||
[Q3153117](https://www.wikidata.org/entity/Q3153117) | Intercommunality | |
|
||||
[Q82794](https://www.wikidata.org/entity/Q82794) | Region | |
|
||||
[Q56061](https://www.wikidata.org/entity/Q56061) | Administrative division | [boundary=administrative](https://wiki.openstreetmap.org/wiki/Tag:boundary=administrative) |
|
||||
[Q665487](https://www.wikidata.org/entity/Q665487) | Diocese | |
|
||||
[Q4976993](https://www.wikidata.org/entity/Q4976993) | Parish | [boundary=civil_parish](https://wiki.openstreetmap.org/wiki/Tag:boundary=civil_parish) |
|
||||
[Q194203](https://www.wikidata.org/entity/Q194203) | Arrondissements of France | |
|
||||
[Q91028](https://www.wikidata.org/entity/Q91028) | Arrondissements of Belgium | |
|
||||
[Q3623867](https://www.wikidata.org/entity/Q3623867) | Arrondissements of Benin | |
|
||||
[Q2311958](https://www.wikidata.org/entity/Q2311958) | Canton (country subdivision) | [political_division=canton](https://wiki.openstreetmap.org/wiki/FR:Cantons_in_France) |
|
||||
[Q643589](https://www.wikidata.org/entity/Q643589) | Department | |
|
||||
[Q202216](https://www.wikidata.org/entity/Q202216) | Overseas department and region | |
|
||||
[Q149621](https://www.wikidata.org/entity/Q149621) | District | [place=district](https://wiki.openstreetmap.org/wiki/Tag:place=district) |
|
||||
[Q15243209](https://www.wikidata.org/wiki/Q15243209) | Historic district | |
|
||||
[Q5144960](https://www.wikidata.org/entity/Q5144960) | Microregion | |
|
||||
[Q15284](https://www.wikidata.org/entity/Q15284) | Municipality | |
|
||||
[Q515716](https://www.wikidata.org/entity/Q515716) | Prefecture | |
|
||||
[Q34876](https://www.wikidata.org/entity/Q34876) | Province | |
|
||||
[Q3191695](https://www.wikidata.org/entity/Q3191695) | Regency (Indonesia) | |
|
||||
[Q1970725](https://www.wikidata.org/entity/Q1970725) | Natural region | |
|
||||
[Q486972](https://www.wikidata.org/entity/Q486972) | Human settlement | |
|
||||
[Q515](https://www.wikidata.org/entity/Q515) | City | [place=city](https://wiki.openstreetmap.org/wiki/Tag:place=city) |
|
||||
[Q5119](https://www.wikidata.org/entity/Q5119) | Capital city | [capital=yes](https://wiki.openstreetmap.org/wiki/Key:capital) |
|
||||
[Q4286337](https://www.wikidata.org/entity/Q4286337) | City district | |
|
||||
[Q1394476](https://www.wikidata.org/entity/Q1394476) | Civil township | |
|
||||
[Q1115575](https://www.wikidata.org/entity/Q1115575) | Civil parish | [designation=civil_parish](https://wiki.openstreetmap.org/wiki/Tag:designation=civil_parish) |
|
||||
[Q5153984](https://www.wikidata.org/entity/Q5153984) | Commune-level subdivisions | |
|
||||
[Q123705](https://www.wikidata.org/entity/Q123705) | Neighbourhood | [place=neighbourhood](https://wiki.openstreetmap.org/wiki/Tag:place=neighbourhood) |
|
||||
[Q1500350](https://www.wikidata.org/entity/Q1500350) | Townships of China | |
|
||||
[Q17343829](https://www.wikidata.org/entity/Q17343829) | Unincorporated Community | |
|
||||
[Q3957](https://www.wikidata.org/entity/Q3957) | Town | [place=town](https://wiki.openstreetmap.org/wiki/Tag:place=town) |
|
||||
[Q532](https://www.wikidata.org/entity/Q532) | Village | [place=village](https://wiki.openstreetmap.org/wiki/Tag:place=village) |
|
||||
[Q5084](https://www.wikidata.org/entity/Q5084) | Hamlet | [place=hamlet](https://wiki.openstreetmap.org/wiki/Tag:place=hamlet) |
|
||||
[Q7275](https://www.wikidata.org/entity/Q7275) | State | |
|
||||
[Q79007](https://www.wikidata.org/entity/Q79007) | Street | |
|
||||
[Q473972](https://www.wikidata.org/entity/Q473972) | Protected area | [boundary=protected_area](https://wiki.openstreetmap.org/wiki/Tag:boundary=protected_area) |
|
||||
[Q1377575](https://www.wikidata.org/entity/Q1377575) | Wildlife refuge | |
|
||||
[Q1410668](https://www.wikidata.org/entity/Q1410668) | National Wildlife Refuge | [protection_title=National Wildlife Refuge](ownership=national), [ownership=national](https://wiki.openstreetmap.org/wiki/Tag:ownership=national)|
|
||||
[Q9259](https://www.wikidata.org/entity/Q9259) | World Heritage Site | |
|
||||

---

### Future Work

The Wikidata improvements to Nominatim can be further enhanced by:

- continuing to add new Wikidata links to OSM objects
- increasing the number of place types accounted for in the wikipedia_articles table
- working to use place types in the wikipedia_article matching process
38126
data/us_postcode.sql
File diff suppressed because it is too large
Load Diff
16
data/us_postcode_table.sql
Normal file
@@ -0,0 +1,16 @@
SET statement_timeout = 0;
SET client_encoding = 'UTF8';
SET check_function_bodies = false;
SET client_min_messages = warning;

SET search_path = public, pg_catalog;

SET default_tablespace = '';

SET default_with_oids = false;

CREATE TABLE us_postcode (
    postcode text,
    x double precision,
    y double precision
);
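For context, a minimal sketch of how this table might be loaded and queried with the standard `psql` client. The database name, the assumption that `data/us_postcode.sql` carries the matching data rows, and the sample ZIP code are all illustrative only:

```sh
# Create the table and load the shipped US postcode data into a Nominatim database.
psql -d nominatim -f data/us_postcode_table.sql
psql -d nominatim -f data/us_postcode.sql

# Look up the centroid stored for one ZIP code (x = longitude, y = latitude).
psql -d nominatim -c "SELECT postcode, x, y FROM us_postcode WHERE postcode = '10001';"
```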
@@ -7,20 +7,43 @@ configure_file(mkdocs.yml ../mkdocs.yml)
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/appendix)
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/data-sources)

ADD_CUSTOM_TARGET(doc
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/admin ${CMAKE_CURRENT_BINARY_DIR}/admin
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/develop ${CMAKE_CURRENT_BINARY_DIR}/develop
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/api ${CMAKE_CURRENT_BINARY_DIR}/api
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/index.md ${CMAKE_CURRENT_BINARY_DIR}/index.md
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/extra.css ${CMAKE_CURRENT_BINARY_DIR}/extra.css
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/data-sources/overview.md ${CMAKE_CURRENT_BINARY_DIR}/data-sources/overview.md
set (DOC_SOURCES
    admin
    develop
    api
    index.md
    extra.css
    styles.css
    data-sources/overview.md
)

foreach (src ${DOC_SOURCES})
    execute_process(
        COMMAND ${CMAKE_COMMAND} -E create_symlink ${CMAKE_CURRENT_SOURCE_DIR}/${src} ${CMAKE_CURRENT_BINARY_DIR}/${src}
    )
endforeach()

execute_process(
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${PROJECT_SOURCE_DIR}/data-sources/us-tiger/README.md ${CMAKE_CURRENT_BINARY_DIR}/data-sources/US-Tiger.md
)
execute_process(
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${PROJECT_SOURCE_DIR}/data-sources/gb-postcodes/README.md ${CMAKE_CURRENT_BINARY_DIR}/data-sources/GB-Postcodes.md
)
execute_process(
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${PROJECT_SOURCE_DIR}/data-sources/country-grid/README.md ${CMAKE_CURRENT_BINARY_DIR}/data-sources/Country-Grid.md
)
execute_process(
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${PROJECT_SOURCE_DIR}/data-sources/country-grid/mexico.quad.png ${CMAKE_CURRENT_BINARY_DIR}/data-sources/mexico.quad.png
)
execute_process(
    COMMAND ${CMAKE_COMMAND} -E create_symlink ${PROJECT_SOURCE_DIR}/data-sources/wikipedia-wikidata/README.md ${CMAKE_CURRENT_BINARY_DIR}/data-sources/Wikipedia-Wikidata.md
)

ADD_CUSTOM_TARGET(doc
    COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Centos-7.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Centos-7.md
    COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Ubuntu-16.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Ubuntu-16.md
    COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Centos-8.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Centos-8.md
    COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Ubuntu-18.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Ubuntu-18.md
    COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/bash2md.sh ${PROJECT_SOURCE_DIR}/vagrant/Install-on-Ubuntu-20.sh ${CMAKE_CURRENT_BINARY_DIR}/appendix/Install-on-Ubuntu-20.md
    COMMAND mkdocs build -d ${CMAKE_CURRENT_BINARY_DIR}/../site-html -f ${CMAKE_CURRENT_BINARY_DIR}/../mkdocs.yml
)

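A short sketch of how this target is typically driven from an out-of-source build directory; the directory names are assumptions, only the `doc` target itself comes from the file above:

```sh
# Configure the project, then build the documentation target defined above.
mkdir -p build && cd build
cmake ..
make doc    # runs bash2md.sh on the vagrant install scripts, then mkdocs build
```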
109
docs/admin/Advanced-Installations.md
Normal file
@@ -0,0 +1,109 @@
# Advanced installations

This page contains instructions for setting up multiple countries in
your Nominatim database. It is assumed that you have already successfully
installed the Nominatim software itself; if not, return to the
[installation page](Installation.md).

## Importing multiple regions

To import multiple regions into your database, you need to configure and run the `utils/import_multiple_regions.sh` script. This script sets up the update directory, which has the following structure:

```bash
update
├── europe
│   ├── andorra
│   │   └── sequence.state
│   └── monaco
│       └── sequence.state
└── tmp
    ├── combined.osm.pbf
    └── europe
        ├── andorra-latest.osm.pbf
        └── monaco-latest.osm.pbf
```

The `sequence.state` files contain the sequence ID, which pyosmium uses to fetch updates. The tmp folder is used for the import dump.

### Configuring multiple regions

The file `import_multiple_regions.sh` needs to be edited to match your setup:

1. List of countries, e.g.:

        COUNTRIES="europe/monaco europe/andorra"

2. Path to the build directory, e.g.:

        NOMINATIMBUILD="/srv/nominatim/build"

3. Path to the update directory, e.g.:

        UPDATEDIR="/srv/nominatim/update"

4. Replication URL, e.g.:

        BASEURL="https://download.geofabrik.de"
        DOWNCOUNTRYPOSTFIX="-latest.osm.pbf"

!!! tip
    If your database already exists and you want to add more countries, replace the setting-up part
    `${SETUPFILE} --osm-file ${UPDATEDIR}/tmp/combined.osm.pbf --all 2>&1`
    with `${UPDATEFILE} --import-file ${UPDATEDIR}/tmp/combined.osm.pbf 2>&1`.

### Setting up multiple regions

Run the following command from your Nominatim directory after configuring the file:

    bash ./utils/import_multiple_regions.sh

!!! danger "Important"
    This script uses osmium-tool. It must be installed before executing the import script.
    Installation instructions can be found [here](https://osmcode.org/osmium-tool/manual.html#installation).
    A rough sketch of such a merge step is shown below.

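As a rough sketch only: the file names follow the directory layout above, and whether the script invokes osmium exactly like this is an assumption. Merging two downloaded extracts into a single importable file looks like:

```sh
# Merge per-country extracts into one file that setup.php can import.
osmium merge update/tmp/europe/monaco-latest.osm.pbf \
             update/tmp/europe/andorra-latest.osm.pbf \
             -o update/tmp/combined.osm.pbf
```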
## Updating multiple regions

To update multiple regions in your database, you need to configure and run `utils/update_database.sh`.
This uses the update directory set up while setting up the DB.

### Configuring multiple regions

The file `update_database.sh` needs to be edited to match your setup:

1. List of countries, e.g.:

        COUNTRIES="europe/monaco europe/andorra"

2. Path to the build directory, e.g.:

        NOMINATIMBUILD="/srv/nominatim/build"

3. Path to the update directory, e.g.:

        UPDATEDIR="/srv/nominatim/update"

4. Replication URL, e.g.:

        BASEURL="https://download.geofabrik.de"
        DOWNCOUNTRYPOSTFIX="-updates"

5. A follow-up command can be set according to your installation, e.g. for Photon

        FOLLOWUP="curl http://localhost:2322/nominatim-update"

    will handle the indexing.

### Updating the database

Run the following command from your Nominatim directory after configuring the file:

    bash ./utils/update_database.sh

This will fetch diffs from the replication server, import them and index the database. The default replication server in the script ([Geofabrik](https://download.geofabrik.de)) provides daily updates.

## Verification and further setup

Instructions for import verification and other details, such as importing Wikidata, can be found on the [import and update page](Import-and-Update.md).

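A minimal sketch of automating the daily run via cron; the checkout path, schedule and log location are assumptions and do not come from the script itself:

```sh
# Example crontab line: run the update script every night at 01:00 and keep a log.
# Adjust the Nominatim checkout path and log location to your installation.
0 1 * * * cd /srv/nominatim/Nominatim && bash ./utils/update_database.sh >> /srv/nominatim/logs/update.log 2>&1
```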
@@ -24,16 +24,18 @@ If the reported rank is 26 or higher, you can also safely add `--index-noanalyse
|
||||
|
||||
### PHP "open_basedir restriction in effect" warnings
|
||||
|
||||
`PHP Warning: file_get_contents(): open_basedir restriction in effect.`
|
||||
PHP Warning: file_get_contents(): open_basedir restriction in effect.
|
||||
|
||||
You need to adjust the [open_basedir](http://www.php.net/manual/en/ini.core.php#ini.open-basedir) setting
|
||||
in your PHP configuration (`php.ini file`). By default this setting may look like this:
|
||||
You need to adjust the
|
||||
[open_basedir](https://www.php.net/manual/en/ini.core.php#ini.open-basedir)
|
||||
setting in your PHP configuration (`php.ini` file). By default this setting may
|
||||
look like this:
|
||||
|
||||
open_basedir = /srv/http/:/home/:/tmp/:/usr/share/pear/
|
||||
|
||||
Either add reported directories to the list or disable this setting temporarily by
|
||||
dding ";" at the beginning of the line. Don't forget to enable this setting again
|
||||
once you are done with the PHP command line operations.
|
||||
Either add reported directories to the list or disable this setting temporarily
|
||||
by adding ";" at the beginning of the line. Don't forget to enable this setting
|
||||
again once you are done with the PHP command line operations.
|
||||
|
||||
|
||||
### PHP timezone warnings
|
||||
@@ -44,9 +46,9 @@ The Apache log may contain lots of PHP warnings like this:
|
||||
You should set the default time zone as instructed in the warning in
|
||||
your `php.ini` file. Find the entry about timezone and set it to
|
||||
something like this:
|
||||
|
||||
|
||||
; Defines the default timezone used by the date functions
|
||||
; http://php.net/date.timezone
|
||||
; https://php.net/date.timezone
|
||||
date.timezone = 'America/Denver'
|
||||
|
||||
Or
|
||||
@@ -66,6 +68,14 @@ server development libraries (`postgresql-server-dev-9.5` on Ubuntu)
|
||||
and recompile (`cmake .. && make`).
|
||||
|
||||
|
||||
### I see the error "ERROR: permission denied for language c"
|
||||
|
||||
`nominatim.so`, written in C, is required to be installed on the database
|
||||
server. Some managed database (cloud) services like Amazon RDS do not allow
|
||||
this. There is currently no work-around other than installing a database
|
||||
on a non-managed machine.
|
||||
|
||||
|
||||
### I see the error: "function transliteration(text) does not exist"
|
||||
|
||||
Reinstall the Nominatim functions with `setup.php --create-functions`
|
||||
@@ -81,13 +91,18 @@ If you are using a flatnode file, then it may also be that the underlying
|
||||
filesystem does not fully support 'mmap'. A notable candidate is virtualbox's
|
||||
vboxfs.
|
||||
|
||||
### I see the error: "clang: Command not found" on CentOS
|
||||
|
||||
On CentOS 7 users reported `/opt/rh/llvm-toolset-7/root/usr/bin/clang: Command not found`.
|
||||
Double-check clang is installed. Instead of `make` try running `make CLANG=true`.
|
||||
|
||||
### nominatim UPDATE failed: ERROR: buffer 179261 is not owned by resource owner Portal
|
||||
|
||||
Several users [reported this](https://github.com/openstreetmap/Nominatim/issues/1168) during the initial import of the database. It's
|
||||
something Postgresql internal Nominatim doesn't control. And Postgresql forums
|
||||
Several users [reported this](https://github.com/openstreetmap/Nominatim/issues/1168) during the initial import of the database. It's
|
||||
something PostgreSQL internal Nominatim doesn't control. And PostgreSQL forums
|
||||
suggest it's threading related but definitely some kind of crash of a process.
|
||||
Users reported either rebooting the server, different hardware or just trying
|
||||
the import again worked.
|
||||
the import again worked.
|
||||
|
||||
### The website shows: "Could not get word tokens"
|
||||
|
||||
@@ -99,10 +114,11 @@ to get the full error message.
|
||||
|
||||
`could not connect to server: No such file or directory`
|
||||
|
||||
On CentOS v7 the PostgreSQL server is started with `systemd`.
|
||||
Check if `/usr/lib/systemd/system/httpd.service` contains a line `PrivateTmp=true`.
|
||||
If so then Apache cannot see the `/tmp/.s.PGSQL.5432` file. It's a good security feature,
|
||||
so use the [preferred solution](../appendix/Install-on-Centos-7/#adding-selinux-security-settings).
|
||||
On CentOS v7 the PostgreSQL server is started with `systemd`. Check if
|
||||
`/usr/lib/systemd/system/httpd.service` contains a line `PrivateTmp=true`. If
|
||||
so then Apache cannot see the `/tmp/.s.PGSQL.5432` file. It's a good security
|
||||
feature, so use the
|
||||
[preferred solution](../appendix/Install-on-Centos-7/#adding-selinux-security-settings).
|
||||
|
||||
However, you can solve this the quick and dirty way by commenting out that line and then run
|
||||
|
||||
@@ -110,14 +126,12 @@ However, you can solve this the quick and dirty way by commenting out that line
|
||||
sudo systemctl restart httpd
|
||||
|
||||
|
||||
### "must be an array or an object that implements Countable" warning in /usr/share/pear/DB.php
|
||||
|
||||
The warning started with PHP 7.2. Make sure you have at least [version 1.9.3 of PEAR DB](https://github.com/pear/DB/releases)
|
||||
installed.
|
||||
|
||||
### Website reports "DB Error: insufficient permissions"
|
||||
|
||||
The user the webserver, e.g. Apache, runs under needs to have access to the Nominatim database. You can find the user like [this](https://serverfault.com/questions/125865/finding-out-what-user-apache-is-running-as), for default Ubuntu operating system for example it's `www-data`.
|
||||
The user the webserver, e.g. Apache, runs under needs to have access to the
|
||||
Nominatim database. You can find the user like
|
||||
[this](https://serverfault.com/questions/125865/finding-out-what-user-apache-is-running-as),
|
||||
for default Ubuntu operating system for example it's `www-data`.
|
||||
|
||||
1. Repeat the `createuser` step of the installation instructions.
|
||||
|
||||
@@ -140,7 +154,7 @@ Example error message
|
||||
CONTEXT: PL/pgSQL function make_standard_name(text) line 5 at assignment]
|
||||
```
|
||||
|
||||
The Postgresql database, i.e. user postgres, needs to have access to that file.
|
||||
The PostgreSQL database, i.e. user `postgres`, needs to have access to that file.
|
||||
|
||||
The permissions need to be read & executable by everybody, e.g.
|
||||
|
||||
@@ -155,20 +169,10 @@ When running SELinux, make sure that the
|
||||
|
||||
### Setup.php fails with "DB Error: extension not found"
|
||||
|
||||
Make sure you have the Postgres extensions hstore and postgis installed.
|
||||
See the installation instruction for a full list of required packages.
|
||||
Make sure you have the PostgreSQL extensions "hstore" and "postgis" installed.
|
||||
See the installation instructions for a full list of required packages.
|
||||
|
||||
|
||||
### Setup.php reports "Cannot redeclare getDB()"
|
||||
|
||||
`Cannot redeclare getDB() (previously declared in /your/path/Nominatim/lib/db.php:4)`
|
||||
|
||||
The message is a bit misleading as PHP needs to load the file `DB.php` and
|
||||
instead re-loads Nominatim's `db.php`. To solve this make sure you
|
||||
have the [Pear module 'DB'](http://pear.php.net/package/DB/) installed.
|
||||
|
||||
sudo pear install DB
|
||||
|
||||
### I forgot to delete the flatnodes file before starting an import.
|
||||
|
||||
That's fine. For each import the flatnode file gets overwritten.
|
||||
@@ -178,22 +182,6 @@ for more information.
|
||||
|
||||
## Running your own instance
|
||||
|
||||
### Can I import multiple countries and keep them up to date?
|
||||
|
||||
You should use the extracts and updates from https://download.geofabrik.de.
|
||||
For the initial import, download the countries you need and merge them.
|
||||
See [OSM Help](https://help.openstreetmap.org/questions/48843/merging-two-or-more-geographical-areas-to-import-two-or-more-osm-files-in-nominatim)
|
||||
for examples how to do that. Use the resulting single osm file when
|
||||
running `setup.php`.
|
||||
|
||||
For updates you need to download the change files for each country
|
||||
once per day and apply them **separately** using
|
||||
|
||||
./utils/update.php --import-diff <filename> --index
|
||||
|
||||
See [this issue](https://github.com/openstreetmap/Nominatim/issues/60#issuecomment-18679446)
|
||||
for a script that runs the updates using osmosis.
|
||||
|
||||
### Can I import negative OSM ids into Nominatim?
|
||||
|
||||
See [this question of Stackoverflow](https://help.openstreetmap.org/questions/64662/nominatim-flatnode-with-negative-id).
|
||||
|
||||
@@ -29,40 +29,42 @@ Add to your `settings/local.php`:
|
||||
@define('CONST_Osm2pgsql_Flatnode_File', '/path/to/flatnode.file');
|
||||
|
||||
Replace the second part with a suitable path on your system and make sure
|
||||
the directory exists. There should be at least 40GB of free space.
|
||||
the directory exists. There should be at least 64GB of free space.
|
||||
|
||||
## Downloading additional data
|
||||
|
||||
### Wikipedia rankings
|
||||
### Wikipedia/Wikidata rankings
|
||||
|
||||
Wikipedia can be used as an optional auxiliary data source to help indicate
|
||||
the importance of osm features. Nominatim will work without this information
|
||||
the importance of OSM features. Nominatim will work without this information
|
||||
but it will improve the quality of the results if this is installed.
|
||||
This data is available as a binary download:
|
||||
|
||||
cd $NOMINATIM_SOURCE_DIR/data
|
||||
wget https://www.nominatim.org/data/wikipedia_article.sql.bin
|
||||
wget https://www.nominatim.org/data/wikipedia_redirect.sql.bin
|
||||
wget https://www.nominatim.org/data/wikimedia-importance.sql.gz
|
||||
|
||||
Combined the 2 files are around 1.5GB and add around 30GB to the install
|
||||
size of nominatim. They also increase the install time by an hour or so.
|
||||
The file is about 400MB and adds around 4GB to the Nominatim database.
|
||||
|
||||
*NOTE:* you'll need to download the Wikipedia rankings before performing
|
||||
the initial import of the data if you want the rankings applied to the
|
||||
loaded data.
|
||||
!!! tip
|
||||
If you forgot to download the wikipedia rankings, you can also add
|
||||
importances after the import. Download the files, then run
|
||||
`./utils/setup.php --import-wikipedia-articles`
|
||||
and `./utils/update.php --recompute-importance`.
|
||||
|
||||
### UK postcodes
|
||||
### Great Britain, USA postcodes
|
||||
|
||||
Nominatim can use postcodes from an external source to improve searches that involve a UK postcode. This data can be optionally downloaded:
|
||||
Nominatim can use postcodes from an external source to improve searches that
|
||||
involve a GB or US postcode. This data can be optionally downloaded:
|
||||
|
||||
cd $NOMINATIM_SOURCE_DIR/data
|
||||
wget https://www.nominatim.org/data/gb_postcode_data.sql.gz
|
||||
wget https://www.nominatim.org/data/us_postcode_data.sql.gz
|
||||
|
||||
## Choosing the Data to Import
|
||||
|
||||
In its default setup Nominatim is configured to import the full OSM data
|
||||
set for the entire planet. Such a setup requires a powerful machine with
|
||||
at least 32GB of RAM and around 800GB of SSD hard disks. Depending on your
|
||||
at least 64GB of RAM and around 800GB of SSD hard disks. Depending on your
|
||||
use case there are various ways to reduce the amount of data imported. This
|
||||
section discusses these methods. They can also be combined.
|
||||
|
||||
@@ -77,7 +79,7 @@ below. There are also
|
||||
|
||||
Please be aware that some extracts are not cut exactly along the country
|
||||
boundaries. As a result some parts of the boundary may be missing which means
|
||||
that cannot compute the areas for some administrative areas.
|
||||
that Nominatim cannot compute the areas for some administrative areas.
|
||||
|
||||
### Dropping Data Required for Dynamic Updates
|
||||
|
||||
@@ -99,7 +101,7 @@ database or reuse the space later.
|
||||
|
||||
If you only want to use the Nominatim database for reverse lookups or
|
||||
if you plan to use the installation only for exports to a
|
||||
[photon](http://photon.komoot.de/) database, then you can set up a database
|
||||
[photon](https://photon.komoot.de/) database, then you can set up a database
|
||||
without search indexes. Add `--reverse-only` to your setup command above.
|
||||
|
||||
This saves about 5% of disk space.
|
||||
@@ -118,13 +120,16 @@ import styles available which only read selected data:
|
||||
Import all data necessary to compute addresses down to house number level.
|
||||
* **settings/import-full.style**
|
||||
Default style that also includes points of interest.
|
||||
* **settings/import-extratags.style**
|
||||
Like the full style but also adds most of the OSM tags into the extratags
|
||||
column.
|
||||
|
||||
The style can be changed with the configuration `CONST_Import_Style`.
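For illustration, one way to select a different style is a single line in `settings/local.php`; the street-level style path used below is an assumption for the example, not a required value:

```sh
# Append the import style setting to the local configuration (sketch only).
echo "@define('CONST_Import_Style', CONST_BasePath.'/settings/import-street.style');" >> settings/local.php
```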
|
||||
|
||||
To give you an idea of the impact of using the different style, the table
|
||||
To give you an idea of the impact of using the different styles, the table
|
||||
below gives rough estimates of the final database size after import of a
|
||||
2018 planet and after using the `--drop` option. It also shows the time
|
||||
needed for the import on a machine with 32GB RAM, 4 CPUS and SSDs. Note that
|
||||
needed for the import on a machine with 64GB RAM, 4 CPUS and SSDs. Note that
|
||||
the given sizes are just an estimate meant for comparison of style requirements.
|
||||
Your planet import is likely to be larger as the OSM data grows with time.
|
||||
|
||||
@@ -134,31 +139,68 @@ admin | 5h | 190 GB | 20 GB
|
||||
street | 42h | 400 GB | 180 GB
|
||||
address | 59h | 500 GB | 260 GB
|
||||
full | 80h | 575 GB | 300 GB
|
||||
extratags | 80h | 585 GB | 310 GB
|
||||
|
||||
You can also customize the styles further. For an description of the
|
||||
style format see [the developement section](../develop/Import.md).
|
||||
You can also customize the styles further. For a description of the
|
||||
style format see [the development section](../develop/Import.md).
|
||||
|
||||
## Initial import of the data
|
||||
|
||||
**Important:** first try the import with a small extract, for example from
|
||||
[Geofabrik](https://download.geofabrik.de).
|
||||
!!! danger "Important"
|
||||
First try the import with a small extract, for example from
|
||||
[Geofabrik](https://download.geofabrik.de).
|
||||
|
||||
Download the data to import and load the data with the following command
|
||||
from the build directory:
|
||||
|
||||
```sh
|
||||
./utils/setup.php --osm-file <data file> --all [--osm2pgsql-cache 28000] 2>&1 | tee setup.log
|
||||
./utils/setup.php --osm-file <data file> --all 2>&1 | tee setup.log
|
||||
```
|
||||
|
||||
The `--osm2pgsql-cache` parameter is optional but strongly recommended for
|
||||
planet imports. It sets the node cache size for the osm2pgsql import part
|
||||
(see `-C` parameter in osm2pgsql help). As a rule of thumb, this should be
|
||||
about the same size as the file you are importing but never more than
|
||||
2/3 of RAM available. If your machine starts swapping reduce the size.
|
||||
***Note for full planet imports:*** Even on a perfectly configured machine
|
||||
the import of a full planet takes at least 2 days. Once you see messages
|
||||
with `Rank .. ETA` appear, the indexing process has started. This part takes
|
||||
the most time. There are 30 ranks to process. Rank 26 and 30 are the most complex.
|
||||
They take each about a third of the total import time. If you have not reached
|
||||
rank 26 after two days of import, it is worth revisiting your system
|
||||
configuration as it may not be optimal for the import.
|
||||
|
||||
Computing word frequency for search terms can improve the performance of
|
||||
forward geocoding in particular under high load as it helps Postgres' query
|
||||
planner to make the right decisions. To recompute word counts run:
|
||||
### Notes on memory usage

In the first step of the import Nominatim uses osm2pgsql to load the OSM data
into the PostgreSQL database. This step is very demanding in terms of RAM usage.
osm2pgsql and PostgreSQL are running in parallel at this point. PostgreSQL
blocks at least the part of RAM that has been configured with the
`shared_buffers` parameter during [PostgreSQL tuning](Installation#postgresql-tuning)
and needs some memory on top of that. osm2pgsql needs at least 2GB of RAM for
its internal data structures, potentially more when it has to process very large
relations. In addition it needs to maintain a cache for node locations. The size
of this cache can be configured with the parameter `--osm2pgsql-cache`.

When importing with a flatnode file, it is best to disable the node cache
completely and leave the memory for the flatnode file. Nominatim will do this
by default, so you do not need to configure anything in this case.

For imports without a flatnode file, set `--osm2pgsql-cache` approximately to
the size of the OSM pbf file (in MB) you are importing. Make sure you leave
enough RAM for PostgreSQL and osm2pgsql as mentioned above. If the system starts
swapping or you are getting out-of-memory errors, reduce the cache size or
even consider using a flatnode file.

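To make the rule of thumb concrete, here is a sketch of an import without a flatnode file; the extract name and the 3000MB cache value are assumptions sized for a roughly 3GB pbf:

```sh
# Import a single extract, giving osm2pgsql a node cache about the size of the pbf.
./utils/setup.php --osm-file germany-latest.osm.pbf \
                  --osm2pgsql-cache 3000 \
                  --all 2>&1 | tee setup.log
```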
### Verify import finished
|
||||
|
||||
Run this script to verify all required tables and indices got created successfully.
|
||||
|
||||
```sh
|
||||
./utils/check_import_finished.php
|
||||
```
|
||||
|
||||
|
||||
## Tuning the database
|
||||
|
||||
Accurate word frequency information for search terms helps PostgreSQL's query
|
||||
planner to make the right decisions. Recomputing them can improve the performance
|
||||
of forward geocoding in particular under high load. To recompute word counts run:
|
||||
|
||||
```sh
|
||||
./utils/update.php --recompute-word-counts
|
||||
@@ -176,26 +218,27 @@ you also need to enable these key phrases like this:
|
||||
./utils/specialphrases.php --wiki-import > specialphrases.sql
|
||||
psql -d nominatim -f specialphrases.sql
|
||||
|
||||
Note that this command downloads the phrases from the wiki link above.
|
||||
Note that this command downloads the phrases from the wiki link above. You
|
||||
need internet access for the step.
|
||||
|
||||
|
||||
## Installing Tiger housenumber data for the US
|
||||
|
||||
Nominatim is able to use the official [TIGER](https://www.census.gov/geo/maps-data/data/tiger.html)
|
||||
Nominatim is able to use the official [TIGER](https://www.census.gov/geographies/mapping-files/time-series/geo/tiger-line-file.html)
|
||||
address set to complement the OSM house number data in the US. You can add
|
||||
TIGER data to your own Nominatim instance by following these steps. The
|
||||
entire US adds about 10GB to your database.
|
||||
|
||||
1. Get preprocessed TIGER 2018 data and unpack it into the
|
||||
1. Get preprocessed TIGER 2019 data and unpack it into the
|
||||
data directory in your Nominatim sources:
|
||||
|
||||
cd Nominatim/data
|
||||
wget https://nominatim.org/data/tiger2018-nominatim-preprocessed.tar.gz
|
||||
tar xf tiger2018-nominatim-preprocessed.tar.gz
|
||||
wget https://nominatim.org/data/tiger2019-nominatim-preprocessed.tar.gz
|
||||
tar xf tiger2019-nominatim-preprocessed.tar.gz
|
||||
|
||||
`data-source/us-tiger/README.md` explains how the data got preprocessed.
|
||||
|
||||
2. Import the data into your Nominatim database:
|
||||
2. Import the data into your Nominatim database:
|
||||
|
||||
./utils/setup.php --import-tiger-data
|
||||
|
||||
@@ -212,20 +255,24 @@ entire US adds about 10GB to your database.
|
||||
|
||||
## Updates
|
||||
|
||||
There are many different possibilities to update your Nominatim database.
|
||||
There are many different ways to update your Nominatim database.
|
||||
The following section describes how to keep it up-to-date with Pyosmium.
|
||||
For a list of other methods see the output of `./utils/update.php --help`.
|
||||
|
||||
!!! warning
|
||||
If you have configured a flatnode file for the import, then you
|
||||
need to keep this flatnode file around for updates as well.
|
||||
|
||||
#### Installing the newest version of Pyosmium
|
||||
|
||||
It is recommended to install Pyosmium via pip. Run (as the same user who
|
||||
will later run the updates):
|
||||
It is recommended to install Pyosmium via pip. Make sure to use python3.
|
||||
Run (as the same user who will later run the updates):
|
||||
|
||||
```sh
|
||||
pip install --user osmium
|
||||
pip3 install --user osmium
|
||||
```
|
||||
|
||||
Nominatim needs a tool called `pyosmium-get-updates`, which comes with
|
||||
Nominatim needs a tool called `pyosmium-get-updates` which comes with
|
||||
Pyosmium. You need to tell Nominatim where to find it. Add the
|
||||
following line to your `settings/local.php`:
|
||||
|
||||
@@ -241,7 +288,7 @@ to update using the global minutely diffs.
|
||||
|
||||
If you want a different update source you will need to add some settings
|
||||
to `settings/local.php`. For example, to use the daily country extracts
|
||||
diffs for Ireland from geofabrik add the following:
|
||||
diffs for Ireland from Geofabrik add the following:
|
||||
|
||||
// base URL of the replication service
|
||||
@define('CONST_Replication_Url', 'https://download.geofabrik.de/europe/ireland-and-northern-ireland-updates');
|
||||
@@ -257,7 +304,7 @@ To set up the update process now run the following command:
|
||||
It outputs the date where updates will start. Recheck that this date is
|
||||
what you expect.
|
||||
|
||||
The --init-updates command needs to be rerun whenever the replication service
|
||||
The `--init-updates` command needs to be rerun whenever the replication service
|
||||
is changed.
|
||||
|
||||
#### Updating Nominatim
|
||||
@@ -266,9 +313,10 @@ The following command will keep your database constantly up to date:
|
||||
|
||||
./utils/update.php --import-osmosis-all
|
||||
|
||||
(Note that even though the old name "import-osmosis-all" has been kept for compatibility reasons, Osmosis is not required to run this - it uses pyosmium behind the scenes.)
|
||||
(Note that even though the old name "import-osmosis-all" has been kept for
|
||||
compatibility reasons, Osmosis is not required to run this - it uses pyosmium
|
||||
behind the scenes.)
|
||||
|
||||
If you have imported multiple country extracts and want to keep them
|
||||
up-to-date, have a look at the script in
|
||||
[issue #60](https://github.com/openstreetmap/Nominatim/issues/60).
|
||||
|
||||
up-to-date, [Advanced installations section](Advanced-Installations.md) contains instructions
|
||||
to set up and update multiple country extracts.
|
||||
|
||||
@@ -4,8 +4,9 @@ This page contains generic installation instructions for Nominatim and its
|
||||
prerequisites. There are also step-by-step instructions available for
|
||||
the following operating systems:
|
||||
|
||||
* [Ubuntu 20.04](../appendix/Install-on-Ubuntu-20.md)
|
||||
* [Ubuntu 18.04](../appendix/Install-on-Ubuntu-18.md)
|
||||
* [Ubuntu 16.04](../appendix/Install-on-Ubuntu-16.md)
|
||||
* [CentOS 8](../appendix/Install-on-Centos-8.md)
|
||||
* [CentOS 7.2](../appendix/Install-on-Centos-7.md)
|
||||
|
||||
These OS-specific instructions can also be found in executable form
|
||||
@@ -25,45 +26,47 @@ and can't offer support.
|
||||
For compiling:
|
||||
|
||||
* [cmake](https://cmake.org/)
|
||||
* [libxml2](http://xmlsoft.org/)
|
||||
* a recent C++ compiler
|
||||
|
||||
Nominatim comes with its own version of osm2pgsql. See the
|
||||
osm2pgsql README for additional dependencies required for compiling osm2pgsql.
|
||||
|
||||
For running tests:
|
||||
|
||||
* [behave](http://pythonhosted.org/behave/)
|
||||
* [Psycopg2](http://initd.org/psycopg)
|
||||
* [nose](https://nose.readthedocs.io)
|
||||
* [phpunit](https://phpunit.de)
|
||||
* [expat](https://libexpat.github.io/)
|
||||
* [proj](https://proj.org/)
|
||||
* [bzip2](http://www.bzip.org/)
|
||||
* [zlib](https://www.zlib.net/)
|
||||
* [Boost libraries](https://www.boost.org/), including system and filesystem
|
||||
* PostgreSQL client libraries
|
||||
* a recent C++ compiler (gcc 5+ or Clang 3.8+)
|
||||
|
||||
For running Nominatim:
|
||||
|
||||
* [PostgreSQL](http://www.postgresql.org) (9.1 or later)
|
||||
* [PostGIS](http://postgis.refractions.net) (2.0 or later)
|
||||
* [PHP](http://php.net) (5.4 or later)
|
||||
* [PostgreSQL](https://www.postgresql.org) (9.3+)
|
||||
* [PostGIS](https://postgis.org) (2.2+)
|
||||
* [Python 3](https://www.python.org/)
|
||||
* [Psycopg2](https://initd.org/psycopg)
|
||||
* [PHP](https://php.net) (7.0 or later)
|
||||
* PHP-pgsql
|
||||
* PHP-intl (bundled with PHP)
|
||||
* [PEAR::DB](http://pear.php.net/package/DB)
|
||||
* a webserver (apache or nginx are recommended)
|
||||
|
||||
For running continuous updates:
|
||||
|
||||
* [pyosmium](http://osmcode.org/pyosmium/)
|
||||
* [pyosmium](https://osmcode.org/pyosmium/) (with Python 3)
|
||||
|
||||
For running tests:
|
||||
|
||||
* [behave](http://pythonhosted.org/behave/)
|
||||
* [nose](https://nose.readthedocs.io)
|
||||
* [phpunit](https://phpunit.de) >= 7.3
|
||||
|
||||
### Hardware
|
||||
|
||||
A minimum of 2GB of RAM is required or installation will fail. For a full
|
||||
planet import 32GB of RAM or more strongly are recommended.
|
||||
planet import 64GB of RAM or more are strongly recommended. Do not report
|
||||
out of memory problems if you have less than 64GB RAM.
|
||||
|
||||
For a full planet install you will need at least 700GB of hard disk space
|
||||
For a full planet install you will need at least 800GB of hard disk space
|
||||
(take into account that the OSM database is growing fast). SSD disks
|
||||
will help considerably to speed up import and queries.
|
||||
|
||||
On a 6-core machine with 32GB RAM and SSDs the import of a full planet takes
|
||||
a bit more than 2 days. Without SSDs 7-8 days are more realistic.
|
||||
|
||||
Even on a well configured machine the import of a full planet takes
|
||||
at least 2 days. Without SSDs 7-8 days are more realistic.
|
||||
|
||||
## Setup of the server
|
||||
|
||||
@@ -73,17 +76,30 @@ You might want to tune your PostgreSQL installation so that the later steps
|
||||
make best use of your hardware. You should tune the following parameters in
|
||||
your `postgresql.conf` file.
|
||||
|
||||
shared_buffers (2GB)
|
||||
maintenance_work_mem (10GB)
|
||||
work_mem (50MB)
|
||||
effective_cache_size (24GB)
|
||||
shared_buffers = 2GB
|
||||
maintenance_work_mem = (10GB)
|
||||
autovacuum_work_mem = 2GB
|
||||
work_mem = (50MB)
|
||||
effective_cache_size = (24GB)
|
||||
synchronous_commit = off
|
||||
checkpoint_segments = 100 # only for postgresql <= 9.4
|
||||
max_wal_size = 1GB # postgresql > 9.4
|
||||
checkpoint_timeout = 10min
|
||||
checkpoint_completion_target = 0.9
|
||||
|
||||
The numbers in brackets behind some parameters seem to work fine for
|
||||
32GB RAM machine. Adjust to your setup.
|
||||
64GB RAM machine. Adjust to your setup. A higher number for `max_wal_size`
|
||||
means that PostgreSQL needs to run checkpoints less often but it does require
|
||||
the additional space on your disk.
|
||||
|
||||
Autovacuum must not be switched off because it ensures that the
|
||||
tables are frequently analysed. If your machine has very little memory,
|
||||
you might consider setting:
|
||||
|
||||
autovacuum_max_workers = 1
|
||||
|
||||
and even reduce `autovacuum_work_mem` further. This will reduce the amount
|
||||
of memory that autovacuum takes away from the import process.
|
||||
|
||||
For the initial import, you should also set:
|
||||
|
||||
@@ -91,8 +107,8 @@ For the initial import, you should also set:
|
||||
full_page_writes = off
|
||||
|
||||
Don't forget to reenable them after the initial import or you risk database
|
||||
corruption. Autovacuum must not be switched off because it ensures that the
|
||||
tables are frequently analysed.
|
||||
corruption.
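As a sketch of how to switch these settings back on afterwards, assuming PostgreSQL 9.4 or newer and a standard local install where the `postgres` system user can connect (you can equally well edit `postgresql.conf` and reload):

```sh
# Re-enable the settings that were turned off for the initial import,
# then reload the configuration (both settings only need a reload, not a restart).
sudo -u postgres psql -c "ALTER SYSTEM SET fsync = 'on'"
sudo -u postgres psql -c "ALTER SYSTEM SET full_page_writes = 'on'"
sudo -u postgres psql -c "SELECT pg_reload_conf()"
```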
|
||||
|
||||
|
||||
### Webserver setup
|
||||
|
||||
@@ -105,13 +121,15 @@ from there.
|
||||
Make sure your Apache configuration contains the required permissions for the
|
||||
directory and create an alias:
|
||||
|
||||
<Directory "/srv/nominatim/build/website">
|
||||
Options FollowSymLinks MultiViews
|
||||
AddType text/html .php
|
||||
DirectoryIndex search.php
|
||||
Require all granted
|
||||
</Directory>
|
||||
Alias /nominatim /srv/nominatim/build/website
|
||||
``` apache
|
||||
<Directory "/srv/nominatim/build/website">
|
||||
Options FollowSymLinks MultiViews
|
||||
AddType text/html .php
|
||||
DirectoryIndex search.php
|
||||
Require all granted
|
||||
</Directory>
|
||||
Alias /nominatim /srv/nominatim/build/website
|
||||
```
|
||||
|
||||
`/srv/nominatim/build` should be replaced with the location of your
|
||||
build directory.
|
||||
@@ -139,17 +157,32 @@ follows:
|
||||
Tell nginx that php files are special and to fastcgi_pass to the php-fpm
|
||||
unix socket by adding the location definition to the default configuration.
|
||||
|
||||
root /srv/nominatim/build/website;
|
||||
index search.php index.html;
|
||||
location ~ [^/]\.php(/|$) {
|
||||
fastcgi_split_path_info ^(.+?\.php)(/.*)$;
|
||||
if (!-f $document_root$fastcgi_script_name) {
|
||||
return 404;
|
||||
}
|
||||
fastcgi_pass unix:/var/run/php5-fpm.sock;
|
||||
fastcgi_index search.php;
|
||||
include fastcgi.conf;
|
||||
``` nginx
|
||||
root /srv/nominatim/build/website;
|
||||
index search.php;
|
||||
location / {
|
||||
try_files $uri $uri/ @php;
|
||||
}
|
||||
|
||||
location @php {
|
||||
fastcgi_param SCRIPT_FILENAME "$document_root$uri.php";
|
||||
fastcgi_param PATH_TRANSLATED "$document_root$uri.php";
|
||||
fastcgi_param QUERY_STRING $args;
|
||||
fastcgi_pass unix:/var/run/php/php7.3-fpm.sock;
|
||||
fastcgi_index index.php;
|
||||
include fastcgi_params;
|
||||
}
|
||||
|
||||
location ~ [^/]\.php(/|$) {
|
||||
fastcgi_split_path_info ^(.+?\.php)(/.*)$;
|
||||
if (!-f $document_root$fastcgi_script_name) {
|
||||
return 404;
|
||||
}
|
||||
fastcgi_pass unix:/var/run/php7.3-fpm.sock;
|
||||
fastcgi_index search.php;
|
||||
include fastcgi.conf;
|
||||
}
|
||||
```
|
||||
|
||||
Restart the nginx and php-fpm services and the website should now be available
|
||||
at `http://localhost/`.
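On a systemd-based system that restart might look like the following; the service names are assumptions and vary by distribution and PHP version:

```sh
# Adjust service names to your distribution and PHP version.
sudo systemctl restart php7.3-fpm
sudo systemctl restart nginx
```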
|
||||
|
||||
@@ -3,9 +3,40 @@
|
||||
This page describes database migrations necessary to update existing databases
|
||||
to newer versions of Nominatim.
|
||||
|
||||
SQL statements should be executed from the postgres commandline. Execute
|
||||
SQL statements should be executed from the PostgreSQL commandline. Execute
|
||||
`psql nominatim` to enter command line mode.
|
||||
|
||||
## 3.4.0 -> 3.5.0
|
||||
|
||||
### New Wikipedia/Wikidata importance tables
|
||||
|
||||
The `wikipedia_*` tables have a new format that also includes references to
|
||||
Wikidata. You need to update the computation functions and the tables as
|
||||
follows:
|
||||
|
||||
* download the new Wikipedia tables as described in the import section
|
||||
* reimport the tables: `./utils/setup.php --import-wikipedia-articles`
|
||||
* update the functions: `./utils/setup.php --create-functions --enable-diff-updates`
|
||||
* compute importance: `./utils/update.php --recompute-importance`
|
||||
|
||||
The last step takes about 10 hours on the full planet.
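Put together as one shell session (paths assume you run from the build directory, as in the rest of this guide), the steps above would look roughly like this:

```sh
./utils/setup.php --import-wikipedia-articles
./utils/setup.php --create-functions --enable-diff-updates
./utils/update.php --recompute-importance
```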
|
||||
|
||||
## 3.3.0 -> 3.4.0
|
||||
|
||||
### Reorganisation of location_area_country table
|
||||
|
||||
The table `location_area_country` has been optimized. You need to switch to the
|
||||
new format when you run updates. While updates are disabled, run the following
|
||||
SQL commands:
|
||||
|
||||
```sql
|
||||
CREATE TABLE location_area_country_new AS
|
||||
SELECT place_id, country_code, geometry FROM location_area_country;
|
||||
DROP TABLE location_area_country;
|
||||
ALTER TABLE location_area_country_new RENAME TO location_area_country;
|
||||
CREATE INDEX idx_location_area_country_geometry ON location_area_country USING GIST (geometry);
|
||||
CREATE INDEX idx_location_area_country_place_id ON location_area_country USING BTREE (place_id);
|
||||
```
|
||||
|
||||
## 3.2.0 -> 3.3.0
|
||||
|
||||
|
||||
@@ -24,7 +24,7 @@ but the `class` parameter is left out, then one of the places will be chosen
|
||||
at random and displayed.
|
||||
|
||||
```
|
||||
https://nominatim.openstreetmap.org/details?placeid=<value>
|
||||
https://nominatim.openstreetmap.org/details?place_id=<value>
|
||||
```
|
||||
|
||||
Place ids are assigned sequentially during the Nominatim data import. The id for a place differs between Nominatim installations (servers) and changes when data gets reimported. Therefore it can't be used as a permanent id and shouldn't be used in bug reports.
|
||||
@@ -40,7 +40,7 @@ See [Place Output Formats](Output.md) for details on each format. (Default: html
|
||||
|
||||
* `json_callback=<string>`
|
||||
|
||||
Wrap json output in a callback function (JSONP) i.e. `<string>(<json>)`.
|
||||
Wrap JSON output in a callback function (JSONP) i.e. `<string>(<json>)`.
|
||||
Only has an effect for JSON output formats.
|
||||
|
||||
* `pretty=[0|1]`
|
||||
|
||||
@@ -7,11 +7,11 @@
|
||||
Nominatim computes the address from two sources in the OpenStreetMap data:
|
||||
from administrative boundaries and from place nodes. Boundaries are the more
|
||||
useful source. They precisely describe an area. So it is very clear for
|
||||
Nominatim if a point belongs to an area of not. Place nodes are more complicated.
|
||||
These are only points without any precise extend. So Nominatim has to take a
|
||||
guess and assume that an address belongs to the closest place nose it can find.
|
||||
Nominatim if a point belongs to an area or not. Place nodes are more complicated.
|
||||
These are only points without any precise extent. So Nominatim has to take a
|
||||
guess and assume that an address belongs to the closest place node it can find.
|
||||
In an ideal world, Nominatim would not need the place nodes but there are
|
||||
many places on earth where there are not precise boundaries available for
|
||||
many places on earth where there are no precise boundaries available for
|
||||
all parts that make up an address. This is in particular true for the more
|
||||
local address parts, like villages and suburbs. Therefore it is not possible
|
||||
to completely dismiss place nodes. And sometimes they sneak in where they
|
||||
@@ -21,7 +21,7 @@ As a OpenStreetMap mapper, you can improve the situation in two ways: if you
|
||||
see a place node for which already an administrative area exists, then you
|
||||
should _link_ the two by adding the node with a 'label' role to the boundary
|
||||
relation. If there is no administrative area, you can add the approximate
|
||||
extend of the place and tag it place=<something> as well.
|
||||
extent of the place and tag it place=<something> as well.
|
||||
|
||||
#### 2. When doing reverse search, the address details have parts that don't contain the point I was looking up.
|
||||
|
||||
@@ -30,7 +30,7 @@ Reverse does not give you the address of the point you asked for. Reverse
|
||||
returns the closest object to the point you asked for and then returns the
|
||||
address of that object. Now, if you are close to a border, then the closest
|
||||
object may be across that border. When Nominatim then returns the address,
|
||||
contains the county/state/country across the border.
|
||||
it contains the county/state/country across the border.
|
||||
|
||||
#### 3. I get different counties/states/countries when I change the zoom parameter in the reverse query. How is that possible?
|
||||
|
||||
@@ -45,12 +45,12 @@ sometimes the other for the closest point.
|
||||
|
||||
Nominatim assigns each map feature one country. Those outside any administrative
|
||||
boundaries are assigned a special no-country. Continents or other super-national
|
||||
administrations (e.g. European Union, NATO, customs unions) are not supported,
see also [Administrative Boundary](https://wiki.openstreetmap.org/wiki/Tag:boundary%3Dadministrative#Super-national_administrations).
|
||||
|
||||
#### 5. Can you return the timezone?
|
||||
|
||||
See this separate OpenStreetMap-based project [Timezone Boundary Builder](https://github.com/evansiroky/timezone-boundary-builder)
|
||||
See this separate OpenStreetMap-based project [Timezone Boundary Builder](https://github.com/evansiroky/timezone-boundary-builder).
|
||||
|
||||
#### 6. I want to download a list of streets/restaurants of a city/region
|
||||
|
||||
|
||||
@@ -19,13 +19,13 @@ Additional optional parameters are explained below.
|
||||
|
||||
### Output format
|
||||
|
||||
* `format=[html|xml|json|jsonv2|geojson|geocodejson]`
|
||||
* `format=[xml|json|jsonv2|geojson|geocodejson]`
|
||||
|
||||
See [Place Output Formats](Output.md) for details on each format. (Default: xml)
|
||||
|
||||
* `json_callback=<string>`
|
||||
|
||||
Wrap json output in a callback function (JSONP) i.e. `<string>(<json>)`.
|
||||
Wrap JSON output in a callback function (JSONP) i.e. `<string>(<json>)`.
|
||||
Only has an effect for JSON output formats.
|
||||
|
||||
### Output details
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Place Output
|
||||
|
||||
The [\reverse](Reverse.md), [\search](Search.md) and [\lookup](Lookup.md)
|
||||
The [/reverse](Reverse.md), [/search](Search.md) and [/lookup](Lookup.md)
|
||||
API calls produce very similar output which is explained in this section.
|
||||
There is one section for each format which is selectable via the `format`
|
||||
parameter.
|
||||
@@ -46,15 +46,16 @@ a single place (for reverse) of the following format:
|
||||
|
||||
The possible fields are:
|
||||
|
||||
* `place_id` - reference to the Nominatim internal database ID (see notes below)
|
||||
* `place_id` - reference to the Nominatim internal database ID ([see notes](#place_id-is-not-a-persistent-id))
|
||||
* `osm_type`, `osm_id` - reference to the OSM object
|
||||
* `boundingbox` - area of corner coordinates
|
||||
* `boundingbox` - area of corner coordinates ([see notes](#boundingbox))
|
||||
* `lat`, `lon` - latitude and longitude of the centroid of the object
|
||||
* `display_name` - full comma-separated address
|
||||
* `class`, `type` - key and value of the main OSM tag
|
||||
* `importance` - computed importance rank
|
||||
* `icon` - link to class icon (if available)
|
||||
* `address` - dictionary of address details (only with `addressdetails=1`)
|
||||
* `address` - dictionary of address details (only with `addressdetails=1`,
|
||||
[see notes](#addressdetails))
|
||||
* `extratags` - dictionary with additional useful tags like website or maxspeed
|
||||
(only with `extratags=1`)
|
||||
* `namedetails` - dictionary with full list of available names including ref etc.
|
||||
@@ -70,20 +71,21 @@ This is the same as the JSON format with two changes:
|
||||
|
||||
### GeoJSON
|
||||
|
||||
This format follows the [RFC7946](http://geojson.org). Every feature includes
|
||||
This format follows the [RFC7946](https://geojson.org). Every feature includes
|
||||
a bounding box (`bbox`).
|
||||
|
||||
The feature list has the following fields:
|
||||
|
||||
* `place_id` - reference to the Nominatim internal database ID (see notes below)
|
||||
* `place_id` - reference to the Nominatim internal database ID ([see notes](#place_id-is-not-a-persistent-id))
|
||||
* `osm_type`, `osm_id` - reference to the OSM object
|
||||
* `category`, `type` - key and value of the main OSM tag
|
||||
* `display_name` - full comma-separated address
|
||||
* `place_rank` - class search rank
|
||||
* `importance` - computed importance rank
|
||||
* `icon` - link to class icon (if available)
|
||||
* `address` - dictionary of address details (only with `addressdetails=1`)
|
||||
* `extratags` - dictionary with additional useful tags like website or maxspeed
|
||||
* `address` - dictionary of address details (only with `addressdetails=1`,
|
||||
[see notes](#addressdetails))
|
||||
* `extratags` - dictionary with additional useful tags like `website` or `maxspeed`
|
||||
(only with `extratags=1`)
|
||||
* `namedetails` - dictionary with full list of available names including ref etc.
|
||||
|
||||
@@ -100,12 +102,9 @@ The following feature attributes are implemented:
|
||||
* `type` - value of the main tag of the object (e.g. residential, restaurant, ...)
|
||||
* `label` - full comma-separated address
|
||||
* `name` - localised name of the place
|
||||
* `housenumber`, `street`, `locality`, `postcode`, `city`,
|
||||
`district`, `county`, `state`, `country` -
|
||||
* `housenumber`, `street`, `locality`, `district`, `postcode`, `city`,
|
||||
`county`, `state`, `country` -
|
||||
provided when it can be determined from the address
|
||||
(see [this issue](https://github.com/openstreetmap/Nominatim/issues/1080) for
|
||||
current limitations on the correctness of the address) and `addressdetails=1`
|
||||
was given
|
||||
* `admin` - list of localised names of administrative boundaries (only with `addressdetails=1`)
|
||||
|
||||
Use `polygon_geojson` to output the full geometry of the object instead
|
||||
@@ -120,7 +119,7 @@ formats depending on the API call.
|
||||
|
||||
```
|
||||
<reversegeocode timestamp="Sat, 11 Aug 18 11:53:21 +0000"
|
||||
attribution="Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright"
|
||||
attribution="Data © OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright"
|
||||
querystring="lat=48.400381&lon=11.745876&zoom=5&format=xml">
|
||||
<result place_id="179509537" osm_type="relation" osm_id="2145268" ref="BY"
|
||||
lat="48.9467562" lon="11.4038717"
|
||||
@@ -148,13 +147,13 @@ attribution to OSM and the original querystring.
|
||||
|
||||
The place information can be found in the `result` element. The attributes of that element contain:
|
||||
|
||||
* `place_id` - reference to the Nominatim internal database ID (see notes below)
|
||||
* `place_id` - reference to the Nominatim internal database ID ([see notes](#place_id-is-not-a-persistent-id))
|
||||
* `osm_type`, `osm_id` - reference to the OSM object
|
||||
* `ref` - content of `ref` tag if it exists
|
||||
* `lat`, `lon` - latitude and longitude of the centroid of the object
|
||||
* `boundingbox` - comma-separated list of corner coordinates
|
||||
* `boundingbox` - comma-separated list of corner coordinates ([see notes](#boundingbox))
|
||||
|
||||
The full address address of the result can be found in the content of the
|
||||
The full address of the result can be found in the content of the
|
||||
`result` element as a comma-separated list.
|
||||
|
||||
Additional information requested with `addressdetails=1`, `extratags=1` and
|
||||
@@ -164,12 +163,12 @@ Additional information requested with `addressdetails=1`, `extratags=1` and
|
||||
|
||||
```
|
||||
<searchresults timestamp="Sat, 11 Aug 18 11:55:35 +0000"
|
||||
attribution="Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright"
|
||||
attribution="Data © OpenStreetMap contributors, ODbL 1.0. https://www.openstreetmap.org/copyright"
|
||||
querystring="london" polygon="false" exclude_place_ids="100149"
|
||||
more_url="https://nominatim.openstreetmap.org/search.php?q=london&addressdetails=1&extratags=1&exclude_place_ids=100149&format=xml&accept-language=en-US%2Cen%3Bq%3D0.7%2Cde%3Bq%3D0.3">
|
||||
<place place_id="100149" osm_type="node" osm_id="107775" place_rank="15"
|
||||
boundingbox="51.3473219,51.6673219,-0.2876474,0.0323526" lat="51.5073219" lon="-0.1276474"
|
||||
display_name="London, Greater London, England, SW1A 2DU, United Kingdom"
|
||||
display_name="London, Greater London, England, SW1A 2DU, United Kingdom"
|
||||
class="place" type="city" importance="0.9654895765402"
|
||||
icon="https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png">
|
||||
<extratags>
|
||||
@@ -203,11 +202,11 @@ generic information about the query:
|
||||
The place information can be found in the `place` elements, of which there may
|
||||
be more than one. The attributes of that element contain:
|
||||
|
||||
* `place_id` - reference to the Nominatim internal database ID (see notes below)
|
||||
* `place_id` - reference to the Nominatim internal database ID ([see notes](#place_id-is-not-a-persistent-id))
|
||||
* `osm_type`, `osm_id` - reference to the OSM object
|
||||
* `ref` - content of `ref` tag if it exists
|
||||
* `lat`, `lon` - latitude and longitude of the centroid of the object
|
||||
* `boundingbox` - comma-separated list of corner coordinates
|
||||
* `boundingbox` - comma-separated list of corner coordinates ([see notes](#boundingbox))
|
||||
* `place_rank` - class search rank
|
||||
* `display_name` - full comma-separated address
|
||||
* `class`, `type` - key and value of the main OSM tag
|
||||
@@ -232,7 +231,7 @@ permanent for later use.
|
||||
|
||||
The combination `osm_type`+`osm_id` is slightly better but remember that in
|
||||
OpenStreetMap mappers can delete, split, recreate places (and those
|
||||
get a new `osm_id`), there is no link between those old and new id.
|
||||
get a new `osm_id`), there is no link between those old and new ids.
|
||||
Places can also change their meaning without changing their `osm_id`,
|
||||
e.g. when a restaurant is retagged as supermarket. For a more in-depth
|
||||
discussion see [Permanent ID](https://wiki.openstreetmap.org/wiki/Permanent_ID).
|
||||
@@ -244,3 +243,32 @@ relation) so `osm_type`+`osm_id`+`class_name` would be more unique.
|
||||
|
||||
Comma separated list of min latitude, max latitude, min longitude, max longitude.
|
||||
The whole planet would be `-90,90,-180,180`.
|
||||
|
||||
Can be used to pan and center the map on the result, for example with the leafletjs
|
||||
mapping library
|
||||
`map.fitBounds([[bbox[0],bbox[2]],[bbox[1],bbox[3]]], {padding: [20, 20], maxzoom: 16});`
|
||||
|
||||
Bounds crossing the antimeridian have a min longitude of -180 and a max longitude of 180,
|
||||
essentially covering the planet (See [issue 184](https://github.com/openstreetmap/Nominatim/issues/184)).
|
||||
|
||||
### addressdetails
|
||||
|
||||
Address details in the xml and json formats return a list of names together
|
||||
with a designation label. Per default the following labels may appear:
|
||||
|
||||
* continent
|
||||
* country, country_code
|
||||
* region, state, state_district, county
|
||||
* municipality, city, town, village
|
||||
* city_district, district, borough, suburb, subdivision
|
||||
* hamlet, croft, isolated_dwelling
|
||||
* neighbourhood, allotments, quarter
|
||||
* city_block, residential, farm, farmyard, industrial, commercial, retail
|
||||
* road
|
||||
* house_number, house_name
|
||||
* emergency, historic, military, natural, landuse, place, railway,
|
||||
man_made, aerialway, boundary, amenity, aeroway, club, craft, leisure,
|
||||
office, mountain_pass, shop, tourism, bridge, tunnel, waterway
|
||||
|
||||
They roughly correspond to the classification of the OpenStreetMap data
|
||||
according to either the `place` tag or the main key of the object.
|
||||
|
||||
@@ -7,7 +7,7 @@ Its API has the following endpoints for querying the data:
|
||||
* __[/search](Search.md)__ - search OSM objects by name or type
|
||||
* __[/reverse](Reverse.md)__ - search OSM object by their location
|
||||
* __[/lookup](Lookup.md)__ - look up address details for OSM objects by their ID
|
||||
* __/status__ - query the status of the server
|
||||
* __[/status](Status.md)__ - query the status of the server
|
||||
* __/deletable__ - list objects that have been deleted in OSM but are held
|
||||
back in Nominatim in case the deletion was accidental
|
||||
* __/polygons__ - list of broken polygons detected by Nominatim
|
||||
|
||||
@@ -22,7 +22,7 @@ There are two ways how the requested location can be specified:
|
||||
|
||||
A specific OSM node(N), way(W) or relation(R) to return an address for.
|
||||
|
||||
In both cases exactly one object is returned. The two input paramters cannot
|
||||
In both cases exactly one object is returned. The two input parameters cannot
|
||||
be used at the same time. Both accept the additional optional parameters listed
|
||||
below.
|
||||
|
||||
@@ -34,7 +34,7 @@ See [Place Output Formats](Output.md) for details on each format. (Default: html
|
||||
|
||||
* `json_callback=<string>`
|
||||
|
||||
Wrap json output in a callback function ([JSONP](https://en.wikipedia.org/wiki/JSONP)) i.e. `<string>(<json>)`.
|
||||
Wrap JSON output in a callback function ([JSONP](https://en.wikipedia.org/wiki/JSONP)) i.e. `<string>(<json>)`.
|
||||
Only has an effect for JSON output formats.
|
||||
|
||||
### Output details
|
||||
@@ -135,7 +135,7 @@ This overrides the specified machine readable format. (Default: 0)
|
||||
<postcode>B72</postcode>
|
||||
<country>United Kingdom</country>
|
||||
<country_code>gb</country_code>
|
||||
</addressparts>
|
||||
</addressparts>
|
||||
</reversegeocode>
|
||||
```
|
||||
|
||||
@@ -146,7 +146,7 @@ This overrides the specified machine readable format. (Default: 0)
|
||||
```json
|
||||
{
|
||||
"place_id":"134140761",
|
||||
"licence":"Data © OpenStreetMap contributors, ODbL 1.0. http:\/\/www.openstreetmap.org\/copyright",
|
||||
"licence":"Data © OpenStreetMap contributors, ODbL 1.0. https:\/\/www.openstreetmap.org\/copyright",
|
||||
"osm_type":"way",
|
||||
"osm_id":"280940520",
|
||||
"lat":"-34.4391708",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Search queries
|
||||
|
||||
The search API allows to look up a location from a textual description.
|
||||
The search API allows you to look up a location from a textual description.
|
||||
Nominatim supports structured as well as free-form search queries.
|
||||
|
||||
The search query may also contain
|
||||
@@ -46,7 +46,7 @@ In this form, the query may be given through two different sets of parameters:
|
||||
Structured requests are faster but are less robust against alternative
|
||||
OSM tagging schemas. **Do not combine with** `q=<query>` **parameter**.
|
||||
|
||||
All three query forms accept the additional paramters listed below.
|
||||
All three query forms accept the additional parameters listed below.
|
||||
|
||||
### Output format
|
||||
|
||||
@@ -56,7 +56,7 @@ See [Place Output Formats](Output.md) for details on each format. (Default: html
|
||||
|
||||
* `json_callback=<string>`
|
||||
|
||||
Wrap json output in a callback function ([JSONP](https://en.wikipedia.org/wiki/JSONP)) i.e. `<string>(<json>)`.
|
||||
Wrap JSON output in a callback function ([JSONP](https://en.wikipedia.org/wiki/JSONP)) i.e. `<string>(<json>)`.
|
||||
Only has an effect for JSON output formats.
|
||||
|
||||
### Output details
|
||||
@@ -92,8 +92,12 @@ comma-separated list of language codes.
|
||||
* `countrycodes=<countrycode>[,<countrycode>][,<countrycode>]...`
|
||||
|
||||
Limit search results to one or more countries. `<countrycode>` must be the
|
||||
ISO 3166-1alpha2 code, e.g. `gb` for the United Kingdom, `de` for Germany.
|
||||
[ISO 3166-1alpha2](https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2) code,
|
||||
e.g. `gb` for the United Kingdom, `de` for Germany.
|
||||
|
||||
Each place in Nominatim is assigned to one country code based
|
||||
on `admin_level=2` tags, in rare cases to none (for example in
|
||||
international waters outside any country).
|
||||
|
||||
* `exclude_place_ids=<place_id,[place_id],[place_id]`
|
||||
|
||||
@@ -112,7 +116,8 @@ Limit the number of returned results. (Default: 10, Maximum: 50)
|
||||
* `viewbox=<x1>,<y1>,<x2>,<y2>`
|
||||
|
||||
The preferred area to find search results. Any two corner points of the box
|
||||
are accepted in any order as long as they span a real box.
|
||||
are accepted in any order as long as they span a real box. `x` is longitude,
|
||||
`y` is latitude.
|
||||
|
||||
|
||||
* `bounded=[0|1]`
|
||||
@@ -167,21 +172,27 @@ This overrides the specified machine readable format. (Default: 0)
|
||||
## Examples
|
||||
|
||||
|
||||
##### XML with polygon points
|
||||
##### XML with kml polygon
|
||||
|
||||
* [https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon=1&addressdetails=1](https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon=1&addressdetails=1)
|
||||
* [https://nominatim.openstreetmap.org/search/gb/birmingham/pilkington%20avenue/135?format=xml&polygon=1&addressdetails=1](https://nominatim.openstreetmap.org/search/gb/birmingham/pilkington%20avenue/135?format=xml&polygon=1&addressdetails=1)
|
||||
* [https://nominatim.openstreetmap.org/search/135%20pilkington%20avenue,%20birmingham?format=xml&polygon=1&addressdetails=1](https://nominatim.openstreetmap.org/search/135%20pilkington%20avenue,%20birmingham?format=xml&polygon=1&addressdetails=1)
|
||||
* [https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon_geojson=1&addressdetails=1](https://nominatim.openstreetmap.org/search?q=135+pilkington+avenue,+birmingham&format=xml&polygon_geojson=1&addressdetails=1)
|
||||
|
||||
```xml
|
||||
<searchresults timestamp="Sat, 07 Nov 09 14:42:10 +0000" querystring="135 pilkington, avenue birmingham" polygon="true">
|
||||
<place
|
||||
place_id="1620612" osm_type="node" osm_id="452010817"
|
||||
boundingbox="52.548641204834,52.5488433837891,-1.81612110137939,-1.81592094898224"
|
||||
polygonpoints="[['-1.81592098644987','52.5487429714954'],['-1.81592290792183','52.5487234624632'],...]"
|
||||
lat="52.5487429714954" lon="-1.81602098644987"
|
||||
display_name="135, Pilkington Avenue, Wylde Green, City of Birmingham, West Midlands (county), B72, United Kingdom"
|
||||
<place
|
||||
place_id="1620612" osm_type="node" osm_id="452010817"
|
||||
boundingbox="52.548641204834,52.5488433837891,-1.81612110137939,-1.81592094898224"
|
||||
lat="52.5487429714954" lon="-1.81602098644987"
|
||||
display_name="135, Pilkington Avenue, Wylde Green, City of Birmingham, West Midlands (county), B72, United Kingdom"
|
||||
class="place" type="house">
|
||||
<geokml>
|
||||
<Polygon>
|
||||
<outerBoundaryIs>
|
||||
<LinearRing>
|
||||
<coordinates>-1.816513,52.548756599999997 -1.816434,52.548747300000002 -1.816429,52.5487629 -1.8163717,52.548756099999999 -1.8163464,52.548834599999999 -1.8164599,52.548848100000001 -1.8164685,52.5488213 -1.8164913,52.548824000000003 -1.816513,52.548756599999997</coordinates>
|
||||
</LinearRing>
|
||||
</outerBoundaryIs>
|
||||
</Polygon>
|
||||
</geokml>
|
||||
<house_number>135</house_number>
|
||||
<road>Pilkington Avenue</road>
|
||||
<village>Wylde Green</village>
|
||||
@@ -237,7 +248,7 @@ This overrides the specified machine readable format. (Default: 0)
|
||||
|
||||
##### JSON with address details
|
||||
|
||||
[https://nominatim.openstreetmap.org/?format=json&addressdetails=1&q=bakery+in+berlin+wedding&format=json&limit=1](https://nominatim.openstreetmap.org/?format=json&addressdetails=1&q=bakery+in+berlin+wedding&format=json&limit=1)
|
||||
[https://nominatim.openstreetmap.org/?addressdetails=1&q=bakery+in+berlin+wedding&format=json&limit=1](https://nominatim.openstreetmap.org/?addressdetails=1&q=bakery+in+berlin+wedding&format=json&limit=1)
|
||||
|
||||
```json
|
||||
{
|
||||
|
||||
60
docs/api/Status.md
Normal file
@@ -0,0 +1,60 @@
|
||||
# Status
|
||||
|
||||
Useful for checking if the service and the database are running. The JSON output also shows
|
||||
when the database was last updated.
|
||||
|
||||
## Parameters
|
||||
|
||||
* `format=[text|json]` (defaults to 'text')
|
||||
|
||||
|
||||
## Output
|
||||
|
||||
#### Text format
|
||||
|
||||
```
|
||||
https://nominatim.openstreetmap.org/status.php
|
||||
```
|
||||
|
||||
will return HTTP status code 200 and print `OK`.
|
||||
|
||||
On error it will return HTTP status code 500 and print a message, e.g.
|
||||
`ERROR: Database connection failed`.
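A minimal monitoring sketch built on this behaviour; the URL assumes the standard `/nominatim` alias from the installation instructions, so adjust it to your setup:

```sh
# Exit non-zero unless the endpoint answers with HTTP 200 and prints OK.
if ! curl -sf "http://localhost/nominatim/status.php" | grep -q "^OK"; then
    echo "Nominatim status check failed" >&2
    exit 1
fi
```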
|
||||
|
||||
|
||||
|
||||
#### JSON format
|
||||
|
||||
```
|
||||
https://nominatim.openstreetmap.org/status.php?format=json
|
||||
```
|
||||
|
||||
will return HTTP status code 200 and a structure like
|
||||
|
||||
```json
|
||||
{
|
||||
"status": 0,
|
||||
"message": "OK",
|
||||
"data_updated": "2020-05-04T14:47:00+00:00"
|
||||
}
|
||||
```
|
||||
|
||||
On error it will also return HTTP status code 200 and a structure with error
|
||||
code and message, e.g.
|
||||
|
||||
```json
|
||||
{
|
||||
"status": 700,
|
||||
"message": "Database connection failed"
|
||||
}
|
||||
```
|
||||
|
||||
Possible status codes are
|
||||
|
||||
| code | message | notes |
|
||||
|-----|----------------------|---------------------------------------------------|
|
||||
| 700 | "No database" | connection failed |
|
||||
| 701 | "Module failed" | database could not load nominatim.so |
|
||||
| 702 | "Module call failed" | nominatim.so loaded but calling a function failed |
|
||||
| 703 | "Query failed" | test query against a database table failed |
|
||||
| 704 | "No value" | test query worked but returned no results |
|
||||
36
docs/develop/Documentation.md
Normal file
@@ -0,0 +1,36 @@
|
||||
# Documentation Pages
|
||||
|
||||
The [Nominatim documentation](https://nominatim.org/release-docs/develop/) is built using the [MkDocs](https://www.mkdocs.org/) static site generation framework. The master branch is automatically deployed every night under [https://nominatim.org/release-docs/develop/](https://nominatim.org/release-docs/develop/).
|
||||
|
||||
To preview local changes, first install MkDocs
|
||||
|
||||
```
|
||||
pip3 install --user mkdocs
|
||||
```
|
||||
|
||||
|
||||
Then go to the build directory and run
|
||||
|
||||
```
|
||||
make doc
|
||||
INFO - Cleaning site directory
|
||||
INFO - Building documentation to directory: /home/vagrant/build/site-html
|
||||
```
|
||||
|
||||
This runs `mkdocs build` plus extra transformation of some files and adds
|
||||
symlinks (see `CMakeLists.txt` for the exact steps).
|
||||
|
||||
Now you can start a webserver for local testing
|
||||
|
||||
```
|
||||
build> mkdocs serve
|
||||
[server:296] Serving on http://127.0.0.1:8000
|
||||
[handlers:62] Start watching changes
|
||||
```
|
||||
|
||||
If you develop inside a Vagrant virtual machine:
|
||||
|
||||
* add port forwarding to your Vagrantfile,
|
||||
e.g. `config.vm.network "forwarded_port", guest: 8000, host: 8000`
|
||||
* use `mkdocs serve --dev-addr 0.0.0.0:8000` because the default localhost
|
||||
IP does not get forwarded.
|
||||
@@ -24,7 +24,7 @@ with multiple tags that may constitute a principal tag. Take for example a
|
||||
motorway bridge. In OSM, this would be a way which is tagged with
|
||||
`highway=motorway` and `bridge=yes`. This way would appear in the `place` table
|
||||
once with `class` of `highway` and once with a `class` of `bridge`. Thus the
|
||||
*uique key* for `place` is (`osm_type`, `osm_id`, `class`).
|
||||
*unique key* for `place` is (`osm_type`, `osm_id`, `class`).
|
||||
|
||||
## Configuring the Import
|
||||
|
||||
@@ -55,8 +55,8 @@ suffix match can be defined similarly with a string that starts with a `*`. Any
|
||||
other string constitutes an exact match.
|
||||
|
||||
The second part of the rules defines a list of values and the properties that
|
||||
apply to a successful match. Value strings may be either empty, which again
|
||||
means that thy match against any value, or describe an exact match. Prefix
|
||||
apply to a successful match. Value strings may be either empty, which
|
||||
means that they match any value, or describe an exact match. Prefix
|
||||
or suffix matching of values is not possible.
|
||||
|
||||
For a rule to match, it has to find a valid combination of keys and values. The
|
||||
@@ -66,7 +66,7 @@ The rules in a configuration file are processed sequentially and the first
|
||||
match for each tag wins.
|
||||
|
||||
A rule where key and value are the empty string is special. This defines the
|
||||
fallback when none of the rules matches. The fallback is always used as a last
|
||||
fallback when none of the rules match. The fallback is always used as a last
|
||||
resort when nothing else matches, no matter where the rule appears in the file.
|
||||
Defining multiple fallback rules is not allowed. What happens in this case
|
||||
is undefined.
|
||||
@@ -121,17 +121,17 @@ One or more of the following properties may be given for each tag:
|
||||
|
||||
* `address`
|
||||
|
||||
At tag to the list of address tags. If the tag starts with `addr:` or
|
||||
Add tag to the list of address tags. If the tag starts with `addr:` or
|
||||
`is_in:`, then this prefix is cut off before adding it to the list.
|
||||
|
||||
* `postcode`
|
||||
|
||||
At the value as a postcode to the address tags. If multiple tags are
|
||||
Add the value as a postcode to the address tags. If multiple tags are
|
||||
candidate for postcodes, one wins out and the others are dropped.
|
||||
|
||||
* `country`
|
||||
|
||||
At the value as a country code to the address tags. The value must be a
|
||||
Add the value as a country code to the address tags. The value must be a
|
||||
two letter country code, otherwise it is ignored. If there are multiple
|
||||
tags that match, then one wins out and the others are dropped.
|
||||
|
||||
|
||||
45
docs/develop/Postcodes.md
Normal file
@@ -0,0 +1,45 @@
|
||||
# Postcodes in Nominatim
|
||||
|
||||
The blog post
|
||||
[Nominatim and Postcodes](https://www.openstreetmap.org/user/lonvia/diary/43143)
|
||||
describes the handling implemented since Nominatim 3.1.
|
||||
|
||||
Postcode centroids (aka 'calculated postcodes') are generated by looking at all
|
||||
postcodes of a country, grouping them and calculating the geometric centroid.
|
||||
There is currently no logic to deal with extreme outliers (typos or other
|
||||
mistakes in OSM data). There is also no check whether a postcode adheres to a
|
||||
country's format, e.g. if Swiss postcodes are 4 digits.
|
||||
|
||||
|
||||
## Regularly updating calculated postcodes
|
||||
|
||||
The script to rerun the calculation is
|
||||
`build/utils/update.php --calculate-postcodes`
|
||||
and runs once per night on nominatim.openstreetmap.org.
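To do the same on your own server, the command can be run manually or scheduled (for example from cron); the path below is an assumption, so point it at your own build directory:

```sh
cd /srv/nominatim/build && ./utils/update.php --calculate-postcodes
```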
|
||||
|
||||
|
||||
## Finding places that share a specific postcode
|
||||
|
||||
In the Nominatim database run
|
||||
|
||||
```sql
|
||||
SELECT address->'postcode' as pc,
|
||||
osm_type, osm_id, class, type,
|
||||
st_x(centroid) as lon, st_y(centroid) as lat
|
||||
FROM placex
|
||||
WHERE country_code='fr'
|
||||
AND upper(trim (both ' ' from address->'postcode')) = '33210';
|
||||
```
|
||||
|
||||
Alternatively on [Overpass](https://overpass-turbo.eu/) run the following query
|
||||
|
||||
```
|
||||
[out:json][timeout:250];
|
||||
area["name"="France"]->.boundaryarea;
|
||||
(
|
||||
nwr(area.boundaryarea)["addr:postcode"="33210"];
|
||||
);
|
||||
out body;
|
||||
>;
|
||||
out skel qt;
|
||||
```
|
||||
@@ -21,7 +21,7 @@ Usually only administrative boundaries and place nodes and areas are
|
||||
eligible to be part of an address. All other objects have an address rank
|
||||
of 0.
|
||||
|
||||
Note that the search rank of a place place a role in the address computation
|
||||
Note that the search rank of a place plays a role in the address computation
|
||||
as well. When collecting the places that should make up the address parts
|
||||
then only those places are taken into account that have a lower address rank than
|
||||
the search rank of the base object.
|
||||
@@ -37,7 +37,7 @@ into the database. There are a few hard-coded rules for the assignment:
|
||||
* highway nodes
|
||||
* landuse that is not an area
|
||||
|
||||
Other than that, the ranks can be freely assigned via the json file
|
||||
Other than that, the ranks can be freely assigned via the JSON file
|
||||
defined with `CONST_Address_Level_Config` according to their type and
|
||||
the country they are in.
|
||||
|
||||
@@ -78,12 +78,13 @@ definition is used as a fallback, when nothing more specific for a given
|
||||
country exists.
|
||||
|
||||
`tags` contains the ranks for key/value pairs. The ranks can be either a
|
||||
single number, in which case they are to search and address rank, or a tuple
|
||||
single number, in which case they are the search and address rank, or an array
|
||||
of search and address rank (in that order). The value may be left empty.
|
||||
Then the rank is used when no more specific value is found for the given
|
||||
key.
|
||||
|
||||
Countries and key/value combination may appear in multiple defintions. Just
|
||||
Countries and key/value combination may appear in multiple definitions. Just
|
||||
make sure that each combination of country/key/value appears only once per
|
||||
file. Otherwise the import will fail with a UNIQUE INDEX constraint violation
|
||||
on import.
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# Basic Architecture
|
||||
|
||||
Nominatim provides geocoding based on OpenStreetMap data. It uses a Postgresql
|
||||
Nominatim provides geocoding based on OpenStreetMap data. It uses a PostgreSQL
|
||||
database as a backend for storing the data.
|
||||
|
||||
There are three basic parts to Nominatim's architecture: the data import,
|
||||
@@ -15,10 +15,10 @@ the import can be found in the database table `place`.
|
||||
The __address computation__ or __indexing__ stage takes the data from `place`
|
||||
and adds additional information needed for geocoding. It ranks the places by
|
||||
importance, links objects that belong together and computes addresses and
|
||||
the search index. Most of this work is done in Pl/pqSQL via database triggers
|
||||
the search index. Most of this work is done in PL/pgSQL via database triggers
|
||||
and can be found in the file `sql/functions.sql`.
|
||||
|
||||
The __search frontend__ implements the actual API. It takes queries for
|
||||
search and reverse geocoding queries from the user, looks up the data and
|
||||
The __search frontend__ implements the actual API. It takes search
|
||||
and reverse geocoding queries from the user, looks up the data and
|
||||
returns the results in the requested format. This part is written in PHP
|
||||
and can be found in the `lib/` and `website/` directories.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
site_name: Nominatim Documentation
|
||||
theme: readthedocs
|
||||
docs_dir: ${CMAKE_CURRENT_BINARY_DIR}
|
||||
site_url: http://nominatim.org
|
||||
site_url: https://nominatim.org
|
||||
repo_url: https://github.com/openstreetmap/Nominatim
|
||||
pages:
|
||||
- 'Introduction' : 'index.md'
|
||||
@@ -11,29 +11,35 @@ pages:
|
||||
- 'Reverse': 'api/Reverse.md'
|
||||
- 'Address Lookup': 'api/Lookup.md'
|
||||
- 'Details' : 'api/Details.md'
|
||||
- 'Status' : 'api/Status.md'
|
||||
- 'Place Output Formats': 'api/Output.md'
|
||||
- 'FAQ': 'api/Faq.md'
|
||||
- 'Administration Guide':
|
||||
- 'Basic Installation': 'admin/Installation.md'
|
||||
- 'Importing and Updating' : 'admin/Import-and-Update.md'
|
||||
- 'Advanced Installations' : 'admin/Advanced-Installations.md'
|
||||
- 'Migration from older Versions' : 'admin/Migration.md'
|
||||
- 'Troubleshooting' : 'admin/Faq.md'
|
||||
- 'Developers Guide':
|
||||
- 'Overview' : 'develop/overview.md'
|
||||
- 'OSM Data Import' : 'develop/Import.md'
|
||||
- 'Place Ranking' : 'develop/Ranking.md'
|
||||
- 'Postcodes' : 'develop/Postcodes.md'
|
||||
- 'Documentation' : 'develop/Documentation.md'
|
||||
- 'External Data Sources':
|
||||
- 'Overview' : 'data-sources/overview.md'
|
||||
- 'US Census (Tiger)': 'data-sources/US-Tiger.md'
|
||||
- 'GB Postcodes': 'data-sources/GB-Postcodes.md'
|
||||
- 'Country Grid': 'data-sources/Country-Grid.md'
|
||||
- 'Wikipedia & Wikidata': 'data-sources/Wikipedia-Wikidata.md'
|
||||
- 'Appendix':
|
||||
- 'Installation on CentOS 7' : 'appendix/Install-on-Centos-7.md'
|
||||
- 'Installation on Ubuntu 16' : 'appendix/Install-on-Ubuntu-16.md'
|
||||
- 'Installation on CentOS 8' : 'appendix/Install-on-Centos-8.md'
|
||||
- 'Installation on Ubuntu 18' : 'appendix/Install-on-Ubuntu-18.md'
|
||||
- 'Installation on Ubuntu 20' : 'appendix/Install-on-Ubuntu-20.md'
|
||||
markdown_extensions:
|
||||
- codehilite:
|
||||
use_pygments: False
|
||||
- codehilite
|
||||
- admonition
|
||||
- toc:
|
||||
permalink:
|
||||
extra_css: [extra.css]
|
||||
extra_css: [extra.css, styles.css]
|
||||
|
||||
69
docs/styles.css
Normal file
@@ -0,0 +1,69 @@
|
||||
.codehilite .hll { background-color: #ffffcc }
|
||||
.codehilite { background: #f0f0f0; }
|
||||
.codehilite .c { color: #60a0b0; font-style: italic } /* Comment */
|
||||
.codehilite .err { /* border: 1px solid #FF0000 */ } /* Error */
|
||||
.codehilite .k { color: #007020; font-weight: bold } /* Keyword */
|
||||
.codehilite .o { color: #666666 } /* Operator */
|
||||
.codehilite .ch { color: #60a0b0; font-style: italic } /* Comment.Hashbang */
|
||||
.codehilite .cm { color: #60a0b0; font-style: italic } /* Comment.Multiline */
|
||||
.codehilite .cp { color: #007020 } /* Comment.Preproc */
|
||||
.codehilite .cpf { color: #60a0b0; font-style: italic } /* Comment.PreprocFile */
|
||||
.codehilite .c1 { color: #60a0b0; font-style: italic } /* Comment.Single */
|
||||
.codehilite .cs { color: #60a0b0; background-color: #fff0f0 } /* Comment.Special */
|
||||
.codehilite .gd { color: #A00000 } /* Generic.Deleted */
|
||||
.codehilite .ge { font-style: italic } /* Generic.Emph */
|
||||
.codehilite .gr { color: #FF0000 } /* Generic.Error */
|
||||
.codehilite .gh { color: #000080; font-weight: bold } /* Generic.Heading */
|
||||
.codehilite .gi { color: #00A000 } /* Generic.Inserted */
|
||||
.codehilite .go { color: #888888 } /* Generic.Output */
|
||||
.codehilite .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */
|
||||
.codehilite .gs { font-weight: bold } /* Generic.Strong */
|
||||
.codehilite .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
|
||||
.codehilite .gt { color: #0044DD } /* Generic.Traceback */
|
||||
.codehilite .kc { color: #007020; font-weight: bold } /* Keyword.Constant */
|
||||
.codehilite .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */
|
||||
.codehilite .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */
|
||||
.codehilite .kp { color: #007020 } /* Keyword.Pseudo */
|
||||
.codehilite .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */
|
||||
.codehilite .kt { color: #902000 } /* Keyword.Type */
|
||||
.codehilite .m { color: #40a070 } /* Literal.Number */
|
||||
.codehilite .s { color: #4070a0 } /* Literal.String */
|
||||
.codehilite .na { color: #4070a0 } /* Name.Attribute */
|
||||
.codehilite .nb { color: #007020 } /* Name.Builtin */
|
||||
.codehilite .nc { color: #0e84b5; font-weight: bold } /* Name.Class */
|
||||
.codehilite .no { color: #60add5 } /* Name.Constant */
|
||||
.codehilite .nd { color: #555555; font-weight: bold } /* Name.Decorator */
|
||||
.codehilite .ni { color: #d55537; font-weight: bold } /* Name.Entity */
|
||||
.codehilite .ne { color: #007020 } /* Name.Exception */
|
||||
.codehilite .nf { color: #06287e } /* Name.Function */
|
||||
.codehilite .nl { color: #002070; font-weight: bold } /* Name.Label */
|
||||
.codehilite .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */
|
||||
.codehilite .nt { color: #062873; font-weight: bold } /* Name.Tag */
|
||||
.codehilite .nv { color: #bb60d5 } /* Name.Variable */
|
||||
.codehilite .ow { color: #007020; font-weight: bold } /* Operator.Word */
|
||||
.codehilite .w { color: #bbbbbb } /* Text.Whitespace */
|
||||
.codehilite .mb { color: #40a070 } /* Literal.Number.Bin */
|
||||
.codehilite .mf { color: #40a070 } /* Literal.Number.Float */
|
||||
.codehilite .mh { color: #40a070 } /* Literal.Number.Hex */
|
||||
.codehilite .mi { color: #40a070 } /* Literal.Number.Integer */
|
||||
.codehilite .mo { color: #40a070 } /* Literal.Number.Oct */
|
||||
.codehilite .sa { color: #4070a0 } /* Literal.String.Affix */
|
||||
.codehilite .sb { color: #4070a0 } /* Literal.String.Backtick */
|
||||
.codehilite .sc { color: #4070a0 } /* Literal.String.Char */
|
||||
.codehilite .dl { color: #4070a0 } /* Literal.String.Delimiter */
|
||||
.codehilite .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
|
||||
.codehilite .s2 { color: #4070a0 } /* Literal.String.Double */
|
||||
.codehilite .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
|
||||
.codehilite .sh { color: #4070a0 } /* Literal.String.Heredoc */
|
||||
.codehilite .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */
|
||||
.codehilite .sx { color: #c65d09 } /* Literal.String.Other */
|
||||
.codehilite .sr { color: #235388 } /* Literal.String.Regex */
|
||||
.codehilite .s1 { color: #4070a0 } /* Literal.String.Single */
|
||||
.codehilite .ss { color: #517918 } /* Literal.String.Symbol */
|
||||
.codehilite .bp { color: #007020 } /* Name.Builtin.Pseudo */
|
||||
.codehilite .fm { color: #06287e } /* Name.Function.Magic */
|
||||
.codehilite .vc { color: #bb60d5 } /* Name.Variable.Class */
|
||||
.codehilite .vg { color: #bb60d5 } /* Name.Variable.Global */
|
||||
.codehilite .vi { color: #bb60d5 } /* Name.Variable.Instance */
|
||||
.codehilite .vm { color: #bb60d5 } /* Name.Variable.Magic */
|
||||
.codehilite .il { color: #40a070 } /* Literal.Number.Integer.Long */
|
||||
@@ -9,12 +9,15 @@ require_once(CONST_BasePath.'/lib/ClassTypes.php');
|
||||
*/
|
||||
class AddressDetails
|
||||
{
|
||||
private $iPlaceID;
|
||||
private $aAddressLines;
|
||||
|
||||
public function __construct(&$oDB, $iPlaceID, $sHousenumber, $mLangPref)
|
||||
{
|
||||
$this->iPlaceID = $iPlaceID;
|
||||
|
||||
if (is_array($mLangPref)) {
|
||||
$mLangPref = 'ARRAY['.join(',', array_map('getDBQuoted', $mLangPref)).']';
|
||||
$mLangPref = $oDB->getArraySQL($oDB->getDBQuotedList($mLangPref));
|
||||
}
|
||||
|
||||
if (!isset($sHousenumber)) {
|
||||
@@ -58,48 +61,94 @@ class AddressDetails
|
||||
return join(', ', $aParts);
|
||||
}
|
||||
|
||||
public function getAddressNames()
|
||||
public function getAddressNames($sCountry = null)
|
||||
{
|
||||
$aAddress = array();
|
||||
$aFallback = array();
|
||||
|
||||
foreach ($this->aAddressLines as $aLine) {
|
||||
if (!self::isAddress($aLine)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
$bFallback = false;
|
||||
$aTypeLabel = ClassTypes\getInfo($aLine);
|
||||
$sTypeLabel = ClassTypes\getLabelTag($aLine);
|
||||
|
||||
if ($aTypeLabel === false) {
|
||||
$aTypeLabel = ClassTypes\getFallbackInfo($aLine);
|
||||
$bFallback = true;
|
||||
}
|
||||
|
||||
$sName = false;
|
||||
if (isset($aLine['localname']) && $aLine['localname']) {
|
||||
$sName = null;
|
||||
if (isset($aLine['localname']) && $aLine['localname']!=='') {
|
||||
$sName = $aLine['localname'];
|
||||
} elseif (isset($aLine['housenumber']) && $aLine['housenumber']) {
|
||||
} elseif (isset($aLine['housenumber']) && $aLine['housenumber']!=='') {
|
||||
$sName = $aLine['housenumber'];
|
||||
}
|
||||
|
||||
if ($sName) {
|
||||
$sTypeLabel = strtolower(isset($aTypeLabel['simplelabel']) ? $aTypeLabel['simplelabel'] : $aTypeLabel['label']);
|
||||
$sTypeLabel = str_replace(' ', '_', $sTypeLabel);
|
||||
if (isset($sName)) {
|
||||
$sTypeLabel = strtolower(str_replace(' ', '_', $sTypeLabel));
|
||||
if (!isset($aAddress[$sTypeLabel])
|
||||
|| isset($aFallback[$sTypeLabel])
|
||||
|| $aLine['class'] == 'place'
|
||||
) {
|
||||
$aAddress[$sTypeLabel] = $sName;
|
||||
if ($bFallback) {
|
||||
$aFallback[$sTypeLabel] = $bFallback;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return $aAddress;
|
||||
}
|
||||
|
||||
/**
|
||||
* Annotates the given json with geocodejson address information fields.
|
||||
*
|
||||
* @param array $aJson Json hash to add the fields to.
|
||||
*
|
||||
* Geocodejson has the following fields:
|
||||
* street, locality, postcode, city, district,
|
||||
* county, state, country
|
||||
*
|
||||
* Postcode and housenumber are added by type, district is not used.
|
||||
* All other fields are set according to address rank.
|
||||
*/
|
||||
public function addGeocodeJsonAddressParts(&$aJson)
|
||||
{
|
||||
foreach (array_reverse($this->aAddressLines) as $aLine) {
|
||||
if (!$aLine['isaddress']) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!isset($aLine['localname']) || $aLine['localname'] == '') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if ($aLine['type'] == 'postcode' || $aLine['type'] == 'postal_code') {
|
||||
$aJson['postcode'] = $aLine['localname'];
|
||||
continue;
|
||||
}
|
||||
|
||||
if ($aLine['type'] == 'house_number') {
|
||||
$aJson['housenumber'] = $aLine['localname'];
|
||||
continue;
|
||||
}
|
||||
|
||||
if ($this->iPlaceID == $aLine['place_id']) {
|
||||
continue;
|
||||
}
|
||||
|
||||
$iRank = (int)$aLine['rank_address'];
|
||||
|
||||
if ($iRank > 25 && $iRank < 28) {
|
||||
$aJson['street'] = $aLine['localname'];
|
||||
} elseif ($iRank >= 22 && $iRank <= 25) {
|
||||
$aJson['locality'] = $aLine['localname'];
|
||||
} elseif ($iRank >= 17 && $iRank <= 21) {
|
||||
$aJson['district'] = $aLine['localname'];
|
||||
} elseif ($iRank >= 13 && $iRank <= 16) {
|
||||
$aJson['city'] = $aLine['localname'];
|
||||
} elseif ($iRank >= 10 && $iRank <= 12) {
|
||||
$aJson['county'] = $aLine['localname'];
|
||||
} elseif ($iRank >= 5 && $iRank <= 9) {
|
||||
$aJson['state'] = $aLine['localname'];
|
||||
} elseif ($iRank == 4) {
|
||||
$aJson['country'] = $aLine['localname'];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public function getAdminLevels()
|
||||
{
|
||||
$aAddress = array();
|
||||
|
||||
@@ -2,373 +2,559 @@
|
||||
|
||||
namespace Nominatim\ClassTypes;
|
||||
|
||||
function getInfo($aPlace)
|
||||
/**
|
||||
* Create a label tag for the given place that can be used as an XML name.
|
||||
*
|
||||
* @param array[] $aPlace Information about the place to label.
|
||||
*
|
||||
* A label tag groups various object types together under a common
|
||||
* label. The returned value is lower case and has no spaces
|
||||
*/
|
||||
function getLabelTag($aPlace, $sCountry = null)
|
||||
{
|
||||
$aClassType = getList();
|
||||
|
||||
if (isset($aPlace['admin_level'])) {
|
||||
$sName = $aPlace['class'].':'.$aPlace['type'].':'.$aPlace['admin_level'];
|
||||
if (isset($aClassType[$sName])) {
|
||||
return $aClassType[$sName];
|
||||
}
|
||||
$iRank = (int) ($aPlace['rank_address'] ?? 30);
|
||||
$sLabel;
|
||||
if (isset($aPlace['place_type'])) {
|
||||
$sLabel = $aPlace['place_type'];
|
||||
} elseif ($aPlace['class'] == 'boundary' && $aPlace['type'] == 'administrative') {
|
||||
$sLabel = getBoundaryLabel($iRank/2, $sCountry);
|
||||
} elseif ($iRank < 26) {
|
||||
$sLabel = $aPlace['type'];
|
||||
} elseif ($iRank < 28) {
|
||||
$sLabel = 'road';
|
||||
} elseif ($aPlace['class'] == 'place'
|
||||
&& ($aPlace['type'] == 'house_number' ||
|
||||
$aPlace['type'] == 'house_name' ||
|
||||
$aPlace['type'] == 'country_code')
|
||||
) {
|
||||
$sLabel = $aPlace['type'];
|
||||
} else {
|
||||
$sLabel = $aPlace['class'];
|
||||
}
|
||||
|
||||
$sName = $aPlace['class'].':'.$aPlace['type'];
|
||||
if (isset($aClassType[$sName])) {
|
||||
return $aClassType[$sName];
|
||||
}
|
||||
|
||||
return false;
|
||||
return strtolower(str_replace(' ', '_', $sLabel));
|
||||
}
|
||||
|
||||
function getFallbackInfo($aPlace)
|
||||
/**
|
||||
* Create a label for the given place.
|
||||
*
|
||||
* @param array[] $aPlace Information about the place to label.
|
||||
*/
|
||||
function getLabel($aPlace, $sCountry = null)
|
||||
{
|
||||
$aClassType = getList();
|
||||
|
||||
$sFallback = 'boundary:administrative:'.((int)($aPlace['rank_address']/2));
|
||||
if (isset($aClassType[$sFallback])) {
|
||||
return $aClassType[$sFallback];
|
||||
if (isset($aPlace['place_type'])) {
|
||||
return ucwords(str_replace('_', ' ', $aPlace['place_type']));
|
||||
}
|
||||
|
||||
return array('simplelabel' => 'address'.$aPlace['rank_address']);
|
||||
if ($aPlace['class'] == 'boundary' && $aPlace['type'] == 'administrative') {
|
||||
return getBoundaryLabel(($aPlace['rank_address'] ?? 30)/2, $sCountry ?? null);
|
||||
}
|
||||
|
||||
// Return a label only for 'important' class/type combinations
|
||||
if (getImportance($aPlace) !== null) {
|
||||
return ucwords(str_replace('_', ' ', $aPlace['type']));
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function getProperty($aPlace, $sProp, $mDefault = false)
|
||||
|
||||
/**
|
||||
* Return a simple label for an administrative boundary for the given country.
|
||||
*
|
||||
* @param int $iAdminLevel Content of admin_level tag.
|
||||
* @param string $sCountry Country code of the country where the object is
|
||||
* in. May be null, in which case a world-wide
|
||||
* fallback is used.
|
||||
* @param string $sFallback String to return if no explicit string is listed.
|
||||
*
|
||||
* @return string
|
||||
*/
|
||||
function getBoundaryLabel($iAdminLevel, $sCountry, $sFallback = 'Administrative')
|
||||
{
|
||||
$aClassType = getList();
|
||||
static $aBoundaryList = array (
|
||||
'default' => array (
|
||||
1 => 'Continent',
|
||||
2 => 'Country',
|
||||
3 => 'Region',
|
||||
4 => 'State',
|
||||
5 => 'State District',
|
||||
6 => 'County',
|
||||
7 => 'Municipality',
|
||||
8 => 'City',
|
||||
9 => 'City District',
|
||||
10 => 'Suburb',
|
||||
11 => 'Neighbourhood'
|
||||
),
|
||||
'no' => array (
|
||||
3 => 'State',
|
||||
4 => 'County'
|
||||
),
|
||||
'se' => array (
|
||||
3 => 'State',
|
||||
4 => 'County'
|
||||
)
|
||||
);
|
||||
|
||||
if (isset($aPlace['admin_level'])) {
|
||||
$sName = $aPlace['class'].':'.$aPlace['type'].':'.$aPlace['admin_level'];
|
||||
if (isset($aClassType[$sName]) && isset($aClassType[$sName][$sProp])) {
|
||||
return $aClassType[$sName][$sProp];
|
||||
}
|
||||
if (isset($aBoundaryList[$sCountry])
|
||||
&& isset($aBoundaryList[$sCountry][$iAdminLevel])
|
||||
) {
|
||||
return $aBoundaryList[$sCountry][$iAdminLevel];
|
||||
}
|
||||
|
||||
$sName = $aPlace['class'].':'.$aPlace['type'];
|
||||
if (isset($aClassType[$sName]) && isset($aClassType[$sName][$sProp])) {
|
||||
return $aClassType[$sName][$sProp];
|
||||
}
|
||||
|
||||
return $mDefault;
|
||||
return $aBoundaryList['default'][$iAdminLevel] ?? $sFallback;
|
||||
}
|
||||
|
||||
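For illustration, the fallback order of getBoundaryLabel as defined above (return values follow the static table; country codes are only examples):

    getBoundaryLabel(4, 'se');   // 'County'         - country-specific entry wins
    getBoundaryLabel(4, 'de');   // 'State'          - falls back to the default table
    getBoundaryLabel(12, null);  // 'Administrative' - level not listed, fallback string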
function getListWithImportance()
|
||||
/**
|
||||
* Return an estimated radius of how far the object node extends.
|
||||
*
|
||||
* @param array[] $aPlace Information about the place. This must be a node
|
||||
* feature.
|
||||
*
|
||||
* @return float The radius around the feature in degrees.
|
||||
*/
|
||||
function getDefRadius($aPlace)
|
||||
{
|
||||
static $aOrders = null;
|
||||
if ($aOrders === null) {
|
||||
$aOrders = getList();
|
||||
$i = 1;
|
||||
foreach ($aOrders as $sID => $a) {
|
||||
$aOrders[$sID]['importance'] = $i++;
|
||||
}
|
||||
}
|
||||
|
||||
return $aOrders;
|
||||
}
|
||||
|
||||
function getList()
|
||||
{
|
||||
return array(
|
||||
'boundary:administrative:1' => array('label' => 'Continent', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'boundary:administrative:2' => array('label' => 'Country', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'place:country' => array('label' => 'Country', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defzoom' => 6, 'defdiameter' => 15),
|
||||
'boundary:administrative:3' => array('label' => 'State', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'boundary:administrative:4' => array('label' => 'State', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'place:state' => array('label' => 'State', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defzoom' => 8, 'defdiameter' => 5.12),
|
||||
'boundary:administrative:5' => array('label' => 'State District', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'boundary:administrative:6' => array('label' => 'County', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'boundary:administrative:7' => array('label' => 'County', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'place:county' => array('label' => 'County', 'frequency' => 108, 'icon' => 'poi_boundary_administrative', 'defzoom' => 10, 'defdiameter' => 1.28),
|
||||
'boundary:administrative:8' => array('label' => 'City', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'place:city' => array('label' => 'City', 'frequency' => 66, 'icon' => 'poi_place_city', 'defzoom' => 12, 'defdiameter' => 0.32),
|
||||
'boundary:administrative:9' => array('label' => 'City District', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'boundary:administrative:10' => array('label' => 'Suburb', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'boundary:administrative:11' => array('label' => 'Neighbourhood', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'place:region' => array('label' => 'Region', 'frequency' => 0, 'icon' => 'poi_boundary_administrative', 'defzoom' => 8, 'defdiameter' => 0.04),
|
||||
'place:island' => array('label' => 'Island', 'frequency' => 288, 'defzoom' => 11, 'defdiameter' => 0.64),
|
||||
'boundary:administrative' => array('label' => 'Administrative', 'frequency' => 413, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'boundary:postal_code' => array('label' => 'Postcode', 'frequency' => 413, 'icon' => 'poi_boundary_administrative', 'defdiameter' => 0.32),
|
||||
'place:town' => array('label' => 'Town', 'frequency' => 1497, 'icon' => 'poi_place_town', 'defzoom' => 14, 'defdiameter' => 0.08),
|
||||
'place:village' => array('label' => 'Village', 'frequency' => 11230, 'icon' => 'poi_place_village', 'defzoom' => 15, 'defdiameter' => 0.04),
|
||||
'place:hamlet' => array('label' => 'Hamlet', 'frequency' => 7075, 'icon' => 'poi_place_village', 'defzoom' => 15, 'defdiameter' => 0.04),
|
||||
'place:suburb' => array('label' => 'Suburb', 'frequency' => 2528, 'icon' => 'poi_place_village', 'defdiameter' => 0.04),
|
||||
'place:locality' => array('label' => 'Locality', 'frequency' => 4113, 'icon' => 'poi_place_village', 'defdiameter' => 0.02),
|
||||
'landuse:farm' => array('label' => 'Farm', 'frequency' => 1201, 'defdiameter' => 0.02),
|
||||
'place:farm' => array('label' => 'Farm', 'frequency' => 1162, 'defdiameter' => 0.02),
|
||||
|
||||
'highway:motorway_junction' => array('label' => 'Motorway Junction', 'frequency' => 1126, 'simplelabel' => 'Junction'),
|
||||
'highway:motorway' => array('label' => 'Motorway', 'frequency' => 4627, 'simplelabel' => 'Road'),
|
||||
'highway:trunk' => array('label' => 'Trunk', 'frequency' => 23084, 'simplelabel' => 'Road'),
|
||||
'highway:primary' => array('label' => 'Primary', 'frequency' => 32138, 'simplelabel' => 'Road'),
|
||||
'highway:secondary' => array('label' => 'Secondary', 'frequency' => 25807, 'simplelabel' => 'Road'),
|
||||
'highway:tertiary' => array('label' => 'Tertiary', 'frequency' => 29829, 'simplelabel' => 'Road'),
|
||||
'highway:residential' => array('label' => 'Residential', 'frequency' => 361498, 'simplelabel' => 'Road'),
|
||||
'highway:unclassified' => array('label' => 'Unclassified', 'frequency' => 66441, 'simplelabel' => 'Road'),
|
||||
'highway:living_street' => array('label' => 'Living Street', 'frequency' => 710, 'simplelabel' => 'Road'),
|
||||
'highway:service' => array('label' => 'Service', 'frequency' => 9963, 'simplelabel' => 'Road'),
|
||||
'highway:track' => array('label' => 'Track', 'frequency' => 2565, 'simplelabel' => 'Road'),
|
||||
'highway:road' => array('label' => 'Road', 'frequency' => 591, 'simplelabel' => 'Road'),
|
||||
'highway:byway' => array('label' => 'Byway', 'frequency' => 346, 'simplelabel' => 'Road'),
|
||||
'highway:bridleway' => array('label' => 'Bridleway', 'frequency' => 1556),
|
||||
'highway:cycleway' => array('label' => 'Cycleway', 'frequency' => 2419),
|
||||
'highway:pedestrian' => array('label' => 'Pedestrian', 'frequency' => 2757),
|
||||
'highway:footway' => array('label' => 'Footway', 'frequency' => 15008),
|
||||
'highway:steps' => array('label' => 'Steps', 'frequency' => 444, 'simplelabel' => 'Footway'),
|
||||
'highway:motorway_link' => array('label' => 'Motorway Link', 'frequency' => 795, 'simplelabel' => 'Road'),
|
||||
'highway:trunk_link' => array('label' => 'Trunk Link', 'frequency' => 1258, 'simplelabel' => 'Road'),
|
||||
'highway:primary_link' => array('label' => 'Primary Link', 'frequency' => 313, 'simplelabel' => 'Road'),
|
||||
|
||||
'landuse:industrial' => array('label' => 'Industrial', 'frequency' => 1062),
|
||||
'landuse:residential' => array('label' => 'Residential', 'frequency' => 886),
|
||||
'landuse:retail' => array('label' => 'Retail', 'frequency' => 754),
|
||||
'landuse:commercial' => array('label' => 'Commercial', 'frequency' => 657),
|
||||
|
||||
'place:airport' => array('label' => 'Airport', 'frequency' => 36, 'icon' => 'transport_airport2', 'defdiameter' => 0.03),
|
||||
'aeroway:aerodrome' => array('label' => 'Aerodrome', 'frequency' => 36, 'icon' => 'transport_airport2', 'defdiameter' => 0.03),
|
||||
'aeroway' => array('label' => 'Aeroway', 'frequency' => 36, 'icon' => 'transport_airport2', 'defdiameter' => 0.03),
|
||||
'railway:station' => array('label' => 'Station', 'frequency' => 3431, 'icon' => 'transport_train_station2', 'defdiameter' => 0.01),
|
||||
'amenity:place_of_worship' => array('label' => 'Place Of Worship', 'frequency' => 9049, 'icon' => 'place_of_worship_unknown3'),
|
||||
'amenity:pub' => array('label' => 'Pub', 'frequency' => 18969, 'icon' => 'food_pub'),
|
||||
'amenity:bar' => array('label' => 'Bar', 'frequency' => 164, 'icon' => 'food_bar'),
|
||||
'amenity:university' => array('label' => 'University', 'frequency' => 607, 'icon' => 'education_university'),
|
||||
'tourism:museum' => array('label' => 'Museum', 'frequency' => 543, 'icon' => 'tourist_museum'),
|
||||
'amenity:arts_centre' => array('label' => 'Arts Centre', 'frequency' => 136, 'icon' => 'tourist_art_gallery2'),
|
||||
'tourism:zoo' => array('label' => 'Zoo', 'frequency' => 47, 'icon' => 'tourist_zoo'),
|
||||
'tourism:theme_park' => array('label' => 'Theme Park', 'frequency' => 24, 'icon' => 'poi_point_of_interest'),
|
||||
'tourism:attraction' => array('label' => 'Attraction', 'frequency' => 1463, 'icon' => 'poi_point_of_interest'),
|
||||
'leisure:golf_course' => array('label' => 'Golf Course', 'frequency' => 712, 'icon' => 'sport_golf'),
|
||||
'historic:castle' => array('label' => 'Castle', 'frequency' => 316, 'icon' => 'tourist_castle'),
|
||||
'amenity:hospital' => array('label' => 'Hospital', 'frequency' => 879, 'icon' => 'health_hospital'),
|
||||
'amenity:school' => array('label' => 'School', 'frequency' => 8192, 'icon' => 'education_school'),
|
||||
'amenity:theatre' => array('label' => 'Theatre', 'frequency' => 371, 'icon' => 'tourist_theatre'),
|
||||
'amenity:public_building' => array('label' => 'Public Building', 'frequency' => 985),
|
||||
'amenity:library' => array('label' => 'Library', 'frequency' => 794, 'icon' => 'amenity_library'),
|
||||
'amenity:townhall' => array('label' => 'Townhall', 'frequency' => 242),
|
||||
'amenity:community_centre' => array('label' => 'Community Centre', 'frequency' => 157),
|
||||
'amenity:fire_station' => array('label' => 'Fire Station', 'frequency' => 221, 'icon' => 'amenity_firestation3'),
|
||||
'amenity:police' => array('label' => 'Police', 'frequency' => 334, 'icon' => 'amenity_police2'),
|
||||
'amenity:bank' => array('label' => 'Bank', 'frequency' => 1248, 'icon' => 'money_bank2'),
|
||||
'amenity:post_office' => array('label' => 'Post Office', 'frequency' => 859, 'icon' => 'amenity_post_office'),
|
||||
'leisure:park' => array('label' => 'Park', 'frequency' => 2378),
|
||||
'amenity:park' => array('label' => 'Park', 'frequency' => 53),
|
||||
'landuse:park' => array('label' => 'Park', 'frequency' => 50),
|
||||
'landuse:recreation_ground' => array('label' => 'Recreation Ground', 'frequency' => 517),
|
||||
'tourism:hotel' => array('label' => 'Hotel', 'frequency' => 2150, 'icon' => 'accommodation_hotel2'),
|
||||
'tourism:motel' => array('label' => 'Motel', 'frequency' => 43),
|
||||
'amenity:cinema' => array('label' => 'Cinema', 'frequency' => 277, 'icon' => 'tourist_cinema'),
|
||||
'tourism:artwork' => array('label' => 'Artwork', 'frequency' => 171, 'icon' => 'tourist_art_gallery2'),
|
||||
'historic:archaeological_site' => array('label' => 'Archaeological Site', 'frequency' => 407, 'icon' => 'tourist_archaeological2'),
|
||||
'amenity:doctors' => array('label' => 'Doctors', 'frequency' => 581, 'icon' => 'health_doctors'),
|
||||
'leisure:sports_centre' => array('label' => 'Sports Centre', 'frequency' => 767, 'icon' => 'sport_leisure_centre'),
|
||||
'leisure:swimming_pool' => array('label' => 'Swimming Pool', 'frequency' => 24, 'icon' => 'sport_swimming_outdoor'),
|
||||
'shop:supermarket' => array('label' => 'Supermarket', 'frequency' => 2673, 'icon' => 'shopping_supermarket'),
|
||||
'shop:convenience' => array('label' => 'Convenience', 'frequency' => 1469, 'icon' => 'shopping_convenience'),
|
||||
'amenity:restaurant' => array('label' => 'Restaurant', 'frequency' => 3179, 'icon' => 'food_restaurant'),
|
||||
'amenity:fast_food' => array('label' => 'Fast Food', 'frequency' => 2289, 'icon' => 'food_fastfood'),
|
||||
'amenity:cafe' => array('label' => 'Cafe', 'frequency' => 1780, 'icon' => 'food_cafe'),
|
||||
'tourism:guest_house' => array('label' => 'Guest House', 'frequency' => 223, 'icon' => 'accommodation_bed_and_breakfast'),
|
||||
'amenity:pharmacy' => array('label' => 'Pharmacy', 'frequency' => 733, 'icon' => 'health_pharmacy_dispensing'),
|
||||
'amenity:fuel' => array('label' => 'Fuel', 'frequency' => 1308, 'icon' => 'transport_fuel'),
|
||||
'natural:peak' => array('label' => 'Peak', 'frequency' => 3212, 'icon' => 'poi_peak'),
|
||||
'waterway:waterfall' => array('label' => 'Waterfall', 'frequency' => 24),
|
||||
'natural:wood' => array('label' => 'Wood', 'frequency' => 1845, 'icon' => 'landuse_coniferous_and_deciduous'),
|
||||
'natural:water' => array('label' => 'Water', 'frequency' => 1790),
|
||||
'landuse:forest' => array('label' => 'Forest', 'frequency' => 467),
|
||||
'landuse:cemetery' => array('label' => 'Cemetery', 'frequency' => 463),
|
||||
'landuse:allotments' => array('label' => 'Allotments', 'frequency' => 408),
|
||||
'landuse:farmyard' => array('label' => 'Farmyard', 'frequency' => 397),
|
||||
'railway:rail' => array('label' => 'Rail', 'frequency' => 4894),
|
||||
'waterway:canal' => array('label' => 'Canal', 'frequency' => 1723),
|
||||
'waterway:river' => array('label' => 'River', 'frequency' => 4089),
|
||||
'waterway:stream' => array('label' => 'Stream', 'frequency' => 2684),
|
||||
'shop:bicycle' => array('label' => 'Bicycle', 'frequency' => 349, 'icon' => 'shopping_bicycle'),
|
||||
'shop:clothes' => array('label' => 'Clothes', 'frequency' => 315, 'icon' => 'shopping_clothes'),
|
||||
'shop:hairdresser' => array('label' => 'Hairdresser', 'frequency' => 312, 'icon' => 'shopping_hairdresser'),
|
||||
'shop:doityourself' => array('label' => 'Doityourself', 'frequency' => 247, 'icon' => 'shopping_diy'),
|
||||
'shop:estate_agent' => array('label' => 'Estate Agent', 'frequency' => 162, 'icon' => 'shopping_estateagent2'),
|
||||
'shop:car' => array('label' => 'Car', 'frequency' => 159, 'icon' => 'shopping_car'),
|
||||
'shop:garden_centre' => array('label' => 'Garden Centre', 'frequency' => 143, 'icon' => 'shopping_garden_centre'),
|
||||
'shop:car_repair' => array('label' => 'Car Repair', 'frequency' => 141, 'icon' => 'shopping_car_repair'),
|
||||
'shop:newsagent' => array('label' => 'Newsagent', 'frequency' => 132),
|
||||
'shop:bakery' => array('label' => 'Bakery', 'frequency' => 129, 'icon' => 'shopping_bakery'),
|
||||
'shop:furniture' => array('label' => 'Furniture', 'frequency' => 124),
|
||||
'shop:butcher' => array('label' => 'Butcher', 'frequency' => 105, 'icon' => 'shopping_butcher'),
|
||||
'shop:apparel' => array('label' => 'Apparel', 'frequency' => 98, 'icon' => 'shopping_clothes'),
|
||||
'shop:electronics' => array('label' => 'Electronics', 'frequency' => 96),
|
||||
'shop:department_store' => array('label' => 'Department Store', 'frequency' => 86),
|
||||
'shop:books' => array('label' => 'Books', 'frequency' => 85),
|
||||
'shop:yes' => array('label' => 'Shop', 'frequency' => 68),
|
||||
'shop:outdoor' => array('label' => 'Outdoor', 'frequency' => 67),
|
||||
'shop:mall' => array('label' => 'Mall', 'frequency' => 63),
|
||||
'shop:florist' => array('label' => 'Florist', 'frequency' => 61),
|
||||
'shop:charity' => array('label' => 'Charity', 'frequency' => 60),
|
||||
'shop:hardware' => array('label' => 'Hardware', 'frequency' => 59),
|
||||
'shop:laundry' => array('label' => 'Laundry', 'frequency' => 51, 'icon' => 'shopping_laundrette'),
|
||||
'shop:shoes' => array('label' => 'Shoes', 'frequency' => 49),
|
||||
'shop:beverages' => array('label' => 'Beverages', 'frequency' => 48, 'icon' => 'shopping_alcohol'),
|
||||
'shop:dry_cleaning' => array('label' => 'Dry Cleaning', 'frequency' => 46),
|
||||
'shop:carpet' => array('label' => 'Carpet', 'frequency' => 45),
|
||||
'shop:computer' => array('label' => 'Computer', 'frequency' => 44),
|
||||
'shop:alcohol' => array('label' => 'Alcohol', 'frequency' => 44, 'icon' => 'shopping_alcohol'),
|
||||
'shop:optician' => array('label' => 'Optician', 'frequency' => 55, 'icon' => 'health_opticians'),
|
||||
'shop:chemist' => array('label' => 'Chemist', 'frequency' => 42, 'icon' => 'health_pharmacy'),
|
||||
'shop:gallery' => array('label' => 'Gallery', 'frequency' => 38, 'icon' => 'tourist_art_gallery2'),
|
||||
'shop:mobile_phone' => array('label' => 'Mobile Phone', 'frequency' => 37),
|
||||
'shop:sports' => array('label' => 'Sports', 'frequency' => 37),
|
||||
'shop:jewelry' => array('label' => 'Jewelry', 'frequency' => 32, 'icon' => 'shopping_jewelry'),
|
||||
'shop:pet' => array('label' => 'Pet', 'frequency' => 29),
|
||||
'shop:beauty' => array('label' => 'Beauty', 'frequency' => 28),
|
||||
'shop:stationery' => array('label' => 'Stationery', 'frequency' => 25),
|
||||
'shop:shopping_centre' => array('label' => 'Shopping Centre', 'frequency' => 25),
|
||||
'shop:general' => array('label' => 'General', 'frequency' => 25),
|
||||
'shop:electrical' => array('label' => 'Electrical', 'frequency' => 25),
|
||||
'shop:toys' => array('label' => 'Toys', 'frequency' => 23),
|
||||
'shop:jeweller' => array('label' => 'Jeweller', 'frequency' => 23),
|
||||
'shop:betting' => array('label' => 'Betting', 'frequency' => 23),
|
||||
'shop:household' => array('label' => 'Household', 'frequency' => 21),
|
||||
'shop:travel_agency' => array('label' => 'Travel Agency', 'frequency' => 21),
|
||||
'shop:hifi' => array('label' => 'Hifi', 'frequency' => 21),
|
||||
'amenity:shop' => array('label' => 'Shop', 'frequency' => 61),
|
||||
'tourism:information' => array('label' => 'Information', 'frequency' => 224, 'icon' => 'amenity_information'),
|
||||
|
||||
'place:house' => array('label' => 'House', 'frequency' => 2086, 'defzoom' => 18),
|
||||
'place:house_name' => array('label' => 'House', 'frequency' => 2086, 'defzoom' => 18),
|
||||
'place:house_number' => array('label' => 'House Number', 'frequency' => 2086, 'defzoom' => 18),
|
||||
'place:country_code' => array('label' => 'Country Code', 'frequency' => 2086, 'defzoom' => 18),
|
||||
|
||||
//
|
||||
|
||||
'leisure:pitch' => array('label' => 'Pitch', 'frequency' => 762),
|
||||
'highway:unsurfaced' => array('label' => 'Unsurfaced', 'frequency' => 492),
|
||||
'historic:ruins' => array('label' => 'Ruins', 'frequency' => 483, 'icon' => 'tourist_ruin'),
|
||||
'amenity:college' => array('label' => 'College', 'frequency' => 473, 'icon' => 'education_school'),
|
||||
'historic:monument' => array('label' => 'Monument', 'frequency' => 470, 'icon' => 'tourist_monument'),
|
||||
'railway:subway' => array('label' => 'Subway', 'frequency' => 385),
|
||||
'historic:memorial' => array('label' => 'Memorial', 'frequency' => 382, 'icon' => 'tourist_monument'),
|
||||
'leisure:nature_reserve' => array('label' => 'Nature Reserve', 'frequency' => 342),
|
||||
'leisure:common' => array('label' => 'Common', 'frequency' => 322),
|
||||
'waterway:lock_gate' => array('label' => 'Lock Gate', 'frequency' => 321),
|
||||
'natural:fell' => array('label' => 'Fell', 'frequency' => 308),
|
||||
'amenity:nightclub' => array('label' => 'Nightclub', 'frequency' => 292),
|
||||
'highway:path' => array('label' => 'Path', 'frequency' => 287),
|
||||
'leisure:garden' => array('label' => 'Garden', 'frequency' => 285),
|
||||
'landuse:reservoir' => array('label' => 'Reservoir', 'frequency' => 276),
|
||||
'leisure:playground' => array('label' => 'Playground', 'frequency' => 264),
|
||||
'leisure:stadium' => array('label' => 'Stadium', 'frequency' => 212),
|
||||
'historic:mine' => array('label' => 'Mine', 'frequency' => 193, 'icon' => 'poi_mine'),
|
||||
'natural:cliff' => array('label' => 'Cliff', 'frequency' => 193),
|
||||
'tourism:caravan_site' => array('label' => 'Caravan Site', 'frequency' => 183, 'icon' => 'accommodation_caravan_park'),
|
||||
'amenity:bus_station' => array('label' => 'Bus Station', 'frequency' => 181, 'icon' => 'transport_bus_station'),
|
||||
'amenity:kindergarten' => array('label' => 'Kindergarten', 'frequency' => 179),
|
||||
'highway:construction' => array('label' => 'Construction', 'frequency' => 176),
|
||||
'amenity:atm' => array('label' => 'Atm', 'frequency' => 172, 'icon' => 'money_atm2'),
|
||||
'amenity:emergency_phone' => array('label' => 'Emergency Phone', 'frequency' => 164),
|
||||
'waterway:lock' => array('label' => 'Lock', 'frequency' => 146),
|
||||
'waterway:riverbank' => array('label' => 'Riverbank', 'frequency' => 143),
|
||||
'natural:coastline' => array('label' => 'Coastline', 'frequency' => 142),
|
||||
'tourism:viewpoint' => array('label' => 'Viewpoint', 'frequency' => 140, 'icon' => 'tourist_view_point'),
|
||||
'tourism:hostel' => array('label' => 'Hostel', 'frequency' => 140),
|
||||
'tourism:bed_and_breakfast' => array('label' => 'Bed And Breakfast', 'frequency' => 140, 'icon' => 'accommodation_bed_and_breakfast'),
|
||||
'railway:halt' => array('label' => 'Halt', 'frequency' => 135),
|
||||
'railway:platform' => array('label' => 'Platform', 'frequency' => 134),
|
||||
'railway:tram' => array('label' => 'Tram', 'frequency' => 130, 'icon' => 'transport_tram_stop'),
|
||||
'amenity:courthouse' => array('label' => 'Courthouse', 'frequency' => 129, 'icon' => 'amenity_court'),
|
||||
'amenity:recycling' => array('label' => 'Recycling', 'frequency' => 126, 'icon' => 'amenity_recycling'),
|
||||
'amenity:dentist' => array('label' => 'Dentist', 'frequency' => 124, 'icon' => 'health_dentist'),
|
||||
'natural:beach' => array('label' => 'Beach', 'frequency' => 121, 'icon' => 'tourist_beach'),
|
||||
'place:moor' => array('label' => 'Moor', 'frequency' => 118),
|
||||
'amenity:grave_yard' => array('label' => 'Grave Yard', 'frequency' => 110),
|
||||
'waterway:drain' => array('label' => 'Drain', 'frequency' => 108),
|
||||
'landuse:grass' => array('label' => 'Grass', 'frequency' => 106),
|
||||
'landuse:village_green' => array('label' => 'Village Green', 'frequency' => 106),
|
||||
'natural:bay' => array('label' => 'Bay', 'frequency' => 102),
|
||||
'railway:tram_stop' => array('label' => 'Tram Stop', 'frequency' => 101, 'icon' => 'transport_tram_stop'),
|
||||
'leisure:marina' => array('label' => 'Marina', 'frequency' => 98),
|
||||
'highway:stile' => array('label' => 'Stile', 'frequency' => 97),
|
||||
'natural:moor' => array('label' => 'Moor', 'frequency' => 95),
|
||||
'railway:light_rail' => array('label' => 'Light Rail', 'frequency' => 91),
|
||||
'railway:narrow_gauge' => array('label' => 'Narrow Gauge', 'frequency' => 90),
|
||||
'natural:land' => array('label' => 'Land', 'frequency' => 86),
|
||||
'amenity:village_hall' => array('label' => 'Village Hall', 'frequency' => 82),
|
||||
'waterway:dock' => array('label' => 'Dock', 'frequency' => 80),
|
||||
'amenity:veterinary' => array('label' => 'Veterinary', 'frequency' => 79),
|
||||
'landuse:brownfield' => array('label' => 'Brownfield', 'frequency' => 77),
|
||||
'leisure:track' => array('label' => 'Track', 'frequency' => 76),
|
||||
'railway:historic_station' => array('label' => 'Historic Station', 'frequency' => 74),
|
||||
'landuse:construction' => array('label' => 'Construction', 'frequency' => 72),
|
||||
'amenity:prison' => array('label' => 'Prison', 'frequency' => 71, 'icon' => 'amenity_prison'),
|
||||
'landuse:quarry' => array('label' => 'Quarry', 'frequency' => 71),
|
||||
'amenity:telephone' => array('label' => 'Telephone', 'frequency' => 70),
|
||||
'highway:traffic_signals' => array('label' => 'Traffic Signals', 'frequency' => 66),
|
||||
'natural:heath' => array('label' => 'Heath', 'frequency' => 62),
|
||||
'historic:house' => array('label' => 'House', 'frequency' => 61),
|
||||
'amenity:social_club' => array('label' => 'Social Club', 'frequency' => 61),
|
||||
'landuse:military' => array('label' => 'Military', 'frequency' => 61),
|
||||
'amenity:health_centre' => array('label' => 'Health Centre', 'frequency' => 59),
|
||||
'historic:building' => array('label' => 'Building', 'frequency' => 58),
|
||||
'amenity:clinic' => array('label' => 'Clinic', 'frequency' => 57),
|
||||
'highway:services' => array('label' => 'Services', 'frequency' => 56),
|
||||
'amenity:ferry_terminal' => array('label' => 'Ferry Terminal', 'frequency' => 55),
|
||||
'natural:marsh' => array('label' => 'Marsh', 'frequency' => 55),
|
||||
'natural:hill' => array('label' => 'Hill', 'frequency' => 54),
|
||||
'highway:raceway' => array('label' => 'Raceway', 'frequency' => 53),
|
||||
'amenity:taxi' => array('label' => 'Taxi', 'frequency' => 47),
|
||||
'amenity:take_away' => array('label' => 'Take Away', 'frequency' => 45),
|
||||
'amenity:car_rental' => array('label' => 'Car Rental', 'frequency' => 44),
|
||||
'place:islet' => array('label' => 'Islet', 'frequency' => 44),
|
||||
'amenity:nursery' => array('label' => 'Nursery', 'frequency' => 44),
|
||||
'amenity:nursing_home' => array('label' => 'Nursing Home', 'frequency' => 43),
|
||||
'amenity:toilets' => array('label' => 'Toilets', 'frequency' => 38),
|
||||
'amenity:hall' => array('label' => 'Hall', 'frequency' => 38),
|
||||
'waterway:boatyard' => array('label' => 'Boatyard', 'frequency' => 36),
|
||||
'highway:mini_roundabout' => array('label' => 'Mini Roundabout', 'frequency' => 35),
|
||||
'historic:manor' => array('label' => 'Manor', 'frequency' => 35),
|
||||
'tourism:chalet' => array('label' => 'Chalet', 'frequency' => 34),
|
||||
'amenity:bicycle_parking' => array('label' => 'Bicycle Parking', 'frequency' => 34),
|
||||
'amenity:hotel' => array('label' => 'Hotel', 'frequency' => 34),
|
||||
'waterway:weir' => array('label' => 'Weir', 'frequency' => 33),
|
||||
'natural:wetland' => array('label' => 'Wetland', 'frequency' => 33),
|
||||
'natural:cave_entrance' => array('label' => 'Cave Entrance', 'frequency' => 32),
|
||||
'amenity:crematorium' => array('label' => 'Crematorium', 'frequency' => 31),
|
||||
'tourism:picnic_site' => array('label' => 'Picnic Site', 'frequency' => 31),
|
||||
'landuse:wood' => array('label' => 'Wood', 'frequency' => 30),
|
||||
'landuse:basin' => array('label' => 'Basin', 'frequency' => 30),
|
||||
'natural:tree' => array('label' => 'Tree', 'frequency' => 30),
|
||||
'leisure:slipway' => array('label' => 'Slipway', 'frequency' => 29),
|
||||
'landuse:meadow' => array('label' => 'Meadow', 'frequency' => 29),
|
||||
'landuse:piste' => array('label' => 'Piste', 'frequency' => 28),
|
||||
'amenity:care_home' => array('label' => 'Care Home', 'frequency' => 28),
|
||||
'amenity:club' => array('label' => 'Club', 'frequency' => 28),
|
||||
'amenity:medical_centre' => array('label' => 'Medical Centre', 'frequency' => 27),
|
||||
'historic:roman_road' => array('label' => 'Roman Road', 'frequency' => 27),
|
||||
'historic:fort' => array('label' => 'Fort', 'frequency' => 26),
|
||||
'railway:subway_entrance' => array('label' => 'Subway Entrance', 'frequency' => 26),
|
||||
'historic:yes' => array('label' => 'Historic', 'frequency' => 25),
|
||||
'highway:gate' => array('label' => 'Gate', 'frequency' => 25),
|
||||
'leisure:fishing' => array('label' => 'Fishing', 'frequency' => 24),
|
||||
'historic:museum' => array('label' => 'Museum', 'frequency' => 24),
|
||||
'amenity:car_wash' => array('label' => 'Car Wash', 'frequency' => 24),
|
||||
'railway:level_crossing' => array('label' => 'Level Crossing', 'frequency' => 23),
|
||||
'leisure:bird_hide' => array('label' => 'Bird Hide', 'frequency' => 23),
|
||||
'natural:headland' => array('label' => 'Headland', 'frequency' => 21),
|
||||
'tourism:apartments' => array('label' => 'Apartments', 'frequency' => 21),
|
||||
'amenity:shopping' => array('label' => 'Shopping', 'frequency' => 21),
|
||||
'natural:scrub' => array('label' => 'Scrub', 'frequency' => 20),
|
||||
'natural:fen' => array('label' => 'Fen', 'frequency' => 20),
|
||||
'building:yes' => array('label' => 'Building', 'frequency' => 200),
|
||||
'mountain_pass:yes' => array('label' => 'Mountain Pass', 'frequency' => 200),
|
||||
|
||||
'amenity:parking' => array('label' => 'Parking', 'frequency' => 3157),
|
||||
'highway:bus_stop' => array('label' => 'Bus Stop', 'frequency' => 35777, 'icon' => 'transport_bus_stop2'),
|
||||
'place:postcode' => array('label' => 'Postcode', 'frequency' => 27267),
|
||||
'amenity:post_box' => array('label' => 'Post Box', 'frequency' => 9613),
|
||||
|
||||
'place:houses' => array('label' => 'Houses', 'frequency' => 85),
|
||||
'railway:preserved' => array('label' => 'Preserved', 'frequency' => 227),
|
||||
'waterway:derelict_canal' => array('label' => 'Derelict Canal', 'frequency' => 21),
|
||||
'amenity:dead_pub' => array('label' => 'Dead Pub', 'frequency' => 20),
|
||||
'railway:disused_station' => array('label' => 'Disused Station', 'frequency' => 114),
|
||||
'railway:abandoned' => array('label' => 'Abandoned', 'frequency' => 641),
|
||||
'railway:disused' => array('label' => 'Disused', 'frequency' => 72),
|
||||
$aSpecialRadius = array(
|
||||
'place:continent' => 25,
|
||||
'place:country' => 7,
|
||||
'place:state' => 2.6,
|
||||
'place:province' => 2.6,
|
||||
'place:region' => 1.0,
|
||||
'place:county' => 0.7,
|
||||
'place:city' => 0.16,
|
||||
'place:municipality' => 0.16,
|
||||
'place:island' => 0.32,
|
||||
'place:postcode' => 0.16,
|
||||
'place:town' => 0.04,
|
||||
'place:village' => 0.02,
|
||||
'place:hamlet' => 0.02,
|
||||
'place:district' => 0.02,
|
||||
'place:borough' => 0.02,
|
||||
'place:suburb' => 0.02,
|
||||
'place:locality' => 0.01,
|
||||
'place:neighbourhood'=> 0.01,
|
||||
'place:quarter' => 0.01,
|
||||
'place:city_block' => 0.01,
|
||||
'landuse:farm' => 0.01,
|
||||
'place:farm' => 0.01,
|
||||
'place:airport' => 0.015,
|
||||
'aeroway:aerodrome' => 0.015,
|
||||
'railway:station' => 0.005
|
||||
);
|
||||
|
||||
$sClassPlace = $aPlace['class'].':'.$aPlace['type'];
|
||||
|
||||
return $aSpecialRadius[$sClassPlace] ?? 0.00005;
|
||||
}
|
||||
|
||||
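A short sketch of the radius lookup, using only the special-case table above:

    getDefRadius(array('class' => 'place', 'type' => 'city'));     // 0.16
    getDefRadius(array('class' => 'amenity', 'type' => 'school')); // 0.00005 (default)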
/**
|
||||
* Get the icon to use with the given object.
|
||||
*/
|
||||
function getIcon($aPlace)
|
||||
{
|
||||
$aIcons = array(
|
||||
'boundary:administrative' => 'poi_boundary_administrative',
|
||||
'place:city' => 'poi_place_city',
|
||||
'place:town' => 'poi_place_town',
|
||||
'place:village' => 'poi_place_village',
|
||||
'place:hamlet' => 'poi_place_village',
|
||||
'place:suburb' => 'poi_place_village',
|
||||
'place:locality' => 'poi_place_village',
|
||||
'place:airport' => 'transport_airport2',
|
||||
'aeroway:aerodrome' => 'transport_airport2',
|
||||
'railway:station' => 'transport_train_station2',
|
||||
'amenity:place_of_worship' => 'place_of_worship_unknown3',
|
||||
'amenity:pub' => 'food_pub',
|
||||
'amenity:bar' => 'food_bar',
|
||||
'amenity:university' => 'education_university',
|
||||
'tourism:museum' => 'tourist_museum',
|
||||
'amenity:arts_centre' => 'tourist_art_gallery2',
|
||||
'tourism:zoo' => 'tourist_zoo',
|
||||
'tourism:theme_park' => 'poi_point_of_interest',
|
||||
'tourism:attraction' => 'poi_point_of_interest',
|
||||
'leisure:golf_course' => 'sport_golf',
|
||||
'historic:castle' => 'tourist_castle',
|
||||
'amenity:hospital' => 'health_hospital',
|
||||
'amenity:school' => 'education_school',
|
||||
'amenity:theatre' => 'tourist_theatre',
|
||||
'amenity:library' => 'amenity_library',
|
||||
'amenity:fire_station' => 'amenity_firestation3',
|
||||
'amenity:police' => 'amenity_police2',
|
||||
'amenity:bank' => 'money_bank2',
|
||||
'amenity:post_office' => 'amenity_post_office',
|
||||
'tourism:hotel' => 'accommodation_hotel2',
|
||||
'amenity:cinema' => 'tourist_cinema',
|
||||
'tourism:artwork' => 'tourist_art_gallery2',
|
||||
'historic:archaeological_site' => 'tourist_archaeological2',
|
||||
'amenity:doctors' => 'health_doctors',
|
||||
'leisure:sports_centre' => 'sport_leisure_centre',
|
||||
'leisure:swimming_pool' => 'sport_swimming_outdoor',
|
||||
'shop:supermarket' => 'shopping_supermarket',
|
||||
'shop:convenience' => 'shopping_convenience',
|
||||
'amenity:restaurant' => 'food_restaurant',
|
||||
'amenity:fast_food' => 'food_fastfood',
|
||||
'amenity:cafe' => 'food_cafe',
|
||||
'tourism:guest_house' => 'accommodation_bed_and_breakfast',
|
||||
'amenity:pharmacy' => 'health_pharmacy_dispensing',
|
||||
'amenity:fuel' => 'transport_fuel',
|
||||
'natural:peak' => 'poi_peak',
|
||||
'natural:wood' => 'landuse_coniferous_and_deciduous',
|
||||
'shop:bicycle' => 'shopping_bicycle',
|
||||
'shop:clothes' => 'shopping_clothes',
|
||||
'shop:hairdresser' => 'shopping_hairdresser',
|
||||
'shop:doityourself' => 'shopping_diy',
|
||||
'shop:estate_agent' => 'shopping_estateagent2',
|
||||
'shop:car' => 'shopping_car',
|
||||
'shop:garden_centre' => 'shopping_garden_centre',
|
||||
'shop:car_repair' => 'shopping_car_repair',
|
||||
'shop:bakery' => 'shopping_bakery',
|
||||
'shop:butcher' => 'shopping_butcher',
|
||||
'shop:apparel' => 'shopping_clothes',
|
||||
'shop:laundry' => 'shopping_laundrette',
|
||||
'shop:beverages' => 'shopping_alcohol',
|
||||
'shop:alcohol' => 'shopping_alcohol',
|
||||
'shop:optician' => 'health_opticians',
|
||||
'shop:chemist' => 'health_pharmacy',
|
||||
'shop:gallery' => 'tourist_art_gallery2',
|
||||
'shop:jewelry' => 'shopping_jewelry',
|
||||
'tourism:information' => 'amenity_information',
|
||||
'historic:ruins' => 'tourist_ruin',
|
||||
'amenity:college' => 'education_school',
|
||||
'historic:monument' => 'tourist_monument',
|
||||
'historic:memorial' => 'tourist_monument',
|
||||
'historic:mine' => 'poi_mine',
|
||||
'tourism:caravan_site' => 'accommodation_caravan_park',
|
||||
'amenity:bus_station' => 'transport_bus_station',
|
||||
'amenity:atm' => 'money_atm2',
|
||||
'tourism:viewpoint' => 'tourist_view_point',
|
||||
'tourism:guesthouse' => 'accommodation_bed_and_breakfast',
|
||||
'railway:tram' => 'transport_tram_stop',
|
||||
'amenity:courthouse' => 'amenity_court',
|
||||
'amenity:recycling' => 'amenity_recycling',
|
||||
'amenity:dentist' => 'health_dentist',
|
||||
'natural:beach' => 'tourist_beach',
|
||||
'railway:tram_stop' => 'transport_tram_stop',
|
||||
'amenity:prison' => 'amenity_prison',
|
||||
'highway:bus_stop' => 'transport_bus_stop2'
|
||||
);
|
||||
|
||||
$sClassPlace = $aPlace['class'].':'.$aPlace['type'];
|
||||
|
||||
return $aIcons[$sClassPlace] ?? null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an icon for the given object with its full URL.
|
||||
*/
|
||||
function getIconFile($aPlace)
|
||||
{
|
||||
$sIcon = getIcon($aPlace);
|
||||
|
||||
if (!isset($sIcon)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return CONST_Website_BaseURL.'images/mapicons/'.$sIcon.'.p.20.png';
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a class importance value for the given place.
|
||||
*
|
||||
* @param array[] $aPlace Information about the place.
|
||||
*
|
||||
* @return int An importance value. The lower the value, the more
|
||||
* important the class.
|
||||
*/
|
||||
function getImportance($aPlace)
|
||||
{
|
||||
static $aWithImportance = null;
|
||||
|
||||
if ($aWithImportance === null) {
|
||||
$aWithImportance = array_flip(array(
|
||||
'place:country',
|
||||
'place:state',
|
||||
'place:province',
|
||||
'place:county',
|
||||
'place:city',
|
||||
'place:region',
|
||||
'place:island',
|
||||
'place:town',
|
||||
'place:village',
|
||||
'place:hamlet',
|
||||
'place:suburb',
|
||||
'place:locality',
|
||||
'landuse:farm',
|
||||
'place:farm',
|
||||
'highway:motorway_junction',
|
||||
'highway:motorway',
|
||||
'highway:trunk',
|
||||
'highway:primary',
|
||||
'highway:secondary',
|
||||
'highway:tertiary',
|
||||
'highway:residential',
|
||||
'highway:unclassified',
|
||||
'highway:living_street',
|
||||
'highway:service',
|
||||
'highway:track',
|
||||
'highway:road',
|
||||
'highway:byway',
|
||||
'highway:bridleway',
|
||||
'highway:cycleway',
|
||||
'highway:pedestrian',
|
||||
'highway:footway',
|
||||
'highway:steps',
|
||||
'highway:motorway_link',
|
||||
'highway:trunk_link',
|
||||
'highway:primary_link',
|
||||
'landuse:industrial',
|
||||
'landuse:residential',
|
||||
'landuse:retail',
|
||||
'landuse:commercial',
|
||||
'place:airport',
|
||||
'aeroway:aerodrome',
|
||||
'railway:station',
|
||||
'amenity:place_of_worship',
|
||||
'amenity:pub',
|
||||
'amenity:bar',
|
||||
'amenity:university',
|
||||
'tourism:museum',
|
||||
'amenity:arts_centre',
|
||||
'tourism:zoo',
|
||||
'tourism:theme_park',
|
||||
'tourism:attraction',
|
||||
'leisure:golf_course',
|
||||
'historic:castle',
|
||||
'amenity:hospital',
|
||||
'amenity:school',
|
||||
'amenity:theatre',
|
||||
'amenity:public_building',
|
||||
'amenity:library',
|
||||
'amenity:townhall',
|
||||
'amenity:community_centre',
|
||||
'amenity:fire_station',
|
||||
'amenity:police',
|
||||
'amenity:bank',
|
||||
'amenity:post_office',
|
||||
'leisure:park',
|
||||
'amenity:park',
|
||||
'landuse:park',
|
||||
'landuse:recreation_ground',
|
||||
'tourism:hotel',
|
||||
'tourism:motel',
|
||||
'amenity:cinema',
|
||||
'tourism:artwork',
|
||||
'historic:archaeological_site',
|
||||
'amenity:doctors',
|
||||
'leisure:sports_centre',
|
||||
'leisure:swimming_pool',
|
||||
'shop:supermarket',
|
||||
'shop:convenience',
|
||||
'amenity:restaurant',
|
||||
'amenity:fast_food',
|
||||
'amenity:cafe',
|
||||
'tourism:guest_house',
|
||||
'amenity:pharmacy',
|
||||
'amenity:fuel',
|
||||
'natural:peak',
|
||||
'waterway:waterfall',
|
||||
'natural:wood',
|
||||
'natural:water',
|
||||
'landuse:forest',
|
||||
'landuse:cemetery',
|
||||
'landuse:allotments',
|
||||
'landuse:farmyard',
|
||||
'railway:rail',
|
||||
'waterway:canal',
|
||||
'waterway:river',
|
||||
'waterway:stream',
|
||||
'shop:bicycle',
|
||||
'shop:clothes',
|
||||
'shop:hairdresser',
|
||||
'shop:doityourself',
|
||||
'shop:estate_agent',
|
||||
'shop:car',
|
||||
'shop:garden_centre',
|
||||
'shop:car_repair',
|
||||
'shop:newsagent',
|
||||
'shop:bakery',
|
||||
'shop:furniture',
|
||||
'shop:butcher',
|
||||
'shop:apparel',
|
||||
'shop:electronics',
|
||||
'shop:department_store',
|
||||
'shop:books',
|
||||
'shop:yes',
|
||||
'shop:outdoor',
|
||||
'shop:mall',
|
||||
'shop:florist',
|
||||
'shop:charity',
|
||||
'shop:hardware',
|
||||
'shop:laundry',
|
||||
'shop:shoes',
|
||||
'shop:beverages',
|
||||
'shop:dry_cleaning',
|
||||
'shop:carpet',
|
||||
'shop:computer',
|
||||
'shop:alcohol',
|
||||
'shop:optician',
|
||||
'shop:chemist',
|
||||
'shop:gallery',
|
||||
'shop:mobile_phone',
|
||||
'shop:sports',
|
||||
'shop:jewelry',
|
||||
'shop:pet',
|
||||
'shop:beauty',
|
||||
'shop:stationery',
|
||||
'shop:shopping_centre',
|
||||
'shop:general',
|
||||
'shop:electrical',
|
||||
'shop:toys',
|
||||
'shop:jeweller',
|
||||
'shop:betting',
|
||||
'shop:household',
|
||||
'shop:travel_agency',
|
||||
'shop:hifi',
|
||||
'amenity:shop',
|
||||
'tourism:information',
|
||||
'place:house',
|
||||
'place:house_name',
|
||||
'place:house_number',
|
||||
'place:country_code',
|
||||
'leisure:pitch',
|
||||
'highway:unsurfaced',
|
||||
'historic:ruins',
|
||||
'amenity:college',
|
||||
'historic:monument',
|
||||
'railway:subway',
|
||||
'historic:memorial',
|
||||
'leisure:nature_reserve',
|
||||
'leisure:common',
|
||||
'waterway:lock_gate',
|
||||
'natural:fell',
|
||||
'amenity:nightclub',
|
||||
'highway:path',
|
||||
'leisure:garden',
|
||||
'landuse:reservoir',
|
||||
'leisure:playground',
|
||||
'leisure:stadium',
|
||||
'historic:mine',
|
||||
'natural:cliff',
|
||||
'tourism:caravan_site',
|
||||
'amenity:bus_station',
|
||||
'amenity:kindergarten',
|
||||
'highway:construction',
|
||||
'amenity:atm',
|
||||
'amenity:emergency_phone',
|
||||
'waterway:lock',
|
||||
'waterway:riverbank',
|
||||
'natural:coastline',
|
||||
'tourism:viewpoint',
|
||||
'tourism:hostel',
|
||||
'tourism:bed_and_breakfast',
|
||||
'railway:halt',
|
||||
'railway:platform',
|
||||
'railway:tram',
|
||||
'amenity:courthouse',
|
||||
'amenity:recycling',
|
||||
'amenity:dentist',
|
||||
'natural:beach',
|
||||
'place:moor',
|
||||
'amenity:grave_yard',
|
||||
'waterway:drain',
|
||||
'landuse:grass',
|
||||
'landuse:village_green',
|
||||
'natural:bay',
|
||||
'railway:tram_stop',
|
||||
'leisure:marina',
|
||||
'highway:stile',
|
||||
'natural:moor',
|
||||
'railway:light_rail',
|
||||
'railway:narrow_gauge',
|
||||
'natural:land',
|
||||
'amenity:village_hall',
|
||||
'waterway:dock',
|
||||
'amenity:veterinary',
|
||||
'landuse:brownfield',
|
||||
'leisure:track',
|
||||
'railway:historic_station',
|
||||
'landuse:construction',
|
||||
'amenity:prison',
|
||||
'landuse:quarry',
|
||||
'amenity:telephone',
|
||||
'highway:traffic_signals',
|
||||
'natural:heath',
|
||||
'historic:house',
|
||||
'amenity:social_club',
|
||||
'landuse:military',
|
||||
'amenity:health_centre',
|
||||
'historic:building',
|
||||
'amenity:clinic',
|
||||
'highway:services',
|
||||
'amenity:ferry_terminal',
|
||||
'natural:marsh',
|
||||
'natural:hill',
|
||||
'highway:raceway',
|
||||
'amenity:taxi',
|
||||
'amenity:take_away',
|
||||
'amenity:car_rental',
|
||||
'place:islet',
|
||||
'amenity:nursery',
|
||||
'amenity:nursing_home',
|
||||
'amenity:toilets',
|
||||
'amenity:hall',
|
||||
'waterway:boatyard',
|
||||
'highway:mini_roundabout',
|
||||
'historic:manor',
|
||||
'tourism:chalet',
|
||||
'amenity:bicycle_parking',
|
||||
'amenity:hotel',
|
||||
'waterway:weir',
|
||||
'natural:wetland',
|
||||
'natural:cave_entrance',
|
||||
'amenity:crematorium',
|
||||
'tourism:picnic_site',
|
||||
'landuse:wood',
|
||||
'landuse:basin',
|
||||
'natural:tree',
|
||||
'leisure:slipway',
|
||||
'landuse:meadow',
|
||||
'landuse:piste',
|
||||
'amenity:care_home',
|
||||
'amenity:club',
|
||||
'amenity:medical_centre',
|
||||
'historic:roman_road',
|
||||
'historic:fort',
|
||||
'railway:subway_entrance',
|
||||
'historic:yes',
|
||||
'highway:gate',
|
||||
'leisure:fishing',
|
||||
'historic:museum',
|
||||
'amenity:car_wash',
|
||||
'railway:level_crossing',
|
||||
'leisure:bird_hide',
|
||||
'natural:headland',
|
||||
'tourism:apartments',
|
||||
'amenity:shopping',
|
||||
'natural:scrub',
|
||||
'natural:fen',
|
||||
'building:yes',
|
||||
'mountain_pass:yes',
|
||||
'amenity:parking',
|
||||
'highway:bus_stop',
|
||||
'place:postcode',
|
||||
'amenity:post_box',
|
||||
'place:houses',
|
||||
'railway:preserved',
|
||||
'waterway:derelict_canal',
|
||||
'amenity:dead_pub',
|
||||
'railway:disused_station',
|
||||
'railway:abandoned',
|
||||
'railway:disused'
|
||||
));
|
||||
}
|
||||
|
||||
$sClassPlace = $aPlace['class'].':'.$aPlace['type'];
|
||||
|
||||
return $aWithImportance[$sClassPlace] ?? null;
|
||||
}
|
||||
|
||||
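Because the list is passed through array_flip, the return value is simply the position of the class/type pair in the list above; a hedged sketch:

    getImportance(array('class' => 'place', 'type' => 'country'));   // 0 - most important
    getImportance(array('class' => 'shop', 'type' => 'bakery'));     // larger index, less important
    getImportance(array('class' => 'natural', 'type' => 'glacier')); // null - not listed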
lib/DB.php
@@ -135,7 +135,7 @@ class DB
        try {
            $stmt = $this->getQueryStatement($sSQL, $aInputVars, $sErrMessage);

            while ($val = $stmt->fetchColumn(0)) { // returns first column or false
            while (($val = $stmt->fetchColumn(0)) !== false) { // returns first column or false
                $aVals[] = $val;
            }
        } catch (\PDOException $e) {
@@ -241,11 +241,103 @@ class DB
|
||||
}
|
||||
|
||||
/**
|
||||
* Since the DSN includes the database name, checks if the connection works.
|
||||
* Returns a list of table names in the database
|
||||
*
|
||||
* @return array[]
|
||||
*/
|
||||
public function getListOfTables()
|
||||
{
|
||||
return $this->getCol("SELECT tablename FROM pg_tables WHERE schemaname='public'");
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a table. Returns true if deleted or didn't exist.
|
||||
*
|
||||
* @param string $sTableName
|
||||
*
|
||||
* @return boolean
|
||||
*/
|
||||
public function databaseExists()
|
||||
public function deleteTable($sTableName)
|
||||
{
|
||||
return $this->exec('DROP TABLE IF EXISTS '.$sTableName.' CASCADE') == 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if an index exists in the database. Optional filtered by tablename
|
||||
*
|
||||
* @param string $sTableName
|
||||
*
|
||||
* @return boolean
|
||||
*/
|
||||
public function indexExists($sIndexName, $sTableName = null)
|
||||
{
|
||||
return in_array($sIndexName, $this->getListOfIndices($sTableName));
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of index names in the database, optional filtered by tablename
|
||||
*
|
||||
* @param string $sTableName
|
||||
*
|
||||
* @return array
|
||||
*/
|
||||
public function getListOfIndices($sTableName = null)
|
||||
{
|
||||
// table_name | index_name | column_name
|
||||
// -----------------------+---------------------------------+--------------
|
||||
// country_name | idx_country_name_country_code | country_code
|
||||
// country_osm_grid | idx_country_osm_grid_geometry | geometry
|
||||
// import_polygon_delete | idx_import_polygon_delete_osmid | osm_id
|
||||
// import_polygon_delete | idx_import_polygon_delete_osmid | osm_type
|
||||
// import_polygon_error | idx_import_polygon_error_osmid | osm_id
|
||||
// import_polygon_error | idx_import_polygon_error_osmid | osm_type
|
||||
$sSql = <<< END
|
||||
SELECT
|
||||
t.relname as table_name,
|
||||
i.relname as index_name,
|
||||
a.attname as column_name
|
||||
FROM
|
||||
pg_class t,
|
||||
pg_class i,
|
||||
pg_index ix,
|
||||
pg_attribute a
|
||||
WHERE
|
||||
t.oid = ix.indrelid
|
||||
and i.oid = ix.indexrelid
|
||||
and a.attrelid = t.oid
|
||||
and a.attnum = ANY(ix.indkey)
|
||||
and t.relkind = 'r'
|
||||
and i.relname NOT LIKE 'pg_%'
|
||||
FILTERS
|
||||
ORDER BY
|
||||
t.relname,
|
||||
i.relname,
|
||||
a.attname
|
||||
END;
|
||||
|
||||
$aRows = null;
|
||||
if ($sTableName) {
|
||||
$sSql = str_replace('FILTERS', 'and t.relname = :tablename', $sSql);
|
||||
$aRows = $this->getAll($sSql, array(':tablename' => $sTableName));
|
||||
} else {
|
||||
$sSql = str_replace('FILTERS', '', $sSql);
|
||||
$aRows = $this->getAll($sSql);
|
||||
}
|
||||
|
||||
$aIndexNames = array_unique(array_map(function ($aRow) {
|
||||
return $aRow['index_name'];
|
||||
}, $aRows));
|
||||
sort($aIndexNames);
|
||||
|
||||
return $aIndexNames;
|
||||
}
|
||||
|
||||
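A minimal sketch of the new index helpers, assuming $oDB is an already connected \Nominatim\DB instance (the table and index names are only examples):

    $aIndices = $oDB->getListOfIndices('placex');                   // sorted index names on placex
    $bExists  = $oDB->indexExists('idx_placex_geometry', 'placex'); // boolean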
/**
|
||||
* Tries to connect to the database but on failure doesn't throw an exception.
|
||||
*
|
||||
* @return boolean
|
||||
*/
|
||||
public function checkConnection()
|
||||
{
|
||||
$bExists = true;
|
||||
try {
|
||||
@@ -280,11 +372,18 @@ class DB
|
||||
return (float) ($aMatches[1].'.'.$aMatches[2]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an associate array of postgresql database connection settings. Keys can
|
||||
* be 'database', 'hostspec', 'port', 'username', 'password'.
|
||||
* Returns empty array on failure, thus check if at least 'database' is set.
|
||||
*
|
||||
* @return array[]
|
||||
*/
|
||||
public static function parseDSN($sDSN)
|
||||
{
|
||||
// https://secure.php.net/manual/en/ref.pdo-pgsql.connection.php
|
||||
$aInfo = array();
|
||||
if (preg_match('/^pgsql:(.+)/', $sDSN, $aMatches)) {
|
||||
if (preg_match('/^pgsql:(.+)$/', $sDSN, $aMatches)) {
|
||||
foreach (explode(';', $aMatches[1]) as $sKeyVal) {
|
||||
list($sKey, $sVal) = explode('=', $sKeyVal, 2);
|
||||
if ($sKey == 'host') $sKey = 'hostspec';
|
||||
@@ -295,4 +394,28 @@ class DB
        }
        return $aInfo;
    }

    /**
     * Takes an array of settings and return the DNS string. Key names can be
     * 'database', 'hostspec', 'port', 'username', 'password' but aliases
     * 'dbname', 'host' and 'user' are also supported.
     *
     * @return string
     *
     */
    public static function generateDSN($aInfo)
    {
        $sDSN = sprintf(
            'pgsql:host=%s;port=%s;dbname=%s;user=%s;password=%s;',
            $aInfo['host'] ?? $aInfo['hostspec'] ?? '',
            $aInfo['port'] ?? '',
            $aInfo['dbname'] ?? $aInfo['database'] ?? '',
            $aInfo['user'] ?? '',
            $aInfo['password'] ?? ''
        );
        $sDSN = preg_replace('/\b\w+=;/', '', $sDSN);
        $sDSN = preg_replace('/;\Z/', '', $sDSN);

        return $sDSN;
    }
}

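A round-trip sketch for the two DSN helpers (values illustrative; parseDSN maps 'host' to 'hostspec' as shown above):

    $aInfo = \Nominatim\DB::parseDSN('pgsql:host=localhost;port=5433;dbname=nominatim');
    // roughly array('hostspec' => 'localhost', 'port' => '5433', 'database' => 'nominatim')
    $sDSN = \Nominatim\DB::generateDSN($aInfo);
    // 'pgsql:host=localhost;port=5433;dbname=nominatim' - empty key=value pairs are stripped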
@@ -245,7 +245,6 @@ class Geocode
|
||||
}
|
||||
|
||||
$this->oPlaceLookup->loadParamArray($oParams, $sForceGeometryType);
|
||||
$this->oPlaceLookup->setIncludePolygonAsPoints($oParams->getBool('polygon'));
|
||||
$this->oPlaceLookup->setIncludeAddressDetails($oParams->getBool('addressdetails', false));
|
||||
}
|
||||
|
||||
@@ -348,10 +347,7 @@ class Geocode
|
||||
$aNewPhraseSearches = array();
|
||||
$sPhraseType = $bIsStructured ? $oPhrase->getPhraseType() : '';
|
||||
|
||||
foreach ($oPhrase->getWordSets() as $iWordSet => $aWordset) {
|
||||
// Too many permutations - too expensive
|
||||
if ($iWordSet > 120) break;
|
||||
|
||||
foreach ($oPhrase->getWordSets() as $aWordset) {
|
||||
$aWordsetSearches = $aSearches;
|
||||
|
||||
// Add all words from this wordset
|
||||
@@ -641,7 +637,6 @@ class Geocode
|
||||
}
|
||||
}
|
||||
|
||||
Debug::printDebugTable('Phrases', $aPhrases);
|
||||
Debug::printVar('Tokens', $aTokens);
|
||||
|
||||
$oValidTokens = new TokenList();
|
||||
@@ -686,6 +681,11 @@ class Geocode
|
||||
|
||||
Debug::printGroupTable('Valid Tokens', $oValidTokens->debugInfo());
|
||||
|
||||
foreach ($aPhrases as $oPhrase) {
|
||||
$oPhrase->computeWordSets($oValidTokens);
|
||||
}
|
||||
Debug::printDebugTable('Phrases', $aPhrases);
|
||||
|
||||
Debug::newSection('Search candidates');
|
||||
|
||||
$aGroupedSearches = $this->getGroupedSearches($aSearches, $aPhrases, $oValidTokens, $bStructuredPhrases);
|
||||
@@ -807,9 +807,7 @@ class Geocode
|
||||
$sSQL .= 'WHERE place_id in ('.$sPlaceIds.') ';
|
||||
$sSQL .= ' AND (';
|
||||
$sSQL .= " placex.rank_address between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
if (14 >= $this->iMinAddressRank && 14 <= $this->iMaxAddressRank) {
|
||||
$sSQL .= " OR (extratags->'place') = 'city'";
|
||||
}
|
||||
$sSQL .= " OR placex.rank_search between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
if ($this->aAddressRankList) {
|
||||
$sSQL .= ' OR placex.rank_address in ('.join(',', $this->aAddressRankList).')';
|
||||
}
|
||||
@@ -889,7 +887,6 @@ class Geocode
|
||||
|
||||
$aSearchResults = $this->oPlaceLookup->lookup($aResults);
|
||||
|
||||
$aClassType = ClassTypes\getListWithImportance();
|
||||
$aRecheckWords = preg_split('/\b[\s,\\-]*/u', $sQuery);
|
||||
foreach ($aRecheckWords as $i => $sWord) {
|
||||
if (!preg_match('/[\pL\pN]/', $sWord)) unset($aRecheckWords[$i]);
|
||||
@@ -898,33 +895,23 @@ class Geocode
|
||||
Debug::printVar('Recheck words', $aRecheckWords);
|
||||
|
||||
foreach ($aSearchResults as $iIdx => $aResult) {
|
||||
// Default
|
||||
$fDiameter = ClassTypes\getProperty($aResult, 'defdiameter', 0.0001);
|
||||
$fRadius = ClassTypes\getDefRadius($aResult);
|
||||
|
||||
$aOutlineResult = $this->oPlaceLookup->getOutlines($aResult['place_id'], $aResult['lon'], $aResult['lat'], $fDiameter/2);
|
||||
$aOutlineResult = $this->oPlaceLookup->getOutlines($aResult['place_id'], $aResult['lon'], $aResult['lat'], $fRadius);
|
||||
if ($aOutlineResult) {
|
||||
$aResult = array_merge($aResult, $aOutlineResult);
|
||||
}
|
||||
|
||||
if ($aResult['extra_place'] == 'city') {
|
||||
$aResult['class'] = 'place';
|
||||
$aResult['type'] = 'city';
|
||||
$aResult['rank_search'] = 16;
|
||||
}
|
||||
|
||||
// Is there an icon set for this type of result?
|
||||
$aClassInfo = ClassTypes\getInfo($aResult);
|
||||
|
||||
if ($aClassInfo) {
|
||||
if (isset($aClassInfo['icon'])) {
|
||||
$aResult['icon'] = CONST_Website_BaseURL.'images/mapicons/'.$aClassInfo['icon'].'.p.20.png';
|
||||
}
|
||||
|
||||
if (isset($aClassInfo['label'])) {
|
||||
$aResult['label'] = $aClassInfo['label'];
|
||||
}
|
||||
$sIcon = ClassTypes\getIconFile($aResult);
|
||||
if (isset($sIcon)) {
|
||||
$aResult['icon'] = $sIcon;
|
||||
}
|
||||
|
||||
$sLabel = ClassTypes\getLabel($aResult);
|
||||
if (isset($sLabel)) {
|
||||
$aResult['label'] = $sLabel;
|
||||
}
|
||||
$aResult['name'] = $aResult['langaddress'];
|
||||
|
||||
if ($oCtx->hasNearPoint()) {
|
||||
@@ -954,10 +941,9 @@ class Geocode
|
||||
// - number of exact matches from the query
|
||||
$aResult['foundorder'] -= $aResults[$aResult['place_id']]->iExactMatches;
|
||||
// - importance of the class/type
|
||||
if (isset($aClassType[$aResult['class'].':'.$aResult['type']]['importance'])
|
||||
&& $aClassType[$aResult['class'].':'.$aResult['type']]['importance']
|
||||
) {
|
||||
$aResult['foundorder'] += 0.0001 * $aClassType[$aResult['class'].':'.$aResult['type']]['importance'];
|
||||
$iClassImportance = ClassTypes\getImportance($aResult);
|
||||
if (isset($iClassImportance)) {
|
||||
$aResult['foundorder'] += 0.0001 * $iClassImportance;
|
||||
} else {
|
||||
$aResult['foundorder'] += 0.01;
|
||||
}
|
||||
|
||||
@@ -104,18 +104,29 @@ class ParameterParser
|
||||
}
|
||||
|
||||
foreach ($aLanguages as $sLanguage => $fLanguagePref) {
|
||||
$aLangPrefOrder['short_name:'.$sLanguage] = 'short_name:'.$sLanguage;
|
||||
$aLangPrefOrder['name:'.$sLanguage] = 'name:'.$sLanguage;
|
||||
}
|
||||
$aLangPrefOrder['short_name'] = 'short_name';
|
||||
$aLangPrefOrder['name'] = 'name';
|
||||
$aLangPrefOrder['brand'] = 'brand';
|
||||
foreach ($aLanguages as $sLanguage => $fLanguagePref) {
|
||||
$aLangPrefOrder['official_name:'.$sLanguage] = 'official_name:'.$sLanguage;
|
||||
$aLangPrefOrder['short_name:'.$sLanguage] = 'short_name:'.$sLanguage;
|
||||
}
|
||||
$aLangPrefOrder['official_name'] = 'official_name';
|
||||
$aLangPrefOrder['short_name'] = 'short_name';
|
||||
$aLangPrefOrder['ref'] = 'ref';
|
||||
$aLangPrefOrder['type'] = 'type';
|
||||
return $aLangPrefOrder;
|
||||
}
|
||||
|
||||
public function hasSetAny($aParamNames)
|
||||
{
|
||||
foreach ($aParamNames as $sName) {
|
||||
if ($this->getBool($sName)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
lib/Phrase.php
@@ -9,7 +9,8 @@ namespace Nominatim;
 */
class Phrase
{
    const MAX_DEPTH = 7;
    const MAX_WORDSET_LEN = 20;
    const MAX_WORDSETS = 100;

    // Complete phrase as a string.
    private $sPhrase;
@@ -20,13 +21,24 @@ class Phrase
|
||||
// Possible segmentations of the phrase.
|
||||
private $aWordSets;
|
||||
|
||||
public static function cmpByArraylen($aA, $aB)
|
||||
{
|
||||
$iALen = count($aA);
|
||||
$iBLen = count($aB);
|
||||
|
||||
if ($iALen == $iBLen) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return ($iALen < $iBLen) ? -1 : 1;
|
||||
}
|
||||
|
||||
|
||||
public function __construct($sPhrase, $sPhraseType)
|
||||
{
|
||||
$this->sPhrase = trim($sPhrase);
|
||||
$this->sPhraseType = $sPhraseType;
|
||||
$this->aWords = explode(' ', $this->sPhrase);
|
||||
$this->aWordSets = $this->createWordSets($this->aWords, 0);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -60,10 +72,17 @@ class Phrase
|
||||
*/
|
||||
public function addTokens(&$aTokens)
|
||||
{
|
||||
foreach ($this->aWordSets as $aSet) {
|
||||
foreach ($aSet as $sWord) {
|
||||
$aTokens[' '.$sWord] = ' '.$sWord;
|
||||
$aTokens[$sWord] = $sWord;
|
||||
$iNumWords = count($this->aWords);
|
||||
|
||||
for ($i = 0; $i < $iNumWords; $i++) {
|
||||
$sPhrase = $this->aWords[$i];
|
||||
$aTokens[' '.$sPhrase] = ' '.$sPhrase;
|
||||
$aTokens[$sPhrase] = $sPhrase;
|
||||
|
||||
for ($j = $i + 1; $j < $iNumWords; $j++) {
|
||||
$sPhrase .= ' '.$this->aWords[$j];
|
||||
$aTokens[' '.$sPhrase] = ' '.$sPhrase;
|
||||
$aTokens[$sPhrase] = $sPhrase;
|
||||
}
|
||||
}
|
||||
}
|
||||
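The rewritten addTokens() above no longer walks precomputed word sets; it registers every contiguous run of words of the phrase, once with the leading blank that marks a full word and once without. The same two loops, reduced to a self-contained sketch on a throwaway phrase:

<?php
// Token candidates generated for the phrase 'main street berlin'.
$aWords = explode(' ', 'main street berlin');
$aTokens = array();
$iNumWords = count($aWords);

for ($i = 0; $i < $iNumWords; $i++) {
    $sPhrase = $aWords[$i];
    $aTokens[' '.$sPhrase] = ' '.$sPhrase;
    $aTokens[$sPhrase] = $sPhrase;

    for ($j = $i + 1; $j < $iNumWords; $j++) {
        $sPhrase .= ' '.$aWords[$j];
        $aTokens[' '.$sPhrase] = ' '.$sPhrase;
        $aTokens[$sPhrase] = $sPhrase;
    }
}

// 'main', 'main street', 'main street berlin', 'street', 'street berlin',
// 'berlin' - each once with and once without the leading blank.
print_r(array_keys($aTokens));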
@@ -75,44 +94,59 @@ class Phrase
|
||||
*/
|
||||
public function invertWordSets()
|
||||
{
|
||||
$this->aWordSets = $this->createInverseWordSets($this->aWords, 0);
|
||||
foreach ($this->aWordSets as $i => $aSet) {
|
||||
$this->aWordSets[$i] = array_reverse($aSet);
|
||||
}
|
||||
}
|
||||
|
||||
private function createWordSets($aWords, $iDepth)
|
||||
public function computeWordSets($oTokens)
|
||||
{
|
||||
$aResult = array(array(join(' ', $aWords)));
|
||||
$sFirstToken = '';
|
||||
if ($iDepth < Phrase::MAX_DEPTH) {
|
||||
while (count($aWords) > 1) {
|
||||
$sWord = array_shift($aWords);
|
||||
$sFirstToken .= ($sFirstToken?' ':'').$sWord;
|
||||
$aRest = $this->createWordSets($aWords, $iDepth + 1);
|
||||
foreach ($aRest as $aSet) {
|
||||
$aResult[] = array_merge(array($sFirstToken), $aSet);
|
||||
$iNumWords = count($this->aWords);
|
||||
// Caches the word set for the partial phrase up to word i.
|
||||
$aSetCache = array_fill(0, $iNumWords, array());
|
||||
|
||||
// Initialise first element of cache. There can only be the word.
|
||||
if ($oTokens->containsAny($this->aWords[0])) {
|
||||
$aSetCache[0][] = array($this->aWords[0]);
|
||||
}
|
||||
|
||||
// Now do the next elements using what we already have.
|
||||
for ($i = 1; $i < $iNumWords; $i++) {
|
||||
for ($j = $i; $j > 0; $j--) {
|
||||
$sPartial = $j == $i ? $this->aWords[$j] : $this->aWords[$j].' '.$sPartial;
|
||||
if (!empty($aSetCache[$j - 1]) && $oTokens->containsAny($sPartial)) {
|
||||
$aPartial = array($sPartial);
|
||||
foreach ($aSetCache[$j - 1] as $aSet) {
|
||||
if (count($aSet) < Phrase::MAX_WORDSET_LEN) {
|
||||
$aSetCache[$i][] = array_merge($aSet, $aPartial);
|
||||
}
|
||||
}
|
||||
if (count($aSetCache[$i]) > 2 * Phrase::MAX_WORDSETS) {
|
||||
usort(
|
||||
$aSetCache[$i],
|
||||
array('\Nominatim\Phrase', 'cmpByArraylen')
|
||||
);
|
||||
$aSetCache[$i] = array_slice(
|
||||
$aSetCache[$i],
|
||||
0,
|
||||
Phrase::MAX_WORDSETS
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// finally the current full phrase
|
||||
$sPartial = $this->aWords[0].' '.$sPartial;
|
||||
if ($oTokens->containsAny($sPartial)) {
|
||||
$aSetCache[$i][] = array($sPartial);
|
||||
}
|
||||
}
|
||||
|
||||
return $aResult;
|
||||
$this->aWordSets = $aSetCache[$iNumWords - 1];
|
||||
usort($this->aWordSets, array('\Nominatim\Phrase', 'cmpByArraylen'));
|
||||
$this->aWordSets = array_slice($this->aWordSets, 0, Phrase::MAX_WORDSETS);
|
||||
}
|
||||
|
||||
private function createInverseWordSets($aWords, $iDepth)
|
||||
{
|
||||
$aResult = array(array(join(' ', $aWords)));
|
||||
$sFirstToken = '';
|
||||
if ($iDepth < Phrase::MAX_DEPTH) {
|
||||
while (count($aWords) > 1) {
|
||||
$sWord = array_pop($aWords);
|
||||
$sFirstToken = $sWord.($sFirstToken?' ':'').$sFirstToken;
|
||||
$aRest = $this->createInverseWordSets($aWords, $iDepth + 1);
|
||||
foreach ($aRest as $aSet) {
|
||||
$aResult[] = array_merge(array($sFirstToken), $aSet);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return $aResult;
|
||||
}
|
||||
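computeWordSets() replaces the recursive createWordSets()/createInverseWordSets() pair with a bottom-up pass: for every prefix of the phrase it caches only those segmentations whose last word group is a known token. A much simplified sketch of that idea, with the token lookup stubbed out and the MAX_WORDSET_LEN/MAX_WORDSETS pruning omitted:

<?php
// Simplified sketch of the dynamic programming in Phrase::computeWordSets().
// containsAnyStub() stands in for TokenList::containsAny(), which in the real
// code is backed by the word table.
function containsAnyStub($sPhrase)
{
    $aKnown = array('new', 'york', 'new york', 'main street');
    return in_array($sPhrase, $aKnown);
}

$aWords = explode(' ', 'new york main street');
$iNumWords = count($aWords);
// $aSetCache[$i] holds all valid segmentations of words 0..$i.
$aSetCache = array_fill(0, $iNumWords, array());

if (containsAnyStub($aWords[0])) {
    $aSetCache[0][] = array($aWords[0]);
}

for ($i = 1; $i < $iNumWords; $i++) {
    $sPartial = '';
    for ($j = $i; $j > 0; $j--) {
        // Candidate last group: words $j..$i.
        $sPartial = ($j == $i) ? $aWords[$j] : $aWords[$j].' '.$sPartial;
        if (!empty($aSetCache[$j - 1]) && containsAnyStub($sPartial)) {
            foreach ($aSetCache[$j - 1] as $aSet) {
                $aSetCache[$i][] = array_merge($aSet, array($sPartial));
            }
        }
    }
    // Finally the whole prefix as a single group.
    $sPartial = $aWords[0].' '.$sPartial;
    if (containsAnyStub($sPartial)) {
        $aSetCache[$i][] = array($sPartial);
    }
}

// Both segmentations survive for this stubbed word list:
// ('new', 'york', 'main street') and ('new york', 'main street').
print_r($aSetCache[$iNumWords - 1]);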
|
||||
public function debugInfo()
|
||||
{
|
||||
|
||||
@@ -15,7 +15,6 @@ class PlaceLookup
|
||||
protected $bExtraTags = false;
|
||||
protected $bNameDetails = false;
|
||||
|
||||
protected $bIncludePolygonAsPoints = false;
|
||||
protected $bIncludePolygonAsText = false;
|
||||
protected $bIncludePolygonAsGeoJSON = false;
|
||||
protected $bIncludePolygonAsKML = false;
|
||||
@@ -38,11 +37,6 @@ class PlaceLookup
|
||||
return $this->bDeDupe;
|
||||
}
|
||||
|
||||
public function setIncludePolygonAsPoints($b = true)
|
||||
{
|
||||
$this->bIncludePolygonAsPoints = $b;
|
||||
}
|
||||
|
||||
public function setIncludeAddressDetails($b)
|
||||
{
|
||||
$this->bAddressDetails = $b;
|
||||
@@ -61,7 +55,6 @@ class PlaceLookup
|
||||
|
||||
if ($sGeomType === null || $sGeomType == 'geojson') {
|
||||
$this->bIncludePolygonAsGeoJSON = $oParams->getBool('polygon_geojson');
|
||||
$this->bIncludePolygonAsPoints = false;
|
||||
}
|
||||
|
||||
if ($oParams->getString('format', '') !== 'geojson') {
|
||||
@@ -100,7 +93,6 @@ class PlaceLookup
|
||||
if ($this->bExtraTags) $aParams['extratags'] = '1';
|
||||
if ($this->bNameDetails) $aParams['namedetails'] = '1';
|
||||
|
||||
if ($this->bIncludePolygonAsPoints) $aParams['polygon'] = '1';
|
||||
if ($this->bIncludePolygonAsText) $aParams['polygon_text'] = '1';
|
||||
if ($this->bIncludePolygonAsGeoJSON) $aParams['polygon_geojson'] = '1';
|
||||
if ($this->bIncludePolygonAsKML) $aParams['polygon_kml'] = '1';
|
||||
@@ -215,7 +207,7 @@ class PlaceLookup
|
||||
'ST_Collect(centroid)',
|
||||
'min(CASE WHEN placex.rank_search < 28 THEN placex.place_id ELSE placex.parent_place_id END)'
|
||||
);
|
||||
$sSQL .= " (extratags->'place') AS extra_place ";
|
||||
$sSQL .= " COALESCE(extratags->'place', extratags->'linked_place') AS extra_place ";
|
||||
$sSQL .= ' FROM placex';
|
||||
$sSQL .= " WHERE place_id in ($sPlaceIDs) ";
|
||||
$sSQL .= ' AND (';
|
||||
@@ -248,7 +240,7 @@ class PlaceLookup
|
||||
$sSQL .= ' ref, ';
|
||||
if ($this->bExtraTags) $sSQL .= 'extratags, ';
|
||||
if ($this->bNameDetails) $sSQL .= 'name, ';
|
||||
$sSQL .= " extratags->'place' ";
|
||||
$sSQL .= ' extra_place ';
|
||||
|
||||
$aSubSelects[] = $sSQL;
|
||||
}
|
||||
@@ -456,10 +448,9 @@ class PlaceLookup
|
||||
}
|
||||
}
|
||||
|
||||
$aPlace['addresstype'] = ClassTypes\getProperty(
|
||||
$aPlace['addresstype'] = ClassTypes\getLabelTag(
|
||||
$aPlace,
|
||||
'simplelabel',
|
||||
$aPlace['class']
|
||||
$aPlace['country_code']
|
||||
);
|
||||
}
|
||||
|
||||
@@ -500,7 +491,7 @@ class PlaceLookup
|
||||
if ($this->bIncludePolygonAsGeoJSON) $sSQL .= ',ST_AsGeoJSON(geometry) as asgeojson';
|
||||
if ($this->bIncludePolygonAsKML) $sSQL .= ',ST_AsKML(geometry) as askml';
|
||||
if ($this->bIncludePolygonAsSVG) $sSQL .= ',ST_AsSVG(geometry) as assvg';
|
||||
if ($this->bIncludePolygonAsText || $this->bIncludePolygonAsPoints) $sSQL .= ',ST_AsText(geometry) as astext';
|
||||
if ($this->bIncludePolygonAsText) $sSQL .= ',ST_AsText(geometry) as astext';
|
||||
if ($fLonReverse != null && $fLatReverse != null) {
|
||||
$sFrom = ' from (SELECT * , CASE WHEN (class = \'highway\') AND (ST_GeometryType(geometry) = \'ST_LineString\') THEN ';
|
||||
$sFrom .=' ST_ClosestPoint(geometry, ST_SetSRID(ST_Point('.$fLatReverse.','.$fLonReverse.'),4326))';
|
||||
@@ -527,8 +518,6 @@ class PlaceLookup
|
||||
if ($this->bIncludePolygonAsKML) $aOutlineResult['askml'] = $aPointPolygon['askml'];
|
||||
if ($this->bIncludePolygonAsSVG) $aOutlineResult['assvg'] = $aPointPolygon['assvg'];
|
||||
if ($this->bIncludePolygonAsText) $aOutlineResult['astext'] = $aPointPolygon['astext'];
|
||||
if ($this->bIncludePolygonAsPoints) $aOutlineResult['aPolyPoints'] = geometryText2Points($aPointPolygon['astext'], $fRadius);
|
||||
|
||||
|
||||
if (abs($aPointPolygon['minlat'] - $aPointPolygon['maxlat']) < 0.0000001) {
|
||||
$aPointPolygon['minlat'] = $aPointPolygon['minlat'] - $fRadius;
|
||||
@@ -551,17 +540,12 @@ class PlaceLookup
|
||||
|
||||
// as a fallback we generate a bounding box without knowing the size of the geometry
|
||||
if ((!isset($aOutlineResult['aBoundingBox'])) && isset($fLon)) {
|
||||
//
|
||||
if ($this->bIncludePolygonAsPoints) {
|
||||
$sGeometryText = 'POINT('.$fLon.','.$fLat.')';
|
||||
$aOutlineResult['aPolyPoints'] = geometryText2Points($sGeometryText, $fRadius);
|
||||
}
|
||||
|
||||
$aBounds = array();
|
||||
$aBounds['minlat'] = $fLat - $fRadius;
|
||||
$aBounds['maxlat'] = $fLat + $fRadius;
|
||||
$aBounds['minlon'] = $fLon - $fRadius;
|
||||
$aBounds['maxlon'] = $fLon + $fRadius;
|
||||
$aBounds = array(
|
||||
'minlat' => $fLat - $fRadius,
|
||||
'maxlat' => $fLat + $fRadius,
|
||||
'minlon' => $fLon - $fRadius,
|
||||
'maxlon' => $fLon + $fRadius
|
||||
);
|
||||
|
||||
$aOutlineResult['aBoundingBox'] = array(
|
||||
(string)$aBounds['minlat'],
|
||||
|
||||
@@ -203,7 +203,7 @@ class SearchContext
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an SQL snipped for computing the distance from the reference point.
|
||||
* Get an SQL snippet for computing the distance from the reference point.
|
||||
*
|
||||
* @param string $sObj SQL variable name to compute the distance from.
|
||||
*
|
||||
@@ -215,7 +215,7 @@ class SearchContext
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an SQL snipped for checking if something is within range of the
|
||||
* Get an SQL snippet for checking if something is within range of the
|
||||
* reference point.
|
||||
*
|
||||
* @param string $sObj SQL variable name to compute if it is within range.
|
||||
@@ -228,14 +228,14 @@ class SearchContext
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an SQL snipped of the importance factor of the viewbox.
|
||||
* Get an SQL snippet of the importance factor of the viewbox.
|
||||
*
|
||||
* The importance factor is computed by checking if an object is within
|
||||
* the viewbox and/or the extended version of the viewbox.
|
||||
*
|
||||
* @param string $sObj SQL variable name of object to weight the importance
|
||||
*
|
||||
* @return string SQL snipped of the factor with a leading multiply sign.
|
||||
* @return string SQL snippet of the factor with a leading multiply sign.
|
||||
*/
|
||||
public function viewboxImportanceSQL($sObj)
|
||||
{
|
||||
@@ -252,7 +252,7 @@ class SearchContext
|
||||
}
|
||||
|
||||
/**
|
||||
* SQL snipped checking if a place ID should be excluded.
|
||||
* SQL snippet checking if a place ID should be excluded.
|
||||
*
|
||||
* @param string $sVariable SQL variable name of place ID to check,
|
||||
* potentially prefixed with more SQL.
|
||||
|
||||
@@ -447,8 +447,8 @@ class SearchDescription
|
||||
$iLimit
|
||||
);
|
||||
|
||||
//now search for housenumber, if housenumber provided
|
||||
if ($this->sHouseNumber && !empty($aResults)) {
|
||||
// Now search for housenumber, if housenumber provided. Can be zero.
|
||||
if (($this->sHouseNumber || $this->sHouseNumber === '0') && !empty($aResults)) {
|
||||
// Downgrade the rank of the street results, they are missing
|
||||
// the housenumber.
|
||||
foreach ($aResults as $oRes) {
|
||||
@@ -660,10 +660,7 @@ class SearchDescription
|
||||
$aTerms[] = 'address_rank between 16 and 27';
|
||||
} elseif (!$this->sClass || $this->iOperator == Operator::NAME) {
|
||||
if ($iMinAddressRank > 0) {
|
||||
$aTerms[] = 'address_rank >= '.$iMinAddressRank;
|
||||
}
|
||||
if ($iMaxAddressRank < 30) {
|
||||
$aTerms[] = 'address_rank <= '.$iMaxAddressRank;
|
||||
$aTerms[] = "((address_rank between $iMinAddressRank and $iMaxAddressRank) or (search_rank between $iMinAddressRank and $iMaxAddressRank))";
|
||||
}
|
||||
}
lib/Shell.php (new file, 80 lines)
@@ -0,0 +1,80 @@
<?php

namespace Nominatim;

class Shell
{
    public function __construct($sBaseCmd, ...$aParams)
    {
        if (!$sBaseCmd) {
            throw new Exception('Command missing in new() call');
        }
        $this->baseCmd = $sBaseCmd;
        $this->aParams = array();
        $this->aEnv = null; // null = use the same environment as the current PHP process

        $this->stdoutString = null;

        foreach ($aParams as $sParam) {
            $this->addParams($sParam);
        }
    }

    public function addParams(...$aParams)
    {
        foreach ($aParams as $sParam) {
            if (isset($sParam) && $sParam !== null && $sParam !== '') {
                array_push($this->aParams, $sParam);
            }
        }
        return $this;
    }

    public function addEnvPair($sKey, $sVal)
    {
        if (isset($sKey) && $sKey && isset($sVal)) {
            if (!isset($this->aEnv)) $this->aEnv = $_ENV;
            $this->aEnv = array_merge($this->aEnv, array($sKey => $sVal), $_ENV);
        }
        return $this;
    }

    public function escapedCmd()
    {
        $aEscaped = array_map(function ($sParam) {
            return $this->escapeParam($sParam);
        }, array_merge(array($this->baseCmd), $this->aParams));

        return join(' ', $aEscaped);
    }

    public function run()
    {
        $sCmd = $this->escapedCmd();
        // $aEnv does not need escaping, proc_open seems to handle it fine

        $aFDs = array(
                 0 => array('pipe', 'r'),
                 1 => STDOUT,
                 2 => STDERR
                );
        $aPipes = null;
        $hProc = @proc_open($sCmd, $aFDs, $aPipes, null, $this->aEnv);
        if (!is_resource($hProc)) {
            throw new \Exception('Unable to run command: ' . $sCmd);
        }

        fclose($aPipes[0]); // no stdin

        $iStat = proc_close($hProc);
        return $iStat;
    }

    private function escapeParam($sParam)
    {
        if (preg_match('/^-*\w+$/', $sParam)) return $sParam;
        return escapeshellarg($sParam);
    }
}
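A short usage sketch for the new wrapper, mirroring how runSQLScript() and SetupFunctions build their psql/createdb commands further down in this changeset; the database name and password are placeholders and the usual Nominatim constants are assumed to be loaded.

<?php
require_once(CONST_BasePath.'/lib/Shell.php');

// Parameters are collected one by one; addParams() silently drops empty values.
$oCmd = new \Nominatim\Shell('psql');
$oCmd->addParams('--port', 5432)
     ->addParams('--dbname', 'nominatim')
     ->addParams('--quiet');

// Credentials go into the child environment instead of the command line.
$oCmd->addEnvPair('PGPASSWORD', 'placeholder-password');

// escapedCmd() shell-quotes anything that is not a plain word:
// psql --port 5432 --dbname nominatim --quiet
echo $oCmd->escapedCmd()."\n";

// run() connects stdout/stderr of the child to the current process and
// returns its exit status.
$iStatus = $oCmd->run();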
@@ -55,6 +55,18 @@ class TokenList
        return isset($this->aTokens[$sWord]);
    }

    /**
     * Check if there are partial or full tokens for the given word.
     *
     * @param string $sWord Token word to look for.
     *
     * @return bool True if there is at least one token for the token word.
     */
    public function containsAny($sWord)
    {
        return isset($this->aTokens[$sWord]) || isset($this->aTokens[' '.$sWord]);
    }

    /**
     * Get the list of tokens for the given token word.
     *
lib/cmd.php (42 lines changed)
@@ -1,5 +1,6 @@
|
||||
<?php
|
||||
|
||||
require_once(CONST_BasePath.'/lib/Shell.php');
|
||||
|
||||
function getCmdOpt($aArg, $aSpec, &$aResult, $bExitOnError = false, $bExitOnUnknown = false)
|
||||
{
|
||||
@@ -148,30 +149,33 @@ function runSQLScript($sScript, $bfatal = true, $bVerbose = false, $bIgnoreError
|
||||
// Convert database DSN to psql parameters
|
||||
$aDSNInfo = \Nominatim\DB::parseDSN(CONST_Database_DSN);
|
||||
if (!isset($aDSNInfo['port']) || !$aDSNInfo['port']) $aDSNInfo['port'] = 5432;
|
||||
$sCMD = 'psql -p '.$aDSNInfo['port'].' -d '.$aDSNInfo['database'];
|
||||
|
||||
$oCmd = new \Nominatim\Shell('psql');
|
||||
$oCmd->addParams('--port', $aDSNInfo['port']);
|
||||
$oCmd->addParams('--dbname', $aDSNInfo['database']);
|
||||
if (isset($aDSNInfo['hostspec']) && $aDSNInfo['hostspec']) {
|
||||
$sCMD .= ' -h ' . $aDSNInfo['hostspec'];
|
||||
$oCmd->addParams('--host', $aDSNInfo['hostspec']);
|
||||
}
|
||||
if (isset($aDSNInfo['username']) && $aDSNInfo['username']) {
|
||||
$sCMD .= ' -U ' . $aDSNInfo['username'];
|
||||
$oCmd->addParams('--username', $aDSNInfo['username']);
|
||||
}
|
||||
$aProcEnv = null;
|
||||
if (isset($aDSNInfo['password']) && $aDSNInfo['password']) {
|
||||
$aProcEnv = array_merge(array('PGPASSWORD' => $aDSNInfo['password']), $_ENV);
|
||||
if (isset($aDSNInfo['password'])) {
|
||||
$oCmd->addEnvPair('PGPASSWORD', $aDSNInfo['password']);
|
||||
}
|
||||
if (!$bVerbose) {
|
||||
$sCMD .= ' -q';
|
||||
$oCmd->addParams('--quiet');
|
||||
}
|
||||
if ($bfatal && !$bIgnoreErrors) {
|
||||
$sCMD .= ' -v ON_ERROR_STOP=1';
|
||||
$oCmd->addParams('-v', 'ON_ERROR_STOP=1');
|
||||
}
|
||||
|
||||
$aDescriptors = array(
|
||||
0 => array('pipe', 'r'),
|
||||
1 => STDOUT,
|
||||
2 => STDERR
|
||||
);
|
||||
$ahPipes = null;
|
||||
$hProcess = @proc_open($sCMD, $aDescriptors, $ahPipes, null, $aProcEnv);
|
||||
$hProcess = @proc_open($oCmd->escapedCmd(), $aDescriptors, $ahPipes, null, $oCmd->aEnv);
|
||||
if (!is_resource($hProcess)) {
|
||||
fail('unable to start pgsql');
|
||||
}
|
||||
@@ -191,23 +195,3 @@ function runSQLScript($sScript, $bfatal = true, $bVerbose = false, $bIgnoreError
|
||||
fail("pgsql returned with error code ($iReturn)");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function runWithEnv($sCmd, $aEnv)
|
||||
{
|
||||
$aFDs = array(
|
||||
0 => array('pipe', 'r'),
|
||||
1 => STDOUT,
|
||||
2 => STDERR
|
||||
);
|
||||
$aPipes = null;
|
||||
$hProc = @proc_open($sCmd, $aFDs, $aPipes, null, $aEnv);
|
||||
if (!is_resource($hProc)) {
|
||||
fail('unable to run command:' . $sCmd);
|
||||
}
|
||||
|
||||
fclose($aPipes[0]); // no stdin
|
||||
|
||||
$iStat = proc_close($hProc);
|
||||
return $iStat;
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ function exception_handler_html($exception)
|
||||
http_response_code($exception->getCode());
|
||||
header('Content-type: text/html; charset=UTF-8');
|
||||
include(CONST_BasePath.'/lib/template/error-html.php');
|
||||
exit();
|
||||
}
|
||||
|
||||
function exception_handler_json($exception)
|
||||
@@ -28,6 +29,7 @@ function exception_handler_json($exception)
|
||||
http_response_code($exception->getCode());
|
||||
header('Content-type: application/json; charset=utf-8');
|
||||
include(CONST_BasePath.'/lib/template/error-json.php');
|
||||
exit();
|
||||
}
|
||||
|
||||
function exception_handler_xml($exception)
|
||||
@@ -36,17 +38,51 @@ function exception_handler_xml($exception)
|
||||
header('Content-type: text/xml; charset=utf-8');
|
||||
echo '<?xml version="1.0" encoding="UTF-8" ?>'."\n";
|
||||
include(CONST_BasePath.'/lib/template/error-xml.php');
|
||||
exit();
|
||||
}
|
||||
|
||||
function shutdown_exception_handler_html()
|
||||
{
|
||||
$error = error_get_last();
|
||||
if ($error !== null && $error['type'] === E_ERROR) {
|
||||
exception_handler_html(new Exception($error['message'], 500));
|
||||
}
|
||||
}
|
||||
|
||||
function shutdown_exception_handler_xml()
|
||||
{
|
||||
$error = error_get_last();
|
||||
if ($error !== null && $error['type'] === E_ERROR) {
|
||||
exception_handler_xml(new Exception($error['message'], 500));
|
||||
}
|
||||
}
|
||||
|
||||
function shutdown_exception_handler_json()
|
||||
{
|
||||
$error = error_get_last();
|
||||
if ($error !== null && $error['type'] === E_ERROR) {
|
||||
exception_handler_json(new Exception($error['message'], 500));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function set_exception_handler_by_format($sFormat = 'html')
|
||||
function set_exception_handler_by_format($sFormat = null)
|
||||
{
|
||||
if ($sFormat == 'html') {
|
||||
// Multiple calls to register_shutdown_function will cause multiple callbacks
|
||||
// to be executed, we only want the last executed. Thus we don't want to register
|
||||
// one by default without an explicit $sFormat set.
|
||||
|
||||
if (!isset($sFormat)) {
|
||||
set_exception_handler('exception_handler_html');
|
||||
} elseif ($sFormat == 'html') {
|
||||
set_exception_handler('exception_handler_html');
|
||||
register_shutdown_function('shutdown_exception_handler_html');
|
||||
} elseif ($sFormat == 'xml') {
|
||||
set_exception_handler('exception_handler_xml');
|
||||
register_shutdown_function('shutdown_exception_handler_xml');
|
||||
} else {
|
||||
set_exception_handler('exception_handler_json');
|
||||
register_shutdown_function('shutdown_exception_handler_json');
|
||||
}
|
||||
}
|
||||
// set a default
|
||||
|
||||
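With the null default, an endpoint can install a safe HTML handler before it knows the requested format and re-register once (and only once) when the format is parsed, which is what keeps the shutdown handler from being registered twice. An illustrative sequence:

<?php
// First call: plain exception handler only, no shutdown hook yet.
set_exception_handler_by_format();

// ... parse the request ...
$sOutputFormat = 'json';   // e.g. taken from the 'format' parameter

// Second call: JSON handler plus the matching shutdown handler.
set_exception_handler_by_format($sOutputFormat);

// Uncaught exceptions and fatal errors are now reported as JSON.
throw new Exception('Bad Request', 400);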
lib/lib.php (61 lines changed)
@@ -95,8 +95,8 @@ function parseLatLon($sQuery)
|
||||
$fQueryLat = null;
|
||||
$fQueryLon = null;
|
||||
|
||||
if (preg_match('/\\s*([NS])[ ]+([0-9]+[0-9.]*)[° ]+([0-9.]+)?[′\']*[, ]+([EW])[ ]+([0-9]+)[° ]+([0-9]+[0-9.]*)[′\']*\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6
|
||||
if (preg_match('/\\s*([NS])[\s]+([0-9]+[0-9.]*)[°\s]+([0-9.]+)?[′\']*[,\s]+([EW])[\s]+([0-9]+)[°\s]+([0-9]+[0-9.]*)[′\']*\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6
|
||||
* degrees decimal minutes
|
||||
* N 40 26.767, W 79 58.933
|
||||
* N 40°26.767′, W 79°58.933′
|
||||
@@ -104,8 +104,8 @@ function parseLatLon($sQuery)
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60);
|
||||
$fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[5] + $aData[6]/60);
|
||||
} elseif (preg_match('/\\s*([0-9]+)[° ]+([0-9]+[0-9.]*)?[′\']*[ ]+([NS])[, ]+([0-9]+)[° ]+([0-9]+[0-9.]*)?[′\' ]+([EW])\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6
|
||||
} elseif (preg_match('/\\s*([0-9]+)[°\s]+([0-9]+[0-9.]*)?[′\']*[\s]+([NS])[,\s]+([0-9]+)[°\s]+([0-9]+[0-9.]*)?[′\'\s]+([EW])\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6
|
||||
* degrees decimal minutes
|
||||
* 40 26.767 N, 79 58.933 W
|
||||
* 40° 26.767′ N 79° 58.933′ W
|
||||
@@ -113,8 +113,8 @@ function parseLatLon($sQuery)
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[3]=='N'?1:-1) * ($aData[1] + $aData[2]/60);
|
||||
$fQueryLon = ($aData[6]=='E'?1:-1) * ($aData[4] + $aData[5]/60);
|
||||
} elseif (preg_match('/\\s*([NS])[ ]([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+)[″"]*[, ]+([EW])[ ]([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+)[″"]*\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6 7 8
|
||||
} elseif (preg_match('/\\s*([NS])[\s]+([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+)[″"]*[,\s]+([EW])[\s]+([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+)[″"]*\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6 7 8
|
||||
* degrees decimal seconds
|
||||
* N 40 26 46 W 79 58 56
|
||||
* N 40° 26′ 46″, W 79° 58′ 56″
|
||||
@@ -122,8 +122,8 @@ function parseLatLon($sQuery)
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60 + $aData[4]/3600);
|
||||
$fQueryLon = ($aData[5]=='E'?1:-1) * ($aData[6] + $aData[7]/60 + $aData[8]/3600);
|
||||
} elseif (preg_match('/\\s*([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+[0-9.]*)[″" ]+([NS])[, ]+([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+[0-9.]*)[″" ]+([EW])\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6 7 8
|
||||
} elseif (preg_match('/\\s*([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+[0-9.]*)[″"\s]+([NS])[,\s]+([0-9]+)[°\s]+([0-9]+)[′\'\s]+([0-9]+[0-9.]*)[″"\s]+([EW])\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4 5 6 7 8
|
||||
* degrees decimal seconds
|
||||
* 40 26 46 N 79 58 56 W
|
||||
* 40° 26′ 46″ N, 79° 58′ 56″ W
|
||||
@@ -132,24 +132,24 @@ function parseLatLon($sQuery)
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[4]=='N'?1:-1) * ($aData[1] + $aData[2]/60 + $aData[3]/3600);
|
||||
$fQueryLon = ($aData[8]=='E'?1:-1) * ($aData[5] + $aData[6]/60 + $aData[7]/3600);
|
||||
} elseif (preg_match('/\\s*([NS])[ ]([0-9]+[0-9]*\\.[0-9]+)[°]*[, ]+([EW])[ ]([0-9]+[0-9]*\\.[0-9]+)[°]*\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4
|
||||
} elseif (preg_match('/\\s*([NS])[\s]+([0-9]+[0-9]*\\.[0-9]+)[°]*[,\s]+([EW])[\s]+([0-9]+[0-9]*\\.[0-9]+)[°]*\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4
|
||||
* degrees decimal
|
||||
* N 40.446° W 79.982°
|
||||
*/
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2]);
|
||||
$fQueryLon = ($aData[3]=='E'?1:-1) * ($aData[4]);
|
||||
} elseif (preg_match('/\\s*([0-9]+[0-9]*\\.[0-9]+)[° ]+([NS])[, ]+([0-9]+[0-9]*\\.[0-9]+)[° ]+([EW])\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4
|
||||
} elseif (preg_match('/\\s*([0-9]+[0-9]*\\.[0-9]+)[°\s]+([NS])[,\s]+([0-9]+[0-9]*\\.[0-9]+)[°\s]+([EW])\\s*/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4
|
||||
* degrees decimal
|
||||
* 40.446° N 79.982° W
|
||||
*/
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[2]=='N'?1:-1) * ($aData[1]);
|
||||
$fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[3]);
|
||||
} elseif (preg_match('/(\\s*\\[|^\\s*|\\s*)(-?[0-9]+[0-9]*\\.[0-9]+)[, ]+(-?[0-9]+[0-9]*\\.[0-9]+)(\\]\\s*|\\s*$|\\s*)/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4
|
||||
} elseif (preg_match('/(\\s*\\[|^\\s*|\\s*)(-?[0-9]+[0-9]*\\.[0-9]+)[,\s]+(-?[0-9]+[0-9]*\\.[0-9]+)(\\]\\s*|\\s*$|\\s*)/', $sQuery, $aData)) {
|
||||
/* 1 2 3 4
|
||||
* degrees decimal
|
||||
* 12.34, 56.78
|
||||
* 12.34 56.78
|
||||
@@ -165,39 +165,6 @@ function parseLatLon($sQuery)
|
||||
return array($sFound, $fQueryLat, $fQueryLon);
|
||||
}
|
||||
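The regex changes above swap the literal blanks for \s classes, so the coordinate formats documented in the comments also match when separated by tabs or multiple spaces. A quick sketch of the expected behaviour, using sample values taken from those comments:

<?php
require_once(CONST_BasePath.'/lib/lib.php');

// Degrees + decimal minutes, comma separated.
list($sFound, $fLat, $fLon) = parseLatLon('N 40 26.767, W 79 58.933');
// $fLat ≈ 40.446, $fLon ≈ -79.982

// Decimal degrees with a tab as separator - accepted since the \s change.
list($sFound, $fLat, $fLon) = parseLatLon("40.446\tN 79.982 W");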
|
||||
|
||||
function geometryText2Points($geometry_as_text, $fRadius)
|
||||
{
|
||||
$aPolyPoints = null;
|
||||
if (preg_match('#POLYGON\\(\\(([- 0-9.,]+)#', $geometry_as_text, $aMatch)) {
|
||||
//
|
||||
preg_match_all('/(-?[0-9.]+) (-?[0-9.]+)/', $aMatch[1], $aPolyPoints, PREG_SET_ORDER);
|
||||
//
|
||||
} elseif (preg_match('#LINESTRING\\(([- 0-9.,]+)#', $geometry_as_text, $aMatch)) {
|
||||
//
|
||||
preg_match_all('/(-?[0-9.]+) (-?[0-9.]+)/', $aMatch[1], $aPolyPoints, PREG_SET_ORDER);
|
||||
//
|
||||
} elseif (preg_match('#MULTIPOLYGON\\(\\(\\(([- 0-9.,]+)#', $geometry_as_text, $aMatch)) {
|
||||
//
|
||||
preg_match_all('/(-?[0-9.]+) (-?[0-9.]+)/', $aMatch[1], $aPolyPoints, PREG_SET_ORDER);
|
||||
//
|
||||
} elseif (preg_match('#POINT\\((-?[0-9.]+) (-?[0-9.]+)\\)#', $geometry_as_text, $aMatch)) {
|
||||
//
|
||||
$aPolyPoints = createPointsAroundCenter($aMatch[1], $aMatch[2], $fRadius);
|
||||
//
|
||||
}
|
||||
|
||||
if (isset($aPolyPoints)) {
|
||||
$aResultPoints = array();
|
||||
foreach ($aPolyPoints as $aPoint) {
|
||||
$aResultPoints[] = array($aPoint[1], $aPoint[2]);
|
||||
}
|
||||
return $aResultPoints;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
function createPointsAroundCenter($fLon, $fLat, $fRadius)
|
||||
{
|
||||
$iSteps = max(8, min(100, ($fRadius * 40000)^2));
|
||||
|
||||
@@ -36,7 +36,7 @@ function logStart(&$oDB, $sType = '', $sQuery = '', $aLanguageList = array())
|
||||
$sUserAgent = $_SERVER['HTTP_USER_AGENT'];
|
||||
else $sUserAgent = '';
|
||||
$sSQL = 'insert into new_query_log (type,starttime,query,ipaddress,useragent,language,format,searchterm)';
|
||||
$sSQL .= ' values ('.
|
||||
$sSQL .= ' values (';
|
||||
$sSQL .= join(',', $oDB->getDBQuotedList(array(
|
||||
$sType,
|
||||
$hLog[0],
|
||||
@@ -48,7 +48,7 @@ function logStart(&$oDB, $sType = '', $sQuery = '', $aLanguageList = array())
|
||||
$hLog[3]
|
||||
)));
|
||||
$sSQL .= ')';
|
||||
$oDB->query($sSQL);
|
||||
$oDB->exec($sSQL);
|
||||
}
|
||||
|
||||
return $hLog;
|
||||
@@ -67,7 +67,7 @@ function logEnd(&$oDB, $hLog, $iNumResults)
|
||||
$sSQL .= ' where starttime = '.$oDB->getDBQuoted($hLog[0]);
|
||||
$sSQL .= ' and ipaddress = '.$oDB->getDBQuoted($hLog[1]);
|
||||
$sSQL .= ' and query = '.$oDB->getDBQuoted($hLog[2]);
|
||||
$oDB->query($sSQL);
|
||||
$oDB->exec($sSQL);
|
||||
}
|
||||
|
||||
if (CONST_Log_File) {
|
||||
|
||||
@@ -12,6 +12,8 @@ function formatOSMType($sType, $bIncludeExternal = true)
|
||||
if ($sType == 'T') return 'way';
|
||||
if ($sType == 'I') return 'way';
|
||||
|
||||
// not handled: P, L
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
@@ -33,20 +35,39 @@ function wikipediaLink($aFeature)
|
||||
return '';
|
||||
}
|
||||
|
||||
function detailsLink($aFeature, $sTitle = false)
|
||||
function detailsLink($aFeature, $sTitle = false, $sExtraProperties = false)
|
||||
{
|
||||
if (!$aFeature['place_id']) return '';
|
||||
|
||||
return '<a href="details.php?place_id='.$aFeature['place_id'].'">'.($sTitle?$sTitle:$aFeature['place_id']).'</a>';
|
||||
$sHtml = '<a ';
|
||||
if ($sExtraProperties) {
|
||||
$sHtml .= $sExtraProperties.' ';
|
||||
}
|
||||
|
||||
$sHtml .= 'href="details.php?place_id='.$aFeature['place_id'].'">'.($sTitle?$sTitle:$aFeature['place_id']).'</a>';
|
||||
|
||||
return $sHtml;
|
||||
}
|
||||
|
||||
function detailsPermaLink($aFeature, $sRefText = false)
|
||||
function detailsPermaLink($aFeature, $sRefText = false, $sExtraProperties = false)
|
||||
{
|
||||
$sOSMType = formatOSMType($aFeature['osm_type'], false);
|
||||
|
||||
if ($sOSMType) {
|
||||
$sLabel = $sRefText ? $sRefText : $sOSMType.' '.$aFeature['osm_id'];
|
||||
return '<a href="details.php?osmtype='.$aFeature['osm_type'].'&osmid='.$aFeature['osm_id'].'&class='.$aFeature['class'].'">'.$sLabel.'</a>';
|
||||
$sHtml = '<a ';
|
||||
if ($sExtraProperties) {
|
||||
$sHtml .= $sExtraProperties.' ';
|
||||
}
|
||||
$sHtml .= 'href="details.php?osmtype='.$aFeature['osm_type']
|
||||
.'&osmid='.$aFeature['osm_id'].'&class='.$aFeature['class'].'">';
|
||||
|
||||
if ($sRefText) {
|
||||
$sHtml .= $sRefText.'</a>';
|
||||
} else {
|
||||
$sHtml .= $sOSMType.' '.$aFeature['osm_id'].'</a>';
|
||||
}
|
||||
|
||||
return $sHtml;
|
||||
}
|
||||
return '';
|
||||
return detailsLink($aFeature, $sRefText, $sExtraProperties);
|
||||
}
|
||||
|
||||
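Both helpers now accept an optional string of extra HTML attributes, and detailsPermaLink() falls back to the place_id link when the feature has no usable OSM type. A hypothetical call, assuming the helper functions from this file are loaded:

<?php
// Hypothetical feature array; only the keys read by the helpers are shown.
$aFeature = array(
    'place_id' => 1234,
    'osm_type' => 'N',
    'osm_id'   => 5678,
    'class'    => 'place'
);

// <a class="btn btn-default" href="details.php?place_id=1234">details</a>
echo detailsLink($aFeature, 'details', 'class="btn btn-default"');

// Links to the OSM object, or falls back to detailsLink() for synthetic
// features without an osm_type.
echo detailsPermaLink($aFeature, 'permalink', 'class="btn btn-default"');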
@@ -3,6 +3,7 @@
|
||||
namespace Nominatim\Setup;
|
||||
|
||||
require_once(CONST_BasePath.'/lib/setup/AddressLevelParser.php');
|
||||
require_once(CONST_BasePath.'/lib/Shell.php');
|
||||
|
||||
class SetupFunctions
|
||||
{
|
||||
@@ -10,11 +11,13 @@ class SetupFunctions
|
||||
protected $iInstances;
|
||||
protected $sModulePath;
|
||||
protected $aDSNInfo;
|
||||
protected $bQuiet;
|
||||
protected $bVerbose;
|
||||
protected $sIgnoreErrors;
|
||||
protected $bEnableDiffUpdates;
|
||||
protected $bEnableDebugStatements;
|
||||
protected $bNoPartitions;
|
||||
protected $bDrop;
|
||||
protected $oDB = null;
|
||||
|
||||
public function __construct(array $aCMDResult)
|
||||
@@ -29,10 +32,13 @@ class SetupFunctions
|
||||
warn('resetting threads to '.$this->iInstances);
|
||||
}
|
||||
|
||||
// Assume we can steal all the cache memory in the box (unless told otherwise)
|
||||
if (isset($aCMDResult['osm2pgsql-cache'])) {
|
||||
$this->iCacheMemory = $aCMDResult['osm2pgsql-cache'];
|
||||
} elseif (!is_null(CONST_Osm2pgsql_Flatnode_File)) {
|
||||
// When flatnode files are enabled then disable cache per default.
|
||||
$this->iCacheMemory = 0;
|
||||
} else {
|
||||
// Otherwise: Assume we can steal all the cache memory in the box.
|
||||
$this->iCacheMemory = getCacheMemoryMB();
|
||||
}
|
||||
|
||||
@@ -46,6 +52,7 @@ class SetupFunctions
|
||||
}
|
||||
|
||||
// setting member variables based on command line options stored in $aCMDResult
|
||||
$this->bQuiet = isset($aCMDResult['quiet']) && $aCMDResult['quiet'];
|
||||
$this->bVerbose = $aCMDResult['verbose'];
|
||||
|
||||
//setting default values which are not set by the update.php array
|
||||
@@ -69,6 +76,8 @@ class SetupFunctions
|
||||
} else {
|
||||
$this->bEnableDiffUpdates = false;
|
||||
}
|
||||
|
||||
$this->bDrop = isset($aCMDResult['drop']) && $aCMDResult['drop'];
|
||||
}
|
||||
|
||||
public function createDB()
|
||||
@@ -76,21 +85,27 @@ class SetupFunctions
|
||||
info('Create DB');
|
||||
$oDB = new \Nominatim\DB;
|
||||
|
||||
if ($oDB->databaseExists()) {
|
||||
if ($oDB->checkConnection()) {
|
||||
fail('database already exists ('.CONST_Database_DSN.')');
|
||||
}
|
||||
|
||||
$sCreateDBCmd = 'createdb -E UTF-8 -p '.$this->aDSNInfo['port'].' '.$this->aDSNInfo['database'];
|
||||
$oCmd = (new \Nominatim\Shell('createdb'))
|
||||
->addParams('-E', 'UTF-8')
|
||||
->addParams('-p', $this->aDSNInfo['port']);
|
||||
|
||||
if (isset($this->aDSNInfo['username'])) {
|
||||
$sCreateDBCmd .= ' -U '.$this->aDSNInfo['username'];
|
||||
$oCmd->addParams('-U', $this->aDSNInfo['username']);
|
||||
}
|
||||
if (isset($this->aDSNInfo['password'])) {
|
||||
$oCmd->addEnvPair('PGPASSWORD', $this->aDSNInfo['password']);
|
||||
}
|
||||
|
||||
if (isset($this->aDSNInfo['hostspec'])) {
|
||||
$sCreateDBCmd .= ' -h '.$this->aDSNInfo['hostspec'];
|
||||
$oCmd->addParams('-h', $this->aDSNInfo['hostspec']);
|
||||
}
|
||||
$oCmd->addParams($this->aDSNInfo['database']);
|
||||
|
||||
$result = $this->runWithPgEnv($sCreateDBCmd);
|
||||
if ($result != 0) fail('Error executing external command: '.$sCreateDBCmd);
|
||||
$result = $oCmd->run();
|
||||
if ($result != 0) fail('Error executing external command: '.$oCmd->escapedCmd());
|
||||
}
|
||||
|
||||
public function connect()
|
||||
@@ -106,34 +121,19 @@ class SetupFunctions
|
||||
$fPostgresVersion = $this->oDB->getPostgresVersion();
|
||||
echo 'Postgres version found: '.$fPostgresVersion."\n";
|
||||
|
||||
if ($fPostgresVersion < 9.01) {
|
||||
fail('Minimum supported version of Postgresql is 9.1.');
|
||||
if ($fPostgresVersion < 9.03) {
|
||||
fail('Minimum supported version of Postgresql is 9.3.');
|
||||
}
|
||||
|
||||
$this->pgsqlRunScript('CREATE EXTENSION IF NOT EXISTS hstore');
|
||||
$this->pgsqlRunScript('CREATE EXTENSION IF NOT EXISTS postgis');
|
||||
|
||||
// For extratags and namedetails the hstore_to_json converter is
|
||||
// needed which is only available from Postgresql 9.3+. For older
|
||||
// versions add a dummy function that returns nothing.
|
||||
$iNumFunc = $this->oDB->getOne("select count(*) from pg_proc where proname = 'hstore_to_json'");
|
||||
|
||||
if ($iNumFunc == 0) {
|
||||
$this->pgsqlRunScript("create function hstore_to_json(dummy hstore) returns text AS 'select null::text' language sql immutable");
|
||||
warn('Postgresql is too old. extratags and namedetails API not available.');
|
||||
}
|
||||
|
||||
|
||||
$fPostgisVersion = $this->oDB->getPostgisVersion();
|
||||
echo 'Postgis version found: '.$fPostgisVersion."\n";
|
||||
|
||||
if ($fPostgisVersion < 2.1) {
|
||||
// Functions were renamed in 2.1 and throw an annoying deprecation warning
|
||||
$this->pgsqlRunScript('ALTER FUNCTION st_line_interpolate_point(geometry, double precision) RENAME TO ST_LineInterpolatePoint');
|
||||
$this->pgsqlRunScript('ALTER FUNCTION ST_Line_Locate_Point(geometry, geometry) RENAME TO ST_LineLocatePoint');
|
||||
}
|
||||
if ($fPostgisVersion < 2.2) {
|
||||
$this->pgsqlRunScript('ALTER FUNCTION ST_Distance_Spheroid(geometry, geometry, spheroid) RENAME TO ST_DistanceSpheroid');
|
||||
echo "Minimum required Postgis version 2.2\n";
|
||||
exit(1);
|
||||
}
|
||||
|
||||
$i = $this->oDB->getOne("select count(*) from pg_user where usename = '".CONST_Database_Web_User."'");
|
||||
@@ -152,73 +152,85 @@ class SetupFunctions
|
||||
exit(1);
|
||||
}
|
||||
$this->pgsqlRunScriptFile(CONST_BasePath.'/data/country_name.sql');
|
||||
$this->pgsqlRunScriptFile(CONST_BasePath.'/data/country_osm_grid.sql.gz');
|
||||
$this->pgsqlRunScriptFile(CONST_ExtraDataPath.'/country_osm_grid.sql.gz');
|
||||
$this->pgsqlRunScriptFile(CONST_BasePath.'/data/gb_postcode_table.sql');
|
||||
$this->pgsqlRunScriptFile(CONST_BasePath.'/data/us_postcode_table.sql');
|
||||
|
||||
$sPostcodeFilename = CONST_BasePath.'/data/gb_postcode_data.sql.gz';
|
||||
if (file_exists($sPostcodeFilename)) {
|
||||
$this->pgsqlRunScriptFile($sPostcodeFilename);
|
||||
} else {
|
||||
warn('optional external UK postcode table file ('.$sPostcodeFilename.') not found. Skipping.');
|
||||
warn('optional external GB postcode table file ('.$sPostcodeFilename.') not found. Skipping.');
|
||||
}
|
||||
|
||||
if (CONST_Use_Extra_US_Postcodes) {
|
||||
$this->pgsqlRunScriptFile(CONST_BasePath.'/data/us_postcode.sql');
|
||||
$sPostcodeFilename = CONST_BasePath.'/data/us_postcode_data.sql.gz';
|
||||
if (file_exists($sPostcodeFilename)) {
|
||||
$this->pgsqlRunScriptFile($sPostcodeFilename);
|
||||
} else {
|
||||
warn('optional external US postcode table file ('.$sPostcodeFilename.') not found. Skipping.');
|
||||
}
|
||||
|
||||
if ($this->bNoPartitions) {
|
||||
$this->pgsqlRunScript('update country_name set partition = 0');
|
||||
}
|
||||
|
||||
// the following will be needed by createFunctions later but
|
||||
// is only defined in the subsequently called createTables
|
||||
// Create dummies here that will be overwritten by the proper
|
||||
// versions in create-tables.
|
||||
$this->pgsqlRunScript('CREATE TABLE IF NOT EXISTS place_boundingbox ()');
|
||||
$this->pgsqlRunScript('CREATE TYPE wikipedia_article_match AS ()', false);
|
||||
}
|
||||
|
||||
public function importData($sOSMFile)
|
||||
{
|
||||
info('Import data');
|
||||
|
||||
$osm2pgsql = CONST_Osm2pgsql_Binary;
|
||||
if (!file_exists($osm2pgsql)) {
|
||||
if (!file_exists(CONST_Osm2pgsql_Binary)) {
|
||||
echo "Check CONST_Osm2pgsql_Binary in your local settings file.\n";
|
||||
echo "Normally you should not need to set this manually.\n";
|
||||
fail("osm2pgsql not found in '$osm2pgsql'");
|
||||
fail("osm2pgsql not found in '".CONST_Osm2pgsql_Binary."'");
|
||||
}
|
||||
|
||||
$osm2pgsql .= ' -S '.CONST_Import_Style;
|
||||
$oCmd = new \Nominatim\Shell(CONST_Osm2pgsql_Binary);
|
||||
$oCmd->addParams('--style', CONST_Import_Style);
|
||||
|
||||
if (!is_null(CONST_Osm2pgsql_Flatnode_File) && CONST_Osm2pgsql_Flatnode_File) {
|
||||
$osm2pgsql .= ' --flat-nodes '.CONST_Osm2pgsql_Flatnode_File;
|
||||
$oCmd->addParams('--flat-nodes', CONST_Osm2pgsql_Flatnode_File);
|
||||
}
|
||||
if (CONST_Tablespace_Osm2pgsql_Data) {
|
||||
$oCmd->addParams('--tablespace-slim-data', CONST_Tablespace_Osm2pgsql_Data);
|
||||
}
|
||||
if (CONST_Tablespace_Osm2pgsql_Index) {
|
||||
$oCmd->addParams('--tablespace-slim-index', CONST_Tablespace_Osm2pgsql_Index);
|
||||
}
|
||||
if (CONST_Tablespace_Place_Data) {
|
||||
$oCmd->addParams('--tablespace-main-data', CONST_Tablespace_Place_Data);
|
||||
}
|
||||
if (CONST_Tablespace_Place_Index) {
|
||||
$oCmd->addParams('--tablespace-main-index', CONST_Tablespace_Place_Index);
|
||||
}
|
||||
$oCmd->addParams('--latlong', '--slim', '--create');
|
||||
$oCmd->addParams('--output', 'gazetteer');
|
||||
$oCmd->addParams('--hstore');
|
||||
$oCmd->addParams('--number-processes', 1);
|
||||
$oCmd->addParams('--cache', $this->iCacheMemory);
|
||||
$oCmd->addParams('--port', $this->aDSNInfo['port']);
|
||||
|
||||
if (CONST_Tablespace_Osm2pgsql_Data)
|
||||
$osm2pgsql .= ' --tablespace-slim-data '.CONST_Tablespace_Osm2pgsql_Data;
|
||||
if (CONST_Tablespace_Osm2pgsql_Index)
|
||||
$osm2pgsql .= ' --tablespace-slim-index '.CONST_Tablespace_Osm2pgsql_Index;
|
||||
if (CONST_Tablespace_Place_Data)
|
||||
$osm2pgsql .= ' --tablespace-main-data '.CONST_Tablespace_Place_Data;
|
||||
if (CONST_Tablespace_Place_Index)
|
||||
$osm2pgsql .= ' --tablespace-main-index '.CONST_Tablespace_Place_Index;
|
||||
$osm2pgsql .= ' -lsc -O gazetteer --hstore --number-processes 1';
|
||||
$osm2pgsql .= ' -C '.$this->iCacheMemory;
|
||||
$osm2pgsql .= ' -P '.$this->aDSNInfo['port'];
|
||||
if (isset($this->aDSNInfo['username'])) {
|
||||
$osm2pgsql .= ' -U '.$this->aDSNInfo['username'];
|
||||
$oCmd->addParams('--username', $this->aDSNInfo['username']);
|
||||
}
|
||||
if (isset($this->aDSNInfo['password'])) {
|
||||
$oCmd->addEnvPair('PGPASSWORD', $this->aDSNInfo['password']);
|
||||
}
|
||||
if (isset($this->aDSNInfo['hostspec'])) {
|
||||
$osm2pgsql .= ' -H '.$this->aDSNInfo['hostspec'];
|
||||
$oCmd->addParams('--host', $this->aDSNInfo['hostspec']);
|
||||
}
|
||||
$osm2pgsql .= ' -d '.$this->aDSNInfo['database'].' '.$sOSMFile;
|
||||
|
||||
$this->runWithPgEnv($osm2pgsql);
|
||||
$oCmd->addParams('--database', $this->aDSNInfo['database']);
|
||||
$oCmd->addParams($sOSMFile);
|
||||
$oCmd->run();
|
||||
|
||||
if (!$this->sIgnoreErrors && !$this->oDB->getRow('select * from place limit 1')) {
|
||||
fail('No Data');
|
||||
}
|
||||
|
||||
if ($this->bDrop) {
|
||||
$this->dropTable('planet_osm_nodes');
|
||||
$this->removeFlatnodeFile();
|
||||
}
|
||||
}
|
||||
|
||||
public function createFunctions()
|
||||
@@ -236,88 +248,34 @@ class SetupFunctions
|
||||
info('Create Tables');
|
||||
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/tables.sql');
|
||||
$sTemplate = str_replace('{www-user}', CONST_Database_Web_User, $sTemplate);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:address-data}',
|
||||
CONST_Tablespace_Address_Data,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:address-index}',
|
||||
CONST_Tablespace_Address_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:search-data}',
|
||||
CONST_Tablespace_Search_Data,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:search-index}',
|
||||
CONST_Tablespace_Search_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-data}',
|
||||
CONST_Tablespace_Aux_Data,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-index}',
|
||||
CONST_Tablespace_Aux_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceSqlPatterns($sTemplate);
|
||||
|
||||
$this->pgsqlRunScript($sTemplate, false);
|
||||
|
||||
if ($bReverseOnly) {
|
||||
$this->pgExec('DROP TABLE search_name');
|
||||
$this->dropTable('search_name');
|
||||
}
|
||||
|
||||
$oAlParser = new AddressLevelParser(CONST_Address_Level_Config);
|
||||
$oAlParser->createTable($this->oDB, 'address_levels');
|
||||
}
|
||||
|
||||
public function createTableTriggers()
|
||||
{
|
||||
info('Create Tables');
|
||||
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/table-triggers.sql');
|
||||
$sTemplate = $this->replaceSqlPatterns($sTemplate);
|
||||
|
||||
$this->pgsqlRunScript($sTemplate, false);
|
||||
}
|
||||
|
||||
public function createPartitionTables()
|
||||
{
|
||||
info('Create Partition Tables');
|
||||
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/partition-tables.src.sql');
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:address-data}',
|
||||
CONST_Tablespace_Address_Data,
|
||||
$sTemplate
|
||||
);
|
||||
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:address-index}',
|
||||
CONST_Tablespace_Address_Index,
|
||||
$sTemplate
|
||||
);
|
||||
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:search-data}',
|
||||
CONST_Tablespace_Search_Data,
|
||||
$sTemplate
|
||||
);
|
||||
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:search-index}',
|
||||
CONST_Tablespace_Search_Index,
|
||||
$sTemplate
|
||||
);
|
||||
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-data}',
|
||||
CONST_Tablespace_Aux_Data,
|
||||
$sTemplate
|
||||
);
|
||||
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-index}',
|
||||
CONST_Tablespace_Aux_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceSqlPatterns($sTemplate);
|
||||
|
||||
$this->pgsqlRunPartitionScript($sTemplate);
|
||||
}
|
||||
@@ -332,19 +290,14 @@ class SetupFunctions
|
||||
|
||||
public function importWikipediaArticles()
|
||||
{
|
||||
$sWikiArticlesFile = CONST_Wikipedia_Data_Path.'/wikipedia_article.sql.bin';
|
||||
$sWikiRedirectsFile = CONST_Wikipedia_Data_Path.'/wikipedia_redirect.sql.bin';
|
||||
$sWikiArticlesFile = CONST_Wikipedia_Data_Path.'/wikimedia-importance.sql.gz';
|
||||
if (file_exists($sWikiArticlesFile)) {
|
||||
info('Importing wikipedia articles');
|
||||
$this->pgsqlRunDropAndRestore($sWikiArticlesFile);
|
||||
info('Importing wikipedia articles and redirects');
|
||||
$this->dropTable('wikipedia_article');
|
||||
$this->dropTable('wikipedia_redirect');
|
||||
$this->pgsqlRunScriptFile($sWikiArticlesFile);
|
||||
} else {
|
||||
warn('wikipedia article dump file not found - places will have default importance');
|
||||
}
|
||||
if (file_exists($sWikiRedirectsFile)) {
|
||||
info('Importing wikipedia redirects');
|
||||
$this->pgsqlRunDropAndRestore($sWikiRedirectsFile);
|
||||
} else {
|
||||
warn('wikipedia redirect dump file not found - some place importance values may be missing');
|
||||
warn('wikipedia importance dump file not found - places will have default importance');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -352,27 +305,25 @@ class SetupFunctions
|
||||
{
|
||||
info('Drop old Data');
|
||||
|
||||
$this->pgExec('TRUNCATE word');
|
||||
$this->oDB->exec('TRUNCATE word');
|
||||
echo '.';
|
||||
$this->pgExec('TRUNCATE placex');
|
||||
$this->oDB->exec('TRUNCATE placex');
|
||||
echo '.';
|
||||
$this->pgExec('TRUNCATE location_property_osmline');
|
||||
$this->oDB->exec('TRUNCATE location_property_osmline');
|
||||
echo '.';
|
||||
$this->pgExec('TRUNCATE place_addressline');
|
||||
$this->oDB->exec('TRUNCATE place_addressline');
|
||||
echo '.';
|
||||
$this->pgExec('TRUNCATE place_boundingbox');
|
||||
echo '.';
|
||||
$this->pgExec('TRUNCATE location_area');
|
||||
$this->oDB->exec('TRUNCATE location_area');
|
||||
echo '.';
|
||||
if (!$this->dbReverseOnly()) {
|
||||
$this->pgExec('TRUNCATE search_name');
|
||||
$this->oDB->exec('TRUNCATE search_name');
|
||||
echo '.';
|
||||
}
|
||||
$this->pgExec('TRUNCATE search_name_blank');
|
||||
$this->oDB->exec('TRUNCATE search_name_blank');
|
||||
echo '.';
|
||||
$this->pgExec('DROP SEQUENCE seq_place');
|
||||
$this->oDB->exec('DROP SEQUENCE seq_place');
|
||||
echo '.';
|
||||
$this->pgExec('CREATE SEQUENCE seq_place start 100000');
|
||||
$this->oDB->exec('CREATE SEQUENCE seq_place start 100000');
|
||||
echo '.';
|
||||
|
||||
$sSQL = 'select distinct partition from country_name';
|
||||
@@ -380,14 +331,14 @@ class SetupFunctions
|
||||
|
||||
if (!$this->bNoPartitions) $aPartitions[] = 0;
|
||||
foreach ($aPartitions as $sPartition) {
|
||||
$this->pgExec('TRUNCATE location_road_'.$sPartition);
|
||||
$this->oDB->exec('TRUNCATE location_road_'.$sPartition);
|
||||
echo '.';
|
||||
}
|
||||
|
||||
// used by getorcreate_word_id to ignore frequent partial words
|
||||
$sSQL = 'CREATE OR REPLACE FUNCTION get_maxwordfreq() RETURNS integer AS ';
|
||||
$sSQL .= '$$ SELECT '.CONST_Max_Word_Frequency.' as maxwordfreq; $$ LANGUAGE SQL IMMUTABLE';
|
||||
$this->pgExec($sSQL);
|
||||
$this->oDB->exec($sSQL);
|
||||
echo ".\n";
|
||||
|
||||
// pre-create the word list
|
||||
@@ -479,18 +430,15 @@ class SetupFunctions
|
||||
{
|
||||
info('Import Tiger data');
|
||||
|
||||
$aFilenames = glob(CONST_Tiger_Data_Path.'/*.sql');
|
||||
info('Found '.count($aFilenames).' SQL files in path '.CONST_Tiger_Data_Path);
|
||||
if (empty($aFilenames)) {
|
||||
warn('Tiger data import selected but no files found in path '.CONST_Tiger_Data_Path);
|
||||
return;
|
||||
}
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/tiger_import_start.sql');
|
||||
$sTemplate = str_replace('{www-user}', CONST_Database_Web_User, $sTemplate);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-data}',
|
||||
CONST_Tablespace_Aux_Data,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-index}',
|
||||
CONST_Tablespace_Aux_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceSqlPatterns($sTemplate);
|
||||
|
||||
$this->pgsqlRunScript($sTemplate, false);
|
||||
|
||||
$aDBInstances = array();
|
||||
@@ -503,7 +451,7 @@ class SetupFunctions
|
||||
pg_ping($aDBInstances[$i]);
|
||||
}
|
||||
|
||||
foreach (glob(CONST_Tiger_Data_Path.'/*.sql') as $sFile) {
|
||||
foreach ($aFilenames as $sFile) {
|
||||
echo $sFile.': ';
|
||||
$hFile = fopen($sFile, 'r');
|
||||
$sSQL = fgets($hFile, 100000);
|
||||
@@ -543,24 +491,15 @@ class SetupFunctions
|
||||
|
||||
info('Creating indexes on Tiger data');
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/tiger_import_finish.sql');
|
||||
$sTemplate = str_replace('{www-user}', CONST_Database_Web_User, $sTemplate);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-data}',
|
||||
CONST_Tablespace_Aux_Data,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-index}',
|
||||
CONST_Tablespace_Aux_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceSqlPatterns($sTemplate);
|
||||
|
||||
$this->pgsqlRunScript($sTemplate, false);
|
||||
}
|
||||
|
||||
public function calculatePostcodes($bCMDResultAll)
|
||||
{
|
||||
info('Calculate Postcodes');
|
||||
$this->pgExec('TRUNCATE location_postcode');
|
||||
$this->oDB->exec('TRUNCATE location_postcode');
|
||||
|
||||
$sSQL = 'INSERT INTO location_postcode';
|
||||
$sSQL .= ' (place_id, indexed_status, country_code, postcode, geometry) ';
|
||||
@@ -571,19 +510,17 @@ class SetupFunctions
|
||||
$sSQL .= " WHERE address ? 'postcode' AND address->'postcode' NOT SIMILAR TO '%(,|;)%'";
|
||||
$sSQL .= ' AND geometry IS NOT null';
|
||||
$sSQL .= ' GROUP BY country_code, pc';
|
||||
$this->pgExec($sSQL);
|
||||
$this->oDB->exec($sSQL);
|
||||
|
||||
if (CONST_Use_Extra_US_Postcodes) {
|
||||
// only add postcodes that are not yet available in OSM
|
||||
$sSQL = 'INSERT INTO location_postcode';
|
||||
$sSQL .= ' (place_id, indexed_status, country_code, postcode, geometry) ';
|
||||
$sSQL .= "SELECT nextval('seq_place'), 1, 'us', postcode,";
|
||||
$sSQL .= ' ST_SetSRID(ST_Point(x,y),4326)';
|
||||
$sSQL .= ' FROM us_postcode WHERE postcode NOT IN';
|
||||
$sSQL .= ' (SELECT postcode FROM location_postcode';
|
||||
$sSQL .= " WHERE country_code = 'us')";
|
||||
$this->pgExec($sSQL);
|
||||
}
|
||||
// only add postcodes that are not yet available in OSM
|
||||
$sSQL = 'INSERT INTO location_postcode';
|
||||
$sSQL .= ' (place_id, indexed_status, country_code, postcode, geometry) ';
|
||||
$sSQL .= "SELECT nextval('seq_place'), 1, 'us', postcode,";
|
||||
$sSQL .= ' ST_SetSRID(ST_Point(x,y),4326)';
|
||||
$sSQL .= ' FROM us_postcode WHERE postcode NOT IN';
|
||||
$sSQL .= ' (SELECT postcode FROM location_postcode';
|
||||
$sSQL .= " WHERE country_code = 'us')";
|
||||
$this->oDB->exec($sSQL);
|
||||
|
||||
// add missing postcodes for GB (if available)
|
||||
$sSQL = 'INSERT INTO location_postcode';
|
||||
@@ -592,80 +529,94 @@ class SetupFunctions
|
||||
$sSQL .= ' FROM gb_postcode WHERE postcode NOT IN';
|
||||
$sSQL .= ' (SELECT postcode FROM location_postcode';
|
||||
$sSQL .= " WHERE country_code = 'gb')";
|
||||
$this->pgExec($sSQL);
|
||||
$this->oDB->exec($sSQL);
|
||||
|
||||
if (!$bCMDResultAll) {
|
||||
$sSQL = "DELETE FROM word WHERE class='place' and type='postcode'";
|
||||
$sSQL .= 'and word NOT IN (SELECT postcode FROM location_postcode)';
|
||||
$this->pgExec($sSQL);
|
||||
$this->oDB->exec($sSQL);
|
||||
}
|
||||
|
||||
$sSQL = 'SELECT count(getorcreate_postcode_id(v)) FROM ';
|
||||
$sSQL .= '(SELECT distinct(postcode) as v FROM location_postcode) p';
|
||||
$this->pgExec($sSQL);
|
||||
$this->oDB->exec($sSQL);
|
||||
}
|
||||
|
||||
public function index($bIndexNoanalyse)
|
||||
{
|
||||
$sOutputFile = '';
|
||||
$sBaseCmd = CONST_InstallPath.'/nominatim/nominatim -i -d '.$this->aDSNInfo['database'].' -P '
|
||||
.$this->aDSNInfo['port'].' -t '.$this->iInstances.$sOutputFile;
|
||||
$oBaseCmd = (new \Nominatim\Shell(CONST_BasePath.'/nominatim/nominatim.py'))
|
||||
->addParams('--database', $this->aDSNInfo['database'])
|
||||
->addParams('--port', $this->aDSNInfo['port'])
|
||||
->addParams('--threads', $this->iInstances);
|
||||
|
||||
if (!$this->bQuiet) {
|
||||
$oBaseCmd->addParams('-v');
|
||||
}
|
||||
if ($this->bVerbose) {
|
||||
$oBaseCmd->addParams('-v');
|
||||
}
|
||||
if (isset($this->aDSNInfo['hostspec'])) {
|
||||
$sBaseCmd .= ' -H '.$this->aDSNInfo['hostspec'];
|
||||
$oBaseCmd->addParams('--host', $this->aDSNInfo['hostspec']);
|
||||
}
|
||||
if (isset($this->aDSNInfo['username'])) {
|
||||
$sBaseCmd .= ' -U '.$this->aDSNInfo['username'];
|
||||
$oBaseCmd->addParams('--user', $this->aDSNInfo['username']);
|
||||
}
|
||||
if (isset($this->aDSNInfo['password'])) {
|
||||
$oBaseCmd->addEnvPair('PGPASSWORD', $this->aDSNInfo['password']);
|
||||
}
|
||||
|
||||
info('Index ranks 0 - 4');
|
||||
$iStatus = $this->runWithPgEnv($sBaseCmd.' -R 4');
|
||||
$oCmd = (clone $oBaseCmd)->addParams('--maxrank', 4);
|
||||
echo $oCmd->escapedCmd();
|
||||
|
||||
$iStatus = $oCmd->run();
|
||||
if ($iStatus != 0) {
|
||||
fail('error status ' . $iStatus . ' running nominatim!');
|
||||
}
|
||||
if (!$bIndexNoanalyse) $this->pgsqlRunScript('ANALYSE');
|
||||
|
||||
info('Index ranks 5 - 25');
|
||||
$iStatus = $this->runWithPgEnv($sBaseCmd.' -r 5 -R 25');
|
||||
$oCmd = (clone $oBaseCmd)->addParams('--minrank', 5, '--maxrank', 25);
|
||||
$iStatus = $oCmd->run();
|
||||
if ($iStatus != 0) {
|
||||
fail('error status ' . $iStatus . ' running nominatim!');
|
||||
}
|
||||
if (!$bIndexNoanalyse) $this->pgsqlRunScript('ANALYSE');
|
||||
|
||||
info('Index ranks 26 - 30');
|
||||
$iStatus = $this->runWithPgEnv($sBaseCmd.' -r 26');
|
||||
$oCmd = (clone $oBaseCmd)->addParams('--minrank', 26);
|
||||
$iStatus = $oCmd->run();
|
||||
if ($iStatus != 0) {
|
||||
fail('error status ' . $iStatus . ' running nominatim!');
|
||||
}
|
||||
|
||||
info('Index postcodes');
|
||||
$sSQL = 'UPDATE location_postcode SET indexed_status = 0';
|
||||
$this->pgExec($sSQL);
|
||||
$this->oDB->exec($sSQL);
|
||||
}
|
||||
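The indexing step now clones one pre-configured Shell command per rank range instead of concatenating option strings. The pattern in isolation, with an illustrative database name:

<?php
require_once(CONST_BasePath.'/lib/Shell.php');

// Options shared by all indexing passes are set up once ...
$oBaseCmd = (new \Nominatim\Shell(CONST_BasePath.'/nominatim/nominatim.py'))
            ->addParams('--database', 'nominatim')
            ->addParams('--threads', 4);

// ... and every pass only adds its own rank limits to a clone, leaving
// the base command untouched for the next pass.
$oCmd = (clone $oBaseCmd)->addParams('--maxrank', 4);
$iStatus = $oCmd->run();

$oCmd = (clone $oBaseCmd)->addParams('--minrank', 5, '--maxrank', 25);
$iStatus = $oCmd->run();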
|
||||
public function createSearchIndices()
|
||||
{
|
||||
info('Create Search indices');
|
||||
|
||||
$sSQL = 'SELECT relname FROM pg_class, pg_index ';
|
||||
$sSQL .= 'WHERE pg_index.indisvalid = false AND pg_index.indexrelid = pg_class.oid';
|
||||
$aInvalidIndices = $this->oDB->getCol($sSQL);
|
||||
|
||||
foreach ($aInvalidIndices as $sIndexName) {
|
||||
info("Cleaning up invalid index $sIndexName");
|
||||
$this->oDB->exec("DROP INDEX $sIndexName;");
|
||||
}
|
||||
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/indices.src.sql');
|
||||
if (!$this->bDrop) {
|
||||
$sTemplate .= file_get_contents(CONST_BasePath.'/sql/indices_updates.src.sql');
|
||||
}
|
||||
if (!$this->dbReverseOnly()) {
|
||||
$sTemplate .= file_get_contents(CONST_BasePath.'/sql/indices_search.src.sql');
|
||||
}
|
||||
$sTemplate = str_replace('{www-user}', CONST_Database_Web_User, $sTemplate);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:address-index}',
|
||||
CONST_Tablespace_Address_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:search-index}',
|
||||
CONST_Tablespace_Search_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceTablespace(
|
||||
'{ts:aux-index}',
|
||||
CONST_Tablespace_Aux_Index,
|
||||
$sTemplate
|
||||
);
|
||||
$sTemplate = $this->replaceSqlPatterns($sTemplate);
|
||||
|
||||
$this->pgsqlRunScript($sTemplate);
|
||||
}
|
||||
|
||||
@@ -724,7 +675,7 @@ class SetupFunctions
|
||||
);
|
||||
|
||||
$aDropTables = array();
|
||||
$aHaveTables = $this->oDB->getCol("SELECT tablename FROM pg_tables WHERE schemaname='public'");
|
||||
$aHaveTables = $this->oDB->getListOfTables();
|
||||
|
||||
foreach ($aHaveTables as $sTable) {
|
||||
$bFound = false;
|
||||
@@ -737,12 +688,14 @@ class SetupFunctions
|
||||
if (!$bFound) array_push($aDropTables, $sTable);
|
||||
}
|
||||
foreach ($aDropTables as $sDrop) {
|
||||
if ($this->bVerbose) echo "Dropping table $sDrop\n";
|
||||
$this->oDB->exec("DROP TABLE $sDrop CASCADE");
|
||||
// ignore warnings/errors as they might be caused by a table having
|
||||
// been deleted already by CASCADE
|
||||
$this->dropTable($sDrop);
|
||||
}
|
||||
|
||||
$this->removeFlatnodeFile();
|
||||
}
|
||||
|
||||
private function removeFlatnodeFile()
|
||||
{
|
||||
if (!is_null(CONST_Osm2pgsql_Flatnode_File) && CONST_Osm2pgsql_Flatnode_File) {
|
||||
if (file_exists(CONST_Osm2pgsql_Flatnode_File)) {
|
||||
if ($this->bVerbose) echo 'Deleting '.CONST_Osm2pgsql_Flatnode_File."\n";
|
||||
@@ -751,22 +704,6 @@ class SetupFunctions
|
||||
}
|
||||
}
|
||||
|
||||
private function pgsqlRunDropAndRestore($sDumpFile)
|
||||
{
|
||||
$sCMD = 'pg_restore -p '.$this->aDSNInfo['port'].' -d '.$this->aDSNInfo['database'].' --no-owner -Fc --clean '.$sDumpFile;
|
||||
if ($this->oDB->getPostgresVersion() >= 9.04) {
|
||||
$sCMD .= ' --if-exists';
|
||||
}
|
||||
if (isset($this->aDSNInfo['hostspec'])) {
|
||||
$sCMD .= ' -h '.$this->aDSNInfo['hostspec'];
|
||||
}
|
||||
if (isset($this->aDSNInfo['username'])) {
|
||||
$sCMD .= ' -U '.$this->aDSNInfo['username'];
|
||||
}
|
||||
|
||||
$this->runWithPgEnv($sCMD);
|
||||
}
|
||||
|
||||
private function pgsqlRunScript($sScript, $bfatal = true)
|
||||
{
|
||||
runSQLScript(
|
||||
@@ -779,7 +716,22 @@ class SetupFunctions
|
||||
|
||||
private function createSqlFunctions()
|
||||
{
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/functions.sql');
|
||||
$sBasePath = CONST_BasePath.'/sql/functions/';
|
||||
$sTemplate = file_get_contents($sBasePath.'utils.sql');
|
||||
$sTemplate .= file_get_contents($sBasePath.'normalization.sql');
|
||||
$sTemplate .= file_get_contents($sBasePath.'ranking.sql');
|
||||
$sTemplate .= file_get_contents($sBasePath.'importance.sql');
|
||||
$sTemplate .= file_get_contents($sBasePath.'address_lookup.sql');
|
||||
$sTemplate .= file_get_contents($sBasePath.'interpolation.sql');
|
||||
if ($this->oDB->tableExists('place')) {
|
||||
$sTemplate .= file_get_contents($sBasePath.'place_triggers.sql');
|
||||
}
|
||||
if ($this->oDB->tableExists('placex')) {
|
||||
$sTemplate .= file_get_contents($sBasePath.'placex_triggers.sql');
|
||||
}
|
||||
if ($this->oDB->tableExists('location_postcode')) {
|
||||
$sTemplate .= file_get_contents($sBasePath.'postcode_triggers.sql');
|
||||
}
|
||||
$sTemplate = str_replace('{modulepath}', $this->sModulePath, $sTemplate);
|
||||
if ($this->bEnableDiffUpdates) {
|
||||
$sTemplate = str_replace('RETURN NEW; -- %DIFFUPDATES%', '--', $sTemplate);
|
||||
@@ -825,19 +777,21 @@ class SetupFunctions
|
||||
{
|
||||
if (!file_exists($sFilename)) fail('unable to find '.$sFilename);
|
||||
|
||||
$sCMD = 'psql -p '.$this->aDSNInfo['port'].' -d '.$this->aDSNInfo['database'];
|
||||
$oCmd = (new \Nominatim\Shell('psql'))
|
||||
->addParams('--port', $this->aDSNInfo['port'])
|
||||
->addParams('--dbname', $this->aDSNInfo['database']);
|
||||
|
||||
if (!$this->bVerbose) {
|
||||
$sCMD .= ' -q';
|
||||
$oCmd->addParams('--quiet');
|
||||
}
|
||||
if (isset($this->aDSNInfo['hostspec'])) {
|
||||
$sCMD .= ' -h '.$this->aDSNInfo['hostspec'];
|
||||
$oCmd->addParams('--host', $this->aDSNInfo['hostspec']);
|
||||
}
|
||||
if (isset($this->aDSNInfo['username'])) {
|
||||
$sCMD .= ' -U '.$this->aDSNInfo['username'];
|
||||
$oCmd->addParams('--username', $this->aDSNInfo['username']);
|
||||
}
|
||||
$aProcEnv = null;
|
||||
if (isset($this->aDSNInfo['password'])) {
|
||||
$aProcEnv = array_merge(array('PGPASSWORD' => $this->aDSNInfo['password']), $_ENV);
|
||||
$oCmd->addEnvPair('PGPASSWORD', $this->aDSNInfo['password']);
|
||||
}
|
||||
$ahGzipPipes = null;
|
||||
if (preg_match('/\\.gz$/', $sFilename)) {
|
||||
@@ -846,12 +800,14 @@ class SetupFunctions
|
||||
1 => array('pipe', 'w'),
|
||||
2 => array('file', '/dev/null', 'a')
|
||||
);
|
||||
$hGzipProcess = proc_open('zcat '.$sFilename, $aDescriptors, $ahGzipPipes);
|
||||
$oZcatCmd = new \Nominatim\Shell('zcat', $sFilename);
|
||||
|
||||
$hGzipProcess = proc_open($oZcatCmd->escapedCmd(), $aDescriptors, $ahGzipPipes);
|
||||
if (!is_resource($hGzipProcess)) fail('unable to start zcat');
|
||||
$aReadPipe = $ahGzipPipes[1];
|
||||
fclose($ahGzipPipes[0]);
|
||||
} else {
|
||||
$sCMD .= ' -f '.$sFilename;
|
||||
$oCmd->addParams('--file', $sFilename);
|
||||
$aReadPipe = array('pipe', 'r');
|
||||
}
|
||||
$aDescriptors = array(
|
||||
@@ -860,7 +816,8 @@ class SetupFunctions
|
||||
2 => array('file', '/dev/null', 'a')
|
||||
);
|
||||
$ahPipes = null;
|
||||
$hProcess = proc_open($sCMD, $aDescriptors, $ahPipes, null, $aProcEnv);
|
||||
|
||||
$hProcess = proc_open($oCmd->escapedCmd(), $aDescriptors, $ahPipes, null, $oCmd->aEnv);
|
||||
if (!is_resource($hProcess)) fail('unable to start pgsql');
|
||||
// TODO: error checking
|
||||
while (!feof($ahPipes[1])) {
|
||||
@@ -877,43 +834,43 @@ class SetupFunctions
|
||||
}
|
||||
}
|
||||
|
||||
private function replaceTablespace($sTemplate, $sTablespace, $sSql)
|
||||
private function replaceSqlPatterns($sSql)
|
||||
{
|
||||
if ($sTablespace) {
|
||||
$sSql = str_replace($sTemplate, 'TABLESPACE "'.$sTablespace.'"', $sSql);
|
||||
} else {
|
||||
$sSql = str_replace($sTemplate, '', $sSql);
|
||||
$sSql = str_replace('{www-user}', CONST_Database_Web_User, $sSql);
|
||||
|
||||
$aPatterns = array(
|
||||
'{ts:address-data}' => CONST_Tablespace_Address_Data,
|
||||
'{ts:address-index}' => CONST_Tablespace_Address_Index,
|
||||
'{ts:search-data}' => CONST_Tablespace_Search_Data,
|
||||
'{ts:search-index}' => CONST_Tablespace_Search_Index,
|
||||
'{ts:aux-data}' => CONST_Tablespace_Aux_Data,
|
||||
'{ts:aux-index}' => CONST_Tablespace_Aux_Index,
|
||||
);
|
||||
|
||||
foreach ($aPatterns as $sPattern => $sTablespace) {
|
||||
if ($sTablespace) {
|
||||
$sSql = str_replace($sPattern, 'TABLESPACE "'.$sTablespace.'"', $sSql);
|
||||
} else {
|
||||
$sSql = str_replace($sPattern, '', $sSql);
|
||||
}
|
||||
}
|
||||
|
||||
return $sSql;
|
||||
}
|
||||
|
||||
private function runWithPgEnv($sCmd)
|
||||
{
|
||||
if ($this->bVerbose) {
|
||||
echo "Execute: $sCmd\n";
|
||||
}
|
||||
|
||||
$aProcEnv = null;
|
||||
|
||||
if (isset($this->aDSNInfo['password'])) {
|
||||
$aProcEnv = array_merge(array('PGPASSWORD' => $this->aDSNInfo['password']), $_ENV);
|
||||
}
|
||||
|
||||
return runWithEnv($sCmd, $aProcEnv);
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the SQL command on the open database.
|
||||
* Drop table with the given name if it exists.
|
||||
*
|
||||
* @param string $sSQL SQL command to execute.
|
||||
* @param string $sName Name of table to remove.
|
||||
*
|
||||
* @return null
|
||||
*
|
||||
* @pre connect() must have been called.
|
||||
*/
|
||||
private function pgExec($sSQL)
|
||||
private function dropTable($sName)
|
||||
{
|
||||
$this->oDB->exec($sSQL);
|
||||
if ($this->bVerbose) echo "Dropping table $sName\n";
|
||||
$this->oDB->deleteTable($sName);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -30,27 +30,14 @@ if (empty($aPlace)) {
|
||||
|
||||
$aFilteredPlaces['properties']['geocoding']['label'] = $aPlace['langaddress'];
|
||||
|
||||
$aFilteredPlaces['properties']['geocoding']['name'] = $aPlace['placename'];
|
||||
if ($aPlace['placename'] !== null) {
|
||||
$aFilteredPlaces['properties']['geocoding']['name'] = $aPlace['placename'];
|
||||
}
|
||||
|
||||
if (isset($aPlace['address'])) {
|
||||
$aFieldMappings = array(
|
||||
'house_number' => 'housenumber',
|
||||
'road' => 'street',
|
||||
'locality' => 'locality',
|
||||
'postcode' => 'postcode',
|
||||
'city' => 'city',
|
||||
'district' => 'district',
|
||||
'county' => 'county',
|
||||
'state' => 'state',
|
||||
'country' => 'country'
|
||||
);
|
||||
|
||||
$aAddressNames = $aPlace['address']->getAddressNames();
|
||||
foreach ($aFieldMappings as $sFrom => $sTo) {
|
||||
if (isset($aAddressNames[$sFrom])) {
|
||||
$aFilteredPlaces['properties']['geocoding'][$sTo] = $aAddressNames[$sFrom];
|
||||
}
|
||||
}
|
||||
$aPlace['address']->addGeocodeJsonAddressParts(
|
||||
$aFilteredPlaces['properties']['geocoding']
|
||||
);
|
||||
|
||||
$aFilteredPlaces['properties']['geocoding']['admin']
|
||||
= $aPlace['address']->getAdminLevels();
|
||||
|
||||
@@ -9,7 +9,7 @@
|
||||
<body id="reverse-page">
|
||||
|
||||
<?php include(CONST_BasePath.'/lib/template/includes/html-top-navigation.php'); ?>
|
||||
|
||||
<div class="top-bar">
|
||||
<form class="form-inline" role="search" accept-charset="UTF-8" action="<?php echo CONST_Website_BaseURL; ?>reverse.php">
|
||||
<div class="form-group">
|
||||
<input name="format" type="hidden" value="html">
|
||||
@@ -64,7 +64,7 @@
|
||||
<a href="<?php echo CONST_Website_BaseURL; ?>search.php">forward search</a>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
|
||||
<div id="content">
|
||||
|
||||
@@ -85,7 +85,7 @@
|
||||
else
|
||||
echo ' <span class="type">('.ucwords(str_replace('_',' ',$aResult['type'])).')</span>';
|
||||
echo '<p>'.$aResult['lat'].','.$aResult['lon'].'</p>';
|
||||
echo ' <a class="btn btn-default btn-xs details" href="details.php?place_id='.$aResult['place_id'].'">details</a>';
|
||||
echo detailsPermaLink($aResult, 'details', 'class="btn btn-default btn-xs details"');
|
||||
echo '</div>';
|
||||
?>
|
||||
</div>
|
||||
|
||||
48
lib/template/deletable-html.php
Normal file
@@ -0,0 +1,48 @@
|
||||
<?php
|
||||
header("content-type: text/html; charset=UTF-8");
|
||||
include(CONST_BasePath.'/lib/template/includes/html-header.php');
|
||||
?>
|
||||
<title>Nominatim Deleted Data</title>
|
||||
<meta name="description" content="List of OSM data that has been deleted" lang="en-US" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div class="container">
|
||||
<h1>Deletable</h1>
|
||||
<p>
|
||||
<?php echo sizeof($aPolygons) ?> objects have been deleted in OSM but are still in the Nominatim database.
|
||||
Also available in <a href="<?php echo CONST_Website_BaseURL; ?>deletable.php?format=json">JSON format</a>.
|
||||
</p>
|
||||
|
||||
<table class="table table-striped table-hover">
|
||||
<?php
|
||||
|
||||
if (!empty($aPolygons)) {
|
||||
echo '<tr>';
|
||||
foreach (array_keys($aPolygons[0]) as $sCol) {
|
||||
echo '<th>'.$sCol.'</th>';
|
||||
}
|
||||
echo '</tr>';
|
||||
foreach ($aPolygons as $aRow) {
|
||||
echo '<tr>';
|
||||
foreach ($aRow as $sCol => $sVal) {
|
||||
switch ($sCol) {
|
||||
case 'osm_id':
|
||||
echo '<td>'.osmLink($aRow).'</td>';
|
||||
break;
|
||||
case 'place_id':
|
||||
echo '<td>'.detailsLink($aRow).'</td>';
|
||||
break;
|
||||
default:
|
||||
echo '<td>'.($sVal?$sVal:'&nbsp;').'</td>';
|
||||
break;
|
||||
}
|
||||
}
|
||||
echo '</tr>';
|
||||
}
|
||||
}
|
||||
?>
|
||||
</table>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
@@ -20,20 +20,25 @@
|
||||
}
|
||||
|
||||
|
||||
function format_distance($fDistance)
|
||||
function format_distance($fDistance, $bInMeters = false)
|
||||
{
|
||||
// $fDistance is in meters
|
||||
if ($fDistance < 1)
|
||||
{
|
||||
return '0';
|
||||
}
|
||||
elseif ($fDistance < 1000)
|
||||
{
|
||||
return'<abbr class="distance" title="'.$fDistance.'">~'.(round($fDistance,0)).' m</abbr>';
|
||||
}
|
||||
else
|
||||
{
|
||||
return'<abbr class="distance" title="'.$fDistance.'">~'.(round($fDistance/1000,1)).' km</abbr>';
|
||||
if ($bInMeters) {
|
||||
// $fDistance is in meters
|
||||
if ($fDistance < 1) {
|
||||
return '0';
|
||||
}
|
||||
elseif ($fDistance < 1000) {
|
||||
return '<abbr class="distance" title="'.$fDistance.' meters">~'.(round($fDistance,0)).' m</abbr>';
|
||||
}
|
||||
else {
|
||||
return '<abbr class="distance" title="'.$fDistance.' meters">~'.(round($fDistance/1000,1)).' km</abbr>';
|
||||
}
|
||||
} else {
|
||||
if ($fDistance == 0) {
|
||||
return '0';
|
||||
} else {
|
||||
return '<abbr class="distance" title="spheric distance '.$fDistance.'">'.(round($fDistance,4)).'</abbr>';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -52,25 +57,33 @@
|
||||
return $sHTML;
|
||||
}
|
||||
|
||||
function map_icon($sIcon)
|
||||
function map_icon($aPlace)
|
||||
{
|
||||
if ($sIcon){
|
||||
echo '<img id="mapicon" src="'.CONST_Website_BaseURL.'images/mapicons/'.$sIcon.'.n.32.png'.'" alt="'.$sIcon.'" />';
|
||||
$sIcon = Nominatim\ClassTypes\getIconFile($aPlace);
|
||||
if (isset($sIcon)) {
|
||||
$sLabel = Nominatim\ClassTypes\getIcon($aPlace);
|
||||
echo '<img id="mapicon" src="'.$sIcon.'" alt="'.$sLabel.'" />';
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function _one_row($aAddressLine){
|
||||
function _one_row($aAddressLine, $bDistanceInMeters = false){
|
||||
$bNotUsed = isset($aAddressLine['isaddress']) && !$aAddressLine['isaddress'];
|
||||
|
||||
echo '<tr class="' . ($bNotUsed?'notused':'') . '">'."\n";
|
||||
echo ' <td class="name">'.(trim($aAddressLine['localname'])?$aAddressLine['localname']:'<span class="noname">No Name</span>')."</td>\n";
|
||||
echo ' <td>' . $aAddressLine['class'].':'.$aAddressLine['type'] . "</td>\n";
|
||||
echo ' <td class="name">'.(trim($aAddressLine['localname'])!==null?$aAddressLine['localname']:'<span class="noname">No Name</span>')."</td>\n";
|
||||
echo ' <td>' . $aAddressLine['class'].':'.$aAddressLine['type'];
|
||||
if ($aAddressLine['type'] == 'administrative'
|
||||
&& isset($aAddressLine['place_type']))
|
||||
{
|
||||
echo '('.$aAddressLine['place_type'].')';
|
||||
}
|
||||
echo "</td>\n";
|
||||
echo ' <td>' . osmLink($aAddressLine) . "</td>\n";
|
||||
echo ' <td>' . (isset($aAddressLine['rank_address']) ? $aAddressLine['rank_address'] : '') . "</td>\n";
|
||||
echo ' <td>' . ($aAddressLine['admin_level'] < 15 ? $aAddressLine['admin_level'] : '') . "</td>\n";
|
||||
echo ' <td>' . format_distance($aAddressLine['distance'])."</td>\n";
|
||||
echo ' <td>' . detailsLink($aAddressLine,'details >') . "</td>\n";
|
||||
echo ' <td>' . format_distance($aAddressLine['distance'], $bDistanceInMeters)."</td>\n";
|
||||
echo ' <td>' . detailsPermaLink($aAddressLine,'details >') . "</td>\n";
|
||||
echo "</tr>\n";
|
||||
}
|
||||
|
||||
@@ -98,11 +111,10 @@
|
||||
<div class="col-sm-10">
|
||||
<h1>
|
||||
<?php echo $aPointDetails['localname'] ?>
|
||||
<small><?php echo detailsPermaLink($aPointDetails, 'link to this page') ?></small>
|
||||
</h1>
|
||||
</div>
|
||||
<div class="col-sm-2 text-right">
|
||||
<?php map_icon($aPointDetails['icon']) ?>
|
||||
<?php map_icon($aPointDetails) ?>
|
||||
</div>
|
||||
</div>
|
||||
<div class="row">
|
||||
@@ -122,6 +134,8 @@
|
||||
kv('Coverage' , ($aPointDetails['isarea']?'Polygon':'Point') );
|
||||
kv('Centre Point' , $aPointDetails['lat'].','.$aPointDetails['lon'] );
|
||||
kv('OSM' , osmLink($aPointDetails) );
|
||||
kv('Place Id (<a href="https://nominatim.org/release-docs/develop/api/Output/#place_id-is-not-a-persistent-id">on this server</a>)'
|
||||
, $aPointDetails['place_id'] );
|
||||
if ($aPointDetails['wikipedia'])
|
||||
{
|
||||
kv('Wikipedia Calculated' , wikipediaLink($aPointDetails) );
|
||||
@@ -173,7 +187,7 @@
|
||||
{
|
||||
headline('Linked Places');
|
||||
foreach ($aLinkedLines as $aAddressLine) {
|
||||
_one_row($aAddressLine);
|
||||
_one_row($aAddressLine, true);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -212,7 +226,7 @@
|
||||
headline3($sGroupHeading);
|
||||
|
||||
foreach ($aHierarchyLines as $aAddressLine) {
|
||||
_one_row($aAddressLine);
|
||||
_one_row($aAddressLine, true);
|
||||
}
|
||||
}
|
||||
if (count($aHierarchyLines) >= 500) {
|
||||
|
||||
55
lib/template/details-index-html.php
Normal file
@@ -0,0 +1,55 @@
|
||||
<?php
|
||||
header("content-type: text/html; charset=UTF-8");
|
||||
?>
|
||||
<?php include(CONST_BasePath.'/lib/template/includes/html-header.php'); ?>
|
||||
<link href="css/common.css" rel="stylesheet" type="text/css" />
|
||||
<link href="css/details.css" rel="stylesheet" type="text/css" />
|
||||
</head>
|
||||
|
||||
|
||||
<body id="details-index-page">
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
|
||||
<h1>Show details for place</h1>
|
||||
|
||||
<div class="search-form">
|
||||
<h4>Search by place id</h4>
|
||||
|
||||
<form class="form-inline" action="details.php">
|
||||
<input type="edit" class="form-control input-sm" pattern="^[0-9]+$" name="place_id" placeholder="12345" />
|
||||
<input type="submit" class="btn btn-primary btn-sm" value="Show" />
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div class="search-form">
|
||||
<h4>Search by OSM type and OSM id</h4>
|
||||
|
||||
<form id="form-by-type-and-id" class="form-inline" action="details.php">
|
||||
<input type="edit" class="form-control input-sm" pattern="^[NWR][0-9]+$" placeholder="N123 or W123 or R123" />
|
||||
<input type="hidden" name="osmtype" />
|
||||
<input type="hidden" name="osmid" />
|
||||
<input type="submit" class="btn btn-primary btn-sm" value="Show" />
|
||||
</form>
|
||||
</div>
|
||||
|
||||
<div class="search-form">
|
||||
<h4>Search by openstreetmap.org URL</h4>
|
||||
|
||||
<form id="form-by-osm-url" class="form-inline" action="details.php">
|
||||
<input type="edit" class="form-control input-sm" pattern=".*openstreetmap.*" placeholder="https://www.openstreetmap.org/relation/123" />
|
||||
<input type="hidden" name="osmtype" />
|
||||
<input type="hidden" name="osmid" />
|
||||
<input type="submit" class="btn btn-primary btn-sm" value="Show" />
|
||||
</form>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<?php include(CONST_BasePath.'/lib/template/includes/html-footer.php'); ?>
|
||||
</body>
|
||||
</html>
|
||||
@@ -26,8 +26,9 @@ $aPlaceDetails['calculated_importance'] = (float) $aPointDetails['calculated_imp
|
||||
|
||||
$aPlaceDetails['extratags'] = $aPointDetails['aExtraTags'];
|
||||
$aPlaceDetails['calculated_wikipedia'] = $aPointDetails['wikipedia'];
|
||||
if ($aPointDetails['icon']) {
|
||||
$aPlaceDetails['icon'] = CONST_Website_BaseURL.'images/mapicons/'.$aPointDetails['icon'].'.n.32.png';
|
||||
$sIcon = Nominatim\ClassTypes\getIconFile($aPointDetails);
|
||||
if (isset($sIcon)) {
|
||||
$aPlaceDetails['icon'] = $sIcon;
|
||||
}
|
||||
|
||||
$aPlaceDetails['rank_address'] = (int) $aPointDetails['rank_address'];
|
||||
@@ -47,11 +48,13 @@ $funcMapAddressLine = function ($aFull) {
|
||||
'place_id' => isset($aFull['place_id']) ? (int) $aFull['place_id'] : null,
|
||||
'osm_id' => isset($aFull['osm_id']) ? (int) $aFull['osm_id'] : null,
|
||||
'osm_type' => isset($aFull['osm_type']) ? $aFull['osm_type'] : null,
|
||||
'place_type' => isset($aFull['place_type']) ? $aFull['place_type'] : null,
|
||||
'class' => $aFull['class'],
|
||||
'type' => $aFull['type'],
|
||||
'admin_level' => isset($aFull['admin_level']) ? (int) $aFull['admin_level'] : null,
|
||||
'rank_address' => $aFull['rank_address'] ? (int) $aFull['rank_address'] : null,
|
||||
'distance' => (float) $aFull['distance']
|
||||
'distance' => (float) $aFull['distance'],
|
||||
'isaddress' => isset($aFull['isaddress']) ? (bool) $aFull['isaddress'] : null
|
||||
);
|
||||
|
||||
return $aMapped;
|
||||
|
||||
@@ -6,7 +6,6 @@
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
|
||||
<base href="<?php echo CONST_Website_BaseURL;?>" />
|
||||
<link href="nominatim.xml" rel="search" title="Nominatim Search" type="application/opensearchdescription+xml" />
|
||||
<link href="css/leaflet.css" rel="stylesheet" />
|
||||
<link href="css/Control.Minimap.min.css" rel="stylesheet" />
|
||||
<link href="css/bootstrap-theme.min.css" rel="stylesheet" />
|
||||
|
||||
@@ -21,8 +21,10 @@
|
||||
About & Help <span class="caret"></span>
|
||||
</button>
|
||||
<ul class="dropdown-menu dropdown-menu-right">
|
||||
<li><a href="https://wiki.openstreetmap.org/wiki/Nominatim" target="_blank">Documentation</a></li>
|
||||
<li><a href="https://wiki.openstreetmap.org/wiki/Nominatim/FAQ" target="_blank">FAQ</a></li>
|
||||
<li><a href="https://nominatim.org/release-docs/develop/api/Overview/" target="_blank">API Reference</a></li>
|
||||
<li><a href="https://nominatim.org/release-docs/develop/api/Faq/" target="_blank">FAQ</a></li>
|
||||
<li><a href="https://help.openstreetmap.org/tags/nominatim/">OpenStreetMap Help</a></li>
|
||||
<li><a href="https://github.com/openstreetmap/Nominatim">Nominatim on Github</a></li>
|
||||
<li role="separator" class="divider"></li>
|
||||
<li><a href="#" class="" data-toggle="modal" data-target="#report-modal">Report problem with results</a></li>
|
||||
</ul>
|
||||
|
||||
@@ -6,4 +6,4 @@ look up data by its geographic coordinate (reverse search). Each result comes wi
|
||||
link to a details page where you can inspect what data about the object is saved in
|
||||
the database and investigate how the address of the object has been computed.</p>
|
||||
|
||||
For more information visit the <a href="https://wiki.openstreetmap.org/wiki/Nominatim">Nominatim wiki page</a>.
|
||||
For more information visit the <a href="https://nominatim.org">Nominatim home page</a>.
|
||||
|
||||
@@ -1,24 +1,42 @@
|
||||
<p>
|
||||
Before reporting problems please read the <a target="_blank" href="https://wiki.openstreetmap.org/wiki/Nominatim">user documentation</a>
|
||||
and
|
||||
<a target="_blank" href="https://wiki.openstreetmap.org/wiki/Nominatim/FAQ">FAQ</a>.
|
||||
Before reporting problems please read the <a target="_blank" href="https://nominatim.org/release-docs/develop/api/Overview">user documentation</a>.
|
||||
|
||||
<h4>Finding the expected result</h4>
|
||||
|
||||
First of all, please make sure that the result that you expect is
|
||||
available in the OpenStreetMap data.
|
||||
|
||||
To find the OpenStreetMap data, do the following:
|
||||
|
||||
<ul>
|
||||
<li>Go to <a href="https://openstreetmap.org">https://openstreetmap.org</a>.</li>
|
||||
<li>Go to the area of the map where you expect the result
|
||||
and zoom in until you see the object you are looking for.</li>
|
||||
<li>Click on the question mark on the right side of the map,
|
||||
then with the question cursor on the map where your object is located.</li>
|
||||
<li>Find the object of interest in the list that appears on the left side.</li>
|
||||
<li>Click on the object and note down the URL that the browser shows.</li>
|
||||
</ul>
|
||||
|
||||
If you cannot find the data you are looking for, there is a good chance
|
||||
that it has not been entered yet. You should <a href="https://www.openstreetmap.org/fixthemap">report or fix the problem in OpenStreetMap</a> directly.
|
||||
|
||||
<h4>Reporting bad searches</h4>
|
||||
|
||||
Problems may be reported at the <a target="_blank" href="https://github.com/openstreetmap/nominatim/issues">issue tracker on github</a>. Please read through
|
||||
the open tickets first and check if your problem has not already been
|
||||
reported.
|
||||
|
||||
When reporting a problem, include the following:
|
||||
|
||||
<ul>
|
||||
<li>A full description of the problem, including the exact term you
|
||||
were searching for.</li>
|
||||
<li>The result you get.</li>
|
||||
<li>The OpenStreetMap object you expect to find (see above).</li>
|
||||
</ul>
|
||||
|
||||
For general questions about installing and searching in Nominatim, please
|
||||
use <a href="https://help.openstreetmap.org/tags/nominatim/">Help OpenStreetMap</a>.
|
||||
|
||||
If your problem relates to the address of a particular search result please use the 'details' link
|
||||
to check how the address was generated before reporting a problem.
|
||||
</p>
|
||||
<p>
|
||||
Use <a target="_blank" href="https://github.com/openstreetmap/nominatim/issues">Nominatim issues on github</a>
|
||||
to report problems.
|
||||
<!-- You can search for existing bug reports
|
||||
<a href="https://trac.openstreetmap.org/query?status=new&status=assigned&status=reopened&component=nominatim&order=priority">here</a>.</p>
|
||||
-->
|
||||
</p>
|
||||
<p>
|
||||
Please ensure that you include a full description of the problem, including the search
|
||||
query that you used, the problem with the result and, if the problem relates to missing data,
|
||||
the osm type (node, way, relation) and id of the item that is missing.
|
||||
</p>
|
||||
<p>
|
||||
Problems that contain enough detail are likely to get looked at before ones that require
|
||||
significant research.
|
||||
</p>
|
||||
|
||||
71
lib/template/polygons-html.php
Normal file
@@ -0,0 +1,71 @@
|
||||
<?php
|
||||
header("content-type: text/html; charset=UTF-8");
|
||||
include(CONST_BasePath.'/lib/template/includes/html-header.php');
|
||||
?>
|
||||
<title>Nominatim Broken Polygon Data</title>
|
||||
<meta name="description" content="List of broken OSM polygon data by date" lang="en-US" />
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<div class="container">
|
||||
<h1>Broken polygons</h1>
|
||||
|
||||
<p>
|
||||
Total number of broken polygons: <?php echo $iTotalBroken ?>.
|
||||
Also available in <a href="<?php echo CONST_Website_BaseURL; ?>polygons.php?format=json">JSON format</a>.
|
||||
</p>
|
||||
|
||||
<table class="table table-striped table-hover">
|
||||
|
||||
<?php
|
||||
if (!empty($aPolygons)) {
|
||||
|
||||
echo '<tr>';
|
||||
//var_dump($aPolygons[0]);
|
||||
foreach (array_keys($aPolygons[0]) as $sCol) {
|
||||
echo '<th>'.$sCol.'</th>';
|
||||
}
|
||||
echo '<th>&nbsp;</th>';
|
||||
echo '</tr>';
|
||||
$aSeen = array();
|
||||
foreach ($aPolygons as $aRow) {
|
||||
if (isset($aSeen[$aRow['osm_type'].$aRow['osm_id']])) continue;
|
||||
$aSeen[$aRow['osm_type'].$aRow['osm_id']] = 1;
|
||||
|
||||
echo '<tr>';
|
||||
$sOSMType = formatOSMType($aRow['osm_type']);
|
||||
foreach ($aRow as $sCol => $sVal) {
|
||||
switch ($sCol) {
|
||||
case 'errormessage':
|
||||
if (preg_match('/Self-intersection\\[([0-9.\\-]+) ([0-9.\\-]+)\\]/', $sVal, $aMatch)) {
|
||||
$aRow['lat'] = $aMatch[2];
|
||||
$aRow['lon'] = $aMatch[1];
|
||||
$sUrl = sprintf('https://www.openstreetmap.org/?lat=%f&lon=%f&zoom=18&layers=M&%s=%d',
|
||||
$aRow['lat'],
|
||||
$aRow['lon'],
|
||||
$sOSMType,
|
||||
$aRow['osm_id']);
|
||||
echo '<td><a href="'.$sUrl.'">'.($sVal?$sVal:'&nbsp;').'</a></td>';
|
||||
} else {
|
||||
echo '<td>'.($sVal?$sVal:'&nbsp;').'</td>';
|
||||
}
|
||||
break;
|
||||
case 'osm_id':
|
||||
echo '<td>'.osmLink(array('osm_type' => $aRow['osm_type'], 'osm_id' => $aRow['osm_id'])).'</td>';
|
||||
break;
|
||||
default:
|
||||
echo '<td>'.($sVal?$sVal:'&nbsp;').'</td>';
|
||||
break;
|
||||
}
|
||||
}
|
||||
$sJosmUrl = 'http://localhost:8111/import?url=https://www.openstreetmap.org/api/0.6/'.$sOSMType.'/'.$aRow['osm_id'].'/full';
|
||||
echo '<td><a href="'.$sJosmUrl.'" target="josm">josm</a></td>';
|
||||
echo '</tr>';
|
||||
}
|
||||
echo '</table>';
|
||||
}
|
||||
?>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
@@ -25,10 +25,6 @@ foreach ($aBatchResults as $aSearchResults) {
|
||||
$aPointDetails['aBoundingBox'][2],
|
||||
$aPointDetails['aBoundingBox'][3]
|
||||
);
|
||||
|
||||
if (isset($aPointDetails['aPolyPoints']) && $bShowPolygons) {
|
||||
$aPlace['polygonpoints'] = $aPointDetails['aPolyPoints'];
|
||||
}
|
||||
}
|
||||
|
||||
if (isset($aPointDetails['zoom'])) {
|
||||
|
||||
@@ -20,27 +20,14 @@ foreach ($aSearchResults as $iResNum => $aPointDetails) {
|
||||
|
||||
$aPlace['properties']['geocoding']['label'] = $aPointDetails['langaddress'];
|
||||
|
||||
$aPlace['properties']['geocoding']['name'] = $aPointDetails['placename'];
|
||||
if ($aPointDetails['placename'] !== null) {
|
||||
$aPlace['properties']['geocoding']['name'] = $aPointDetails['placename'];
|
||||
}
|
||||
|
||||
if (isset($aPointDetails['address'])) {
|
||||
$aFieldMappings = array(
|
||||
'house_number' => 'housenumber',
|
||||
'road' => 'street',
|
||||
'locality' => 'locality',
|
||||
'postcode' => 'postcode',
|
||||
'city' => 'city',
|
||||
'district' => 'district',
|
||||
'county' => 'county',
|
||||
'state' => 'state',
|
||||
'country' => 'country'
|
||||
);
|
||||
|
||||
$aAddrNames = $aPointDetails['address']->getAddressNames();
|
||||
foreach ($aFieldMappings as $sFrom => $sTo) {
|
||||
if (isset($aAddrNames[$sFrom])) {
|
||||
$aPlace['properties']['geocoding'][$sTo] = $aAddrNames[$sFrom];
|
||||
}
|
||||
}
|
||||
$aPointDetails['address']->addGeocodeJsonAddressParts(
|
||||
$aPlace['properties']['geocoding']
|
||||
);
|
||||
|
||||
$aPlace['properties']['geocoding']['admin']
|
||||
= $aPointDetails['address']->getAdminLevels();
|
||||
|
||||
@@ -10,26 +10,46 @@
|
||||
|
||||
<?php include(CONST_BasePath.'/lib/template/includes/html-top-navigation.php'); ?>
|
||||
|
||||
<form class="form-inline" role="search" accept-charset="UTF-8" action="<?php echo CONST_Website_BaseURL; ?>search.php">
|
||||
<div class="form-group">
|
||||
<input id="q" name="q" type="text" class="form-control input-sm" placeholder="Search" value="<?php echo htmlspecialchars($sQuery); ?>" >
|
||||
<div class="top-bar" id="structured-query-selector">
|
||||
<div class="search-type-link">
|
||||
<a id="switch-to-reverse" href="<?php echo CONST_Website_BaseURL; ?>reverse.php?format=html">reverse search</a>
|
||||
</div>
|
||||
|
||||
<div class="radio-inline">
|
||||
<input type="radio" name="query-selector" id="simple" value="simple">
|
||||
<label for="simple">simple</label>
|
||||
</div>
|
||||
<div class="radio-inline">
|
||||
<input type="radio" name="query-selector" id="structured" value="structured">
|
||||
<label for="structured">structured</label>
|
||||
</div>
|
||||
|
||||
<form role="search" accept-charset="UTF-8" action="<?php echo CONST_Website_BaseURL; ?>search.php">
|
||||
<div class="form-group-simple">
|
||||
<input id="q" name="q" type="text" class="form-control input-sm" placeholder="Search" value="<?php echo htmlspecialchars($aMoreParams['q'] ?? ''); ?>" >
|
||||
</div>
|
||||
<div class="form-group-structured">
|
||||
<div class="form-inline">
|
||||
<input id="street" name="street" type="text" class="form-control input-sm" placeholder="House number/Street" value="<?php echo htmlspecialchars($aMoreParams['street'] ?? ''); ?>" >
|
||||
<input id="city" name="city" type="text" class="form-control input-sm" placeholder="City" value="<?php echo htmlspecialchars($aMoreParams['city'] ?? ''); ?>" >
|
||||
<input id="county" name="county" type="text" class="form-control input-sm" placeholder="County" value="<?php echo htmlspecialchars($aMoreParams['county'] ?? ''); ?>" >
|
||||
<input id="state" name="state" type="text" class="form-control input-sm" placeholder="State" value="<?php echo htmlspecialchars($aMoreParams['state'] ?? ''); ?>" >
|
||||
<input id="country" name="country" type="text" class="form-control input-sm" placeholder="Country" value="<?php echo htmlspecialchars($aMoreParams['country'] ?? ''); ?>" >
|
||||
<input id="postalcode" name="postalcode" type="text" class="form-control input-sm" placeholder="Postal Code" value="<?php echo htmlspecialchars($aMoreParams['postalcode'] ?? ''); ?>" >
|
||||
</div></div>
|
||||
<div class="form-group search-button-group">
|
||||
<button type="submit" class="btn btn-primary btn-sm">Search</button>
|
||||
<?php if (CONST_Search_AreaPolygons) { ?>
|
||||
<input type="hidden" value="1" name="polygon_geojson" />
|
||||
<?php } ?>
|
||||
<input type="hidden" name="viewbox" value="<?php if (isset($aMoreParams['viewbox'])) echo ($aMoreParams['viewbox']); ?>" />
|
||||
<input type="hidden" name="viewbox" value="<?php echo htmlspecialchars($aMoreParams['viewbox'] ?? ''); ?>" />
|
||||
<div class="checkbox-inline">
|
||||
<input type="checkbox" id="use_viewbox" <?php if (isset($aMoreParams['viewbox'])) echo "checked='checked'"; ?>>
|
||||
<input type="checkbox" id="use_viewbox" <?php if (!empty($aMoreParams['viewbox'])) echo "checked='checked'"; ?>>
|
||||
<label for="use_viewbox">apply viewbox</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="search-type-link">
|
||||
<a id="switch-to-reverse" href="<?php echo CONST_Website_BaseURL; ?>reverse.php?format=html">reverse search</a>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
</div>
|
||||
|
||||
<div id="content">
|
||||
|
||||
@@ -53,7 +73,7 @@
|
||||
echo ' <span class="type">('.ucwords(str_replace('_',' ',$aResult['class'])).')</span>';
|
||||
else
|
||||
echo ' <span class="type">('.ucwords(str_replace('_',' ',$aResult['type'])).')</span>';
|
||||
echo ' <a class="btn btn-default btn-xs details" href="details.php?place_id='.$aResult['place_id'].'">details</a>';
|
||||
echo detailsPermaLink($aResult, 'details', 'class="btn btn-default btn-xs details"');
|
||||
echo '</div>';
|
||||
$i = $i+1;
|
||||
}
|
||||
@@ -89,10 +109,6 @@
<script type="text/javascript">
|
||||
<?php
|
||||
|
||||
@@ -105,7 +121,16 @@
|
||||
);
|
||||
echo 'var nominatim_map_init = ' . json_encode($aNominatimMapInit, JSON_PRETTY_PRINT) . ';';
|
||||
|
||||
echo 'var nominatim_results = ' . json_encode($aSearchResults, JSON_PRETTY_PRINT) . ';';
|
||||
echo 'var nominatim_results = ' . json_encode($aSearchResults, JSON_PRETTY_PRINT) . ';';
|
||||
$sStructuredQuery = (empty($aMoreParams['q'])
|
||||
&& !(empty($aMoreParams['street'])
|
||||
&& empty($aMoreParams['city'])
|
||||
&& empty($aMoreParams['county'])
|
||||
&& empty($aMoreParams['state'])
|
||||
&& empty($aMoreParams['country'])
|
||||
&& empty($aMoreParams['postalcode'])))
|
||||
? 'true' : 'false';
|
||||
echo 'var nominatim_structured_query = '.$sStructuredQuery.';';
|
||||
?>
|
||||
</script>
|
||||
<?php include(CONST_BasePath.'/lib/template/includes/html-footer.php'); ?>
|
||||
|
||||
@@ -15,10 +15,6 @@ foreach ($aSearchResults as $iResNum => $aPointDetails) {
|
||||
|
||||
if (isset($aPointDetails['aBoundingBox'])) {
|
||||
$aPlace['boundingbox'] = $aPointDetails['aBoundingBox'];
|
||||
|
||||
if (isset($aPointDetails['aPolyPoints'])) {
|
||||
$aPlace['polygonpoints'] = $aPointDetails['aPolyPoints'];
|
||||
}
|
||||
}
|
||||
|
||||
if (isset($aPointDetails['zoom'])) {
|
||||
|
||||
@@ -11,7 +11,6 @@ echo " timestamp='".date(DATE_RFC822)."'";
|
||||
echo " attribution='Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright'";
|
||||
echo " querystring='".htmlspecialchars($sQuery, ENT_QUOTES)."'";
|
||||
if (isset($aMoreParams['viewbox'])) echo " viewbox='".htmlspecialchars($aMoreParams['viewbox'], ENT_QUOTES)."'";
|
||||
echo " polygon='".(isset($aMoreParams['polygon'])?'true':'false')."'";
|
||||
if (isset($aMoreParams['exclude_place_ids'])) {
|
||||
echo " exclude_place_ids='".htmlspecialchars($aMoreParams['exclude_place_ids'])."'";
|
||||
}
|
||||
@@ -31,12 +30,6 @@ foreach ($aSearchResults as $iResNum => $aResult) {
|
||||
echo ' boundingbox="';
|
||||
echo join(',', $aResult['aBoundingBox']);
|
||||
echo '"';
|
||||
|
||||
if (isset($aResult['aPolyPoints'])) {
|
||||
echo ' polygonpoints=\'';
|
||||
echo json_encode($aResult['aPolyPoints']);
|
||||
echo '\'';
|
||||
}
|
||||
}
|
||||
|
||||
if (isset($aResult['asgeojson'])) {
|
||||
|
||||
@@ -1,4 +1,13 @@
|
||||
# just use the pgxs makefile
|
||||
find_program(PG_CONFIG pg_config)
|
||||
execute_process(COMMAND ${PG_CONFIG} --pgxs
|
||||
OUTPUT_VARIABLE PGXS
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE)
|
||||
|
||||
if (NOT EXISTS "${PGXS}")
|
||||
message(FATAL_ERROR "Postgresql server package not found.")
|
||||
endif()
|
||||
|
||||
ADD_CUSTOM_COMMAND( OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/dummy
|
||||
COMMAND PGXS=${PGXS} PG_CONFIG=${PG_CONFIG} MODSRCDIR=${CMAKE_CURRENT_SOURCE_DIR} $(MAKE) -f ${CMAKE_CURRENT_SOURCE_DIR}/Makefile
|
||||
COMMENT "Running external makefile ${PGXS}"
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -1,12 +0,0 @@
|
||||
add_executable(nominatim export.c geometry.cpp import.c index.c input.c nominatim.c postgresql.c sprompt.c)
|
||||
|
||||
CHECK_SYMBOL_EXISTS(bswap_32 "byteswap.h" HAVE_BYTESWAP)
|
||||
CHECK_SYMBOL_EXISTS(bswap32 "sys/endian.h" HAVE_SYS_ENDIAN)
|
||||
|
||||
target_compile_definitions(nominatim
|
||||
PRIVATE HAVE_BYTESWAP=$<BOOL:${HAVE_BYTESWAP}>
|
||||
PRIVATE HAVE_SYS_ENDIAN=$<BOOL:${HAVE_SYS_ENDIAN}>
|
||||
)
|
||||
|
||||
target_link_libraries(nominatim ${LIBXML2_LIBRARIES} ${ZLIB_LIBRARIES} ${BZIP2_LIBRARIES} ${PostgreSQL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
|
||||
|
||||
@@ -1,558 +0,0 @@
|
||||
/*
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <assert.h>
|
||||
#include <time.h>
|
||||
#include <stdint.h>
|
||||
#include <pthread.h>
|
||||
|
||||
#include <libpq-fe.h>
|
||||
|
||||
#include "nominatim.h"
|
||||
#include "export.h"
|
||||
#include "postgresql.h"
|
||||
|
||||
extern int verbose;
|
||||
|
||||
int mode = 0;
|
||||
|
||||
void nominatim_export(int rank_min, int rank_max, const char *conninfo, const char *structuredoutputfile)
|
||||
{
|
||||
xmlTextWriterPtr writer;
|
||||
|
||||
int rankTotalDone;
|
||||
|
||||
PGconn *conn;
|
||||
PGresult * res;
|
||||
PGresult * resSectors;
|
||||
PGresult * resPlaces;
|
||||
|
||||
int rank;
|
||||
int i;
|
||||
int iSector;
|
||||
int tuples;
|
||||
|
||||
const char *paramValues[2];
|
||||
int paramLengths[2];
|
||||
int paramFormats[2];
|
||||
uint32_t paramRank;
|
||||
uint32_t paramSector;
|
||||
uint32_t sector;
|
||||
|
||||
Oid pg_prepare_params[2];
|
||||
|
||||
conn = PQconnectdb(conninfo);
|
||||
if (PQstatus(conn) != CONNECTION_OK)
|
||||
{
|
||||
fprintf(stderr, "Connection to database failed: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT4;
|
||||
res = PQprepare(conn, "index_sectors",
|
||||
"select geometry_sector,count(*) from placex where rank_search = $1 and indexed_status = 0 group by geometry_sector order by geometry_sector",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK) exit(EXIT_FAILURE);
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT4;
|
||||
pg_prepare_params[1] = PG_OID_INT4;
|
||||
res = PQprepare(conn, "index_sector_places",
|
||||
"select place_id from placex where rank_search = $1 and geometry_sector = $2",
|
||||
2, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK) exit(EXIT_FAILURE);
|
||||
PQclear(res);
|
||||
|
||||
nominatim_exportCreatePreparedQueries(conn);
|
||||
|
||||
// Create the output file
|
||||
writer = nominatim_exportXMLStart(structuredoutputfile);
|
||||
|
||||
for (rank = rank_min; rank <= rank_max; rank++)
|
||||
{
|
||||
printf("Starting rank %d\n", rank);
|
||||
|
||||
paramRank = PGint32(rank);
|
||||
paramValues[0] = (char *)&paramRank;
|
||||
paramLengths[0] = sizeof(paramRank);
|
||||
paramFormats[0] = 1;
|
||||
resSectors = PQexecPrepared(conn, "index_sectors", 1, paramValues, paramLengths, paramFormats, 1);
|
||||
if (PQresultStatus(resSectors) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "index_sectors: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(resSectors);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (PQftype(resSectors, 0) != PG_OID_INT4)
|
||||
{
|
||||
fprintf(stderr, "Sector value has unexpected type\n");
|
||||
PQclear(resSectors);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (PQftype(resSectors, 1) != PG_OID_INT8)
|
||||
{
|
||||
fprintf(stderr, "Sector value has unexpected type\n");
|
||||
PQclear(resSectors);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
rankTotalDone = 0;
|
||||
for (iSector = 0; iSector < PQntuples(resSectors); iSector++)
|
||||
{
|
||||
sector = PGint32(*((uint32_t *)PQgetvalue(resSectors, iSector, 0)));
|
||||
|
||||
// Get all the place_id's for this sector
|
||||
paramRank = PGint32(rank);
|
||||
paramValues[0] = (char *)&paramRank;
|
||||
paramLengths[0] = sizeof(paramRank);
|
||||
paramFormats[0] = 1;
|
||||
paramSector = PGint32(sector);
|
||||
paramValues[1] = (char *)&paramSector;
|
||||
paramLengths[1] = sizeof(paramSector);
|
||||
paramFormats[1] = 1;
|
||||
resPlaces = PQexecPrepared(conn, "index_sector_places", 2, paramValues, paramLengths, paramFormats, 1);
|
||||
if (PQresultStatus(resPlaces) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "index_sector_places: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(resPlaces);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (PQftype(resPlaces, 0) != PG_OID_INT8)
|
||||
{
|
||||
fprintf(stderr, "Place_id value has unexpected type\n");
|
||||
PQclear(resPlaces);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
tuples = PQntuples(resPlaces);
|
||||
for (i = 0; i < tuples; i++)
|
||||
{
|
||||
nominatim_exportPlace(PGint64(*((uint64_t *)PQgetvalue(resPlaces, i, 0))), conn, writer, NULL, NULL);
|
||||
rankTotalDone++;
|
||||
if (rankTotalDone%1000 == 0) printf("Done %i (k)\n", rankTotalDone/1000);
|
||||
}
|
||||
PQclear(resPlaces);
|
||||
}
|
||||
PQclear(resSectors);
|
||||
}
|
||||
|
||||
nominatim_exportXMLEnd(writer);
|
||||
|
||||
PQfinish(conn);
|
||||
}
|
||||
|
||||
void nominatim_exportCreatePreparedQueries(PGconn * conn)
|
||||
{
|
||||
Oid pg_prepare_params[2];
|
||||
PGresult * res;
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT8;
|
||||
res = PQprepare(conn, "placex_details",
|
||||
"select placex.osm_type, placex.osm_id, placex.class, placex.type, placex.name, placex.housenumber, placex.country_code, ST_AsText(placex.geometry), placex.admin_level, placex.rank_address, placex.rank_search, placex.parent_place_id, parent.osm_type, parent.osm_id, placex.indexed_status, placex.linked_place_id from placex left outer join placex as parent on (placex.parent_place_id = parent.place_id) where placex.place_id = $1",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Error preparing placex_details: %s", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT8;
|
||||
res = PQprepare(conn, "placex_address",
|
||||
"select osm_type,osm_id,class,type,distance,cached_rank_address,isaddress from place_addressline join placex on (address_place_id = placex.place_id) where place_addressline.place_id = $1 and address_place_id != place_addressline.place_id order by cached_rank_address asc,osm_type,osm_id",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Error preparing placex_address: %s", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT8;
|
||||
res = PQprepare(conn, "placex_names",
|
||||
"select (each(name)).key,(each(name)).value from (select name from placex where place_id = $1) as x order by (each(name)).key",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Error preparing placex_names: %s", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT8;
|
||||
res = PQprepare(conn, "placex_extratags",
|
||||
"select (each(extratags)).key,(each(extratags)).value from (select extratags from placex where place_id = $1) as x order by (each(extratags)).key",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Error preparing placex_extratags: %s", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
}
|
||||
|
||||
xmlTextWriterPtr nominatim_exportXMLStart(const char *structuredoutputfile)
|
||||
{
|
||||
xmlTextWriterPtr writer;
|
||||
|
||||
writer = xmlNewTextWriterFilename(structuredoutputfile, 0);
|
||||
if (writer==NULL)
|
||||
{
|
||||
fprintf(stderr, "Unable to open %s\n", structuredoutputfile);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
xmlTextWriterSetIndent(writer, 1);
|
||||
if (xmlTextWriterStartDocument(writer, NULL, "UTF8", NULL) < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterStartDocument failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (xmlTextWriterStartElement(writer, BAD_CAST "osmStructured") < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterStartElement failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (xmlTextWriterWriteAttribute(writer, BAD_CAST "version", BAD_CAST "0.1") < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterWriteAttribute failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (xmlTextWriterWriteAttribute(writer, BAD_CAST "generator", BAD_CAST "Nominatim") < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterWriteAttribute failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
mode = 0;
|
||||
|
||||
return writer;
|
||||
}
|
||||
|
||||
void nominatim_exportXMLEnd(xmlTextWriterPtr writer)
|
||||
{
|
||||
nominatim_exportEndMode(writer);
|
||||
|
||||
// End <osmStructured>
|
||||
if (xmlTextWriterEndElement(writer) < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterEndElement failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (xmlTextWriterEndDocument(writer) < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterEndDocument failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
xmlFreeTextWriter(writer);
|
||||
}
|
||||
|
||||
void nominatim_exportStartMode(xmlTextWriterPtr writer, int newMode)
|
||||
{
|
||||
if (mode == newMode) return;
|
||||
|
||||
nominatim_exportEndMode(writer);
|
||||
|
||||
switch(newMode)
|
||||
{
|
||||
case 0:
|
||||
break;
|
||||
|
||||
case 1:
|
||||
if (xmlTextWriterStartElement(writer, BAD_CAST "add") < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterStartElement failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
break;
|
||||
|
||||
case 2:
|
||||
if (xmlTextWriterStartElement(writer, BAD_CAST "update") < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterStartElement failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
break;
|
||||
|
||||
case 3:
|
||||
if (xmlTextWriterStartElement(writer, BAD_CAST "delete") < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterStartElement failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
break;
|
||||
}
|
||||
mode = newMode;
|
||||
}
|
||||
|
||||
void nominatim_exportEndMode(xmlTextWriterPtr writer)
|
||||
{
|
||||
if (!mode) return;
|
||||
|
||||
if (xmlTextWriterEndElement(writer) < 0)
|
||||
{
|
||||
fprintf(stderr, "xmlTextWriterEndElement failed\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void nominatim_exportPlaceQueries(uint64_t place_id, PGconn * conn, struct export_data * querySet)
|
||||
{
|
||||
const char * paramValues[1];
|
||||
int paramLengths[1];
|
||||
int paramFormats[1];
|
||||
uint64_t paramPlaceID;
|
||||
|
||||
paramPlaceID = PGint64(place_id);
|
||||
paramValues[0] = (char *)&paramPlaceID;
|
||||
paramLengths[0] = sizeof(paramPlaceID);
|
||||
paramFormats[0] = 1;
|
||||
|
||||
querySet->res = PQexecPrepared(conn, "placex_details", 1, paramValues, paramLengths, paramFormats, 0);
|
||||
if (PQresultStatus(querySet->res) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "placex_details: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(querySet->res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
querySet->resNames = PQexecPrepared(conn, "placex_names", 1, paramValues, paramLengths, paramFormats, 0);
|
||||
if (PQresultStatus(querySet->resNames) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "placex_names: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(querySet->resNames);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
querySet->resAddress = PQexecPrepared(conn, "placex_address", 1, paramValues, paramLengths, paramFormats, 0);
|
||||
if (PQresultStatus(querySet->resAddress) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "placex_address: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(querySet->resAddress);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
querySet->resExtraTags = PQexecPrepared(conn, "placex_extratags", 1, paramValues, paramLengths, paramFormats, 0);
|
||||
if (PQresultStatus(querySet->resExtraTags) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "placex_extratags: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(querySet->resExtraTags);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
void nominatim_exportFreeQueries(struct export_data * querySet)
|
||||
{
|
||||
PQclear(querySet->res);
|
||||
PQclear(querySet->resNames);
|
||||
PQclear(querySet->resAddress);
|
||||
PQclear(querySet->resExtraTags);
|
||||
}
|
||||
|
||||
/*
|
||||
* Requirements: the prepared queries must exist
|
||||
*/
|
||||
void nominatim_exportPlace(uint64_t place_id, PGconn * conn,
|
||||
xmlTextWriterPtr writer, pthread_mutex_t * writer_mutex, struct export_data * prevQuerySet)
|
||||
{
|
||||
struct export_data querySet;
|
||||
|
||||
int i;
|
||||
|
||||
nominatim_exportPlaceQueries(place_id, conn, &querySet);
|
||||
|
||||
// Add, modify or delete?
|
||||
if (prevQuerySet)
|
||||
{
|
||||
if ((PQgetvalue(prevQuerySet->res, 0, 14) && strcmp(PQgetvalue(prevQuerySet->res, 0, 14), "100") == 0) || PQntuples(querySet.res) == 0)
|
||||
{
|
||||
// Delete
|
||||
if (writer_mutex) pthread_mutex_lock( writer_mutex );
|
||||
nominatim_exportStartMode(writer, 3);
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "feature");
|
||||
xmlTextWriterWriteFormatAttribute(writer, BAD_CAST "place_id", "%li", place_id);
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "type", BAD_CAST PQgetvalue(prevQuerySet->res, 0, 0));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "id", BAD_CAST PQgetvalue(prevQuerySet->res, 0, 1));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "key", BAD_CAST PQgetvalue(prevQuerySet->res, 0, 2));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "value", BAD_CAST PQgetvalue(prevQuerySet->res, 0, 3));
|
||||
xmlTextWriterEndElement(writer);
|
||||
if (writer_mutex) pthread_mutex_unlock( writer_mutex );
|
||||
nominatim_exportFreeQueries(&querySet);
|
||||
return;
|
||||
}
|
||||
if (PQgetvalue(prevQuerySet->res, 0, 14) && strcmp(PQgetvalue(prevQuerySet->res, 0, 14), "1") == 0)
|
||||
{
|
||||
// Add
|
||||
if (writer_mutex) pthread_mutex_lock( writer_mutex );
|
||||
nominatim_exportStartMode(writer, 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Update, but only if something has changed
|
||||
|
||||
// TODO: detect changes
|
||||
|
||||
if (writer_mutex) pthread_mutex_lock( writer_mutex );
|
||||
nominatim_exportStartMode(writer, 2);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
// Add
|
||||
if (writer_mutex) pthread_mutex_lock( writer_mutex );
|
||||
nominatim_exportStartMode(writer, 1);
|
||||
}
|
||||
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "feature");
|
||||
xmlTextWriterWriteFormatAttribute(writer, BAD_CAST "place_id", "%li", place_id);
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "type", BAD_CAST PQgetvalue(querySet.res, 0, 0));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "id", BAD_CAST PQgetvalue(querySet.res, 0, 1));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "key", BAD_CAST PQgetvalue(querySet.res, 0, 2));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "value", BAD_CAST PQgetvalue(querySet.res, 0, 3));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "rank", BAD_CAST PQgetvalue(querySet.res, 0, 9));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "importance", BAD_CAST PQgetvalue(querySet.res, 0, 10));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "parent_place_id", BAD_CAST PQgetvalue(querySet.res, 0, 11));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "parent_type", BAD_CAST PQgetvalue(querySet.res, 0, 12));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "parent_id", BAD_CAST PQgetvalue(querySet.res, 0, 13));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "linked_place_id", BAD_CAST PQgetvalue(querySet.res, 0, 15));
|
||||
|
||||
if (PQntuples(querySet.resNames))
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "names");
|
||||
|
||||
for (i = 0; i < PQntuples(querySet.resNames); i++)
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "name");
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "type", BAD_CAST PQgetvalue(querySet.resNames, i, 0));
|
||||
xmlTextWriterWriteString(writer, BAD_CAST PQgetvalue(querySet.resNames, i, 1));
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
if (PQgetvalue(querySet.res, 0, 5) && strlen(PQgetvalue(querySet.res, 0, 5)))
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "houseNumber");
|
||||
xmlTextWriterWriteString(writer, BAD_CAST PQgetvalue(querySet.res, 0, 5));
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
if (PQgetvalue(querySet.res, 0, 8) && strlen(PQgetvalue(querySet.res, 0, 8)))
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "adminLevel");
|
||||
xmlTextWriterWriteString(writer, BAD_CAST PQgetvalue(querySet.res, 0, 8));
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
if (PQgetvalue(querySet.res, 0, 6) && strlen(PQgetvalue(querySet.res, 0, 6)))
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "countryCode");
|
||||
xmlTextWriterWriteString(writer, BAD_CAST PQgetvalue(querySet.res, 0, 6));
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
if (PQntuples(querySet.resAddress) > 0)
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "address");
|
||||
for (i = 0; i < PQntuples(querySet.resAddress); i++)
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST getRankLabel(atoi(PQgetvalue(querySet.resAddress, i, 5))));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "rank", BAD_CAST PQgetvalue(querySet.resAddress, i, 5));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "type", BAD_CAST PQgetvalue(querySet.resAddress, i, 0));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "id", BAD_CAST PQgetvalue(querySet.resAddress, i, 1));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "key", BAD_CAST PQgetvalue(querySet.resAddress, i, 2));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "value", BAD_CAST PQgetvalue(querySet.resAddress, i, 3));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "distance", BAD_CAST PQgetvalue(querySet.resAddress, i, 4));
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "isaddress", BAD_CAST PQgetvalue(querySet.resAddress, i, 6));
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
if (PQntuples(querySet.resExtraTags))
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "tags");
|
||||
|
||||
for (i = 0; i < PQntuples(querySet.resExtraTags); i++)
|
||||
{
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "tag");
|
||||
xmlTextWriterWriteAttribute(writer, BAD_CAST "type", BAD_CAST PQgetvalue(querySet.resExtraTags, i, 0));
|
||||
xmlTextWriterWriteString(writer, BAD_CAST PQgetvalue(querySet.resExtraTags, i, 1));
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
xmlTextWriterEndElement(writer);
|
||||
}
|
||||
|
||||
|
||||
xmlTextWriterStartElement(writer, BAD_CAST "osmGeometry");
|
||||
xmlTextWriterWriteString(writer, BAD_CAST PQgetvalue(querySet.res, 0, 7));
|
||||
xmlTextWriterEndElement(writer);
|
||||
|
||||
xmlTextWriterEndElement(writer); // </feature>
|
||||
|
||||
if (writer_mutex) pthread_mutex_unlock( writer_mutex );
|
||||
|
||||
nominatim_exportFreeQueries(&querySet);
|
||||
}
|
||||
|
||||
const char * getRankLabel(int rank)
|
||||
{
|
||||
switch (rank)
|
||||
{
|
||||
case 0:
|
||||
case 1:
|
||||
return "continent";
|
||||
case 2:
|
||||
case 3:
|
||||
return "sea";
|
||||
case 4:
|
||||
case 5:
|
||||
case 6:
|
||||
case 7:
|
||||
return "country";
|
||||
case 8:
|
||||
case 9:
|
||||
case 10:
|
||||
case 11:
|
||||
return "state";
|
||||
case 12:
|
||||
case 13:
|
||||
case 14:
|
||||
case 15:
|
||||
return "county";
|
||||
case 16:
|
||||
return "city";
|
||||
case 17:
|
||||
return "town";
|
||||
case 18:
|
||||
return "village";
|
||||
case 19:
|
||||
return "unknown";
|
||||
case 20:
|
||||
return "suburb";
|
||||
case 21:
|
||||
return "postcode";
|
||||
case 22:
|
||||
return "neighborhood";
|
||||
case 23:
|
||||
return "postcode";
|
||||
case 24:
|
||||
return "unknown";
|
||||
case 25:
|
||||
return "postcode";
|
||||
case 26:
|
||||
return "street";
|
||||
case 27:
|
||||
return "access";
|
||||
case 28:
|
||||
return "building";
|
||||
case 29:
|
||||
default:
|
||||
return "other";
|
||||
}
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
#ifndef EXPORT_H
|
||||
#define EXPORT_H
|
||||
|
||||
#include <libxml/encoding.h>
|
||||
#include <libxml/xmlwriter.h>
|
||||
#include <stdint.h>
|
||||
|
||||
struct export_data
|
||||
{
|
||||
PGresult * res;
|
||||
PGresult * resNames;
|
||||
PGresult * resAddress;
|
||||
PGresult * resExtraTags;
|
||||
};
|
||||
|
||||
void nominatim_export(int rank_min, int rank_max, const char *conninfo, const char *structuredoutputfile);
|
||||
void nominatim_exportCreatePreparedQueries(PGconn * conn);
|
||||
|
||||
xmlTextWriterPtr nominatim_exportXMLStart(const char *structuredoutputfile);
|
||||
void nominatim_exportXMLEnd(xmlTextWriterPtr writer);
|
||||
|
||||
void nominatim_exportEndMode(xmlTextWriterPtr writer);
|
||||
|
||||
void nominatim_exportPlaceQueries(uint64_t place_id, PGconn * conn, struct export_data * querySet);
|
||||
void nominatim_exportFreeQueries(struct export_data * querySet);
|
||||
|
||||
void nominatim_exportPlace(uint64_t place_id, PGconn * conn,
|
||||
xmlTextWriterPtr writer, pthread_mutex_t * writer_mutex, struct export_data * prevQuerySet);
|
||||
const char * getRankLabel(int rank);
|
||||
|
||||
#endif
|
||||
@@ -1,856 +0,0 @@
|
||||
/*
|
||||
*/
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#include <libpq-fe.h>
|
||||
|
||||
#include <libxml/xmlstring.h>
|
||||
#include <libxml/xmlreader.h>
|
||||
#include <libxml/hash.h>
|
||||
|
||||
#include "nominatim.h"
|
||||
#include "import.h"
|
||||
#include "input.h"
|
||||
|
||||
typedef enum { FILETYPE_NONE, FILETYPE_STRUCTUREDV0P1 } filetypes_t;
|
||||
typedef enum { FILEMODE_NONE, FILEMODE_ADD, FILEMODE_UPDATE, FILEMODE_DELETE } filemodes_t;
|
||||
|
||||
#define MAX_FEATUREADDRESS 5000
|
||||
#define MAX_FEATURENAMES 10000
|
||||
#define MAX_FEATUREEXTRATAGS 10000
|
||||
#define MAX_FEATURENAMESTRING 1000000
|
||||
#define MAX_FEATUREEXTRATAGSTRING 500000
|
||||
|
||||
struct feature_address
|
||||
{
|
||||
int place_id;
|
||||
int rankAddress;
|
||||
char isAddress[2];
|
||||
xmlChar * type;
|
||||
xmlChar * id;
|
||||
xmlChar * key;
|
||||
xmlChar * value;
|
||||
xmlChar * distance;
|
||||
};
|
||||
|
||||
struct feature_tag
|
||||
{
|
||||
xmlChar * type;
|
||||
xmlChar * value;
|
||||
};
|
||||
|
||||
struct feature
|
||||
{
|
||||
xmlChar * placeID;
|
||||
xmlChar * type;
|
||||
xmlChar * id;
|
||||
xmlChar * key;
|
||||
xmlChar * value;
|
||||
xmlChar * rankAddress;
|
||||
xmlChar * rankSearch;
|
||||
xmlChar * countryCode;
|
||||
xmlChar * parentPlaceID;
|
||||
xmlChar * parentType;
|
||||
xmlChar * parentID;
|
||||
xmlChar * adminLevel;
|
||||
xmlChar * houseNumber;
|
||||
xmlChar * geometry;
|
||||
} feature;
|
||||
|
||||
int fileType = FILETYPE_NONE;
|
||||
int fileMode = FILEMODE_ADD;
|
||||
PGconn * conn;
|
||||
struct feature_address featureAddress[MAX_FEATUREADDRESS];
|
||||
struct feature_tag featureName[MAX_FEATURENAMES];
|
||||
struct feature_tag featureExtraTag[MAX_FEATUREEXTRATAGS];
|
||||
struct feature feature;
|
||||
int featureAddressLines = 0;
|
||||
int featureNameLines = 0;
|
||||
int featureExtraTagLines = 0;
|
||||
int featureCount = 0;
|
||||
xmlHashTablePtr partionTableTagsHash;
|
||||
xmlHashTablePtr partionTableTagsHashDelete;
|
||||
char featureNameString[MAX_FEATURENAMESTRING];
|
||||
char featureExtraTagString[MAX_FEATUREEXTRATAGSTRING];
|
||||
|
||||
extern int verbose;
|
||||
|
||||
void StartElement(xmlTextReaderPtr reader, const xmlChar *name)
|
||||
{
|
||||
char * value;
|
||||
float version;
|
||||
int isAddressLine;
|
||||
|
||||
if (fileType == FILETYPE_NONE)
|
||||
{
|
||||
// Potential to handle other file types in the future / versions
|
||||
if (xmlStrEqual(name, BAD_CAST "osmStructured"))
|
||||
{
|
||||
value = (char*)xmlTextReaderGetAttribute(reader, BAD_CAST "version");
|
||||
version = strtof(value, NULL);
|
||||
xmlFree(value);
|
||||
|
||||
if (version == (float)0.1)
|
||||
{
|
||||
fileType = FILETYPE_STRUCTUREDV0P1;
|
||||
fileMode = FILEMODE_ADD;
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf( stderr, "Unknown osmStructured version %f (%s)\n", version, value );
|
||||
exit_nicely();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf( stderr, "Unknown XML document type: %s\n", name );
|
||||
exit_nicely();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (xmlStrEqual(name, BAD_CAST "add"))
|
||||
{
|
||||
fileMode = FILEMODE_ADD;
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "update"))
|
||||
{
|
||||
fileMode = FILEMODE_UPDATE;
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "delete"))
|
||||
{
|
||||
fileMode = FILEMODE_DELETE;
|
||||
return;
|
||||
}
|
||||
if (fileMode == FILEMODE_NONE)
|
||||
{
|
||||
fprintf( stderr, "Unknown import mode in: %s\n", name );
|
||||
exit_nicely();
|
||||
}
|
||||
|
||||
if (xmlStrEqual(name, BAD_CAST "feature"))
|
||||
{
|
||||
feature.placeID = xmlTextReaderGetAttribute(reader, BAD_CAST "place_id");
|
||||
feature.type = xmlTextReaderGetAttribute(reader, BAD_CAST "type");
|
||||
feature.id = xmlTextReaderGetAttribute(reader, BAD_CAST "id");
|
||||
feature.key = xmlTextReaderGetAttribute(reader, BAD_CAST "key");
|
||||
feature.value = xmlTextReaderGetAttribute(reader, BAD_CAST "value");
|
||||
feature.rankAddress = xmlTextReaderGetAttribute(reader, BAD_CAST "rank");
|
||||
feature.rankSearch = xmlTextReaderGetAttribute(reader, BAD_CAST "importance");
|
||||
|
||||
feature.parentPlaceID = xmlTextReaderGetAttribute(reader, BAD_CAST "parent_place_id");
|
||||
/*
|
||||
if (strlen(feature.parentPlaceID) == 0)
|
||||
{
|
||||
xmlFree(feature.parentPlaceID);
|
||||
feature.parentPlaceID = NULL;
|
||||
}
|
||||
*/
|
||||
feature.parentType = xmlTextReaderGetAttribute(reader, BAD_CAST "parent_type");
|
||||
feature.parentID = xmlTextReaderGetAttribute(reader, BAD_CAST "parent_id");
|
||||
|
||||
feature.countryCode = NULL;
|
||||
feature.adminLevel = NULL;
|
||||
feature.houseNumber = NULL;
|
||||
feature.geometry = NULL;
|
||||
featureAddressLines = 0;
|
||||
featureNameLines = 0;
|
||||
featureExtraTagLines = 0;
|
||||
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "names")) return;
|
||||
if (xmlStrEqual(name, BAD_CAST "name"))
|
||||
{
|
||||
if (featureNameLines < MAX_FEATURENAMES)
|
||||
{
|
||||
featureName[featureNameLines].type = xmlTextReaderGetAttribute(reader, BAD_CAST "type");
|
||||
featureName[featureNameLines].value = xmlTextReaderReadString(reader);
|
||||
featureNameLines++;
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf( stderr, "Too many name elements (%s%s)\n", feature.type, feature.id);
|
||||
// exit_nicely();
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "tags")) return;
|
||||
if (xmlStrEqual(name, BAD_CAST "tag"))
|
||||
{
|
||||
if (featureExtraTagLines < MAX_FEATUREEXTRATAGS)
|
||||
{
|
||||
featureExtraTag[featureExtraTagLines].type = xmlTextReaderGetAttribute(reader, BAD_CAST "type");
|
||||
featureExtraTag[featureExtraTagLines].value = xmlTextReaderReadString(reader);
|
||||
featureExtraTagLines++;
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf( stderr, "Too many extra tag elements (%s%s)\n", feature.type, feature.id);
|
||||
// exit_nicely();
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "osmGeometry"))
|
||||
{
|
||||
feature.geometry = xmlTextReaderReadString(reader);
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "adminLevel"))
|
||||
{
|
||||
feature.adminLevel = xmlTextReaderReadString(reader);
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "countryCode"))
|
||||
{
|
||||
feature.countryCode = xmlTextReaderReadString(reader);
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "houseNumber"))
|
||||
{
|
||||
feature.houseNumber = xmlTextReaderReadString(reader);
|
||||
return;
|
||||
}
|
||||
if (xmlStrEqual(name, BAD_CAST "address"))
|
||||
{
|
||||
featureAddressLines = 0;
|
||||
return;
|
||||
}
|
||||
isAddressLine = 0;
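// The element names tested below mirror the rank labels emitted by
// getRankLabel() in export.c; each matching element is collected into
// featureAddress[] and later written to the place_addressline table.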
|
||||
if (xmlStrEqual(name, BAD_CAST "continent"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "sea"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "country"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "state"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "county"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "city"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "town"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "village"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "unknown"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "suburb"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "postcode"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "neighborhood"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "street"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "access"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "building"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
else if (xmlStrEqual(name, BAD_CAST "other"))
|
||||
{
|
||||
isAddressLine = 1;
|
||||
}
|
||||
if (isAddressLine)
|
||||
{
|
||||
if (featureAddressLines < MAX_FEATUREADDRESS)
|
||||
{
|
||||
value = (char*)xmlTextReaderGetAttribute(reader, BAD_CAST "rank");
|
||||
if (!value)
|
||||
{
|
||||
fprintf( stderr, "Address element missing rank\n");
|
||||
exit_nicely();
|
||||
}
|
||||
featureAddress[featureAddressLines].rankAddress = atoi(value);
|
||||
xmlFree(value);
|
||||
|
||||
value = (char*)xmlTextReaderGetAttribute(reader, BAD_CAST "isaddress");
|
||||
if (!value)
|
||||
{
|
||||
fprintf( stderr, "Address element missing rank\n");
|
||||
exit_nicely();
|
||||
}
|
||||
if (*value == 't') strcpy(featureAddress[featureAddressLines].isAddress, "t");
|
||||
else strcpy(featureAddress[featureAddressLines].isAddress, "f");
|
||||
xmlFree(value);
|
||||
|
||||
featureAddress[featureAddressLines].type = xmlTextReaderGetAttribute(reader, BAD_CAST "type");
|
||||
featureAddress[featureAddressLines].id = xmlTextReaderGetAttribute(reader, BAD_CAST "id");
|
||||
featureAddress[featureAddressLines].key = xmlTextReaderGetAttribute(reader, BAD_CAST "key");
|
||||
featureAddress[featureAddressLines].value = xmlTextReaderGetAttribute(reader, BAD_CAST "value");
|
||||
featureAddress[featureAddressLines].distance = xmlTextReaderGetAttribute(reader, BAD_CAST "distance");
|
||||
|
||||
featureAddressLines++;
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf( stderr, "Too many address elements (%s%s)\n", feature.type, feature.id);
|
||||
// exit_nicely();
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
fprintf(stderr, "%s: Unknown element name: %s\n", __FUNCTION__, name);
|
||||
}
|
||||
|
||||
void EndElement(xmlTextReaderPtr reader, const xmlChar *name)
|
||||
{
|
||||
PGresult * res;
|
||||
const char * paramValues[14];
|
||||
char * place_id;
|
||||
char * partionQueryName;
|
||||
int i, namePos, lineTypeLen, lineValueLen;
|
||||
|
||||
if (xmlStrEqual(name, BAD_CAST "feature"))
|
||||
{
|
||||
featureCount++;
|
||||
if (featureCount % 1000 == 0) printf("feature %i(k)\n", featureCount/1000);
|
||||
/*
|
||||
if (fileMode == FILEMODE_ADD)
|
||||
{
|
||||
resPlaceID = PQexecPrepared(conn, "get_new_place_id", 0, NULL, NULL, NULL, 0);
|
||||
if (PQresultStatus(resPlaceID) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "get_place_id: INSERT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(resPlaceID);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
paramValues[0] = (const char *)feature.type;
|
||||
paramValues[1] = (const char *)feature.id;
|
||||
paramValues[2] = (const char *)feature.key;
|
||||
paramValues[3] = (const char *)feature.value;
|
||||
resPlaceID = PQexecPrepared(conn, "get_new_place_id", 4, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(resPlaceID) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "index_placex: INSERT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(resPlaceID);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
*/
|
||||
place_id = (char *)feature.placeID;
|
||||
|
||||
if (fileMode == FILEMODE_UPDATE || fileMode == FILEMODE_DELETE || fileMode == FILEMODE_ADD)
|
||||
{
|
||||
paramValues[0] = (const char *)place_id;
|
||||
if (verbose) fprintf(stderr, "placex_delete: %s\n", paramValues[0]);
|
||||
res = PQexecPrepared(conn, "placex_delete", 1, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "placex_delete: DELETE failed: %s", PQerrorMessage(conn));
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
if (verbose) fprintf(stderr, "search_name_delete: %s\n", paramValues[0]);
|
||||
res = PQexecPrepared(conn, "search_name_delete", 1, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "search_name_delete: DELETE failed: %s", PQerrorMessage(conn));
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
if (verbose) fprintf(stderr, "place_addressline_delete: %s\n", paramValues[0]);
|
||||
res = PQexecPrepared(conn, "place_addressline_delete", 1, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "place_addressline_delete: DELETE failed: %s", PQerrorMessage(conn));
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
partionQueryName = xmlHashLookup2(partionTableTagsHashDelete, feature.key, feature.value);
|
||||
if (partionQueryName)
|
||||
{
|
||||
res = PQexecPrepared(conn, partionQueryName, 1, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "%s: DELETE failed: %s", partionQueryName, PQerrorMessage(conn));
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
}
|
||||
}
|
||||
|
||||
if (fileMode == FILEMODE_UPDATE || fileMode == FILEMODE_ADD)
|
||||
{
|
||||
// Insert into placex
|
||||
paramValues[0] = (const char *)place_id;
|
||||
paramValues[1] = (const char *)feature.type;
|
||||
paramValues[2] = (const char *)feature.id;
|
||||
paramValues[3] = (const char *)feature.key;
|
||||
paramValues[4] = (const char *)feature.value;
|
||||
|
||||
featureNameString[0] = 0;
|
||||
if (featureNameLines)
|
||||
{
|
||||
namePos = 0;
|
||||
lineTypeLen = 0;
|
||||
lineValueLen = 0;
|
||||
for (i = 0; i < featureNameLines; i++)
|
||||
{
|
||||
lineTypeLen = (int)strlen((char *) featureName[i].type);
|
||||
lineValueLen = (int)strlen((char *) featureName[i].value);
|
||||
if (namePos+lineTypeLen+lineValueLen+7 > MAX_FEATURENAMESTRING)
|
||||
{
|
||||
fprintf(stderr, "feature name too long: %s", (const char *)featureName[i].value);
|
||||
break;
|
||||
}
|
||||
if (namePos) strcpy(featureNameString+(namePos++), ",");
|
||||
strcpy(featureNameString+(namePos++), "\"");
|
||||
strcpy(featureNameString+namePos, (char*) featureName[i].type);
|
||||
namePos += lineTypeLen;
|
||||
strcpy(featureNameString+namePos, "\"=>\"");
|
||||
namePos += 4;
|
||||
strcpy(featureNameString+namePos, (char *) featureName[i].value);
|
||||
namePos += lineValueLen;
|
||||
strcpy(featureNameString+(namePos++), "\"");
|
||||
|
||||
xmlFree(featureName[i].type);
|
||||
xmlFree(featureName[i].value);
|
||||
}
|
||||
}
|
||||
paramValues[5] = (const char *)featureNameString;
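/* featureNameString now holds an hstore-style literal built from the <name>
   elements, e.g. "name"=>"Berlin","name:en"=>"Berlin" (values illustrative);
   embedded double quotes in tag values are not escaped here. */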
|
||||
|
||||
paramValues[6] = (const char *)feature.countryCode;
|
||||
|
||||
featureExtraTagString[0] = 0;
|
||||
if (featureExtraTagLines)
|
||||
{
|
||||
namePos = 0;
|
||||
lineTypeLen = 0;
|
||||
lineValueLen = 0;
|
||||
for (i = 0; i < featureExtraTagLines; i++)
|
||||
{
|
||||
lineTypeLen = strlen((char *) featureExtraTag[i].type);
|
||||
lineValueLen = strlen((char *) featureExtraTag[i].value);
|
||||
if (namePos+lineTypeLen+lineValueLen+7 > MAX_FEATUREEXTRATAGSTRING)
|
||||
{
|
||||
fprintf(stderr, "feature extra tag too long: %s", (const char *)featureExtraTag[i].value);
|
||||
break;
|
||||
}
|
||||
if (namePos) strcpy(featureExtraTagString+(namePos++),",");
|
||||
strcpy(featureExtraTagString+(namePos++), "\"");
|
||||
strcpy(featureExtraTagString+namePos, (char *) featureExtraTag[i].type);
|
||||
namePos += lineTypeLen;
|
||||
strcpy(featureExtraTagString+namePos, "\"=>\"");
|
||||
namePos += 4;
|
||||
strcpy(featureExtraTagString+namePos, (char *) featureExtraTag[i].value);
|
||||
namePos += lineValueLen;
|
||||
strcpy(featureExtraTagString+(namePos++), "\"");
|
||||
|
||||
xmlFree(featureExtraTag[i].type);
|
||||
xmlFree(featureExtraTag[i].value);
|
||||
}
|
||||
}
|
||||
paramValues[7] = (const char *)featureExtraTagString;
|
||||
|
||||
if (xmlStrlen(feature.parentPlaceID) == 0)
|
||||
paramValues[8] = "0";
|
||||
else
|
||||
paramValues[8] = (const char *)feature.parentPlaceID;
|
||||
|
||||
paramValues[9] = (const char *)feature.adminLevel;
|
||||
paramValues[10] = (const char *)feature.houseNumber;
|
||||
paramValues[11] = (const char *)feature.rankAddress;
|
||||
paramValues[12] = (const char *)feature.rankSearch;
|
||||
paramValues[13] = (const char *)feature.geometry;
|
||||
if (strlen(paramValues[3]) && strlen(paramValues[13]))
|
||||
{
|
||||
if (verbose) fprintf(stderr, "placex_insert: %s\n", paramValues[0]);
|
||||
res = PQexecPrepared(conn, "placex_insert", 14, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "index_placex: INSERT failed: %s", PQerrorMessage(conn));
|
||||
fprintf(stderr, "index_placex: INSERT failed: %s %s %s", paramValues[0], paramValues[1], paramValues[2]);
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
}
|
||||
|
||||
for (i = 0; i < featureAddressLines; i++)
|
||||
{
|
||||
// insert into place_address
|
||||
paramValues[0] = (const char *)place_id;
|
||||
paramValues[1] = (const char *)featureAddress[i].distance;
|
||||
if (paramValues[1] == NULL || strlen(paramValues[1]) == 0) paramValues[1] = "0";
|
||||
paramValues[2] = (const char *)featureAddress[i].type;
|
||||
paramValues[3] = (const char *)featureAddress[i].id;
|
||||
paramValues[4] = (const char *)featureAddress[i].key;
|
||||
paramValues[5] = (const char *)featureAddress[i].value;
|
||||
paramValues[6] = (const char *)featureAddress[i].isAddress;
|
||||
if (verbose) fprintf(stderr, "placex_insert: %s %s\n", paramValues[2], paramValues[3]);
|
||||
res = PQexecPrepared(conn, "place_addressline_insert", 7, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "place_addressline_insert: INSERT failed: %s", PQerrorMessage(conn));
|
||||
fprintf(stderr, "(%s,%s,%s,%s,%s,%s,%s)",paramValues[0],paramValues[1],paramValues[2],paramValues[3],paramValues[4],paramValues[5],paramValues[6]);
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
xmlFree(featureAddress[i].type);
|
||||
xmlFree(featureAddress[i].id);
|
||||
xmlFree(featureAddress[i].key);
|
||||
xmlFree(featureAddress[i].value);
|
||||
xmlFree(featureAddress[i].distance);
|
||||
}
|
||||
|
||||
if (featureNameLines)
|
||||
{
|
||||
if (xmlStrlen(feature.parentPlaceID) > 0 && featureAddressLines == 0)
|
||||
{
|
||||
paramValues[0] = (const char *)place_id;
|
||||
paramValues[1] = (const char *)feature.parentPlaceID;
|
||||
if (verbose) fprintf(stderr, "search_name_from_parent_insert: INSERT %s %s\n", paramValues[0], paramValues[1]);
|
||||
res = PQexecPrepared(conn, "search_name_from_parent_insert", 2, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "search_name_from_parent_insert: INSERT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
}
|
||||
else
|
||||
{
|
||||
paramValues[0] = (const char *)place_id;
|
||||
if (verbose) fprintf(stderr, "search_name_insert: INSERT %s\n", paramValues[0]);
|
||||
res = PQexecPrepared(conn, "search_name_insert", 1, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "search_name_insert: INSERT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
}
|
||||
}
|
||||
|
||||
partionQueryName = xmlHashLookup2(partionTableTagsHash, feature.key, feature.value);
|
||||
if (partionQueryName)
|
||||
{
|
||||
// insert into partition table
|
||||
paramValues[0] = (const char *)place_id;
|
||||
paramValues[1] = (const char *)feature.geometry;
|
||||
res = PQexecPrepared(conn, partionQueryName, 2, paramValues, NULL, NULL, 0);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "%s: INSERT failed: %s", partionQueryName, PQerrorMessage(conn));
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
}
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
for (i = 0; i < featureAddressLines; i++)
|
||||
{
|
||||
xmlFree(featureAddress[i].type);
|
||||
xmlFree(featureAddress[i].id);
|
||||
xmlFree(featureAddress[i].key);
|
||||
xmlFree(featureAddress[i].value);
|
||||
xmlFree(featureAddress[i].distance);
|
||||
}
|
||||
}
|
||||
|
||||
xmlFree(feature.placeID);
|
||||
xmlFree(feature.type);
|
||||
xmlFree(feature.id);
|
||||
xmlFree(feature.key);
|
||||
xmlFree(feature.value);
|
||||
xmlFree(feature.rankAddress);
|
||||
xmlFree(feature.rankSearch);
|
||||
if (feature.countryCode) xmlFree(feature.countryCode);
|
||||
if (feature.parentPlaceID) xmlFree(feature.parentPlaceID);
|
||||
if (feature.parentType) xmlFree(feature.parentType);
|
||||
if (feature.parentID) xmlFree(feature.parentID);
|
||||
// if (feature.name) xmlFree(feature.name);
|
||||
if (feature.adminLevel) xmlFree(feature.adminLevel);
|
||||
if (feature.houseNumber) xmlFree(feature.houseNumber);
|
||||
if (feature.geometry) xmlFree(feature.geometry);
|
||||
|
||||
// PQclear(resPlaceID);
|
||||
}
|
||||
}
|
||||
|
||||
static void processNode(xmlTextReaderPtr reader)
|
||||
{
|
||||
xmlChar *name;
|
||||
name = xmlTextReaderName(reader);
|
||||
if (name == NULL)
|
||||
{
|
||||
name = xmlStrdup(BAD_CAST "--");
|
||||
}
|
||||
|
||||
switch (xmlTextReaderNodeType(reader))
|
||||
{
|
||||
case XML_READER_TYPE_ELEMENT:
|
||||
StartElement(reader, name);
|
||||
if (xmlTextReaderIsEmptyElement(reader))
|
||||
EndElement(reader, name); /* No end_element for self closing tags! */
|
||||
break;
|
||||
case XML_READER_TYPE_END_ELEMENT:
|
||||
EndElement(reader, name);
|
||||
break;
|
||||
case XML_READER_TYPE_TEXT:
|
||||
case XML_READER_TYPE_CDATA:
|
||||
case XML_READER_TYPE_SIGNIFICANT_WHITESPACE:
|
||||
/* Ignore */
|
||||
break;
|
||||
default:
|
||||
fprintf(stderr, "Unknown node type %d\n", xmlTextReaderNodeType(reader));
|
||||
break;
|
||||
}
|
||||
|
||||
xmlFree(name);
|
||||
}
|
||||
|
||||
int nominatim_import(const char *conninfo, const char *partionTagsFilename, const char *filename)
|
||||
{
|
||||
xmlTextReaderPtr reader;
|
||||
int ret = 0;
|
||||
PGresult * res;
|
||||
FILE * partionTagsFile;
|
||||
char * partionQueryName;
|
||||
char partionQuerySQL[1024];
|
||||
|
||||
conn = PQconnectdb(conninfo);
|
||||
if (PQstatus(conn) != CONNECTION_OK)
|
||||
{
|
||||
fprintf(stderr, "Connection to database failed: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
partionTableTagsHash = xmlHashCreate(200);
|
||||
partionTableTagsHashDelete = xmlHashCreate(200);
|
||||
|
||||
partionTagsFile = fopen(partionTagsFilename, "rt");
|
||||
if (!partionTagsFile)
|
||||
{
|
||||
fprintf(stderr, "Unable to read partition tags file: %s\n", partionTagsFilename);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
char buffer[1024], osmkey[256], osmvalue[256];
|
||||
int fields;
|
||||
while (fgets(buffer, sizeof(buffer), partionTagsFile) != NULL)
|
||||
{
|
||||
fields = sscanf( buffer, "%23s %63s", osmkey, osmvalue );
|
||||
|
||||
if ( fields <= 0 ) continue;
|
||||
|
||||
if ( fields != 2 )
|
||||
{
|
||||
fprintf( stderr, "Error partition file\n");
|
||||
exit_nicely();
|
||||
}
|
||||
partionQueryName = malloc(strlen("partition_insert_")+strlen(osmkey)+strlen(osmvalue)+2);
|
||||
strcpy(partionQueryName, "partition_insert_");
|
||||
strcat(partionQueryName, osmkey);
|
||||
strcat(partionQueryName, "_");
|
||||
strcat(partionQueryName, osmvalue);
|
||||
|
||||
strcpy(partionQuerySQL, "insert into place_classtype_");
|
||||
strcat(partionQuerySQL, osmkey);
|
||||
strcat(partionQuerySQL, "_");
|
||||
strcat(partionQuerySQL, osmvalue);
|
||||
strcat(partionQuerySQL, " (place_id, centroid) values ($1, ST_Centroid(st_setsrid($2, 4326)))");
|
||||
|
||||
res = PQprepare(conn, partionQueryName, partionQuerySQL, 2, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare %s: %s\n", partionQueryName, PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
xmlHashAddEntry2(partionTableTagsHash, BAD_CAST osmkey, BAD_CAST osmvalue, BAD_CAST partionQueryName);
|
||||
|
||||
partionQueryName = malloc(strlen("partition_delete_")+strlen(osmkey)+strlen(osmvalue)+2);
|
||||
strcpy(partionQueryName, "partition_delete_");
|
||||
strcat(partionQueryName, osmkey);
|
||||
strcat(partionQueryName, "_");
|
||||
strcat(partionQueryName, osmvalue);
|
||||
|
||||
strcpy(partionQuerySQL, "delete from place_classtype_");
|
||||
strcat(partionQuerySQL, osmkey);
|
||||
strcat(partionQuerySQL, "_");
|
||||
strcat(partionQuerySQL, osmvalue);
|
||||
strcat(partionQuerySQL, " where place_id = $1::integer");
|
||||
|
||||
res = PQprepare(conn, partionQueryName, partionQuerySQL, 1, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare %s: %s\n", partionQueryName, PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
xmlHashAddEntry2(partionTableTagsHashDelete, BAD_CAST osmkey, BAD_CAST osmvalue, BAD_CAST partionQueryName);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "get_new_place_id",
|
||||
"select nextval('seq_place')",
|
||||
0, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare get_new_place_id: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "get_place_id",
|
||||
"select place_id from placex where osm_type = $1 and osm_id = $2 and class = $3 and type = $4",
|
||||
4, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare get_place_id: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "placex_insert",
|
||||
"insert into placex (place_id,osm_type,osm_id,class,type,name,country_code,extratags,parent_place_id,admin_level,housenumber,rank_address,rank_search,geometry) "
|
||||
"values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, st_setsrid($14, 4326))",
|
||||
14, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare placex_insert: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "search_name_insert",
|
||||
"insert into search_name (place_id, search_rank, address_rank, country_code, name_vector, nameaddress_vector, centroid) "
|
||||
"select place_id, rank_search, rank_address, country_code, make_keywords(name), "
|
||||
"(select uniq(sort(array_agg(parent_search_name.name_vector))) from search_name as parent_search_name where place_id in "
|
||||
"(select distinct address_place_id from place_addressline where place_addressline.place_id = $1 limit 1000)"
|
||||
"), st_centroid(geometry) from placex "
|
||||
"where place_id = $1",
|
||||
1, NULL);
|
||||
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare search_name_insert: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "search_name_from_parent_insert",
|
||||
"insert into search_name (place_id, search_rank, address_rank, country_code, name_vector, nameaddress_vector, centroid) "
|
||||
"select place_id, rank_search, rank_address, country_code, make_keywords(name), "
|
||||
"(select uniq(sort(name_vector+nameaddress_vector)) from search_name as parent_search_name "
|
||||
"where parent_search_name.place_id = $2 ), st_centroid(geometry) from placex "
|
||||
"where place_id = $1",
|
||||
2, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare search_name_insert: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "place_addressline_insert",
|
||||
"insert into place_addressline (place_id, address_place_id, fromarea, isaddress, distance, cached_rank_address) "
|
||||
"select $1, place_id, false, $7, $2, rank_address from placex where osm_type = $3 and osm_id = $4 and class = $5 and type = $6",
|
||||
7, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare place_addressline_insert: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "placex_delete",
|
||||
"delete from placex where place_id = $1",
|
||||
1, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare placex_delete: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "search_name_delete",
|
||||
"delete from search_name where place_id = $1",
|
||||
1, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare search_name_delete: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQprepare(conn, "place_addressline_delete",
|
||||
"delete from place_addressline where place_id = $1",
|
||||
1, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to prepare place_addressline_delete: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
featureCount = 0;
|
||||
|
||||
reader = inputUTF8(filename);
|
||||
|
||||
if (reader == NULL)
|
||||
{
|
||||
fprintf(stderr, "Unable to open %s\n", filename);
|
||||
return 1;
|
||||
}
|
||||
|
||||
ret = xmlTextReaderRead(reader);
|
||||
while (ret == 1)
|
||||
{
|
||||
processNode(reader);
|
||||
ret = xmlTextReaderRead(reader);
|
||||
}
|
||||
if (ret != 0)
|
||||
{
|
||||
fprintf(stderr, "%s : failed to parse\n", filename);
|
||||
return ret;
|
||||
}
|
||||
|
||||
xmlFreeTextReader(reader);
|
||||
xmlHashFree(partionTableTagsHash, NULL);
|
||||
xmlHashFree(partionTableTagsHashDelete, NULL);
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
#ifndef IMPORT_H
|
||||
#define IMPORT_H
|
||||
|
||||
int nominatim_import(const char *conninfo, const char *partionTagsFilename, const char *filename);
|
||||
|
||||
#endif
|
||||
@@ -1,547 +0,0 @@
|
||||
/*
|
||||
* triggers indexing (reparenting etc.) by resetting indexed_status: update placex/osmline set indexed_status = 0 where indexed_status > 0
|
||||
* triggers placex_update and osmline_update
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <assert.h>
|
||||
#include <pthread.h>
|
||||
#include <time.h>
|
||||
#include <stdint.h>
|
||||
|
||||
#include <libpq-fe.h>
|
||||
|
||||
#include "nominatim.h"
|
||||
#include "index.h"
|
||||
#include "export.h"
|
||||
#include "postgresql.h"
|
||||
|
||||
extern int verbose;
|
||||
|
||||
void run_indexing(int rank, int interpolation, PGconn *conn, int num_threads,
|
||||
struct index_thread_data * thread_data, const char *structuredoutputfile)
|
||||
{
|
||||
int tuples, count, sleepcount;
|
||||
pthread_mutex_t count_mutex = PTHREAD_MUTEX_INITIALIZER;
|
||||
|
||||
time_t rankStartTime;
|
||||
int rankTotalTuples;
|
||||
int rankCountTuples;
|
||||
float rankPerSecond;
|
||||
|
||||
PGresult * resSectors;
|
||||
PGresult * resPlaces;
|
||||
PGresult * resNULL;
|
||||
|
||||
int i;
|
||||
int iSector;
|
||||
int iResult;
|
||||
|
||||
const char *paramValues[2];
|
||||
int paramLengths[2];
|
||||
int paramFormats[2];
|
||||
uint32_t paramRank;
|
||||
uint32_t paramSector;
|
||||
uint32_t sector;
|
||||
|
||||
xmlTextWriterPtr writer;
|
||||
pthread_mutex_t writer_mutex = PTHREAD_MUTEX_INITIALIZER;
|
||||
|
||||
// Create the output file
|
||||
writer = NULL;
|
||||
if (structuredoutputfile)
|
||||
{
|
||||
writer = nominatim_exportXMLStart(structuredoutputfile);
|
||||
}
|
||||
|
||||
if (interpolation)
|
||||
{
|
||||
fprintf(stderr, "Starting interpolation lines (location_property_osmline)\n");
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "Starting rank %d\n", rank);
|
||||
}
|
||||
|
||||
rankCountTuples = 0;
|
||||
rankPerSecond = 0;
|
||||
|
||||
paramRank = PGint32(rank);
|
||||
paramValues[0] = (char *)¶mRank;
|
||||
paramLengths[0] = sizeof(paramRank);
|
||||
paramFormats[0] = 1;
|
||||
|
||||
if (interpolation)
|
||||
{
|
||||
resSectors = PQexecPrepared(conn, "index_sectors_osmline", 0, NULL, 0, NULL, 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
resSectors = PQexecPrepared(conn, "index_sectors", 1, paramValues, paramLengths, paramFormats, 1);
|
||||
}
|
||||
if (PQresultStatus(resSectors) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "index_sectors: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(resSectors);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (PQftype(resSectors, 0) != PG_OID_INT4)
|
||||
{
|
||||
fprintf(stderr, "Sector value has unexpected type\n");
|
||||
PQclear(resSectors);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (PQftype(resSectors, 1) != PG_OID_INT8)
|
||||
{
|
||||
fprintf(stderr, "Sector value has unexpected type\n");
|
||||
PQclear(resSectors);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
rankTotalTuples = 0;
|
||||
for (iSector = 0; iSector < PQntuples(resSectors); iSector++)
|
||||
{
|
||||
rankTotalTuples += PGint64(*((uint64_t *)PQgetvalue(resSectors, iSector, 1)));
|
||||
}
|
||||
|
||||
rankStartTime = time(0);
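/* The loop below runs one pass per sector plus one extra pass: on pass i it
   asynchronously requests the place_ids of sector i (PQsendQueryPrepared) and
   collects the rows requested on pass i-1 (PQgetResult), so the next sector is
   fetched by the server while the worker threads index the current one. */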
|
||||
for (iSector = 0; iSector <= PQntuples(resSectors); iSector++)
|
||||
{
|
||||
if (iSector > 0)
|
||||
{
|
||||
resPlaces = PQgetResult(conn);
|
||||
if (PQresultStatus(resPlaces) != PGRES_TUPLES_OK)
|
||||
{
|
||||
fprintf(stderr, "index_sector_places: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(resPlaces);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (PQftype(resPlaces, 0) != PG_OID_INT8)
|
||||
{
|
||||
fprintf(stderr, "Place_id value has unexpected type\n");
|
||||
PQclear(resPlaces);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
resNULL = PQgetResult(conn);
|
||||
if (resNULL != NULL)
|
||||
{
|
||||
fprintf(stderr, "Unexpected non-null response\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
if (iSector < PQntuples(resSectors))
|
||||
{
|
||||
sector = PGint32(*((uint32_t *)PQgetvalue(resSectors, iSector, 0)));
|
||||
// fprintf(stderr, "\n Starting sector %d size %ld\n", sector, PGint64(*((uint64_t *)PQgetvalue(resSectors, iSector, 1))));
|
||||
|
||||
// Get all the place_id's for this sector
|
||||
paramRank = PGint32(rank);
|
||||
paramSector = PGint32(sector);
|
||||
if (rankTotalTuples-rankCountTuples < num_threads*1000)
|
||||
{
|
||||
// few tuples left for this rank: fetch the remainder in one go, ignoring sectors
|
||||
if (interpolation)
|
||||
{
|
||||
iResult = PQsendQueryPrepared(conn, "index_nosector_places_osmline", 0, NULL, 0, NULL, 1);
|
||||
}
|
||||
else
|
||||
{
|
||||
paramValues[0] = (char *)¶mRank;
|
||||
paramLengths[0] = sizeof(paramRank);
|
||||
paramFormats[0] = 1;
|
||||
iResult = PQsendQueryPrepared(conn, "index_nosector_places", 1, paramValues, paramLengths, paramFormats, 1);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (interpolation)
|
||||
{
|
||||
iResult = PQsendQueryPrepared(conn, "index_sector_places_osmline", 1, paramValues, paramLengths, paramFormats, 1);
|
||||
paramValues[0] = (char *)¶mSector;
|
||||
paramLengths[0] = sizeof(paramSector);
|
||||
paramFormats[0] = 1;
|
||||
}
|
||||
else
|
||||
{
|
||||
paramValues[0] = (char *)¶mRank;
|
||||
paramLengths[0] = sizeof(paramRank);
|
||||
paramFormats[0] = 1;
|
||||
paramValues[1] = (char *)¶mSector;
|
||||
paramLengths[1] = sizeof(paramSector);
|
||||
paramFormats[1] = 1;
|
||||
iResult = PQsendQueryPrepared(conn, "index_sector_places", 2, paramValues, paramLengths, paramFormats, 1);
|
||||
}
|
||||
}
|
||||
if (!iResult)
|
||||
{
|
||||
fprintf(stderr, "index_sector_places: SELECT failed: %s", PQerrorMessage(conn));
|
||||
PQclear(resPlaces);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
if (iSector > 0)
|
||||
{
|
||||
count = 0;
|
||||
rankPerSecond = 0;
|
||||
tuples = PQntuples(resPlaces);
|
||||
|
||||
if (tuples > 0)
|
||||
{
|
||||
// Spawn threads
|
||||
for (i = 0; i < num_threads; i++)
|
||||
{
|
||||
thread_data[i].res = resPlaces;
|
||||
thread_data[i].tuples = tuples;
|
||||
thread_data[i].count = &count;
|
||||
thread_data[i].count_mutex = &count_mutex;
|
||||
thread_data[i].writer = writer;
|
||||
thread_data[i].writer_mutex = &writer_mutex;
|
||||
if (interpolation)
|
||||
{
|
||||
thread_data[i].table = 0; // use interpolations table
|
||||
}
|
||||
else
|
||||
{
|
||||
thread_data[i].table = 1; // use placex table
|
||||
}
|
||||
pthread_create(&thread_data[i].thread, NULL, &nominatim_indexThread, (void *)&thread_data[i]);
|
||||
}
|
||||
|
||||
// Monitor threads to give user feedback
|
||||
sleepcount = 0;
|
||||
while (count < tuples)
|
||||
{
|
||||
usleep(1000);
|
||||
|
||||
// Aim for one update per second
|
||||
if (sleepcount++ > 1000)
|
||||
{
|
||||
rankPerSecond = ((float)rankCountTuples + (float)count) / MAX(difftime(time(0), rankStartTime),1);
|
||||
if(interpolation)
|
||||
{
|
||||
fprintf(stderr, " Done %i in %i @ %f per second - Interpolation lines ETA (seconds): %f\n", (rankCountTuples + count), (int)(difftime(time(0), rankStartTime)), rankPerSecond, ((float)(rankTotalTuples - (rankCountTuples + count)))/rankPerSecond);
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, " Done %i in %i @ %f per second - Rank %i ETA (seconds): %f\n", (rankCountTuples + count), (int)(difftime(time(0), rankStartTime)), rankPerSecond, rank, ((float)(rankTotalTuples - (rankCountTuples + count)))/rankPerSecond);
|
||||
}
|
||||
|
||||
sleepcount = 0;
|
||||
}
|
||||
}
|
||||
|
||||
// Wait for everything to finish
|
||||
for (i = 0; i < num_threads; i++)
|
||||
{
|
||||
pthread_join(thread_data[i].thread, NULL);
|
||||
}
|
||||
|
||||
rankCountTuples += tuples;
|
||||
}
|
||||
|
||||
// Finished sector
|
||||
rankPerSecond = (float)rankCountTuples / MAX(difftime(time(0), rankStartTime),1);
|
||||
fprintf(stderr, " Done %i in %i @ %f per second - ETA (seconds): %f\n", rankCountTuples, (int)(difftime(time(0), rankStartTime)), rankPerSecond, ((float)(rankTotalTuples - rankCountTuples))/rankPerSecond);
|
||||
|
||||
PQclear(resPlaces);
|
||||
}
|
||||
if (rankTotalTuples-rankCountTuples < num_threads*20 && iSector < PQntuples(resSectors))
|
||||
{
|
||||
iSector = PQntuples(resSectors) - 1;
|
||||
}
|
||||
}
|
||||
// Finished rank
|
||||
fprintf(stderr, "\r Done %i in %i @ %f per second - FINISHED\n\n", rankCountTuples, (int)(difftime(time(0), rankStartTime)), rankPerSecond);
|
||||
|
||||
PQclear(resSectors);
|
||||
}
|
||||
|
||||
void nominatim_index(int rank_min, int rank_max, int num_threads, const char *conninfo, const char *structuredoutputfile)
|
||||
{
|
||||
struct index_thread_data *thread_data;
|
||||
|
||||
PGconn *conn;
|
||||
PGresult *res;
|
||||
int num_rows = 0, status_code = 0;
|
||||
int db_has_locale = 0;
|
||||
char *result_string = NULL;
|
||||
|
||||
int rank;
|
||||
|
||||
int i;
|
||||
|
||||
xmlTextWriterPtr writer;
|
||||
pthread_mutex_t writer_mutex = PTHREAD_MUTEX_INITIALIZER;
|
||||
|
||||
Oid pg_prepare_params[2];
|
||||
|
||||
conn = PQconnectdb(conninfo);
|
||||
if (PQstatus(conn) != CONNECTION_OK)
|
||||
{
|
||||
fprintf(stderr, "Connection to database failed: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
res = PQexec(conn, "SHOW lc_messages");
|
||||
status_code = PQresultStatus(res);
|
||||
if (status_code != PGRES_TUPLES_OK && status_code != PGRES_SINGLE_TUPLE) {
|
||||
fprintf(stderr, "Failed determining database locale: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
num_rows = PQntuples(res);
|
||||
if (num_rows > 0)
|
||||
{
|
||||
result_string = PQgetvalue(res, 0, 0);
|
||||
if (result_string && (strlen(result_string) > 0) && (strcasecmp(result_string, "C") != 0))
|
||||
{
|
||||
// non-default locale if the result exists, is non-empty, and is not "C"
|
||||
db_has_locale = 1;
|
||||
}
|
||||
}
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT4;
|
||||
res = PQprepare(conn, "index_sectors",
|
||||
"select geometry_sector,count(*) from placex where rank_search = $1 and indexed_status > 0 group by geometry_sector order by geometry_sector",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_sectors: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
res = PQprepare(conn, "index_sectors_osmline",
|
||||
"select geometry_sector,count(*) from location_property_osmline where indexed_status > 0 group by geometry_sector order by geometry_sector",
|
||||
0, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_sectors: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT4;
|
||||
res = PQprepare(conn, "index_nosectors",
|
||||
"select 0::integer,count(*) from placex where rank_search = $1 and indexed_status > 0",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_sectors: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT4;
|
||||
pg_prepare_params[1] = PG_OID_INT4;
|
||||
res = PQprepare(conn, "index_sector_places",
|
||||
"select place_id from placex where rank_search = $1 and geometry_sector = $2 and indexed_status > 0",
|
||||
2, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_sector_places: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT4;
|
||||
res = PQprepare(conn, "index_nosector_places",
|
||||
"select place_id from placex where rank_search = $1 and indexed_status > 0 order by geometry_sector",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_nosector_places: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT4;
|
||||
res = PQprepare(conn, "index_sector_places_osmline",
|
||||
"select place_id from location_property_osmline where geometry_sector = $1 and indexed_status > 0",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_sector_places: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
res = PQprepare(conn, "index_nosector_places_osmline",
|
||||
"select place_id from location_property_osmline where indexed_status > 0 order by geometry_sector",
|
||||
0, NULL);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_nosector_places: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
// Build the data for each thread
|
||||
thread_data = (struct index_thread_data *)malloc(sizeof(struct index_thread_data)*num_threads);
|
||||
for (i = 0; i < num_threads; i++)
|
||||
{
|
||||
thread_data[i].conn = PQconnectdb(conninfo);
|
||||
if (PQstatus(thread_data[i].conn) != CONNECTION_OK)
|
||||
{
|
||||
fprintf(stderr, "Connection to database failed: %s\n", PQerrorMessage(thread_data[i].conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT8;
|
||||
res = PQprepare(thread_data[i].conn, "index_placex",
|
||||
"update placex set indexed_status = 0 where place_id = $1",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_placex: %s\n", PQerrorMessage(thread_data[i].conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
pg_prepare_params[0] = PG_OID_INT8;
|
||||
res = PQprepare(thread_data[i].conn, "index_osmline",
|
||||
"update location_property_osmline set indexed_status = 0 where place_id = $1",
|
||||
1, pg_prepare_params);
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed preparing index_osmline: %s\n", PQerrorMessage(thread_data[i].conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
|
||||
if (db_has_locale)
|
||||
{
|
||||
// Make sure the error message is not localized as we parse it later.
|
||||
res = PQexec(thread_data[i].conn, "SET lc_messages TO 'C'");
|
||||
if (PQresultStatus(res) != PGRES_COMMAND_OK)
|
||||
{
|
||||
fprintf(stderr, "Failed to set langauge: %s\n", PQerrorMessage(thread_data[i].conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQclear(res);
|
||||
}
|
||||
nominatim_exportCreatePreparedQueries(thread_data[i].conn);
|
||||
}
|
||||
|
||||
fprintf(stderr, "Starting indexing rank (%i to %i) using %i threads\n", rank_min, rank_max, num_threads);
|
||||
|
||||
for (rank = rank_min; rank <= rank_max; rank++)
|
||||
{
|
||||
// OSMLINE: do reindexing (=> reparenting) for interpolation lines at rank 30, but before all other objects of rank 30
|
||||
// reason: houses (rank 30) depend on the updated interpolation line, when reparenting (see placex_update in functions.sql)
|
||||
if (rank == 30)
|
||||
{
|
||||
run_indexing(rank, 1, conn, num_threads, thread_data, structuredoutputfile);
|
||||
}
|
||||
run_indexing(rank, 0, conn, num_threads, thread_data, structuredoutputfile);
|
||||
}
|
||||
// Close all connections
|
||||
for (i = 0; i < num_threads; i++)
|
||||
{
|
||||
PQfinish(thread_data[i].conn);
|
||||
}
|
||||
PQfinish(conn);
|
||||
}
|
||||
|
||||
void *nominatim_indexThread(void * thread_data_in)
|
||||
{
|
||||
struct index_thread_data * thread_data = (struct index_thread_data * )thread_data_in;
|
||||
struct export_data querySet;
|
||||
|
||||
PGresult *res;
|
||||
|
||||
const char *paramValues[1];
|
||||
int paramLengths[1];
|
||||
int paramFormats[1];
|
||||
uint64_t paramPlaceID;
|
||||
uint64_t place_id;
|
||||
time_t updateStartTime;
|
||||
unsigned table;
|
||||
|
||||
table = thread_data->table;
|
||||
|
||||
while (1)
|
||||
{
|
||||
pthread_mutex_lock( thread_data->count_mutex );
|
||||
if (*(thread_data->count) >= thread_data->tuples)
|
||||
{
|
||||
pthread_mutex_unlock( thread_data->count_mutex );
|
||||
break;
|
||||
}
|
||||
|
||||
place_id = PGint64(*((uint64_t *)PQgetvalue(thread_data->res, *thread_data->count, 0)));
|
||||
(*thread_data->count)++;
|
||||
|
||||
pthread_mutex_unlock( thread_data->count_mutex );
|
||||
|
||||
if (verbose) fprintf(stderr, " Processing place_id %ld\n", place_id);
|
||||
|
||||
updateStartTime = time(0);
|
||||
int done = 0;
|
||||
|
||||
if (thread_data->writer)
|
||||
{
|
||||
nominatim_exportPlaceQueries(place_id, thread_data->conn, &querySet);
|
||||
}
|
||||
|
||||
while(!done)
|
||||
{
|
||||
paramPlaceID = PGint64(place_id);
|
||||
paramValues[0] = (char *)¶mPlaceID;
|
||||
paramLengths[0] = sizeof(paramPlaceID);
|
||||
paramFormats[0] = 1;
|
||||
if (table == 1) // table=1 for placex
|
||||
{
|
||||
res = PQexecPrepared(thread_data->conn, "index_placex", 1, paramValues, paramLengths, paramFormats, 1);
|
||||
}
|
||||
else // table=0 for osmline
|
||||
{
|
||||
res = PQexecPrepared(thread_data->conn, "index_osmline", 1, paramValues, paramLengths, paramFormats, 1);
|
||||
}
|
||||
if (PQresultStatus(res) == PGRES_COMMAND_OK)
|
||||
done = 1;
|
||||
else
|
||||
{
|
||||
if (!strncmp(PQerrorMessage(thread_data->conn), "ERROR: deadlock detected", 25))
|
||||
{
|
||||
if (table == 1)
|
||||
{
|
||||
fprintf(stderr, "index_placex: UPDATE failed - deadlock, retrying (%ld)\n", place_id);
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "index_osmline: UPDATE failed - deadlock, retrying (%ld)\n", place_id);
|
||||
}
|
||||
PQclear(res);
|
||||
sleep(rand() % 10);
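/* Back off for a random 0-9 seconds before retrying so that threads that just
   deadlocked against each other do not immediately collide again. */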
|
||||
}
|
||||
else
|
||||
{
|
||||
if (table == 1)
|
||||
{
|
||||
fprintf(stderr, "index_placex: UPDATE failed: %s", PQerrorMessage(thread_data->conn));
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(stderr, "index_osmline: UPDATE failed: %s", PQerrorMessage(thread_data->conn));
|
||||
}
|
||||
PQclear(res);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
}
|
||||
PQclear(res);
|
||||
if (difftime(time(0), updateStartTime) > 1) fprintf(stderr, " Slow place_id %ld\n", place_id);
|
||||
|
||||
if (thread_data->writer)
|
||||
{
|
||||
nominatim_exportPlace(place_id, thread_data->conn, thread_data->writer, thread_data->writer_mutex, &querySet);
|
||||
nominatim_exportFreeQueries(&querySet);
|
||||
}
|
||||
}
|
||||
|
||||
return NULL;
|
||||
}
|
||||
@@ -1,22 +0,0 @@
|
||||
#ifndef INDEX_H
|
||||
#define INDEX_H
|
||||
|
||||
#include <libxml/encoding.h>
|
||||
#include <libxml/xmlwriter.h>
|
||||
|
||||
struct index_thread_data
|
||||
{
|
||||
pthread_t thread;
|
||||
PGconn * conn;
|
||||
PGresult * res;
|
||||
int tuples;
|
||||
int * count;
|
||||
pthread_mutex_t * count_mutex;
|
||||
xmlTextWriterPtr writer;
|
||||
pthread_mutex_t * writer_mutex;
|
||||
unsigned table;
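/* All worker threads share the same PGresult of place_ids: *count is the next
   row to claim, guarded by count_mutex; writer and writer_mutex serialise
   access to the optional structured-output XML writer. */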
|
||||
};
|
||||
void nominatim_index(int rank_min, int rank_max, int num_threads, const char *conninfo, const char *structuredoutputfile);
|
||||
void *nominatim_indexThread(void * thread_data_in);
|
||||
|
||||
#endif
|
||||
@@ -1,242 +0,0 @@
|
||||
#define _FILE_OFFSET_BITS 64
|
||||
#define _LARGEFILE64_SOURCE
|
||||
|
||||
#ifdef __MINGW_H
|
||||
# include <windows.h>
|
||||
#else
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <unistd.h>
|
||||
#include <string.h>
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
#include <zlib.h>
|
||||
#endif
|
||||
|
||||
#include <libxml/xmlreader.h>
|
||||
#include <bzlib.h>
|
||||
|
||||
#include "input.h"
|
||||
|
||||
struct Input
|
||||
{
|
||||
char *name;
|
||||
enum { plainFile, gzipFile, bzip2File } type;
|
||||
void *fileHandle;
|
||||
// needed by bzip2 when decompressing from multiple streams. other
|
||||
// decompressors must ignore it.
|
||||
FILE *systemHandle;
|
||||
int eof;
|
||||
char buf[4096];
|
||||
int buf_ptr, buf_fill;
|
||||
};
|
||||
|
||||
// tries to re-open the bz stream at the next stream start.
|
||||
// returns 0 on success, -1 on failure.
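// Multi-stream .bz2 files (as produced by parallel compressors such as pbzip2)
// make BZ2_bzRead() stop with BZ_STREAM_END at each internal stream boundary,
// which is why the reader is re-opened here instead of treating it as EOF.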
|
||||
int bzReOpen(struct Input *ctx, int *error)
|
||||
{
|
||||
// for copying out the last unused part of the block which
|
||||
// has an EOS token in it. needed for re-initialising the
|
||||
// next stream.
|
||||
unsigned char unused[BZ_MAX_UNUSED];
|
||||
void *unused_tmp_ptr = NULL;
|
||||
int nUnused, i;
|
||||
|
||||
BZ2_bzReadGetUnused(error, (BZFILE *)(ctx->fileHandle), &unused_tmp_ptr, &nUnused);
|
||||
if (*error != BZ_OK) return -1;
|
||||
|
||||
// when bzReadClose is called the unused buffer is deallocated,
|
||||
// so it needs to be copied somewhere safe first.
|
||||
for (i = 0; i < nUnused; ++i)
|
||||
unused[i] = ((unsigned char *)unused_tmp_ptr)[i];
|
||||
|
||||
BZ2_bzReadClose(error, (BZFILE *)(ctx->fileHandle));
|
||||
if (*error != BZ_OK) return -1;
|
||||
|
||||
// reassign the file handle
|
||||
ctx->fileHandle = BZ2_bzReadOpen(error, ctx->systemHandle, 0, 0, unused, nUnused);
|
||||
if (ctx->fileHandle == NULL || *error != BZ_OK) return -1;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int readFile(void *context, char * buffer, int len)
|
||||
{
|
||||
struct Input *ctx = context;
|
||||
void *f = ctx->fileHandle;
|
||||
int l = 0, error = 0;
|
||||
|
||||
if (ctx->eof || (len == 0))
|
||||
return 0;
|
||||
|
||||
switch (ctx->type)
|
||||
{
|
||||
case plainFile:
|
||||
l = read(*(int *)f, buffer, len);
|
||||
if (l <= 0) ctx->eof = 1;
|
||||
break;
|
||||
case gzipFile:
|
||||
l = gzread((gzFile)f, buffer, len);
|
||||
if (l <= 0) ctx->eof = 1;
|
||||
break;
|
||||
case bzip2File:
|
||||
l = BZ2_bzRead(&error, (BZFILE *)f, buffer, len);
|
||||
|
||||
// error codes BZ_OK and BZ_STREAM_END are both "OK", but the stream
|
||||
// end means the reader needs to be reset from the original handle.
|
||||
if (error != BZ_OK)
|
||||
{
|
||||
// for stream errors, try re-opening the stream before admitting defeat.
|
||||
if (error != BZ_STREAM_END || bzReOpen(ctx, &error) != 0)
|
||||
{
|
||||
l = 0;
|
||||
ctx->eof = 1;
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
fprintf(stderr, "Bad file type\n");
|
||||
break;
|
||||
}
|
||||
|
||||
if (l < 0)
|
||||
{
|
||||
fprintf(stderr, "File reader received error %d (%d)\n", l, error);
|
||||
l = 0;
|
||||
}
|
||||
|
||||
return l;
|
||||
}
|
||||
|
||||
char inputGetChar(void *context)
|
||||
{
|
||||
struct Input *ctx = context;
|
||||
|
||||
if (ctx->buf_ptr == ctx->buf_fill)
|
||||
{
|
||||
ctx->buf_fill = readFile(context, &ctx->buf[0], sizeof(ctx->buf));
|
||||
ctx->buf_ptr = 0;
|
||||
if (ctx->buf_fill == 0)
|
||||
return 0;
|
||||
if (ctx->buf_fill < 0)
|
||||
{
|
||||
perror("Error while reading file");
|
||||
exit(1);
|
||||
}
|
||||
}
|
||||
//readFile(context, &c, 1);
|
||||
return ctx->buf[ctx->buf_ptr++];
|
||||
}
|
||||
|
||||
int inputEof(void *context)
|
||||
{
|
||||
return ((struct Input *)context)->eof;
|
||||
}
|
||||
|
||||
void *inputOpen(const char *name)
|
||||
{
|
||||
const char *ext = strrchr(name, '.');
|
||||
struct Input *ctx = malloc (sizeof(*ctx));
|
||||
|
||||
if (!ctx)
|
||||
return NULL;
|
||||
|
||||
memset(ctx, 0, sizeof(*ctx));
|
||||
|
||||
ctx->name = strdup(name);
|
||||
|
||||
if (ext && !strcmp(ext, ".gz"))
|
||||
{
|
||||
ctx->fileHandle = (void *)gzopen(name, "rb");
|
||||
ctx->type = gzipFile;
|
||||
}
|
||||
else if (ext && !strcmp(ext, ".bz2"))
|
||||
{
|
||||
int error = 0;
|
||||
ctx->systemHandle = fopen(name, "rb");
|
||||
if (!ctx->systemHandle)
|
||||
{
|
||||
fprintf(stderr, "error while opening file %s\n", name);
|
||||
exit(10);
|
||||
}
|
||||
|
||||
ctx->fileHandle = (void *)BZ2_bzReadOpen(&error, ctx->systemHandle, 0, 0, NULL, 0);
|
||||
ctx->type = bzip2File;
|
||||
|
||||
}
|
||||
else
|
||||
{
|
||||
int *pfd = malloc(sizeof(*pfd));
|
||||
if (pfd)
|
||||
{
|
||||
if (!strcmp(name, "-"))
|
||||
{
|
||||
*pfd = STDIN_FILENO;
|
||||
}
|
||||
else
|
||||
{
|
||||
int flags = O_RDONLY;
|
||||
#ifdef O_LARGEFILE
|
||||
flags |= O_LARGEFILE;
|
||||
#endif
|
||||
*pfd = open(name, flags);
|
||||
if (*pfd < 0)
|
||||
{
|
||||
free(pfd);
|
||||
pfd = NULL;
|
||||
}
|
||||
}
|
||||
}
|
||||
ctx->fileHandle = (void *)pfd;
|
||||
ctx->type = plainFile;
|
||||
}
|
||||
if (!ctx->fileHandle)
|
||||
{
|
||||
fprintf(stderr, "error while opening file %s\n", name);
|
||||
exit(10);
|
||||
}
|
||||
ctx->buf_ptr = 0;
|
||||
ctx->buf_fill = 0;
|
||||
return (void *)ctx;
|
||||
}
|
||||
|
||||
int inputClose(void *context)
|
||||
{
|
||||
struct Input *ctx = context;
|
||||
void *f = ctx->fileHandle;
|
||||
|
||||
switch (ctx->type)
|
||||
{
|
||||
case plainFile:
|
||||
close(*(int *)f);
|
||||
free(f);
|
||||
break;
|
||||
case gzipFile:
|
||||
gzclose((gzFile)f);
|
||||
break;
|
||||
case bzip2File:
|
||||
BZ2_bzclose((BZFILE *)f);
|
||||
break;
|
||||
default:
|
||||
fprintf(stderr, "Bad file type\n");
|
||||
break;
|
||||
}
|
||||
|
||||
free(ctx->name);
|
||||
free(ctx);
|
||||
return 0;
|
||||
}
|
||||
|
||||
xmlTextReaderPtr inputUTF8(const char *name)
|
||||
{
|
||||
void *ctx = inputOpen(name);
|
||||
|
||||
if (!ctx)
|
||||
{
|
||||
fprintf(stderr, "Input reader create failed for: %s\n", name);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return xmlReaderForIO(readFile, inputClose, (void *)ctx, NULL, NULL, 0);
|
||||
}
|
||||
@@ -1,11 +0,0 @@
|
||||
#ifndef INPUT_H
|
||||
#define INPUT_H
|
||||
|
||||
int readFile(void *context, char * buffer, int len);
|
||||
int inputClose(void *context);
|
||||
void *inputOpen(const char *name);
|
||||
char inputGetChar(void *context);
|
||||
int inputEof(void *context);
|
||||
xmlTextReaderPtr inputUTF8(const char *name);
|
||||
|
||||
#endif
|
||||
@@ -1,255 +0,0 @@
|
||||
/*
|
||||
#-----------------------------------------------------------------------------
|
||||
# nominatim - [description]
|
||||
#-----------------------------------------------------------------------------
|
||||
# Copyright 2010, Brian Quinion
|
||||
# Based on osm2pgsql
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#-----------------------------------------------------------------------------
|
||||
*/
|
||||
|
||||
#define _GNU_SOURCE
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <assert.h>
|
||||
#include <getopt.h>
|
||||
#include <libgen.h>
|
||||
#include <pthread.h>
|
||||
#include <time.h>
|
||||
|
||||
#include <libpq-fe.h>
|
||||
|
||||
#include "nominatim.h"
|
||||
#include "postgresql.h"
|
||||
#include "sprompt.h"
|
||||
#include "index.h"
|
||||
#include "export.h"
|
||||
#include "import.h"
|
||||
|
||||
int verbose;
|
||||
|
||||
void exit_nicely(void)
|
||||
{
|
||||
fprintf(stderr, "Error occurred, cleaning up\n");
|
||||
exit(1);
|
||||
}
|
||||
|
||||
void short_usage(char *arg0)
|
||||
{
|
||||
const char *name = basename(arg0);
|
||||
|
||||
fprintf(stderr, "Usage error. For further information see:\n");
|
||||
fprintf(stderr, "\t%s -h|--help\n", name);
|
||||
}
|
||||
|
||||
static void long_usage(char *arg0)
|
||||
{
|
||||
const char *name = basename(arg0);
|
||||
|
||||
fprintf(stderr, "Usage:\n");
|
||||
fprintf(stderr, "\t%s [options] planet.osms\n", name);
|
||||
fprintf(stderr, "\nThis will import the structured osm data into a PostgreSQL database\n");
|
||||
fprintf(stderr, "suitable for nominatim search engine\n");
|
||||
fprintf(stderr, "\nOptions:\n");
|
||||
fprintf(stderr, " -d|--database\tThe name of the PostgreSQL database to connect\n");
|
||||
fprintf(stderr, " \tto (default: nominatim).\n");
|
||||
fprintf(stderr, " -U|--username\tPostgresql user name.\n");
|
||||
fprintf(stderr, " -W|--password\tForce password prompt.\n");
|
||||
fprintf(stderr, " -H|--host\t\tDatabase server hostname or socket location.\n");
|
||||
fprintf(stderr, " -P|--port\t\tDatabase server port.\n");
|
||||
fprintf(stderr, " -i|--index\t\tIndex the database.\n");
|
||||
fprintf(stderr, " -e|--export\t\tGenerate a structured file.\n");
|
||||
fprintf(stderr, " -I|--import\t\tImport a structured file.\n");
|
||||
fprintf(stderr, " -r|--minrank\t\tMinimum / starting rank. (default: 0))\n");
|
||||
fprintf(stderr, " -R|--maxrank\t\tMaximum / finishing rank. (default: 30)\n");
|
||||
fprintf(stderr, " -t|--threads\t\tNumber of threads to create for indexing.\n");
|
||||
fprintf(stderr, " -F|--file\t\tfile to use (either to import or export).\n");
|
||||
fprintf(stderr, " -T|--tagfile\t\tfile containing 'special' tag pairs\n");
|
||||
fprintf(stderr, " \t(default: partitionedtags.def).\n");
|
||||
fprintf(stderr, " -h|--help\t\tHelp information.\n");
|
||||
fprintf(stderr, " -v|--verbose\t\tVerbose output.\n");
|
||||
fprintf(stderr, "\n");
|
||||
|
||||
if (sizeof(int*) == 4)
|
||||
{
|
||||
fprintf(stderr, "\n\nYou are running this on 32bit system - this will not work\n");
|
||||
}
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
int long_usage_bool=0;
|
||||
int pass_prompt=0;
|
||||
const char *db = "nominatim";
|
||||
const char *username=NULL;
|
||||
const char *host=NULL;
|
||||
const char *password=NULL;
|
||||
const char *port = "5432";
|
||||
const char *conninfo = NULL;
|
||||
int index = 0;
|
||||
int export = 0;
|
||||
int import = 0;
|
||||
int minrank = 0;
|
||||
int maxrank = 30;
|
||||
int threads = 1;
|
||||
const char *file = NULL;
|
||||
const char *tagsfile = "partitionedtags.def";
|
||||
|
||||
//import = 1;
|
||||
//structuredinputfile = "out.osms";
|
||||
|
||||
PGconn *conn;
|
||||
|
||||
fprintf(stderr, "nominatim version %s\n\n", NOMINATIM_VERSION);
|
||||
|
||||
while (1)
|
||||
{
|
||||
int c, option_index = 0;
|
||||
static struct option long_options[] =
|
||||
{
|
||||
{"help", 0, 0, 'h'},
|
||||
|
||||
{"verbose", 0, 0, 'v'},
|
||||
|
||||
{"database", 1, 0, 'd'},
|
||||
{"username", 1, 0, 'U'},
|
||||
{"password", 0, 0, 'W'},
|
||||
{"host", 1, 0, 'H'},
|
||||
{"port", 1, 0, 'P'},
|
||||
|
||||
{"index", 0, 0, 'i'},
|
||||
{"export", 0, 0, 'e'},
|
||||
{"import", 1, 0, 'I'},
|
||||
{"threads", 1, 0, 't'},
|
||||
{"file", 1, 0, 'F'},
|
||||
{"tagsfile", 1, 0, 'T'},
|
||||
|
||||
{"minrank", 1, 0, 'r'},
|
||||
{"maxrank", 1, 0, 'R'},
|
||||
|
||||
|
||||
|
||||
{0, 0, 0, 0}
|
||||
};
|
||||
|
||||
c = getopt_long(argc, argv, "vhd:U:WH:P:ieIt:F:T:r:R:", long_options, &option_index);
|
||||
if (c == -1)
|
||||
break;
|
||||
|
||||
switch (c)
|
||||
{
|
||||
case 'v':
|
||||
verbose=1;
|
||||
break;
|
||||
case 'd':
|
||||
db=optarg;
|
||||
break;
|
||||
case 'U':
|
||||
username=optarg;
|
||||
break;
|
||||
case 'W':
|
||||
pass_prompt=1;
|
||||
break;
|
||||
case 'H':
|
||||
host=optarg;
|
||||
break;
|
||||
case 'P':
|
||||
port=optarg;
|
||||
break;
|
||||
case 'h':
|
||||
long_usage_bool=1;
|
||||
break;
|
||||
case 'i':
|
||||
index=1;
|
||||
break;
|
||||
case 'e':
|
||||
export=1;
|
||||
break;
|
||||
case 'I':
|
||||
import=1;
|
||||
break;
|
||||
case 't':
|
||||
threads=atoi(optarg);
|
||||
break;
|
||||
case 'r':
|
||||
minrank=atoi(optarg);
|
||||
break;
|
||||
case 'R':
|
||||
maxrank=atoi(optarg);
|
||||
break;
|
||||
case 'F':
|
||||
file=optarg;
|
||||
break;
|
||||
case 'T':
|
||||
tagsfile=optarg;
|
||||
break;
|
||||
case '?':
|
||||
default:
|
||||
short_usage(argv[0]);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
}
|
||||
|
||||
if (long_usage_bool)
|
||||
{
|
||||
long_usage(argv[0]);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
if (threads < 1) threads = 1;
|
||||
|
||||
/*
|
||||
if (argc == optind) { // No non-switch arguments
|
||||
short_usage(argv[0]);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
*/
|
||||
if (index && import)
|
||||
{
|
||||
fprintf(stderr, "Error: --index and --import options can not be used on the same database!\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
if (pass_prompt)
|
||||
password = simple_prompt("Password:", 100, 0);
|
||||
else
|
||||
{
|
||||
password = getenv("PGPASS");
|
||||
}
|
||||
|
||||
// Test the database connection
|
||||
conninfo = build_conninfo(db, username, password, host, port);
|
||||
conn = PQconnectdb(conninfo);
|
||||
if (PQstatus(conn) != CONNECTION_OK)
|
||||
{
|
||||
fprintf(stderr, "Connection to database failed: %s\n", PQerrorMessage(conn));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
PQfinish(conn);
|
||||
|
||||
if (!index && !export && !import)
|
||||
{
|
||||
fprintf(stderr, "Please select index, export or import.\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (index) nominatim_index(minrank, maxrank, threads, conninfo, file);
|
||||
if (export) nominatim_export(minrank, maxrank, conninfo, file);
|
||||
if (import) nominatim_import(conninfo, tagsfile, file);
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -1,31 +0,0 @@
|
||||
#ifndef NOMINATIM_H
|
||||
#define NOMINATIM_H
|
||||
|
||||
#define MAX(x,y) (x > y?x:y)
|
||||
#define MIN(x,y) (x < y?x:y)
|
||||
|
||||
struct output_options
|
||||
{
|
||||
const char *conninfo; /* Connection info string */
|
||||
const char *prefix; /* prefix for table names */
|
||||
int scale; /* scale for converting coordinates to fixed point */
|
||||
int projection; /* SRS of projection */
|
||||
int append; /* Append to existing data */
|
||||
int slim; /* In slim mode */
|
||||
int cache; /* Memory usable for cache in MB */
|
||||
struct middle_t *mid; /* Mid storage to use */
|
||||
const char *tblsindex; /* Pg Tablespace to store indexes */
|
||||
const char *style; /* style file to use */
|
||||
int expire_tiles_zoom; /* Zoom level for tile expiry list */
|
||||
int expire_tiles_zoom_min; /* Minimum zoom level for tile expiry list */
|
||||
const char *expire_tiles_filename; /* File name to output expired tiles list to */
|
||||
int enable_hstore; /* add an additional hstore column with objects key/value pairs */
|
||||
int enable_multi; /* Output multi-geometries instead of several simple geometries */
|
||||
char** hstore_columns; /* list of columns that should be written into their own hstore column */
|
||||
int n_hstore_columns; /* number of hstore columns */
|
||||
};
|
||||
|
||||
void exit_nicely(void);
|
||||
void short_usage(char *arg0);
|
||||
|
||||
#endif
|
||||
370
nominatim/nominatim.py
Executable file
@@ -0,0 +1,370 @@
|
||||
#! /usr/bin/env python3
|
||||
#-----------------------------------------------------------------------------
|
||||
# nominatim - [description]
|
||||
#-----------------------------------------------------------------------------
|
||||
#
|
||||
# Indexing tool for the Nominatim database.
|
||||
#
|
||||
# Based on C version by Brian Quinion
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program; if not, write to the Free Software
|
||||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
|
||||
#-----------------------------------------------------------------------------
|
||||
|
||||
from argparse import ArgumentParser, RawDescriptionHelpFormatter, ArgumentTypeError
|
||||
import logging
|
||||
import sys
|
||||
import re
|
||||
import getpass
|
||||
from datetime import datetime
|
||||
import psycopg2
|
||||
from psycopg2.extras import wait_select
|
||||
import select
|
||||
|
||||
log = logging.getLogger()
|
||||
|
||||
def make_connection(options, asynchronous=False):
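# With asynchronous=True psycopg2 returns the connection in asynchronous mode;
# it cannot be used until it has been polled to completion, which is why
# DBConnection.connect() below calls wait() straight after this function.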
|
||||
params = {'dbname' : options.dbname,
|
||||
'user' : options.user,
|
||||
'password' : options.password,
|
||||
'host' : options.host,
|
||||
'port' : options.port,
|
||||
'async' : asynchronous}
|
||||
|
||||
return psycopg2.connect(**params)
|
||||
|
||||
|
||||
class RankRunner(object):
|
||||
""" Returns SQL commands for indexing one rank within the placex table.
|
||||
"""
|
||||
|
||||
def __init__(self, rank):
|
||||
self.rank = rank
|
||||
|
||||
def name(self):
|
||||
return "rank {}".format(self.rank)
|
||||
|
||||
def sql_index_sectors(self):
|
||||
return """SELECT geometry_sector, count(*) FROM placex
|
||||
WHERE rank_search = {} and indexed_status > 0
|
||||
GROUP BY geometry_sector
|
||||
ORDER BY geometry_sector""".format(self.rank)
|
||||
|
||||
def sql_nosector_places(self):
|
||||
return """SELECT place_id FROM placex
|
||||
WHERE indexed_status > 0 and rank_search = {}
|
||||
ORDER BY geometry_sector""".format(self.rank)
|
||||
|
||||
def sql_sector_places(self):
|
||||
return """SELECT place_id FROM placex
|
||||
WHERE indexed_status > 0 and rank_search = {}
|
||||
and geometry_sector = %s""".format(self.rank)
|
||||
|
||||
def sql_index_place(self):
|
||||
return "UPDATE placex SET indexed_status = 0 WHERE place_id = %s"
|
||||
|
||||
|
||||
class InterpolationRunner(object):
|
||||
""" Returns SQL commands for indexing the address interpolation table
|
||||
location_property_osmline.
|
||||
"""
|
||||
|
||||
def name(self):
|
||||
return "interpolation lines (location_property_osmline)"
|
||||
|
||||
def sql_index_sectors(self):
|
||||
return """SELECT geometry_sector, count(*) FROM location_property_osmline
|
||||
WHERE indexed_status > 0
|
||||
GROUP BY geometry_sector
|
||||
ORDER BY geometry_sector"""
|
||||
|
||||
def sql_nosector_places(self):
|
||||
return """SELECT place_id FROM location_property_osmline
|
||||
WHERE indexed_status > 0
|
||||
ORDER BY geometry_sector"""
|
||||
|
||||
def sql_sector_places(self):
|
||||
return """SELECT place_id FROM location_property_osmline
|
||||
WHERE indexed_status > 0 and geometry_sector = %s
|
||||
ORDER BY geometry_sector"""
|
||||
|
||||
def sql_index_place(self):
|
||||
return """UPDATE location_property_osmline
|
||||
SET indexed_status = 0 WHERE place_id = %s"""
|
||||
|
||||
|
||||
class DBConnection(object):
|
||||
""" A single non-blocking database connection.
|
||||
"""
|
||||
|
||||
def __init__(self, options):
|
||||
self.current_query = None
|
||||
self.current_params = None
|
||||
|
||||
self.conn = None
|
||||
self.connect()
|
||||
|
||||
def connect(self):
|
||||
if self.conn is not None:
|
||||
self.cursor.close()
|
||||
self.conn.close()
|
||||
|
||||
self.conn = make_connection(options, asynchronous=True)
|
||||
self.wait()
|
||||
|
||||
self.cursor = self.conn.cursor()
|
||||
# Disable JIT and parallel workers as they are known to cause problems.
|
||||
# Update pg_settings instead of using SET because it does not yield
|
||||
# errors on older versions of Postgres where the settings are not
|
||||
# implemented.
|
||||
self.perform(
|
||||
""" UPDATE pg_settings SET setting = -1 WHERE name = 'jit_above_cost';
|
||||
UPDATE pg_settings SET setting = 0
|
||||
WHERE name = 'max_parallel_workers_per_gather';""")
|
||||
self.wait()
|
||||
|
||||
def wait(self):
|
||||
""" Block until any pending operation is done.
|
||||
"""
|
||||
while True:
|
||||
try:
|
||||
wait_select(self.conn)
|
||||
self.current_query = None
|
||||
return
|
||||
except psycopg2.extensions.TransactionRollbackError as e:
|
||||
if e.pgcode == '40P01':
|
||||
log.info("Deadlock detected (params = {}), retry."
|
||||
.format(self.current_params))
|
||||
self.cursor.execute(self.current_query, self.current_params)
|
||||
else:
|
||||
raise
|
||||
except psycopg2.errors.DeadlockDetected:
|
||||
self.cursor.execute(self.current_query, self.current_params)
|
||||
|
||||
def perform(self, sql, args=None):
|
||||
""" Send SQL query to the server. Returns immediately without
|
||||
blocking.
|
||||
"""
|
||||
self.current_query = sql
|
||||
self.current_params = args
|
||||
self.cursor.execute(sql, args)
|
||||
|
||||
def fileno(self):
|
||||
""" File descriptor to wait for. (Makes this class select()able.)
|
||||
"""
|
||||
return self.conn.fileno()
|
||||
|
||||
def is_done(self):
|
||||
""" Check if the connection is available for a new query.
|
||||
|
||||
Also checks if the previous query has run into a deadlock.
|
||||
If so, then the previous query is repeated.
|
||||
"""
|
||||
if self.current_query is None:
|
||||
return True
|
||||
|
||||
try:
|
||||
if self.conn.poll() == psycopg2.extensions.POLL_OK:
|
||||
self.current_query = None
|
||||
return True
|
||||
except psycopg2.extensions.TransactionRollbackError as e:
|
||||
if e.pgcode == '40P01':
|
||||
log.info("Deadlock detected (params = {}), retry.".format(self.current_params))
|
||||
self.cursor.execute(self.current_query, self.current_params)
|
||||
else:
|
||||
raise
|
||||
except psycopg2.errors.DeadlockDetected:
|
||||
self.cursor.execute(self.current_query, self.current_params)
|
||||
|
||||
return False
|
||||
|
||||
|
||||
class Indexer(object):
|
||||
""" Main indexing routine.
|
||||
"""
|
||||
|
||||
def __init__(self, options):
|
||||
self.minrank = max(0, options.minrank)
|
||||
self.maxrank = min(30, options.maxrank)
|
||||
self.conn = make_connection(options)
|
||||
self.threads = [DBConnection(options) for i in range(options.threads)]
|
||||
|
||||
def run(self):
|
||||
""" Run indexing over the entire database.
|
||||
"""
|
||||
log.warning("Starting indexing rank ({} to {}) using {} threads".format(
|
||||
self.minrank, self.maxrank, len(self.threads)))
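# range() stops just before maxrank, so the highest rank is indexed last,
# after the interpolation lines when maxrank is 30.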
|
||||
|
||||
for rank in range(self.minrank, self.maxrank):
|
||||
self.index(RankRunner(rank))
|
||||
|
||||
if self.maxrank == 30:
|
||||
self.index(InterpolationRunner())
|
||||
|
||||
self.index(RankRunner(self.maxrank))
|
||||
|
||||
def index(self, obj):
|
||||
""" Index a single rank or table. `obj` describes the SQL to use
|
||||
for indexing.
|
||||
"""
|
||||
log.warning("Starting {}".format(obj.name()))
|
||||
|
||||
cur = self.conn.cursor(name='main')
|
||||
cur.execute(obj.sql_index_sectors())
|
||||
|
||||
total_tuples = 0
|
||||
for r in cur:
|
||||
total_tuples += r[1]
|
||||
log.debug("Total number of rows; {}".format(total_tuples))
|
||||
|
||||
cur.scroll(0, mode='absolute')
|
||||
|
||||
next_thread = self.find_free_thread()
|
||||
done_tuples = 0
|
||||
rank_start_time = datetime.now()
|
||||
|
||||
sector_sql = obj.sql_sector_places()
|
||||
index_sql = obj.sql_index_place()
|
||||
min_grouped_tuples = total_tuples - len(self.threads) * 1000
|
||||
|
||||
next_info = 100 if log.isEnabledFor(logging.INFO) else total_tuples + 1
|
||||
|
||||
for r in cur:
|
||||
sector = r[0]
|
||||
|
||||
# Should we do the remaining ones together?
|
||||
do_all = done_tuples > min_grouped_tuples
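# i.e. once fewer than roughly 1000 places per thread are left, per-sector
# batching no longer pays off and the remainder is fetched in a single query.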
|
||||
|
||||
pcur = self.conn.cursor(name='places')
|
||||
|
||||
if do_all:
|
||||
pcur.execute(obj.sql_nosector_places())
|
||||
else:
|
||||
pcur.execute(sector_sql, (sector, ))
|
||||
|
||||
for place in pcur:
|
||||
place_id = place[0]
|
||||
log.debug("Processing place {}".format(place_id))
|
||||
thread = next(next_thread)
|
||||
|
||||
thread.perform(index_sql, (place_id,))
|
||||
done_tuples += 1
|
||||
|
||||
if done_tuples >= next_info:
|
||||
now = datetime.now()
|
||||
done_time = (now - rank_start_time).total_seconds()
|
||||
tuples_per_sec = done_tuples / done_time
|
||||
log.info("Done {} in {} @ {:.3f} per second - {} ETA (seconds): {:.2f}"
|
||||
.format(done_tuples, int(done_time),
|
||||
tuples_per_sec, obj.name(),
|
||||
(total_tuples - done_tuples)/tuples_per_sec))
|
||||
next_info += int(tuples_per_sec)
|
||||
|
||||
pcur.close()
|
||||
|
||||
if do_all:
|
||||
break
|
||||
|
||||
cur.close()
|
||||
|
||||
for t in self.threads:
|
||||
t.wait()
|
||||
|
||||
rank_end_time = datetime.now()
|
||||
diff_seconds = (rank_end_time-rank_start_time).total_seconds()
|
||||
|
||||
log.warning("Done {}/{} in {} @ {:.3f} per second - FINISHED {}\n".format(
|
||||
done_tuples, total_tuples, int(diff_seconds),
|
||||
done_tuples/diff_seconds, obj.name()))
|
||||
|
||||
def find_free_thread(self):
|
||||
""" Generator that returns the next connection that is free for
|
||||
sending a query.
|
||||
"""
|
||||
ready = self.threads
|
||||
command_stat = 0
|
||||
|
||||
while True:
|
||||
for thread in ready:
|
||||
if thread.is_done():
|
||||
command_stat += 1
|
||||
yield thread
|
||||
|
||||
# refresh the connections occasionally to avoid potential
# memory leaks in PostgreSQL.
|
||||
if command_stat > 100000:
|
||||
for t in self.threads:
|
||||
while not t.is_done():
|
||||
t.wait()
|
||||
t.connect()
|
||||
command_stat = 0
|
||||
ready = self.threads
|
||||
else:
|
||||
ready, _, _ = select.select(self.threads, [], [])
|
||||
|
||||
assert False, "Unreachable code"
|
||||
|
||||
|
||||
def nominatim_arg_parser():
|
||||
""" Setup the command-line parser for the tool.
|
||||
"""
|
||||
def h(s):
|
||||
return re.sub("\s\s+" , " ", s)
|
||||
|
||||
p = ArgumentParser(description="Indexing tool for Nominatim.",
|
||||
formatter_class=RawDescriptionHelpFormatter)
|
||||
|
||||
p.add_argument('-d', '--database',
|
||||
dest='dbname', action='store', default='nominatim',
|
||||
help='Name of the PostgreSQL database to connect to.')
|
||||
p.add_argument('-U', '--username',
|
||||
dest='user', action='store',
|
||||
help='PostgreSQL user name.')
|
||||
p.add_argument('-W', '--password',
|
||||
dest='password_prompt', action='store_true',
|
||||
help='Force password prompt.')
|
||||
p.add_argument('-H', '--host',
|
||||
dest='host', action='store',
|
||||
help='PostgreSQL server hostname or socket location.')
|
||||
p.add_argument('-P', '--port',
|
||||
dest='port', action='store',
|
||||
help='PostgreSQL server port')
|
||||
p.add_argument('-r', '--minrank',
|
||||
dest='minrank', type=int, metavar='RANK', default=0,
|
||||
help='Minimum/starting rank.')
|
||||
p.add_argument('-R', '--maxrank',
|
||||
dest='maxrank', type=int, metavar='RANK', default=30,
|
||||
help='Maximum/finishing rank.')
|
||||
p.add_argument('-t', '--threads',
|
||||
dest='threads', type=int, metavar='NUM', default=1,
|
||||
help='Number of threads to create for indexing.')
|
||||
p.add_argument('-v', '--verbose',
|
||||
dest='loglevel', action='count', default=0,
|
||||
help='Increase verbosity')
|
||||
|
||||
return p
|
||||
|
||||
if __name__ == '__main__':
|
||||
logging.basicConfig(stream=sys.stderr, format='%(levelname)s: %(message)s')
|
||||
|
||||
options = nominatim_arg_parser().parse_args(sys.argv[1:])
|
||||
|
||||
log.setLevel(max(3 - options.loglevel, 0) * 10)
|
||||
|
||||
options.password = None
|
||||
if options.password_prompt:
|
||||
password = getpass.getpass("Database password: ")
|
||||
options.password = password
|
||||
|
||||
Indexer(options).run()
|
||||
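A minimal usage sketch of the indexer above, assuming a local 'nominatim' database and the option names defined in nominatim_arg_parser(); note that DBConnection.connect() reads the module-global name 'options', so the options object must be bound to that name at module scope:

from argparse import Namespace

# hypothetical option values; field names mirror nominatim_arg_parser()
options = Namespace(dbname='nominatim', user=None, password=None, host=None,
                    port=None, minrank=0, maxrank=30, threads=2)
Indexer(options).run()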
@@ -1,55 +0,0 @@
|
||||
|
||||
%define svn @SVN@
|
||||
|
||||
Summary: Nominatim OpenStreetMap geocoding database
|
||||
Name: @PACKAGE@
|
||||
Group: Applications/Text
|
||||
Version: @VERSION@
|
||||
Release: 1.%{svn}%{?dist}
|
||||
|
||||
License: GPL
|
||||
URL: http://svn.openstreetmap.org/applications/utils/nominatim
|
||||
Source0: %{name}-%{version}-%{svn}.tar.bz2
|
||||
Source1: nominatim-svn.sh
|
||||
BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n)
|
||||
|
||||
BuildRequires: geos-devel
|
||||
BuildRequires: libxml2-devel
|
||||
BuildRequires: postgresql-devel
|
||||
BuildRequires: bzip2-devel
|
||||
BuildRequires: proj-devel
|
||||
|
||||
%description
|
||||
Processes data imported using osm2pgsql from the community mapping project
|
||||
at http://www.openstreetmap.org.
|
||||
|
||||
%prep
|
||||
%setup -q -n %{name}
|
||||
|
||||
|
||||
%build
|
||||
|
||||
export CFLAGS="$RPM_OPT_FLAGS"
|
||||
export CXXFLAGS="$RPM_OPT_FLAGS"
|
||||
|
||||
make all
|
||||
|
||||
|
||||
%install
|
||||
rm -rf $RPM_BUILD_ROOT
|
||||
install -D -p nominatim $RPM_BUILD_ROOT/usr/bin/nominatim
|
||||
|
||||
|
||||
%clean
|
||||
rm -rf $RPM_BUILD_ROOT
|
||||
|
||||
|
||||
%files
|
||||
%defattr(-,root,root)
|
||||
%doc README.txt
|
||||
%{_bindir}/nominatim
|
||||
|
||||
|
||||
%changelog
|
||||
* Fri Sep 09 2010 Brian Quinion <nominatim@brian.quinion.co.uk> 0.1-1.20070316svn
|
||||
- Initial build
|
||||
@@ -1,41 +0,0 @@
|
||||
/*
|
||||
*/
|
||||
#include <string.h>
|
||||
#include "postgresql.h"
|
||||
|
||||
const char *build_conninfo(const char *db, const char *username, const char *password, const char *host, const char *port)
|
||||
{
|
||||
static char conninfo[1024];
|
||||
|
||||
conninfo[0]='\0';
|
||||
strcat(conninfo, "dbname='");
|
||||
strcat(conninfo, db);
|
||||
strcat(conninfo, "'");
|
||||
|
||||
if (username)
|
||||
{
|
||||
strcat(conninfo, " user='");
|
||||
strcat(conninfo, username);
|
||||
strcat(conninfo, "'");
|
||||
}
|
||||
if (password)
|
||||
{
|
||||
strcat(conninfo, " password='");
|
||||
strcat(conninfo, password);
|
||||
strcat(conninfo, "'");
|
||||
}
|
||||
if (host)
|
||||
{
|
||||
strcat(conninfo, " host='");
|
||||
strcat(conninfo, host);
|
||||
strcat(conninfo, "'");
|
||||
}
|
||||
if (port)
|
||||
{
|
||||
strcat(conninfo, " port='");
|
||||
strcat(conninfo, port);
|
||||
strcat(conninfo, "'");
|
||||
}
|
||||
|
||||
return conninfo;
|
||||
}
|
||||
@@ -1,40 +0,0 @@
|
||||
/*
|
||||
*/
|
||||
|
||||
#ifndef POSTGRESQL_H
|
||||
#define POSTGRESQL_H
|
||||
|
||||
#define PG_OID_INT8 20
|
||||
#define PG_OID_INT4 23
|
||||
|
||||
#if HAVE_BYTESWAP
|
||||
#include <byteswap.h>
|
||||
#define PG_BSWAP32(x) bswap_32(x)
|
||||
#define PG_BSWAP64(x) bswap_64(x)
|
||||
#elif HAVE_SYS_ENDIAN
|
||||
#include <sys/endian.h>
|
||||
#define PG_BSWAP32(x) bswap32(x)
|
||||
#define PG_BSWAP64(x) bswap64(x)
|
||||
#else
|
||||
#error "No appropriate byteswap found for your system."
|
||||
#endif
|
||||
|
||||
#if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_BIG_ENDIAN__)
|
||||
#define PGint32(x) (x)
|
||||
#define PGint64(x) (x)
|
||||
#elif defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
|
||||
#define PGint32(x) PG_BSWAP32(x)
|
||||
#define PGint64(x) PG_BSWAP64(x)
|
||||
#elif defined(_BYTE_ORDER) && (_BYTE_ORDER == _BIG_ENDIAN)
|
||||
#define PGint32(x) (x)
|
||||
#define PGint64(x) (x)
|
||||
#elif defined(_BYTE_ORDER) && (_BYTE_ORDER == _LITTLE_ENDIAN)
|
||||
#define PGint32(x) PG_BSWAP32(x)
|
||||
#define PGint64(x) PG_BSWAP64(x)
|
||||
#else
|
||||
#error "Cannot determine byte order."
|
||||
#endif
|
||||
|
||||
const char *build_conninfo(const char *db, const char *username, const char *password, const char *host, const char *port);
|
||||
|
||||
#endif
|
||||
@@ -1,200 +0,0 @@
|
||||
/*-------------------------------------------------------------------------
|
||||
*
|
||||
* sprompt.c
|
||||
* simple_prompt() routine
|
||||
*
|
||||
* Portions Copyright (c) 1996-2006, PostgreSQL Global Development Group
|
||||
* Portions Copyright (c) 1994, Regents of the University of California
|
||||
*
|
||||
*
|
||||
* IDENTIFICATION
|
||||
* $PostgreSQL: pgsql/src/port/sprompt.c,v 1.18 2006/10/04 00:30:14 momjian Exp $
|
||||
*
|
||||
*-------------------------------------------------------------------------
|
||||
*
|
||||
* PostgreSQL Database Management System
|
||||
* (formerly known as Postgres, then as Postgres95)
|
||||
*
|
||||
* Portions Copyright (c) 1996-2006, PostgreSQL Global Development Group
|
||||
*
|
||||
* Portions Copyright (c) 1994, The Regents of the University of California
|
||||
*
|
||||
* Permission to use, copy, modify, and distribute this software and its
|
||||
* documentation for any purpose, without fee, and without a written agreement
|
||||
* is hereby granted, provided that the above copyright notice and this
|
||||
* paragraph and the following two paragraphs appear in all copies.
|
||||
*
|
||||
* IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING
|
||||
* LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS
|
||||
* DOCUMENTATION, EVEN IF THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE
|
||||
* POSSIBILITY OF SUCH DAMAGE.
|
||||
*
|
||||
* THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
|
||||
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
* AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATIONS TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
* simple_prompt
|
||||
*
|
||||
* Generalized function especially intended for reading in usernames and
|
||||
* password interactively. Reads from /dev/tty or stdin/stderr.
|
||||
*
|
||||
* prompt: The prompt to print
|
||||
* maxlen: How many characters to accept
|
||||
* echo: Set to false if you want to hide what is entered (for passwords)
|
||||
*
|
||||
* Returns a malloc()'ed string with the input (w/o trailing newline).
|
||||
*/
|
||||
|
||||
#define DEVTTY "/dev/tty"
|
||||
|
||||
#include <stdio.h>
|
||||
#include <unistd.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <assert.h>
|
||||
|
||||
#include <libpq-fe.h>
|
||||
|
||||
#ifdef __MINGW_H
|
||||
# include <windows.h>
|
||||
#else
|
||||
# define HAVE_TERMIOS_H
|
||||
# include <termios.h>
|
||||
#endif
|
||||
|
||||
/*
|
||||
extern char *simple_prompt(const char *prompt, int maxlen, int echo);
|
||||
*/
|
||||
|
||||
char *
|
||||
simple_prompt(const char *prompt, int maxlen, int echo)
|
||||
{
|
||||
int length;
|
||||
char *destination;
|
||||
FILE *termin,
|
||||
*termout;
|
||||
|
||||
#ifdef HAVE_TERMIOS_H
|
||||
struct termios t_orig,
|
||||
t;
|
||||
#else
|
||||
#ifdef WIN32
|
||||
HANDLE t = NULL;
|
||||
LPDWORD t_orig = NULL;
|
||||
#endif
|
||||
#endif
|
||||
|
||||
destination = (char *) malloc(maxlen + 1);
|
||||
if (!destination)
|
||||
return NULL;
|
||||
|
||||
/*
|
||||
* Do not try to collapse these into one "w+" mode file. Doesn't work on
|
||||
* some platforms (eg, HPUX 10.20).
|
||||
*/
|
||||
termin = fopen(DEVTTY, "r");
|
||||
termout = fopen(DEVTTY, "w");
|
||||
if (!termin || !termout
|
||||
#ifdef WIN32
|
||||
/* See DEVTTY comment for msys */
|
||||
|| (getenv("OSTYPE") && strcmp(getenv("OSTYPE"), "msys") == 0)
|
||||
#endif
|
||||
)
|
||||
{
|
||||
if (termin)
|
||||
fclose(termin);
|
||||
if (termout)
|
||||
fclose(termout);
|
||||
termin = stdin;
|
||||
termout = stderr;
|
||||
}
|
||||
|
||||
#ifdef HAVE_TERMIOS_H
|
||||
if (!echo)
|
||||
{
|
||||
tcgetattr(fileno(termin), &t);
|
||||
t_orig = t;
|
||||
t.c_lflag &= ~ECHO;
|
||||
tcsetattr(fileno(termin), TCSAFLUSH, &t);
|
||||
}
|
||||
#else
|
||||
#ifdef WIN32
|
||||
if (!echo)
|
||||
{
|
||||
/* get a new handle to turn echo off */
|
||||
t_orig = (LPDWORD) malloc(sizeof(DWORD));
|
||||
t = GetStdHandle(STD_INPUT_HANDLE);
|
||||
|
||||
/* save the old configuration first */
|
||||
GetConsoleMode(t, t_orig);
|
||||
|
||||
/* set to the new mode */
|
||||
SetConsoleMode(t, ENABLE_LINE_INPUT | ENABLE_PROCESSED_INPUT);
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
|
||||
if (prompt)
|
||||
{
|
||||
fputs(prompt, termout);
|
||||
fflush(termout);
|
||||
}
|
||||
|
||||
if (fgets(destination, maxlen + 1, termin) == NULL)
|
||||
destination[0] = '\0';
|
||||
|
||||
length = strlen(destination);
|
||||
if (length > 0 && destination[length - 1] != '\n')
|
||||
{
|
||||
/* eat rest of the line */
|
||||
char buf[128];
|
||||
int buflen;
|
||||
|
||||
do
|
||||
{
|
||||
if (fgets(buf, sizeof(buf), termin) == NULL)
|
||||
break;
|
||||
buflen = strlen(buf);
|
||||
}
|
||||
while (buflen > 0 && buf[buflen - 1] != '\n');
|
||||
}
|
||||
|
||||
if (length > 0 && destination[length - 1] == '\n')
|
||||
/* remove trailing newline */
|
||||
destination[length - 1] = '\0';
|
||||
|
||||
#ifdef HAVE_TERMIOS_H
|
||||
if (!echo)
|
||||
{
|
||||
tcsetattr(fileno(termin), TCSAFLUSH, &t_orig);
|
||||
fputs("\n", termout);
|
||||
fflush(termout);
|
||||
}
|
||||
#else
|
||||
#ifdef WIN32
|
||||
if (!echo)
|
||||
{
|
||||
/* reset to the original console mode */
|
||||
SetConsoleMode(t, *t_orig);
|
||||
fputs("\n", termout);
|
||||
fflush(termout);
|
||||
free(t_orig);
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
|
||||
if (termin != stdin)
|
||||
{
|
||||
fclose(termin);
|
||||
fclose(termout);
|
||||
}
|
||||
|
||||
return destination;
|
||||
}
|
||||
@@ -1,4 +0,0 @@
|
||||
#ifndef SPROMPT_H
|
||||
#define SPROMPT_H
|
||||
char *simple_prompt(const char *prompt, int maxlen, int echo);
|
||||
#endif
|
||||
File diff suppressed because it is too large
@@ -1,281 +0,0 @@
|
||||
/*
|
||||
* The author of this software is Steven Fortune. Copyright (c) 1994 by AT&T
|
||||
* Bell Laboratories.
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose without fee is hereby granted, provided that this entire notice
|
||||
* is included in all copies of any software which is or includes a copy
|
||||
* or modification of this software and in all copies of the supporting
|
||||
* documentation for such software.
|
||||
* THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
|
||||
* WARRANTY. IN PARTICULAR, NEITHER THE AUTHORS NOR AT&T MAKE ANY
|
||||
* REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
|
||||
* OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
|
||||
*/
|
||||
|
||||
/*
|
||||
* This code was originally written by Stephan Fortune in C code. I, Shane O'Sullivan,
|
||||
* have since modified it, encapsulating it in a C++ class and, fixing memory leaks and
|
||||
* adding accessors to the Voronoi Edges.
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose without fee is hereby granted, provided that this entire notice
|
||||
* is included in all copies of any software which is or includes a copy
|
||||
* or modification of this software and in all copies of the supporting
|
||||
* documentation for such software.
|
||||
* THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
|
||||
* WARRANTY. IN PARTICULAR, NEITHER THE AUTHORS NOR AT&T MAKE ANY
|
||||
* REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
|
||||
* OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
|
||||
*/
|
||||
|
||||
#ifndef VORONOI_DIAGRAM_GENERATOR
|
||||
#define VORONOI_DIAGRAM_GENERATOR
|
||||
|
||||
#include <math.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
|
||||
#ifndef NULL
|
||||
#define NULL 0
|
||||
#endif
|
||||
#define DELETED -2
|
||||
|
||||
#define le 0
|
||||
#define re 1
|
||||
|
||||
struct SourcePoint
|
||||
{
|
||||
int id;
|
||||
double weight;
|
||||
double x;
|
||||
double y;
|
||||
};
|
||||
|
||||
struct Freenode
|
||||
{
|
||||
struct Freenode *nextfree;
|
||||
};
|
||||
|
||||
struct FreeNodeArrayList
|
||||
{
|
||||
struct Freenode* memory;
|
||||
struct FreeNodeArrayList* next;
|
||||
|
||||
};
|
||||
|
||||
struct Freelist
|
||||
{
|
||||
struct Freenode *head;
|
||||
int nodesize;
|
||||
};
|
||||
|
||||
struct Point
|
||||
{
|
||||
float x,y;
|
||||
};
|
||||
|
||||
struct PolygonPoint
|
||||
{
|
||||
struct Point coord;
|
||||
double angle;
|
||||
int boundary;
|
||||
};
|
||||
|
||||
struct Polygon
|
||||
{
|
||||
int sitenbr;
|
||||
struct Point coord;
|
||||
int numpoints;
|
||||
struct PolygonPoint * pointlist;
|
||||
int boundary;
|
||||
};
|
||||
|
||||
|
||||
// structure used both for sites and for vertices
|
||||
struct Site
|
||||
{
|
||||
struct Point coord;
|
||||
struct Point coordout;
|
||||
double weight;
|
||||
int sitenbr;
|
||||
int refcnt;
|
||||
};
|
||||
|
||||
|
||||
|
||||
struct Edge
|
||||
{
|
||||
float a,b,c;
|
||||
struct Site *ep[2];
|
||||
struct Site *reg[2];
|
||||
int edgenbr;
|
||||
|
||||
};
|
||||
|
||||
struct GraphEdge
|
||||
{
|
||||
float x1,y1,x2,y2;
|
||||
struct GraphEdge* next;
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
struct Halfedge
|
||||
{
|
||||
struct Halfedge *ELleft, *ELright;
|
||||
struct Edge *ELedge;
|
||||
int ELrefcnt;
|
||||
char ELpm;
|
||||
struct Site *vertex;
|
||||
float ystar;
|
||||
struct Halfedge *PQnext;
|
||||
};
|
||||
|
||||
|
||||
|
||||
|
||||
class VoronoiDiagramGenerator
|
||||
{
|
||||
public:
|
||||
VoronoiDiagramGenerator();
|
||||
~VoronoiDiagramGenerator();
|
||||
|
||||
bool generateVoronoi(struct SourcePoint* srcPoints, int numPoints, float minX, float maxX, float minY, float maxY, float minDist=0);
|
||||
void getSitePoints(int sitenbr, int* numpoints, PolygonPoint** pS);
|
||||
|
||||
void resetIterator()
|
||||
{
|
||||
iteratorEdges = allEdges;
|
||||
}
|
||||
|
||||
bool getNext(float& x1, float& y1, float& x2, float& y2)
|
||||
{
|
||||
if(iteratorEdges == 0)
|
||||
return false;
|
||||
|
||||
x1 = iteratorEdges->x1;
|
||||
x2 = iteratorEdges->x2;
|
||||
y1 = iteratorEdges->y1;
|
||||
y2 = iteratorEdges->y2;
|
||||
|
||||
iteratorEdges = iteratorEdges->next;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
private:
|
||||
void cleanup();
|
||||
void cleanupEdges();
|
||||
char *getfree(struct Freelist *fl);
|
||||
struct Halfedge *PQfind();
|
||||
int PQempty();
|
||||
|
||||
|
||||
|
||||
struct Halfedge **ELhash;
|
||||
struct Halfedge *HEcreate(), *ELleft(), *ELright(), *ELleftbnd();
|
||||
struct Halfedge *HEcreate(struct Edge *e,int pm);
|
||||
|
||||
|
||||
struct Point PQ_min();
|
||||
struct Halfedge *PQextractmin();
|
||||
void freeinit(struct Freelist *fl,int size);
|
||||
void makefree(struct Freenode *curr,struct Freelist *fl);
|
||||
void geominit();
|
||||
void plotinit();
|
||||
bool voronoi(int triangulate);
|
||||
void ref(struct Site *v);
|
||||
void deref(struct Site *v);
|
||||
void endpoint(struct Edge *e,int lr,struct Site * s);
|
||||
void endpoint(struct Edge *e1,int lr,struct Site * s, struct Edge *e2, struct Edge *e3);
|
||||
|
||||
void ELdelete(struct Halfedge *he);
|
||||
struct Halfedge *ELleftbnd(struct Point *p);
|
||||
struct Halfedge *ELright(struct Halfedge *he);
|
||||
void makevertex(struct Site *v);
|
||||
void out_triple(struct Site *s1, struct Site *s2,struct Site * s3);
|
||||
|
||||
void PQinsert(struct Halfedge *he,struct Site * v, float offset);
|
||||
void PQdelete(struct Halfedge *he);
|
||||
bool ELinitialize();
|
||||
void ELinsert(struct Halfedge *lb, struct Halfedge *newHe);
|
||||
struct Halfedge * ELgethash(int b);
|
||||
struct Halfedge *ELleft(struct Halfedge *he);
|
||||
struct Site *leftreg(struct Halfedge *he);
|
||||
void out_site(struct Site *s);
|
||||
bool PQinitialize();
|
||||
int PQbucket(struct Halfedge *he);
|
||||
void pushpoint(int sitenbr, double x, double y, int boundary);
|
||||
int ccw( Point p0, Point p1, Point p2 );
|
||||
void clip_line(struct Edge *e);
|
||||
char *myalloc(unsigned n);
|
||||
int right_of(struct Halfedge *el,struct Point *p);
|
||||
|
||||
struct Site *rightreg(struct Halfedge *he);
|
||||
struct Edge *bisect(struct Site *s1,struct Site *s2);
|
||||
float dist(struct Site *s,struct Site *t);
|
||||
struct Site *intersect(struct Halfedge *el1, struct Halfedge *el2, struct Point *p=0);
|
||||
|
||||
void out_bisector(struct Edge *e);
|
||||
void out_ep(struct Edge *e);
|
||||
void out_vertex(struct Site *v);
|
||||
struct Site *nextone();
|
||||
|
||||
void pushGraphEdge(float x1, float y1, float x2, float y2);
|
||||
|
||||
void openpl();
|
||||
void line(float x1, float y1, float x2, float y2);
|
||||
void circle(float x, float y, float radius);
|
||||
void range(float minX, float minY, float maxX, float maxY);
|
||||
|
||||
|
||||
struct Freelist hfl;
|
||||
struct Halfedge *ELleftend, *ELrightend;
|
||||
int ELhashsize;
|
||||
|
||||
int triangulate, sorted, plot, debug;
|
||||
float xmin, xmax, ymin, ymax, deltax, deltay;
|
||||
|
||||
struct Site *sites;
|
||||
struct Polygon *polygons;
|
||||
struct Point corners[4];
|
||||
int nsites;
|
||||
int siteidx;
|
||||
int sqrt_nsites;
|
||||
int nvertices;
|
||||
struct Freelist sfl;
|
||||
struct Site *bottomsite;
|
||||
|
||||
int nedges;
|
||||
struct Freelist efl;
|
||||
int PQhashsize;
|
||||
struct Halfedge *PQhash;
|
||||
int PQcount;
|
||||
int PQmin;
|
||||
|
||||
int ntry, totalsearch;
|
||||
float pxmin, pxmax, pymin, pymax, cradius;
|
||||
int total_alloc;
|
||||
|
||||
float borderMinX, borderMaxX, borderMinY, borderMaxY;
|
||||
|
||||
FreeNodeArrayList* allMemoryList;
|
||||
FreeNodeArrayList* currentMemoryBlock;
|
||||
|
||||
GraphEdge* allEdges;
|
||||
GraphEdge* iteratorEdges;
|
||||
|
||||
float minDistanceBetweenSites;
|
||||
|
||||
};
|
||||
|
||||
int scomp(const void *p1,const void *p2);
|
||||
int spcomp(const void *p1,const void *p2);
|
||||
int anglecomp(const void * p1, const void * p2);
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
/*
|
||||
* The author of this software is Shane O'Sullivan.
|
||||
* Permission to use, copy, modify, and distribute this software for any
|
||||
* purpose without fee is hereby granted, provided that this entire notice
|
||||
* is included in all copies of any software which is or includes a copy
|
||||
* or modification of this software and in all copies of the supporting
|
||||
* documentation for such software.
|
||||
* THIS SOFTWARE IS BEING PROVIDED "AS IS", WITHOUT ANY EXPRESS OR IMPLIED
|
||||
* WARRANTY. IN PARTICULAR, NEITHER THE AUTHORS NOR AT&T MAKE ANY
|
||||
* REPRESENTATION OR WARRANTY OF ANY KIND CONCERNING THE MERCHANTABILITY
|
||||
* OF THIS SOFTWARE OR ITS FITNESS FOR ANY PARTICULAR PURPOSE.
|
||||
*/
|
||||
|
||||
|
||||
#
|
||||
#include <stdio.h>
|
||||
#include <search.h>
|
||||
#include <malloc.h>
|
||||
#include "VoronoiDiagramGenerator.h"
|
||||
|
||||
|
||||
|
||||
int main(int argc, char **argv)
|
||||
{
|
||||
double xmin, xmax, ymin, ymax;
|
||||
scanf("%lf %lf %lf %lf", &xmin, &xmax, &ymin, &ymax) ;
|
||||
|
||||
SourcePoint * sites;
|
||||
long nsites;
|
||||
|
||||
nsites = 0;
|
||||
sites = (SourcePoint *) malloc(4000 * sizeof(SourcePoint));
|
||||
while (scanf("%d %lf %lf %lf", &sites[nsites].id, &sites[nsites].weight, &sites[nsites].x, &sites[nsites].y) != EOF)
|
||||
{
|
||||
nsites++;
|
||||
if (nsites % 4000 == 0) {
|
||||
sites = (SourcePoint *)realloc(sites,(nsites+4000)*sizeof(SourcePoint));
|
||||
}
|
||||
}
|
||||
|
||||
VoronoiDiagramGenerator * pvdg;
|
||||
pvdg = new VoronoiDiagramGenerator();
|
||||
pvdg->generateVoronoi(sites, nsites, xmin, xmax, ymin, ymax, 0);
|
||||
|
||||
// printf("sites %ld\n-------------------------------\n", nsites);
|
||||
PolygonPoint* pSitePoints;
|
||||
int numpoints, i, j;
|
||||
for(i = 0; i < nsites; i++)
|
||||
{
|
||||
pvdg->getSitePoints(i, &numpoints, &pSitePoints);
|
||||
if (numpoints == 0)
|
||||
{
|
||||
printf("-- no points for %d\n", i);
|
||||
}
|
||||
else
|
||||
{
|
||||
|
||||
|
||||
printf("update temp_child_4076440_0 set resultgeom = st_setsrid('POLYGON((");
|
||||
for(j = 0; j < numpoints; j++)
|
||||
{
|
||||
printf("%.15lf %.15lf,", pSitePoints[j].coord.x, pSitePoints[j].coord.y, (pSitePoints[j].angle/M_PI)*180);
|
||||
}
|
||||
printf("%.15lf %.15lf", pSitePoints[0].coord.x, pSitePoints[0].coord.y, (pSitePoints[j].angle/M_PI)*180);
|
||||
printf("))'::geometry,4326) where id = %d;\n", sites[i].id);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
float x1,y1,x2,y2;
|
||||
// printf("sites %ld\n-------------------------------\n", nsites);
|
||||
pvdg->resetIterator();
|
||||
while(pvdg->getNext(x1,y1,x2,y2))
|
||||
{
|
||||
printf("(%f %f,%f %f)\n",x1,y1,x2, y2);
|
||||
|
||||
}
|
||||
|
||||
delete pvdg;
|
||||
free(sites);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
Submodule osm2pgsql updated: 56c8f56b43...2909ff20ef
@@ -3,28 +3,31 @@
|
||||
"place" : {
|
||||
"sea" : [2, 0],
|
||||
"continent" : [2, 0],
|
||||
"country" : [4, 4],
|
||||
"state" : [8, 8],
|
||||
"country" : [4, 0],
|
||||
"state" : [8, 0],
|
||||
"province" : [8, 0],
|
||||
"region" : [18, 0],
|
||||
"county" : 12,
|
||||
"municipality" : [17, 14],
|
||||
"city" : 16,
|
||||
"island" : [17, 0],
|
||||
"town" : [18, 16],
|
||||
"village" : [19, 16],
|
||||
"hamlet" : [19, 16],
|
||||
"municipality" : [19, 16],
|
||||
"district" : [19, 16],
|
||||
"unincorporated_area" : [19, 16],
|
||||
"borough" : [19, 16],
|
||||
"borough" : [19, 18],
|
||||
"hamlet" : 20,
|
||||
"suburb" : 20,
|
||||
"croft" : 20,
|
||||
"subdivision" : 20,
|
||||
"isolated_dwelling" : 20,
|
||||
"allotments" : 20,
|
||||
"farm" : [20, 0],
|
||||
"locality" : [20, 0],
|
||||
"islet" : [20, 0],
|
||||
"mountain_pass" : [20, 0],
|
||||
"neighbourhood" : 22,
|
||||
"quarter" : 22,
|
||||
"city_block" : 22,
|
||||
"houses" : [28, 0]
|
||||
},
|
||||
"boundary" : {
|
||||
@@ -60,7 +63,11 @@
|
||||
"sea" : [4, 0]
|
||||
},
|
||||
"waterway" : {
|
||||
"" : [17, 0]
|
||||
"river" : [19, 0],
|
||||
"stream" : [22, 0],
|
||||
"ditch" : [22, 0],
|
||||
"drain" : [22, 0],
|
||||
"" : [20, 0]
|
||||
},
|
||||
"highway" : {
|
||||
"" : 26,
|
||||
@@ -78,6 +85,37 @@
|
||||
},
|
||||
"mountain_pass" : {
|
||||
"" : [20, 0]
|
||||
},
|
||||
"historic" : {
|
||||
"neighbourhood" : [30, 0]
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "countries" : [ "de" ],
|
||||
"tags" : {
|
||||
"place" : {
|
||||
"region" : [10, 0],
|
||||
"county" : [12, 0]
|
||||
},
|
||||
"boundary" : {
|
||||
"administrative5" : [10, 0]
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "countries" : [ "be" ],
|
||||
"tags" : {
|
||||
"boundary" : {
|
||||
"administrative7" : [14, 0]
|
||||
}
|
||||
}
|
||||
},
|
||||
{ "countries" : ["se", "no"],
|
||||
"tags" : {
|
||||
"place" : {
|
||||
},
|
||||
"boundary" : {
|
||||
"administrative3" : 8,
|
||||
"administrative4" : 12
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,8 +23,6 @@ if (isset($_GET['debug']) && $_GET['debug']) @define('CONST_Debug', true);
|
||||
// term. Spaces are kept but collapsed to one standard space.
|
||||
@define('CONST_Term_Normalization_Rules', ":: NFD (); [[:Nonspacing Mark:] [:Cf:]] >; :: lower (); [[:Punctuation:][:Space:]]+ > ' '; :: NFC ();");
|
||||
|
||||
// Set to false to avoid importing extra postcodes for the US.
|
||||
@define('CONST_Use_Extra_US_Postcodes', true);
|
||||
/* Set to true after importing Tiger house number data for the US.
|
||||
Note: The tables must already exist or queries will throw errors.
|
||||
After changing this setting run ./utils/setup --create-functions
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.