Mirror of https://github.com/osm-search/Nominatim.git (synced 2026-02-14 18:37:58 +00:00)

Compare commits: 71 commits
| SHA1 |
|---|
| 59ea8d0830 |
| a1a136778d |
| 4d6da459da |
| fd6db5a13f |
| d6f298f033 |
| aae90ea5cb |
| b6fd7b1857 |
| 4498c84d48 |
| 75cb02a497 |
| a7d3146967 |
| f94ee6ad57 |
| 031f20c3e6 |
| 8c7f8165bd |
| 86d90bc460 |
| e7ee9c2d84 |
| 5f79cca558 |
| 0b545ba093 |
| a27bf64a55 |
| fcc731dcd4 |
| 6b0ecb0e2f |
| 8a4bd7fa4c |
| 2643225b72 |
| c3dcb6535a |
| 1d248ac86d |
| 7c54c81b77 |
| 3aab35e4b6 |
| 6a5b5b3c36 |
| 0ab547793d |
| be1d7f38ca |
| a178a62877 |
| 665446e1a6 |
| ea48e50c60 |
| 71883fc341 |
| 08b6fffc36 |
| defb809b83 |
| 052d219205 |
| bbd08742ce |
| 70deb7883e |
| 7b24f72870 |
| 11153633ff |
| 3eb21a7694 |
| 9ac57c92ee |
| ac70726591 |
| 34adfd5041 |
| bafd92093b |
| 2cc0fa341b |
| 21adb3740e |
| ffe24a8a64 |
| 00768d321e |
| 6a1e599f31 |
| 5882a15bac |
| e3f90f8dcf |
| 76a83b91d7 |
| b4a8bc1fea |
| 7f95590a77 |
| 090e9e7fb6 |
| 369a0974bd |
| 488cd726ed |
| 496e1b0616 |
| 1bb47a6523 |
| 589bad809e |
| 346da9bc0d |
| 49fa768f67 |
| f6c7c4fcef |
| c2417dd1c4 |
| 0b9618e349 |
| a92907186b |
| e42ff1935f |
| ff35dbd4db |
| bc27cb2bd8 |
| 236750eba4 |
.gitignore (vendored): 10 changed lines
@@ -1,9 +1,11 @@
*.log
*.pyc

nominatim/*.d
nominatim/*.o
nominatim/nominatim
module/nominatim.so
module/nominatim.o
settings/configuration.txt
settings/download.lock
settings/state.txt
@@ -19,3 +21,11 @@ stamp-h1
missing
INSTALL
aclocal.m4
depcomp
install-sh
compile

data/wiki_import.sql
data/wiki_specialphrases.sql
data/osmosischange.osc
AUTHORS: 1 changed line
@@ -14,3 +14,4 @@ Nominatim was written by:
Spin0us
Kurt Roeckx
Rodolphe Quiédeville
Marc Tobias Metten
ChangeLog: 17 changed lines
@@ -27,7 +27,7 @@
* refactoring of front-end PHP code
* lots of smaller bug fixes

2.2
2.2

* correct database rights for www-data
* add timestamps for update output
@@ -42,3 +42,18 @@
* properly detect changes of admin_level
* remove landuses when name is removed
* smaller fixes

2.3

* further improve ordering of results
* support for more lat/lon formats in search-as-reverse
* fix handling of GB postcodes
* new functional test suite
* support for waterway relations
* inherit postcodes from street to poi
* fix housenumber normalisation to find non-latin house numbers
* take viewbox into account for ordering of results
* pois may now inherit address tags from surrounding buildings
* improve what objects may participate in an address
* clean up handled class/type combinations to current OSM usage
* lots of bug fixes
Makefile.am: 21 changed lines
@@ -1,6 +1,27 @@
ACLOCAL_AMFLAGS = -I osm2pgsql/m4
AUTOMAKE_OPTIONS = -Wno-portability

SUBDIRS = osm2pgsql module nominatim

NOMINATIM_SERVER ?= $(shell echo a | php -F lib/init.php -E 'echo CONST_Website_BaseURL."\n";')
NOMINATIM_DATABASE ?= $(shell echo a | php -F lib/init.php -E 'echo DB::parseDSN(CONST_Database_DSN)["database"];')

install:
	@echo Nominatim needs to be executed directly from this directory. No install necessary.

test:
	cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} lettuce -t -Fail -t -poldi-only

test-fast:
	cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 lettuce -t -Fail -t -poldi-only

test-db:
	cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} lettuce -t -Fail -t -poldi-only features/db

test-db-fast:
	cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 lettuce -t -Fail -t -poldi-only features/db

test-api:
	cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} lettuce -t -Fail -t -poldi-only features/api

.PHONY: test test-fast test-db test-db-fast test-api
lib/Geocode.php: 267 changed lines
@@ -27,6 +27,8 @@
|
||||
|
||||
protected $bBoundedSearch = false;
|
||||
protected $aViewBox = false;
|
||||
protected $sViewboxSmallSQL = false;
|
||||
protected $sViewboxLargeSQL = false;
|
||||
protected $aRoutePoints = false;
|
||||
|
||||
protected $iMaxRank = 20;
|
||||
@@ -202,6 +204,98 @@
|
||||
return $this->sQuery;
|
||||
}
|
||||
|
||||
|
||||
function loadParamArray($aParams)
|
||||
{
|
||||
if (isset($aParams['addressdetails'])) $this->bIncludeAddressDetails = (bool)$aParams['addressdetails'];
|
||||
if (isset($aParams['bounded'])) $this->bBoundedSearch = (bool)$aParams['bounded'];
|
||||
if (isset($aParams['dedupe'])) $this->bDeDupe = (bool)$aParams['dedupe'];
|
||||
|
||||
if (isset($aParams['limit'])) $this->setLimit((int)$aParams['limit']);
|
||||
if (isset($aParams['offset'])) $this->iOffset = (int)$aParams['offset'];
|
||||
|
||||
if (isset($aParams['fallback'])) $this->bFallback = (bool)$aParams['fallback'];
|
||||
|
||||
// List of excluded Place IDs - used for more acurate pageing
|
||||
if (isset($aParams['exclude_place_ids']) && $aParams['exclude_place_ids'])
|
||||
{
|
||||
foreach(explode(',',$aParams['exclude_place_ids']) as $iExcludedPlaceID)
|
||||
{
|
||||
$iExcludedPlaceID = (int)$iExcludedPlaceID;
|
||||
if ($iExcludedPlaceID) $aExcludePlaceIDs[$iExcludedPlaceID] = $iExcludedPlaceID;
|
||||
}
|
||||
$this->aExcludePlaceIDs = $aExcludePlaceIDs;
|
||||
}
|
||||
|
||||
// Only certain ranks of feature
|
||||
if (isset($aParams['featureType'])) $this->setFeatureType($aParams['featureType']);
|
||||
if (isset($aParams['featuretype'])) $this->setFeatureType($aParams['featuretype']);
|
||||
|
||||
// Country code list
|
||||
if (isset($aParams['countrycodes']))
|
||||
{
|
||||
$aCountryCodes = array();
|
||||
foreach(explode(',',$aParams['countrycodes']) as $sCountryCode)
|
||||
{
|
||||
if (preg_match('/^[a-zA-Z][a-zA-Z]$/', $sCountryCode))
|
||||
{
|
||||
$aCountryCodes[] = strtolower($sCountryCode);
|
||||
}
|
||||
}
|
||||
$this->aCountryCodes = $aCountryCodes;
|
||||
}
|
||||
|
||||
if (isset($aParams['viewboxlbrt']) && $aParams['viewboxlbrt'])
|
||||
{
|
||||
$aCoOrdinatesLBRT = explode(',',$aParams['viewboxlbrt']);
|
||||
$this->setViewBox($aCoOrdinatesLBRT[0], $aCoOrdinatesLBRT[1], $aCoOrdinatesLBRT[2], $aCoOrdinatesLBRT[3]);
|
||||
}
|
||||
else if (isset($aParams['viewbox']) && $aParams['viewbox'])
|
||||
{
|
||||
$aCoOrdinatesLTRB = explode(',',$aParams['viewbox']);
|
||||
$this->setViewBox($aCoOrdinatesLTRB[0], $aCoOrdinatesLTRB[3], $aCoOrdinatesLTRB[2], $aCoOrdinatesLTRB[1]);
|
||||
}
|
||||
|
||||
if (isset($aParams['route']) && $aParams['route'] && isset($aParams['routewidth']) && $aParams['routewidth'])
|
||||
{
|
||||
$aPoints = explode(',',$aParams['route']);
|
||||
if (sizeof($aPoints) % 2 != 0)
|
||||
{
|
||||
userError("Uneven number of points");
|
||||
exit;
|
||||
}
|
||||
$fPrevCoord = false;
|
||||
$aRoute = array();
|
||||
foreach($aPoints as $i => $fPoint)
|
||||
{
|
||||
if ($i%2)
|
||||
{
|
||||
$aRoute[] = array((float)$fPoint, $fPrevCoord);
|
||||
}
|
||||
else
|
||||
{
|
||||
$fPrevCoord = (float)$fPoint;
|
||||
}
|
||||
}
|
||||
$this->aRoutePoints = $aRoute;
|
||||
}
|
||||
}
|
||||
|
||||
function setQueryFromParams($aParams)
|
||||
{
|
||||
// Search query
|
||||
$sQuery = (isset($aParams['q'])?trim($aParams['q']):'');
|
||||
if (!$sQuery)
|
||||
{
|
||||
$this->setStructuredQuery(@$aParams['amenity'], @$aParams['street'], @$aParams['city'], @$aParams['county'], @$aParams['state'], @$aParams['country'], @$aParams['postalcode']);
|
||||
$this->setReverseInPlan(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
$this->setQuery($sQuery);
|
||||
}
|
||||
}
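A minimal usage sketch (not part of the diff) of the new loadParamArray()/setQueryFromParams() entry points; the require paths, the getDB() helper, the constructor argument and the parameter values are assumptions based on the surrounding codebase:

<?php
// Hypothetical sketch; assumes the Nominatim libraries provide getDB()
// and the Geocode class shown in this diff.
require_once('lib/init.php');
require_once('lib/Geocode.php');

$oDB =& getDB();
$oGeocode = new Geocode($oDB);
$oGeocode->loadParamArray(array(
        'viewbox'      => '-0.5,51.7,0.3,51.3', // left,top,right,bottom
        'countrycodes' => 'gb,ie',
        'bounded'      => '1',
        'limit'        => '5',
    ));
$oGeocode->setQueryFromParams(array('q' => 'London'));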
|
||||
|
||||
function loadStructuredAddressElement($sValue, $sKey, $iNewMinAddressRank, $iNewMaxAddressRank, $aItemListValues)
|
||||
{
|
||||
$sValue = trim($sValue);
|
||||
@@ -278,12 +372,16 @@
|
||||
// Get the details for display (is this a redundant extra step?)
|
||||
$sPlaceIDs = join(',',$aPlaceIDs);
|
||||
|
||||
$sImportanceSQL = '';
|
||||
if ($this->sViewboxSmallSQL) $sImportanceSQL .= " case when ST_Contains($this->sViewboxSmallSQL, ST_Collect(centroid)) THEN 1 ELSE 0.75 END * ";
|
||||
if ($this->sViewboxLargeSQL) $sImportanceSQL .= " case when ST_Contains($this->sViewboxLargeSQL, ST_Collect(centroid)) THEN 1 ELSE 0.75 END * ";
|
||||
|
||||
$sSQL = "select osm_type,osm_id,class,type,admin_level,rank_search,rank_address,min(place_id) as place_id, min(parent_place_id) as parent_place_id, calculated_country_code as country_code,";
|
||||
$sSQL .= "get_address_by_language(place_id, $sLanguagePrefArraySQL) as langaddress,";
|
||||
$sSQL .= "get_name_by_language(name, $sLanguagePrefArraySQL) as placename,";
|
||||
$sSQL .= "get_name_by_language(name, ARRAY['ref']) as ref,";
|
||||
$sSQL .= "avg(ST_X(centroid)) as lon,avg(ST_Y(centroid)) as lat, ";
|
||||
$sSQL .= "coalesce(importance,0.75-(rank_search::float/40)) as importance, ";
|
||||
$sSQL .= $sImportanceSQL."coalesce(importance,0.75-(rank_search::float/40)) as importance, ";
|
||||
$sSQL .= "(select max(p.importance*(p.rank_address+2)) from place_addressline s, placex p where s.place_id = min(CASE WHEN placex.rank_search < 28 THEN placex.place_id ELSE placex.parent_place_id END) and p.place_id = s.address_place_id and s.isaddress and p.importance is not null) as addressimportance, ";
|
||||
$sSQL .= "(extratags->'place') as extra_place ";
|
||||
$sSQL .= "from placex where place_id in ($sPlaceIDs) ";
|
||||
@@ -308,20 +406,20 @@
|
||||
$sSQL .= "null as placename,";
|
||||
$sSQL .= "null as ref,";
|
||||
$sSQL .= "avg(ST_X(centroid)) as lon,avg(ST_Y(centroid)) as lat, ";
|
||||
$sSQL .= "-0.15 as importance, ";
|
||||
$sSQL .= $sImportanceSQL."-1.15 as importance, ";
|
||||
$sSQL .= "(select max(p.importance*(p.rank_address+2)) from place_addressline s, placex p where s.place_id = min(location_property_tiger.parent_place_id) and p.place_id = s.address_place_id and s.isaddress and p.importance is not null) as addressimportance, ";
|
||||
$sSQL .= "null as extra_place ";
|
||||
$sSQL .= "from location_property_tiger where place_id in ($sPlaceIDs) ";
|
||||
$sSQL .= "and 30 between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
$sSQL .= "group by place_id";
|
||||
if (!$this->bDeDupe) $sSQL .= ",place_id";
|
||||
if (!$this->bDeDupe) $sSQL .= ",place_id ";
|
||||
$sSQL .= " union ";
|
||||
$sSQL .= "select 'L' as osm_type,place_id as osm_id,'place' as class,'house' as type,null as admin_level,30 as rank_search,30 as rank_address,min(place_id) as place_id, min(parent_place_id) as parent_place_id,'us' as country_code,";
|
||||
$sSQL .= "get_address_by_language(place_id, $sLanguagePrefArraySQL) as langaddress,";
|
||||
$sSQL .= "null as placename,";
|
||||
$sSQL .= "null as ref,";
|
||||
$sSQL .= "avg(ST_X(centroid)) as lon,avg(ST_Y(centroid)) as lat, ";
|
||||
$sSQL .= "-0.10 as importance, ";
|
||||
$sSQL .= $sImportanceSQL."-1.10 as importance, ";
|
||||
$sSQL .= "(select max(p.importance*(p.rank_address+2)) from place_addressline s, placex p where s.place_id = min(location_property_aux.parent_place_id) and p.place_id = s.address_place_id and s.isaddress and p.importance is not null) as addressimportance, ";
|
||||
$sSQL .= "null as extra_place ";
|
||||
$sSQL .= "from location_property_aux where place_id in ($sPlaceIDs) ";
|
||||
@@ -331,7 +429,7 @@
|
||||
$sSQL .= ",get_address_by_language(place_id, $sLanguagePrefArraySQL) ";
|
||||
}
|
||||
|
||||
$sSQL .= "order by importance desc";
|
||||
$sSQL .= " order by importance desc";
|
||||
if (CONST_Debug) { echo "<hr>"; var_dump($sSQL); }
|
||||
$aSearchResults = $this->oDB->getAll($sSQL);
|
||||
|
||||
@@ -385,19 +483,18 @@
|
||||
$sCountryCodesSQL = join(',', array_map('addQuotes', $this->aCountryCodes));
|
||||
}
|
||||
|
||||
// Hack to make it handle "new york, ny" (and variants) correctly
|
||||
$sQuery = str_ireplace(array('New York, ny','new york, new york', 'New York ny','new york new york'), 'new york city, ny', $this->sQuery);
|
||||
$sQuery = $this->sQuery;
|
||||
|
||||
// Conflicts between US state abreviations and various words for 'the' in different languages
|
||||
if (isset($this->aLangPrefOrder['name:en']))
|
||||
{
|
||||
$sQuery = preg_replace('/,\s*il\s*(,|$)/',', illinois\1', $sQuery);
|
||||
$sQuery = preg_replace('/,\s*al\s*(,|$)/',', alabama\1', $sQuery);
|
||||
$sQuery = preg_replace('/,\s*la\s*(,|$)/',', louisiana\1', $sQuery);
|
||||
$sQuery = preg_replace('/(^|,)\s*il\s*(,|$)/','\1illinois\2', $sQuery);
|
||||
$sQuery = preg_replace('/(^|,)\s*al\s*(,|$)/','\1alabama\2', $sQuery);
|
||||
$sQuery = preg_replace('/(^|,)\s*la\s*(,|$)/','\1louisiana\2', $sQuery);
|
||||
}
|
||||
|
||||
// View Box SQL
|
||||
$sViewboxCentreSQL = $sViewboxSmallSQL = $sViewboxLargeSQL = false;
|
||||
$sViewboxCentreSQL = false;
|
||||
$bBoundingBoxSearch = false;
|
||||
if ($this->aViewBox)
|
||||
{
|
||||
@@ -408,8 +505,8 @@
|
||||
$aBigViewBox[1] = $this->aViewBox[1] + $fWidth;
|
||||
$aBigViewBox[3] = $this->aViewBox[3] - $fWidth;
|
||||
|
||||
$sViewboxSmallSQL = "ST_SetSRID(ST_MakeBox2D(ST_Point(".(float)$this->aViewBox[0].",".(float)$this->aViewBox[1]."),ST_Point(".(float)$this->aViewBox[2].",".(float)$this->aViewBox[3].")),4326)";
|
||||
$sViewboxLargeSQL = "ST_SetSRID(ST_MakeBox2D(ST_Point(".(float)$aBigViewBox[0].",".(float)$aBigViewBox[1]."),ST_Point(".(float)$aBigViewBox[2].",".(float)$aBigViewBox[3].")),4326)";
|
||||
$this->sViewboxSmallSQL = "ST_SetSRID(ST_MakeBox2D(ST_Point(".(float)$this->aViewBox[0].",".(float)$this->aViewBox[1]."),ST_Point(".(float)$this->aViewBox[2].",".(float)$this->aViewBox[3].")),4326)";
|
||||
$this->sViewboxLargeSQL = "ST_SetSRID(ST_MakeBox2D(ST_Point(".(float)$aBigViewBox[0].",".(float)$aBigViewBox[1]."),ST_Point(".(float)$aBigViewBox[2].",".(float)$aBigViewBox[3].")),4326)";
|
||||
$bBoundingBoxSearch = $this->bBoundedSearch;
|
||||
}
|
||||
|
||||
@@ -421,59 +518,33 @@
|
||||
foreach($this->aRoutePoints as $aPoint)
|
||||
{
|
||||
if (!$bFirst) $sViewboxCentreSQL .= ",";
|
||||
$sViewboxCentreSQL .= $aPoint[1].' '.$aPoint[0];
|
||||
$sViewboxCentreSQL .= $aPoint[0].' '.$aPoint[1];
|
||||
$bFirst = false;
|
||||
}
|
||||
$sViewboxCentreSQL .= ")'::geometry,4326)";
|
||||
|
||||
$sSQL = "select st_buffer(".$sViewboxCentreSQL.",".(float)($_GET['routewidth']/69).")";
|
||||
$sViewboxSmallSQL = $this->oDB->getOne($sSQL);
|
||||
if (PEAR::isError($sViewboxSmallSQL))
|
||||
$this->sViewboxSmallSQL = $this->oDB->getOne($sSQL);
|
||||
if (PEAR::isError($this->sViewboxSmallSQL))
|
||||
{
|
||||
failInternalError("Could not get small viewbox.", $sSQL, $sViewboxSmallSQL);
|
||||
failInternalError("Could not get small viewbox.", $sSQL, $this->sViewboxSmallSQL);
|
||||
}
|
||||
$sViewboxSmallSQL = "'".$sViewboxSmallSQL."'::geometry";
|
||||
$this->sViewboxSmallSQL = "'".$this->sViewboxSmallSQL."'::geometry";
|
||||
|
||||
$sSQL = "select st_buffer(".$sViewboxCentreSQL.",".(float)($_GET['routewidth']/30).")";
|
||||
$sViewboxLargeSQL = $this->oDB->getOne($sSQL);
|
||||
if (PEAR::isError($sViewboxLargeSQL))
|
||||
$this->sViewboxLargeSQL = $this->oDB->getOne($sSQL);
|
||||
if (PEAR::isError($this->sViewboxLargeSQL))
|
||||
{
|
||||
failInternalError("Could not get large viewbox.", $sSQL, $sViewboxLargeSQL);
|
||||
failInternalError("Could not get large viewbox.", $sSQL, $this->sViewboxLargeSQL);
|
||||
}
|
||||
$sViewboxLargeSQL = "'".$sViewboxLargeSQL."'::geometry";
|
||||
$this->sViewboxLargeSQL = "'".$this->sViewboxLargeSQL."'::geometry";
|
||||
$bBoundingBoxSearch = $this->bBoundedSearch;
|
||||
}
|
||||
|
||||
// Do we have anything that looks like a lat/lon pair?
|
||||
if (preg_match('/\\b([NS])[ ]+([0-9]+[0-9.]*)[ ]+([0-9.]+)?[, ]+([EW])[ ]+([0-9]+)[ ]+([0-9]+[0-9.]*)?\\b/', $sQuery, $aData))
|
||||
{
|
||||
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60);
|
||||
$fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[5] + $aData[6]/60);
|
||||
if ($fQueryLat <= 90.1 && $fQueryLat >= -90.1 && $fQueryLon <= 180.1 && $fQueryLon >= -180.1)
|
||||
{
|
||||
$this->setNearPoint(array($fQueryLat, $fQueryLon));
|
||||
$sQuery = trim(str_replace($aData[0], ' ', $sQuery));
|
||||
}
|
||||
}
|
||||
elseif (preg_match('/\\b([0-9]+)[ ]+([0-9]+[0-9.]*)?[ ]+([NS])[, ]+([0-9]+)[ ]+([0-9]+[0-9.]*)?[ ]+([EW])\\b/', $sQuery, $aData))
|
||||
{
|
||||
$fQueryLat = ($aData[3]=='N'?1:-1) * ($aData[1] + $aData[2]/60);
|
||||
$fQueryLon = ($aData[6]=='E'?1:-1) * ($aData[4] + $aData[5]/60);
|
||||
if ($fQueryLat <= 90.1 && $fQueryLat >= -90.1 && $fQueryLon <= 180.1 && $fQueryLon >= -180.1)
|
||||
{
|
||||
$this->setNearPoint(array($fQueryLat, $fQueryLon));
|
||||
$sQuery = trim(str_replace($aData[0], ' ', $sQuery));
|
||||
}
|
||||
}
|
||||
elseif (preg_match('/(\\[|^|\\b)(-?[0-9]+[0-9]*\\.[0-9]+)[, ]+(-?[0-9]+[0-9]*\\.[0-9]+)(\\]|$|\\b)/', $sQuery, $aData))
|
||||
{
|
||||
$fQueryLat = $aData[2];
|
||||
$fQueryLon = $aData[3];
|
||||
if ($fQueryLat <= 90.1 && $fQueryLat >= -90.1 && $fQueryLon <= 180.1 && $fQueryLon >= -180.1)
|
||||
{
|
||||
$this->setNearPoint(array($fQueryLat, $fQueryLon));
|
||||
$sQuery = trim(str_replace($aData[0], ' ', $sQuery));
|
||||
}
|
||||
if ( $aLooksLike = looksLikeLatLonPair($sQuery) ){
|
||||
$this->setNearPoint(array($aLooksLike['lat'], $aLooksLike['lon']));
|
||||
$sQuery = $aLooksLike['query'];
|
||||
}
|
||||
|
||||
$aSearchResults = array();
|
||||
@@ -636,7 +707,7 @@
|
||||
{
|
||||
if (substr($aData[1],-2,1) != ' ')
|
||||
{
|
||||
$aData[0] = substr($aData[0],0,strlen($aData[1]-1)).' '.substr($aData[0],strlen($aData[1]-1));
|
||||
$aData[0] = substr($aData[0],0,strlen($aData[1])-1).' '.substr($aData[0],strlen($aData[1])-1);
|
||||
$aData[1] = substr($aData[1],0,-1).' '.substr($aData[1],-1,1);
|
||||
}
|
||||
$aGBPostcodeLocation = gbPostcodeCalculate($aData[0], $aData[1], $aData[2], $this->oDB);
|
||||
@@ -793,6 +864,9 @@
|
||||
if ($aSearch['sHouseNumber'] === '')
|
||||
{
|
||||
$aSearch['sHouseNumber'] = $sToken;
|
||||
// sanity check: if the housenumber is not mainly made
|
||||
// up of numbers, add a penalty
|
||||
if (preg_match_all("/[^0-9]/", $sToken, $aMatches) > 2) $aSearch['iSearchRank']++;
|
||||
if ($aSearch['iSearchRank'] < $this->iMaxRank) $aNewWordsetSearches[] = $aSearch;
|
||||
/*
|
||||
// Fall back to not searching for this item (better than nothing)
|
||||
@@ -811,6 +885,7 @@
|
||||
$aSearch['sType'] = $aSearchTerm['type'];
|
||||
if (sizeof($aSearch['aName'])) $aSearch['sOperator'] = 'name';
|
||||
else $aSearch['sOperator'] = 'near'; // near = in for the moment
|
||||
if (strlen($aSearchTerm['operator']) == 0) $aSearch['iSearchRank'] += 1;
|
||||
|
||||
// Do we have a shortcut id?
|
||||
if ($aSearch['sOperator'] == 'name')
|
||||
@@ -857,7 +932,7 @@
|
||||
{
|
||||
if (isset($aSearchTerm['word_id']) && $aSearchTerm['word_id'])
|
||||
{
|
||||
if ((!$bStructuredPhrases || $iPhrase > 0) && sizeof($aCurrentSearch['aName']) && strlen($sToken) >= 4)
|
||||
if ((!$bStructuredPhrases || $iPhrase > 0) && sizeof($aCurrentSearch['aName']) && strpos($sToken, ' ') === false)
|
||||
{
|
||||
$aSearch = $aCurrentSearch;
|
||||
$aSearch['iSearchRank'] += 1;
|
||||
@@ -866,8 +941,11 @@
|
||||
$aSearch['aAddress'][$aSearchTerm['word_id']] = $aSearchTerm['word_id'];
|
||||
if ($aSearch['iSearchRank'] < $this->iMaxRank) $aNewWordsetSearches[] = $aSearch;
|
||||
}
|
||||
elseif (isset($aValidTokens[' '.$sToken])) // revert to the token version?
|
||||
elseif (isset($aValidTokens[' '.$sToken]) && strlen($sToken) >= 4) // revert to the token version?
|
||||
{
|
||||
$aSearch['aAddressNonSearch'][$aSearchTerm['word_id']] = $aSearchTerm['word_id'];
|
||||
$aSearch['iSearchRank'] += 1;
|
||||
if ($aSearch['iSearchRank'] < $this->iMaxRank) $aNewWordsetSearches[] = $aSearch;
|
||||
foreach($aValidTokens[' '.$sToken] as $aSearchTermToken)
|
||||
{
|
||||
if (empty($aSearchTermToken['country_code'])
|
||||
@@ -884,6 +962,7 @@
|
||||
else
|
||||
{
|
||||
$aSearch['aAddressNonSearch'][$aSearchTerm['word_id']] = $aSearchTerm['word_id'];
|
||||
if (preg_match('#^[0-9]+$#', $sToken)) $aSearch['iSearchRank'] += 2;
|
||||
if ($aSearch['iSearchRank'] < $this->iMaxRank) $aNewWordsetSearches[] = $aSearch;
|
||||
}
|
||||
}
|
||||
@@ -891,7 +970,8 @@
|
||||
if (!sizeof($aCurrentSearch['aName']) || $aCurrentSearch['iNamePhrase'] == $iPhrase)
|
||||
{
|
||||
$aSearch = $aCurrentSearch;
|
||||
$aSearch['iSearchRank'] += 2;
|
||||
$aSearch['iSearchRank'] += 1;
|
||||
if (!sizeof($aCurrentSearch['aName'])) $aSearch['iSearchRank'] += 1;
|
||||
if (preg_match('#^[0-9]+$#', $sToken)) $aSearch['iSearchRank'] += 2;
|
||||
if ($aWordFrequencyScores[$aSearchTerm['word_id']] < CONST_Max_Word_Frequency)
|
||||
$aSearch['aName'][$aSearchTerm['word_id']] = $aSearchTerm['word_id'];
|
||||
@@ -1080,6 +1160,10 @@
|
||||
if (CONST_Debug) var_dump($sSQL);
|
||||
$aPlaceIDs = $this->oDB->getCol($sSQL);
|
||||
}
|
||||
else
|
||||
{
|
||||
$aPlaceIDs = array();
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
@@ -1090,7 +1174,7 @@
|
||||
{
|
||||
$sSQL = "select place_id from place_classtype_".$aSearch['sClass']."_".$aSearch['sType']." ct";
|
||||
if ($sCountryCodesSQL) $sSQL .= " join placex using (place_id)";
|
||||
$sSQL .= " where st_contains($sViewboxSmallSQL, ct.centroid)";
|
||||
$sSQL .= " where st_contains($this->sViewboxSmallSQL, ct.centroid)";
|
||||
if ($sCountryCodesSQL) $sSQL .= " and calculated_country_code in ($sCountryCodesSQL)";
|
||||
if (sizeof($this->aExcludePlaceIDs))
|
||||
{
|
||||
@@ -1104,11 +1188,12 @@
|
||||
// If excluded place IDs are given, it is fair to assume that
|
||||
// there have been results in the small box, so no further
|
||||
// expansion in that case.
|
||||
if (!sizeof($aPlaceIDs) && !sizeof($this->aExcludePlaceIDs))
|
||||
// Also don't expand if bounded results were requested.
|
||||
if (!sizeof($aPlaceIDs) && !sizeof($this->aExcludePlaceIDs) && !$this->bBoundedSearch)
|
||||
{
|
||||
$sSQL = "select place_id from place_classtype_".$aSearch['sClass']."_".$aSearch['sType']." ct";
|
||||
if ($sCountryCodesSQL) $sSQL .= " join placex using (place_id)";
|
||||
$sSQL .= " where st_contains($sViewboxLargeSQL, ct.centroid)";
|
||||
$sSQL .= " where st_contains($this->sViewboxLargeSQL, ct.centroid)";
|
||||
if ($sCountryCodesSQL) $sSQL .= " and calculated_country_code in ($sCountryCodesSQL)";
|
||||
if ($sViewboxCentreSQL) $sSQL .= " order by st_distance($sViewboxCentreSQL, ct.centroid) asc";
|
||||
$sSQL .= " limit $this->iLimit";
|
||||
@@ -1119,7 +1204,7 @@
|
||||
else
|
||||
{
|
||||
$sSQL = "select place_id from placex where class='".$aSearch['sClass']."' and type='".$aSearch['sType']."'";
|
||||
$sSQL .= " and st_contains($sViewboxSmallSQL, geometry) and linked_place_id is null";
|
||||
$sSQL .= " and st_contains($this->sViewboxSmallSQL, geometry) and linked_place_id is null";
|
||||
if ($sCountryCodesSQL) $sSQL .= " and calculated_country_code in ($sCountryCodesSQL)";
|
||||
if ($sViewboxCentreSQL) $sSQL .= " order by st_distance($sViewboxCentreSQL, centroid) asc";
|
||||
$sSQL .= " limit $this->iLimit";
|
||||
@@ -1156,7 +1241,21 @@
|
||||
}
|
||||
}
|
||||
if ($aSearch['sCountryCode']) $aTerms[] = "country_code = '".pg_escape_string($aSearch['sCountryCode'])."'";
|
||||
if ($aSearch['sHouseNumber']) $aTerms[] = "address_rank between 16 and 27";
|
||||
if ($aSearch['sHouseNumber'])
|
||||
{
|
||||
$aTerms[] = "address_rank between 16 and 27";
|
||||
}
|
||||
else
|
||||
{
|
||||
if ($this->iMinAddressRank > 0)
|
||||
{
|
||||
$aTerms[] = "address_rank >= ".$this->iMinAddressRank;
|
||||
}
|
||||
if ($this->iMaxAddressRank < 30)
|
||||
{
|
||||
$aTerms[] = "address_rank <= ".$this->iMaxAddressRank;
|
||||
}
|
||||
}
|
||||
if ($aSearch['fLon'] && $aSearch['fLat'])
|
||||
{
|
||||
$aTerms[] = "ST_DWithin(centroid, ST_SetSRID(ST_Point(".$aSearch['fLon'].",".$aSearch['fLat']."),4326), ".$aSearch['fRadius'].")";
|
||||
@@ -1171,12 +1270,20 @@
|
||||
$aTerms[] = "country_code in ($sCountryCodesSQL)";
|
||||
}
|
||||
|
||||
if ($bBoundingBoxSearch) $aTerms[] = "centroid && $sViewboxSmallSQL";
|
||||
if ($bBoundingBoxSearch) $aTerms[] = "centroid && $this->sViewboxSmallSQL";
|
||||
if ($sNearPointSQL) $aOrder[] = "ST_Distance($sNearPointSQL, centroid) asc";
|
||||
|
||||
$sImportanceSQL = '(case when importance = 0 OR importance IS NULL then 0.75-(search_rank::float/40) else importance end)';
|
||||
if ($sViewboxSmallSQL) $sImportanceSQL .= " * case when ST_Contains($sViewboxSmallSQL, centroid) THEN 1 ELSE 0.5 END";
|
||||
if ($sViewboxLargeSQL) $sImportanceSQL .= " * case when ST_Contains($sViewboxLargeSQL, centroid) THEN 1 ELSE 0.5 END";
|
||||
if ($aSearch['sHouseNumber'])
|
||||
{
|
||||
$sImportanceSQL = '- abs(26 - address_rank) + 3';
|
||||
}
|
||||
else
|
||||
{
|
||||
$sImportanceSQL = '(case when importance = 0 OR importance IS NULL then 0.75-(search_rank::float/40) else importance end)';
|
||||
}
|
||||
if ($this->sViewboxSmallSQL) $sImportanceSQL .= " * case when ST_Contains($this->sViewboxSmallSQL, centroid) THEN 1 ELSE 0.5 END";
|
||||
if ($this->sViewboxLargeSQL) $sImportanceSQL .= " * case when ST_Contains($this->sViewboxLargeSQL, centroid) THEN 1 ELSE 0.5 END";
|
||||
|
||||
$aOrder[] = "$sImportanceSQL DESC";
|
||||
if (sizeof($aSearch['aFullNameAddress']))
|
||||
{
|
||||
@@ -1229,8 +1336,8 @@
|
||||
$sPlaceIDs = join(',',$aPlaceIDs);
|
||||
|
||||
// Now they are indexed look for a house attached to a street we found
|
||||
$sHouseNumberRegex = '\\\\m'.str_replace(' ','[-,/ ]',$aSearch['sHouseNumber']).'\\\\M';
|
||||
$sSQL = "select place_id from placex where parent_place_id in (".$sPlaceIDs.") and housenumber ~* E'".$sHouseNumberRegex."'";
|
||||
$sHouseNumberRegex = '\\\\m'.$aSearch['sHouseNumber'].'\\\\M';
|
||||
$sSQL = "select place_id from placex where parent_place_id in (".$sPlaceIDs.") and transliteration(housenumber) ~* E'".$sHouseNumberRegex."'";
|
||||
if (sizeof($this->aExcludePlaceIDs))
|
||||
{
|
||||
$sSQL .= " and place_id not in (".join(',',$this->aExcludePlaceIDs).")";
|
||||
@@ -1353,7 +1460,7 @@
|
||||
}
|
||||
if ($sCountryCodesSQL) $sSQL .= " and lp.calculated_country_code in ($sCountryCodesSQL)";
|
||||
if ($sOrderBySQL) $sSQL .= "order by ".$sOrderBySQL." asc";
|
||||
if ($iOffset) $sSQL .= " offset $iOffset";
|
||||
if ($this->iOffset) $sSQL .= " offset $this->iOffset";
|
||||
$sSQL .= " limit $this->iLimit";
|
||||
if (CONST_Debug) var_dump($sSQL);
|
||||
$aClassPlaceIDs = array_merge($aClassPlaceIDs, $this->oDB->getCol($sSQL));
|
||||
@@ -1375,7 +1482,7 @@
|
||||
}
|
||||
if ($sCountryCodesSQL) $sSQL .= " and l.calculated_country_code in ($sCountryCodesSQL)";
|
||||
if ($sOrderBy) $sSQL .= "order by ".$OrderBysSQL." asc";
|
||||
if ($iOffset) $sSQL .= " offset $iOffset";
|
||||
if ($this->iOffset) $sSQL .= " offset $this->iOffset";
|
||||
$sSQL .= " limit $this->iLimit";
|
||||
if (CONST_Debug) var_dump($sSQL);
|
||||
$aClassPlaceIDs = array_merge($aClassPlaceIDs, $this->oDB->getCol($sSQL));
|
||||
@@ -1456,7 +1563,7 @@
|
||||
}
|
||||
|
||||
$aClassType = getClassTypesWithImportance();
|
||||
$aRecheckWords = preg_split('/\b/u',$sQuery);
|
||||
$aRecheckWords = preg_split('/\b[\s,\\-]*/u',$sQuery);
|
||||
foreach($aRecheckWords as $i => $sWord)
|
||||
{
|
||||
if (!$sWord) unset($aRecheckWords[$i]);
|
||||
@@ -1581,7 +1688,7 @@
|
||||
$aResult['aPolyPoints'][] = array($aPoint[1], $aPoint[2]);
|
||||
}
|
||||
}
|
||||
$aResult['aBoundingBox'] = array($aPointPolygon['minlat'],$aPointPolygon['maxlat'],$aPointPolygon['minlon'],$aPointPolygon['maxlon']);
|
||||
$aResult['aBoundingBox'] = array((string)$aPointPolygon['minlat'],(string)$aPointPolygon['maxlat'],(string)$aPointPolygon['minlon'],(string)$aPointPolygon['maxlon']);
|
||||
}
|
||||
|
||||
// Is there an icon set for this type of result?
|
||||
@@ -1617,7 +1724,11 @@
|
||||
$sAddress = $aResult['langaddress'];
|
||||
foreach($aRecheckWords as $i => $sWord)
|
||||
{
|
||||
if (stripos($sAddress, $sWord)!==false) $iCountWords++;
|
||||
if (stripos($sAddress, $sWord)!==false)
|
||||
{
|
||||
$iCountWords++;
|
||||
if (preg_match("/(^|,)\s*$sWord\s*(,|$)/", $sAddress)) $iCountWords += 0.1;
|
||||
}
|
||||
}
|
||||
|
||||
$aResult['importance'] = $aResult['importance'] + ($iCountWords*0.1); // 0.1 is a completely arbitrary number but something in the range 0.1 to 0.5 would seem right
|
||||
@@ -1681,17 +1792,3 @@
|
||||
|
||||
} // end class
|
||||
|
||||
|
||||
/*
|
||||
if (isset($_GET['route']) && $_GET['route'] && isset($_GET['routewidth']) && $_GET['routewidth'])
|
||||
{
|
||||
$aPoints = explode(',',$_GET['route']);
|
||||
if (sizeof($aPoints) % 2 != 0)
|
||||
{
|
||||
userError("Uneven number of points");
|
||||
exit;
|
||||
}
|
||||
$sViewboxCentreSQL = "ST_SetSRID('LINESTRING(";
|
||||
$fPrevCoord = false;
|
||||
}
|
||||
*/
|
||||
|
||||
@@ -124,7 +124,7 @@
|
||||
{
|
||||
$sTypeLabel = strtolower(isset($aTypeLabel['simplelabel'])?$aTypeLabel['simplelabel']:$aTypeLabel['label']);
|
||||
$sTypeLabel = str_replace(' ','_',$sTypeLabel);
|
||||
if (!isset($aAddress[$sTypeLabel]) || (isset($aFallback[$sTypeLabel]) && $aFallback[$sTypeLabel]))
|
||||
if (!isset($aAddress[$sTypeLabel]) || (isset($aFallback[$sTypeLabel]) && $aFallback[$sTypeLabel]) || $aLine['class'] == 'place')
|
||||
{
|
||||
$aAddress[$sTypeLabel] = $aLine['localname']?$aLine['localname']:$aLine['housenumber'];
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
<?php
|
||||
if (file_exists(getenv('NOMINATIM_SETTINGS')))
|
||||
{
|
||||
echo 'Using local config: '.getenv('NOMINATIM_SETTINGS');
|
||||
require_once(getenv('NOMINATIM_SETTINGS'));
|
||||
}
|
||||
|
||||
|
||||
lib/lib.php: 127 changed lines
@@ -45,7 +45,7 @@
|
||||
if (!$sUserError) $sUserError = $sError;
|
||||
error_log('ERROR: '.$sError);
|
||||
echo $sUserError."\n";
|
||||
exit;
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
|
||||
@@ -94,7 +94,8 @@
|
||||
|
||||
function bySearchRank($a, $b)
|
||||
{
|
||||
if ($a['iSearchRank'] == $b['iSearchRank']) return 0;
|
||||
if ($a['iSearchRank'] == $b['iSearchRank'])
|
||||
return strlen($a['sOperator']) + strlen($a['sHouseNumber']) - strlen($b['sOperator']) - strlen($b['sHouseNumber']);
|
||||
return ($a['iSearchRank'] < $b['iSearchRank']?-1:1);
|
||||
}
|
||||
|
||||
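A short sketch (not part of the diff) of how the comparator is typically applied; the array name is an assumption:

<?php
// Hypothetical sketch: with the change, ties on iSearchRank are broken in
// favour of searches with shorter sOperator/sHouseNumber strings.
usort($aSearches, 'bySearchRank');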
@@ -117,18 +118,26 @@
|
||||
}
|
||||
|
||||
|
||||
function getPreferredLanguages()
|
||||
function getPreferredLanguages($sLangString=false)
|
||||
{
|
||||
// If we have been provided the value in $_GET it overrides browser value
|
||||
if (isset($_GET['accept-language']) && $_GET['accept-language'])
|
||||
if (!$sLangString)
|
||||
{
|
||||
$_SERVER["HTTP_ACCEPT_LANGUAGE"] = $_GET['accept-language'];
|
||||
// If we have been provided the value in $_GET it overrides browser value
|
||||
if (isset($_GET['accept-language']) && $_GET['accept-language'])
|
||||
{
|
||||
$_SERVER["HTTP_ACCEPT_LANGUAGE"] = $_GET['accept-language'];
|
||||
$sLangString = $_GET['accept-language'];
|
||||
}
|
||||
else if (isset($_SERVER["HTTP_ACCEPT_LANGUAGE"]))
|
||||
{
|
||||
$sLangString = $_SERVER["HTTP_ACCEPT_LANGUAGE"];
|
||||
}
|
||||
}
|
||||
|
||||
$aLanguages = array();
|
||||
if (isset($_SERVER["HTTP_ACCEPT_LANGUAGE"]))
|
||||
if ($sLangString)
|
||||
{
|
||||
if (preg_match_all('/(([a-z]{1,8})(-[a-z]{1,8})?)\s*(;\s*q\s*=\s*(1|0\.[0-9]+))?/i', $_SERVER['HTTP_ACCEPT_LANGUAGE'], $aLanguagesParse, PREG_SET_ORDER))
|
||||
if (preg_match_all('/(([a-z]{1,8})(-[a-z]{1,8})?)\s*(;\s*q\s*=\s*(1|0\.[0-9]+))?/i', $sLangString, $aLanguagesParse, PREG_SET_ORDER))
|
||||
{
|
||||
foreach($aLanguagesParse as $iLang => $aLanguage)
|
||||
{
|
||||
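A minimal sketch (not part of the diff) of the new optional argument, which lets callers and tests pass an Accept-Language string directly instead of relying on $_GET or the request headers:

<?php
// Hypothetical sketch: parse an explicit language preference string.
$aLangPrefOrder = getPreferredLanguages('de-DE,de;q=0.8,en;q=0.5');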
@@ -246,7 +255,13 @@
|
||||
|
||||
if (sizeof($aNearPostcodes))
|
||||
{
|
||||
return array(array('lat' => $aNearPostcodes[0]['lat'], 'lon' => $aNearPostcodes[0]['lon'], 'radius' => 0.005));
|
||||
$aPostcodes = array();
|
||||
foreach($aNearPostcodes as $aPostcode)
|
||||
{
|
||||
$aPostcodes[] = array('lat' => $aPostcode['lat'], 'lon' => $aPostcode['lon'], 'radius' => 0.005);
|
||||
}
|
||||
|
||||
return $aPostcodes;
|
||||
}
|
||||
|
||||
return false;
|
||||
@@ -371,6 +386,8 @@
|
||||
'landuse:commercial' => array('label'=>'Commercial','frequency'=>657,'icon'=>'',),
|
||||
|
||||
'place:airport' => array('label'=>'Airport','frequency'=>36,'icon'=>'transport_airport2', 'defdiameter' => 0.03,),
|
||||
'aeroway:aerodrome' => array('label'=>'Aerodrome','frequency'=>36,'icon'=>'transport_airport2', 'defdiameter' => 0.03,),
|
||||
'aeroway' => array('label'=>'Aeroway','frequency'=>36,'icon'=>'transport_airport2', 'defdiameter' => 0.03,),
|
||||
'railway:station' => array('label'=>'Station','frequency'=>3431,'icon'=>'transport_train_station2', 'defdiameter' => 0.01,),
|
||||
'amenity:place_of_worship' => array('label'=>'Place Of Worship','frequency'=>9049,'icon'=>'place_of_worship_unknown3',),
|
||||
'amenity:pub' => array('label'=>'Pub','frequency'=>18969,'icon'=>'food_pub',),
|
||||
@@ -774,7 +791,7 @@
|
||||
{
|
||||
$sTypeLabel = strtolower(isset($aTypeLabel['simplelabel'])?$aTypeLabel['simplelabel']:$aTypeLabel['label']);
|
||||
$sTypeLabel = str_replace(' ','_',$sTypeLabel);
|
||||
if (!isset($aAddress[$sTypeLabel]) || (isset($aFallback[$sTypeLabel]) && $aFallback[$sTypeLabel]))
|
||||
if (!isset($aAddress[$sTypeLabel]) || (isset($aFallback[$sTypeLabel]) && $aFallback[$sTypeLabel]) || $aLine['class'] == 'place')
|
||||
{
|
||||
$aAddress[$sTypeLabel] = $aLine['localname']?$aLine['localname']:$aLine['housenumber'];
|
||||
}
|
||||
@@ -888,3 +905,93 @@
|
||||
{
|
||||
return "'".$s."'";
|
||||
}
|
||||
|
||||
// returns boolean
|
||||
function validLatLon($fLat,$fLon)
|
||||
{
|
||||
return ($fLat <= 90.1 && $fLat >= -90.1 && $fLon <= 180.1 && $fLon >= -180.1);
|
||||
}
|
||||
|
||||
// Do we have anything that looks like a lat/lon pair?
|
||||
// returns array(lat,lon,query_with_lat_lon_removed)
|
||||
// or null
|
||||
function looksLikeLatLonPair($sQuery)
|
||||
{
|
||||
$sFound = null;
|
||||
$fQueryLat = null;
|
||||
$fQueryLon = null;
|
||||
|
||||
// degrees decimal minutes
|
||||
// N 40 26.767, W 79 58.933
|
||||
// N 40°26.767′, W 79°58.933′
|
||||
// 1 2 3 4 5 6
|
||||
if (preg_match('/\\b([NS])[ ]+([0-9]+[0-9.]*)[° ]+([0-9.]+)?[′\']*[, ]+([EW])[ ]+([0-9]+)[° ]+([0-9]+[0-9.]*)[′\']*?\\b/', $sQuery, $aData))
|
||||
{
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60);
|
||||
$fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[5] + $aData[6]/60);
|
||||
}
|
||||
// degrees decimal minutes
|
||||
// 40 26.767 N, 79 58.933 W
|
||||
// 40° 26.767′ N 79° 58.933′ W
|
||||
// 1 2 3 4 5 6
|
||||
elseif (preg_match('/\\b([0-9]+)[° ]+([0-9]+[0-9.]*)?[′\']*[ ]+([NS])[, ]+([0-9]+)[° ]+([0-9]+[0-9.]*)?[′\' ]+([EW])\\b/', $sQuery, $aData))
|
||||
{
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[3]=='N'?1:-1) * ($aData[1] + $aData[2]/60);
|
||||
$fQueryLon = ($aData[6]=='E'?1:-1) * ($aData[4] + $aData[5]/60);
|
||||
}
|
||||
// degrees decimal seconds
|
||||
// N 40 26 46 W 79 58 56
|
||||
// N 40° 26′ 46″ W, 79° 58′ 56″
|
||||
// 1 2 3 4 5 6 7 8
|
||||
elseif (preg_match('/\\b([NS])[ ]([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+)[″"]*[, ]+([EW])[ ]([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+)[″"]*\\b/', $sQuery, $aData))
|
||||
{
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60 + $aData[4]/3600);
|
||||
$fQueryLon = ($aData[5]=='E'?1:-1) * ($aData[6] + $aData[7]/60 + $aData[8]/3600);
|
||||
}
|
||||
// degrees decimal seconds
|
||||
// 40 26 46 N 79 58 56 W
|
||||
// 40° 26′ 46″ N, 79° 58′ 56″ W
|
||||
// 1 2 3 4 5 6 7 8
|
||||
elseif (preg_match('/\\b([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+)[″" ]+([NS])[, ]+([0-9]+)[° ]+([0-9]+)[′\' ]+([0-9]+)[″" ]+([EW])\\b/', $sQuery, $aData))
|
||||
{
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[4]=='N'?1:-1) * ($aData[1] + $aData[2]/60 + $aData[3]/3600);
|
||||
$fQueryLon = ($aData[8]=='E'?1:-1) * ($aData[5] + $aData[6]/60 + $aData[7]/3600);
|
||||
}
|
||||
// degrees decimal
|
||||
// N 40.446° W 79.982°
|
||||
// 1 2 3 4
|
||||
elseif (preg_match('/\\b([NS])[ ]([0-9]+[0-9]*\\.[0-9]+)[°]*[, ]+([EW])[ ]([0-9]+[0-9]*\\.[0-9]+)[°]*\\b/', $sQuery, $aData))
|
||||
{
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2]);
|
||||
$fQueryLon = ($aData[3]=='E'?1:-1) * ($aData[4]);
|
||||
}
|
||||
// degrees decimal
|
||||
// 40.446° N 79.982° W
|
||||
// 1 2 3 4
|
||||
elseif (preg_match('/\\b([0-9]+[0-9]*\\.[0-9]+)[° ]+([NS])[, ]+([0-9]+[0-9]*\\.[0-9]+)[° ]+([EW])\\b/', $sQuery, $aData))
|
||||
{
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = ($aData[2]=='N'?1:-1) * ($aData[1]);
|
||||
$fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[3]);
|
||||
}
|
||||
// degrees decimal
|
||||
// 12.34, 56.78
|
||||
// [12.456,-78.90]
|
||||
// 1 2 3 4
|
||||
elseif (preg_match('/(\\[|^|\\b)(-?[0-9]+[0-9]*\\.[0-9]+)[, ]+(-?[0-9]+[0-9]*\\.[0-9]+)(\\]|$|\\b)/', $sQuery, $aData))
|
||||
{
|
||||
$sFound = $aData[0];
|
||||
$fQueryLat = $aData[2];
|
||||
$fQueryLon = $aData[3];
|
||||
}
|
||||
|
||||
if (!validLatLon($fQueryLat, $fQueryLon)) return;
|
||||
$sQuery = trim(str_replace($sFound, ' ', $sQuery));
|
||||
|
||||
return array('lat' => $fQueryLat, 'lon' => $fQueryLon, 'query' => $sQuery);
|
||||
}
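A minimal usage sketch (not part of the diff); the query string is made up and the function is assumed to be loaded via lib/lib.php:

<?php
// Hypothetical sketch: extract a decimal lat/lon pair and the remaining query.
$aLooksLike = looksLikeLatLonPair('Mount Everest, 27.9881 86.9250');
if ($aLooksLike)
{
    echo $aLooksLike['lat'].','.$aLooksLike['lon'].' -> '.$aLooksLike['query'];
}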
|
||||
@@ -3,7 +3,7 @@
|
||||
function logStart(&$oDB, $sType = '', $sQuery = '', $aLanguageList = array())
|
||||
{
|
||||
$aStartTime = explode('.',microtime(true));
|
||||
if (!$aStartTime[1]) $aStartTime[1] = '0';
|
||||
if (!isset($aStartTime[1])) $aStartTime[1] = '0';
|
||||
|
||||
$sOutputFormat = '';
|
||||
if (isset($_GET['format'])) $sOutputFormat = $_GET['format'];
|
||||
|
||||
@@ -82,7 +82,7 @@
|
||||
{
|
||||
echo '<img style="float:right;margin-right:40px;" src="'.CONST_Website_BaseURL.'images/mapicons/'.$aPointDetails['icon'].'.n.32.png'.'" alt="'.$aPointDetails['icon'].'" />';
|
||||
}
|
||||
echo $aPointDetails['localname'].'</h1>';
|
||||
echo $aPointDetails['localname']."</h1>\n";
|
||||
echo '<div class="locationdetails">';
|
||||
echo ' <div>Name: ';
|
||||
foreach($aPointDetails['aNames'] as $sKey => $sValue)
|
||||
@@ -110,9 +110,9 @@
|
||||
echo ' <div class="line"><span class="name">'.$sValue.'</span> ('.$sKey.')</div>';
|
||||
}
|
||||
echo ' </div>';
|
||||
echo '</div>';
|
||||
echo "</div>\n";
|
||||
|
||||
echo '<h2>Address</h2>';
|
||||
echo "<h2>Address</h2>\n";
|
||||
echo '<div class="address">';
|
||||
$iPrevRank = 1000000;
|
||||
$sPrevLocalName = '';
|
||||
@@ -136,14 +136,14 @@
|
||||
echo ', <span class="distance">'.$aAddressLine['distance'].'</span>';
|
||||
echo ' <a href="details.php?place_id='.$aAddressLine['place_id'].'">GOTO</a>';
|
||||
echo ')';
|
||||
echo '</div>';
|
||||
echo "</div>\n";
|
||||
}
|
||||
echo '</div>';
|
||||
echo "</div>\n";
|
||||
|
||||
if ($aLinkedLines)
|
||||
{
|
||||
echo '<h2>Linked Places</h2>';
|
||||
echo '<div class=\"linked\">';
|
||||
echo "<h2>Linked Places</h2>\n";
|
||||
echo '<div class="linked">';
|
||||
foreach($aLinkedLines as $aAddressLine)
|
||||
{
|
||||
$sOSMType = ($aAddressLine['osm_type'] == 'N'?'node':($aAddressLine['osm_type'] == 'W'?'way':($aAddressLine['osm_type'] == 'R'?'relation':'')));
|
||||
@@ -152,16 +152,16 @@
|
||||
echo '<span class="name">'.(trim($aAddressLine['localname'])?$aAddressLine['localname']:'<span class="noname">No Name</span>').'</span>';
|
||||
echo ' (';
|
||||
echo '<span class="type"><span class="label">Type: </span>'.$aAddressLine['class'].':'.$aAddressLine['type'].'</span>';
|
||||
if ($sOSMType) echo ', <span class="osm"><span class="label"></span>'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
|
||||
if ($sOSMType) echo ', <span class="osm">'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
|
||||
echo ', <span class="adminlevel">'.$aAddressLine['admin_level'].'</span>';
|
||||
echo ', <span class="rankaddress">'.$aAddressLine['rank_search_label'].'</span>';
|
||||
if (isset($aAddressLine['rank_search_label'])) echo ', <span class="rankaddress">'.$aAddressLine['rank_search_label'].'</span>';
|
||||
// echo ', <span class="area">'.($aAddressLine['fromarea']=='t'?'Polygon':'Point').'</span>';
|
||||
echo ', <span class="distance">'.$aAddressLine['distance'].'</span>';
|
||||
echo ' <a href="details.php?place_id='.$aAddressLine['place_id'].'">GOTO</a>';
|
||||
echo ')';
|
||||
echo '</div>';
|
||||
echo "</div>\n";
|
||||
}
|
||||
echo '</div>';
|
||||
echo "</div>\n";
|
||||
}
|
||||
|
||||
if ($aPlaceSearchNameKeywords)
|
||||
@@ -169,7 +169,7 @@
|
||||
echo '<h2>Name Keywords</h2>';
|
||||
foreach($aPlaceSearchNameKeywords as $aRow)
|
||||
{
|
||||
echo '<div>'.$aRow['word_token'].'</div>';
|
||||
echo '<div>'.$aRow['word_token']."</div>\n";
|
||||
}
|
||||
}
|
||||
|
||||
@@ -178,13 +178,13 @@
|
||||
echo '<h2>Address Keywords</h2>';
|
||||
foreach($aPlaceSearchAddressKeywords as $aRow)
|
||||
{
|
||||
echo '<div>'.($aRow['word_token'][0]==' '?'*':'').$aRow['word_token'].'('.$aRow['word_id'].')'.'</div>';
|
||||
echo '<div>'.($aRow['word_token'][0]==' '?'*':'').$aRow['word_token'].'('.$aRow['word_id'].')'."</div>\n";
|
||||
}
|
||||
}
|
||||
|
||||
if (sizeof($aParentOfLines))
|
||||
{
|
||||
echo '<h2>Parent Of:</h2>';
|
||||
echo "<h2>Parent Of:</h2>\n<div>\n";
|
||||
|
||||
$aGroupedAddressLines = array();
|
||||
foreach($aParentOfLines as $aAddressLine)
|
||||
@@ -195,7 +195,7 @@
|
||||
foreach($aGroupedAddressLines as $sGroupHeading => $aParentOfLines)
|
||||
{
|
||||
$sGroupHeading = ucwords($sGroupHeading);
|
||||
echo "<h3>$sGroupHeading</h3>";
|
||||
echo "<h3>$sGroupHeading</h3>\n";
|
||||
foreach($aParentOfLines as $aAddressLine)
|
||||
{
|
||||
$aAddressLine['localname'] = $aAddressLine['localname']?$aAddressLine['localname']:$aAddressLine['housenumber'];
|
||||
@@ -204,15 +204,12 @@
|
||||
echo '<div class="line">';
|
||||
echo '<span class="name">'.(trim($aAddressLine['localname'])?$aAddressLine['localname']:'<span class="noname">No Name</span>').'</span>';
|
||||
echo ' (';
|
||||
// echo '<span class="type"><span class="label">Type: </span>'.$aAddressLine['class'].':'.$aAddressLine['type'].'</span>';
|
||||
// echo ', <span class="adminlevel">'.$aAddressLine['admin_level'].'</span>';
|
||||
// echo ', <span class="rankaddress">'.$aAddressLine['rank_address'].'</span>';
|
||||
echo '<span class="area">'.($aAddressLine['isarea']=='t'?'Polygon':'Point').'</span>';
|
||||
echo ', <span class="distance">~'.(round($aAddressLine['distance']*69,1)).' miles</span>';
|
||||
if ($sOSMType) echo ', <span class="osm"><span class="label"></span>'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
|
||||
if ($sOSMType) echo ', <span class="osm">'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
|
||||
echo ', <a href="details.php?place_id='.$aAddressLine['place_id'].'">GOTO</a>';
|
||||
echo ')';
|
||||
echo '</div>';
|
||||
echo "</div>\n";
|
||||
}
|
||||
}
|
||||
if (sizeof($aParentOfLines) >= 500) {
|
||||
|
||||
Submodule osm2pgsql updated: d1d82e5fe6...7d780c931c
phpunit.xml (new file): 26 lines
@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<phpunit backupGlobals="false"
         backupStaticAttributes="false"
         colors="true"
         convertErrorsToExceptions="true"
         convertNoticesToExceptions="true"
         convertWarningsToExceptions="true"
         processIsolation="false"
         stopOnFailure="false"
         syntaxCheck="true"
         bootstrap="tests-php/bootstrap.php"
         >
    <php>
    </php>
    <testsuites>
        <testsuite name="Nominatim PHP Test Suite">
            <directory>./tests-php/Nominatim</directory>
        </testsuite>
    </testsuites>
    <filter>
        <whitelist>
            <directory>./lib/</directory>
        </whitelist>
    </filter>

</phpunit>
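A minimal sketch (not part of the diff) of a test this configuration would discover under tests-php/Nominatim/; the file name, class name and the assumption that bootstrap.php loads lib/lib.php are hypothetical. It exercises the validLatLon() helper added to lib/lib.php:

<?php
// Hypothetical tests-php/Nominatim/LibTest.php
// (assumes tests-php/bootstrap.php pulls in lib/lib.php).
class LibTest extends PHPUnit_Framework_TestCase
{
    public function testValidLatLon()
    {
        $this->assertTrue(validLatLon(51.5, -0.1));
        $this->assertFalse(validLatLon(120.0, 0.0));
    }
}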
@@ -271,6 +271,20 @@ END;
|
||||
$$
|
||||
LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
CREATE OR REPLACE FUNCTION get_name_ids(lookup_word TEXT)
|
||||
RETURNS INTEGER[]
|
||||
AS $$
|
||||
DECLARE
|
||||
lookup_token TEXT;
|
||||
return_word_ids INTEGER[];
|
||||
BEGIN
|
||||
lookup_token := ' '||trim(lookup_word);
|
||||
SELECT array_agg(word_id) FROM word WHERE word_token = lookup_token and class is null and type is null into return_word_ids;
|
||||
RETURN return_word_ids;
|
||||
END;
|
||||
$$
|
||||
LANGUAGE plpgsql IMMUTABLE;
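A short sketch (not part of the diff) of calling the new get_name_ids() function from PHP; the token value and the database-handle style are assumptions:

<?php
// Hypothetical sketch: fetch the word ids for a housenumber token
// (returned by PostgreSQL as an array literal such as '{12,34}').
$sSQL = "select get_name_ids('".pg_escape_string('42a')."')";
$sWordIds = $oDB->getOne($sSQL);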
|
||||
|
||||
CREATE OR REPLACE FUNCTION array_merge(a INTEGER[], b INTEGER[])
|
||||
RETURNS INTEGER[]
|
||||
AS $$
|
||||
@@ -554,20 +568,6 @@ END;
|
||||
$$
|
||||
LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
CREATE OR REPLACE FUNCTION get_country_code(place geometry, in_country_code VARCHAR(2)) RETURNS TEXT
|
||||
AS $$
|
||||
DECLARE
|
||||
nearcountry RECORD;
|
||||
BEGIN
|
||||
FOR nearcountry IN select country_code from country_name where country_code = lower(in_country_code)
|
||||
LOOP
|
||||
RETURN nearcountry.country_code;
|
||||
END LOOP;
|
||||
RETURN get_country_code(place);
|
||||
END;
|
||||
$$
|
||||
LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
CREATE OR REPLACE FUNCTION get_country_language_code(search_country_code VARCHAR(2)) RETURNS TEXT
|
||||
AS $$
|
||||
DECLARE
|
||||
@@ -596,10 +596,9 @@ END;
|
||||
$$
|
||||
LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
CREATE OR REPLACE FUNCTION get_partition(place geometry, in_country_code VARCHAR(10)) RETURNS INTEGER
|
||||
CREATE OR REPLACE FUNCTION get_partition(in_country_code VARCHAR(10)) RETURNS INTEGER
|
||||
AS $$
|
||||
DECLARE
|
||||
place_centre GEOMETRY;
|
||||
nearcountry RECORD;
|
||||
BEGIN
|
||||
FOR nearcountry IN select partition from country_name where country_code = in_country_code
|
||||
@@ -965,9 +964,9 @@ BEGIN
|
||||
NEW.place_id := nextval('seq_place');
|
||||
NEW.indexed_status := 1; --STATUS_NEW
|
||||
|
||||
NEW.calculated_country_code := lower(get_country_code(NEW.geometry, NEW.country_code));
|
||||
NEW.calculated_country_code := lower(get_country_code(NEW.geometry));
|
||||
|
||||
NEW.partition := get_partition(NEW.geometry, NEW.calculated_country_code);
|
||||
NEW.partition := get_partition(NEW.calculated_country_code);
|
||||
NEW.geometry_sector := geometry_sector(NEW.partition, NEW.geometry);
|
||||
|
||||
-- copy 'name' to or from the default language (if there is a default language)
|
||||
@@ -1019,6 +1018,13 @@ BEGIN
|
||||
NEW.rank_address := 5;
|
||||
END IF;
|
||||
|
||||
ELSEIF NEW.calculated_country_code = 'sg' THEN
|
||||
|
||||
IF NEW.postcode ~ '^([0-9]{6})$' THEN
|
||||
NEW.rank_search := 25;
|
||||
NEW.rank_address := 11;
|
||||
END IF;
|
||||
|
||||
ELSEIF NEW.calculated_country_code = 'de' THEN
|
||||
|
||||
IF NEW.postcode ~ '^([0-9]{5})$' THEN
|
||||
@@ -1078,18 +1084,6 @@ BEGIN
|
||||
ELSEIF NEW.type in ('village','hamlet','municipality','district','unincorporated_area','borough') THEN
|
||||
NEW.rank_search := 19;
|
||||
NEW.rank_address := 16;
|
||||
ELSEIF NEW.type in ('airport') AND ST_GeometryType(NEW.geometry) in ('ST_Polygon','ST_MultiPolygon') THEN
|
||||
NEW.rank_search := 18;
|
||||
NEW.rank_address := 17;
|
||||
ELSEIF NEW.type in ('moor') AND ST_GeometryType(NEW.geometry) in ('ST_Polygon','ST_MultiPolygon') THEN
|
||||
NEW.rank_search := 17;
|
||||
NEW.rank_address := 18;
|
||||
ELSEIF NEW.type in ('moor') THEN
|
||||
NEW.rank_search := 17;
|
||||
NEW.rank_address := 0;
|
||||
ELSEIF NEW.type in ('national_park') THEN
|
||||
NEW.rank_search := 18;
|
||||
NEW.rank_address := 18;
|
||||
ELSEIF NEW.type in ('suburb','croft','subdivision','isolated_dwelling') THEN
|
||||
NEW.rank_search := 20;
|
||||
NEW.rank_address := NEW.rank_search;
|
||||
@@ -1100,12 +1094,9 @@ BEGIN
|
||||
IF (NEW.extratags -> 'locality') = 'townland' THEN
|
||||
NEW.rank_address := 20;
|
||||
END IF;
|
||||
ELSEIF NEW.type in ('hall_of_residence','neighbourhood','housing_estate','nature_reserve') THEN
|
||||
ELSEIF NEW.type in ('neighbourhood') THEN
|
||||
NEW.rank_search := 22;
|
||||
NEW.rank_address := 22;
|
||||
ELSEIF NEW.type in ('airport','street') THEN
|
||||
NEW.rank_search := 26;
|
||||
NEW.rank_address := NEW.rank_search;
|
||||
ELSEIF NEW.type in ('house','building') THEN
|
||||
NEW.rank_search := 30;
|
||||
NEW.rank_address := NEW.rank_search;
|
||||
@@ -1134,6 +1125,11 @@ BEGIN
|
||||
ELSEIF NEW.class = 'natural' and NEW.type in ('peak','volcano','mountain_range') THEN
|
||||
NEW.rank_search := 18;
|
||||
NEW.rank_address := 0;
|
||||
ELSEIF NEW.class = 'natural' and NEW.type = 'sea' THEN
|
||||
NEW.rank_search := 4;
|
||||
NEW.rank_address := NEW.rank_search;
|
||||
ELSEIF NEW.class = 'natural' and NEW.type in ('coastline') THEN
|
||||
RETURN NULL;
|
||||
-- any feature more than 5 square miles is probably worth indexing
|
||||
ELSEIF ST_GeometryType(NEW.geometry) in ('ST_Polygon','ST_MultiPolygon') AND ST_Area(NEW.geometry) > 0.1 THEN
|
||||
NEW.rank_search := 22;
|
||||
@@ -1147,18 +1143,18 @@ BEGIN
|
||||
ELSEIF NEW.class = 'waterway' AND NEW.name is NULL THEN
|
||||
RETURN NULL;
|
||||
ELSEIF NEW.class = 'waterway' THEN
|
||||
NEW.rank_address := 17;
|
||||
IF NEW.osm_type = 'R' THEN
|
||||
NEW.rank_search := 16;
|
||||
ELSE
|
||||
NEW.rank_search := 17;
|
||||
END IF;
|
||||
NEW.rank_address := 0;
|
||||
ELSEIF NEW.class = 'highway' AND NEW.osm_type != 'N' AND NEW.type in ('service','cycleway','path','footway','steps','bridleway','motorway_link','primary_link','trunk_link','secondary_link','tertiary_link') THEN
|
||||
NEW.rank_search := 27;
|
||||
NEW.rank_address := NEW.rank_search;
|
||||
ELSEIF NEW.class = 'highway' AND NEW.osm_type != 'N' THEN
|
||||
NEW.rank_search := 26;
|
||||
NEW.rank_address := NEW.rank_search;
|
||||
ELSEIF NEW.class = 'natural' and NEW.type = 'sea' THEN
|
||||
NEW.rank_search := 4;
|
||||
NEW.rank_address := NEW.rank_search;
|
||||
ELSEIF NEW.class = 'natural' and NEW.type in ('coastline') THEN
|
||||
RETURN NULL;
|
||||
ELSEIF NEW.class = 'mountain_pass' THEN
|
||||
NEW.rank_search := 20;
|
||||
NEW.rank_address := 0;
|
||||
@@ -1179,7 +1175,7 @@ BEGIN
|
||||
END IF;
|
||||
|
||||
-- a country code make no sense below rank 4 (country)
|
||||
IF NEW.rank_address < 4 THEN
|
||||
IF NEW.rank_search < 4 THEN
|
||||
NEW.calculated_country_code := NULL;
|
||||
END IF;
|
||||
|
||||
@@ -1192,53 +1188,54 @@ BEGIN
|
||||
|
||||
RETURN NEW; -- @DIFFUPDATES@ The following is not needed until doing diff updates, and slows the main index process down
|
||||
|
||||
IF (ST_GeometryType(NEW.geometry) in ('ST_Polygon','ST_MultiPolygon') AND ST_IsValid(NEW.geometry)) THEN
|
||||
-- Performance: We just can't handle re-indexing for country level changes
|
||||
IF st_area(NEW.geometry) < 1 THEN
|
||||
-- mark items within the geometry for re-indexing
|
||||
-- RAISE WARNING 'placex poly insert: % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type;
|
||||
IF NEW.rank_address > 0 THEN
|
||||
IF (ST_GeometryType(NEW.geometry) in ('ST_Polygon','ST_MultiPolygon') AND ST_IsValid(NEW.geometry)) THEN
|
||||
-- Performance: We just can't handle re-indexing for country level changes
|
||||
IF st_area(NEW.geometry) < 1 THEN
|
||||
-- mark items within the geometry for re-indexing
|
||||
-- RAISE WARNING 'placex poly insert: % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type;
|
||||
|
||||
-- work around bug in postgis, this may have been fixed in 2.0.0 (see http://trac.osgeo.org/postgis/ticket/547)
|
||||
update placex set indexed_status = 2 where (st_covers(NEW.geometry, placex.geometry) OR ST_Intersects(NEW.geometry, placex.geometry))
|
||||
AND rank_search > NEW.rank_search and indexed_status = 0 and ST_geometrytype(placex.geometry) = 'ST_Point' and (rank_search < 28 or name is not null or (NEW.rank_search >= 16 and addr_place is not null));
|
||||
update placex set indexed_status = 2 where (st_covers(NEW.geometry, placex.geometry) OR ST_Intersects(NEW.geometry, placex.geometry))
|
||||
AND rank_search > NEW.rank_search and indexed_status = 0 and ST_geometrytype(placex.geometry) != 'ST_Point' and (rank_search < 28 or name is not null or (NEW.rank_search >= 16 and addr_place is not null));
|
||||
END IF;
|
||||
ELSE
|
||||
-- mark nearby items for re-indexing, where 'nearby' depends on the features rank_search and is a complete guess :(
|
||||
diameter := 0;
|
||||
-- 16 = city, anything higher than city is effectively ignored (polygon required!)
|
||||
IF NEW.type='postcode' THEN
|
||||
diameter := 0.05;
|
||||
ELSEIF NEW.rank_search < 16 THEN
|
||||
-- work around bug in postgis, this may have been fixed in 2.0.0 (see http://trac.osgeo.org/postgis/ticket/547)
|
||||
update placex set indexed_status = 2 where (st_covers(NEW.geometry, placex.geometry) OR ST_Intersects(NEW.geometry, placex.geometry))
|
||||
AND rank_search > NEW.rank_search and indexed_status = 0 and ST_geometrytype(placex.geometry) = 'ST_Point' and (rank_search < 28 or name is not null or (NEW.rank_search >= 16 and addr_place is not null));
|
||||
update placex set indexed_status = 2 where (st_covers(NEW.geometry, placex.geometry) OR ST_Intersects(NEW.geometry, placex.geometry))
|
||||
AND rank_search > NEW.rank_search and indexed_status = 0 and ST_geometrytype(placex.geometry) != 'ST_Point' and (rank_search < 28 or name is not null or (NEW.rank_search >= 16 and addr_place is not null));
|
||||
END IF;
|
||||
ELSE
|
||||
-- mark nearby items for re-indexing, where 'nearby' depends on the features rank_search and is a complete guess :(
|
||||
diameter := 0;
|
||||
ELSEIF NEW.rank_search < 18 THEN
|
||||
diameter := 0.1;
|
||||
ELSEIF NEW.rank_search < 20 THEN
|
||||
diameter := 0.05;
|
||||
ELSEIF NEW.rank_search = 21 THEN
|
||||
diameter := 0.001;
|
||||
ELSEIF NEW.rank_search < 24 THEN
|
||||
diameter := 0.02;
|
||||
ELSEIF NEW.rank_search < 26 THEN
|
||||
diameter := 0.002; -- 100 to 200 meters
|
||||
ELSEIF NEW.rank_search < 28 THEN
|
||||
diameter := 0.001; -- 50 to 100 meters
|
||||
END IF;
|
||||
IF diameter > 0 THEN
|
||||
-- RAISE WARNING 'placex point insert: % % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type,diameter;
|
||||
IF NEW.rank_search >= 26 THEN
|
||||
-- roads may cause reparenting for >27 rank places
|
||||
update placex set indexed_status = 2 where indexed_status = 0 and rank_search > NEW.rank_search and ST_DWithin(placex.geometry, NEW.geometry, diameter);
|
||||
ELSEIF NEW.rank_search >= 16 THEN
|
||||
-- up to rank 16, street-less addresses may need reparenting
|
||||
update placex set indexed_status = 2 where indexed_status = 0 and rank_search > NEW.rank_search and ST_DWithin(placex.geometry, NEW.geometry, diameter) and (rank_search < 28 or name is not null or addr_place is not null);
|
||||
ELSE
|
||||
-- for all other places the search terms may change as well
|
||||
update placex set indexed_status = 2 where indexed_status = 0 and rank_search > NEW.rank_search and ST_DWithin(placex.geometry, NEW.geometry, diameter) and (rank_search < 28 or name is not null);
|
||||
-- 16 = city, anything higher than city is effectively ignored (polygon required!)
|
||||
IF NEW.type='postcode' THEN
|
||||
diameter := 0.05;
|
||||
ELSEIF NEW.rank_search < 16 THEN
|
||||
diameter := 0;
|
||||
ELSEIF NEW.rank_search < 18 THEN
|
||||
diameter := 0.1;
|
||||
ELSEIF NEW.rank_search < 20 THEN
|
||||
diameter := 0.05;
|
||||
ELSEIF NEW.rank_search = 21 THEN
|
||||
diameter := 0.001;
|
||||
ELSEIF NEW.rank_search < 24 THEN
|
||||
diameter := 0.02;
|
||||
ELSEIF NEW.rank_search < 26 THEN
|
||||
diameter := 0.002; -- 100 to 200 meters
|
||||
ELSEIF NEW.rank_search < 28 THEN
|
||||
diameter := 0.001; -- 50 to 100 meters
|
||||
END IF;
|
||||
IF diameter > 0 THEN
|
||||
-- RAISE WARNING 'placex point insert: % % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type,diameter;
|
||||
IF NEW.rank_search >= 26 THEN
|
||||
-- roads may cause reparenting for >27 rank places
|
||||
update placex set indexed_status = 2 where indexed_status = 0 and rank_search > NEW.rank_search and ST_DWithin(placex.geometry, NEW.geometry, diameter);
|
||||
ELSEIF NEW.rank_search >= 16 THEN
|
||||
-- up to rank 16, street-less addresses may need reparenting
|
||||
update placex set indexed_status = 2 where indexed_status = 0 and rank_search > NEW.rank_search and ST_DWithin(placex.geometry, NEW.geometry, diameter) and (rank_search < 28 or name is not null or addr_place is not null);
|
||||
ELSE
|
||||
-- for all other places the search terms may change as well
|
||||
update placex set indexed_status = 2 where indexed_status = 0 and rank_search > NEW.rank_search and ST_DWithin(placex.geometry, NEW.geometry, diameter) and (rank_search < 28 or name is not null);
|
||||
END IF;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
END IF;
|
||||
|
||||
-- add to tables for special search
|
||||
@@ -1288,6 +1285,7 @@ DECLARE
|
||||
search_maxrank INTEGER;
|
||||
address_maxrank INTEGER;
|
||||
address_street_word_id INTEGER;
|
||||
address_street_word_ids INTEGER[];
|
||||
parent_place_id_rank BIGINT;
|
||||
|
||||
isin TEXT[];
|
||||
@@ -1357,15 +1355,47 @@ BEGIN
|
||||
NEW.centroid := null;
|
||||
|
||||
-- recalculate country and partition
|
||||
IF NEW.rank_search >= 4 THEN
|
||||
--NEW.calculated_country_code := lower(get_country_code(NEW.geometry, NEW.country_code));
|
||||
NEW.calculated_country_code := lower(get_country_code(place_centroid));
|
||||
IF NEW.rank_search = 4 THEN
|
||||
-- for countries, believe the mapped country code,
|
||||
-- so that we remain in the right partition if the boundaries
|
||||
-- suddenly expand.
|
||||
NEW.partition := get_partition(lower(NEW.country_code));
|
||||
IF NEW.partition = 0 THEN
|
||||
NEW.calculated_country_code := lower(get_country_code(place_centroid));
|
||||
NEW.partition := get_partition(NEW.calculated_country_code);
|
||||
ELSE
|
||||
NEW.calculated_country_code := lower(NEW.country_code);
|
||||
END IF;
|
||||
ELSE
|
||||
NEW.calculated_country_code := NULL;
|
||||
IF NEW.rank_search > 4 THEN
|
||||
--NEW.calculated_country_code := lower(get_country_code(NEW.geometry, NEW.country_code));
|
||||
NEW.calculated_country_code := lower(get_country_code(place_centroid));
|
||||
ELSE
|
||||
NEW.calculated_country_code := NULL;
|
||||
END IF;
|
||||
NEW.partition := get_partition(NEW.calculated_country_code);
|
||||
END IF;
|
||||
NEW.partition := get_partition(place_centroid, NEW.calculated_country_code);
|
||||
NEW.geometry_sector := geometry_sector(NEW.partition, place_centroid);
|
||||
|
||||
-- waterway ways are linked when they are part of a relation and have the same class/type
|
||||
IF NEW.osm_type = 'R' and NEW.class = 'waterway' THEN
|
||||
FOR relation IN select * from planet_osm_rels r where r.id = NEW.osm_id and r.parts != array[]::bigint[]
|
||||
LOOP
|
||||
FOR i IN 1..array_upper(relation.members, 1) BY 2 LOOP
|
||||
IF relation.members[i+1] in ('', 'main_stream', 'side_stream') AND substring(relation.members[i],1,1) = 'w' THEN
|
||||
--DEBUG: RAISE WARNING 'waterway parent %, child %/%', NEW.osm_id, i, relation.parts[i];
|
||||
FOR location IN SELECT * FROM placex
|
||||
WHERE osm_type = 'W' and osm_id = substring(relation.members[i],2,200)::bigint
|
||||
and class = NEW.class and type = NEW.type
|
||||
and ( relation.members[i+1] != 'side_stream' or NEW.name->'name' = name->'name')
|
||||
LOOP
|
||||
UPDATE placex SET linked_place_id = NEW.place_id WHERE place_id = location.place_id;
|
||||
END LOOP;
|
||||
END IF;
|
||||
END LOOP;
|
||||
END LOOP;
|
||||
END IF;
|
||||
|
||||
-- Adding ourselves to the list simplifies address calculations later
|
||||
INSERT INTO place_addressline VALUES (NEW.place_id, NEW.place_id, true, true, 0, NEW.rank_address);
|
||||
|
||||
@@ -1422,6 +1452,16 @@ BEGIN
|
||||
-- Note that addr:street links can only be indexed once the street itself is indexed
|
||||
IF NEW.parent_place_id IS NULL AND NEW.osm_type = 'N' THEN
|
||||
|
||||
-- if there is no address information, see if we can get it from a surrounding building
|
||||
IF NEW.street IS NULL AND NEW.addr_place IS NULL AND NEW.housenumber IS NULL THEN
|
||||
FOR location IN select * from placex where ST_Covers(geometry, place_centroid) and rank_search > 28 and (housenumber is not null or street is not null or addr_place is not null) AND ST_GeometryType(geometry) in ('ST_Polygon','ST_MultiPolygon')
|
||||
LOOP
|
||||
NEW.housenumber := location.housenumber;
|
||||
NEW.street := location.street;
|
||||
NEW.addr_place := location.addr_place;
|
||||
END LOOP;
|
||||
END IF;
|
||||
|
||||
-- Is this node part of a relation?
|
||||
FOR relation IN select * from planet_osm_rels where parts @> ARRAY[NEW.osm_id] and members @> ARRAY['n'||NEW.osm_id]
|
||||
LOOP
|
||||
@@ -1431,7 +1471,7 @@ BEGIN
|
||||
IF NEW.parent_place_id IS NULL AND relation.members[i+1] = 'street' THEN
|
||||
--RAISE WARNING 'node in relation %',relation;
|
||||
SELECT place_id from placex where osm_type='W' and osm_id = substring(relation.members[i],2,200)::bigint
|
||||
and rank_search = 26 INTO NEW.parent_place_id;
|
||||
and rank_search = 26 and name is not null INTO NEW.parent_place_id;
|
||||
END IF;
|
||||
END LOOP;
|
||||
END IF;
|
||||
@@ -1460,7 +1500,7 @@ BEGIN
|
||||
IF NEW.parent_place_id IS NULL AND relation.members[i+1] = 'street' THEN
|
||||
--RAISE WARNING 'node in way that is in a relation %',relation;
|
||||
SELECT place_id from placex where osm_type='W' and osm_id = substring(relation.members[i],2,200)::bigint
|
||||
and rank_search = 26 INTO NEW.parent_place_id;
|
||||
and rank_search = 26 and name is not null INTO NEW.parent_place_id;
|
||||
END IF;
|
||||
END LOOP;
|
||||
END IF;
|
||||
@@ -1468,13 +1508,10 @@ BEGIN
|
||||
END IF;
|
||||
|
||||
-- If the way contains an explicit name of a street copy it
|
||||
IF NEW.street IS NULL AND NEW.addr_place IS NULL AND location.street IS NOT NULL THEN
|
||||
-- Slightly less strict than above because data is copied from any object.
|
||||
IF NEW.street IS NULL AND NEW.addr_place IS NULL THEN
|
||||
--RAISE WARNING 'node in way that has a streetname %',location;
|
||||
NEW.street := location.street;
|
||||
END IF;
|
||||
|
||||
-- IF the way contains an explicit name of a place copy it
|
||||
IF NEW.addr_place IS NULL AND NEW.street IS NULL AND location.addr_place IS NOT NULL THEN
|
||||
NEW.addr_place := location.addr_place;
|
||||
END IF;
|
||||
|
||||
@@ -1510,7 +1547,7 @@ BEGIN
|
||||
IF NEW.parent_place_id IS NULL AND relation.members[i+1] = 'street' THEN
|
||||
--RAISE WARNING 'way that is in a relation %',relation;
|
||||
SELECT place_id from placex where osm_type='W' and osm_id = substring(relation.members[i],2,200)::bigint
|
||||
and rank_search = 26 INTO NEW.parent_place_id;
|
||||
and rank_search = 26 and name is not null INTO NEW.parent_place_id;
|
||||
END IF;
|
||||
END LOOP;
|
||||
END IF;
|
||||
@@ -1520,18 +1557,18 @@ BEGIN
|
||||
--RAISE WARNING 'x3 %',NEW.parent_place_id;
|
||||
|
||||
IF NEW.parent_place_id IS NULL AND NEW.street IS NOT NULL THEN
|
||||
address_street_word_id := get_name_id(make_standard_name(NEW.street));
|
||||
IF address_street_word_id IS NOT NULL THEN
|
||||
FOR location IN SELECT * from getNearestNamedRoadFeature(NEW.partition, place_centroid, address_street_word_id) LOOP
|
||||
address_street_word_ids := get_name_ids(make_standard_name(NEW.street));
|
||||
IF address_street_word_ids IS NOT NULL THEN
|
||||
FOR location IN SELECT * from getNearestNamedRoadFeature(NEW.partition, place_centroid, address_street_word_ids) LOOP
|
||||
NEW.parent_place_id := location.place_id;
|
||||
END LOOP;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
IF NEW.parent_place_id IS NULL AND NEW.addr_place IS NOT NULL THEN
|
||||
address_street_word_id := get_name_id(make_standard_name(NEW.addr_place));
|
||||
IF address_street_word_id IS NOT NULL THEN
|
||||
FOR location IN SELECT * from getNearestNamedPlaceFeature(NEW.partition, place_centroid, address_street_word_id) LOOP
|
||||
address_street_word_ids := get_name_ids(make_standard_name(NEW.addr_place));
|
||||
IF address_street_word_ids IS NOT NULL THEN
|
||||
FOR location IN SELECT * from getNearestNamedPlaceFeature(NEW.partition, place_centroid, address_street_word_ids) LOOP
|
||||
NEW.parent_place_id := location.place_id;
|
||||
END LOOP;
|
||||
END IF;
|
||||
@@ -1615,17 +1652,13 @@ BEGIN
|
||||
END IF;
|
||||
|
||||
-- merge in extra tags
|
||||
IF NOT linkedPlacex.extratags IS NULL THEN
|
||||
NEW.extratags := linkedPlacex.extratags || NEW.extratags;
|
||||
END IF;
|
||||
|
||||
IF NOT NEW.extratags ? linkedPlacex.class THEN
|
||||
NEW.extratags := NEW.extratags || hstore(linkedPlacex.class, linkedPlacex.type);
|
||||
END IF;
|
||||
NEW.extratags := hstore(linkedPlacex.class, linkedPlacex.type) || coalesce(linkedPlacex.extratags, ''::hstore) || coalesce(NEW.extratags, ''::hstore);
|
||||
|
||||
-- mark the linked place (excludes from search results)
|
||||
UPDATE placex set linked_place_id = NEW.place_id where place_id = linkedPlacex.place_id;
|
||||
|
||||
-- keep a note of the node id in case we need it for wikipedia in a bit
|
||||
linked_node_id := linkedPlacex.osm_id;
|
||||
END LOOP;
|
||||
|
||||
END LOOP;
|
||||
@@ -1654,13 +1687,7 @@ BEGIN
|
||||
END IF;
|
||||
|
||||
-- merge in extra tags
|
||||
IF NOT linkedPlacex.extratags IS NULL THEN
|
||||
NEW.extratags := linkedPlacex.extratags || NEW.extratags;
|
||||
END IF;
|
||||
|
||||
IF NOT NEW.extratags ? linkedPlacex.class THEN
|
||||
NEW.extratags := NEW.extratags || hstore(linkedPlacex.class, linkedPlacex.type);
|
||||
END IF;
|
||||
NEW.extratags := hstore(linkedPlacex.class, linkedPlacex.type) || coalesce(linkedPlacex.extratags, ''::hstore) || coalesce(NEW.extratags, ''::hstore);
|
||||
|
||||
-- mark the linked place (excludes from search results)
|
||||
UPDATE placex set linked_place_id = NEW.place_id where place_id = linkedPlacex.place_id;
|
||||
@@ -1702,11 +1729,7 @@ BEGIN
|
||||
name_vector := make_keywords(NEW.name);
|
||||
|
||||
-- merge in extra tags
|
||||
NEW.extratags := linkedPlacex.extratags || NEW.extratags;
|
||||
|
||||
IF NOT NEW.extratags ? linkedPlacex.class THEN
|
||||
NEW.extratags := NEW.extratags || hstore(linkedPlacex.class, linkedPlacex.type);
|
||||
END IF;
|
||||
NEW.extratags := hstore(linkedPlacex.class, linkedPlacex.type) || coalesce(linkedPlacex.extratags, ''::hstore) || coalesce(NEW.extratags, ''::hstore);
|
||||
|
||||
-- mark the linked place (excludes from search results)
|
||||
UPDATE placex set linked_place_id = NEW.place_id where place_id = linkedPlacex.place_id;
|
||||
@@ -2003,15 +2026,18 @@ LANGUAGE plpgsql;
|
||||
CREATE OR REPLACE FUNCTION place_delete() RETURNS TRIGGER
|
||||
AS $$
|
||||
DECLARE
|
||||
placeid BIGINT;
|
||||
has_rank BOOLEAN;
|
||||
BEGIN
|
||||
|
||||
--DEBUG: RAISE WARNING 'delete: % % % %',OLD.osm_type,OLD.osm_id,OLD.class,OLD.type;
|
||||
|
||||
-- deleting large polygons can have a massive effect on the system - require manual intervention to let them through
|
||||
IF st_area(OLD.geometry) > 2 and st_isvalid(OLD.geometry) THEN
|
||||
insert into import_polygon_delete values (OLD.osm_type,OLD.osm_id,OLD.class,OLD.type);
|
||||
RETURN NULL;
|
||||
SELECT bool_or(not (rank_address = 0 or rank_address > 26)) as ranked FROM placex WHERE osm_type = OLD.osm_type and osm_id = OLD.osm_id and class = OLD.class and type = OLD.type INTO has_rank;
|
||||
IF has_rank THEN
|
||||
insert into import_polygon_delete values (OLD.osm_type,OLD.osm_id,OLD.class,OLD.type);
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
-- mark for delete
|
||||
@@ -2071,8 +2097,8 @@ BEGIN
|
||||
-- Handle a place changing type by removing the old data
|
||||
-- My generated 'place' types are causing havok because they overlap with real keys
|
||||
-- TODO: move them to their own special purpose key/class to avoid collisions
|
||||
IF existing.osm_type IS NULL AND (NEW.type not in ('postcode','house','houses')) THEN
|
||||
DELETE FROM place where osm_type = NEW.osm_type and osm_id = NEW.osm_id and class = NEW.class and type not in ('postcode','house','houses');
|
||||
IF existing.osm_type IS NULL THEN
|
||||
DELETE FROM place where osm_type = NEW.osm_type and osm_id = NEW.osm_id and class = NEW.class;
|
||||
END IF;
|
||||
|
||||
--DEBUG: RAISE WARNING 'Existing: %',existing.osm_id;
|
||||
@@ -2258,6 +2284,16 @@ BEGIN
|
||||
geometry = NEW.geometry
|
||||
where osm_type = NEW.osm_type and osm_id = NEW.osm_id and class = NEW.class and type = NEW.type;
|
||||
|
||||
IF NEW.class in ('place','boundary') AND NEW.type in ('postcode','postal_code') THEN
|
||||
IF NEW.postcode IS NULL THEN
|
||||
-- postcode was deleted, no longer retain in placex
|
||||
DELETE FROM placex where place_id = existingplacex.place_id;
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
|
||||
NEW.name := hstore('ref', NEW.postcode);
|
||||
END IF;
|
||||
|
||||
update placex set
|
||||
name = NEW.name,
|
||||
housenumber = NEW.housenumber,
|
||||
@@ -2307,29 +2343,29 @@ END;
|
||||
$$
|
||||
LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
CREATE OR REPLACE FUNCTION get_connected_ways(way_ids INTEGER[]) RETURNS SETOF planet_osm_ways
|
||||
AS $$
|
||||
DECLARE
|
||||
searchnodes INTEGER[];
|
||||
location RECORD;
|
||||
j INTEGER;
|
||||
BEGIN
|
||||
|
||||
searchnodes := '{}';
|
||||
FOR j IN 1..array_upper(way_ids, 1) LOOP
|
||||
FOR location IN
|
||||
select nodes from planet_osm_ways where id = way_ids[j] LIMIT 1
|
||||
LOOP
|
||||
IF not (ARRAY[location.nodes] <@ searchnodes) THEN
|
||||
searchnodes := searchnodes || location.nodes;
|
||||
END IF;
|
||||
END LOOP;
|
||||
END LOOP;
|
||||
|
||||
RETURN QUERY select * from planet_osm_ways where nodes && searchnodes and NOT ARRAY[id] <@ way_ids;
|
||||
END;
|
||||
$$
|
||||
LANGUAGE plpgsql IMMUTABLE;
|
||||
--CREATE OR REPLACE FUNCTION get_connected_ways(way_ids INTEGER[]) RETURNS SETOF planet_osm_ways
|
||||
-- AS $$
|
||||
--DECLARE
|
||||
-- searchnodes INTEGER[];
|
||||
-- location RECORD;
|
||||
-- j INTEGER;
|
||||
--BEGIN
|
||||
--
|
||||
-- searchnodes := '{}';
|
||||
-- FOR j IN 1..array_upper(way_ids, 1) LOOP
|
||||
-- FOR location IN
|
||||
-- select nodes from planet_osm_ways where id = way_ids[j] LIMIT 1
|
||||
-- LOOP
|
||||
-- IF not (ARRAY[location.nodes] <@ searchnodes) THEN
|
||||
-- searchnodes := searchnodes || location.nodes;
|
||||
-- END IF;
|
||||
-- END LOOP;
|
||||
-- END LOOP;
|
||||
--
|
||||
-- RETURN QUERY select * from planet_osm_ways where nodes && searchnodes and NOT ARRAY[id] <@ way_ids;
|
||||
--END;
|
||||
--$$
|
||||
--LANGUAGE plpgsql IMMUTABLE;
|
||||
|
||||
CREATE OR REPLACE FUNCTION get_address_postcode(for_place_id BIGINT) RETURNS TEXT
|
||||
AS $$
|
||||
@@ -2461,7 +2497,7 @@ BEGIN
|
||||
CASE WHEN class = 'place' and type = 'postcode' THEN hstore('name', postcode) ELSE name END as name,
|
||||
class, type, admin_level, true as fromarea, true as isaddress,
|
||||
CASE WHEN rank_address = 0 THEN 100 WHEN rank_address = 11 THEN 5 ELSE rank_address END as rank_address,
|
||||
0 as distance, calculated_country_code
|
||||
0 as distance, calculated_country_code, postcode
|
||||
from placex
|
||||
where place_id = for_place_id
|
||||
LOOP
|
||||
@@ -2472,6 +2508,9 @@ BEGIN
|
||||
IF searchpostcode IS NOT NULL and location.type = 'postcode' THEN
|
||||
location.isaddress := FALSE;
|
||||
END IF;
|
||||
IF searchpostcode IS NULL and location.postcode IS NOT NULL THEN
|
||||
searchpostcode := location.postcode;
|
||||
END IF;
|
||||
IF location.rank_address = 4 AND location.isaddress THEN
|
||||
hadcountry := true;
|
||||
END IF;
|
||||
@@ -2492,7 +2531,9 @@ BEGIN
|
||||
FOR location IN
|
||||
select placex.place_id, osm_type, osm_id,
|
||||
CASE WHEN class = 'place' and type = 'postcode' THEN hstore('name', postcode) ELSE name END as name,
|
||||
class, type, admin_level, fromarea, isaddress,
|
||||
CASE WHEN extratags ? 'place' THEN 'place' ELSE class END as class,
|
||||
CASE WHEN extratags ? 'place' THEN extratags->'place' ELSE type END as type,
|
||||
admin_level, fromarea, isaddress,
|
||||
CASE WHEN address_place_id = for_place_id AND rank_address = 0 THEN 100 WHEN rank_address = 11 THEN 5 ELSE rank_address END as rank_address,
|
||||
distance,calculated_country_code,postcode
|
||||
from place_addressline join placex on (address_place_id = placex.place_id)
|
||||
@@ -2808,7 +2849,7 @@ DECLARE
|
||||
BEGIN
|
||||
|
||||
place_centroid := ST_Centroid(pointgeo);
|
||||
out_partition := get_partition(place_centroid, in_countrycode);
|
||||
out_partition := get_partition(in_countrycode);
|
||||
out_parent_place_id := null;
|
||||
|
||||
address_street_word_id := get_name_id(make_standard_name(in_street));
|
||||
@@ -2923,14 +2964,14 @@ BEGIN
|
||||
wiki_article := regexp_replace(wiki_article,E'^(.*?)([a-z]{2,3}).wikipedia.org/wiki/',E'\\2:');
|
||||
wiki_article := regexp_replace(wiki_article,E'^(.*?)([a-z]{2,3}).wikipedia.org/w/index.php\\?title=',E'\\2:');
|
||||
wiki_article := regexp_replace(wiki_article,E'^(.*?)/([a-z]{2,3})/wiki/',E'\\2:');
|
||||
wiki_article := regexp_replace(wiki_article,E'^(.*?)([a-z]{2,3})[=:]',E'\\2:');
|
||||
--wiki_article := regexp_replace(wiki_article,E'^(.*?)([a-z]{2,3})[=:]',E'\\2:');
|
||||
wiki_article := replace(wiki_article,' ','_');
|
||||
wiki_article_title := trim(split_part(wiki_article, ':', 2));
|
||||
IF wiki_article_title IS NULL OR wiki_article_title = '' THEN
|
||||
IF strpos(wiki_article, ':') IN (3,4) THEN
|
||||
wiki_article_language := lower(trim(split_part(wiki_article, ':', 1)));
|
||||
wiki_article_title := trim(substr(wiki_article, strpos(wiki_article, ':')+1));
|
||||
ELSE
|
||||
wiki_article_title := trim(wiki_article);
|
||||
wiki_article_language := CASE WHEN langs[i] = 'english' THEN 'en' WHEN langs[i] = 'country' THEN get_country_language_code(country_code) ELSE langs[i] END;
|
||||
ELSE
|
||||
wiki_article_language := lower(trim(split_part(wiki_article, ':', 1)));
|
||||
END IF;
|
||||
|
||||
select wikipedia_article.language,wikipedia_article.title,wikipedia_article.importance
|
||||
@@ -3082,6 +3123,7 @@ DECLARE
|
||||
diameter FLOAT;
|
||||
rank INTEGER;
|
||||
BEGIN
|
||||
UPDATE placex SET indexed_status = 2 WHERE place_id = placeid;
|
||||
SELECT geometry, rank_search FROM placex WHERE place_id = placeid INTO placegeom, rank;
|
||||
IF placegeom IS NOT NULL AND ST_IsValid(placegeom) THEN
|
||||
IF ST_GeometryType(placegeom) in ('ST_Polygon','ST_MultiPolygon') THEN
|
||||
|
||||
@@ -18,6 +18,7 @@ CREATE INDEX idx_placex_rank_address ON placex USING BTREE (rank_address);
|
||||
CREATE INDEX idx_placex_pendingsector ON placex USING BTREE (rank_search,geometry_sector) where indexed_status > 0;
|
||||
CREATE INDEX idx_placex_parent_place_id ON placex USING BTREE (parent_place_id) where parent_place_id IS NOT NULL;
|
||||
CREATE INDEX idx_placex_interpolation ON placex USING BTREE (geometry_sector) where indexed_status > 0 and class='place' and type='houses';
|
||||
CREATE INDEX idx_placex_reverse_geometry ON placex USING gist (geometry) where rank_search != 28 and (name is not null or housenumber is not null) and class not in ('waterway','railway','tunnel','bridge');
|
||||
CREATE INDEX idx_location_area_country_place_id ON location_area_country USING BTREE (place_id);
|
||||
|
||||
CREATE INDEX idx_search_name_country_centroid ON search_name_country USING GIST (centroid);
|
||||
|
||||
@@ -60,6 +60,9 @@ create or replace function insertLocationAreaLarge(
|
||||
in_centroid GEOMETRY, in_geometry GEOMETRY) RETURNS BOOLEAN AS $$
|
||||
DECLARE
|
||||
BEGIN
|
||||
IF in_rank_address = 0 THEN
|
||||
RETURN TRUE;
|
||||
END IF;
|
||||
|
||||
IF in_rank_search <= 4 THEN
|
||||
INSERT INTO location_area_country values (in_partition, in_place_id, in_country_code, in_keywords, in_rank_search, in_rank_address, in_estimate, in_centroid, in_geometry);
|
||||
@@ -111,7 +114,7 @@ END
|
||||
$$
|
||||
LANGUAGE plpgsql;
|
||||
|
||||
create or replace function getNearestNamedRoadFeature(in_partition INTEGER, point GEOMETRY, isin_token INTEGER)
|
||||
create or replace function getNearestNamedRoadFeature(in_partition INTEGER, point GEOMETRY, isin_token INTEGER[])
|
||||
RETURNS setof nearfeature AS $$
|
||||
DECLARE
|
||||
r nearfeature%rowtype;
|
||||
@@ -123,7 +126,7 @@ BEGIN
|
||||
SELECT place_id, name_vector, address_rank, search_rank,
|
||||
ST_Distance(centroid, point) as distance, null as isguess
|
||||
FROM search_name_-partition-
|
||||
WHERE name_vector @> ARRAY[isin_token]
|
||||
WHERE name_vector @> isin_token
|
||||
AND ST_DWithin(centroid, point, 0.01)
|
||||
AND search_rank between 26 and 27
|
||||
ORDER BY distance ASC limit 1
|
||||
@@ -139,7 +142,7 @@ END
|
||||
$$
|
||||
LANGUAGE plpgsql;
|
||||
|
||||
create or replace function getNearestNamedPlaceFeature(in_partition INTEGER, point GEOMETRY, isin_token INTEGER)
|
||||
create or replace function getNearestNamedPlaceFeature(in_partition INTEGER, point GEOMETRY, isin_token INTEGER[])
|
||||
RETURNS setof nearfeature AS $$
|
||||
DECLARE
|
||||
r nearfeature%rowtype;
|
||||
@@ -151,7 +154,7 @@ BEGIN
|
||||
SELECT place_id, name_vector, address_rank, search_rank,
|
||||
ST_Distance(centroid, point) as distance, null as isguess
|
||||
FROM search_name_-partition-
|
||||
WHERE name_vector @> ARRAY[isin_token]
|
||||
WHERE name_vector @> isin_token
|
||||
AND ST_DWithin(centroid, point, 0.03)
|
||||
AND search_rank between 16 and 22
|
||||
ORDER BY distance ASC limit 1
|
||||
@@ -205,16 +208,20 @@ BEGIN
|
||||
|
||||
IF in_rank_search <= 4 THEN
|
||||
DELETE FROM search_name_country WHERE place_id = in_place_id;
|
||||
INSERT INTO search_name_country values (in_place_id, in_rank_search, in_rank_address,
|
||||
in_name_vector, in_geometry);
|
||||
IF in_rank_address > 0 THEN
|
||||
INSERT INTO search_name_country values (in_place_id, in_rank_search, in_rank_address,
|
||||
in_name_vector, in_geometry);
|
||||
END IF;
|
||||
RETURN TRUE;
|
||||
END IF;
|
||||
|
||||
-- start
|
||||
IF in_partition = -partition- THEN
|
||||
DELETE FROM search_name_-partition- values WHERE place_id = in_place_id;
|
||||
INSERT INTO search_name_-partition- values (in_place_id, in_rank_search, in_rank_address,
|
||||
in_name_vector, in_geometry);
|
||||
IF in_rank_address > 0 THEN
|
||||
INSERT INTO search_name_-partition- values (in_place_id, in_rank_search, in_rank_address,
|
||||
in_name_vector, in_geometry);
|
||||
END IF;
|
||||
RETURN TRUE;
|
||||
END IF;
|
||||
-- end
|
||||
|
||||
@@ -53,12 +53,12 @@ BEGIN
|
||||
END IF;
|
||||
|
||||
place_centroid := ST_Centroid(linegeo);
|
||||
out_partition := get_partition(place_centroid, 'us');
|
||||
out_partition := get_partition('us');
|
||||
out_parent_place_id := null;
|
||||
|
||||
address_street_word_id := get_name_id(make_standard_name(in_street));
|
||||
IF address_street_word_id IS NOT NULL THEN
|
||||
FOR location IN SELECT * from getNearestNamedRoadFeature(out_partition, place_centroid, address_street_word_id) LOOP
|
||||
FOR location IN SELECT * from getNearestNamedRoadFeature(out_partition, place_centroid, ARRAY[address_street_word_id]) LOOP
|
||||
out_parent_place_id := location.place_id;
|
||||
END LOOP;
|
||||
END IF;
tests-php/Nominatim/NominatimTest.php (new file, 77 lines)
@@ -0,0 +1,77 @@
|
||||
<?php
|
||||
|
||||
namespace Nominatim;
|
||||
require 'lib/lib.php';
|
||||
|
||||
|
||||
class NominatimTest extends \PHPUnit_Framework_TestCase
|
||||
{
|
||||
|
||||
protected function setUp()
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
public function test_addQuotes()
|
||||
{
|
||||
// FIXME: not quoting existing quote signs is probably a bug
|
||||
$this->assertSame("'St. John's'", addQuotes("St. John's"));
|
||||
$this->assertSame("''", addQuotes(''));
|
||||
}
|
||||
|
||||
public function test_looksLikeLatLonPair()
|
||||
{
|
||||
// no coordinates expected
|
||||
$this->assertNull(looksLikeLatLonPair(''));
|
||||
$this->assertNull(looksLikeLatLonPair('abc'));
|
||||
$this->assertNull(looksLikeLatLonPair('12 34'));
|
||||
$this->assertNull(looksLikeLatLonPair('200.1 89.9')); // because latitude > 180
|
||||
|
||||
// coordinates expected
|
||||
$this->assertNotNull(looksLikeLatLonPair('0.0 -0.0'));
|
||||
|
||||
$this->assertEquals(
|
||||
array( 'lat' => 12.456, 'lon' => -78.90, 'query' => 'abc def'),
|
||||
looksLikeLatLonPair(' abc 12.456 -78.90 def ')
|
||||
);
|
||||
|
||||
$this->assertEquals(
|
||||
array( 'lat' => 12.456, 'lon' => -78.90, 'query' => ''),
|
||||
looksLikeLatLonPair(' [12.456,-78.90] ')
|
||||
);
|
||||
|
||||
// http://en.wikipedia.org/wiki/Geographic_coordinate_conversion
|
||||
// these all represent the same location
|
||||
$aQueries = array(
|
||||
'40 26.767 N 79 58.933 W',
|
||||
'40° 26.767′ N 79° 58.933′ W',
|
||||
"40° 26.767' N 79° 58.933' W",
|
||||
'N 40 26.767, W 79 58.933',
|
||||
'N 40°26.767′, W 79°58.933′',
|
||||
"N 40°26.767', W 79°58.933'",
|
||||
|
||||
'40 26 46 N 79 58 56 W',
|
||||
'40° 26′ 46″ N 79° 58′ 56″ W',
|
||||
'N 40 26 46 W 79 58 56',
|
||||
'N 40° 26′ 46″, W 79° 58′ 56″',
|
||||
'N 40° 26\' 46", W 79° 58\' 56"',
|
||||
|
||||
'40.446 -79.982',
|
||||
'40.446,-79.982',
|
||||
'40.446° N 79.982° W',
|
||||
'N 40.446° W 79.982°',
|
||||
|
||||
'[40.446 -79.982]',
|
||||
' 40.446 , -79.982 ',
|
||||
);
|
||||
|
||||
|
||||
foreach($aQueries as $sQuery){
|
||||
$aRes = looksLikeLatLonPair($sQuery);
|
||||
$this->assertEquals( 40.446, $aRes['lat'], 'degrees decimal ' . $sQuery, 0.01);
|
||||
$this->assertEquals(-79.982, $aRes['lon'], 'degrees decimal ' . $sQuery, 0.01);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
tests-php/README.txt (new file, 13 lines)
@@ -0,0 +1,13 @@
Basic unit tests of PHP code. Very low coverage. Doesn't cover interaction
with the webserver/HTTP or database (yet).

You need to have PHPUnit
https://phpunit.de/manual/4.2/en/
installed.

To execute the test suite run

    $ phpunit

It will read phpunit.xml, which points to the library, test path and bootstrap
script and sets other parameters.
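
Once phpunit.xml is in place, a single test method can also be run with
PHPUnit's standard --filter option. The method name below is taken from the
test class in this directory; the invocation itself is only an example and
assumes it is started from the same directory as phpunit.xml:

    $ phpunit --filter test_looksLikeLatLonPair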
tests-php/bootstrap.php (new file, 2 lines)
@@ -0,0 +1,2 @@
<?php
tests/README.md (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
This directory contains functional tests for the Nominatim API,
|
||||
for the import/update from osm files and for indexing.
|
||||
|
||||
The tests use the lettuce framework (http://lettuce.it/) and
|
||||
nose (https://nose.readthedocs.org). API tests are meant to be run
|
||||
against a Nominatim installation with a complete planet-wide
|
||||
setup based on a fairly recent planet. If you only have an
|
||||
excerpt, some of the API tests may fail. Database tests can be
|
||||
run without having a database installed.
|
||||
|
||||
Prerequisites
|
||||
=============
|
||||
|
||||
* lettuce framework (http://lettuce.it/)
|
||||
* nose (https://nose.readthedocs.org)
|
||||
* pytidylib (http://countergram.com/open-source/pytidylib)
|
||||
* haversine (https://github.com/mapado/haversine)
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
* get prerequisites
|
||||
|
||||
[sudo] pip install lettuce nose pytidylib haversine psycopg2
|
||||
|
||||
* run the tests
|
||||
|
||||
NOMINATIM_SERVER=http://your.nominatim.instance/ lettuce features
|
||||
|
||||
The tests can be configured with a set of environment variables:
|
||||
|
||||
* `NOMINATIM_SERVER` - URL of the nominatim instance (API tests)
|
||||
* `NOMINATIM_DIR` - source directory of Nominatim (import tests)
|
||||
* `TEMPLATE_DB` - name of template database used as a skeleton for
|
||||
the test databases (db tests)
|
||||
* `TEST_DB` - name of test database (db tests)
|
||||
* `NOMINATIM_SETTINGS` - file to write temporary Nominatim settings to (db tests)
|
||||
* `NOMINATIM_REUSE_TEMPLATE` - if defined, the template database will not be
                               deleted after the test run and will be reused
                               during the next run. This speeds up tests
                               considerably but might lead to errors if the
                               database layout has changed in the meantime.
|
||||
* `NOMINATIM_KEEP_SCENARIO_DB` - if defined, the test database will not be
|
||||
dropped after a test is finished. Should
|
||||
only be used if one single scenario is run,
|
||||
otherwise the result is undefined.
|
||||
* `LOGLEVEL` - set to 'debug' to get more verbose output (only works properly
|
||||
when output to a logfile is configured)
|
||||
* `LOGFILE` - sends debug output to the given file
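
For example, a purely illustrative run of the API tests against a local
installation with debug logging enabled could look like this (server URL and
log file path are placeholders, the variable names are the ones listed above):

    $ LOGLEVEL=debug LOGFILE=/tmp/api-tests.log \
      NOMINATIM_SERVER=http://localhost/nominatim/ \
      lettuce features/api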
Writing Tests
|
||||
=============
|
||||
|
||||
The following explanation assumes that the reader is familiar with the lettuce
notions of features, scenarios and steps.
|
||||
|
||||
All possible steps can be found in the `steps` directory and should ideally
|
||||
be documented.
|
||||
|
||||
|
||||
API Tests (`features/api`)
|
||||
--------------------------
|
||||
|
||||
These tests are meant to test the different API calls and their parameters.
|
||||
|
||||
There are two kinds of steps defined for these tests:
request setup steps (see `steps/api_setup.py`)
and steps for checking results (see `steps/api_result.py`).
|
||||
|
||||
Each scenario follows this simple sequence of steps:
|
||||
|
||||
1. One or more steps to define parameters and HTTP headers of the request.
   These are cumulative, so you can use multiple steps.
2. A single step to call the API. This sends an HTTP request to the configured
   server and collects the answer. The cached parameters will be deleted,
   to ensure that the setup works properly with scenario outlines.
3. As many result checks as necessary. The result remains cached, so that
   multiple tests can be added here. A sketch of such a scenario is shown below.
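
The following scenario is a hypothetical illustration of this sequence. It uses
the step phrasings found in the feature files of this directory, but is not
itself part of the test suite:

    Scenario: Search with a fixed result language
        Given the request parameters
          | accept-language
          | en
        When sending json search query "München" with address
        Then result addresses contain
          | ID | country
          | 0  | Germany
        And results contain
          | ID | display_name
          | 0  | Munich.*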
Indexing Tests (`features/db`)
|
||||
---------------------------------------------------
|
||||
|
||||
These tests check the import and update of the Nominatim database. They do not
test the correctness of osm2pgsql. Each test will write some data into the `place`
table (and optionally the `planet_osm_*` tables if required) and then run
|
||||
Nominatim's processing functions on that.
|
||||
|
||||
These tests need to create their own test databases. By default they will be
|
||||
called `test_template_nominatim` and `test_nominatim`. Names can be changed with
|
||||
the environment variables `TEMPLATE_DB` and `TEST_DB`. The user running the tests
|
||||
needs superuser rights for postgres.
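
A hypothetical invocation of the database tests with explicit settings might
look like this (the source directory is only an example, the database names are
the documented defaults):

    $ NOMINATIM_DIR=$HOME/Nominatim \
      TEMPLATE_DB=test_template_nominatim \
      TEST_DB=test_nominatim \
      lettuce features/db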
Import Tests (`features/osm2pgsql`)
|
||||
-----------------------------------
|
||||
|
||||
These tests check that data is imported correctly into the place table. They
|
||||
use the same template database as the Indexing tests, so the same remarks apply.
tests/features/api/details.feature (new file, 14 lines)
@@ -0,0 +1,14 @@
|
||||
Feature: Object details
|
||||
Check details page for correctness
|
||||
|
||||
Scenario Outline: Details via OSM id
|
||||
When looking up details for <object>
|
||||
Then the result is valid
|
||||
|
||||
Examples:
|
||||
| object
|
||||
| 1758375
|
||||
| N158845944
|
||||
| W72493656
|
||||
| R62422
tests/features/api/language.feature (new file, 100 lines)
@@ -0,0 +1,100 @@
|
||||
Feature: Localization of search results
|
||||
|
||||
Scenario: Search - default language
|
||||
When sending json search query "Germany"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Deutschland.*
|
||||
|
||||
Scenario: Search - accept-language first
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| en,de
|
||||
When sending json search query "Deutschland"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Germany.*
|
||||
|
||||
Scenario: Search - accept-language missing
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| xx,fr,en,de
|
||||
When sending json search query "Deutschland"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Allemagne.*
|
||||
|
||||
Scenario: Search - http accept language header first
|
||||
Given the HTTP header
|
||||
| accept-language
|
||||
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
|
||||
When sending json search query "Deutschland"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Allemagne.*
|
||||
|
||||
Scenario: Search - http accept language header and accept-language
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| de,en
|
||||
Given the HTTP header
|
||||
| accept-language
|
||||
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
|
||||
When sending json search query "Deutschland"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Deutschland.*
|
||||
|
||||
Scenario: Search - http accept language header fallback
|
||||
Given the HTTP header
|
||||
| accept-language
|
||||
| fr-ca,en-ca;q=0.5
|
||||
When sending json search query "Deutschland"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Allemagne.*
|
||||
|
||||
Scenario: Search - http accept language header fallback (upper case)
|
||||
Given the HTTP header
|
||||
| accept-language
|
||||
| fr-FR;q=0.8,en-ca;q=0.5
|
||||
When sending json search query "Deutschland"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Allemagne.*
|
||||
|
||||
Scenario: Reverse - default language
|
||||
When looking up coordinates 48.13921,11.57328
|
||||
Then result addresses contain
|
||||
| ID | city
|
||||
| 0 | München
|
||||
|
||||
Scenario: Reverse - accept-language parameter
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| en,fr
|
||||
When looking up coordinates 48.13921,11.57328
|
||||
Then result addresses contain
|
||||
| ID | city
|
||||
| 0 | Munich
|
||||
|
||||
Scenario: Reverse - HTTP accept language header
|
||||
Given the HTTP header
|
||||
| accept-language
|
||||
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
|
||||
When looking up coordinates 48.13921,11.57328
|
||||
Then result addresses contain
|
||||
| ID | city
|
||||
| 0 | Munich
|
||||
|
||||
Scenario: Reverse - accept-language parameter and HTTP header
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| it
|
||||
Given the HTTP header
|
||||
| accept-language
|
||||
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
|
||||
When looking up coordinates 48.13921,11.57328
|
||||
Then result addresses contain
|
||||
| ID | city
|
||||
| 0 | Monaco di Baviera
|
||||
tests/features/api/regression.feature (new file, 179 lines)
@@ -0,0 +1,179 @@
|
||||
Feature: API regression tests
|
||||
Tests error cases reported in tickets.
|
||||
|
||||
@poldi-only
|
||||
Scenario Outline: github #36
|
||||
When sending json search query "<query>" with address
|
||||
Then result addresses contain
|
||||
| ID | road | city
|
||||
| 0 | Seegasse | Gemeinde Wieselburg-Land
|
||||
|
||||
Examples:
|
||||
| query
|
||||
| Seegasse, Gemeinde Wieselburg-Land
|
||||
| Seegasse, Wieselburg-Land
|
||||
| Seegasse, Wieselburg
|
||||
|
||||
Scenario: trac #2430
|
||||
When sending json search query "89 River Avenue, Hoddesdon, Hertfordshire, EN11 0JT"
|
||||
Then at least 1 result is returned
|
||||
|
||||
Scenario: trac #2440
|
||||
When sending json search query "East Harvard Avenue, Denver"
|
||||
Then more than 2 results are returned
|
||||
|
||||
Scenario: trac #2456
|
||||
When sending xml search query "Borlänge Kommun"
|
||||
Then results contain
|
||||
| ID | place_rank
|
||||
| 0 | 19
|
||||
|
||||
Scenario: trac #2530
|
||||
When sending json search query "Lange Straße, Bamberg" with address
|
||||
Then result addresses contain
|
||||
| ID | town
|
||||
| 0 | Bamberg
|
||||
|
||||
Scenario: trac #2541
|
||||
When sending json search query "pad, germany"
|
||||
Then results contain
|
||||
| ID | class | display_name
|
||||
| 0 | aeroway | Paderborn/Lippstadt,.*
|
||||
|
||||
Scenario: trac #2579
|
||||
When sending json search query "Johnsons Close, hackbridge" with address
|
||||
Then result addresses contain
|
||||
| ID | postcode
|
||||
| 0 | SM5 2LU
|
||||
|
||||
@Fail
|
||||
Scenario Outline: trac #2586
|
||||
When sending json search query "<query>" with address
|
||||
Then result addresses contain
|
||||
| ID | country_code
|
||||
| 0 | uk
|
||||
|
||||
Examples:
|
||||
| query
|
||||
| DL7 0SN
|
||||
| DL70SN
|
||||
|
||||
Scenario: trac #2628 (1)
|
||||
When sending json search query "Adam Kraft Str" with address
|
||||
Then result addresses contain
|
||||
| ID | road
|
||||
| 0 | Adam-Kraft-Straße
|
||||
|
||||
Scenario: trac #2628 (2)
|
||||
When sending json search query "Maxfeldstr. 5, Nürnberg" with address
|
||||
Then result addresses contain
|
||||
| ID | house_number | road | city
|
||||
| 0 | 5 | Maxfeldstraße | Nürnberg
|
||||
|
||||
Scenario: trac #2638
|
||||
When sending json search query "Nöthnitzer Str. 40, 01187 Dresden" with address
|
||||
Then result addresses contain
|
||||
| ID | house_number | road | city
|
||||
| 0 | 40 | Nöthnitzer Straße | Dresden
|
||||
|
||||
Scenario Outline: trac #2667
|
||||
When sending json search query "<query>" with address
|
||||
Then result addresses contain
|
||||
| ID | house_number
|
||||
| 0 | <number>
|
||||
|
||||
Examples:
|
||||
| number | query
|
||||
| 16 | 16 Woodpecker Way, Cambourne
|
||||
| 14906 | 14906, 114 Street Northwest, Edmonton, Alberta, Canada
|
||||
| 14904 | 14904, 114 Street Northwest, Edmonton, Alberta, Canada
|
||||
| 15022 | 15022, 114 Street Northwest, Edmonton, Alberta, Canada
|
||||
| 15024 | 15024, 114 Street Northwest, Edmonton, Alberta, Canada
|
||||
|
||||
Scenario: trac #2681
|
||||
When sending json search query "kirchstraße troisdorf Germany"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | .*, Troisdorf, .*
|
||||
|
||||
Scenario: trac #2758
|
||||
When sending json search query "6а, полуботка, чернигов" with address
|
||||
Then result addresses contain
|
||||
| ID | house_number
|
||||
| 0 | 6а
|
||||
|
||||
Scenario: trac #2790
|
||||
When looking up coordinates 49.0942079697809,8.27565898861822
|
||||
Then result addresses contain
|
||||
| ID | road | village | country
|
||||
| 0 | Daimlerstraße | Jockgrim | Deutschland
|
||||
|
||||
Scenario: trac #2794
|
||||
When sending json search query "4008"
|
||||
Then results contain
|
||||
| ID | class | type
|
||||
| 0 | place | postcode
|
||||
|
||||
Scenario: trac #2797
|
||||
When sending json search query "Philippstr.4, 52349 Düren" with address
|
||||
Then result addresses contain
|
||||
| ID | road | town
|
||||
| 0 | Philippstraße | Düren
|
||||
|
||||
Scenario: trac #2830
|
||||
When sending json search query "528, Merkley Drive, K4A 1N5,CA" with address
|
||||
Then result addresses contain
|
||||
| ID | house_number | road | postcode | country
|
||||
| 0 | 528 | Merkley Drive | K4A 1N5 | Canada
|
||||
|
||||
Scenario: trac #2830
|
||||
When sending json search query "K4A 1N5,CA"
|
||||
Then results contain
|
||||
| ID | class | type | display_name
|
||||
| 0 | place | postcode | .*, Canada
|
||||
|
||||
Scenario: trac #2845
|
||||
When sending json search query "Leliestraat 31, Zwolle" with address
|
||||
Then result addresses contain
|
||||
| ID | city
|
||||
| 0 | Zwolle
|
||||
|
||||
Scenario: trac #2852
|
||||
When sending json search query "berlinerstrasse, leipzig" with address
|
||||
Then result addresses contain
|
||||
| ID | road
|
||||
| 0 | Berliner Straße
|
||||
|
||||
Scenario: trac #2871
|
||||
When looking up coordinates -33.906895553,150.99609375
|
||||
Then result addresses contain
|
||||
| ID | city | postcode | country
|
||||
| 0 | [^0-9]* | 2197 | Australia
|
||||
|
||||
Scenario: trac #2974
|
||||
When sending json search query "Azadi Square, Faruj" with address
|
||||
Then result addresses contain
|
||||
| ID | road | city
|
||||
| 0 | ميدان آزادي | فاروج
|
||||
And results contain
|
||||
| ID | latlon
|
||||
| 0 | 37.2323,58.2193 +-1km
|
||||
|
||||
Scenario: trac #2981
|
||||
When sending json search query "Ohmstraße 7, Berlin" with address
|
||||
Then at least 2 results are returned
|
||||
And result addresses contain
|
||||
| house_number | road | state
|
||||
| 7 | Ohmstraße | Berlin
|
||||
|
||||
Scenario: trac #3049
|
||||
When sending json search query "Soccer City"
|
||||
Then results contain
|
||||
| ID | class | type | latlon
|
||||
| 0 | leisure | stadium | -26.2347261,27.982645 +-50m
|
||||
|
||||
Scenario: trac #3130
|
||||
When sending json search query "Old Way, Frinton"
|
||||
Then results contain
|
||||
| ID | class | latlon
|
||||
| 0 | highway | 51.8324206,1.2447352 +-100m
|
||||
tests/features/api/reverse.feature (new file, 13 lines)
@@ -0,0 +1,13 @@
|
||||
Feature: Reverse geocoding
|
||||
Testing the reverse function
|
||||
|
||||
# Make sure country is not overwritten by the postcode
|
||||
Scenario: Country is returned
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| de
|
||||
When looking up coordinates 53.9788769,13.0830313
|
||||
Then result addresses contain
|
||||
| ID | country
|
||||
| 0 | Deutschland
|
||||
|
||||
tests/features/api/reverse_simple.feature (new file, 52 lines)
@@ -0,0 +1,52 @@
|
||||
Feature: Simple Reverse Tests
|
||||
Simple tests for internal server errors and response format.
|
||||
These tests should pass on any Nominatim installation.
|
||||
|
||||
Scenario Outline: Simple reverse-geocoding
|
||||
When looking up xml coordinates <lat>,<lon>
|
||||
Then the result is valid xml
|
||||
When looking up json coordinates <lat>,<lon>
|
||||
Then the result is valid json
|
||||
When looking up jsonv2 coordinates <lat>,<lon>
|
||||
Then the result is valid json
|
||||
|
||||
Examples:
|
||||
| lat | lon
|
||||
| 0.0 | 0.0
|
||||
| 45.3 | 3.5
|
||||
| -79.34 | 23.5
|
||||
| 0.23 | -178.555
|
||||
|
||||
Scenario Outline: Wrapping of legal jsonp requests
|
||||
Given the request parameters
|
||||
| json_callback
|
||||
| foo
|
||||
When looking up <format> coordinates 67.3245,0.456
|
||||
Then the result is valid json
|
||||
|
||||
Examples:
|
||||
| format
|
||||
| json
|
||||
| jsonv2
|
||||
|
||||
Scenario: Reverse-geocoding without address
|
||||
Given the request parameters
|
||||
| addressdetails
|
||||
| 0
|
||||
When looking up xml coordinates 36.791966,127.171726
|
||||
Then the result is valid xml
|
||||
When looking up json coordinates 36.791966,127.171726
|
||||
Then the result is valid json
|
||||
When looking up jsonv2 coordinates 36.791966,127.171726
|
||||
Then the result is valid json
|
||||
|
||||
Scenario: Reverse-geocoding with zoom
|
||||
Given the request parameters
|
||||
| zoom
|
||||
| 10
|
||||
When looking up xml coordinates 36.791966,127.171726
|
||||
Then the result is valid xml
|
||||
When looking up json coordinates 36.791966,127.171726
|
||||
Then the result is valid json
|
||||
When looking up jsonv2 coordinates 36.791966,127.171726
|
||||
Then the result is valid json
|
||||
tests/features/api/search.feature (new file, 74 lines)
@@ -0,0 +1,74 @@
|
||||
Feature: Search queries
|
||||
Testing correctness of results
|
||||
|
||||
Scenario: UK House number search
|
||||
When sending json search query "27 Thoresby Road, Broxtowe" with address
|
||||
Then address of result 0 contains
|
||||
| type | value
|
||||
| house_number | 27
|
||||
| road | Thoresby Road
|
||||
| city | Broxtowe
|
||||
| state | England
|
||||
| country | United Kingdom
|
||||
| country_code | gb
|
||||
|
||||
|
||||
Scenario: House number search for non-street address
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| en
|
||||
When sending json search query "4 Pomocnia, Poland" with address
|
||||
Then address of result 0 is
|
||||
| type | value
|
||||
| house_number | 4
|
||||
| suburb | Pomocnia
|
||||
| county | gmina Pokrzywnica
|
||||
| state | Masovian Voivodeship
|
||||
| postcode | 06-121
|
||||
| country | Poland
|
||||
| country_code | pl
|
||||
|
||||
Scenario: House number interpolation even
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| en
|
||||
When sending json search query "140 rue Don Bosco, Saguenay" with address
|
||||
Then address of result 0 contains
|
||||
| type | value
|
||||
| house_number | 140
|
||||
| road | rue Don Bosco
|
||||
| city | Saguenay
|
||||
| state | Quebec
|
||||
| country | Canada
|
||||
| country_code | ca
|
||||
|
||||
Scenario: House number interpolation odd
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| en
|
||||
When sending json search query "141 rue Don Bosco, Saguenay" with address
|
||||
Then address of result 0 contains
|
||||
| type | value
|
||||
| house_number | 141
|
||||
| road | rue Don Bosco
|
||||
| city | Saguenay
|
||||
| state | Quebec
|
||||
| country | Canada
|
||||
| country_code | ca
|
||||
|
||||
Scenario: TIGER house number
|
||||
When sending json search query "3 West Victory Way, Craig"
|
||||
Then result 0 has not attributes osm_id,osm_type
|
||||
|
||||
Scenario: TIGER house number (road fallback)
|
||||
When sending json search query "3030 West Victory Way, Craig"
|
||||
Then result 0 has attributes osm_id,osm_type
|
||||
|
||||
Scenario: Expansion of Illinois
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| en
|
||||
When sending json search query "il, us"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Illinois.*
|
||||
tests/features/api/search_order.feature (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
Feature: Result order for Geocoding
|
||||
Testing that importance ordering returns sensible results
|
||||
|
||||
Scenario Outline: city order in street search
|
||||
When sending json search query "<street>, <city>" with address
|
||||
Then address of result 0 contains
|
||||
| type | value
|
||||
| <type> | <city>
|
||||
|
||||
Examples:
|
||||
| type | city | street
|
||||
| city | Zürich | Rigistr
|
||||
| city | Karlsruhe | Sophienstr
|
||||
| city | München | Karlstr
|
||||
| city | Praha | Dlouhá
|
||||
|
||||
Scenario Outline: use more important city in street search
|
||||
When sending json search query "<street>, <city>" with address
|
||||
Then result addresses contain
|
||||
| ID | country_code
|
||||
| 0 | <country>
|
||||
|
||||
Examples:
|
||||
| country | city | street
|
||||
| gb | London | Main St
|
||||
| gb | Manchester | Central Street
|
||||
|
||||
# https://trac.openstreetmap.org/ticket/5094
|
||||
Scenario: housenumbers are ordered by complete match first
|
||||
When sending json search query "4 Докукина Москва" with address
|
||||
Then result addresses contain
|
||||
| ID | house_number
|
||||
| 0 | 4
|
||||
tests/features/api/search_params.feature (new file, 172 lines)
@@ -0,0 +1,172 @@
|
||||
Feature: Search queries
|
||||
Testing different queries and parameters
|
||||
|
||||
Scenario: Simple XML search
|
||||
When sending xml search query "Schaan"
|
||||
Then result 0 has attributes place_id,osm_type,osm_id
|
||||
And result 0 has attributes place_rank,boundingbox
|
||||
And result 0 has attributes lat,lon,display_name
|
||||
And result 0 has attributes class,type,importance,icon
|
||||
And result 0 has not attributes address
|
||||
|
||||
Scenario: Simple JSON search
|
||||
When sending json search query "Vaduz"
|
||||
And result 0 has attributes place_id,licence,icon,class,type
|
||||
And result 0 has attributes osm_type,osm_id,boundingbox
|
||||
And result 0 has attributes lat,lon,display_name,importance
|
||||
And result 0 has not attributes address
|
||||
|
||||
Scenario: JSON search with addressdetails
|
||||
When sending json search query "Montevideo" with address
|
||||
Then address of result 0 is
|
||||
| type | value
|
||||
| city | Montevideo
|
||||
| state | Montevideo
|
||||
| country | Uruguay
|
||||
| country_code | uy
|
||||
|
||||
Scenario: XML search with addressdetails
|
||||
When sending xml search query "Inuvik" with address
|
||||
Then address of result 0 is
|
||||
| type | value
|
||||
| town | Inuvik
|
||||
| state | Northwest Territories
|
||||
| country | Canada
|
||||
| country_code | ca
|
||||
|
||||
Scenario: Address details with unknown class types
|
||||
When sending json search query "foobar, Essen" with address
|
||||
Then results contain
|
||||
| ID | class | type
|
||||
| 0 | leisure | hackerspace
|
||||
And result addresses contain
|
||||
| ID | address29
|
||||
| 0 | foobar
|
||||
And address of result 0 does not contain leisure,hackerspace
|
||||
|
||||
Scenario: Disabling deduplication
|
||||
When sending json search query "Oxford Street, London"
|
||||
Then there are no duplicates
|
||||
Given the request parameters
|
||||
| dedupe
|
||||
| 0
|
||||
When sending json search query "Oxford Street, London"
|
||||
Then there are duplicates
|
||||
|
||||
Scenario: Search with bounded viewbox in right area
|
||||
Given the request parameters
|
||||
| bounded | viewbox
|
||||
| 1 | -87.7,41.9,-87.57,41.85
|
||||
When sending json search query "restaurant" with address
|
||||
Then result addresses contain
|
||||
| ID | city
|
||||
| 0 | Chicago
|
||||
|
||||
Scenario: Search with bounded viewboxlbrt in right area
|
||||
Given the request parameters
|
||||
| bounded | viewboxlbrt
|
||||
| 1 | -87.7,41.85,-87.57,41.9
|
||||
When sending json search query "restaurant" with address
|
||||
Then result addresses contain
|
||||
| ID | city
|
||||
| 0 | Chicago
|
||||
|
||||
Scenario: No POI search with unbounded viewbox
|
||||
Given the request parameters
|
||||
| viewbox
|
||||
| -87.7,41.9,-87.57,41.85
|
||||
When sending json search query "restaurant"
|
||||
Then results contain
|
||||
| display_name
|
||||
| [^,]*(?i)restaurant.*
|
||||
|
||||
Scenario: bounded search remains within viewbox, even with no results
|
||||
Given the request parameters
|
||||
| bounded | viewbox
|
||||
| 1 | -5.662003,43.54285,-5.6563282,43.5403125
|
||||
When sending json search query "restaurant"
|
||||
Then less than 1 result is returned
|
||||
|
||||
Scenario: bounded search remains within viewbox with results
|
||||
Given the request parameters
|
||||
| bounded | viewbox
|
||||
| 1 | -5.662003,43.55,-5.6563282,43.5403125
|
||||
When sending json search query "restaurant"
|
||||
| lon | lat
|
||||
| >= -5.662003 | >= 43.5403125
|
||||
| <= -5.6563282| <= 43.55
|
||||
|
||||
Scenario: Prefer results within viewbox
|
||||
Given the request parameters
|
||||
| accept-language
|
||||
| en
|
||||
When sending json search query "royan" with address
|
||||
Then result addresses contain
|
||||
| ID | country
|
||||
| 0 | France
|
||||
Given the request parameters
|
||||
| accept-language | viewbox
|
||||
| en | 51.94,36.59,51.99,36.56
|
||||
When sending json search query "royan" with address
|
||||
Then result addresses contain
|
||||
| ID | country
|
||||
| 0 | Iran
|
||||
|
||||
Scenario: Overly large limit number for search results
|
||||
Given the request parameters
|
||||
| limit
|
||||
| 1000
|
||||
When sending json search query "Neustadt"
|
||||
Then at most 50 results are returned
|
||||
|
||||
Scenario: Limit number of search results
|
||||
Given the request parameters
|
||||
| limit
|
||||
| 4
|
||||
When sending json search query "Neustadt"
|
||||
Then exactly 4 results are returned
|
||||
|
||||
Scenario: Restrict to feature type country
|
||||
Given the request parameters
|
||||
| featureType
|
||||
| country
|
||||
When sending xml search query "Monaco"
|
||||
Then results contain
|
||||
| place_rank
|
||||
| 4
|
||||
|
||||
Scenario: Restrict to feature type state
|
||||
When sending xml search query "Berlin"
|
||||
Then results contain
|
||||
| ID | place_rank
|
||||
| 0 | 1[56]
|
||||
Given the request parameters
|
||||
| featureType
|
||||
| state
|
||||
When sending xml search query "Berlin"
|
||||
Then results contain
|
||||
| place_rank
|
||||
| [78]
|
||||
|
||||
Scenario: Restrict to feature type city
|
||||
Given the request parameters
|
||||
| featureType
|
||||
| city
|
||||
When sending xml search query "Monaco"
|
||||
Then results contain
|
||||
| place_rank
|
||||
| 1[56789]
|
||||
|
||||
|
||||
Scenario: Restrict to feature type settlement
|
||||
When sending json search query "Everest"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Mount Everest.*
|
||||
Given the request parameters
|
||||
| featureType
|
||||
| settlement
|
||||
When sending json search query "Everest"
|
||||
Then results contain
|
||||
| ID | display_name
|
||||
| 0 | Everest.*
|
||||
tests/features/api/search_simple.feature (new file, 227 lines)
@@ -0,0 +1,227 @@
|
||||
Feature: Simple Tests
|
||||
Simple tests for internal server errors and response format.
|
||||
These tests should pass on any Nominatim installation.
|
||||
|
||||
Scenario Outline: Testing different parameters
|
||||
Given the request parameters
|
||||
| <parameter>
|
||||
| <value>
|
||||
When sending search query "Manchester"
|
||||
Then the result is valid html
|
||||
Given the request parameters
|
||||
| <parameter>
|
||||
| <value>
|
||||
When sending html search query "Manchester"
|
||||
Then the result is valid html
|
||||
Given the request parameters
|
||||
| <parameter>
|
||||
| <value>
|
||||
When sending xml search query "Manchester"
Then the result is valid xml
Given the request parameters
| <parameter>
| <value>
When sending json search query "Manchester"
Then the result is valid json
Given the request parameters
| <parameter>
| <value>
When sending jsonv2 search query "Manchester"
Then the result is valid json

Examples:
| parameter | value
| addressdetails | 1
| addressdetails | 0
| polygon | 1
| polygon | 0
| polygon_text | 1
| polygon_text | 0
| polygon_kml | 1
| polygon_kml | 0
| polygon_geojson | 1
| polygon_geojson | 0
| polygon_svg | 1
| polygon_svg | 0
| accept-language | de,en
| countrycodes | uk,ir
| bounded | 1
| bounded | 0
| exclude_place_ids| 385252,1234515
| limit | 1000
| dedupe | 1
| dedupe | 0

Scenario: Search with invalid output format
Given the request parameters
| format
| fd$#
When sending search query "Berlin"
Then the result is valid html

Scenario Outline: Simple Searches
When sending search query "<query>"
Then the result is valid html
When sending html search query "<query>"
Then the result is valid html
When sending xml search query "<query>"
Then the result is valid xml
When sending json search query "<query>"
Then the result is valid json
When sending jsonv2 search query "<query>"
Then the result is valid json

Examples:
| query
| New York, New York
| France
| 12, Main Street, Houston
| München
| 東京都
| hotels in nantes
| xywxkrf
| gh; foo()
| %#$@*&l;der#$!
| 234
| 47.4,8.3

Scenario: Empty XML search
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| querystring | xnznxvcx
| polygon | false
| more_url | .*format=xml.*q=xnznxvcx.*

Scenario: Empty XML search with special XML characters
When sending xml search query "xfdghn&zxn"xvbyx<vxx>cssdex"
Then result header contains
| attr | value
| querystring | xfdghn&zxn"xvbyx<vxx>cssdex
| polygon | false
| more_url | .*format=xml.*q=xfdghn&zxn"xvbyx<vxx>cssdex.*

Scenario: Empty XML search with viewbox
Given the request parameters
| viewbox
| 12,45.13,77,33
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| querystring | xnznxvcx
| polygon | false
| viewbox | 12,45.13,77,33

Scenario: Empty XML search with viewboxlbrt
Given the request parameters
| viewboxlbrt
| 12,34.13,77,45
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| querystring | xnznxvcx
| polygon | false
| viewbox | 12,45.13,77,33

Scenario: Empty XML search with viewboxlbrt and viewbox
Given the request parameters
| viewbox | viewboxblrt
| 12,45.13,77,33 | 1,2,3,4
When sending xml search query "pub"
Then result header contains
| attr | value
| querystring | pub
| polygon | false
| viewbox | 12,45.13,77,33

Scenario Outline: Empty XML search with polygon values
Given the request parameters
| polygon
| <polyval>
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| polygon | <result>

Examples:
| result | polyval
| false | 0
| true | 1
| true | True
| true | true
| true | false
| true | FALSE
| true | yes
| true | no
| true | '; delete from foobar; select '

Scenario: Empty XML search with excluded place ids
Given the request parameters
| exclude_place_ids
| 123,76,342565
When sending xml search query "jghrleoxsbwjer"
Then result header contains
| attr | value
| exclude_place_ids | 123,76,342565

Scenario Outline: Wrapping of legal jsonp search requests
Given the request parameters
| json_callback
| <data>
When sending json search query "Tokyo"
Then there is a json wrapper "<data>"

Examples:
| data
| foo
| FOO
| __world
| $me
| m1[4]
| d_r[$d]

Scenario Outline: Wrapping of illegal jsonp search requests
Given the request parameters
| json_callback
| <data>
When sending json search query "Tokyo"
Then a HTTP 400 is returned

Examples:
| data
| 1asd
| bar(foo)
| XXX['bad']
| foo; evil

Scenario Outline: Ignore jsonp parameter for anything but json
Given the request parameters
| json_callback
| 234
When sending json search query "Malibu"
Then a HTTP 400 is returned
Given the request parameters
| json_callback
| 234
When sending xml search query "Malibu"
Then the result is valid xml
Given the request parameters
| json_callback
| 234
When sending html search query "Malibu"
Then the result is valid html

Scenario: Empty JSON search
When sending json search query "YHlERzzx"
Then exactly 0 results are returned

Scenario: Empty JSONv2 search
When sending jsonv2 search query "Flubb XdfESSaZx"
Then exactly 0 results are returned

Scenario: Search for non-existing coordinates
When sending json search query "-21.0,-33.0"
Then exactly 0 results are returned
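Note on the JSONP scenarios above: a syntactically legal json_callback value is expected to wrap the JSON result set in callback(...), while illegal callback names are rejected with HTTP 400. As a rough, non-authoritative sketch of how such a check can be reproduced against a running instance (the base URL below is a placeholder, not something defined by this test suite):

from urllib.parse import urlencode
from urllib.request import urlopen

# Placeholder; point this at your own Nominatim installation.
BASE_URL = "http://localhost/nominatim/search.php"

def search_raw(query, **extra_params):
    """Fetch a search result as a raw string, passing extra URL parameters through."""
    params = urlencode(dict(extra_params, q=query, format="json"))
    with urlopen(BASE_URL + "?" + params) as response:
        return response.read().decode("utf-8")

body = search_raw("Tokyo", json_callback="foo")
# A legal callback name wraps the JSON payload: foo([...])
assert body.startswith("foo(") and body.rstrip().endswith(")")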
41
tests/features/api/search_structured.feature
Normal file
@@ -0,0 +1,41 @@
Feature: Structured search queries
Testing correctness of results with
structured queries

Scenario: Country only
When sending json structured query with address
| country
| Canada
Then address of result 0 is
| type | value
| country | Canada
| country_code | ca

Scenario: Postcode only
When sending json structured query with address
| postalcode
| 22547
Then at least 1 result is returned
And results contain
| type
| post(al_)?code
And result addresses contain
| postcode
| 22547

Scenario: Street, postcode and country
When sending xml structured query with address
| street | postalcode | country
| Old Palace Road | GU2 7UP | United Kingdom
Then at least 1 result is returned
Then result header contains
| attr | value
| querystring | Old Palace Road, GU2 7UP, United Kingdom

Scenario: github #176
When sending json structured query with address
| city
| Washington
Then at least 1 result is returned
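The structured-query scenarios above exercise the variant of the search API in which the address is passed as separate fields (street, city, postalcode, country, ...) rather than one free-form q string. A minimal sketch along the same lines, again assuming a placeholder endpoint URL rather than anything defined by these tests:

import json
from urllib.parse import urlencode
from urllib.request import urlopen

# Placeholder; adjust to your own installation.
BASE_URL = "http://localhost/nominatim/search.php"

def structured_search(**address_parts):
    """Query with separate address fields and return the decoded JSON result list."""
    params = urlencode(dict(address_parts, format="json", addressdetails=1))
    with urlopen(BASE_URL + "?" + params) as response:
        return json.load(response)

results = structured_search(street="Old Palace Road", postalcode="GU2 7UP", country="United Kingdom")
print(len(results), "results returned")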
98
tests/features/db/import/linking.feature
Normal file
@@ -0,0 +1,98 @@
@DB
Feature: Linking of places
Tests for correctly determining linked places

Scenario: Waterways are linked when in waterway relations
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | river | Rhein | :w-2
| W | 2 | waterway | river | Rhein | :w-3
| R | 13 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
| R | 23 | waterway | river | Limmat| :w-4a
And the relations
| id | members | tags
| 13 | R23:tributary,W1,W2:main_stream | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| W1 | R13
| W2 | R13
| R13 | None
| R23 | None
When sending query "rhein"
Then results contain
| osm_type
| R

Scenario: Relations are not linked when in waterway relations
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | river | Rhein | :w-2
| W | 2 | waterway | river | Rhein | :w-3
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
| R | 2 | waterway | river | Limmat| :w-4a
And the relations
| id | members | tags
| 1 | R2 | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| W1 | None
| W2 | None
| R1 | None
| R2 | None

Scenario: Empty waterway relations are handled correctly
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
And the relations
| id | members | tags
| 1 | | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| R1 | None

Scenario: Waterways are not linked when waterway types don't match
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | drain | Rhein | :w-2
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
And the relations
| id | members | tags
| 1 | N23,N34,W1,R45 | 'type' : 'multipolygon'
When importing
Then table placex contains
| object | linked_place_id
| W1 | None
| R1 | None
When sending query "rhein"
Then results contain
| ID | osm_type
| 0 | R
| 1 | W

Scenario: Side streams are linked only when they have the same name
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | river | Rhein2 | :w-2
| W | 2 | waterway | river | Rhein | :w-3
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
And the relations
| id | members | tags
| 1 | W1:side_stream,W2:side_stream | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| W1 | None
| W2 | R1
When sending query "rhein2"
Then results contain
| osm_type
| W
202
tests/features/db/import/naming.feature
Normal file
@@ -0,0 +1,202 @@
@DB
Feature: Import and search of names
Tests all naming related issues: normalisation,
abbreviations, internationalisation, etc.

Scenario: Case-insensitivity of search
Given the place nodes
| osm_id | class | type | name
| 1 | place | locality | 'name' : 'FooBar'
When importing
Then table placex contains
| object | class | type | name
| N1 | place | locality | 'name' : 'FooBar'
When sending query "FooBar"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "foobar"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "fOObar"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "FOOBAR"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1

Scenario: Multiple spaces in name
Given the place nodes
| osm_id | class | type | name
| 1 | place | locality | 'name' : 'one two three'
When importing
When sending query "one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query " one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1

Scenario: Special characters in name
Given the place nodes
| osm_id | class | type | name
| 1 | place | locality | 'name' : 'Jim-Knopf-Str'
| 2 | place | locality | 'name' : 'Smith/Weston'
| 3 | place | locality | 'name' : 'space mountain'
| 4 | place | locality | 'name' : 'space'
| 5 | place | locality | 'name' : 'mountain'
When importing
When sending query "Jim-Knopf-Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim Knopf-Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim Knopf Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim/Knopf-Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim-Knopfstr"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Smith/Weston"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 2
When sending query "Smith Weston"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 2
When sending query "Smith-Weston"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 2
When sending query "space mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space-mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space/mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space\mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space(mountain)"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3

Scenario: No copying name tag if only one name
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name' : 'german' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| object | k | v
| N1 | name | german

Scenario: Copying name tag to default language if it does not exist
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| k | v
| name | german
| name:fi | finnish
| name:de | german

Scenario: Copying default language name tag to name if it does not exist
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name:de' : 'german', 'name:fi' : 'finnish' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| k | v
| name | german
| name:fi | finnish
| name:de | german

Scenario: Do not overwrite default language with name tag
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish', 'name:de' : 'local' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| k | v
| name | german
| name:fi | finnish
| name:de | local

Scenario: Landuses without name are ignored
Given the place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | natural | meadow | (0 0, 1 0, 1 1, 0 1, 0 0)
| R | 2 | landuse | industrial | (0 0, -1 0, -1 -1, 0 -1, 0 0)
When importing
Then table placex has no entry for R1
And table placex has no entry for R2

Scenario: Landuses with name are found
Given the place areas
| osm_type | osm_id | class | type | name | geometry
| R | 1 | natural | meadow | 'name' : 'landuse1' | (0 0, 1 0, 1 1, 0 1, 0 0)
| R | 2 | landuse | industrial | 'name' : 'landuse2' | (0 0, -1 0, -1 -1, 0 -1, 0 0)
When importing
When sending query "landuse1"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
When sending query "landuse2"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 2

Scenario: Postcode boundaries without ref
Given the place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
When sending query "12345"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
458
tests/features/db/import/parenting.feature
Normal file
@@ -0,0 +1,458 @@
@DB
Feature: Parenting of objects
Tests that the correct parent is chosen

Scenario: Address inherits postcode from its street unless it has a postcode
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | housenumber | geometry
| 1 | place | house | 4 | :p-N1
And the place nodes
| osm_id | class | type | housenumber | postcode | geometry
| 2 | place | house | 5 | 99999 | :p-N1
And the place ways
| osm_id | class | type | name | postcode | geometry
| 1 | highway | residential | galoo | 12345 | :w-north
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
When sending query "4 galoo"
Then results contain
| ID | osm_type | osm_id | langaddress
| 0 | N | 1 | 4, galoo, 12345
When sending query "5 galoo"
Then results contain
| ID | osm_type | osm_id | langaddress
| 0 | N | 2 | 5, galoo, 99999

Scenario: Address without tags, closest street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :p-N1
| 2 | place | house | :p-N2
| 3 | place | house | :p-S1
| 4 | place | house | :p-S2
And the named place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-north
| 2 | highway | residential | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W2
| N4 | W2

Scenario: Address without tags avoids unnamed streets
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :p-N1
| 2 | place | house | :p-N2
| 3 | place | house | :p-S1
| 4 | place | house | :p-S2
And the place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-north
And the named place ways
| osm_id | class | type | geometry
| 2 | highway | residential | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
| N2 | W2
| N3 | W2
| N4 | W2

Scenario: addr:street tag parents to appropriately named street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | street| geometry
| 1 | place | house | south | :p-N1
| 2 | place | house | north | :p-N2
| 3 | place | house | south | :p-S1
| 4 | place | house | north | :p-S2
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | north | :w-north
| 2 | highway | residential | south | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
| N2 | W1
| N3 | W2
| N4 | W1

Scenario: addr:street tag parents to next named street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | street | geometry
| 1 | place | house | abcdef | :p-N1
| 2 | place | house | abcdef | :p-N2
| 3 | place | house | abcdef | :p-S1
| 4 | place | house | abcdef | :p-S2
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | abcdef | :w-north
| 2 | highway | residential | abcdef | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W2
| N4 | W2

Scenario: addr:street tag without appropriately named street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | street | geometry
| 1 | place | house | abcdef | :p-N1
| 2 | place | house | abcdef | :p-N2
| 3 | place | house | abcdef | :p-S1
| 4 | place | house | abcdef | :p-S2
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | abcde | :w-north
| 2 | highway | residential | abcde | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W2
| N4 | W2

Scenario: addr:place address
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | addr_place | geometry
| 1 | place | house | myhamlet | :n-alley
And the place nodes
| osm_id | class | type | name | geometry
| 2 | place | hamlet | myhamlet | :n-main-west
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | myhamlet | :w-main
When importing
Then table placex contains
| object | parent_place_id
| N1 | N2

Scenario: addr:street is preferred over addr:place
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | addr_place | street | geometry
| 1 | place | house | myhamlet | mystreet| :n-alley
And the place nodes
| osm_id | class | type | name | geometry
| 2 | place | hamlet | myhamlet | :n-main-west
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | mystreet | :w-main
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1

Scenario: Untagged address in simple associated street relation
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-alley
| 2 | place | house | :n-corner
| 3 | place | house | :n-main-west
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | foo | :w-main
| 2 | highway | service | bar | :w-alley
And the relations
| id | members | tags
| 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W1

Scenario: Avoid unnamed streets in simple associated street relation
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-alley
| 2 | place | house | :n-corner
| 3 | place | house | :n-main-west
And the named place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-main
And the place ways
| osm_id | class | type | geometry
| 2 | highway | residential | :w-alley
And the relations
| id | members | tags
| 1 | N1,N2,N3,W2:street,W1:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W1

### Scenario 10
Scenario: Associated street relation overrides addr:street
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | street | geometry
| 1 | place | house | bar | :n-alley
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | foo | :w-main
| 2 | highway | residential | bar | :w-alley
And the relations
| id | members | tags
| 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1

Scenario: Building without tags, closest street from center point
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | geometry
| 1 | building | yes | :w-building
| 2 | highway | primary | :w-WE
| 3 | highway | residential | :w-NS
When importing
Then table placex contains
| object | parent_place_id
| W1 | W3

Scenario: Building with addr:street tags
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | street | geometry
| 1 | building | yes | bar | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id
| W1 | W2

Scenario: Building with addr:place tags
Given the scene building-on-street-corner
And the place nodes
| osm_id | class | type | name | geometry
| 1 | place | village | bar | :n-outer
And the named place ways
| osm_id | class | type | addr_place | geometry
| 1 | building | yes | bar | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id
| W1 | N1

Scenario: Building in associated street relation
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | geometry
| 1 | building | yes | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | W1:house,W2:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| W1 | W2

Scenario: Building in associated street relation overrides addr:street
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | street | geometry
| 1 | building | yes | foo | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | W1:house,W2:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| W1 | W2

Scenario: Wrong member in associated street relation is ignored
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-outer
And the named place ways
| osm_id | class | type | street | geometry
| 1 | building | yes | foo | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | N1:house,W1:street,W3:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W3

Scenario: POIs in building inherit address
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | amenity | bank | :n-inner
| 2 | shop | bakery | :n-edge-NS
| 3 | shop | supermarket| :n-edge-WE
And the place ways
| osm_id | class | type | street | addr_place | housenumber | geometry
| 1 | building | yes | foo | nowhere | 3 | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id | street | addr_place | housenumber
| W1 | W3 | foo | nowhere | 3
| N1 | W3 | foo | nowhere | 3
| N2 | W3 | foo | nowhere | 3
| N3 | W3 | foo | nowhere | 3

Scenario: POIs don't inherit from streets
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | amenity | bank | :n-inner
And the place ways
| osm_id | class | type | street | addr_place | housenumber | geometry
| 1 | highway | path | foo | nowhere | 3 | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id | street | addr_place | housenumber
| N1 | W3 | None | None | None

Scenario: POIs with own address do not inherit building address
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | street | geometry
| 1 | amenity | bank | bar | :n-inner
And the named place nodes
| osm_id | class | type | housenumber | geometry
| 2 | shop | bakery | 4 | :n-edge-NS
And the named place nodes
| osm_id | class | type | addr_place | geometry
| 3 | shop | supermarket| nowhere | :n-edge-WE
And the place nodes
| osm_id | class | type | name | geometry
| 4 | place | isolated_dwelling | theplace | :n-outer
And the place ways
| osm_id | class | type | addr_place | housenumber | geometry
| 1 | building | yes | theplace | 3 | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id | street | addr_place | housenumber
| W1 | N4 | None | theplace | 3
| N1 | W2 | bar | None | None
| N2 | W3 | None | None | 4
| N3 | W2 | None | nowhere | None

### Scenario 20
Scenario: POIs parent a road if and only if they are attached to it
Given the scene points-on-roads
And the named place nodes
| osm_id | class | type | street | geometry
| 1 | highway | bus_stop | North St | :n-SE
| 2 | highway | bus_stop | South St | :n-NW
| 3 | highway | bus_stop | North St | :n-S-unglued
| 4 | highway | bus_stop | South St | :n-N-unglued
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | secondary | North St | :w-north
| 2 | highway | unclassified | South St | :w-south
And the ways
| id | nodes
| 1 | 100,101,2,103,104
| 2 | 200,201,1,202,203
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
| N2 | W1
| N3 | W1
| N4 | W2

Scenario: POIs do not parent non-roads they are attached to
Given the scene points-on-roads
And the named place nodes
| osm_id | class | type | street | geometry
| 1 | highway | bus_stop | North St | :n-SE
| 2 | highway | bus_stop | South St | :n-NW
And the place ways
| osm_id | class | type | name | geometry
| 1 | landuse | residential | North St | :w-north
| 2 | waterway| river | South St | :w-south
And the ways
| id | nodes
| 1 | 100,101,2,103,104
| 2 | 200,201,1,202,203
When importing
Then table placex contains
| object | parent_place_id
| N1 | 0
| N2 | 0

Scenario: POIs on building outlines inherit associated street relation
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-edge-NS
And the named place ways
| osm_id | class | type | geometry
| 1 | building | yes | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | W1:house,W2:street | 'type' : 'associatedStreet'
And the ways
| id | nodes
| 1 | 100,1,101,102,100
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
383
tests/features/db/import/placex.feature
Normal file
@@ -0,0 +1,383 @@
@DB
Feature: Import into placex
Tests that data in placex is completed correctly.

Scenario: No country code tag is available
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | highway | primary | 'name' : 'A1' | country:us
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| N1 | None | us |

Scenario: Location overwrites country code tag
Given the scene country
And the place nodes
| osm_id | class | type | name | country_code | geometry
| 1 | highway | primary | 'name' : 'A1' | de | :us
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| N1 | de | us |

Scenario: Country code tag overwrites location for countries
Given the place areas
| osm_type | osm_id | class | type | admin_level | name | country_code | geometry
| R | 1 | boundary | administrative | 2 | 'name' : 'foo' | de | (-100 40, -101 40, -101 41, -100 41, -100 40)
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| R1 | de | de |

Scenario: Illegal country code tag for countries is ignored
And the place areas
| osm_type | osm_id | class | type | admin_level | name | country_code | geometry
| R | 1 | boundary | administrative | 2 | 'name' : 'foo' | xx | (-100 40, -101 40, -101 41, -100 41, -100 40)
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| R1 | xx | us |

Scenario: admin level is copied over
Given the place nodes
| osm_id | class | type | admin_level | name
| 1 | place | state | 3 | 'name' : 'foo'
When importing
Then table placex contains
| object | admin_level |
| N1 | 3 |

Scenario: admin level is default 15
Given the place nodes
| osm_id | class | type | name
| 1 | amenity | prison | 'name' : 'foo'
When importing
Then table placex contains
| object | admin_level |
| N1 | 15 |

Scenario: admin level is never larger than 15
Given the place nodes
| osm_id | class | type | name | admin_level
| 1 | amenity | prison | 'name' : 'foo' | 16
When importing
Then table placex contains
| object | admin_level |
| N1 | 15 |

Scenario: postcode node without postcode is dropped
Given the place nodes
| osm_id | class | type
| 1 | place | postcode
When importing
Then table placex has no entry for N1

Scenario: postcode boundary without postcode is dropped
Given the place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | boundary | postal_code | poly-area:0.1
When importing
Then table placex has no entry for R1

Scenario: search and address ranks for GB post codes correctly assigned
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | E45 2CD | country:gb
| 2 | place | postcode | E45 2 | country:gb
| 3 | place | postcode | Y45 | country:gb
When importing
Then table placex contains
| object | postcode | calculated_country_code | rank_search | rank_address
| N1 | E45 2CD | gb | 25 | 5
| N2 | E45 2 | gb | 23 | 5
| N3 | Y45 | gb | 21 | 5

Scenario: wrongly formatted GB postcodes are down-ranked
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | EA452CD | country:gb
| 2 | place | postcode | E45 23 | country:gb
| 3 | place | postcode | y45 | country:gb
When importing
Then table placex contains
| object | calculated_country_code | rank_search | rank_address
| N1 | gb | 30 | 30
| N2 | gb | 30 | 30
| N3 | gb | 30 | 30

Scenario: search and address rank for DE postcodes correctly assigned
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | 56427 | country:de
| 2 | place | postcode | 5642 | country:de
| 3 | place | postcode | 5642A | country:de
| 4 | place | postcode | 564276 | country:de
When importing
Then table placex contains
| object | calculated_country_code | rank_search | rank_address
| N1 | de | 21 | 11
| N2 | de | 30 | 30
| N3 | de | 30 | 30
| N4 | de | 30 | 30

Scenario: search and address rank for other postcodes are correctly assigned
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | 1 | country:ca
| 2 | place | postcode | X3 | country:ca
| 3 | place | postcode | 543 | country:ca
| 4 | place | postcode | 54dc | country:ca
| 5 | place | postcode | 12345 | country:ca
| 6 | place | postcode | 55TT667 | country:ca
| 7 | place | postcode | 123-65 | country:ca
| 8 | place | postcode | 12 445 4 | country:ca
| 9 | place | postcode | A1:bc10 | country:ca
When importing
Then table placex contains
| object | calculated_country_code | rank_search | rank_address
| N1 | ca | 21 | 11
| N2 | ca | 21 | 11
| N3 | ca | 21 | 11
| N4 | ca | 21 | 11
| N5 | ca | 21 | 11
| N6 | ca | 21 | 11
| N7 | ca | 25 | 11
| N8 | ca | 25 | 11
| N9 | ca | 25 | 11

Scenario: search and address ranks for places are correctly assigned
Given the named place nodes
| osm_id | class | type |
| 1 | foo | bar |
| 11 | place | Continent |
| 12 | place | continent |
| 13 | place | sea |
| 14 | place | country |
| 15 | place | state |
| 16 | place | region |
| 17 | place | county |
| 18 | place | city |
| 19 | place | island |
| 20 | place | town |
| 21 | place | village |
| 22 | place | hamlet |
| 23 | place | municipality |
| 24 | place | district |
| 25 | place | unincorporated_area |
| 26 | place | borough |
| 27 | place | suburb |
| 28 | place | croft |
| 29 | place | subdivision |
| 30 | place | isolated_dwelling |
| 31 | place | farm |
| 32 | place | locality |
| 33 | place | islet |
| 34 | place | mountain_pass |
| 35 | place | neighbourhood |
| 36 | place | house |
| 37 | place | building |
| 38 | place | houses |
And the named place nodes
| osm_id | class | type | extratags
| 100 | place | locality | 'locality' : 'townland'
| 101 | place | city | 'capital' : 'yes'
When importing
Then table placex contains
| object | rank_search | rank_address |
| N1 | 30 | 30 |
| N11 | 30 | 30 |
| N12 | 2 | 2 |
| N13 | 2 | 0 |
| N14 | 4 | 4 |
| N15 | 8 | 8 |
| N16 | 18 | 0 |
| N17 | 12 | 12 |
| N18 | 16 | 16 |
| N19 | 17 | 0 |
| N20 | 18 | 16 |
| N21 | 19 | 16 |
| N22 | 19 | 16 |
| N23 | 19 | 16 |
| N24 | 19 | 16 |
| N25 | 19 | 16 |
| N26 | 19 | 16 |
| N27 | 20 | 20 |
| N28 | 20 | 20 |
| N29 | 20 | 20 |
| N30 | 20 | 20 |
| N31 | 20 | 0 |
| N32 | 20 | 0 |
| N33 | 20 | 0 |
| N34 | 20 | 0 |
| N100 | 20 | 20 |
| N101 | 15 | 16 |
| N35 | 22 | 22 |
| N36 | 30 | 30 |
| N37 | 30 | 30 |
| N38 | 28 | 0 |

Scenario: search and address ranks for boundaries are correctly assigned
Given the named place nodes
| osm_id | class | type
| 1 | boundary | administrative
And the named place ways
| osm_id | class | type | geometry
| 10 | boundary | administrative | 10 10, 11 11
And the named place areas
| osm_type | osm_id | class | type | admin_level | geometry
| R | 20 | boundary | administrative | 2 | (1 1, 2 2, 1 2, 1 1)
| R | 21 | boundary | administrative | 32 | (3 3, 4 4, 3 4, 3 3)
| R | 22 | boundary | nature_park | 6 | (0 0, 1 0, 0 1, 0 0)
| R | 23 | boundary | natural_reserve| 10 | (0 0, 1 1, 1 0, 0 0)
When importing
Then table placex has no entry for N1
And table placex has no entry for W10
And table placex contains
| object | rank_search | rank_address
| R20 | 4 | 4
| R21 | 30 | 30
| R22 | 12 | 0
| R23 | 20 | 0

Scenario Outline: minor highways dropped without name, included with name
Given the scene roads-with-pois
And a wiped database
And the place ways
| osm_id | class | type | geometry
| 1 | highway | <type> | :w-south
And the named place ways
| osm_id | class | type | geometry
| 2 | highway | <type> | :w-north
When importing
Then table placex has no entry for W1
And table placex contains
| object | rank_search | rank_address
| W2 | <rank> | <rank>

Examples:
| type | rank
| service | 27
| cycleway | 27
| path | 27
| footway | 27
| steps | 27
| bridleway | 27
| track | 26
| byway | 26
| motorway_link | 27
| primary_link | 27
| trunk_link | 27
| secondary_link| 27
| tertiary_link | 27

Scenario: search and address ranks for highways correctly assigned
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type
| 1 | highway | bus_stop
And the place ways
| osm_id | class | type | geometry
| 1 | highway | primary | :w-south
| 2 | highway | secondary | :w-south
| 3 | highway | tertiary | :w-south
| 4 | highway | residential | :w-north
| 5 | highway | unclassified | :w-north
| 6 | highway | something | :w-north
When importing
Then table placex contains
| object | rank_search | rank_address
| N1 | 30 | 30
| W1 | 26 | 26
| W2 | 26 | 26
| W3 | 26 | 26
| W4 | 26 | 26
| W5 | 26 | 26
| W6 | 26 | 26

Scenario: rank and inclusion of landuses
Given the place nodes
| osm_id | class | type
| 1 | landuse | residential
And the named place nodes
| osm_id | class | type
| 2 | landuse | residential
And the place ways
| osm_id | class | type | geometry
| 1 | landuse | residential | 0 0, 0 1
And the named place ways
| osm_id | class | type | geometry
| 2 | landuse | residential | 1 1, 1 1.1
And the place areas
| osm_type | osm_id | class | type | geometry
| W | 3 | landuse | residential | poly-area:0.1
| R | 1 | landuse | residential | poly-area:0.01
| R | 10 | landuse | residential | poly-area:0.5
And the named place areas
| osm_type | osm_id | class | type | geometry
| W | 4 | landuse | residential | poly-area:0.1
| R | 2 | landuse | residential | poly-area:0.05
When importing
Then table placex has no entry for N1
And table placex has no entry for W1
And table placex has no entry for W3
And table placex has no entry for R1
And table placex has no entry for R10
And table placex contains
| object | rank_search | rank_address
| N2 | 30 | 30
| W2 | 30 | 30
| W4 | 22 | 22
| R2 | 22 | 22

Scenario: rank and inclusion of naturals
Given the place nodes
| osm_id | class | type
| 1 | natural | peak
| 3 | natural | volcano
And the named place nodes
| osm_id | class | type
| 2 | natural | peak
| 4 | natural | volcano
| 5 | natural | foobar
And the place ways
| osm_id | class | type | geometry
| 1 | natural | mountain_range | 10 10,11 11
And the named place ways
| osm_id | class | type | geometry
| 2 | natural | mountain_range | 12 12,11 11
| 3 | natural | foobar | 13 13,13.1 13
| 4 | natural | coastline | 14 14,14.1 14
And the place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | natural | volcano | poly-area:0.1
| R | 2 | natural | volcano | poly-area:1.0
And the named place areas
| osm_type | osm_id | class | type | geometry
| R | 3 | natural | volcano | poly-area:0.1
| R | 4 | natural | foobar | poly-area:0.5
| R | 5 | natural | sea | poly-area:5.0
| R | 6 | natural | sea | poly-area:0.01
| R | 7 | natural | coastline | poly-area:1.0
When importing
Then table placex has no entry for N1
And table placex has no entry for N3
And table placex has no entry for W1
And table placex has no entry for R1
And table placex has no entry for R2
And table placex has no entry for R7
And table placex has no entry for W4
And table placex contains
| object | rank_search | rank_address
| N2 | 18 | 0
| N4 | 18 | 0
| N5 | 30 | 30
| W2 | 18 | 0
| R3 | 18 | 0
| R4 | 22 | 22
| R5 | 4 | 4
| R6 | 4 | 4
| W3 | 30 | 30
28
tests/features/db/import/search_terms.feature
Normal file
@@ -0,0 +1,28 @@
@DB
Feature: Creation of search terms
Tests that search_name table is filled correctly

Scenario: POIs without a name have no search entry
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :p-N1
And the place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-north
When importing
Then table search_name has no entry for N1

Scenario: Named POIs inherit address from parent
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | name | geometry
| 1 | place | house | foo | :p-N1
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | the road | :w-north
When importing
Then search_name table contains
| place_id | name_vector | nameaddress_vector
| N1 | foo | the road
17
tests/features/db/import/simple.feature
Normal file
@@ -0,0 +1,17 @@
@DB
Feature: Import of simple objects
Testing simple stuff

Scenario: Import place node
Given the place nodes:
| osm_id | class | type | name | geometry
| 1 | place | village | 'name' : 'Foo' | 10.0 -10.0
When importing
Then table placex contains
| object | class | type | name | centroid
| N1 | place | village | 'name' : 'Foo' | 10.0,-10.0 +- 1m
When sending query "Foo"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
92
tests/features/db/update/linked_places.feature
Normal file
@@ -0,0 +1,92 @@
@DB
Feature: Updates of linked places
Tests that linked places are correctly added and deleted.

Scenario: Add linked place when linking relation is renamed
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
And the place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| R
When updating place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1
Then table placex contains
| object | linked_place_id
| N1 | None
When sending query "foo" with dups
Then results contain
| osm_type
| N

Scenario: Add linked place when linking relation is removed
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
And the place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| R
When marking for delete R1
Then table placex contains
| object | linked_place_id
| N1 | None
And sending query "foo" with dups
Then results contain
| osm_type
| N

Scenario: Remove linked place when linking relation is added
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| N
When updating place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
Then table placex contains
| object | linked_place_id
| N1 | R1
When sending query "foo" with dups
Then results contain
| osm_type
| R

Scenario: Remove linked place when linking relation is renamed
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
And the place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| N
When updating place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
Then table placex contains
| object | linked_place_id
| N1 | R1
When sending query "foo" with dups
Then results contain
| osm_type
| R
39
tests/features/db/update/naming.feature
Normal file
@@ -0,0 +1,39 @@
@DB
Feature: Update of names in place objects
Test all naming related issues in updates

Scenario: Updating postcode in postcode boundaries without ref
Given the place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
And sending query "12345"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
When updating place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 54321 | (0 0, 1 0, 1 1, 0 1, 0 0)
And sending query "12345"
Then exactly 0 results are returned
When sending query "54321"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1

Scenario: Delete postcode from postcode boundaries without ref
Given the place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
And sending query "12345"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
When updating place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | boundary | postal_code | (0 0, 1 0, 1 1, 0 1, 0 0)
Then table placex has no entry for R1
87
tests/features/db/update/simple.feature
Normal file
@@ -0,0 +1,87 @@
@DB
Feature: Update of simple objects
Testing simple stuff

Scenario: Remove name from a landuse object
Given the place nodes
| osm_id | class | type | name
| 1 | landuse | wood | 'name' : 'Foo'
When importing
Then table placex contains
| object | class | type | name
| N1 | landuse| wood | 'name' : 'Foo'
When updating place nodes
| osm_id | class | type
| 1 | landuse | wood
Then table placex has no entry for N1

Scenario: Do delete small boundary features
Given the place areas
| osm_type | osm_id | class | type | admin_level | geometry
| R | 1 | boundary | administrative | 3 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
Then table placex contains
| object | rank_search
| R1 | 6
When marking for delete R1
Then table placex has no entry for R1

Scenario: Do not delete large boundary features
Given the place areas
| osm_type | osm_id | class | type | admin_level | geometry
| R | 1 | boundary | administrative | 3 | (0 0, 2 0, 2 2.1, 0 2, 0 0)
When importing
Then table placex contains
| object | rank_search
| R1 | 6
When marking for delete R1
Then table placex contains
| object | rank_search
| R1 | 6

Scenario: Do delete large features of low rank
Given the named place areas
| osm_type | osm_id | class | type | geometry
| W | 1 | place | house | (0 0, 2 0, 2 2.1, 0 2, 0 0)
| R | 1 | boundary | national_park | (0 0, 2 0, 2 2.1, 0 2, 0 0)
When importing
Then table placex contains
| object | rank_address
| R1 | 0
| W1 | 30
When marking for delete R1,W1
Then table placex has no entry for W1
Then table placex has no entry for R1

Scenario: type mutation
Given the place nodes
| osm_id | class | type | geometry
| 3 | shop | toys | 1 -1
When importing
Then table placex contains
| object | class | type
| N3 | shop | toys
When updating place nodes
| osm_id | class | type | geometry
| 3 | shop | grocery | 1 -1
Then table placex contains
| object | class | type
| N3 | shop | grocery

Scenario: remove postcode place when house number is added
Given the place nodes
| osm_id | class | type | postcode | geometry
| 3 | place | postcode | 12345 | 1 -1
When importing
Then table placex contains
| object | class | type
| N3 | place | postcode
When updating place nodes
| osm_id | class | type | postcode | housenumber | geometry
| 3 | place | house | 12345 | 13 | 1 -1
Then table placex contains
| object | class | type
| N3 | place | house
13
tests/features/osm2pgsql/import/relation.feature
Normal file
@@ -0,0 +1,13 @@
@DB
Feature: Import of relations by osm2pgsql
Testing specific relation problems related to members.

Scenario: Don't import empty waterways
Given the osm nodes:
| id | tags
| 1 | 'amenity' : 'prison', 'name' : 'foo'
And the osm relations:
| id | tags | members
| 1 | 'type' : 'waterway', 'waterway' : 'river', 'name' : 'XZ' | N1
When loading osm data
Then table place has no entry for R1
68
tests/features/osm2pgsql/import/simple.feature
Normal file
@@ -0,0 +1,68 @@
@DB
Feature: Import of simple objects by osm2pgsql
Testing basic tagging in osm2pgsql imports.

Scenario: Import simple objects
Given the osm nodes:
| id | tags
| 1 | 'amenity' : 'prison', 'name' : 'foo'
Given the osm nodes:
| id | geometry
| 100 | 0 0
| 101 | 0 0.1
| 102 | 0.1 0.2
| 200 | 0 0
| 201 | 0 1
| 202 | 1 1
| 203 | 1 0
And the osm ways:
| id | tags | nodes
| 1 | 'shop' : 'toys', 'name' : 'tata' | 100 101 102
| 2 | 'ref' : '45' | 200 201 202 203 200
And the osm relations:
| id | tags | members
| 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1,W2
When loading osm data
Then table place contains
| object | class | type | name
| N1 | amenity | prison | 'name' : 'foo'
| W1 | shop | toys | 'name' : 'tata'
| R1 | tourism | hotel | 'name' : 'XZ'

Scenario: Import object with two main tags
Given the osm nodes:
| id | tags
| 1 | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo'
When loading osm data
Then table place contains
| object | class | type | name
| N1:tourism | tourism | hotel | 'name' : 'foo'
| N1:amenity | amenity | restaurant | 'name' : 'foo'

Scenario: Import stand-alone house number with postcode
Given the osm nodes:
| id | tags
| 1 | 'addr:housenumber' : '4', 'addr:postcode' : '3345'
When loading osm data
Then table place contains
| object | class | type
| N1 | place | house

Scenario: Landuses are only imported when named
Given the osm nodes:
| id | geometry
| 100 | 0 0
| 101 | 0 0.1
| 102 | 0.1 0.1
| 200 | 0 0
| 202 | 1 1
| 203 | 1 0
And the osm ways:
| id | tags | nodes
| 1 | 'landuse' : 'residential', 'name' : 'rainbow' | 100 101 102 100
| 2 | 'landuse' : 'residential' | 200 202 203 200
When loading osm data
Then table place contains
| object | class | type
| W1 | landuse | residential
And table place has no entry for W2
30 tests/features/osm2pgsql/update/relation.feature (Normal file)
@@ -0,0 +1,30 @@
@DB
Feature: Update of relations by osm2pgsql
    Testing relation update by osm2pgsql.

    Scenario: Remove all members of a relation
        Given the osm nodes:
          | id | tags |
          | 1  | 'amenity' : 'prison', 'name' : 'foo' |
        Given the osm nodes:
          | id  | geometry |
          | 200 | 0 0 |
          | 201 | 0 0.0001 |
          | 202 | 0.0001 0.0001 |
          | 203 | 0.0001 0 |
        Given the osm ways:
          | id | tags | nodes |
          | 2  | 'ref' : '45' | 200 201 202 203 200 |
        Given the osm relations:
          | id | tags | members |
          | 1  | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | W2 |
        When loading osm data
        Then table place contains
          | object | class   | type  | name |
          | R1     | tourism | hotel | 'name' : 'XZ' |
        Given the osm relations:
          | action | id | tags | members |
          | M      | 1  | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1 |
        When updating osm data
        Then table place has no entry for R1
22 tests/features/osm2pgsql/update/simple.feature (Normal file)
@@ -0,0 +1,22 @@
@DB
Feature: Update of simple objects by osm2pgsql
    Testing basic update functions of osm2pgsql.

    Scenario: Import object with two main tags
        Given the osm nodes:
          | id | tags |
          | 1  | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo' |
        When loading osm data
        Then table place contains
          | object     | class   | type       | name |
          | N1:tourism | tourism | hotel      | 'name' : 'foo' |
          | N1:amenity | amenity | restaurant | 'name' : 'foo' |
        Given the osm nodes:
          | action | id | tags |
          | M      | 1  | 'tourism' : 'hotel', 'name' : 'foo' |
        When updating osm data
        Then table place has no entry for N1:amenity
        And table place contains
          | object     | class   | type  | name |
          | N1:tourism | tourism | hotel | 'name' : 'foo' |
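The "table place contains" and "table place has no entry for" steps used throughout these features compare the expected rows against the test import database. As a rough, purely illustrative sketch (not the suite's actual step definition), such a check could be written as a lettuce step along the following lines; the world.conn database handle is an assumption made only for this example.

    # Illustrative sketch of a "table place contains" step; world.conn is an
    # assumed psycopg2 connection and not part of the real test suite.
    from lettuce import step, world
    from nose.tools import assert_equals

    @step(u'table place contains')
    def check_place_contains(step):
        cur = world.conn.cursor()
        for line in step.hashes:
            # object is e.g. 'N1', 'W2' or 'N1:amenity': type letter, id, optional class
            obj = line['object']
            osm_type, rest = obj[0], obj[1:].split(':')
            query = 'SELECT class, type FROM place WHERE osm_type = %s AND osm_id = %s'
            params = [osm_type, int(rest[0])]
            if len(rest) > 1:
                query += ' AND class = %s'
                params.append(rest[1])
            cur.execute(query, params)
            row = cur.fetchone()
            assert row is not None, 'no place entry for %s' % obj
            assert_equals(line['class'], row[0])
            assert_equals(line['type'], row[1])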
31 tests/scenes/bin/make_scenes.sh (Executable file)
@@ -0,0 +1,31 @@
#!/bin/bash -e
#
# Regenerates wkts for scenarios.
#

datadir="$( cd "$( dirname "$0" )" && cd ../data && pwd )"

if [ ! -d "$datadir" ]; then
    echo "Cannot find data dir."
    exit 1
fi

echo "Using datadir $datadir"
pushd $datadir

# remove old wkts
rm $datadir/*.wkt

# create wkts from SQL scripts
for fl in *.sql; do
    echo "Processing $fl.."
    cat $fl | psql -d nominatim -t -o ${fl/.sql/.wkt}
done

# create wkts from .osm files
for fl in *.osm; do
    echo "Processing $fl.."
    ../bin/osm2wkt $fl
done

popd
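The scene files written here (see the tests/scenes/data/*.wkt listings further down) hold one "id | WKT" pair per line. A minimal sketch of a loader for that format, assuming the data directory used above; the helper function itself is illustrative and not part of this change.

    # Minimal sketch: read a scene file of "id | WKT" lines into a dict.
    import os

    def load_scene(scene, datadir='tests/scenes/data'):
        geometries = {}
        with open(os.path.join(datadir, scene + '.wkt')) as fd:
            for line in fd:
                if not line.strip():
                    continue
                obj_id, wkt = line.split('|', 1)
                geometries[obj_id.strip()] = wkt.strip()
        return geometries

    # e.g. load_scene('road-with-alley')['n-corner'] == 'POINT(1.0019235 2.0003542)'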
101 tests/scenes/bin/osm2wkt.cc (Normal file)
@@ -0,0 +1,101 @@
// The code in this file is released into the Public Domain.

#include <iostream>
#include <fstream>
#include <string>
#include <unordered_map>

#include <osmium/area/assembler.hpp>
#include <osmium/area/collector.hpp>
#include <osmium/area/problem_reporter_exception.hpp>
#include <osmium/geom/wkt.hpp>
#include <osmium/handler.hpp>
#include <osmium/handler/node_locations_for_ways.hpp>
#include <osmium/io/any_input.hpp>
#include <osmium/visitor.hpp>
#include <osmium/index/map/stl_map.hpp>

typedef osmium::index::map::StlMap<osmium::unsigned_object_id_type, osmium::Location> index_type;

typedef osmium::handler::NodeLocationsForWays<index_type, index_type> location_handler_type;


class ExportToWKTHandler : public osmium::handler::Handler {

    osmium::geom::WKTFactory m_factory;
    std::unordered_map<std::string, std::ofstream> m_files;

public:

    void node(const osmium::Node& node) {
        print_geometry(node.tags(), m_factory.create_point(node));
    }

    void way(const osmium::Way& way) {
        if (!way.is_closed() || !way.tags().get_value_by_key("area"))
            print_geometry(way.tags(), m_factory.create_linestring(way));
    }

    void area(const osmium::Area& area) {
        if (!area.from_way() || area.tags().get_value_by_key("area"))
            print_geometry(area.tags(), m_factory.create_multipolygon(area));
    }

    void close() {
        for (auto& fd : m_files)
            fd.second.close();
    }

private:

    // Writes "<test:id> | <wkt>" into <test:section>.wkt, opening the
    // per-section output file on first use.
    void print_geometry(const osmium::TagList& tags, const std::string& wkt) {
        const char* scenario = tags.get_value_by_key("test:section");
        const char* id = tags.get_value_by_key("test:id");
        if (scenario && id) {
            auto& fd = m_files[std::string(scenario)];
            if (!fd.is_open())
                fd.open(std::string(scenario) + ".wkt");
            fd << id << " | " << wkt << "\n";
        }
    }

}; // class ExportToWKTHandler

int main(int argc, char* argv[]) {
    if (argc != 2) {
        std::cerr << "Usage: " << argv[0] << " OSMFILE\n";
        exit(1);
    }

    std::string input_filename {argv[1]};

    typedef osmium::area::Assembler area_assembler_type;
    osmium::area::ProblemReporterException problem_reporter;
    area_assembler_type assembler(&problem_reporter);
    osmium::area::Collector<area_assembler_type> collector(assembler);

    std::cerr << "Pass 1...\n";
    osmium::io::Reader reader1(input_filename);
    collector.read_relations(reader1);
    std::cerr << "Pass 1 done\n";

    index_type index_pos;
    index_type index_neg;
    location_handler_type location_handler(index_pos, index_neg);

    std::cerr << "Pass 2...\n";
    ExportToWKTHandler export_handler;
    osmium::io::Reader reader2(input_filename);
    osmium::apply(reader2, location_handler, export_handler, collector.handler());
    reader2.close();
    osmium::apply(collector, export_handler);
    export_handler.close();
    std::cerr << "Pass 2 done\n";

    // Free memory allocated by the protobuf library used for PBF input.
    google::protobuf::ShutdownProtobufLibrary();
}
7 tests/scenes/data/building-on-street-corner.wkt (Normal file)
@@ -0,0 +1,7 @@
n-edge-NS | POINT(1.0040019 2.000324)
n-inner | POINT(1.0039385 2.0003548)
n-outer | POINT(1.0039478 2.0004676)
n-edge-WE | POINT(1.0039599 2.0002345)
w-WE | LINESTRING(1.0031759 2.0002316,1.0040361 2.0002211,1.0042735 2.0002264)
w-NS | LINESTRING(1.0040414 2.0001051,1.0040361 2.0002211,1.0040364 2.0006377)
w-building | MULTIPOLYGON(((1.0040019 2.000324,1.0040016 2.0002344,1.0039599 2.0002345,1.0039037 2.0002347,1.0039043 2.0004389,1.0040023 2.0004386,1.0040019 2.000324)))
1 tests/scenes/data/country.sql (Normal file)
@@ -0,0 +1 @@
select country_code, st_astext(st_pointonsurface(st_collect(geometry))) from country_osm_grid group by country_code order by country_code
250 tests/scenes/data/country.wkt (Normal file)
@@ -0,0 +1,250 @@
|
||||
ad | POINT(1.58972361752509 42.54241545)
|
||||
ae | POINT(54.6158905029297 24.8243131637573)
|
||||
af | POINT(65.9026412963867 34.8470859527588)
|
||||
ag | POINT(-61.7243069800293 17.069)
|
||||
ai | POINT(-63.1057155298182 18.2546197)
|
||||
al | POINT(19.8494176864624 40.2123275624912)
|
||||
am | POINT(44.6422958374023 40.3782157897949)
|
||||
ao | POINT(16.2192406654358 -12.7701482772827)
|
||||
aq | POINT(44.999999975 -75.6569557189941)
|
||||
ar | POINT(-61.1075973510742 -34.3761558532715)
|
||||
as | POINT(-170.684700024275 -14.2930755)
|
||||
at | POINT(14.2574706077576 47.3654232025146)
|
||||
au | POINT(138.231559753418 -23.7206888198853)
|
||||
aw | POINT(-69.98255055 12.555)
|
||||
ax | POINT(19.9183956313477 59.81682435)
|
||||
az | POINT(48.385555267334 40.6163997650146)
|
||||
ba | POINT(17.1851491928101 44.2558269500732)
|
||||
bb | POINT(-59.53342165 13.19)
|
||||
bd | POINT(89.759895324707 24.3420524597168)
|
||||
be | POINT(4.90078139305115 50.3468225048828)
|
||||
bf | POINT(-0.567435041069984 11.9047117233276)
|
||||
bg | POINT(24.8061628341675 43.0985908508301)
|
||||
bh | POINT(50.5203291219829 25.94685735)
|
||||
bi | POINT(29.5456137866089 -2.99057915)
|
||||
bj | POINT(2.70062518119812 10.0279288291931)
|
||||
bl | POINT(-62.7934947763772 17.907)
|
||||
bm | POINT(-64.7740692745195 32.30199165)
|
||||
bn | POINT(114.521968608887 4.2863885)
|
||||
bo | POINT(-62.0247344970703 -17.7772369384766)
|
||||
bq | POINT(-63.1432235610045 17.566)
|
||||
br | POINT(-45.7706508636475 -9.5868501663208)
|
||||
bs | POINT(-77.6091675884277 23.8745)
|
||||
bt | POINT(90.0135078430176 27.281379699707)
|
||||
bv | POINT(3.35744155625 -54.4215)
|
||||
bw | POINT(23.5150556564331 -23.4839134216309)
|
||||
by | POINT(26.7725925445557 53.1588516235352)
|
||||
bz | POINT(-88.6348991394043 16.3395160487277)
|
||||
ca | POINT(-107.74817276001 67.1261215209961)
|
||||
cc | POINT(96.8442066294247 -12.0173443)
|
||||
cd | POINT(24.0954418182373 -1.67713665962219)
|
||||
cf | POINT(22.5870132446289 5.98438787460327)
|
||||
cg | POINT(15.7887516021729 0.403886616230011)
|
||||
ch | POINT(7.65705513954163 46.5744686126709)
|
||||
ci | POINT(-6.31190967559814 6.6278383731842)
|
||||
ck | POINT(-159.778351359569 -21.23349585)
|
||||
cl | POINT(-70.4179039001465 -53.7718944549561)
|
||||
cm | POINT(13.260226726532 5.94519567489624)
|
||||
cn | POINT(96.4428558349609 38.0426063537598)
|
||||
co | POINT(-72.5295104980469 2.45174860954285)
|
||||
cr | POINT(-83.8331413269043 9.935142993927)
|
||||
cu | POINT(-80.8167381286621 21.8885278701782)
|
||||
cv | POINT(-24.508106575 14.929)
|
||||
cw | POINT(-68.9640918594077 12.1845)
|
||||
cx | POINT(105.624119513558 -10.48417)
|
||||
cy | POINT(32.959223486499 35.37010195)
|
||||
cz | POINT(16.3209805488586 49.5069274902344)
|
||||
de | POINT(9.30716800689697 50.2128944396973)
|
||||
dj | POINT(42.969040422876 11.41542855)
|
||||
dk | POINT(9.18490123748779 55.5634002685547)
|
||||
dm | POINT(-61.0035801928854 15.6547055)
|
||||
do | POINT(-69.6285591125488 18.5884169089722)
|
||||
dz | POINT(4.24749487638474 25.797215461731)
|
||||
ec | POINT(-77.4583168029785 -0.982844322919846)
|
||||
ee | POINT(23.9428863525391 58.439525604248)
|
||||
eg | POINT(28.952935218811 28.1771860122681)
|
||||
eh | POINT(-13.6903142929077 25.0124177932739)
|
||||
er | POINT(39.0122375488281 14.960337638855)
|
||||
es | POINT(-2.59110307693481 38.7935485839844)
|
||||
et | POINT(38.6169757843018 7.71399855613708)
|
||||
fi | POINT(26.8979873657227 63.5619449615479)
|
||||
fj | POINT(177.918533325195 -17.7423753738403)
|
||||
fk | POINT(-60.0855102539062 -51.6555919647217)
|
||||
fm | POINT(151.9535889125 8.5045)
|
||||
fo | POINT(-6.60483694084778 62.10000995)
|
||||
fr | POINT(0.284105718135834 47.5104522705078)
|
||||
ga | POINT(10.8107047080994 -0.0742915570735931)
|
||||
gb | POINT(-0.928231082856655 52.0161876678467)
|
||||
gd | POINT(-61.6452430375 12.191)
|
||||
ge | POINT(44.1666488647461 42.0038585662842)
|
||||
gf | POINT(-53.4652481079102 3.56188893318176)
|
||||
gg | POINT(-2.50580395030125 49.5854381)
|
||||
gh | POINT(-0.463488027453423 7.16051578521729)
|
||||
gi | POINT(-5.32053155848457 36.1106663)
|
||||
gl | POINT(-33.8551120758057 74.6635551452637)
|
||||
gm | POINT(-16.4096023535368 13.25)
|
||||
gn | POINT(-13.839409828186 10.9629158973694)
|
||||
gp | POINT(-61.6871265247053 16.23049055)
|
||||
gq | POINT(10.2397356033325 1.43119311332703)
|
||||
gr | POINT(23.1785039901733 39.0620670318604)
|
||||
gs | POINT(-36.4943086948773 -54.4306784)
|
||||
gt | POINT(-90.7436828613281 15.2042865753174)
|
||||
gu | POINT(144.733626445767 13.444138)
|
||||
gw | POINT(-14.8352527618408 11.9248690605164)
|
||||
gy | POINT(-58.4516773223877 5.73698806762695)
|
||||
hk | POINT(114.18577775 22.3492361)
|
||||
hm | POINT(73.6823082266602 -53.22105985)
|
||||
hn | POINT(-86.9541435241699 15.2382001876831)
|
||||
hr | POINT(17.499662399292 45.5268955230713)
|
||||
ht | POINT(-73.5192565917969 18.3249206691162)
|
||||
hu | POINT(20.3536291122437 47.5172100067139)
|
||||
id | POINT(123.345050811768 -0.837919592857361)
|
||||
ie | POINT(-9.00520038604736 52.8772506713867)
|
||||
il | POINT(35.4631499949707 32.86165655)
|
||||
im | POINT(-4.86740773691101 54.023)
|
||||
in | POINT(88.6762087020508 27.86155515)
|
||||
io | POINT(71.4274391359073 -6.14349685)
|
||||
iq | POINT(42.5810985565186 34.2610359191895)
|
||||
ir | POINT(51.268892288208 34.1931705474854)
|
||||
is | POINT(-17.5178508758545 64.7168769836426)
|
||||
it | POINT(10.4263944625854 44.8790493011475)
|
||||
je | POINT(-2.19261599848299 49.1245833)
|
||||
jm | POINT(-76.8402003547852 18.3935)
|
||||
jo | POINT(36.5555210113525 30.7574186325073)
|
||||
jp | POINT(138.725311279297 35.9209995269775)
|
||||
ke | POINT(36.9060287475586 1.08512867614627)
|
||||
kg | POINT(76.1557197570801 41.6649742126465)
|
||||
kh | POINT(104.319019317627 12.9555516242981)
|
||||
ki | POINT(173.633537933333 0.139)
|
||||
km | POINT(44.3147485207764 -12.241)
|
||||
kn | POINT(-62.6937987175 17.2555)
|
||||
kp | POINT(126.655757904053 39.6457576751709)
|
||||
kr | POINT(127.277404785156 36.4138870239258)
|
||||
kw | POINT(47.3068407840576 29.6918055)
|
||||
ky | POINT(-81.0745526670982 19.2994923579778)
|
||||
kz | POINT(72.008113861084 49.8885555267334)
|
||||
la | POINT(102.443916320801 19.8160953521729)
|
||||
lb | POINT(35.4846443715483 33.4176673878926)
|
||||
lc | POINT(-60.978944125 13.891)
|
||||
li | POINT(9.54693948514429 47.15934115)
|
||||
lk | POINT(80.3852043151855 8.41649961471558)
|
||||
lr | POINT(-11.169605255127 4.04122126102448)
|
||||
ls | POINT(28.6698419546997 -29.9453849)
|
||||
lt | POINT(24.5173501968384 55.4929389953613)
|
||||
lu | POINT(6.08649672997471 49.81533445)
|
||||
lv | POINT(23.5103368759155 56.6714401245117)
|
||||
ly | POINT(15.3684158325195 28.1217727661133)
|
||||
ma | POINT(-4.0306156873703 33.2169628143311)
|
||||
mc | POINT(7.47743150426578 43.62917385)
|
||||
md | POINT(29.6172503477783 46.6651745)
|
||||
me | POINT(19.7229134314941 43.02441345)
|
||||
mf | POINT(-63.0666651534257 18.0810209)
|
||||
mg | POINT(45.8637886047363 -20.5024528503418)
|
||||
mh | POINT(171.949820566667 5.983)
|
||||
mk | POINT(21.421085357666 41.0898007597656)
|
||||
ml | POINT(-1.93310506641865 16.4699301719666)
|
||||
mm | POINT(95.5462455749512 21.0962018966675)
|
||||
mn | POINT(99.8113822937012 48.1861572265625)
|
||||
mo | POINT(113.564416766761 22.16209625)
|
||||
mp | POINT(145.213452483189 14.1490205)
|
||||
mq | POINT(-60.8112834227783 14.43706925)
|
||||
mr | POINT(-9.42324566841125 22.5925149917603)
|
||||
ms | POINT(-62.1945521583333 16.745)
|
||||
mt | POINT(14.3836306158583 35.9446731)
|
||||
mu | POINT(57.551211475 -20.41)
|
||||
mv | POINT(73.3929214477539 4.19375014305115)
|
||||
mw | POINT(33.9572296142578 -12.2821822166443)
|
||||
mx | POINT(-105.892219543457 25.8682699203491)
|
||||
my | POINT(112.711540222168 2.10098683834076)
|
||||
mz | POINT(37.5868968963623 -15.5801844596863)
|
||||
na | POINT(16.6856970787048 -21.4657220840454)
|
||||
nc | POINT(164.953224182129 -20.3888988494873)
|
||||
ne | POINT(10.060417175293 19.0827360153198)
|
||||
nf | POINT(167.95718166875 -29.0645)
|
||||
ng | POINT(10.1778125762939 10.1780409812927)
|
||||
ni | POINT(-85.8797492980957 13.2171587944031)
|
||||
nl | POINT(-68.5706209441406 12.041)
|
||||
no | POINT(23.1155624389648 70.0993499755859)
|
||||
np | POINT(83.3625984191895 28.1310758590698)
|
||||
nr | POINT(166.934792270833 -0.5275)
|
||||
nu | POINT(-169.848737911905 -19.05305275)
|
||||
nz | POINT(167.972099304199 -45.1305675506592)
|
||||
om | POINT(56.8605518341064 20.4741315841675)
|
||||
pa | POINT(-79.4016036987305 8.80656003952026)
|
||||
pe | POINT(-78.6654052734375 -7.54711985588074)
|
||||
pf | POINT(-145.057191213086 -16.7086236)
|
||||
pg | POINT(146.646003723145 -7.37427568435669)
|
||||
ph | POINT(121.483592987061 15.0996527671814)
|
||||
pk | POINT(72.1134796142578 31.1462965011597)
|
||||
pl | POINT(17.8813629150391 52.771821975708)
|
||||
pm | POINT(-56.1951589074841 46.7832469)
|
||||
pn | POINT(-130.106425528029 -25.0695595)
|
||||
pr | POINT(-65.8875553967285 18.3716905)
|
||||
ps | POINT(35.3980153741943 32.24773475)
|
||||
pt | POINT(-8.45743942260742 40.1115436553955)
|
||||
pw | POINT(134.496454875 7.3245)
|
||||
py | POINT(-59.5178718566895 -22.4128150939941)
|
||||
qa | POINT(51.4990362304443 24.9981677)
|
||||
re | POINT(55.7734550547607 -21.3638828)
|
||||
ro | POINT(26.3763284683228 45.3612003326416)
|
||||
rs | POINT(20.4037199020386 44.5641384124756)
|
||||
ru | POINT(116.440608978271 59.0678024291992)
|
||||
rw | POINT(29.5788261333252 -1.6240443)
|
||||
sa | POINT(47.7316932678223 22.4379062652588)
|
||||
sb | POINT(164.638946533203 -10.2360653877258)
|
||||
sc | POINT(46.3656697 -9.454)
|
||||
sd | POINT(28.1472072601318 14.5642309188843)
|
||||
se | POINT(15.6866798400879 60.3556804656982)
|
||||
sg | POINT(103.84187219299 1.304)
|
||||
sh | POINT(-12.2815573611979 -37.11546755)
|
||||
si | POINT(14.0473856628607 46.390855)
|
||||
sj | POINT(15.2755260467529 79.2336540222168)
|
||||
sk | POINT(20.416033744812 48.869701385498)
|
||||
sl | POINT(-11.4777312278748 8.78156280517578)
|
||||
sm | POINT(12.4606268797657 43.9427969)
|
||||
sn | POINT(-15.3711128234863 14.9947791099548)
|
||||
so | POINT(46.9338359832764 9.34094429016113)
|
||||
sr | POINT(-56.4855213165283 4.5773549079895)
|
||||
ss | POINT(28.1357345581055 8.50933408737183)
|
||||
st | POINT(6.61025854583333 0.2215)
|
||||
sv | POINT(-89.3666543301004 13.4307287)
|
||||
sx | POINT(-63.1539330807882 17.9345)
|
||||
sy | POINT(38.1551322937012 35.3422107696533)
|
||||
sz | POINT(31.782634398523 -26.14244365)
|
||||
tc | POINT(-71.325541342334 21.35)
|
||||
td | POINT(17.4209251403809 13.4622311592102)
|
||||
tf | POINT(137.5 -67.5)
|
||||
tg | POINT(1.0698350071907 7.87677597999573)
|
||||
th | POINT(102.008777618408 16.4231028556824)
|
||||
tj | POINT(71.9134941101074 39.0152739312988)
|
||||
tk | POINT(-171.826039878679 -9.209903)
|
||||
tl | POINT(126.225208282471 -8.72636747360229)
|
||||
tm | POINT(57.7160358428955 39.9253444671631)
|
||||
tn | POINT(9.04958724975586 34.8419933319092)
|
||||
to | POINT(-176.993202209473 -23.1110429763794)
|
||||
tr | POINT(32.8200283050537 39.8635063171387)
|
||||
tt | POINT(-60.70793924375 11.1385)
|
||||
tv | POINT(178.774993896484 -9.41685771942139)
|
||||
tw | POINT(120.300746917725 23.1700229644775)
|
||||
tz | POINT(33.5389289855957 -5.01840615272522)
|
||||
ua | POINT(33.4433536529541 49.3061904907227)
|
||||
ug | POINT(32.9652328491211 2.08584922552109)
|
||||
um | POINT(-169.509930872296 16.74605815)
|
||||
us | POINT(-116.395355224609 40.7137908935547)
|
||||
uy | POINT(-56.4650554656982 -33.6265888214111)
|
||||
uz | POINT(61.3552989959717 42.9610729217529)
|
||||
va | POINT(12.3319785703086 42.0493197)
|
||||
vc | POINT(-61.0990541737305 13.316)
|
||||
ve | POINT(-64.8832321166992 7.69849991798401)
|
||||
vg | POINT(-64.6247911940199 18.419)
|
||||
vi | POINT(-64.8895090795187 18.3226325)
|
||||
vn | POINT(104.201791331787 10.27644235)
|
||||
vu | POINT(167.319198608398 -15.8868751525879)
|
||||
wf | POINT(-176.207816222208 -13.28535775)
|
||||
ws | POINT(-172.109667323427 -13.850938)
|
||||
ye | POINT(45.945629119873 16.1633830070496)
|
||||
yt | POINT(44.9377459760742 -12.6088246)
|
||||
za | POINT(23.1948881149292 -30.4327602386475)
|
||||
zm | POINT(26.3861808776855 -14.3996663093567)
|
||||
zw | POINT(30.1241998672485 -19.8690795898438)
|
||||
|
||||
8 tests/scenes/data/points-on-roads.wkt (Normal file)
@@ -0,0 +1,8 @@
n-N-unglued | POINT(1.004922 2.0005155)
n-S-unglued | POINT(1.0046259 2.0002949)
n-NE | POINT(1.0050661 2.0006118)
n-SE | POINT(1.0051339 2.0003349)
n-NW | POINT(1.0047583 2.0004087)
n-SW | POINT(1.0047275 2.0003564)
w-north | LINESTRING(1.0044996 2.0004302,1.0046259 2.0003841,1.0047583 2.0004087,1.004922 2.0005155,1.0050661 2.0006118,1.0053155 2.0006241)
w-south | LINESTRING(1.0045243 2.0002241,1.0046259 2.0002949,1.0047275 2.0003564,1.004826 2.0002918,1.0049368 2.0002641,1.0051339 2.0003349,1.0053278 2.0003687)
11 tests/scenes/data/poly-area.wkt (Normal file)
@@ -0,0 +1,11 @@
0.0001 | MULTIPOLYGON(((0.001 0,0 0,0 0.1,0.001 0.1,0.001 0)))
0.0005 | MULTIPOLYGON(((0.005 0,0 0,0 0.1,0.005 0.1,0.005 0)))
0.001 | MULTIPOLYGON(((0.01 0,0 0,0 0.1,0.01 0.1,0.01 0)))
0.005 | MULTIPOLYGON(((0.05 0,0 0,0 0.1,0.05 0.1,0.05 0)))
0.01 | MULTIPOLYGON(((0.1 0,0 0,0 0.1,0.1 0.1,0.1 0)))
0.05 | MULTIPOLYGON(((0.5 0,0 0,0 0.1,0.5 0.1,0.5 0)))
0.1 | MULTIPOLYGON(((0.1 0,0 0,0 1,0.1 1,0.1 0)))
0.5 | MULTIPOLYGON(((0.5 0,0 0,0 1,0.5 1,0.5 0)))
1.0 | MULTIPOLYGON(((1 0,0 0,0 1,1 1,1 0)))
2.0 | MULTIPOLYGON(((2 0,0 0,0 1,2 1,2 0)))
5.0 | MULTIPOLYGON(((5 0,0 0,0 1,5 1,5 0)))
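The ids in poly-area.wkt appear to state the approximate area of each rectangle in square degrees: the 0.001 x 0.1 degree rectangle is labelled 0.0001, the 5 x 1 degree rectangle 5.0. A short, purely illustrative check of that reading against the bounding boxes above:

    # Illustrative only: the scene id matches the bounding-box area in square degrees.
    def bbox_area(coords):
        xs = [x for x, y in coords]
        ys = [y for x, y in coords]
        return (max(xs) - min(xs)) * (max(ys) - min(ys))

    # first and last rectangles from poly-area.wkt
    assert abs(bbox_area([(0.001, 0), (0, 0), (0, 0.1), (0.001, 0.1)]) - 0.0001) < 1e-12
    assert abs(bbox_area([(5, 0), (0, 0), (0, 1), (5, 1)]) - 5.0) < 1e-12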
168 tests/scenes/data/poly-areas.osm (Normal file)
@@ -0,0 +1,168 @@
|
||||
<?xml version='1.0' encoding='UTF-8'?>
|
||||
<osm version='0.6'>
|
||||
<node id="100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="101" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
|
||||
<node id="102" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.001" />
|
||||
<node id="103" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.001" />
|
||||
<way id="100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="100" />
|
||||
<nd ref="101" />
|
||||
<nd ref="102" />
|
||||
<nd ref="103" />
|
||||
<nd ref="100" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.0001"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="200" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="201" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
|
||||
<node id="202" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.005" />
|
||||
<node id="203" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.005" />
|
||||
<way id="200" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="200" />
|
||||
<nd ref="201" />
|
||||
<nd ref="202" />
|
||||
<nd ref="203" />
|
||||
<nd ref="200" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.0005"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="300" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="301" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
|
||||
<node id="302" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.01" />
|
||||
<node id="303" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.01" />
|
||||
<way id="300" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="300" />
|
||||
<nd ref="301" />
|
||||
<nd ref="302" />
|
||||
<nd ref="303" />
|
||||
<nd ref="300" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.001"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="400" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="401" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
|
||||
<node id="402" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.05" />
|
||||
<node id="403" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.05" />
|
||||
<way id="400" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="400" />
|
||||
<nd ref="401" />
|
||||
<nd ref="402" />
|
||||
<nd ref="403" />
|
||||
<nd ref="400" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.005"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="500" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="501" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
|
||||
<node id="502" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.1" />
|
||||
<node id="503" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.1" />
|
||||
<way id="500" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="500" />
|
||||
<nd ref="501" />
|
||||
<nd ref="502" />
|
||||
<nd ref="503" />
|
||||
<nd ref="500" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.01"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="600" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="601" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
|
||||
<node id="602" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.5" />
|
||||
<node id="603" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.5" />
|
||||
<way id="600" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="600" />
|
||||
<nd ref="601" />
|
||||
<nd ref="602" />
|
||||
<nd ref="603" />
|
||||
<nd ref="600" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.05"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="700" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="701" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
|
||||
<node id="702" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.1" />
|
||||
<node id="703" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.1" />
|
||||
<way id="700" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="700" />
|
||||
<nd ref="701" />
|
||||
<nd ref="702" />
|
||||
<nd ref="703" />
|
||||
<nd ref="700" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.1"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="800" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="801" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
|
||||
<node id="802" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.5" />
|
||||
<node id="803" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.5" />
|
||||
<way id="800" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="800" />
|
||||
<nd ref="801" />
|
||||
<nd ref="802" />
|
||||
<nd ref="803" />
|
||||
<nd ref="800" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="0.5"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="900" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="901" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
|
||||
<node id="902" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="1.0" />
|
||||
<node id="903" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="1.0" />
|
||||
<way id="900" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="900" />
|
||||
<nd ref="901" />
|
||||
<nd ref="902" />
|
||||
<nd ref="903" />
|
||||
<nd ref="900" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="1.0"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="1000" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="1001" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
|
||||
<node id="1002" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="2.0" />
|
||||
<node id="1003" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="2.0" />
|
||||
<way id="1000" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="1000" />
|
||||
<nd ref="1001" />
|
||||
<nd ref="1002" />
|
||||
<nd ref="1003" />
|
||||
<nd ref="1000" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="2.0"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
<node id="1100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
|
||||
<node id="1101" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
|
||||
<node id="1102" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="5.0" />
|
||||
<node id="1103" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="5.0" />
|
||||
<way id="1100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
|
||||
<nd ref="1100" />
|
||||
<nd ref="1101" />
|
||||
<nd ref="1102" />
|
||||
<nd ref="1103" />
|
||||
<nd ref="1100" />
|
||||
<tag k="test:section" v="poly-area"/>
|
||||
<tag k="test:id" v="5.0"/>
|
||||
<tag k="area" v="yes"/>
|
||||
</way>
|
||||
|
||||
</osm>
|
||||
6 tests/scenes/data/road-with-alley.wkt (Normal file)
@@ -0,0 +1,6 @@
n-main-east | POINT(1.0024481 2.0003542)
n-main-west | POINT(1.001552 2.0002662)
n-alley | POINT(1.0019235 2.0005463)
n-corner | POINT(1.0019235 2.0003542)
w-alley | LINESTRING(1.0019594 2.0003086,1.0019594 2.0005756)
w-main | LINESTRING(1.0013435 2.0003118,1.0016759 2.0003053,1.0019594 2.0003086,1.0021255 2.0003151,1.0023699 2.0003118,1.0026078 2.0002988)
6 tests/scenes/data/roads-with-pois.wkt (Normal file)
@@ -0,0 +1,6 @@
p-N2 | POINT(1.0003904 2.0003399)
p-S1 | POINT(1.0008104 2.0002927)
p-N1 | POINT(1.0005321 2.0005288)
p-S2 | POINT(1.0006398 2.0001064)
w-north | LINESTRING(1.0001174 2.0004055,1.0004298 2.0003976,1.0006608 2.0004579,1.0010624 2.0005419)
w-south | LINESTRING(1.0001384 2.0001903,1.0007212 2.0001982,1.0010677 2.0002192)
300 tests/scenes/data/roads.osm (Normal file)
@@ -0,0 +1,300 @@
|
||||
<?xml version='1.0' encoding='UTF-8'?>
|
||||
<osm version='0.6' upload='false' generator='JOSM'>
|
||||
<node id='-204' action='modify' visible='true' lat='2.000651586300547' lon='1.005746444902722'>
|
||||
<tag k='name' v='split-road' />
|
||||
</node>
|
||||
<node id='-189' action='modify' visible='true' lat='2.0005350827634585' lon='1.0057490943252971' />
|
||||
<node id='-187' action='modify' visible='true' lat='2.0005430261867043' lon='1.0058974619895207' />
|
||||
<node id='-185' action='modify' visible='true' lat='2.0005430261867043' lon='1.0060511284988949' />
|
||||
<node id='-183' action='modify' visible='true' lat='2.0005324349557014' lon='1.0061306111761574' />
|
||||
<node id='-174' action='modify' visible='true' lat='2.0005244915324045' lon='1.00551064629351' />
|
||||
<node id='-172' action='modify' visible='true' lat='2.0005668564562127' lon='1.0056086749288005' />
|
||||
<node id='-170' action='modify' visible='true' lat='2.000561560840796' lon='1.0056855081834875' />
|
||||
<node id='-168' action='modify' visible='true' lat='2.0005006612622807' lon='1.0057358472124205' />
|
||||
<node id='-166' action='modify' visible='true' lat='2.000505956877894' lon='1.0059107091023978' />
|
||||
<node id='-164' action='modify' visible='true' lat='2.0005112524934896' lon='1.0060537779214704' />
|
||||
<node id='-162' action='modify' visible='true' lat='2.0004953656466506' lon='1.006135910021308' />
|
||||
<node id='-160' action='modify' visible='true' lat='2.000513900301281' lon='1.0062842776855314' />
|
||||
<node id='-158' action='modify' visible='true' lat='2.000471535376104' lon='1.0063717086305204' />
|
||||
<node id='-156' action='modify' visible='true' lat='2.000296780048186' lon='1.0063584615176433' />
|
||||
<node id='-154' action='modify' visible='true' lat='2.000262358542008' lon='1.006281628262956' />
|
||||
<node id='-152' action='modify' visible='true' lat='2.000339144977876' lon='1.0061994961631182' />
|
||||
<node id='-150' action='modify' visible='true' lat='2.0003576796342712' lon='1.0061041169504032' />
|
||||
<node id='-148' action='modify' visible='true' lat='2.0003497362101275' lon='1.0060034388925374' />
|
||||
<node id='-146' action='modify' visible='true' lat='2.000325905937466' lon='1.0058868642992191' />
|
||||
<node id='-144' action='modify' visible='true' lat='2.000280893199271' lon='1.0057941345090795' />
|
||||
<node id='-142' action='modify' visible='true' lat='2.0002782453911037' lon='1.0056351691545544' />
|
||||
<node id='-141' action='modify' visible='true' lat='2.0003603274423103' lon='1.005470904954879' />
|
||||
<node id='-139' action='modify' visible='true' lat='2.0007265421773432' lon='1.0047054313390744'>
|
||||
<tag k='name' v='points-on-road' />
|
||||
</node>
|
||||
<node id='-112' action='modify' visible='true' lat='2.0005155253763816' lon='1.0049220138358423' />
|
||||
<node id='-110' action='modify' visible='true' lat='2.0005155253763816' lon='1.0049220138358423'>
|
||||
<tag k='test:id' v='n-N-unglued' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</node>
|
||||
<node id='-108' action='modify' visible='true' lat='2.0002948553437463' lon='1.0046258759080025'>
|
||||
<tag k='test:id' v='n-S-unglued' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</node>
|
||||
<node id='-106' action='modify' visible='true' lat='2.0002948553437463' lon='1.0046258759080025' />
|
||||
<node id='-104' action='modify' visible='true' lat='2.0006117740392657' lon='1.0050661381425037'>
|
||||
<tag k='test:id' v='n-NE' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</node>
|
||||
<node id='-102' action='modify' visible='true' lat='2.000334854794143' lon='1.0051338707939657'>
|
||||
<tag k='test:id' v='n-SE' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</node>
|
||||
<node id='-100' action='modify' visible='true' lat='2.0006240815601832' lon='1.0053155174501582' />
|
||||
<node id='-98' action='modify' visible='true' lat='2.0004086999307416' lon='1.0047582624540412'>
|
||||
<tag k='test:id' v='n-NW' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</node>
|
||||
<node id='-96' action='modify' visible='true' lat='2.0003840848855767' lon='1.0046258759080025' />
|
||||
<node id='-94' action='modify' visible='true' lat='2.0004302380949586' lon='1.004499646875733' />
|
||||
<node id='-92' action='modify' visible='true' lat='2.0003687004821606' lon='1.0053278324776966' />
|
||||
<node id='-90' action='modify' visible='true' lat='2.0002640865351053' lon='1.0049368303533495' />
|
||||
<node id='-88' action='modify' visible='true' lat='2.000291778462916' lon='1.004825995105503' />
|
||||
<node id='-86' action='modify' visible='true' lat='2.0003563929593238' lon='1.004727474885195'>
|
||||
<tag k='test:id' v='n-SW' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</node>
|
||||
<node id='-84' action='modify' visible='true' lat='2.0002240870829975' lon='1.00452427693081' />
|
||||
<node id='-82' action='modify' visible='true' lat='2.000715618411992' lon='1.0018322734314236'>
|
||||
<tag k='name' v='road-with-alley' />
|
||||
</node>
|
||||
<node id='-80' action='modify' visible='true' lat='2.0007286441385155' lon='1.0004669962874884'>
|
||||
<tag k='name' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-78' action='modify' visible='true' lat='2.000354154459697' lon='1.002448114434296'>
|
||||
<tag k='test:id' v='n-main-east' />
|
||||
<tag k='test:section' v='road-with-alley' />
|
||||
</node>
|
||||
<node id='-76' action='modify' visible='true' lat='2.000266230783582' lon='1.0015520494830263'>
|
||||
<tag k='test:id' v='n-main-west' />
|
||||
<tag k='test:section' v='road-with-alley' />
|
||||
</node>
|
||||
<node id='-74' action='modify' visible='true' lat='2.000546283957771' lon='1.0019235091355527'>
|
||||
<tag k='test:id' v='n-alley' />
|
||||
<tag k='test:section' v='road-with-alley' />
|
||||
</node>
|
||||
<node id='-72' action='modify' visible='true' lat='2.000354154459697' lon='1.0019235091355527'>
|
||||
<tag k='test:id' v='n-corner' />
|
||||
<tag k='test:section' v='road-with-alley' />
|
||||
</node>
|
||||
<node id='-70' action='modify' visible='true' lat='2.0005755918453296' lon='1.0019593517336036' />
|
||||
<node id='-68' action='modify' visible='true' lat='2.000298795108618' lon='1.0026077769165225' />
|
||||
<node id='-66' action='modify' visible='true' lat='2.000311820838452' lon='1.0023699124021854' />
|
||||
<node id='-64' action='modify' visible='true' lat='2.0003150772708946' lon='1.0021255310518389' />
|
||||
<node id='-62' action='modify' visible='true' lat='2.0003085644060037' lon='1.0019593517336036' />
|
||||
<node id='-60' action='modify' visible='true' lat='2.000305307973548' lon='1.0016758693672019' />
|
||||
<node id='-58' action='modify' visible='true' lat='2.000311820838452' lon='1.0013435107307307' />
|
||||
<node id='-56' action='modify' visible='true' lat='2.0004054696330322' lon='1.0001173628501097'>
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-54' action='modify' visible='true' lat='2.000397598928471' lon='1.0004297578433892'>
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-52' action='modify' visible='true' lat='2.0004579409958114' lon='1.0006607726283354'>
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-50' action='modify' visible='true' lat='2.0005418951727663' lon='1.0010624233339804'>
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-48' action='modify' visible='true' lat='2.000190337028091' lon='1.0001383641941959'>
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-46' action='modify' visible='true' lat='2.000198207733647' lon='1.000721151492583'>
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-44' action='modify' visible='true' lat='2.000219196281612' lon='1.0010676736700022'>
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-42' action='modify' visible='true' lat='2.000339880427198' lon='1.000390380323228'>
|
||||
<tag k='test:id' v='p-N2' />
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-40' action='modify' visible='true' lat='2.000292656197374' lon='1.0008104072049482'>
|
||||
<tag k='test:id' v='p-S1' />
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-38' action='modify' visible='true' lat='2.0005287773329004' lon='1.0005321393958087'>
|
||||
<tag k='test:id' v='p-N1' />
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-36' action='modify' visible='true' lat='2.000106382833144' lon='1.0006397712842492'>
|
||||
<tag k='test:id' v='p-S2' />
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</node>
|
||||
<node id='-34' action='modify' visible='true' lat='2.0007211692424525' lon='1.0035828658185688'>
|
||||
<tag k='name' v='building-on-street-corner' />
|
||||
</node>
|
||||
<node id='-32' action='modify' visible='true' lat='2.000231635335803' lon='1.0031759205058477' />
|
||||
<node id='-30' action='modify' visible='true' lat='2.000221087674047' lon='1.0040360790429201' />
|
||||
<node id='-28' action='modify' visible='true' lat='2.0002263615049336' lon='1.0042735461237067' />
|
||||
<node id='-26' action='modify' visible='true' lat='2.000105063390253' lon='1.0040413560891597' />
|
||||
<node id='-24' action='modify' visible='true' lat='2.0006377202618473' lon='1.0040363991494512' />
|
||||
<node id='-22' action='modify' visible='true' lat='2.0004388569487612' lon='1.0039042871025967' />
|
||||
<node id='-20' action='modify' visible='true' lat='2.0004385547672516' lon='1.0040022536164286' />
|
||||
<node id='-18' action='modify' visible='true' lat='2.0002343878022306' lon='1.0040016230872442' />
|
||||
<node id='-16' action='modify' visible='true' lat='2.000234689983778' lon='1.0039036565734125' />
|
||||
<node id='-14' action='modify' visible='true' lat='2.0003240388594246' lon='1.0040018999567464'>
|
||||
<tag k='test:id' v='n-edge-NS' />
|
||||
<tag k='test:section' v='building-on-street-corner' />
|
||||
</node>
|
||||
<node id='-12' action='modify' visible='true' lat='2.000354798021768' lon='1.0039384858598128'>
|
||||
<tag k='test:id' v='n-inner' />
|
||||
<tag k='test:section' v='building-on-street-corner' />
|
||||
</node>
|
||||
<node id='-10' action='modify' visible='true' lat='2.0004676051467527' lon='1.0039478144477645'>
|
||||
<tag k='test:id' v='n-outer' />
|
||||
<tag k='test:section' v='building-on-street-corner' />
|
||||
</node>
|
||||
<node id='-8' action='modify' visible='true' lat='2.000234516370527' lon='1.0039599415620857'>
|
||||
<tag k='test:id' v='n-edge-WE' />
|
||||
<tag k='test:section' v='building-on-street-corner' />
|
||||
</node>
|
||||
<node id='100000' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='2.0' lon='1.0' />
|
||||
<node id='100001' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='3.0' lon='1.0' />
|
||||
<node id='100002' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='3.0' lon='2.0' />
|
||||
<node id='100003' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='2.0' lon='2.0' />
|
||||
<way id='-195' action='modify' visible='true'>
|
||||
<nd ref='-170' />
|
||||
<nd ref='-172' />
|
||||
<nd ref='-174' />
|
||||
<tag k='test:id' v='w-5' />
|
||||
<tag k='test:section' v='split-road' />
|
||||
</way>
|
||||
<way id='-194' action='modify' visible='true'>
|
||||
<nd ref='-160' />
|
||||
<nd ref='-162' />
|
||||
<nd ref='-164' />
|
||||
<nd ref='-166' />
|
||||
<nd ref='-168' />
|
||||
<nd ref='-170' />
|
||||
<tag k='test:id' v='w-4a' />
|
||||
<tag k='test:section' v='split-road' />
|
||||
</way>
|
||||
<way id='-193' action='modify' visible='true'>
|
||||
<nd ref='-152' />
|
||||
<nd ref='-154' />
|
||||
<nd ref='-156' />
|
||||
<nd ref='-158' />
|
||||
<nd ref='-160' />
|
||||
<tag k='test:id' v='w-3' />
|
||||
<tag k='test:section' v='split-road' />
|
||||
</way>
|
||||
<way id='-192' action='modify' visible='true'>
|
||||
<nd ref='-144' />
|
||||
<nd ref='-146' />
|
||||
<nd ref='-148' />
|
||||
<nd ref='-150' />
|
||||
<nd ref='-152' />
|
||||
<tag k='test:id' v='w-2' />
|
||||
<tag k='test:section' v='split-road' />
|
||||
</way>
|
||||
<way id='-184' action='modify' visible='true'>
|
||||
<nd ref='-160' />
|
||||
<nd ref='-183' />
|
||||
<nd ref='-185' />
|
||||
<nd ref='-187' />
|
||||
<nd ref='-189' />
|
||||
<nd ref='-170' />
|
||||
<tag k='test:id' v='w-4b' />
|
||||
<tag k='test:section' v='split-road' />
|
||||
</way>
|
||||
<way id='-143' action='modify' visible='true'>
|
||||
<nd ref='-141' />
|
||||
<nd ref='-142' />
|
||||
<nd ref='-144' />
|
||||
<tag k='test:id' v='w-1' />
|
||||
<tag k='test:section' v='split-road' />
|
||||
</way>
|
||||
<way id='-134' action='modify' visible='true'>
|
||||
<nd ref='-94' />
|
||||
<nd ref='-96' />
|
||||
<nd ref='-98' />
|
||||
<nd ref='-112' />
|
||||
<nd ref='-104' />
|
||||
<nd ref='-100' />
|
||||
<tag k='test:id' v='w-north' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</way>
|
||||
<way id='-132' action='modify' visible='true'>
|
||||
<nd ref='-84' />
|
||||
<nd ref='-106' />
|
||||
<nd ref='-86' />
|
||||
<nd ref='-88' />
|
||||
<nd ref='-90' />
|
||||
<nd ref='-102' />
|
||||
<nd ref='-92' />
|
||||
<tag k='test:id' v='w-south' />
|
||||
<tag k='test:section' v='points-on-roads' />
|
||||
</way>
|
||||
<way id='-130' action='modify' visible='true'>
|
||||
<nd ref='-62' />
|
||||
<nd ref='-70' />
|
||||
<tag k='test:id' v='w-alley' />
|
||||
<tag k='test:section' v='road-with-alley' />
|
||||
</way>
|
||||
<way id='-128' action='modify' visible='true'>
|
||||
<nd ref='-58' />
|
||||
<nd ref='-60' />
|
||||
<nd ref='-62' />
|
||||
<nd ref='-64' />
|
||||
<nd ref='-66' />
|
||||
<nd ref='-68' />
|
||||
<tag k='test:id' v='w-main' />
|
||||
<tag k='test:section' v='road-with-alley' />
|
||||
</way>
|
||||
<way id='-126' action='modify' visible='true'>
|
||||
<nd ref='-56' />
|
||||
<nd ref='-54' />
|
||||
<nd ref='-52' />
|
||||
<nd ref='-50' />
|
||||
<tag k='test:id' v='w-north' />
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</way>
|
||||
<way id='-124' action='modify' visible='true'>
|
||||
<nd ref='-48' />
|
||||
<nd ref='-46' />
|
||||
<nd ref='-44' />
|
||||
<tag k='test:id' v='w-south' />
|
||||
<tag k='test:section' v='roads-with-pois' />
|
||||
</way>
|
||||
<way id='-122' action='modify' visible='true'>
|
||||
<nd ref='-32' />
|
||||
<nd ref='-30' />
|
||||
<nd ref='-28' />
|
||||
<tag k='test:id' v='w-WE' />
|
||||
<tag k='test:section' v='building-on-street-corner' />
|
||||
</way>
|
||||
<way id='-120' action='modify' visible='true'>
|
||||
<nd ref='-26' />
|
||||
<nd ref='-30' />
|
||||
<nd ref='-24' />
|
||||
<tag k='test:id' v='w-NS' />
|
||||
<tag k='test:section' v='building-on-street-corner' />
|
||||
</way>
|
||||
<way id='-118' action='modify' visible='true'>
|
||||
<nd ref='-22' />
|
||||
<nd ref='-20' />
|
||||
<nd ref='-14' />
|
||||
<nd ref='-18' />
|
||||
<nd ref='-8' />
|
||||
<nd ref='-16' />
|
||||
<nd ref='-22' />
|
||||
<tag k='area' v='yes' />
|
||||
<tag k='test:id' v='w-building' />
|
||||
<tag k='test:section' v='building-on-street-corner' />
|
||||
</way>
|
||||
<way id='100000' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1'>
|
||||
<nd ref='100000' />
|
||||
<nd ref='100001' />
|
||||
<nd ref='100002' />
|
||||
<nd ref='100003' />
|
||||
<nd ref='100000' />
|
||||
<tag k='note' v='test area, do not leave' />
|
||||
</way>
|
||||
</osm>
|
||||
6 tests/scenes/data/split-road.wkt (Normal file)
@@ -0,0 +1,6 @@
w-5 | LINESTRING(1.0056855 2.0005616,1.0056087 2.0005669,1.0055106 2.0005245)
w-4a | LINESTRING(1.0062843 2.0005139,1.0061359 2.0004954,1.0060538 2.0005113,1.0059107 2.000506,1.0057358 2.0005007,1.0056855 2.0005616)
w-3 | LINESTRING(1.0061995 2.0003391,1.0062816 2.0002624,1.0063585 2.0002968,1.0063717 2.0004715,1.0062843 2.0005139)
w-2 | LINESTRING(1.0057941 2.0002809,1.0058869 2.0003259,1.0060034 2.0003497,1.0061041 2.0003577,1.0061995 2.0003391)
w-4b | LINESTRING(1.0062843 2.0005139,1.0061306 2.0005324,1.0060511 2.000543,1.0058975 2.000543,1.0057491 2.0005351,1.0056855 2.0005616)
w-1 | LINESTRING(1.0054709 2.0003603,1.0056352 2.0002782,1.0057941 2.0002809)
232 tests/steps/api_result.py (Normal file)
@@ -0,0 +1,232 @@
|
||||
""" Steps for checking the results of queries.
|
||||
"""
|
||||
|
||||
from nose.tools import *
|
||||
from lettuce import *
|
||||
from tidylib import tidy_document
|
||||
from collections import OrderedDict
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from xml.dom.minidom import parseString
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def _parse_xml():
|
||||
""" Puts the DOM structure into more convenient python
|
||||
with a similar structure as the json document, so
|
||||
that the same the semantics can be used. It does not
|
||||
check if the content is valid (or at least not more than
|
||||
necessary to transform it into a dict structure).
|
||||
"""
|
||||
page = parseString(world.page).documentElement
|
||||
|
||||
# header info
|
||||
world.result_header = OrderedDict(page.attributes.items())
|
||||
logger.debug('Result header: %r' % (world.result_header))
|
||||
world.results = []
|
||||
|
||||
# results
|
||||
if page.nodeName == 'searchresults':
|
||||
for node in page.childNodes:
|
||||
if node.nodeName != "#text":
|
||||
assert_equals(node.nodeName, 'place', msg="Unexpected element '%s'" % node.nodeName)
|
||||
newresult = OrderedDict(node.attributes.items())
|
||||
assert_not_in('address', newresult)
|
||||
assert_not_in('geokml', newresult)
|
||||
address = OrderedDict()
|
||||
for sub in node.childNodes:
|
||||
if sub.nodeName == 'geokml':
|
||||
newresult['geokml'] = sub.childNodes[0].toxml()
|
||||
elif sub.nodeName == '#text':
|
||||
pass
|
||||
else:
|
||||
address[sub.nodeName] = sub.firstChild.nodeValue.strip()
|
||||
if address:
|
||||
newresult['address'] = address
|
||||
world.results.append(newresult)
|
||||
elif page.nodeName == 'reversegeocode':
|
||||
haserror = False
|
||||
address = {}
|
||||
for node in page.childNodes:
|
||||
if node.nodeName == 'result':
|
||||
assert_equals(len(world.results), 0)
|
||||
assert (not haserror)
|
||||
world.results.append(OrderedDict(node.attributes.items()))
|
||||
assert_not_in('display_name', world.results[0])
|
||||
assert_not_in('address', world.results[0])
|
||||
world.results[0]['display_name'] = node.firstChild.nodeValue.strip()
|
||||
elif node.nodeName == 'error':
|
||||
assert_equals(len(world.results), 0)
|
||||
haserror = True
|
||||
elif node.nodeName == 'addressparts':
|
||||
assert (not haserror)
|
||||
address = OrderedDict()
|
||||
for sub in node.childNodes:
|
||||
address[sub.nodeName] = sub.firstChild.nodeValue.strip()
|
||||
world.results[0]['address'] = address
|
||||
elif node.nodeName == "#text":
|
||||
pass
|
||||
else:
|
||||
assert False, "Unknown content '%s' in XML" % node.nodeName
|
||||
else:
|
||||
assert False, "Unknown document node name %s in XML" % page.nodeName
|
||||
|
||||
logger.debug("The following was parsed out of XML:")
|
||||
logger.debug(world.results)
|
||||
|
||||
@step(u'a HTTP (\d+) is returned')
|
||||
def api_result_http_error(step, error):
|
||||
assert_equals(world.returncode, int(error))
|
||||
|
||||
@step(u'the result is valid( \w+)?')
|
||||
def api_result_is_valid(step, fmt):
|
||||
assert_equals(world.returncode, 200)
|
||||
|
||||
if world.response_format == 'html':
|
||||
document, errors = tidy_document(world.page,
|
||||
options={'char-encoding' : 'utf8'})
|
||||
assert(len(errors) == 0), "Errors found in HTML document:\n%s" % errors
|
||||
world.results = document
|
||||
elif world.response_format == 'xml':
|
||||
_parse_xml()
|
||||
elif world.response_format == 'json':
|
||||
world.results = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(world.page)
|
||||
else:
|
||||
assert False, "Unknown page format: %s" % (world.response_format)
|
||||
|
||||
if fmt:
|
||||
assert_equals (fmt.strip(), world.response_format)
|
||||
|
||||
|
||||
def compare(operator, op1, op2):
|
||||
if operator == 'less than':
|
||||
return op1 < op2
|
||||
elif operator == 'more than':
|
||||
return op1 > op2
|
||||
elif operator == 'exactly':
|
||||
return op1 == op2
|
||||
elif operator == 'at least':
|
||||
return op1 >= op2
|
||||
elif operator == 'at most':
|
||||
return op1 <= op2
|
||||
else:
|
||||
raise Exception("unknown operator '%s'" % operator)
|
||||
|
||||
@step(u'(less than|more than|exactly|at least|at most) (\d+) results? (?:is|are) returned')
|
||||
def validate_result_number(step, operator, number):
|
||||
step.given('the result is valid')
|
||||
numres = len(world.results)
|
||||
assert compare(operator, numres, int(number)), \
|
||||
"Bad number of results: expected %s %s, got %d." % (operator, number, numres)
|
||||
|
||||
@step(u'result (\d+) has( not)? attributes (\S+)')
|
||||
def search_check_for_result_attribute(step, num, invalid, attrs):
|
||||
num = int(num)
|
||||
step.given('at least %d results are returned' % (num + 1))
|
||||
res = world.results[num]
|
||||
for attr in attrs.split(','):
|
||||
if invalid:
|
||||
assert_not_in(attr.strip(), res)
|
||||
else:
|
||||
assert_in(attr.strip(),res)
|
||||
|
||||
@step(u'there is a json wrapper "([^"]*)"')
|
||||
def api_result_check_json_wrapper(step, wrapper):
|
||||
step.given('the result is valid json')
|
||||
assert_equals(world.json_callback, wrapper)
|
||||
|
||||
@step(u'result header contains')
|
||||
def api_result_header_contains(step):
|
||||
step.given('the result is valid')
|
||||
for line in step.hashes:
|
||||
assert_in(line['attr'], world.result_header)
|
||||
m = re.match("%s$" % (line['value'],), world.result_header[line['attr']])
|
||||
|
||||
@step(u'results contain$')
def api_result_contains(step):
    step.given('at least 1 result is returned')
    for line in step.hashes:
        if 'ID' in line:
            reslist = (world.results[int(line['ID'])],)
        else:
            reslist = world.results
        for k,v in line.iteritems():
            if k == 'latlon':
                for curres in reslist:
                    world.match_geometry((float(curres['lat']), float(curres['lon'])), v)
            elif k != 'ID':
                for curres in reslist:
                    assert_in(k, curres)
                    if v[0] in '<>=':
                        # mathematical operation
                        evalexp = '%s %s' % (curres[k], v)
                        res = eval(evalexp)
                        logger.debug('Evaluating: %s = %s' % (res, evalexp))
                        assert_true(res, "Evaluation failed: %s" % (evalexp, ))
                    else:
                        # regex match
                        m = re.match("%s$" % (v,), curres[k])
                        assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))


@step(u'result addresses contain$')
def api_result_address_contains(step):
    step.given('the result is valid')
    for line in step.hashes:
        if 'ID' in line:
            reslist = (world.results[int(line['ID'])],)
        else:
            reslist = world.results
        for k,v in line.iteritems():
            if k != 'ID':
                for res in reslist:
                    curres = res['address']
                    assert_in(k, curres)
                    m = re.match("%s$" % (v,), curres[k])
                    assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))


@step(u'address of result (\d+) contains')
def api_result_address_exact(step, resid):
    resid = int(resid)
    step.given('at least %d results are returned' % (resid + 1))
    addr = world.results[resid]['address']
    for line in step.hashes:
        assert_in(line['type'], addr)
        assert_equals(line['value'], addr[line['type']])

@step(u'address of result (\d+) does not contain (.*)')
def api_result_address_details_missing(step, resid, types):
    resid = int(resid)
    step.given('at least %d results are returned' % (resid + 1))
    addr = world.results[resid]['address']
    for t in types.split(','):
        assert_not_in(t.strip(), addr)


@step(u'address of result (\d+) is')
def api_result_address_in_order(step, resid):
    resid = int(resid)
    step.given('at least %d results are returned' % (resid + 1))
    result = world.results[resid]
    linenr = 0
    assert_equals(len(step.hashes), len(result['address']))
    for k,v in result['address'].iteritems():
        assert_equals(step.hashes[linenr]['type'], k)
        assert_equals(step.hashes[linenr]['value'], v)
        linenr += 1


@step('there are( no)? duplicates')
def api_result_check_for_duplicates(step, nodups=None):
    step.given('at least 1 result is returned')
    resarr = []
    for res in world.results:
        resarr.append((res['osm_type'], res['class'],
                       res['type'], res['display_name']))

    if nodups is None:
        assert len(resarr) > len(set(resarr))
    else:
        assert_equal(len(resarr), len(set(resarr)))

tests/steps/api_setup.py (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
""" Steps for setting up and sending API requests.
|
||||
"""
|
||||
|
||||
from nose.tools import *
|
||||
from lettuce import *
|
||||
import urllib
|
||||
import urllib2
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
def api_call(requesttype):
|
||||
world.json_callback = None
|
||||
data = urllib.urlencode(world.params)
|
||||
url = "%s/%s?%s" % (world.config.base_url, requesttype, data)
|
||||
req = urllib2.Request(url=url, headers=world.header)
|
||||
try:
|
||||
fd = urllib2.urlopen(req)
|
||||
world.page = fd.read()
|
||||
world.returncode = 200
|
||||
except urllib2.HTTPError, ex:
|
||||
world.returncode = ex.code
|
||||
world.page = None
|
||||
return
|
||||
|
||||
pageinfo = fd.info()
|
||||
assert_equal('utf-8', pageinfo.getparam('charset').lower())
|
||||
pagetype = pageinfo.gettype()
|
||||
|
||||
fmt = world.params.get('format')
|
||||
if fmt == 'html':
|
||||
assert_equals('text/html', pagetype)
|
||||
world.response_format = fmt
|
||||
elif fmt == 'xml':
|
||||
assert_equals('text/xml', pagetype)
|
||||
world.response_format = fmt
|
||||
elif fmt in ('json', 'jsonv2'):
|
||||
if 'json_callback' in world.params:
|
||||
world.json_callback = world.params['json_callback']
|
||||
assert world.page.startswith(world.json_callback + '(')
|
||||
assert world.page.endswith(')')
|
||||
world.page = world.page[(len(world.json_callback)+1):-1]
|
||||
assert_equals('application/javascript', pagetype)
|
||||
else:
|
||||
assert_equals('application/json', pagetype)
|
||||
world.response_format = 'json'
|
||||
else:
|
||||
if requesttype == 'reverse':
|
||||
assert_equals('text/xml', pagetype)
|
||||
world.response_format = 'xml'
|
||||
else:
|
||||
assert_equals('text/html', pagetype)
|
||||
world.response_format = 'html'
|
||||
logger.debug("Page received (%s):" % world.response_format)
|
||||
logger.debug(world.page)
|
||||
|
||||
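# Sketch (added for illustration, not in the original file): the JSONP handling
# above simply strips the callback wrapper before the JSON is parsed. For a
# request with json_callback=foo the raw page
#
#   foo({"place_id": 1, "display_name": "..."})
#
# is reduced to
#
#   {"place_id": 1, "display_name": "..."}
#
# by world.page[(len(world.json_callback)+1):-1], and the content type is
# expected to be application/javascript instead of application/json.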
api_setup_prepare_params(None)
|
||||
|
||||
@before.each_scenario
|
||||
def api_setup_prepare_params(scenario):
|
||||
world.results = []
|
||||
world.params = {}
|
||||
world.header = {}
|
||||
|
||||
@step(u'the request parameters$')
|
||||
def api_setup_parameters(step):
|
||||
"""Define the parameters of the request as a hash.
|
||||
Resets parameter list.
|
||||
"""
|
||||
world.params = step.hashes[0]
|
||||
|
||||
@step(u'the HTTP header$')
|
||||
def api_setup_header(step):
|
||||
"""Define additional HTTP header parameters as a hash.
|
||||
Resets parameter list.
|
||||
"""
|
||||
world.header = step.hashes[0]
|
||||
|
||||
|
||||
@step(u'sending( \w+)? search query "([^"]*)"( with address)?')
|
||||
def api_setup_search(step, fmt, query, doaddr):
|
||||
world.params['q'] = query.encode('utf8')
|
||||
if doaddr:
|
||||
world.params['addressdetails'] = 1
|
||||
if fmt:
|
||||
world.params['format'] = fmt.strip()
|
||||
api_call('search')
|
||||
|
||||
@step(u'sending( \w+)? structured query( with address)?$')
|
||||
def api_setup_structured_search(step, fmt, doaddr):
|
||||
world.params.update(step.hashes[0])
|
||||
if doaddr:
|
||||
world.params['addressdetails'] = 1
|
||||
if fmt:
|
||||
world.params['format'] = fmt.strip()
|
||||
api_call('search')
|
||||
|
||||
@step(u'looking up (\w+ )?coordinates ([-\d.]+),([-\d.]+)')
|
||||
def api_setup_reverse(step, fmt, lat, lon):
|
||||
world.params['lat'] = lat
|
||||
world.params['lon'] = lon
|
||||
if fmt and fmt.strip():
|
||||
world.params['format'] = fmt.strip()
|
||||
api_call('reverse')
|
||||
|
||||
@step(u'looking up details for ([NRW]?\d+)')
|
||||
def api_setup_details(step, obj):
|
||||
if obj[0] in ('N', 'R', 'W'):
|
||||
# an osm id
|
||||
world.params['osmtype'] = obj[0]
|
||||
world.params['osmid'] = obj[1:]
|
||||
else:
|
||||
world.params['place_id'] = obj
|
||||
api_call('details')
|
||||
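For illustration only (not part of the committed test code): the request-builder steps above all end up in api_call(), which assembles a plain GET request from world.params. A minimal sketch of what a JSON search with address details produces, assuming the default server URL from terrain.py:

    import urllib

    params = {'q': 'Manchester', 'format': 'json', 'addressdetails': 1}
    base_url = 'http://localhost/nominatim'    # default NOMINATIM_SERVER
    url = "%s/search?%s" % (base_url, urllib.urlencode(params))
    # -> http://localhost/nominatim/search?q=Manchester&format=json&addressdetails=1
    # (parameter order may differ; urlencode does not guarantee dict ordering)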
tests/steps/db_results.py (new file, 124 lines)
@@ -0,0 +1,124 @@
|
||||
""" Steps for checking the DB after import and update tests.
|
||||
|
||||
There are two groups of test here. The first group tests
|
||||
the contents of db tables directly, the second checks
|
||||
query results by using the command line query tool.
|
||||
"""
|
||||
|
||||
from nose.tools import *
|
||||
from lettuce import *
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
import psycopg2.extras
|
||||
import os
|
||||
import subprocess
|
||||
import random
|
||||
import json
|
||||
import re
|
||||
import logging
|
||||
from collections import OrderedDict
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@step(u'table placex contains as names for (N|R|W)(\d+)')
|
||||
def check_placex_names(step, osmtyp, osmid):
|
||||
""" Check for the exact content of the name hstaore in placex.
|
||||
"""
|
||||
cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
|
||||
cur.execute('SELECT name FROM placex where osm_type = %s and osm_id =%s', (osmtyp, int(osmid)))
|
||||
for line in cur:
|
||||
names = dict(line['name'])
|
||||
for name in step.hashes:
|
||||
assert_in(name['k'], names)
|
||||
assert_equals(names[name['k']], name['v'])
|
||||
del names[name['k']]
|
||||
assert_equals(len(names), 0)
|
||||
|
||||
|
||||
|
||||
@step(u'table ([a-z_]+) contains$')
|
||||
def check_placex_content(step, tablename):
|
||||
""" check that the given lines are in the given table
|
||||
Entries are searched by osm_type/osm_id and then all
|
||||
given columns are tested. If there is more than one
|
||||
line for an OSM object, they must match in these columns.
|
||||
"""
|
||||
cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
|
||||
for line in step.hashes:
|
||||
osmtype, osmid, cls = world.split_id(line['object'])
|
||||
q = 'SELECT *'
|
||||
if tablename == 'placex':
|
||||
q = q + ", ST_X(centroid) as clat, ST_Y(centroid) as clon"
|
||||
q = q + ' FROM %s where osm_type = %%s and osm_id = %%s' % (tablename,)
|
||||
if cls is None:
|
||||
params = (osmtype, osmid)
|
||||
else:
|
||||
q = q + ' and class = %s'
|
||||
params = (osmtype, osmid, cls)
|
||||
cur.execute(q, params)
|
||||
assert(cur.rowcount > 0)
|
||||
for res in cur:
|
||||
for k,v in line.iteritems():
|
||||
if not k == 'object':
|
||||
assert_in(k, res)
|
||||
if type(res[k]) is dict:
|
||||
val = world.make_hash(v)
|
||||
assert_equals(res[k], val)
|
||||
elif k in ('parent_place_id', 'linked_place_id'):
|
||||
pid = world.get_placeid(v)
|
||||
assert_equals(pid, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, pid, res[k]))
|
||||
elif k == 'centroid':
|
||||
world.match_geometry((res['clat'], res['clon']), v)
|
||||
else:
|
||||
assert_equals(str(res[k]), v, "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, str(res[k]), v))
|
||||
|
||||
@step(u'table (placex?) has no entry for (N|R|W)(\d+)(:\w+)?')
|
||||
def check_placex_missing(step, tablename, osmtyp, osmid, placeclass):
|
||||
cur = world.conn.cursor()
|
||||
q = 'SELECT count(*) FROM %s where osm_type = %%s and osm_id = %%s' % (tablename, )
|
||||
args = [osmtyp, int(osmid)]
|
||||
if placeclass is not None:
|
||||
q = q + ' and class = %s'
|
||||
args.append(placeclass[1:])
|
||||
cur.execute(q, args)
|
||||
numres = cur.fetchone()[0]
|
||||
assert_equals (numres, 0)
|
||||
|
||||
@step(u'search_name table contains$')
|
||||
def check_search_name_content(step):
|
||||
cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
|
||||
for line in step.hashes:
|
||||
placeid = world.get_placeid(line['place_id'])
|
||||
cur.execute('SELECT * FROM search_name WHERE place_id = %s', (placeid,))
|
||||
assert(cur.rowcount > 0)
|
||||
for res in cur:
|
||||
for k,v in line.iteritems():
|
||||
if k in ('search_rank', 'address_rank'):
|
||||
assert_equals(int(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k]))
|
||||
elif k in ('importance'):
|
||||
assert_equals(float(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k]))
|
||||
elif k in ('name_vector', 'nameaddress_vector'):
|
||||
terms = [x.strip().replace('#', ' ') for x in v.split(',')]
|
||||
cur.execute('SELECT word_id, word_token FROM word, (SELECT unnest(%s) as term) t WHERE word_token = make_standard_name(t.term)', (terms,))
|
||||
assert cur.rowcount >= len(terms)
|
||||
for wid in cur:
|
||||
assert_in(wid['word_id'], res[k], "Missing term for %s/%s: %s" % (line['place_id'], k, wid['word_token']))
|
||||
elif k in ('country_code'):
|
||||
assert_equals(v, res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k]))
|
||||
elif k == 'place_id':
|
||||
pass
|
||||
else:
|
||||
raise Exception("Cannot handle field %s in search_name table" % (k, ))
|
||||
|
||||
|
||||
@step(u'table search_name has no entry for (.*)')
|
||||
def check_placex_missing(step, osmid):
|
||||
""" Checks if there is an entry in the search index for the
|
||||
given place object.
|
||||
"""
|
||||
cur = world.conn.cursor()
|
||||
placeid = world.get_placeid(osmid)
|
||||
cur.execute('SELECT count(*) FROM search_name WHERE place_id =%s', (placeid,))
|
||||
numres = cur.fetchone()[0]
|
||||
assert_equals (numres, 0)
|
||||
|
||||
tests/steps/db_setup.py (new file, 272 lines)
@@ -0,0 +1,272 @@
|
||||
""" Steps for setting up a test database with imports and updates.
|
||||
|
||||
There are two ways to state geometries for test data: with coordinates
|
||||
and via scenes.
|
||||
|
||||
Coordinates should be given as a wkt without the enclosing type name.
|
||||
|
||||
Scenes are prepared geometries which can be found in the scenes/data/
|
||||
directory. Each scene is saved in a .wkt file with its name, which
|
||||
contains a list of id/wkt pairs. A scene can be set globally
|
||||
for the following steps by using the step `the scene <scene name>`. Then each
|
||||
object should be referred to as `:<object id>`. A geometry can also
|
||||
be referred to without loading the scene by explicitly stating the
|
||||
scene: `<scene name>:<object id>`.
|
||||
"""
|
||||
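# Illustration (not part of the original file): a scene file
# scenes/data/<scene name>.wkt simply holds one "<id> | <wkt>" pair per line,
# for example
#
#   building-1 | POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))
#   road-a     | LINESTRING(0 0, 2 2)
#
# (object ids here are made up). After `the scene <scene name>` a geometry
# column may then refer to ':building-1', or to 'other-scene:road-a' for a
# scene that was not loaded globally; see load_scene()/get_scene_geometry()
# in terrain.py.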
|
||||
from nose.tools import *
|
||||
from lettuce import *
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
import psycopg2.extras
|
||||
import os
|
||||
import subprocess
|
||||
import random
|
||||
import base64
|
||||
|
||||
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
|
||||
|
||||
@before.each_scenario
|
||||
def setup_test_database(scenario):
|
||||
""" Creates a new test database from the template database
|
||||
that was set up earlier in terrain.py. Will be done only
|
||||
for scenarios whose feature is tagged with 'DB'.
|
||||
"""
|
||||
if scenario.feature.tags is not None and 'DB' in scenario.feature.tags:
|
||||
world.db_template_setup()
|
||||
world.write_nominatim_config(world.config.test_db)
|
||||
conn = psycopg2.connect(database=world.config.template_db)
|
||||
conn.set_isolation_level(0)
|
||||
cur = conn.cursor()
|
||||
cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
|
||||
cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
|
||||
conn.close()
|
||||
world.conn = psycopg2.connect(database=world.config.test_db)
|
||||
psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
|
||||
|
||||
@step('a wiped database')
|
||||
def db_setup_wipe_db(step):
|
||||
"""Explicit DB scenario setup only needed
|
||||
to work around a bug where scenario outlines don't call
|
||||
before_each_scenario correctly.
|
||||
"""
|
||||
if hasattr(world, 'conn'):
|
||||
world.conn.close()
|
||||
conn = psycopg2.connect(database=world.config.template_db)
|
||||
conn.set_isolation_level(0)
|
||||
cur = conn.cursor()
|
||||
cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
|
||||
cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
|
||||
conn.close()
|
||||
world.conn = psycopg2.connect(database=world.config.test_db)
|
||||
psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
|
||||
|
||||
|
||||
@after.each_scenario
|
||||
def tear_down_test_database(scenario):
|
||||
""" Drops any previously created test database.
|
||||
"""
|
||||
if hasattr(world, 'conn'):
|
||||
world.conn.close()
|
||||
if scenario.feature.tags is not None and 'DB' in scenario.feature.tags and not world.config.keep_scenario_db:
|
||||
conn = psycopg2.connect(database=world.config.template_db)
|
||||
conn.set_isolation_level(0)
|
||||
cur = conn.cursor()
|
||||
cur.execute('DROP DATABASE %s' % (world.config.test_db,))
|
||||
conn.close()
|
||||
|
||||
|
||||
def _format_placex_cols(cols, geomtype, force_name):
|
||||
if 'name' in cols:
|
||||
if cols['name'].startswith("'"):
|
||||
cols['name'] = world.make_hash(cols['name'])
|
||||
else:
|
||||
cols['name'] = { 'name' : cols['name'] }
|
||||
elif force_name:
|
||||
cols['name'] = { 'name' : base64.urlsafe_b64encode(os.urandom(int(random.random()*30))) }
|
||||
if 'extratags' in cols:
|
||||
cols['extratags'] = world.make_hash(cols['extratags'])
|
||||
if 'admin_level' not in cols:
|
||||
cols['admin_level'] = 100
|
||||
if 'geometry' in cols:
|
||||
coords = world.get_scene_geometry(cols['geometry'])
|
||||
if coords is None:
|
||||
coords = "'%s(%s)'::geometry" % (geomtype, cols['geometry'])
|
||||
else:
|
||||
coords = "'%s'::geometry" % coords.wkt
|
||||
cols['geometry'] = coords
|
||||
|
||||
|
||||
def _insert_place_table_nodes(places, force_name):
|
||||
cur = world.conn.cursor()
|
||||
for line in places:
|
||||
cols = dict(line)
|
||||
cols['osm_type'] = 'N'
|
||||
_format_placex_cols(cols, 'POINT', force_name)
|
||||
if 'geometry' in cols:
|
||||
coords = cols.pop('geometry')
|
||||
else:
|
||||
coords = "ST_Point(%f, %f)" % (random.random()*360 - 180, random.random()*180 - 90)
|
||||
|
||||
query = 'INSERT INTO place (%s,geometry) values(%s, ST_SetSRID(%s, 4326))' % (
|
||||
','.join(cols.iterkeys()),
|
||||
','.join(['%s' for x in range(len(cols))]),
|
||||
coords
|
||||
)
|
||||
cur.execute(query, cols.values())
|
||||
world.conn.commit()
|
||||
|
||||
|
||||
def _insert_place_table_objects(places, geomtype, force_name):
|
||||
cur = world.conn.cursor()
|
||||
for line in places:
|
||||
cols = dict(line)
|
||||
if 'osm_type' not in cols:
|
||||
cols['osm_type'] = 'W'
|
||||
_format_placex_cols(cols, geomtype, force_name)
|
||||
coords = cols.pop('geometry')
|
||||
|
||||
query = 'INSERT INTO place (%s, geometry) values(%s, ST_SetSRID(%s, 4326))' % (
|
||||
','.join(cols.iterkeys()),
|
||||
','.join(['%s' for x in range(len(cols))]),
|
||||
coords
|
||||
)
|
||||
cur.execute(query, cols.values())
|
||||
world.conn.commit()
|
||||
|
||||
@step(u'the scene (.*)')
|
||||
def import_set_scene(step, scene):
|
||||
world.load_scene(scene)
|
||||
|
||||
@step(u'the (named )?place (node|way|area)s')
|
||||
def import_place_table_nodes(step, named, osmtype):
|
||||
"""Insert a list of nodes into the placex table.
|
||||
Expects a table where columns are named in the same way as placex.
|
||||
"""
|
||||
cur = world.conn.cursor()
|
||||
cur.execute('ALTER TABLE place DISABLE TRIGGER place_before_insert')
|
||||
if osmtype == 'node':
|
||||
_insert_place_table_nodes(step.hashes, named is not None)
|
||||
elif osmtype == 'way' :
|
||||
_insert_place_table_objects(step.hashes, 'LINESTRING', named is not None)
|
||||
elif osmtype == 'area' :
|
||||
_insert_place_table_objects(step.hashes, 'POLYGON', named is not None)
|
||||
cur.execute('ALTER TABLE place ENABLE TRIGGER place_before_insert')
|
||||
cur.close()
|
||||
world.conn.commit()
|
||||
|
||||
|
||||
@step(u'the relations')
|
||||
def import_fill_planet_osm_rels(step):
|
||||
"""Adds a raw relation to the osm2pgsql table.
|
||||
Three columns need to be supplied: id, tags, members.
|
||||
"""
|
||||
cur = world.conn.cursor()
|
||||
for line in step.hashes:
|
||||
members = []
|
||||
parts = { 'n' : [], 'w' : [], 'r' : [] }
|
||||
if line['members'].strip():
|
||||
for mem in line['members'].split(','):
|
||||
memparts = mem.strip().split(':', 2)
|
||||
memid = memparts[0].lower()
|
||||
parts[memid[0]].append(int(memid[1:]))
|
||||
members.append(memid)
|
||||
if len(memparts) == 2:
|
||||
members.append(memparts[1])
|
||||
else:
|
||||
members.append('')
|
||||
tags = []
|
||||
for k,v in world.make_hash(line['tags']).iteritems():
|
||||
tags.extend((k,v))
|
||||
if not members:
|
||||
members = None
|
||||
|
||||
cur.execute("""INSERT INTO planet_osm_rels
|
||||
(id, way_off, rel_off, parts, members, tags, pending)
|
||||
VALUES (%s, %s, %s, %s, %s, %s, false)""",
|
||||
(line['id'], len(parts['n']), len(parts['n']) + len(parts['w']),
|
||||
parts['n'] + parts['w'] + parts['r'], members, tags))
|
||||
world.conn.commit()
|
||||
|
||||
|
||||
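# Worked example (added for illustration): for a relation row with
# members = 'N23:admin_centre,W12,R7:subarea' the loop above produces
#
#   parts   = {'n': [23], 'w': [12], 'r': [7]}
#   members = ['n23', 'admin_centre', 'w12', '', 'r7', 'subarea']
#
# and the row is inserted with way_off=1, rel_off=2 and
# parts = [23, 12, 7], mirroring osm2pgsql's middle table layout.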
@step(u'the ways')
|
||||
def import_fill_planet_osm_ways(step):
|
||||
cur = world.conn.cursor()
|
||||
for line in step.hashes:
|
||||
if 'tags' in line:
|
||||
tags = world.make_hash(line['tags'])
|
||||
else:
|
||||
tags = None
|
||||
nodes = [int(x.strip()) for x in line['nodes'].split(',')]
|
||||
|
||||
cur.execute("""INSERT INTO planet_osm_ways
|
||||
(id, nodes, tags, pending)
|
||||
VALUES (%s, %s, %s, false)""",
|
||||
(line['id'], nodes, tags))
|
||||
world.conn.commit()
|
||||
|
||||
############### import and update steps #######################################
|
||||
|
||||
@step(u'importing')
|
||||
def import_database(step):
|
||||
""" Runs the actual indexing. """
|
||||
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
|
||||
cur = world.conn.cursor()
|
||||
cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
|
||||
housenumber, street, addr_place, isin, postcode, country_code, extratags,
|
||||
geometry) select * from place""")
|
||||
world.conn.commit()
|
||||
world.run_nominatim_script('setup', 'index', 'index-noanalyse')
|
||||
#world.db_dump_table('placex')
|
||||
|
||||
|
||||
@step(u'updating place (node|way|area)s')
|
||||
def update_place_table_nodes(step, osmtype):
|
||||
""" Replace a geometry in place by reinsertion and reindex database.
|
||||
"""
|
||||
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
|
||||
if osmtype == 'node':
|
||||
_insert_place_table_nodes(step.hashes, False)
|
||||
elif osmtype == 'way':
|
||||
_insert_place_table_objects(step.hashes, 'LINESTRING', False)
|
||||
elif osmtype == 'area':
|
||||
_insert_place_table_objects(step.hashes, 'POLYGON', False)
|
||||
world.run_nominatim_script('update', 'index')
|
||||
|
||||
@step(u'marking for delete (.*)')
|
||||
def update_delete_places(step, places):
|
||||
""" Remove an entry from place and reindex database.
|
||||
"""
|
||||
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
|
||||
cur = world.conn.cursor()
|
||||
for place in places.split(','):
|
||||
osmtype, osmid, cls = world.split_id(place)
|
||||
if cls is None:
|
||||
q = "delete from place where osm_type = %s and osm_id = %s"
|
||||
params = (osmtype, osmid)
|
||||
else:
|
||||
q = "delete from place where osm_type = %s and osm_id = %s and class = %s"
|
||||
params = (osmtype, osmid, cls)
|
||||
cur.execute(q, params)
|
||||
world.conn.commit()
|
||||
#world.db_dump_table('placex')
|
||||
world.run_nominatim_script('update', 'index')
|
||||
|
||||
|
||||
|
||||
@step(u'sending query "(.*)"( with dups)?$')
|
||||
def query_cmd(step, query, with_dups):
|
||||
""" Results in standard query output. The same tests as for API queries
|
||||
can be used.
|
||||
"""
|
||||
cmd = [os.path.join(world.config.source_dir, 'utils', 'query.php'),
|
||||
'--search', query]
|
||||
if with_dups is not None:
|
||||
cmd.append('--nodedupe')
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
(outp, err) = proc.communicate()
|
||||
assert (proc.returncode == 0), "query.php failed with message: %s" % err
|
||||
world.page = outp
|
||||
world.response_format = 'json'
|
||||
world.returncode = 200
|
||||
|
||||
tests/steps/osm2pgsql_setup.py (new file, 212 lines)
@@ -0,0 +1,212 @@
|
||||
""" Steps for setting up a test database for osm2pgsql import.
|
||||
|
||||
Note that osm2pgsql features need a database and therefore need
|
||||
to be tagged with @DB.
|
||||
"""
|
||||
|
||||
from nose.tools import *
|
||||
from lettuce import *
|
||||
|
||||
import logging
|
||||
import random
|
||||
import tempfile
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@before.each_scenario
|
||||
def osm2pgsql_setup_test(scenario):
|
||||
world.osm2pgsql = []
|
||||
|
||||
@step(u'the osm nodes:')
|
||||
def osm2pgsql_import_nodes(step):
|
||||
""" Define a list of OSM nodes to be imported, given as a table.
|
||||
Each line describes one node with all its attributes.
|
||||
'id' is mandatory, all other fields are filled with random values
|
||||
when not given. If 'tags' is missing an empty tag list is assumed.
|
||||
For updates, a mandatory 'action' column needs to contain 'C' (create),
|
||||
'M' (modify), 'D' (delete).
|
||||
"""
|
||||
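    # Example row (illustration only, not part of the original step): a table
    # line such as
    #   | id | tags              | geometry  |
    #   | 1  | 'amenity' : 'pub' | 47.2 12.5 |
    # arrives here as {'id': '1', 'tags': "'amenity' : 'pub'", 'geometry': '47.2 12.5'}
    # (geometry is "lat lon") and is topped up with the default
    # version/changeset/uid/user values below.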
for line in step.hashes:
|
||||
node = { 'type' : 'N', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
|
||||
'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
|
||||
}
|
||||
node.update(line)
|
||||
node['id'] = int(node['id'])
|
||||
if 'geometry' in node:
|
||||
lat, lon = node['geometry'].split(' ')
|
||||
node['lat'] = float(lat)
|
||||
node['lon'] = float(lon)
|
||||
else:
|
||||
node['lon'] = random.random()*360 - 180
|
||||
node['lat'] = random.random()*180 - 90
|
||||
if 'tags' in node:
|
||||
node['tags'] = world.make_hash(line['tags'])
|
||||
else:
|
||||
node['tags'] = {}
|
||||
|
||||
world.osm2pgsql.append(node)
|
||||
|
||||
|
||||
@step(u'the osm ways:')
|
||||
def osm2pgsql_import_ways(step):
|
||||
""" Define a list of OSM ways to be imported.
|
||||
"""
|
||||
for line in step.hashes:
|
||||
way = { 'type' : 'W', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
|
||||
'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
|
||||
}
|
||||
way.update(line)
|
||||
|
||||
way['id'] = int(way['id'])
|
||||
if 'tags' in way:
|
||||
way['tags'] = world.make_hash(line['tags'])
|
||||
else:
|
||||
way['tags'] = None
|
||||
way['nodes'] = way['nodes'].strip().split()
|
||||
|
||||
world.osm2pgsql.append(way)
|
||||
|
||||
membertype = { 'N' : 'node', 'W' : 'way', 'R' : 'relation' }
|
||||
|
||||
@step(u'the osm relations:')
|
||||
def osm2pgsql_import_rels(step):
|
||||
""" Define a list of OSM relation to be imported.
|
||||
"""
|
||||
for line in step.hashes:
|
||||
rel = { 'type' : 'R', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
|
||||
'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
|
||||
}
|
||||
rel.update(line)
|
||||
|
||||
rel['id'] = int(rel['id'])
|
||||
if 'tags' in rel:
|
||||
rel['tags'] = world.make_hash(line['tags'])
|
||||
else:
|
||||
rel['tags'] = {}
|
||||
members = []
|
||||
if rel['members'].strip():
|
||||
for mem in line['members'].split(','):
|
||||
memparts = mem.strip().split(':', 2)
|
||||
memid = memparts[0].upper()
|
||||
members.append((membertype[memid[0]],
|
||||
memid[1:],
|
||||
memparts[1] if len(memparts) == 2 else ''
|
||||
))
|
||||
rel['members'] = members
|
||||
|
||||
world.osm2pgsql.append(rel)
|
||||
|
||||
|
||||
|
||||
def _sort_xml_entries(x, y):
|
||||
if x['type'] == y['type']:
|
||||
return cmp(x['id'], y['id'])
|
||||
else:
|
||||
return cmp('NWR'.find(x['type']), 'NWR'.find(y['type']))
|
||||
|
||||
def write_osm_obj(fd, obj):
|
||||
if obj['type'] == 'N':
|
||||
fd.write('<node id="%(id)d" lat="%(lat).8f" lon="%(lon).8f" version="%(version)s" timestamp="%(timestamp)s" changeset="%(changeset)s" uid="%(uid)s" user="%(user)s"'% obj)
|
||||
if obj['tags'] is None:
|
||||
fd.write('/>\n')
|
||||
else:
|
||||
fd.write('>\n')
|
||||
for k,v in obj['tags'].iteritems():
|
||||
fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
|
||||
fd.write('</node>\n')
|
||||
elif obj['type'] == 'W':
|
||||
fd.write('<way id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
|
||||
for nd in obj['nodes']:
|
||||
fd.write('<nd ref="%s" />\n' % (nd,))
|
||||
for k,v in obj['tags'].iteritems():
|
||||
fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
|
||||
fd.write('</way>\n')
|
||||
elif obj['type'] == 'R':
|
||||
fd.write('<relation id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
|
||||
for mem in obj['members']:
|
||||
fd.write(' <member type="%s" ref="%s" role="%s"/>\n' % mem)
|
||||
for k,v in obj['tags'].iteritems():
|
||||
fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
|
||||
fd.write('</relation>\n')
|
||||
|
||||
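# For illustration (not in the original file): a node dict such as
# {'id': 1, 'lat': 43.7, 'lon': 7.4, 'tags': {'amenity': 'pub'}, ...} is
# serialised by write_osm_obj() roughly as
#
#   <node id="1" lat="43.70000000" lon="7.40000000" version="1" ...>
#     <tag k="amenity" v="pub"/>
#   </node>
#
# which is the plain OSM XML that setup.php --import-data and
# update.php --import-diff consume in the steps below.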
@step(u'loading osm data')
|
||||
def osm2pgsql_load_place(step):
|
||||
"""Imports the previously defined OSM data into a fresh copy of a
|
||||
Nominatim test database.
|
||||
"""
|
||||
|
||||
world.osm2pgsql.sort(cmp=_sort_xml_entries)
|
||||
|
||||
# create a OSM file in /tmp
|
||||
with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
|
||||
fname = fd.name
|
||||
fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
|
||||
fd.write('<osm version="0.6" generator="test-nominatim" timestamp="2014-08-26T20:22:02Z">\n')
|
||||
fd.write('\t<bounds minlat="43.72335" minlon="7.409205" maxlat="43.75169" maxlon="7.448637"/>\n')
|
||||
|
||||
for obj in world.osm2pgsql:
|
||||
write_osm_obj(fd, obj)
|
||||
|
||||
fd.write('</osm>\n')
|
||||
|
||||
logger.debug( "Filename: %s" % fname)
|
||||
|
||||
cmd = [os.path.join(world.config.source_dir, 'utils', 'setup.php')]
|
||||
cmd.extend(['--osm-file', fname, '--import-data'])
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
(outp, outerr) = proc.communicate()
|
||||
assert (proc.returncode == 0), "OSM data import failed:\n%s\n%s\n" % (outp, outerr)
|
||||
|
||||
### reintroduce the triggers/indexes we've lost by having osm2pgsql set up place again
|
||||
cur = world.conn.cursor()
|
||||
cur.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place
|
||||
FOR EACH ROW EXECUTE PROCEDURE place_delete()""")
|
||||
cur.execute("""CREATE TRIGGER place_before_insert BEFORE INSERT ON place
|
||||
FOR EACH ROW EXECUTE PROCEDURE place_insert()""")
|
||||
cur.execute("""CREATE UNIQUE INDEX idx_place_osm_unique on place using btree(osm_id,osm_type,class,type)""")
|
||||
world.conn.commit()
|
||||
|
||||
|
||||
os.remove(fname)
|
||||
world.osm2pgsql = []
|
||||
|
||||
actiontypes = { 'C' : 'create', 'M' : 'modify', 'D' : 'delete' }
|
||||
|
||||
@step(u'updating osm data')
|
||||
def osm2pgsql_update_place(step):
|
||||
"""Creates an osc file from the previously defined data and imports it
|
||||
into the database.
|
||||
"""
|
||||
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
|
||||
cur = world.conn.cursor()
|
||||
cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
|
||||
housenumber, street, addr_place, isin, postcode, country_code, extratags,
|
||||
geometry) select * from place""")
|
||||
world.conn.commit()
|
||||
world.run_nominatim_script('setup', 'index', 'index-noanalyse')
|
||||
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
|
||||
|
||||
with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
|
||||
fname = fd.name
|
||||
fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
|
||||
fd.write('<osmChange version="0.6" generator="Osmosis 0.43.1">\n')
|
||||
|
||||
for obj in world.osm2pgsql:
|
||||
fd.write('<%s>\n' % (actiontypes[obj['action']], ))
|
||||
write_osm_obj(fd, obj)
|
||||
fd.write('</%s>\n' % (actiontypes[obj['action']], ))
|
||||
|
||||
fd.write('</osmChange>\n')
|
||||
|
||||
logger.debug( "Filename: %s" % fname)
|
||||
|
||||
cmd = [os.path.join(world.config.source_dir, 'utils', 'update.php')]
|
||||
cmd.extend(['--import-diff', fname])
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
(outp, outerr) = proc.communicate()
|
||||
assert (proc.returncode == 0), "OSM data update failed:\n%s\n%s\n" % (outp, outerr)
|
||||
|
||||
os.remove(fname)
|
||||
world.osm2pgsql = []
|
||||
tests/steps/terrain.py (new file, 246 lines)
@@ -0,0 +1,246 @@
|
||||
from lettuce import *
|
||||
from nose.tools import *
|
||||
import logging
|
||||
import os
|
||||
import subprocess
|
||||
import psycopg2
|
||||
import re
|
||||
from haversine import haversine
|
||||
from shapely.wkt import loads as wkt_load
|
||||
from shapely.ops import linemerge
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class NominatimConfig:
|
||||
|
||||
def __init__(self):
|
||||
# logging setup
|
||||
loglevel = getattr(logging, os.environ.get('LOGLEVEL','info').upper())
|
||||
if 'LOGFILE' in os.environ:
|
||||
logging.basicConfig(filename=os.environ.get('LOGFILE','run.log'),
|
||||
level=loglevel)
|
||||
else:
|
||||
logging.basicConfig(level=loglevel)
|
||||
# Nominatim test setup
|
||||
self.base_url = os.environ.get('NOMINATIM_SERVER', 'http://localhost/nominatim')
|
||||
self.source_dir = os.path.abspath(os.environ.get('NOMINATIM_DIR', '..'))
|
||||
self.template_db = os.environ.get('TEMPLATE_DB', 'test_template_nominatim')
|
||||
self.test_db = os.environ.get('TEST_DB', 'test_nominatim')
|
||||
self.local_settings_file = os.environ.get('NOMINATIM_SETTINGS', '/tmp/nominatim_settings.php')
|
||||
self.reuse_template = 'NOMINATIM_REUSE_TEMPLATE' in os.environ
|
||||
self.keep_scenario_db = 'NOMINATIM_KEEP_SCENARIO_DB' in os.environ
|
||||
os.environ['NOMINATIM_SETTINGS'] = '/tmp/nominatim_settings.php'
|
||||
|
||||
scriptpath = os.path.dirname(os.path.abspath(__file__))
|
||||
self.scene_path = os.environ.get('SCENE_PATH',
|
||||
os.path.join(scriptpath, '..', 'scenes', 'data'))
|
||||
|
||||
|
||||
def __str__(self):
|
||||
return 'Server URL: %s\nSource dir: %s\n' % (self.base_url, self.source_dir)
|
||||
|
||||
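# Summary of the environment variables read above (added as a reference,
# not part of the original file):
#
#   NOMINATIM_SERVER            base URL used for the API tests
#   NOMINATIM_DIR               source tree with utils/*.php and osm2pgsql
#   TEMPLATE_DB / TEST_DB       names of the template and per-scenario databases
#   NOMINATIM_SETTINGS          path of the generated local settings file
#   NOMINATIM_REUSE_TEMPLATE    keep and reuse an existing template database
#   NOMINATIM_KEEP_SCENARIO_DB  do not drop the test database after a scenario
#   LOGLEVEL / LOGFILE          logging verbosity and optional log file
#   SCENE_PATH                  directory with the prepared .wkt scene files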
world.config = NominatimConfig()
|
||||
|
||||
@world.absorb
|
||||
def write_nominatim_config(dbname):
|
||||
f = open(world.config.local_settings_file, 'w')
|
||||
f.write("<?php\n @define('CONST_Database_DSN', 'pgsql://@/%s');\n" % dbname)
|
||||
f.close()
|
||||
|
||||
|
||||
@world.absorb
|
||||
def run_nominatim_script(script, *args):
|
||||
cmd = [os.path.join(world.config.source_dir, 'utils', '%s.php' % script)]
|
||||
cmd.extend(['--%s' % x for x in args])
|
||||
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
(outp, outerr) = proc.communicate()
|
||||
assert (proc.returncode == 0), "Script '%s' failed:\n%s\n%s\n" % (script, outp, outerr)
|
||||
|
||||
@world.absorb
|
||||
def make_hash(inp):
|
||||
return eval('{' + inp + '}')
|
||||
|
||||
@world.absorb
|
||||
def split_id(oid):
|
||||
""" Splits a unique identifier for places into its components.
|
||||
As place_ids cannot be used for testing, we use a unique
|
||||
identifier instead that is of the form <osmtype><osmid>[:class].
|
||||
"""
|
||||
oid = oid.strip()
|
||||
if oid == 'None':
|
||||
return None, None, None
|
||||
osmtype = oid[0]
|
||||
assert_in(osmtype, ('R','N','W'))
|
||||
if ':' in oid:
|
||||
osmid, cls = oid[1:].split(':')
|
||||
return (osmtype, int(osmid), cls)
|
||||
else:
|
||||
return (osmtype, int(oid[1:]), None)
|
||||
|
||||
@world.absorb
|
||||
def get_placeid(oid):
|
||||
""" Tries to retrive the place_id for a unique identifier. """
|
||||
if oid[0].isdigit():
|
||||
return int(oid)
|
||||
|
||||
osmtype, osmid, cls = world.split_id(oid)
|
||||
if osmtype is None:
|
||||
return None
|
||||
cur = world.conn.cursor()
|
||||
if cls is None:
|
||||
q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s'
|
||||
params = (osmtype, osmid)
|
||||
else:
|
||||
q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s and class = %s'
|
||||
params = (osmtype, osmid, cls)
|
||||
cur.execute(q, params)
|
||||
assert_equals (cur.rowcount, 1)
|
||||
return cur.fetchone()[0]
|
||||
|
||||
|
||||
@world.absorb
|
||||
def match_geometry(coord, matchstring):
|
||||
m = re.match(r'([-0-9.]+),\s*([-0-9.]+)\s*(?:\+-([0-9.]+)([a-z]+)?)?', matchstring)
|
||||
assert_is_not_none(m, "Invalid match string")
|
||||
|
||||
logger.debug("Distmatch: %s/%s %s %s" % (m.group(1), m.group(2), m.group(3), m.group(4) ))
|
||||
dist = haversine(coord, (float(m.group(1)), float(m.group(2))))
|
||||
|
||||
if m.group(3) is not None:
|
||||
expdist = float(m.group(3))
|
||||
if m.group(4) is not None:
|
||||
if m.group(4) == 'm':
|
||||
expdist = expdist/1000
|
||||
elif m.group(4) == 'km':
|
||||
pass
|
||||
else:
|
||||
raise Exception("Unknown unit '%s' in geometry match" % (m.group(4), ))
|
||||
else:
|
||||
expdist = 0
|
||||
|
||||
logger.debug("Distances expected: %f, got: %f" % (expdist, dist))
|
||||
assert dist <= expdist, "Geometry too far away, expected: %f, got: %f" % (expdist, dist)
|
||||
|
||||
|
||||
|
||||
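# Example match strings for match_geometry() (illustration only):
#
#   '47.22,9.94'            expects the exact point (allowed distance 0)
#   '47.22, 9.94 +-0.1'     allows a deviation of 0.1 km
#   '47.22, 9.94 +-100m'    allows 100 m (converted to km for haversine())
#
# Any unit other than 'm' or 'km' raises the "Unknown unit" exception above.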
@world.absorb
|
||||
def db_dump_table(table):
|
||||
cur = world.conn.cursor()
|
||||
cur.execute('SELECT * FROM %s' % table)
|
||||
print '<<<<<<< BEGIN OF TABLE DUMP %s' % table
|
||||
for res in cur:
|
||||
print res
|
||||
print '<<<<<<< END OF TABLE DUMP %s' % table
|
||||
|
||||
@world.absorb
|
||||
def db_drop_database(name):
|
||||
conn = psycopg2.connect(database='postgres')
|
||||
conn.set_isolation_level(0)
|
||||
cur = conn.cursor()
|
||||
cur.execute('DROP DATABASE IF EXISTS %s' % (name, ))
|
||||
conn.close()
|
||||
|
||||
|
||||
world.is_template_set_up = False
|
||||
|
||||
@world.absorb
|
||||
def db_template_setup():
|
||||
""" Set up a template database, containing all tables
|
||||
but not yet any functions.
|
||||
"""
|
||||
if world.is_template_set_up:
|
||||
return
|
||||
|
||||
world.is_template_set_up = True
|
||||
world.write_nominatim_config(world.config.template_db)
|
||||
if world.config.reuse_template:
|
||||
# check that the template is there
|
||||
conn = psycopg2.connect(database='postgres')
|
||||
cur = conn.cursor()
|
||||
cur.execute('select count(*) from pg_database where datname = %s',
|
||||
(world.config.template_db,))
|
||||
if cur.fetchone()[0] == 1:
|
||||
return
|
||||
else:
|
||||
# just in case... make sure a previous template database has been dropped
|
||||
world.db_drop_database(world.config.template_db)
|
||||
# call the first part of database setup
|
||||
world.run_nominatim_script('setup', 'create-db', 'setup-db')
|
||||
# remove external data to speed up indexing for tests
|
||||
conn = psycopg2.connect(database=world.config.template_db)
|
||||
psycopg2.extras.register_hstore(conn, globally=False, unicode=True)
|
||||
cur = conn.cursor()
|
||||
for table in ('gb_postcode', 'us_postcode', 'us_state', 'us_statecounty'):
|
||||
cur.execute('TRUNCATE TABLE %s' % (table,))
|
||||
conn.commit()
|
||||
conn.close()
|
||||
# execute osm2pgsql on an empty file to get the right tables
|
||||
osm2pgsql = os.path.join(world.config.source_dir, 'osm2pgsql', 'osm2pgsql')
|
||||
proc = subprocess.Popen([osm2pgsql, '-lsc', '-O', 'gazetteer', '-d', world.config.template_db, '-'],
|
||||
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
|
||||
[outstr, errstr] = proc.communicate(input='<osm version="0.6"></osm>')
|
||||
world.run_nominatim_script('setup', 'create-functions', 'create-tables', 'create-partition-tables', 'create-partition-functions', 'load-data', 'create-search-indices')
|
||||
|
||||
|
||||
# Leave the table around so it can be reused again after a non-reuse test round.
|
||||
#@after.all
|
||||
def db_template_teardown(total):
|
||||
""" Set up a template database, containing all tables
|
||||
and delete the temporary settings file.
|
||||
"""
|
||||
if world.is_template_set_up:
|
||||
# remove template DB
|
||||
if not world.config.reuse_template:
|
||||
world.db_drop_database(world.config.template_db)
|
||||
try:
|
||||
os.remove(world.config.local_settings_file)
|
||||
except OSError:
|
||||
pass # ignore missing file
|
||||
|
||||
|
||||
##########################################################################
|
||||
#
|
||||
# Data scene handling
|
||||
#
|
||||
|
||||
world.scenes = {}
|
||||
world.current_scene = None
|
||||
|
||||
@world.absorb
|
||||
def load_scene(name):
|
||||
if name in world.scenes:
|
||||
world.current_scene = world.scenes[name]
|
||||
else:
|
||||
with open(os.path.join(world.config.scene_path, "%s.wkt" % name), 'r') as fd:
|
||||
scene = {}
|
||||
for line in fd:
|
||||
if line.strip():
|
||||
obj, wkt = line.split('|', 2)
|
||||
wkt = wkt.strip()
|
||||
scene[obj.strip()] = wkt_load(wkt)
|
||||
world.scenes[name] = scene
|
||||
world.current_scene = scene
|
||||
|
||||
@world.absorb
|
||||
def get_scene_geometry(name):
|
||||
if not ':' in name:
|
||||
# Not a scene description
|
||||
return None
|
||||
|
||||
geoms = []
|
||||
for obj in name.split('+'):
|
||||
oname = obj.strip()
|
||||
if oname.startswith(':'):
|
||||
geoms.append(world.current_scene[oname[1:]])
|
||||
else:
|
||||
scene, obj = oname.split(':', 2)
|
||||
oldscene = world.current_scene
|
||||
world.load_scene(scene)
|
||||
wkt = world.current_scene[obj]
|
||||
world.current_scene = oldscene
|
||||
geoms.append(wkt)
|
||||
|
||||
if len(geoms) == 1:
|
||||
return geoms[0]
|
||||
else:
|
||||
return linemerge(geoms)
|
||||
utils/query.php (new executable file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
#!/usr/bin/php -Cq
|
||||
<?php
|
||||
|
||||
require_once(dirname(dirname(__FILE__)).'/lib/init-cmd.php');
|
||||
require_once(CONST_BasePath.'/lib/Geocode.php');
|
||||
ini_set('memory_limit', '800M');
|
||||
|
||||
$aCMDOptions = array(
|
||||
"Query database from command line. Returns search result as JSON.",
|
||||
array('help', 'h', 0, 1, 0, 0, false, 'Show Help'),
|
||||
array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
|
||||
array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
|
||||
|
||||
array('search', '', 0, 1, 1, 1, 'string', 'Search for given term or coordinate'),
|
||||
|
||||
array('accept-language', '', 0, 1, 1, 1, 'string', 'Preferred language order for showing search results'),
|
||||
array('bounded', '', 0, 1, 0, 0, 'bool', 'Restrict results to given viewbox'),
|
||||
array('nodedupe', '', 0, 1, 0, 0, 'bool', 'Do not remove duplicate results'),
|
||||
array('limit', '', 0, 1, 1, 1, 'int', 'Maximum number of results returned (default: 10)'),
|
||||
array('exclude_place_ids', '', 0, 1, 1, 1, 'string', 'Comma-separated list of place ids to exclude from results'),
|
||||
array('featureType', '', 0, 1, 1, 1, 'string', 'Restrict results to certain features (country, state,city,settlement)'),
|
||||
array('countrycodes', '', 0, 1, 1, 1, 'string', 'Comma-separated list of countries to restrict search to'),
|
||||
array('viewbox', '', 0, 1, 1, 1, 'string', 'Prefer results in given view box')
|
||||
);
|
||||
getCmdOpt($_SERVER['argv'], $aCMDOptions, $aCMDResult, true, true);
|
||||
|
||||
$oDB =& getDB();
|
||||
|
||||
if (isset($aCMDResult['search']) && $aCMDResult['search'])
|
||||
{
|
||||
if (isset($aCMDResult['bounded'])) $aCMDResult['bounded'] = 'true';
|
||||
if (isset($aCMDResult['nodedupe'])) $aCMDResult['dedupe'] = 'false';
|
||||
|
||||
$oGeocode =& new Geocode($oDB);
|
||||
if (isset($aCMDResult['accept-language']) && $aCMDResult['accept-language'])
|
||||
$oGeocode->setLanguagePreference(getPreferredLanguages($aCMDResult['accept-language']));
|
||||
else
|
||||
$oGeocode->setLanguagePreference(getPreferredLanguages());
|
||||
$oGeocode->loadParamArray($aCMDResult);
|
||||
$oGeocode->setQuery($aCMDResult['search']);
|
||||
|
||||
$aSearchResults = $oGeocode->lookup();
|
||||
|
||||
if (version_compare(phpversion(), "5.4.0", '<'))
|
||||
echo json_encode($aSearchResults);
|
||||
else
|
||||
echo json_encode($aSearchResults, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES | JSON_UNESCAPED_UNICODE)."\n";
|
||||
}
|
||||
else
|
||||
{
|
||||
showUsage($aCMDOptions, true);
|
||||
}
|
||||
|
||||
|
||||
|
||||
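The new utils/query.php above is what the `sending query "..."` step in tests/steps/db_setup.py shells out to. A minimal sketch of that call (the query string is made up; see query_cmd() above for the real invocation):

    import os, subprocess

    source_dir = os.environ.get('NOMINATIM_DIR', '..')
    cmd = [os.path.join(source_dir, 'utils', 'query.php'),
           '--search', 'Birmingham', '--nodedupe']   # --nodedupe keeps duplicate results
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    outp, err = proc.communicate()
    # outp now holds the JSON result list, shaped like an API 'search' response,
    # so the same result-checking steps can be applied to it.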
@@ -22,6 +22,7 @@
|
||||
array('create-functions', '', 0, 1, 0, 0, 'bool', 'Create functions'),
|
||||
array('enable-diff-updates', '', 0, 1, 0, 0, 'bool', 'Turn on the code required to make diff updates work'),
|
||||
array('enable-debug-statements', '', 0, 1, 0, 0, 'bool', 'Include debug warning statements in pgsql commands'),
|
||||
array('ignore-errors', '', 0, 1, 0, 0, 'bool', 'Continue import even when errors in SQL are present (EXPERT)'),
|
||||
array('create-minimal-tables', '', 0, 1, 0, 0, 'bool', 'Create minimal main tables'),
|
||||
array('create-tables', '', 0, 1, 0, 0, 'bool', 'Create main tables'),
|
||||
array('create-partition-tables', '', 0, 1, 0, 0, 'bool', 'Create required partition tables'),
|
||||
@@ -164,6 +165,13 @@
|
||||
{
|
||||
pgsqlRunScript('update country_name set partition = 0');
|
||||
}
|
||||
|
||||
// the following will be needed by create_functions later but
|
||||
// is only defined in the subsequently called create_tables.
|
||||
// Create dummies here that will be overwritten by the proper
|
||||
// versions in create-tables.
|
||||
pgsqlRunScript('CREATE TABLE place_boundingbox ()');
|
||||
pgsqlRunScript('create type wikipedia_article_match as ()');
|
||||
}
|
||||
|
||||
if ($aCMDResult['import-data'] || $aCMDResult['all'])
|
||||
@@ -677,6 +685,10 @@
|
||||
{
|
||||
showUsage($aCMDOptions, true);
|
||||
}
|
||||
else
|
||||
{
|
||||
echo "Setup finished.\n";
|
||||
}
|
||||
|
||||
function pgsqlRunScriptFile($sFilename)
|
||||
{
|
||||
@@ -723,7 +735,11 @@
|
||||
}
|
||||
fclose($ahPipes[1]);
|
||||
|
||||
proc_close($hProcess);
|
||||
$iReturn = proc_close($hProcess);
|
||||
if ($iReturn > 0)
|
||||
{
|
||||
fail("pgsql returned with error code ($iReturn)");
|
||||
}
|
||||
if ($ahGzipPipes)
|
||||
{
|
||||
fclose($ahGzipPipes[1]);
|
||||
@@ -734,10 +750,13 @@
|
||||
|
||||
function pgsqlRunScript($sScript)
|
||||
{
|
||||
global $aCMDResult;
|
||||
// Convert database DSN to psql parameters
|
||||
$aDSNInfo = DB::parseDSN(CONST_Database_DSN);
|
||||
if (!isset($aDSNInfo['port']) || !$aDSNInfo['port']) $aDSNInfo['port'] = 5432;
|
||||
$sCMD = 'psql -p '.$aDSNInfo['port'].' -d '.$aDSNInfo['database'];
|
||||
if (!$aCMDResult['ignore-errors'])
|
||||
$sCMD .= ' -v ON_ERROR_STOP=1';
|
||||
$aDescriptors = array(
|
||||
0 => array('pipe', 'r'),
|
||||
1 => STDOUT,
|
||||
@@ -750,10 +769,15 @@
|
||||
while(strlen($sScript))
|
||||
{
|
||||
$written = fwrite($ahPipes[0], $sScript);
|
||||
if ($written <= 0) break;
|
||||
$sScript = substr($sScript, $written);
|
||||
}
|
||||
fclose($ahPipes[0]);
|
||||
proc_close($hProcess);
|
||||
$iReturn = proc_close($hProcess);
|
||||
if ($iReturn > 0)
|
||||
{
|
||||
fail("pgsql returned with error code ($iReturn)");
|
||||
}
|
||||
}
|
||||
|
||||
function pgsqlRunRestoreData($sDumpFile)
|
||||
@@ -781,7 +805,7 @@
|
||||
}
|
||||
fclose($ahPipes[1]);
|
||||
|
||||
proc_close($hProcess);
|
||||
$iReturn = proc_close($hProcess);
|
||||
}
|
||||
|
||||
function pgsqlRunDropAndRestore($sDumpFile)
|
||||
@@ -809,7 +833,7 @@
|
||||
}
|
||||
fclose($ahPipes[1]);
|
||||
|
||||
proc_close($hProcess);
|
||||
$iReturn = proc_close($hProcess);
|
||||
}
|
||||
|
||||
function passthruCheckReturn($cmd)
|
||||
|
||||
@@ -254,6 +254,13 @@
|
||||
|
||||
if ($aResult['deduplicate'])
|
||||
{
|
||||
|
||||
$pgver = (float) CONST_Postgresql_Version;
|
||||
if ($pgver < 9.3) {
|
||||
echo "ERROR: deduplicate is only currently supported in postgresql 9.3";
|
||||
exit;
|
||||
}
|
||||
|
||||
$oDB =& getDB();
|
||||
$sSQL = 'select partition from country_name order by country_code';
|
||||
$aPartitions = $oDB->getCol($sSQL);
|
||||
@@ -276,15 +283,15 @@
|
||||
var_dump($aTokenSet, $sSQL);
|
||||
exit;
|
||||
}
|
||||
|
||||
|
||||
$aKeep = array_shift($aTokenSet);
|
||||
$iKeepID = $aKeep['word_id'];
|
||||
|
||||
foreach($aTokenSet as $aRemove)
|
||||
{
|
||||
$sSQL = "update search_name set";
|
||||
$sSQL .= " name_vector = (name_vector - ".$aRemove['word_id'].")+".$iKeepID.",";
|
||||
$sSQL .= " nameaddress_vector = (nameaddress_vector - ".$aRemove['word_id'].")+".$iKeepID;
|
||||
$sSQL .= " name_vector = array_replace(name_vector,".$aRemove['word_id'].",".$iKeepID."),";
|
||||
$sSQL .= " nameaddress_vector = array_replace(nameaddress_vector,".$aRemove['word_id'].",".$iKeepID.")";
|
||||
$sSQL .= " where name_vector @> ARRAY[".$aRemove['word_id']."]";
|
||||
$x = $oDB->query($sSQL);
|
||||
if (PEAR::isError($x))
|
||||
@@ -294,7 +301,7 @@
|
||||
}
|
||||
|
||||
$sSQL = "update search_name set";
|
||||
$sSQL .= " nameaddress_vector = (nameaddress_vector - ".$aRemove['word_id'].")+".$iKeepID;
|
||||
$sSQL .= " nameaddress_vector = array_replace(nameaddress_vector,".$aRemove['word_id'].",".$iKeepID.")";
|
||||
$sSQL .= " where nameaddress_vector @> ARRAY[".$aRemove['word_id']."]";
|
||||
$x = $oDB->query($sSQL);
|
||||
if (PEAR::isError($x))
|
||||
@@ -304,7 +311,7 @@
|
||||
}
|
||||
|
||||
$sSQL = "update location_area_country set";
|
||||
$sSQL .= " keywords = (keywords - ".$aRemove['word_id'].")+".$iKeepID;
|
||||
$sSQL .= " keywords = array_replace(keywords,".$aRemove['word_id'].",".$iKeepID.")";
|
||||
$sSQL .= " where keywords @> ARRAY[".$aRemove['word_id']."]";
|
||||
$x = $oDB->query($sSQL);
|
||||
if (PEAR::isError($x))
|
||||
@@ -316,8 +323,7 @@
|
||||
foreach ($aPartitions as $sPartition)
|
||||
{
|
||||
$sSQL = "update search_name_".$sPartition." set";
|
||||
$sSQL .= " name_vector = (name_vector - ".$aRemove['word_id'].")+".$iKeepID.",";
|
||||
$sSQL .= " nameaddress_vector = (nameaddress_vector - ".$aRemove['word_id'].")+".$iKeepID;
|
||||
$sSQL .= " name_vector = array_replace(name_vector,".$aRemove['word_id'].",".$iKeepID.")";
|
||||
$sSQL .= " where name_vector @> ARRAY[".$aRemove['word_id']."]";
|
||||
$x = $oDB->query($sSQL);
|
||||
if (PEAR::isError($x))
|
||||
@@ -326,18 +332,8 @@
|
||||
exit;
|
||||
}
|
||||
|
||||
$sSQL = "update search_name_".$sPartition." set";
|
||||
$sSQL .= " nameaddress_vector = (nameaddress_vector - ".$aRemove['word_id'].")+".$iKeepID;
|
||||
$sSQL .= " where nameaddress_vector @> ARRAY[".$aRemove['word_id']."]";
|
||||
$x = $oDB->query($sSQL);
|
||||
if (PEAR::isError($x))
|
||||
{
|
||||
var_dump($x);
|
||||
exit;
|
||||
}
|
||||
|
||||
$sSQL = "update location_area_country set";
|
||||
$sSQL .= " keywords = (keywords - ".$aRemove['word_id'].")+".$iKeepID;
|
||||
$sSQL .= " keywords = array_replace(keywords,".$aRemove['word_id'].",".$iKeepID.")";
|
||||
$sSQL .= " where keywords @> ARRAY[".$aRemove['word_id']."]";
|
||||
$x = $oDB->query($sSQL);
|
||||
if (PEAR::isError($x))
|
||||
|
||||
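The deduplication change above swaps the intarray-style `(vector - old) + new` update for `array_replace(vector, old, new)`. The difference is that the old form removes every occurrence of the obsolete word id and appends the keeper at the end, while array_replace substitutes it in place and keeps the array layout. A rough Python analogy (illustration only, not part of the patch):

    def subtract_then_append(vec, old, new):
        # old behaviour: drop all occurrences, append the keeper once at the end
        return [x for x in vec if x != old] + [new]

    def array_replace(vec, old, new):
        # new behaviour: substitute in place, positions and duplicates preserved
        return [new if x == old else x for x in vec]

    print subtract_then_append([5, 7, 5, 9], 5, 1)   # [7, 9, 1]
    print array_replace([5, 7, 5, 9], 5, 1)          # [1, 7, 1, 9]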
@@ -3,7 +3,7 @@
|
||||
|
||||
require_once(dirname(dirname(__FILE__)).'/lib/init-website.php');
|
||||
require_once(CONST_BasePath.'/lib/log.php');
|
||||
require_once(CONST_BasePath.'/lib/Geocode.php');
|
||||
require_once(CONST_BasePath.'/lib/Geocode.php');
|
||||
|
||||
ini_set('memory_limit', '200M');
|
||||
|
||||
@@ -25,104 +25,6 @@
|
||||
if (isset($aLangPrefOrder['name:ja'])) $oGeocode->setReverseInPlan(true);
|
||||
if (isset($aLangPrefOrder['name:pl'])) $oGeocode->setReverseInPlan(true);
|
||||
|
||||
function loadParamsToGeocode($oGeocode, $aParams, $bBatch = false)
|
||||
{
|
||||
if (isset($aParams['addressdetails'])) $oGeocode->setIncludeAddressDetails((bool)$aParams['addressdetails']);
|
||||
if (isset($aParams['bounded'])) $oGeocode->setBounded((bool)$aParams['bounded']);
|
||||
if (isset($aParams['dedupe'])) $oGeocode->setDedupe((bool)$aParams['dedupe']);
|
||||
|
||||
if (isset($aParams['limit'])) $oGeocode->setLimit((int)$aParams['limit']);
|
||||
if (isset($aParams['offset'])) $oGeocode->setOffset((int)$aParams['offset']);
|
||||
|
||||
if (isset($aParams['fallback'])) $oGeocode->setFallback((int)$aParams['fallback']);
|
||||
|
||||
// List of excluded Place IDs - used for more acurate pageing
|
||||
if (isset($aParams['exclude_place_ids']) && $aParams['exclude_place_ids'])
|
||||
{
|
||||
foreach(explode(',',$aParams['exclude_place_ids']) as $iExcludedPlaceID)
|
||||
{
|
||||
$iExcludedPlaceID = (int)$iExcludedPlaceID;
|
||||
if ($iExcludedPlaceID) $aExcludePlaceIDs[$iExcludedPlaceID] = $iExcludedPlaceID;
|
||||
}
|
||||
$oGeocode->setExcludedPlaceIds($aExcludePlaceIDs);
|
||||
}
|
||||
|
||||
// Only certain ranks of feature
|
||||
if (isset($aParams['featureType'])) $oGeocode->setFeatureType($aParams['featureType']);
|
||||
if (isset($aParams['featuretype'])) $oGeocode->setFeatureType($aParams['featuretype']);
|
||||
|
||||
// Country code list
|
||||
if (isset($aParams['countrycodes']))
|
||||
{
|
||||
$aCountryCodes = array();
|
||||
foreach(explode(',',$aParams['countrycodes']) as $sCountryCode)
|
||||
{
|
||||
if (preg_match('/^[a-zA-Z][a-zA-Z]$/', $sCountryCode))
|
||||
{
|
||||
$aCountryCodes[] = strtolower($sCountryCode);
|
||||
}
|
||||
}
|
||||
$oGeocode->setCountryCodesList($aCountryCodes);
|
||||
}
|
||||
|
||||
if (isset($aParams['viewboxlbrt']) && $aParams['viewboxlbrt'])
|
||||
{
|
||||
$aCoOrdinatesLBRT = explode(',',$aParams['viewboxlbrt']);
|
||||
$oGeocode->setViewBox($aCoOrdinatesLBRT[0], $aCoOrdinatesLBRT[1], $aCoOrdinatesLBRT[2], $aCoOrdinatesLBRT[3]);
|
||||
}
|
||||
else if (isset($aParams['viewbox']) && $aParams['viewbox'])
|
||||
{
|
||||
$aCoOrdinatesLTRB = explode(',',$aParams['viewbox']);
|
||||
$oGeocode->setViewBox($aCoOrdinatesLTRB[0], $aCoOrdinatesLTRB[3], $aCoOrdinatesLTRB[2], $aCoOrdinatesLTRB[1]);
|
||||
}
|
||||
|
||||
if (isset($aParams['route']) && $aParams['route'] && isset($aParams['routewidth']) && $aParams['routewidth'])
|
||||
{
|
||||
$aPoints = explode(',',$aParams['route']);
|
||||
if (sizeof($aPoints) % 2 != 0)
|
||||
{
|
||||
userError("Uneven number of points");
|
||||
exit;
|
||||
}
|
||||
$fPrevCoord = false;
|
||||
$aRoute = array();
|
||||
foreach($aPoints as $i => $fPoint)
|
||||
{
|
||||
if ($i%2)
|
||||
{
|
||||
$aRoute[] = array((float)$fPoint, $fPrevCoord);
|
||||
}
|
||||
else
|
||||
{
|
||||
$fPrevCoord = (float)$fPoint;
|
||||
}
|
||||
}
|
||||
$oGeocode->setRoute($aRoute);
|
||||
}
|
||||
|
||||
// Search query
|
||||
$sQuery = (isset($aParams['q'])?trim($aParams['q']):'');
|
||||
if (!$sQuery && !$bBatch && isset($_SERVER['PATH_INFO']) && $_SERVER['PATH_INFO'][0] == '/')
|
||||
{
|
||||
$sQuery = substr($_SERVER['PATH_INFO'], 1);
|
||||
|
||||
// reverse order of '/' separated string
|
||||
$aPhrases = explode('/', $sQuery);
|
||||
$aPhrases = array_reverse($aPhrases);
|
||||
$sQuery = join(', ',$aPhrases);
|
||||
}
|
||||
if (!$sQuery)
|
||||
{
|
||||
$oGeocode->setStructuredQuery(@$aParams['amenity'], @$aParams['street'], @$aParams['city'], @$aParams['county'], @$aParams['state'], @$aParams['country'], @$aParams['postalcode']);
|
||||
$oGeocode->setReverseInPlan(false);
|
||||
}
|
||||
else
|
||||
{
|
||||
$oGeocode->setQuery($sQuery);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Format for output
|
||||
$sOutputFormat = 'html';
|
||||
if (isset($_GET['format']) && ($_GET['format'] == 'html' || $_GET['format'] == 'xml' || $_GET['format'] == 'json' || $_GET['format'] == 'jsonv2'))
|
||||
@@ -166,7 +68,7 @@
|
||||
$oGeocode->setIncludePolygonAsSVG($bAsSVG);
|
||||
}
|
||||
|
||||
loadParamsToGeocode($oGeocode, $_GET, false);
|
||||
$oGeocode->loadParamArray($_GET);
|
||||
|
||||
if (CONST_Search_BatchMode && isset($_GET['batch']))
|
||||
{
|
||||
@@ -175,13 +77,30 @@
|
||||
foreach($aBatch as $aBatchParams)
|
||||
{
|
||||
$oBatchGeocode = clone $oGeocode;
|
||||
loadParamsToGeocode($oBatchGeocode, $aBatchParams, true);
|
||||
$oBatchGeocode->loadParamArray($aBatchParams);
|
||||
$oBatchGeocode->setQueryFromParams($aBatchParams);
|
||||
$aSearchResults = $oBatchGeocode->lookup();
|
||||
$aBatchResults[] = $aSearchResults;
|
||||
}
|
||||
include(CONST_BasePath.'/lib/template/search-batch-json.php');
|
||||
exit;
|
||||
}
|
||||
} else {
|
||||
if (!(isset($_GET['q']) && $_GET['q']) && isset($_SERVER['PATH_INFO']) && $_SERVER['PATH_INFO'][0] == '/')
|
||||
{
|
||||
$sQuery = substr($_SERVER['PATH_INFO'], 1);
|
||||
|
||||
// reverse order of '/' separated string
|
||||
$aPhrases = explode('/', $sQuery);
|
||||
$aPhrases = array_reverse($aPhrases);
|
||||
$sQuery = join(', ',$aPhrases);
|
||||
$oGeocode->setQuery($sQuery);
|
||||
}
|
||||
else
|
||||
{
|
||||
$oGeocode->setQueryFromParams($_GET);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
$hLog = logStart($oDB, 'search', $oGeocode->getQueryString(), $aLangPrefOrder);
|
||||
|
||||
|
||||