Compare commits


11 Commits

Author  SHA1  Message  Date
Sarah Hoffmann  933cc8b068  more bigint fixes  2013-03-06 13:57:20 +00:00
Brian Quinion  f397e29ae5  check versions  2013-03-06 13:31:46 +00:00
Brian Quinion  9320dbfd4b  another int => bigint issue in addressline  2013-02-12 23:49:42 +01:00
Sarah Hoffmann  78d7221e1b  update change log  2013-02-09 20:34:44 +01:00
Brian Quinion  9661159b59  fix some missed INTEGER to BIGINT conversions  2013-02-09 20:33:37 +01:00
Sarah Hoffmann  7587ead209  change log for 2.0.1  2013-01-24 22:06:56 +01:00
Sarah Hoffmann  9c88f32615  delete outdated entries from location_area_country  2013-01-23 23:01:24 +01:00
Sarah Hoffmann  49a3864d2c  split partition.sql into table and function parts  2013-01-23 23:01:13 +01:00
    (Makes it easier to patch partition functions on an already existing database.)
Sarah Hoffmann  a0d1b418a3  deduplicate  2012-10-31 23:15:41 +01:00
Brian Quinion  7536bbac41  up to date list of authors  2012-10-31 16:37:29 +00:00
Brian Quinion  51191d5978  update release version  2012-10-31 16:24:34 +00:00
119 changed files with 1727688 additions and 35985 deletions

.gitignore (10 changed lines)

@@ -1,11 +1,9 @@
*.log
*.pyc
nominatim/*.d
nominatim/*.o
nominatim/nominatim
module/nominatim.so
module/nominatim.o
settings/configuration.txt
settings/download.lock
settings/state.txt
@@ -21,11 +19,3 @@ stamp-h1
missing
INSTALL
aclocal.m4
depcomp
install-sh
compile
data/wiki_import.sql
data/wiki_specialphrases.sql
data/osmosischange.osc


@@ -6,12 +6,5 @@ Nominatim was written by:
Michael Spreng
Daniele Forsi
mfn
Grant Slater
Grant
Andree Klattenhoff
IrlJidel
appelflap
b3nn0
Spin0us
Kurt Roeckx
Rodolphe Quiédeville
Marc Tobias Metten


@@ -2,58 +2,3 @@
* delete outdated entries from location_area_country
* remove remaining uses of INTEGER, to allow node ids larger than 2^31
2.1
* update to openlayers 2.12 (now custom built)
* update fallback OSM boundaries
* add support for postgresql 9.2/9.3 and postgis 2.x
* add structured queries
* add options for polygon output in various formats
(geojson, svg, kml, postgis text)
* maintenance functions for deleting objects and updating regions
(place_force_update/place_force_delete)
* web view for polygons that need deleting
* rate limiting using memcache
* improve layout of details page
* add support for boundary:postal_code
* full CORS support
* improve parenting of POIs
* support for extracting daily diffs from Geofabrik
* support for addresses without a street
(addr:place and conscription number house numbers)
* improve layout of word and search_name_* tables
* support for US ZIP+4 codes
* refactoring of front-end PHP code
* lots of smaller bug fixes
2.2
* correct database rights for www-data
* add timestamps for update output
* load postgis via extension for postgis >= 2.0
* remove non-admin boundaries from addresses
* further improve ordering of results with same importance
* merge addr:postcode tags into object addresses
* include rank and importance in reverse geocode output
* replace ST_Line_Interpolate_Point with ST_LineInterpolatePoint
(for postgis >= 2.1)
* update osm2pgsql to latest version
* properly detect changes of admin_level
* remove landuses when name is removed
* smaller fixes
2.3
* further improve ordering of results
* support for more lat/lon formats in search-as-reverse
* fix handling of GB postcodes
* new functional test suite
* support for waterway relations
* inherit postcodes from street to poi
* fix housenumber normalisation to find non-latin house numbers
* take viewbox into account for ordering of results
* pois may now inherit address tags from surrounding buildings
* improve what objects may participate in an address
* clean up handled class/type combinations to current OSM usage
* lots of bug fixes
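Several commits in this range ("more bigint fixes", "another int => bigint issue in addressline", "fix some missed INTEGER to BIGINT conversions") implement the change-log entry above about removing the remaining uses of INTEGER so that OSM node ids above 2^31 still fit. A minimal sketch of the kind of column widening this involves, assuming the project's PEAR-DB style wrapper ($oDB); the table and column names are placeholders, not names taken from this diff:

<?php
// Hedged illustration only: widen a 32-bit id column to 64 bits so that OSM node
// ids larger than 2^31 can be stored. 'example_table' and 'osm_id' are placeholders.
$sSQL = 'ALTER TABLE example_table ALTER COLUMN osm_id TYPE BIGINT';
$oDB->query($sSQL);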


@@ -1,27 +1,6 @@
ACLOCAL_AMFLAGS = -I osm2pgsql/m4
AUTOMAKE_OPTIONS = -Wno-portability
SUBDIRS = osm2pgsql module nominatim
NOMINATIM_SERVER ?= $(shell echo a | php -F lib/init.php -E 'echo CONST_Website_BaseURL."\n";')
NOMINATIM_DATABASE ?= $(shell echo a | php -F lib/init.php -E 'echo DB::parseDSN(CONST_Database_DSN)["database"];')
install:
@echo Nominatim needs to be executed directly from this directory. No install necessary.
test:
cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} lettuce -t -Fail -t -poldi-only
test-fast:
cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 lettuce -t -Fail -t -poldi-only
test-db:
cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} lettuce -t -Fail -t -poldi-only features/db
test-db-fast:
cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} NOMINATIM_REUSE_TEMPLATE=1 lettuce -t -Fail -t -poldi-only features/db
test-api:
cd tests; NOMINATIM_SERVER=${NOMINATIM_SERVER} lettuce -t -Fail -t -poldi-only features/api
.PHONY: test test-fast test-db test-db-fast test-api


@@ -1,4 +1,4 @@
AC_INIT(Nominatim,2.2)
AC_INIT(Nominatim,2.0)
if git rev-parse HEAD 2>/dev/null >/dev/null; then
AC_SUBST([PACKAGE_VERSION], [$PACKAGE_VERSION-git-`git rev-parse --short HEAD`])
fi


@@ -1,36 +0,0 @@
# This file includes a small subset of OpenLayers code, designed to be
# integrated into another application. It includes only the Layer types
# necessary to create tiled or untiled WMS, and does not include any Controls.
# This is the result of what was at the time called "Webmap.js" at the FOSS4G
# Web Mapping BOF.
[first]
[last]
[include]
OpenLayers/Map.js
OpenLayers/Kinetic.js
OpenLayers/Geometry/MultiLineString.js
OpenLayers/Geometry/MultiPolygon.js
OpenLayers/Format/WKT.js
OpenLayers/Layer/OSM.js
OpenLayers/Layer/Vector.js
OpenLayers/Layer/SphericalMercator.js
OpenLayers/Control/Attribution.js
OpenLayers/Control/KeyboardDefaults.js
OpenLayers/Control/Navigation.js
OpenLayers/Control/MousePosition.js
OpenLayers/Control/PanZoomBar.js
OpenLayers/Control/Permalink.js
OpenLayers/Control/TouchNavigation.js
OpenLayers/Style.js
OpenLayers/Protocol/HTTP.js
OpenLayers/Projection.js
OpenLayers/Renderer/SVG.js
OpenLayers/Renderer/VML.js
OpenLayers/Renderer/Canvas.js
[exclude]

File diff suppressed because one or more lines are too long

data/gb_postcode.sql (1696070 changed lines)

File diff suppressed because it is too large.


@@ -1,27 +0,0 @@
-- This data contains Ordnance Survey data © Crown copyright and database right 2010.
-- Code-Point Open contains Royal Mail data © Royal Mail copyright and database right 2010.
-- OS data may be used under the terms of the OS OpenData licence:
-- http://www.ordnancesurvey.co.uk/oswebsite/opendata/licence/docs/licence.pdf
SET statement_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = off;
SET check_function_bodies = false;
SET client_min_messages = warning;
SET escape_string_warning = off;
SET search_path = public, pg_catalog;
SET default_tablespace = '';
SET default_with_oids = false;
CREATE TABLE gb_postcode (
id integer,
postcode character varying(9),
geometry geometry,
CONSTRAINT enforce_dims_geometry CHECK ((st_ndims(geometry) = 2)),
CONSTRAINT enforce_srid_geometry CHECK ((st_srid(geometry) = 4326))
);
GRANT SELECT ON TABLE gb_postcode TO "www-data";
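A small illustration of how the gb_postcode table defined above can be queried. The column names come from the CREATE TABLE statement and the PostGIS functions are the same ones used elsewhere in this diff, but the example postcode and the $oDB wrapper call are assumptions, not code from the repository:

<?php
// Look up the centroid of a single GB postcode (illustrative only).
$sSQL = "SELECT postcode, ST_Y(ST_Centroid(geometry)) AS lat, ST_X(ST_Centroid(geometry)) AS lon";
$sSQL .= " FROM gb_postcode WHERE postcode = 'SW1A 1AA'";
$aRow = $oDB->getRow($sSQL);   // assumes a connected PEAR-DB style handle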

data/worldboundaries.sql (3840 changed lines)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large.


@@ -1,138 +0,0 @@
<?php
class PlaceLookup
{
protected $oDB;
protected $iPlaceID;
protected $aLangPrefOrder = array();
protected $bAddressDetails = false;
function PlaceLookup(&$oDB)
{
$this->oDB =& $oDB;
}
function setLanguagePreference($aLangPrefOrder)
{
$this->aLangPrefOrder = $aLangPrefOrder;
}
function setIncludeAddressDetails($bAddressDetails = true)
{
$this->bAddressDetails = $bAddressDetails;
}
function setPlaceID($iPlaceID)
{
$this->iPlaceID = $iPlaceID;
}
function setOSMID($sType, $iID)
{
$sSQL = "select place_id from placex where osm_type = '".pg_escape_string($sType)."' and osm_id = ".(int)$iID." order by type = 'postcode' asc";
$this->iPlaceID = $this->oDB->getOne($sSQL);
}
function lookup()
{
if (!$this->iPlaceID) return null;
$sLanguagePrefArraySQL = "ARRAY[".join(',',array_map("getDBQuoted", $this->aLangPrefOrder))."]";
$sSQL = "select placex.place_id, partition, osm_type, osm_id, class, type, admin_level, housenumber, street, isin, postcode, country_code, extratags, parent_place_id, linked_place_id, rank_address, rank_search, ";
$sSQL .= " coalesce(importance,0.75-(rank_search::float/40)) as importance, indexed_status, indexed_date, wikipedia, calculated_country_code, ";
$sSQL .= " get_address_by_language(place_id, $sLanguagePrefArraySQL) as langaddress,";
$sSQL .= " get_name_by_language(name, $sLanguagePrefArraySQL) as placename,";
$sSQL .= " get_name_by_language(name, ARRAY['ref']) as ref,";
$sSQL .= " st_y(centroid) as lat, st_x(centroid) as lon";
$sSQL .= " from placex where place_id = ".(int)$this->iPlaceID;
$aPlace = $this->oDB->getRow($sSQL);
if (!$aPlace['place_id']) return null;
if ($this->bAddressDetails)
{
$aAddress = $this->getAddressNames();
$aPlace['aAddress'] = $aAddress;
}
$aClassType = getClassTypes();
$sAddressType = '';
$sClassType = $aPlace['class'].':'.$aPlace['type'].':'.$aPlace['admin_level'];
if (isset($aClassType[$sClassType]) && isset($aClassType[$sClassType]['simplelabel']))
{
$sAddressType = $aClassType[$aClassType]['simplelabel'];
}
else
{
$sClassType = $aPlace['class'].':'.$aPlace['type'];
if (isset($aClassType[$sClassType]) && isset($aClassType[$sClassType]['simplelabel']))
$sAddressType = $aClassType[$sClassType]['simplelabel'];
else $sAddressType = $aPlace['class'];
}
$aPlace['addresstype'] = $sAddressType;
return $aPlace;
}
function getAddressDetails($bAll = false)
{
if (!$this->iPlaceID) return null;
$sLanguagePrefArraySQL = "ARRAY[".join(',',array_map("getDBQuoted", $this->aLangPrefOrder))."]";
$sSQL = "select *,get_name_by_language(name,$sLanguagePrefArraySQL) as localname from get_addressdata(".$this->iPlaceID.")";
if (!$bAll) $sSQL .= " WHERE isaddress OR type = 'country_code'";
$sSQL .= " order by rank_address desc,isaddress desc";
$aAddressLines = $this->oDB->getAll($sSQL);
if (PEAR::IsError($aAddressLines))
{
var_dump($aAddressLines);
exit;
}
return $aAddressLines;
}
function getAddressNames()
{
$aAddressLines = $this->getAddressDetails(false);;
$aAddress = array();
$aFallback = array();
$aClassType = getClassTypes();
foreach($aAddressLines as $aLine)
{
$bFallback = false;
$aTypeLabel = false;
if (isset($aClassType[$aLine['class'].':'.$aLine['type'].':'.$aLine['admin_level']])) $aTypeLabel = $aClassType[$aLine['class'].':'.$aLine['type'].':'.$aLine['admin_level']];
elseif (isset($aClassType[$aLine['class'].':'.$aLine['type']])) $aTypeLabel = $aClassType[$aLine['class'].':'.$aLine['type']];
elseif (isset($aClassType['boundary:administrative:'.((int)($aLine['rank_address']/2))]))
{
$aTypeLabel = $aClassType['boundary:administrative:'.((int)($aLine['rank_address']/2))];
$bFallback = true;
}
else
{
$aTypeLabel = array('simplelabel'=>'address'.$aLine['rank_address']);
$bFallback = true;
}
if ($aTypeLabel && ((isset($aLine['localname']) && $aLine['localname']) || (isset($aLine['housenumber']) && $aLine['housenumber'])))
{
$sTypeLabel = strtolower(isset($aTypeLabel['simplelabel'])?$aTypeLabel['simplelabel']:$aTypeLabel['label']);
$sTypeLabel = str_replace(' ','_',$sTypeLabel);
if (!isset($aAddress[$sTypeLabel]) || (isset($aFallback[$sTypeLabel]) && $aFallback[$sTypeLabel]) || $aLine['class'] == 'place')
{
$aAddress[$sTypeLabel] = $aLine['localname']?$aLine['localname']:$aLine['housenumber'];
}
$aFallback[$sTypeLabel] = $bFallback;
}
}
return $aAddress;
}
}
?>
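A minimal usage sketch for the PlaceLookup class shown above. The method names are taken from the diff; the surrounding setup (a connected PEAR-DB handle in $oDB, the language preference values, and the example OSM id) is assumed:

<?php
// Resolve an OSM node to a place and print its formatted address (illustrative only).
$oLookup = new PlaceLookup($oDB);
$oLookup->setLanguagePreference(array('name:en', 'name'));
$oLookup->setIncludeAddressDetails(true);
$oLookup->setOSMID('N', 123456);          // osm_type 'N' and osm_id 123456 are example values
$aPlace = $oLookup->lookup();             // returns a row array or null
if ($aPlace) echo $aPlace['langaddress']."\n";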


@@ -1,139 +0,0 @@
<?php
class ReverseGeocode
{
protected $oDB;
protected $fLat;
protected $fLon;
protected $iMaxRank = 28;
protected $aLangPrefOrder = array();
protected $bShowAddressDetails = true;
function ReverseGeocode(&$oDB)
{
$this->oDB =& $oDB;
}
function setLanguagePreference($aLangPref)
{
$this->aLangPrefOrder = $aLangPref;
}
function setIncludeAddressDetails($bAddressDetails = true)
{
$this->bAddressDetails = $bAddressDetails;
}
function setLatLon($fLat, $fLon)
{
$this->fLat = (float)$fLat;
$this->fLon = (float)$fLon;
}
function setRank($iRank)
{
$this->iMaxRank = $iRank;
}
function setZoom($iZoom)
{
// Zoom to rank, this could probably be calculated but a lookup gives fine control
$aZoomRank = array(
0 => 2, // Continent / Sea
1 => 2,
2 => 2,
3 => 4, // Country
4 => 4,
5 => 8, // State
6 => 10, // Region
7 => 10,
8 => 12, // County
9 => 12,
10 => 17, // City
11 => 17,
12 => 18, // Town / Village
13 => 18,
14 => 22, // Suburb
15 => 22,
16 => 26, // Street, TODO: major street?
17 => 26,
18 => 30, // or >, Building
19 => 30, // or >, Building
);
$this->iMaxRank = (isset($iZoom) && isset($aZoomRank[$iZoom]))?$aZoomRank[$iZoom]:28;
}
function lookup()
{
$sPointSQL = 'ST_SetSRID(ST_Point('.$this->fLon.','.$this->fLat.'),4326)';
$iMaxRank = $this->iMaxRank;
// Find the nearest point
$fSearchDiam = 0.0004;
$iPlaceID = null;
$aArea = false;
$fMaxAreaDistance = 1;
while(!$iPlaceID && $fSearchDiam < $fMaxAreaDistance)
{
$fSearchDiam = $fSearchDiam * 2;
// If we have to expand the search area by a large amount then we need a larger feature
// then there is a limit to how small the feature should be
if ($fSearchDiam > 2 && $iMaxRank > 4) $iMaxRank = 4;
if ($fSearchDiam > 1 && $iMaxRank > 9) $iMaxRank = 8;
if ($fSearchDiam > 0.8 && $iMaxRank > 10) $iMaxRank = 10;
if ($fSearchDiam > 0.6 && $iMaxRank > 12) $iMaxRank = 12;
if ($fSearchDiam > 0.2 && $iMaxRank > 17) $iMaxRank = 17;
if ($fSearchDiam > 0.1 && $iMaxRank > 18) $iMaxRank = 18;
if ($fSearchDiam > 0.008 && $iMaxRank > 22) $iMaxRank = 22;
if ($fSearchDiam > 0.001 && $iMaxRank > 26) $iMaxRank = 26;
$sSQL = 'select place_id,parent_place_id,rank_search from placex';
$sSQL .= ' WHERE ST_DWithin('.$sPointSQL.', geometry, '.$fSearchDiam.')';
$sSQL .= ' and rank_search != 28 and rank_search >= '.$iMaxRank;
$sSQL .= ' and (name is not null or housenumber is not null)';
$sSQL .= ' and class not in (\'waterway\',\'railway\',\'tunnel\',\'bridge\')';
$sSQL .= ' and indexed_status = 0 ';
$sSQL .= ' and (ST_GeometryType(geometry) not in (\'ST_Polygon\',\'ST_MultiPolygon\') ';
$sSQL .= ' OR ST_DWithin('.$sPointSQL.', centroid, '.$fSearchDiam.'))';
$sSQL .= ' ORDER BY ST_distance('.$sPointSQL.', geometry) ASC limit 1';
if (CONST_Debug) var_dump($sSQL);
$aPlace = $this->oDB->getRow($sSQL);
if (PEAR::IsError($aPlace))
{
failInternalError("Could not determine closest place.", $sSQL, $aPlace);
}
$iPlaceID = $aPlace['place_id'];
$iParentPlaceID = $aPlace['parent_place_id'];
}
// The point we found might be too small - use the address to find what it is a child of
if ($iPlaceID && $iMaxRank < 28)
{
if ($aPlace['rank_search'] > 28 && $iParentPlaceID)
{
$iPlaceID = $iParentPlaceID;
}
$sSQL = "select address_place_id from place_addressline where place_id = $iPlaceID order by abs(cached_rank_address - $iMaxRank) asc,cached_rank_address desc,isaddress desc,distance desc limit 1";
$iPlaceID = $this->oDB->getOne($sSQL);
if (PEAR::IsError($iPlaceID))
{
failInternalError("Could not get parent for place.", $sSQL, $iPlaceID);
}
if (!$iPlaceID)
{
$iPlaceID = $aPlace['place_id'];
}
}
$oPlaceLookup = new PlaceLookup($this->oDB);
$oPlaceLookup->setLanguagePreference($this->aLangPrefOrder);
$oPlaceLookup->setIncludeAddressDetails($this->bAddressDetails);
$oPlaceLookup->setPlaceId($iPlaceID);
return $oPlaceLookup->lookup();
}
}
?>
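A comparable usage sketch for the ReverseGeocode class above, again assuming a connected database handle; the coordinates are arbitrary example values:

<?php
// Reverse-geocode a coordinate at street-level detail (illustrative only).
$oReverse = new ReverseGeocode($oDB);
$oReverse->setLanguagePreference(array('name:en', 'name'));
$oReverse->setLatLon(52.5186, 13.4081);   // example coordinates
$oReverse->setZoom(18);                   // mapped to a maximum address rank internally
$aPlace = $oReverse->lookup();            // delegates the final lookup to PlaceLookup
if ($aPlace) echo $aPlace['langaddress']."\n";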


@@ -1,8 +1,4 @@
<?php
if (file_exists(getenv('NOMINATIM_SETTINGS')))
{
require_once(getenv('NOMINATIM_SETTINGS'));
}
require_once('init.php');
require_once('cmd.php');


@@ -1,16 +1,6 @@
<?php
require_once('init.php');
if (CONST_NoAccessControl)
{
header("Access-Control-Allow-Origin: *");
header("Access-Control-Allow-Methods: OPTIONS,GET");
if (!empty($_SERVER['HTTP_ACCESS_CONTROL_REQUEST_HEADERS']))
{
header("Access-Control-Allow-Headers: ".$_SERVER['HTTP_ACCESS_CONTROL_REQUEST_HEADERS']);
}
}
if ($_SERVER['REQUEST_METHOD'] == 'OPTIONS') exit;
require_once('init.php');
if (CONST_ClosedForIndexing && strpos(CONST_ClosedForIndexingExceptionIPs, ','.$_SERVER["REMOTE_ADDR"].',') === false)
{
@@ -18,47 +8,12 @@
exit;
}
$aBucketKeys = array();
if (isset($_SERVER["HTTP_REFERER"])) $aBucketKeys[] = str_replace('www.','',strtolower(parse_url($_SERVER["HTTP_REFERER"], PHP_URL_HOST)));
if (isset($_SERVER["REMOTE_ADDR"])) $aBucketKeys[] = $_SERVER["REMOTE_ADDR"];
if (isset($_GET["email"])) $aBucketKeys[] = $_GET["email"];
$fBucketVal = doBucket($aBucketKeys,
(defined('CONST_ConnectionBucket_PageType')?constant('CONST_ConnectionBucket_Cost_'.CONST_ConnectionBucket_PageType):1) + user_busy_cost(),
CONST_ConnectionBucket_LeakRate, CONST_ConnectionBucket_BlockLimit);
if ($fBucketVal > CONST_ConnectionBucket_WaitLimit && $fBucketVal < CONST_ConnectionBucket_BlockLimit)
if (strpos(CONST_BlockedIPs, ','.$_SERVER["REMOTE_ADDR"].',') !== false)
{
$m = getBucketMemcache();
$iCurrentSleeping = $m->increment('sleepCounter');
if (false === $iCurrentSleeping)
{
$m->add('sleepCounter', 0);
$iCurrentSleeping = $m->increment('sleepCounter');
}
if ($iCurrentSleeping >= CONST_ConnectionBucket_MaxSleeping || isBucketSleeping($aBucketKeys))
{
// Too many threads sleeping already. This becomes a hard block.
$fBucketVal = doBucket($aBucketKeys, CONST_ConnectionBucket_BlockLimit, CONST_ConnectionBucket_LeakRate, CONST_ConnectionBucket_BlockLimit);
}
else
{
setBucketSleeping($aBucketKeys, true);
sleep(($fBucketVal - CONST_ConnectionBucket_WaitLimit)/CONST_ConnectionBucket_LeakRate);
$fBucketVal = doBucket($aBucketKeys, CONST_ConnectionBucket_LeakRate, CONST_ConnectionBucket_LeakRate, CONST_ConnectionBucket_BlockLimit);
setBucketSleeping($aBucketKeys, false);
}
$m->decrement('sleepCounter');
}
if (strpos(CONST_BlockedIPs, ','.$_SERVER["REMOTE_ADDR"].',') !== false || $fBucketVal >= CONST_ConnectionBucket_BlockLimit)
{
header("HTTP/1.0 429 Too Many Requests");
echo "Your IP has been blocked. \n";
echo CONST_BlockMessage;
echo "Please create a nominatim trac ticket (http://trac.openstreetmap.org/newticket?component=nominatim) to request this to be removed. \n";
echo "Information on the Nominatim usage policy can be found here: http://wiki.openstreetmap.org/wiki/Nominatim#Usage_Policy \n";
exit;
}
header('Content-type: text/html; charset=utf-8');


@@ -4,7 +4,6 @@
require_once(CONST_BasePath.'/settings/settings.php');
require_once(CONST_BasePath.'/lib/lib.php');
require_once(CONST_BasePath.'/lib/leakybucket.php');
require_once(CONST_BasePath.'/lib/db.php');
if (get_magic_quotes_gpc())


@@ -1,168 +0,0 @@
<?php
function getBucketMemcache()
{
static $m;
if (!CONST_ConnectionBucket_MemcacheServerAddress) return null;
if (!isset($m))
{
$m = new Memcached();
$m->addServer(CONST_ConnectionBucket_MemcacheServerAddress, CONST_ConnectionBucket_MemcacheServerPort);
}
return $m;
}
function doBucket($asKey, $iRequestCost, $iLeakPerSecond, $iThreshold)
{
$m = getBucketMemcache();
if (!$m) return 0;
$iMaxVal = 0;
$t = time();
foreach($asKey as $sKey)
{
$aCurrentBlock = $m->get($sKey);
if (!$aCurrentBlock)
{
$aCurrentBlock = array($iRequestCost, $t, false);
}
else
{
// add RequestCost
// remove leak * the time since the last request
$aCurrentBlock[0] += $iRequestCost - ($t - $aCurrentBlock[1])*$iLeakPerSecond;
$aCurrentBlock[1] = $t;
}
if ($aCurrentBlock[0] <= 0)
{
$m->delete($sKey);
}
else
{
// If we have hit the threshold stop and record this to the block list
if ($aCurrentBlock[0] >= $iThreshold)
{
$aCurrentBlock[0] = $iThreshold;
// Make up to 10 attempts to record this to memcache (with locking to prevent conflicts)
$i = 10;
for($i = 0; $i < 10; $i++)
{
$aBlockedList = $m->get('blockedList', null, $hCasToken);
if (!$aBlockedList)
{
$aBlockedList = array();
$m->add('blockedList', $aBlockedList);
$aBlockedList = $m->get('blockedList', null, $hCasToken);
}
if (!isset($aBlockedList[$sKey]))
{
$aBlockedList[$sKey] = array(1, $t);
}
else
{
$aBlockedList[$sKey][0]++;
$aBlockedList[$sKey][1] = $t;
}
if (sizeof($aBlockedList) > CONST_ConnectionBucket_MaxBlockList)
{
uasort($aBlockedList, 'byValue1');
$aBlockedList = array_slice($aBlockedList, 0, CONST_ConnectionBucket_MaxBlockList);
}
$x = $m->cas($hCasToken, 'blockedList', $aBlockedList);
if ($x) break;
}
}
// Only keep in memcache until the time it would have expired (to avoid cluttering memcache)
$m->set($sKey, $aCurrentBlock, $t + 1 + $aCurrentBlock[0]/$iLeakPerSecond);
}
// Bucket result in the largest bucket we find
$iMaxVal = max($iMaxVal, $aCurrentBlock[0]);
}
return $iMaxVal;
}
function isBucketSleeping($asKey)
{
$m = getBucketMemcache();
if (!$m) return false;
foreach($asKey as $sKey)
{
$aCurrentBlock = $m->get($sKey);
if ($aCurrentBlock[2]) return true;
}
return false;
}
function setBucketSleeping($asKey, $bVal)
{
$m = getBucketMemcache();
if (!$m) return false;
$iMaxVal = 0;
$t = time();
foreach($asKey as $sKey)
{
$aCurrentBlock = $m->get($sKey);
$aCurrentBlock[2] = $bVal;
$m->set($sKey, $aCurrentBlock, $t + 1 + $aCurrentBlock[0]/CONST_ConnectionBucket_LeakRate);
}
return true;
}
function byValue1($a, $b)
{
if ($a[1] == $b[1])
{
return 0;
}
return ($a[1] > $b[1]) ? -1 : 1;
}
function byLastBlockTime($a, $b)
{
if ($a['lastBlockTimestamp'] == $b['lastBlockTimestamp'])
{
return 0;
}
return ($a['lastBlockTimestamp'] > $b['lastBlockTimestamp']) ? -1 : 1;
}
function getBucketBlocks()
{
$m = getBucketMemcache();
if (!$m) return null;
$t = time();
$aBlockedList = $m->get('blockedList', null, $hCasToken);
if (!$aBlockedList) $aBlockedList = array();
foreach($aBlockedList as $sKey => $aDetails)
{
$aCurrentBlock = $m->get($sKey);
if (!$aCurrentBlock) $aCurrentBlock = array(0, $t);
$iCurrentBucketSize = max(0, $aCurrentBlock[0] - ($t - $aCurrentBlock[1])*CONST_ConnectionBucket_LeakRate);
$aBlockedList[$sKey] = array(
'totalBlocks' => $aDetails[0],
'lastBlockTimestamp' => $aDetails[1],
'isSleeping' => (isset($aCurrentBlock[2])?$aCurrentBlock[2]:false),
'currentBucketSize' => $iCurrentBucketSize,
'currentlyBlocked' => $iCurrentBucketSize + (CONST_ConnectionBucket_Cost_Reverse) >= CONST_ConnectionBucket_BlockLimit,
);
}
uasort($aBlockedList, 'byLastBlockTime');
return $aBlockedList;
}
function clearBucketBlocks()
{
$m = getBucketMemcache();
if (!$m) return false;
$m->delete('blockedList');
return true;
}
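The functions above implement a memcache-backed leaky bucket ("rate limiting using memcache" in the change log): each request adds a cost to a per-key bucket, the bucket drains at a fixed leak rate, and keys whose level reaches the threshold are recorded on a shared block list. A sketch of how the entry point is driven, mirroring the call in the website initialisation code earlier in this diff; the request cost of 1 is an assumed example value:

<?php
// Charge this request against the client's bucket and refuse it if the limit is reached.
$aBucketKeys = array($_SERVER['REMOTE_ADDR']);
$fBucketVal = doBucket($aBucketKeys, 1,                       // cost of this request (example)
                       CONST_ConnectionBucket_LeakRate,       // drain per second
                       CONST_ConnectionBucket_BlockLimit);    // hard limit
if ($fBucketVal >= CONST_ConnectionBucket_BlockLimit)
{
    header('HTTP/1.0 429 Too Many Requests');
    exit;
}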


@@ -1,6 +1,6 @@
<?php
function failInternalError($sError, $sSQL = false, $vDumpVar = false)
function failInternalError($sError, $sSQL = false, $vDumpVar = false)
{
header('HTTP/1.0 500 Internal Server Error');
header('Content-type: text/html; charset=utf-8');
@@ -11,12 +11,10 @@
if (CONST_Debug)
{
echo "<hr><h2>Debugging Information</h2><br>";
if ($sSQL)
{
if ($sSQL) {
echo "<h3>SQL query</h3><code>".$sSQL."</code>";
}
if ($vDumpVar)
{
if ($vDumpVar) {
echo "<h3>Result</h3> <code>";
var_dump($vDumpVar);
echo "</code>";
@@ -24,50 +22,34 @@
}
echo "\n</body></html>\n";
exit;
}
function userError($sError)
{
header('HTTP/1.0 400 Bad Request');
header('Content-type: text/html; charset=utf-8');
echo "<html><body><h1>Bad Request</h1>";
echo '<p>Nominatim has encountered an error with your request.</p>';
echo "<p><b>Details:</b> ".$sError,"</p>";
echo '<p>If you feel this error is incorrect feel free to report the bug in the <a href="http://trac.openstreetmap.org">OSM bug database</a>. Please include the error message above and the URL you used.</p>';
echo "\n</body></html>\n";
exit;
}
function fail($sError, $sUserError = false)
{
if (!$sUserError) $sUserError = $sError;
error_log('ERROR: '.$sError);
echo $sUserError."\n";
exit(-1);
exit;
}
function getBlockingProcesses()
{
$sStats = file_get_contents('/proc/stat');
if (preg_match('/procs_blocked ([0-9]+)/i', $sStats, $aMatches))
{
$sStats = file_get_contents('/proc/stat');
if (preg_match('/procs_blocked ([0-9]+)/i', $sStats, $aMatches))
{
return (int)$aMatches[1];
}
}
return 0;
}
function getLoadAverage()
{
$sLoadAverage = file_get_contents('/proc/loadavg');
$aLoadAverage = explode(' ',$sLoadAverage);
return (float)$aLoadAverage[0];
$aLoadAverage = explode(' ',$sLoadAverage);
return (int)$aLoadAverage[0];
}
function getProcessorCount()
{
$sCPU = file_get_contents('/proc/cpuinfo');
@@ -75,7 +57,6 @@
return sizeof($aMatches[0]);
}
function getTotalMemoryMB()
{
$sCPU = file_get_contents('/proc/meminfo');
@@ -83,7 +64,6 @@
return (int)($aMatches[1]/1024);
}
function getCacheMemoryMB()
{
$sCPU = file_get_contents('/proc/meminfo');
@@ -91,63 +71,48 @@
return (int)($aMatches[1]/1024);
}
function bySearchRank($a, $b)
{
if ($a['iSearchRank'] == $b['iSearchRank'])
return strlen($a['sOperator']) + strlen($a['sHouseNumber']) - strlen($b['sOperator']) - strlen($b['sHouseNumber']);
if ($a['iSearchRank'] == $b['iSearchRank']) return 0;
return ($a['iSearchRank'] < $b['iSearchRank']?-1:1);
}
function byImportance($a, $b)
{
if ($a['importance'] != $b['importance'])
return ($a['importance'] > $b['importance']?-1:1);
/*
if ($a['aPointPolygon']['numfeatures'] != $b['aPointPolygon']['numfeatures'])
return ($a['aPointPolygon']['numfeatures'] > $b['aPointPolygon']['numfeatures']?-1:1);
if ($a['aPointPolygon']['area'] != $b['aPointPolygon']['area'])
return ($a['aPointPolygon']['area'] > $b['aPointPolygon']['area']?-1:1);
// if ($a['levenshtein'] != $b['levenshtein'])
// return ($a['levenshtein'] < $b['levenshtein']?-1:1);
/*
if ($a['aPointPolygon']['numfeatures'] != $b['aPointPolygon']['numfeatures'])
return ($a['aPointPolygon']['numfeatures'] > $b['aPointPolygon']['numfeatures']?-1:1);
if ($a['aPointPolygon']['area'] != $b['aPointPolygon']['area'])
return ($a['aPointPolygon']['area'] > $b['aPointPolygon']['area']?-1:1);
// if ($a['levenshtein'] != $b['levenshtein'])
// return ($a['levenshtein'] < $b['levenshtein']?-1:1);
if ($a['rank_search'] != $b['rank_search'])
return ($a['rank_search'] < $b['rank_search']?-1:1);
*/
return ($a['rank_search'] < $b['rank_search']?-1:1);
*/
return ($a['foundorder'] < $b['foundorder']?-1:1);
}
function getPreferredLanguages($sLangString=false)
function getPreferredLanguages()
{
if (!$sLangString)
// If we have been provided the value in $_GET it overrides browser value
if (isset($_GET['accept-language']) && $_GET['accept-language'])
{
// If we have been provided the value in $_GET it overrides browser value
if (isset($_GET['accept-language']) && $_GET['accept-language'])
{
$_SERVER["HTTP_ACCEPT_LANGUAGE"] = $_GET['accept-language'];
$sLangString = $_GET['accept-language'];
}
else if (isset($_SERVER["HTTP_ACCEPT_LANGUAGE"]))
{
$sLangString = $_SERVER["HTTP_ACCEPT_LANGUAGE"];
}
$_SERVER["HTTP_ACCEPT_LANGUAGE"] = $_GET['accept-language'];
}
$aLanguages = array();
if ($sLangString)
if (preg_match_all('/(([a-z]{1,8})(-[a-z]{1,8})?)\s*(;\s*q\s*=\s*(1|0\.[0-9]+))?/i', $_SERVER['HTTP_ACCEPT_LANGUAGE'], $aLanguagesParse, PREG_SET_ORDER))
{
if (preg_match_all('/(([a-z]{1,8})(-[a-z]{1,8})?)\s*(;\s*q\s*=\s*(1|0\.[0-9]+))?/i', $sLangString, $aLanguagesParse, PREG_SET_ORDER))
foreach($aLanguagesParse as $iLang => $aLanguage)
{
foreach($aLanguagesParse as $iLang => $aLanguage)
{
$aLanguages[$aLanguage[1]] = isset($aLanguage[5])?(float)$aLanguage[5]:1 - ($iLang/100);
if (!isset($aLanguages[$aLanguage[2]])) $aLanguages[$aLanguage[2]] = $aLanguages[$aLanguage[1]]/10;
}
arsort($aLanguages);
$aLanguages[$aLanguage[1]] = isset($aLanguage[5])?(float)$aLanguage[5]:1 - ($iLang/100);
if (!isset($aLanguages[$aLanguage[2]])) $aLanguages[$aLanguage[2]] = $aLanguages[$aLanguage[1]]/10;
}
arsort($aLanguages);
}
if (!sizeof($aLanguages) && CONST_Default_Language) $aLanguages = array(CONST_Default_Language=>1);
if (!sizeof($aLanguages)) $aLanguages = array(CONST_Default_Language=>1);
foreach($aLanguages as $sLangauge => $fLangauagePref)
{
$aLangPrefOrder['short_name:'.$sLangauge] = 'short_name:'.$sLangauge;
@@ -173,27 +138,23 @@
return $aLangPrefOrder;
}
function getWordSets($aWords, $iDepth)
function getWordSets($aWords)
{
$aResult = array(array(join(' ',$aWords)));
$sFirstToken = '';
if ($iDepth < 8) {
while(sizeof($aWords) > 1)
while(sizeof($aWords) > 1)
{
$sWord = array_shift($aWords);
$sFirstToken .= ($sFirstToken?' ':'').$sWord;
$aRest = getWordSets($aWords);
foreach($aRest as $aSet)
{
$sWord = array_shift($aWords);
$sFirstToken .= ($sFirstToken?' ':'').$sWord;
$aRest = getWordSets($aWords, $iDepth+1);
foreach($aRest as $aSet)
{
$aResult[] = array_merge(array($sFirstToken),$aSet);
}
$aResult[] = array_merge(array($sFirstToken),$aSet);
}
}
return $aResult;
}
function getTokensFromSets($aSets)
{
$aTokens = array();
@@ -209,39 +170,37 @@
return $aTokens;
}
/*
GB Postcode functions
*/
GB Postcode functions
*/
function gbPostcodeAlphaDifference($s1, $s2)
{
$aValues = array(
'A'=>0,
'B'=>1,
'D'=>2,
'E'=>3,
'F'=>4,
'G'=>5,
'H'=>6,
'J'=>7,
'L'=>8,
'N'=>9,
'O'=>10,
'P'=>11,
'Q'=>12,
'R'=>13,
'S'=>14,
'T'=>15,
'U'=>16,
'W'=>17,
'X'=>18,
'Y'=>19,
'Z'=>20);
'A'=>0,
'B'=>1,
'D'=>2,
'E'=>3,
'F'=>4,
'G'=>5,
'H'=>6,
'J'=>7,
'L'=>8,
'N'=>9,
'O'=>10,
'P'=>11,
'Q'=>12,
'R'=>13,
'S'=>14,
'T'=>15,
'U'=>16,
'W'=>17,
'X'=>18,
'Y'=>19,
'Z'=>20);
return abs(($aValues[$s1[0]]*21+$aValues[$s1[1]]) - ($aValues[$s2[0]]*21+$aValues[$s2[1]]));
}
function gbPostcodeCalculate($sPostcode, $sPostcodeSector, $sPostcodeEnd, &$oDB)
{
// Try an exact match on the gb_postcode table
@@ -252,22 +211,15 @@
var_dump($sSQL, $aNearPostcodes);
exit;
}
if (sizeof($aNearPostcodes))
{
$aPostcodes = array();
foreach($aNearPostcodes as $aPostcode)
{
$aPostcodes[] = array('lat' => $aPostcode['lat'], 'lon' => $aPostcode['lon'], 'radius' => 0.005);
}
return $aPostcodes;
return array(array('lat' => $aNearPostcodes[0]['lat'], 'lon' => $aNearPostcodes[0]['lon'], 'radius' => 0.005));
}
return false;
}
function usPostcodeCalculate($sPostcode, &$oDB)
{
$iZipcode = (int)$sPostcode;
@@ -283,7 +235,7 @@
if (!sizeof($aNearPostcodes))
{
$sSQL = 'select zipcode,ST_X(ST_Centroid(geometry)) as lon,ST_Y(ST_Centroid(geometry)) as lat from us_zipcode where zipcode between '.($iZipcode-100).' and '.($iZipcode+100).' order by abs(zipcode - '.$iZipcode.') asc limit 5';
$sSQL = 'select zipcode,ST_X(ST_Centroid(geometry)) as lon,ST_Y(ST_Centroid(geometry)) as lat from us_zipcode where zipcode between '.($iZipcode-100).' and '.($iZipcode+100).' order by abs(zipcode - '.$iZipcode.') asc limit 5';
$aNearPostcodes = $oDB->getAll($sSQL);
if (PEAR::IsError($aNearPostcodes))
{
@@ -307,7 +259,7 @@
$fFac = 1;
else
$fFac = 1/($iDiff*$iDiff);
$fTotalFac += $fFac;
$fTotalLat += $aPostcode['lat'] * $fFac;
$fTotalLon += $aPostcode['lon'] * $fFac;
@@ -321,35 +273,30 @@
return false;
/*
$fTotalFac is a surprisingly good indicator of accuracy
$iZoom = 18 + round(log($fTotalFac,32));
$iZoom = max(13,min(18,$iZoom));
*/
$fTotalFac is a surprisingly good indicator of accuracy
$iZoom = 18 + round(log($fTotalFac,32));
$iZoom = max(13,min(18,$iZoom));
*/
}
function getClassTypes()
{
return array(
'boundary:administrative:1' => array('label'=>'Continent','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'boundary:administrative:2' => array('label'=>'Country','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'place:country' => array('label'=>'Country','frequency'=>0,'icon'=>'poi_boundary_administrative','defzoom'=>6, 'defdiameter' => 15,),
'boundary:administrative:3' => array('label'=>'State','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'boundary:administrative:4' => array('label'=>'State','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'place:state' => array('label'=>'State','frequency'=>0,'icon'=>'poi_boundary_administrative','defzoom'=>8, 'defdiameter' => 5.12,),
'boundary:administrative:5' => array('label'=>'State District','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'boundary:administrative:6' => array('label'=>'County','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'boundary:administrative:7' => array('label'=>'County','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'place:county' => array('label'=>'County','frequency'=>108,'icon'=>'poi_boundary_administrative','defzoom'=>10, 'defdiameter' => 1.28,),
'boundary:administrative:8' => array('label'=>'City','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'place:city' => array('label'=>'City','frequency'=>66,'icon'=>'poi_place_city','defzoom'=>12, 'defdiameter' => 0.32,),
'boundary:administrative:9' => array('label'=>'City District','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'boundary:administrative:10' => array('label'=>'Suburb','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'boundary:administrative:11' => array('label'=>'Neighbourhood','frequency'=>0,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'place:region' => array('label'=>'Region','frequency'=>0,'icon'=>'poi_boundary_administrative','defzoom'=>8, 'defdiameter' => 0.04,),
'place:region' => array('label'=>'Region','frequency'=>0,'icon'=>'poi_boundary_administrative','defzoom'=>8, 'defdiameter' => 5.12,),
'place:island' => array('label'=>'Island','frequency'=>288,'icon'=>'','defzoom'=>11, 'defdiameter' => 0.64,),
'boundary:administrative' => array('label'=>'Administrative','frequency'=>413,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'boundary:postal_code' => array('label'=>'Postcode','frequency'=>413,'icon'=>'poi_boundary_administrative', 'defdiameter' => 0.32,),
'place:town' => array('label'=>'Town','frequency'=>1497,'icon'=>'poi_place_town','defzoom'=>14, 'defdiameter' => 0.08,),
'place:village' => array('label'=>'Village','frequency'=>11230,'icon'=>'poi_place_village','defzoom'=>15, 'defdiameter' => 0.04,),
'place:hamlet' => array('label'=>'Hamlet','frequency'=>7075,'icon'=>'poi_place_village','defzoom'=>15, 'defdiameter' => 0.04,),
@@ -386,8 +333,6 @@
'landuse:commercial' => array('label'=>'Commercial','frequency'=>657,'icon'=>'',),
'place:airport' => array('label'=>'Airport','frequency'=>36,'icon'=>'transport_airport2', 'defdiameter' => 0.03,),
'aeroway:aerodrome' => array('label'=>'Aerodrome','frequency'=>36,'icon'=>'transport_airport2', 'defdiameter' => 0.03,),
'aeroway' => array('label'=>'Aeroway','frequency'=>36,'icon'=>'transport_airport2', 'defdiameter' => 0.03,),
'railway:station' => array('label'=>'Station','frequency'=>3431,'icon'=>'transport_train_station2', 'defdiameter' => 0.01,),
'amenity:place_of_worship' => array('label'=>'Place Of Worship','frequency'=>9049,'icon'=>'place_of_worship_unknown3',),
'amenity:pub' => array('label'=>'Pub','frequency'=>18969,'icon'=>'food_pub',),
@@ -498,11 +443,11 @@
'place:house_number' => array('label'=>'House Number','frequency'=>2086,'icon'=>'','defzoom'=>18,),
'place:country_code' => array('label'=>'Country Code','frequency'=>2086,'icon'=>'','defzoom'=>18,),
//
//
'leisure:pitch' => array('label'=>'Pitch','frequency'=>762,'icon'=>'',),
'highway:unsurfaced' => array('label'=>'Unsurfaced','frequency'=>492,'icon'=>'',),
'historic:ruins' => array('label'=>'Ruins','frequency'=>483,'icon'=>'tourist_ruin',),
'historic:ruins' => array('label'=>'Ruins','frequency'=>483,'icon'=>'shopping_jewelry',),
'amenity:college' => array('label'=>'College','frequency'=>473,'icon'=>'education_school',),
'historic:monument' => array('label'=>'Monument','frequency'=>470,'icon'=>'tourist_monument',),
'railway:subway' => array('label'=>'Subway','frequency'=>385,'icon'=>'',),
@@ -619,7 +564,6 @@
'natural:scrub' => array('label'=>'Scrub','frequency'=>20,'icon'=>'',),
'natural:fen' => array('label'=>'Fen','frequency'=>20,'icon'=>'',),
'building:yes' => array('label'=>'Building','frequency'=>200,'icon'=>'',),
'mountain_pass:yes' => array('label'=>'Mountain Pass','frequency'=>200,'icon'=>'',),
'amenity:parking' => array('label'=>'Parking','frequency'=>3157,'icon'=>'',),
'highway:bus_stop' => array('label'=>'Bus Stop','frequency'=>35777,'icon'=>'transport_bus_stop2',),
@@ -633,10 +577,9 @@
'railway:disused_station' => array('label'=>'Disused Station','frequency'=>114,'icon'=>'',),
'railway:abandoned' => array('label'=>'Abandoned','frequency'=>641,'icon'=>'',),
'railway:disused' => array('label'=>'Disused','frequency'=>72,'icon'=>'',),
);
);
}
function getClassTypesWithImportance()
{
$aOrders = getClassTypes();
@@ -647,31 +590,27 @@
}
return $aOrders;
}
function javascript_renderData($xVal)
{
header("Access-Control-Allow-Origin: *");
function javascript_renderData($xVal)
{
header("Access-Control-Allow-Origin: *");
$jsonout = json_encode($xVal);
$jsonout = json_encode($xVal);
if( ! isset($_GET['json_callback']))
{
if( ! isset($_GET['json_callback'])) {
header("Content-Type: application/json; charset=UTF-8");
echo $jsonout;
} else
{
if (preg_match('/^[$_\p{L}][$_\p{L}\p{Nd}.[\]]*$/u',$_GET['json_callback']))
{
} else {
if (preg_match('/^[$_\p{L}][$_\p{L}\p{Nd}.[\]]*$/u',$_GET['json_callback'])) {
header("Content-Type: application/javascript; charset=UTF-8");
echo $_GET['json_callback'].'('.$jsonout.')';
}
else
{
} else {
header('HTTP/1.0 400 Bad Request');
}
}
}
}
function _debugDumpGroupedSearches($aData, $aTokens)
{
@@ -690,11 +629,11 @@
}
}
echo "<table border=\"1\">";
echo "<tr><th>rank</th><th>Name Tokens</th><th>Name Not</th><th>Address Tokens</th><th>Address Not</th><th>country</th><th>operator</th><th>class</th><th>type</th><th>house#</th><th>Lat</th><th>Lon</th><th>Radius</th></tr>";
echo "<tr><th>rank</th><th>Name Tokens</th><th>Address Tokens</th><th>country</th><th>operator</th><th>class</th><th>type</th><th>house#</th><th>Lat</th><th>Lon</th><th>Radius</th></tr>";
foreach($aData as $iRank => $aRankedSet)
{
foreach($aRankedSet as $aRow)
{
{
echo "<tr>";
echo "<td>$iRank</td>";
@@ -707,15 +646,6 @@
}
echo "</td>";
echo "<td>";
$sSep = '';
foreach($aRow['aNameNonSearch'] as $iWordID)
{
echo $sSep.'#'.$aWordsIDs[$iWordID].'#';
$sSep = ', ';
}
echo "</td>";
echo "<td>";
$sSep = '';
foreach($aRow['aAddress'] as $iWordID)
@@ -725,15 +655,6 @@
}
echo "</td>";
echo "<td>";
$sSep = '';
foreach($aRow['aAddressNonSearch'] as $iWordID)
{
echo $sSep.'#'.$aWordsIDs[$iWordID].'#';
$sSep = ', ';
}
echo "</td>";
echo "<td>".$aRow['sCountryCode']."</td>";
echo "<td>".$aRow['sOperator']."</td>";
@@ -745,7 +666,7 @@
echo "<td>".$aRow['fLat']."</td>";
echo "<td>".$aRow['fLon']."</td>";
echo "<td>".$aRow['fRadius']."</td>";
echo "</tr>";
}
}
@@ -756,51 +677,120 @@
function getAddressDetails(&$oDB, $sLanguagePrefArraySQL, $iPlaceID, $sCountryCode = false, $bRaw = false)
{
$sSQL = "select *,get_name_by_language(name,$sLanguagePrefArraySQL) as localname from get_addressdata($iPlaceID)";
if (!$bRaw) $sSQL .= " WHERE isaddress OR type = 'country_code'";
IF (!$bRaw) $sSQL .= " WHERE isaddress OR type = 'country_code'";
$sSQL .= " order by rank_address desc,isaddress desc";
$aAddressLines = $oDB->getAll($sSQL);
if (PEAR::IsError($aAddressLines))
{
var_dump($aAddressLines);
exit;
}
$aAddressLines = $oDB->getAll($sSQL);
if (PEAR::IsError($aAddressLines))
{
var_dump($aAddressLines);
exit;
}
if ($bRaw) return $aAddressLines;
//echo "<pre>";
//var_dump($aAddressLines);
//echo "<pre>";
//var_dump($aAddressLines);
$aAddress = array();
$aFallback = array();
$aClassType = getClassTypes();
foreach($aAddressLines as $aLine)
{
$bFallback = false;
$aTypeLabel = false;
if (isset($aClassType[$aLine['class'].':'.$aLine['type'].':'.$aLine['admin_level']])) $aTypeLabel = $aClassType[$aLine['class'].':'.$aLine['type'].':'.$aLine['admin_level']];
elseif (isset($aClassType[$aLine['class'].':'.$aLine['type']])) $aTypeLabel = $aClassType[$aLine['class'].':'.$aLine['type']];
elseif (isset($aClassType['boundary:administrative:'.((int)($aLine['rank_address']/2))]))
{
$aTypeLabel = $aClassType['boundary:administrative:'.((int)($aLine['rank_address']/2))];
$bFallback = true;
}
else
{
$aTypeLabel = array('simplelabel'=>'address'.$aLine['rank_address']);
$bFallback = true;
}
if ($aTypeLabel && ((isset($aLine['localname']) && $aLine['localname']) || (isset($aLine['housenumber']) && $aLine['housenumber'])))
else $aTypeLabel = array('simplelabel'=>$aLine['class']);
if ($aTypeLabel && ($aLine['localname'] || $aLine['housenumber']))
{
$sTypeLabel = strtolower(isset($aTypeLabel['simplelabel'])?$aTypeLabel['simplelabel']:$aTypeLabel['label']);
$sTypeLabel = str_replace(' ','_',$sTypeLabel);
if (!isset($aAddress[$sTypeLabel]) || (isset($aFallback[$sTypeLabel]) && $aFallback[$sTypeLabel]) || $aLine['class'] == 'place')
{
$aAddress[$sTypeLabel] = $aLine['localname']?$aLine['localname']:$aLine['housenumber'];
}
$aFallback[$sTypeLabel] = $bFallback;
if (!isset($aAddress[$sTypeLabel]) && $aLine['localname']) $aAddress[$sTypeLabel] = $aLine['localname']?$aLine['localname']:$aLine['housenumber'];
}
}
//var_dump($aAddress);
//exit;
return $aAddress;
$aHouseNumber = $oDB->getRow('select housenumber, get_name_by_language(name,ARRAY[\'addr:housename\']) as housename,rank_search,postcode from placex where place_id = '.$iPlaceID);
$sHouseNumber = $aHouseNumber['housenumber'];
$sHouseName = $aHouseNumber['housename'];
$sPostcode = $aHouseNumber['postcode'];
$iRank = $aHouseNumber['rank_search'];
// Address
$sSQL = "select country_code, placex.place_id, osm_type, osm_id, class, type, housenumber, admin_level, rank_address, rank_search, ";
$sSQL .= "get_searchrank_label(rank_search) as rank_search_label, fromarea, isaddress, distance, ";
$sSQL .= " CASE WHEN type = 'postcode' THEN postcode ELSE get_name_by_language(name,$sLanguagePrefArraySQL) END as localname, ";
$sSQL .= " length(name::text) as namelength ";
$sSQL .= " from place_addressline join placex on (address_place_id = placex.place_id)";
$sSQL .= " where place_addressline.place_id = $iPlaceID and (rank_address > 0 OR address_place_id = $iPlaceID)";
if (!$bRaw) $sSQL .= " and isaddress";
$sSQL .= " order by cached_rank_address desc,isaddress desc,fromarea desc,distance asc,rank_search desc,namelength desc";
//var_dump($sSQL);
$aAddressLines = $oDB->getAll($sSQL);
if (PEAR::IsError($aAddressLines))
{
var_dump($aAddressLines);
exit;
}
if ($bRaw) return $aAddressLines;
$aClassType = getClassTypes();
$iMinRank = 100;
$aAddress = array();
if ($iRank >= 28 && $sHouseNumber) $aAddress['house_number'] = $sHouseNumber;
if ($iRank >= 28 && $sHouseName) $aAddress['house_name'] = $sHouseName;
foreach($aAddressLines as $aLine)
{
if (!$sCountryCode) $sCountryCode = $aLine['country_code'];
if ($aLine['rank_address'] < $iMinRank)
{
$aTypeLabel = false;
if (isset($aClassType[$aLine['class'].':'.$aLine['type'].':'.$aLine['admin_level']])) $aTypeLabel = $aClassType[$aLine['class'].':'.$aLine['type'].':'.$aLine['admin_level']];
elseif (isset($aClassType[$aLine['class'].':'.$aLine['type']])) $aTypeLabel = $aClassType[$aLine['class'].':'.$aLine['type']];
else $aTypeLabel = array('simplelabel'=>$aLine['class']);
if ($aTypeLabel && ($aLine['localname'] || $aLine['housenumber']))
{
$sTypeLabel = strtolower(isset($aTypeLabel['simplelabel'])?$aTypeLabel['simplelabel']:$aTypeLabel['label']);
$sTypeLabel = str_replace(' ','_',$sTypeLabel);
if (!isset($aAddress[$sTypeLabel]) && $aLine['localname']) $aAddress[$sTypeLabel] = $aLine['localname']?$aLine['localname']:$aLine['housenumber'];
}
$iMinRank = $aLine['rank_address'];
}
}
if ($sPostcode)
{
$aAddress['postcode'] = $sPostcode;
}
if ($iMinRank > 4 && $sCountryCode)
{
$sSQL = "select get_name_by_language(country_name.name,$sLanguagePrefArraySQL) as name";
$sSQL .= " from country_name where country_code = '$sCountryCode'";
$sCountryName = $oDB->getOne($sSQL);
if ($sCountryName)
{
$aAddress['country'] = $sCountryName;
}
}
if ($sCountryCode)
{
$aAddress['country_code'] = $sCountryCode;
}
return $aAddress;
}
function getWordSuggestions(&$oDB, $sWord)
{
$sWordQuoted = getDBQuoted(trim($sWord));
$sSQL = "select *,levenshtein($sWordQuoted,word) from test_token ";
$sSQL .= "where (metaphone = dmetaphone($sWordQuoted) or metaphonealt = dmetaphone($sWordQuoted) or ";
$sSQL .= "metaphone = dmetaphone_alt($sWordQuoted) or metaphonealt = dmetaphone_alt($sWordQuoted)) ";
$sSQL .= "and len between length($sWordQuoted)-2 and length($sWordQuoted)+2 ";
$sSQL .= "and levenshtein($sWordQuoted,word) < 3 ";
$sSQL .= "order by levenshtein($sWordQuoted,word) asc, abs(len - length($sWordQuoted)) asc limit 20";
$aSimilar = $oDB->getAll($sSQL);
return $aSimilar;
}
function geocodeReverse($fLat, $fLon, $iZoom=18)
{
@@ -810,27 +800,27 @@
// Zoom to rank, this could probably be calculated but a lookup gives fine control
$aZoomRank = array(
0 => 2, // Continent / Sea
1 => 2,
2 => 2,
3 => 4, // Country
4 => 4,
5 => 8, // State
6 => 10, // Region
7 => 10,
8 => 12, // County
9 => 12,
10 => 17, // City
11 => 17,
12 => 18, // Town / Village
13 => 18,
14 => 22, // Suburb
15 => 22,
16 => 26, // Street, TODO: major street?
17 => 26,
18 => 30, // or >, Building
19 => 30, // or >, Building
);
0 => 2, // Continent / Sea
1 => 2,
2 => 2,
3 => 4, // Country
4 => 4,
5 => 8, // State
6 => 10, // Region
7 => 10,
8 => 12, // County
9 => 12,
10 => 17, // City
11 => 17,
12 => 18, // Town / Village
13 => 18,
14 => 22, // Suburb
15 => 22,
16 => 26, // Street, TODO: major street?
17 => 26,
18 => 30, // or >, Building
19 => 30, // or >, Building
);
$iMaxRank = isset($aZoomRank[$iZoom])?$aZoomRank[$iZoom]:28;
// Find the nearest point
@@ -861,14 +851,14 @@
$sSQL .= ' and (ST_GeometryType(geometry) not in (\'ST_Polygon\',\'ST_MultiPolygon\') ';
$sSQL .= ' OR ST_DWithin('.$sPointSQL.', ST_Centroid(geometry), '.$fSearchDiam.'))';
$sSQL .= ' ORDER BY ST_distance('.$sPointSQL.', geometry) ASC limit 1';
//var_dump($sSQL);
//var_dump($sSQL);
$aPlace = $oDB->getRow($sSQL);
if (PEAR::IsError($aPlace))
$iPlaceID = $aPlace['place_id'];
if (PEAR::IsError($iPlaceID))
{
var_Dump($sSQL, $aPlace);
var_Dump($sSQL, $iPlaceID);
exit;
}
$iPlaceID = $aPlace['place_id'];
}
// The point we found might be too small - use the address to find what it is a child of
@@ -878,7 +868,7 @@
$iPlaceID = $oDB->getOne($sSQL);
if (PEAR::IsError($iPlaceID))
{
var_Dump($sSQL, $iPlaceID);
var_Dump($sSQL, $iPlaceID);
exit;
}
@@ -888,7 +878,7 @@
$iPlaceID = $oDB->getOne($sSQL);
if (PEAR::IsError($iPlaceID))
{
var_Dump($sSQL, $iPlaceID);
var_Dump($sSQL, $iPlaceID);
exit;
}
}
@@ -900,98 +890,3 @@
return $iPlaceID;
}
function addQuotes($s)
{
return "'".$s."'";
}
// returns boolean
function validLatLon($fLat,$fLon)
{
return ($fLat <= 90.1 && $fLat >= -90.1 && $fLon <= 180.1 && $fLon >= -180.1);
}
// Do we have anything that looks like a lat/lon pair?
// returns array(lat,lon,query_with_lat_lon_removed)
// or null
function looksLikeLatLonPair($sQuery)
{
$sFound = null;
$fQueryLat = null;
$fQueryLon = null;
// degrees decimal minutes
// N 40 26.767, W 79 58.933
// N 40°26.767, W 79°58.933
// 1 2 3 4 5 6
if (preg_match('/\\b([NS])[ ]+([0-9]+[0-9.]*)[° ]+([0-9.]+)?[\']*[, ]+([EW])[ ]+([0-9]+)[° ]+([0-9]+[0-9.]*)[\']*?\\b/', $sQuery, $aData))
{
$sFound = $aData[0];
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60);
$fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[5] + $aData[6]/60);
}
// degrees decimal minutes
// 40 26.767 N, 79 58.933 W
// 40° 26.767 N 79° 58.933 W
// 1 2 3 4 5 6
elseif (preg_match('/\\b([0-9]+)[° ]+([0-9]+[0-9.]*)?[\']*[ ]+([NS])[, ]+([0-9]+)[° ]+([0-9]+[0-9.]*)?[\' ]+([EW])\\b/', $sQuery, $aData))
{
$sFound = $aData[0];
$fQueryLat = ($aData[3]=='N'?1:-1) * ($aData[1] + $aData[2]/60);
$fQueryLon = ($aData[6]=='E'?1:-1) * ($aData[4] + $aData[5]/60);
}
// degrees decimal seconds
// N 40 26 46 W 79 58 56
// N 40° 26 46″ W, 79° 58 56″
// 1 2 3 4 5 6 7 8
elseif (preg_match('/\\b([NS])[ ]([0-9]+)[° ]+([0-9]+)[\' ]+([0-9]+)[″"]*[, ]+([EW])[ ]([0-9]+)[° ]+([0-9]+)[\' ]+([0-9]+)[″"]*\\b/', $sQuery, $aData))
{
$sFound = $aData[0];
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2] + $aData[3]/60 + $aData[4]/3600);
$fQueryLon = ($aData[5]=='E'?1:-1) * ($aData[6] + $aData[7]/60 + $aData[8]/3600);
}
// degrees decimal seconds
// 40 26 46 N 79 58 56 W
// 40° 26 46″ N, 79° 58 56″ W
// 1 2 3 4 5 6 7 8
elseif (preg_match('/\\b([0-9]+)[° ]+([0-9]+)[\' ]+([0-9]+)[″" ]+([NS])[, ]+([0-9]+)[° ]+([0-9]+)[\' ]+([0-9]+)[″" ]+([EW])\\b/', $sQuery, $aData))
{
$sFound = $aData[0];
$fQueryLat = ($aData[4]=='N'?1:-1) * ($aData[1] + $aData[2]/60 + $aData[3]/3600);
$fQueryLon = ($aData[8]=='E'?1:-1) * ($aData[5] + $aData[6]/60 + $aData[7]/3600);
}
// degrees decimal
// N 40.446° W 79.982°
// 1 2 3 4
elseif (preg_match('/\\b([NS])[ ]([0-9]+[0-9]*\\.[0-9]+)[°]*[, ]+([EW])[ ]([0-9]+[0-9]*\\.[0-9]+)[°]*\\b/', $sQuery, $aData))
{
$sFound = $aData[0];
$fQueryLat = ($aData[1]=='N'?1:-1) * ($aData[2]);
$fQueryLon = ($aData[3]=='E'?1:-1) * ($aData[4]);
}
// degrees decimal
// 40.446° N 79.982° W
// 1 2 3 4
elseif (preg_match('/\\b([0-9]+[0-9]*\\.[0-9]+)[° ]+([NS])[, ]+([0-9]+[0-9]*\\.[0-9]+)[° ]+([EW])\\b/', $sQuery, $aData))
{
$sFound = $aData[0];
$fQueryLat = ($aData[2]=='N'?1:-1) * ($aData[1]);
$fQueryLon = ($aData[4]=='E'?1:-1) * ($aData[3]);
}
// degrees decimal
// 12.34, 56.78
// [12.456,-78.90]
// 1 2 3 4
elseif (preg_match('/(\\[|^|\\b)(-?[0-9]+[0-9]*\\.[0-9]+)[, ]+(-?[0-9]+[0-9]*\\.[0-9]+)(\\]|$|\\b)/', $sQuery, $aData))
{
$sFound = $aData[0];
$fQueryLat = $aData[2];
$fQueryLon = $aData[3];
}
if (!validLatLon($fQueryLat, $fQueryLon)) return;
$sQuery = trim(str_replace($sFound, ' ', $sQuery));
return array('lat' => $fQueryLat, 'lon' => $fQueryLon, 'query' => $sQuery);
}
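A worked example of looksLikeLatLonPair() directly above, assuming the library file is loaded: a coordinate written as decimal degrees with hemisphere letters is detected, given a sign, and removed from the remaining query text.

<?php
// Illustrative call; the values follow from the regular expressions above.
$aResult = looksLikeLatLonPair('40.446° N 79.982° W pittsburgh');
// $aResult is approximately:
//   array('lat' => 40.446, 'lon' => -79.982, 'query' => 'pittsburgh')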


@@ -3,7 +3,7 @@
function logStart(&$oDB, $sType = '', $sQuery = '', $aLanguageList = array())
{
$aStartTime = explode('.',microtime(true));
if (!isset($aStartTime[1])) $aStartTime[1] = '0';
if (!$aStartTime[1]) $aStartTime[1] = '0';
$sOutputFormat = '';
if (isset($_GET['format'])) $sOutputFormat = $_GET['format'];
@@ -13,7 +13,7 @@
$_SERVER["REMOTE_ADDR"],
$_SERVER['QUERY_STRING'],
$sQuery
);
);
if (CONST_Log_DB)
{
@@ -31,21 +31,21 @@
if (CONST_Log_File && CONST_Log_File_ReverseLog != '')
{
if ($sType == 'reverse')
{
$aStartTime = explode('.',$hLog[0]);
file_put_contents(CONST_Log_File_ReverseLog,
$aStartTime[0].','.$aStartTime[1].','.
php_uname('n').','.
'"'.addslashes(isset($_SERVER['HTTP_REFERER'])?$_SERVER['HTTP_REFERER']:'').'",'.
'"'.addslashes($hLog[1]).'",'.
$_GET['lat'].','.
$_GET['lon'].','.
$_GET['zoom'].','.
'"'.addslashes($_SERVER['HTTP_USER_AGENT']).'",'.
'"'.addslashes($sOutputFormat).'"'."\n",
FILE_APPEND);
}
if ($sType == 'reverse')
{
$aStartTime = explode('.',$hLog[0]);
file_put_contents(CONST_Log_File_ReverseLog,
$aStartTime[0].','.$aStartTime[1].','.
php_uname('n').','.
'"'.addslashes(isset($_SERVER['HTTP_REFERER'])?$_SERVER['HTTP_REFERER']:'').'",'.
'"'.addslashes($hLog[1]).'",'.
$_GET['lat'].','.
$_GET['lon'].','.
$_GET['zoom'].','.
'"'.addslashes($_SERVER['HTTP_USER_AGENT']).'",'.
'"'.addslashes($sOutputFormat).'"'."\n",
FILE_APPEND);
}
}
return $hLog;
@@ -63,7 +63,7 @@
$sSQL .= ' where starttime = '.getDBQuoted($hLog[0]);
$sSQL .= ' and ipaddress = '.getDBQuoted($hLog[1]);
$sSQL .= ' and query = '.getDBQuoted($hLog[3]);
$oDB->query($sSQL);
$oDB->query($sSQL);
$sSQL = 'update new_query_log set endtime = '.getDBQuoted($sEndTime).', results = '.$iNumResults;
$sSQL .= ' where starttime = '.getDBQuoted($hLog[0]);
@@ -74,17 +74,17 @@
if (CONST_Log_File && CONST_Log_File_SearchLog != '')
{
$aStartTime = explode('.',$hLog[0]);
file_put_contents(CONST_Log_File_SearchLog,
$aStartTime[0].','.$aStartTime[1].','.
php_uname('n').','.
'"'.addslashes(isset($_SERVER['HTTP_REFERER'])?$_SERVER['HTTP_REFERER']:'').'",'.
'"'.addslashes($hLog[1]).'",'.
'"'.addslashes($hLog[3]).'",'.
'"'.addslashes($_SERVER['HTTP_USER_AGENT']).'",'.
'"'.addslashes((isset($_GET['format']))?$_GET['format']:'').'",'.
$iNumResults."\n",
FILE_APPEND);
$aStartTime = explode('.',$hLog[0]);
file_put_contents(CONST_Log_File_SearchLog,
$aStartTime[0].','.$aStartTime[1].','.
php_uname('n').','.
'"'.addslashes(isset($_SERVER['HTTP_REFERER'])?$_SERVER['HTTP_REFERER']:'').'",'.
'"'.addslashes($hLog[1]).'",'.
'"'.addslashes($hLog[3]).'",'.
'"'.addslashes($_SERVER['HTTP_USER_AGENT']).'",'.
'"'.addslashes((isset($_GET['format']))?$_GET['format']:'').'",'.
$iNumResults."\n",
FILE_APPEND);
}
}


@@ -3,7 +3,7 @@
if (!sizeof($aPlace))
{
if (isset($sError))
if ($sError)
$aFilteredPlaces['error'] = $sError;
else
$aFilteredPlaces['error'] = 'Unable to geocode';
@@ -21,7 +21,7 @@
if (isset($aPlace['lat'])) $aFilteredPlaces['lat'] = $aPlace['lat'];
if (isset($aPlace['lon'])) $aFilteredPlaces['lon'] = $aPlace['lon'];
$aFilteredPlaces['display_name'] = $aPlace['langaddress'];
if ($bShowAddressDetails) $aFilteredPlaces['address'] = $aPlace['aAddress'];
if ($bShowAddressDetails) $aFilteredPlaces['address'] = $aAddress;
}
javascript_renderData($aFilteredPlaces);


@@ -3,7 +3,7 @@
if (!sizeof($aPlace))
{
if (isset($sError))
if ($sError)
$aFilteredPlaces['error'] = $sError;
else
$aFilteredPlaces['error'] = 'Unable to geocode';
@@ -20,19 +20,13 @@
}
if (isset($aPlace['lat'])) $aFilteredPlaces['lat'] = $aPlace['lat'];
if (isset($aPlace['lon'])) $aFilteredPlaces['lon'] = $aPlace['lon'];
$aFilteredPlaces['place_rank'] = $aPlace['rank_search'];
$aFilteredPlaces['category'] = $aPlace['class'];
$aFilteredPlaces['type'] = $aPlace['type'];
$aFilteredPlaces['importance'] = $aPlace['importance'];
$aFilteredPlaces['addresstype'] = strtolower($aPlace['addresstype']);
$aFilteredPlaces['display_name'] = $aPlace['langaddress'];
$aFilteredPlaces['name'] = $aPlace['placename'];
if ($bShowAddressDetails && $aPlace['aAddress'] && sizeof($aPlace['aAddress'])) $aFilteredPlaces['address'] = $aPlace['aAddress'];
if ($bShowAddressDetails && $aAddress && sizeof($aAddress)) $aFilteredPlaces['address'] = $aAddress;
}
javascript_renderData($aFilteredPlaces);


@@ -1,5 +1,6 @@
<?php
header("content-type: text/xml; charset=UTF-8");
header("Access-Control-Allow-Origin: *");
echo "<";
echo "?xml version=\"1.0\" encoding=\"UTF-8\" ?";
@@ -13,7 +14,7 @@
if (!sizeof($aPlace))
{
if (isset($sError))
if ($sError)
echo "<error>$sError</error>";
else
echo "<error>Unable to geocode</error>";
@@ -22,16 +23,15 @@
{
echo "<result";
if ($aPlace['place_id']) echo ' place_id="'.$aPlace['place_id'].'"';
$sOSMType = ($aPlace['osm_type'] == 'N'?'node':($aPlace['osm_type'] == 'W'?'way':($aPlace['osm_type'] == 'R'?'relation':'')));
if ($sOSMType) echo ' osm_type="'.$sOSMType.'"'.' osm_id="'.$aPlace['osm_id'].'"';
if ($aPlace['osm_type'] && $aPlace['osm_id']) echo ' osm_type="'.($aPlace['osm_type']=='N'?'node':($aPlace['osm_type']=='W'?'way':'relation')).'"'.' osm_id="'.$aPlace['osm_id'].'"';
if ($aPlace['ref']) echo ' ref="'.htmlspecialchars($aPlace['ref']).'"';
if (isset($aPlace['lat'])) echo ' lat="'.htmlspecialchars($aPlace['lat']).'"';
if (isset($aPlace['lon'])) echo ' lon="'.htmlspecialchars($aPlace['lon']).'"';
echo ">".htmlspecialchars($aPlace['langaddress'])."</result>";
if ($bShowAddressDetails) {
if ($bShowAddressDetails) {
echo "<addressparts>";
foreach($aPlace['aAddress'] as $sKey => $sValue)
foreach($aAddress as $sKey => $sValue)
{
$sKey = str_replace(' ','_',$sKey);
echo "<$sKey>";
@@ -39,7 +39,7 @@
echo "</$sKey>";
}
echo "</addressparts>";
}
}
}
echo "</reversegeocode>";


@@ -1,180 +0,0 @@
<?php
header("content-type: text/html; charset=UTF-8");
?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>OpenStreetMap Nominatim: <?php echo $aPointDetails['localname'];?></title>
<style>
body {
margin:0px;
padding:16px;
background:#ffffff;
height: 100%;
font: normal 12px/15px arial,sans-serif;
}
.line{
margin-left:20px;
}
.name{
font-weight: bold;
}
.notused{
color:#ddd;
}
.noname{
color:#800;
}
#map {
width:500px;
height:500px;
border: 2px solid #666;
float: right;
}
</style>
<script src="js/OpenLayers.js"></script>
<script src="js/tiles.js"></script>
<script type="text/javascript">
var map;
function init() {
map = new OpenLayers.Map ("map", {
controls:[
new OpenLayers.Control.Permalink(),
new OpenLayers.Control.Navigation(),
new OpenLayers.Control.PanZoomBar(),
new OpenLayers.Control.MouseDefaults(),
new OpenLayers.Control.MousePosition(),
new OpenLayers.Control.Attribution()],
maxExtent: new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34),
maxResolution: 156543.0399,
numZoomLevels: 19,
units: 'm',
projection: new OpenLayers.Projection("EPSG:900913"),
displayProjection: new OpenLayers.Projection("EPSG:4326")
} );
map.addLayer(new OpenLayers.Layer.OSM.<?php echo CONST_Tile_Default;?>("Default"));
var layer_style = OpenLayers.Util.extend({}, OpenLayers.Feature.Vector.style['default']);
layer_style.fillOpacity = 0.2;
layer_style.graphicOpacity = 0.2;
vectorLayer = new OpenLayers.Layer.Vector("Points", {style: layer_style});
map.addLayer(vectorLayer);
var proj_EPSG4326 = new OpenLayers.Projection("EPSG:4326");
var proj_map = map.getProjectionObject();
freader = new OpenLayers.Format.WKT({
'internalProjection': proj_map,
'externalProjection': proj_EPSG4326
});
var bounds;
<?php if ($aPointDetails['prevgeom']) { ?>
var feature = freader.read('<?php echo $aPointDetails['prevgeom'];?>');
if (feature) {
bounds = feature.geometry.getBounds();
}
feature.style = {
strokeColor: "#777777",
fillColor: "#F0F0F0",
strokeWidth: 2,
strokeOpacity: 0.75,
fillOpacity: 0.75,
strokeDashstyle: "longdash"
};
vectorLayer.addFeatures([feature]);
<?php } ?>
<?php if ($aPointDetails['newgeom']) { ?>
feature = freader.read('<?php echo $aPointDetails['newgeom'];?>');
if (feature) {
if (!bounds) {
bounds = feature.geometry.getBounds();
}
else
{
bounds.extend(feature.geometry.getBounds());
}
}
feature.style = {
strokeColor: "#75ADFF",
fillColor: "#FFF7F0",
strokeWidth: 2,
strokeOpacity: 0.75,
fillOpacity: 0.75
};
vectorLayer.addFeatures([feature]);
<?php } ?>
<?php if (isset($aPointDetails['error_x'])) { ?>
var pt = new OpenLayers.Geometry.Point(<?php echo $aPointDetails['error_x'].','.$aPointDetails['error_y'];?>);
pt = pt.transform(proj_EPSG4326, proj_map);
feature = new OpenLayers.Feature.Vector(pt, null,
{
graphicName : "x",
fillColor: "#FF0000",
graphic : true,
pointRadius: 6
});
vectorLayer.addFeatures([feature]);
<?php } ?>
map.zoomToExtent(bounds);
}
</script>
</head>
<body onload="init();">
<div id="map"></div>
<h1><?php echo $aPointDetails['localname'] ?></h1>
<div class="locationdetails">
<div>Type: <span class="type"><?php echo $aPointDetails['class'].':'.$aPointDetails['type'];?></span></div>
<?php
$sOSMType = ($aPointDetails['osm_type'] == 'N'?'node':($aPointDetails['osm_type'] == 'W'?'way': ($aPointDetails['osm_type'] == 'R'?'relation':'')));
if ($sOSMType) echo ' <div>OSM: <span class="osm"><span class="label"></span>'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aPointDetails['osm_id'].'">'. $aPointDetails['osm_id'].'</a></span></div>';
?>
<p>This object has an invalid geometry.</p>
<p><b>Details:</b> <?php
$sVal = $aPointDetails['errormessage']?$aPointDetails['errormessage']:'&nbsp;';
$sOSMType = ($aPointDetails['osm_type'] == 'N'?'node':($aPointDetails['osm_type'] == 'W'?'way':($aPointDetails['osm_type'] == 'R'?'relation':'')));
$sOSMID = $aPointDetails['osm_id'];
if (isset($aPointDetails['error_x']))
{
$sLat = $aPointDetails['error_y'];
$sLon = $aPointDetails['error_x'];
echo "<a href=\"http://www.openstreetmap.org/?lat=".$sLat."&lon=".$sLon."&zoom=18&layers=M&".$sOSMType."=".$sOSMID."\">".$sVal."</a>";
}
else
{
echo $sVal;
}
?>
<p><b>Edit:</b> in <?php
if (isset($aPointDetails['error_x']))
{
$fWidth = 0.0002;
echo " <a href=\"http://localhost:8111/load_and_zoom?left=".($sLon-$fWidth)."&right=".($sLon+$fWidth)."&top=".($sLat+$fWidth)."&bottom=".($sLat-$fWidth)."\" target=\"josm\">Remote Control (JOSM / Merkaartor)</a>";
echo " | <a href=\"http://www.openstreetmap.org/edit?editor=potlatch2&bbox=".($sLon-$fWidth).",".($sLat-$fWidth).",".($sLon+$fWidth).",".($sLat+$fWidth)."\" target=\"potlatch2\">Potlatch 2</a>";
}
else
{
echo " <a href=\"http://localhost:8111/import?url=http://www.openstreetmap.org/api/0.6/".$sOSMType.'/'.$sOSMID."/full\" target=\"josm\">Remote Control (JOSM / Merkaartor)</a>";
// Should be better to load by object id - but this doesn't seem to zoom correctly
//echo " <a href=\"http://localhost:8111/load_object?new_layer=true&objects=".strtolower($aPointDetails['osm_type']).$sOSMID."\" target=\"josm\">Remote Control (JOSM / Merkaartor)</a>";
}
?></p>
</body>
</html>

View File

@@ -1,15 +1,40 @@
<?php
header("content-type: text/html; charset=UTF-8");
?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<html>
<head>
<title>OpenStreetMap Nominatim: <?php echo $aPointDetails['localname'];?></title>
<link href="css/details.css" rel="stylesheet" type="text/css" />
<script src="js/OpenLayers.js" type="text/javascript"></script>
<script src="js/tiles.js" type="text/javascript"></script>
<style>
body {
margin:0px;
padding:16px;
background:#ffffff;
height: 100%;
font: normal 12px/15px arial,sans-serif;
}
.line{
margin-left:20px;
}
.name{
font-weight: bold;
}
.notused{
color:#ddd;
}
.noname{
color:#800;
}
#map {
width:500px;
height:500px;
border: 2px solid #666;
float: right;
}
</style>
<script src="js/OpenLayers.js"></script>
<script src="js/tiles.js"></script>
<script type="text/javascript">
var map;
function init() {
@@ -18,6 +43,7 @@
new OpenLayers.Control.Permalink(),
new OpenLayers.Control.Navigation(),
new OpenLayers.Control.PanZoomBar(),
new OpenLayers.Control.MouseDefaults(),
new OpenLayers.Control.MousePosition(),
new OpenLayers.Control.Attribution()],
maxExtent: new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34),
@@ -27,51 +53,43 @@
projection: new OpenLayers.Projection("EPSG:900913"),
displayProjection: new OpenLayers.Projection("EPSG:4326")
} );
map.addLayer(new OpenLayers.Layer.OSM.<?php echo CONST_Tile_Default;?>("Default"));
map.addLayer(new OpenLayers.Layer.OSM.<?php echo CONST_Tile_Default;?>("Default",
{ attribution : '© <a target="_parent" href="http://www.openstreetmap.org">OpenStreetMap</a> and contributors, under an <a target="_parent" href="http://www.openstreetmap.org/copyright">open license</a>'}));
var layer_style = OpenLayers.Util.extend({}, OpenLayers.Feature.Vector.style['default']);
layer_style.fillOpacity = 0.2;
layer_style.graphicOpacity = 0.2;
var layer_style = OpenLayers.Util.extend({}, OpenLayers.Feature.Vector.style['default']);
layer_style.fillOpacity = 0.2;
layer_style.graphicOpacity = 0.2;
vectorLayer = new OpenLayers.Layer.Vector("Points", {style: layer_style});
map.addLayer(vectorLayer);
var pointList = [];
var style = {
strokeColor: "#75ADFF",
fillColor: "#F0F7FF",
strokeWidth: 2,
strokeOpacity: 0.75,
fillOpacity: 0.75
};
var proj_EPSG4326 = new OpenLayers.Projection("EPSG:4326");
var proj_map = map.getProjectionObject();
var latlon;
<?php
if (isset($aPolyPoints))
{
foreach($aPolyPoints as $aPolyPoint)
{
echo " pointList.push(new OpenLayers.Geometry.Point(".$aPolyPoint[1].",".$aPolyPoint[2]."));\n";
}
}
?>
var linearRing = new OpenLayers.Geometry.LinearRing(pointList).transform(proj_EPSG4326, proj_map);;
var polygonFeature = new OpenLayers.Feature.Vector(new OpenLayers.Geometry.Polygon([linearRing]),null,style);
vectorLayer.addFeatures([polygonFeature]);
freader = new OpenLayers.Format.WKT({
'internalProjection': proj_map,
'externalProjection': proj_EPSG4326
});
var feature = freader.read('<?php echo $aPointDetails['outlinestring'];?>');
var featureCentre = freader.read('POINT(<?php echo $aPointDetails['lon'];?> <?php echo $aPointDetails['lat'];?>)');
if (feature) {
map.zoomToExtent(feature.geometry.getBounds());
feature.style = {
strokeColor: "#75ADFF",
fillColor: "#F0F7FF",
strokeWidth: <?php echo ($aPointDetails['isarea']=='t'?'2':'5');?>,
strokeOpacity: 0.75,
fillOpacity: 0.75,
pointRadius: 50
};
<?php if ($aPointDetails['isarea']=='t') {?>
featureCentre.style = {
strokeColor: "#008800",
fillColor: "#338833",
strokeWidth: <?php echo ($aPointDetails['isarea']=='t'?'2':'5');?>,
strokeOpacity: 0.75,
fillOpacity: 0.75,
pointRadius: 8
};
vectorLayer.addFeatures([feature,featureCentre]);
<?php } else { ?>
vectorLayer.addFeatures([feature]);
<?php } ?>
}
map.zoomToExtent(new OpenLayers.Bounds(<?php echo $aPointPolygon['minlon']?>, <?php echo $aPointPolygon['minlat']?>, <?php echo $aPointPolygon['maxlon']?>, <?php echo $aPointPolygon['maxlat']?>).transform(proj_EPSG4326, proj_map));
}
</script>
</head>
<body onload="init();">
@@ -80,9 +98,9 @@
echo '<h1>';
if ($aPointDetails['icon'])
{
echo '<img style="float:right;margin-right:40px;" src="'.CONST_Website_BaseURL.'images/mapicons/'.$aPointDetails['icon'].'.n.32.png'.'" alt="'.$aPointDetails['icon'].'" />';
echo '<img style="float:right;margin-right:40px;" src="'.CONST_Website_BaseURL.'images/mapicons/'.$aPointDetails['icon'].'.n.32.png'.'">';
}
echo $aPointDetails['localname']."</h1>\n";
echo $aPointDetails['localname'].'</h1>';
echo '<div class="locationdetails">';
echo ' <div>Name: ';
foreach($aPointDetails['aNames'] as $sKey => $sValue)
@@ -91,14 +109,12 @@
}
echo ' </div>';
echo ' <div>Type: <span class="type">'.$aPointDetails['class'].':'.$aPointDetails['type'].'</span></div>';
echo ' <div>Last Updated: <span class="type">'.$aPointDetails['indexed_date'].'</span></div>';
echo ' <div>Admin Level: <span class="adminlevel">'.$aPointDetails['admin_level'].'</span></div>';
echo ' <div>Rank: <span class="rankaddress">'.$aPointDetails['rank_search_label'].'</span></div>';
if ($aPointDetails['calculated_importance']) echo ' <div>Importance: <span class="rankaddress">'.$aPointDetails['calculated_importance'].($aPointDetails['importance']?'':' (estimated)').'</span></div>';
if ($aPointDetails['importance']) echo ' <div>Importance: <span class="rankaddress">'.$aPointDetails['importance'].'</span></div>';
echo ' <div>Coverage: <span class="area">'.($aPointDetails['isarea']=='t'?'Polygon':'Point').'</span></div>';
echo ' <div>Centre Point: <span class="area">'.$aPointDetails['lat'].','.$aPointDetails['lon'].'</span></div>';
$sOSMType = ($aPointDetails['osm_type'] == 'N'?'node':($aPointDetails['osm_type'] == 'W'?'way':($aPointDetails['osm_type'] == 'R'?'relation':'')));
if ($sOSMType) echo ' <div>OSM: <span class="osm">'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aPointDetails['osm_id'].'">'.$aPointDetails['osm_id'].'</a></span></div>';
if ($sOSMType) echo ' <div>OSM: <span class="osm"><span class="label"></span>'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aPointDetails['osm_id'].'">'.$aPointDetails['osm_id'].'</a></span></div>';
if ($aPointDetails['wikipedia'])
{
list($sWikipediaLanguage,$sWikipediaArticle) = explode(':',$aPointDetails['wikipedia']);
@@ -110,10 +126,10 @@
echo ' <div class="line"><span class="name">'.$sValue.'</span> ('.$sKey.')</div>';
}
echo ' </div>';
echo "</div>\n";
echo '</div>';
echo "<h2>Address</h2>\n";
echo '<div class="address">';
echo '<h2>Address</h2>';
echo '<div class=\"address\">';
$iPrevRank = 1000000;
$sPrevLocalName = '';
foreach($aAddressLines as $aAddressLine)
@@ -129,21 +145,21 @@
echo '<span class="name">'.(trim($aAddressLine['localname'])?$aAddressLine['localname']:'<span class="noname">No Name</span>').'</span>';
echo ' (';
echo '<span class="type"><span class="label">Type: </span>'.$aAddressLine['class'].':'.$aAddressLine['type'].'</span>';
if ($sOSMType) echo ', <span class="osm">'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
if (isset($aAddressLine['admin_level'])) echo ', <span class="adminlevel">'.$aAddressLine['admin_level'].'</span>';
if (isset($aAddressLine['rank_search_label'])) echo ', <span class="rankaddress">'.$aAddressLine['rank_search_label'].'</span>';
if ($sOSMType) echo ', <span class="osm"><span class="label"></span>'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
echo ', <span class="adminlevel">'.$aAddressLine['admin_level'].'</span>';
echo ', <span class="rankaddress">'.$aAddressLine['rank_search_label'].'</span>';
// echo ', <span class="area">'.($aAddressLine['fromarea']=='t'?'Polygon':'Point').'</span>';
echo ', <span class="distance">'.$aAddressLine['distance'].'</span>';
echo ' <a href="details.php?place_id='.$aAddressLine['place_id'].'">GOTO</a>';
echo ')';
echo "</div>\n";
echo '</div>';
}
echo "</div>\n";
echo '</div>';
if ($aLinkedLines)
{
echo "<h2>Linked Places</h2>\n";
echo '<div class="linked">';
echo '<h2>Linked Places</h2>';
echo '<div class=\"linked\">';
foreach($aLinkedLines as $aAddressLine)
{
$sOSMType = ($aAddressLine['osm_type'] == 'N'?'node':($aAddressLine['osm_type'] == 'W'?'way':($aAddressLine['osm_type'] == 'R'?'relation':'')));
@@ -152,16 +168,16 @@
echo '<span class="name">'.(trim($aAddressLine['localname'])?$aAddressLine['localname']:'<span class="noname">No Name</span>').'</span>';
echo ' (';
echo '<span class="type"><span class="label">Type: </span>'.$aAddressLine['class'].':'.$aAddressLine['type'].'</span>';
if ($sOSMType) echo ', <span class="osm">'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
if ($sOSMType) echo ', <span class="osm"><span class="label"></span>'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
echo ', <span class="adminlevel">'.$aAddressLine['admin_level'].'</span>';
if (isset($aAddressLine['rank_search_label'])) echo ', <span class="rankaddress">'.$aAddressLine['rank_search_label'].'</span>';
echo ', <span class="rankaddress">'.$aAddressLine['rank_search_label'].'</span>';
// echo ', <span class="area">'.($aAddressLine['fromarea']=='t'?'Polygon':'Point').'</span>';
echo ', <span class="distance">'.$aAddressLine['distance'].'</span>';
echo ' <a href="details.php?place_id='.$aAddressLine['place_id'].'">GOTO</a>';
echo ')';
echo "</div>\n";
echo '</div>';
}
echo "</div>\n";
echo '</div>';
}
if ($aPlaceSearchNameKeywords)
@@ -169,7 +185,7 @@
echo '<h2>Name Keywords</h2>';
foreach($aPlaceSearchNameKeywords as $aRow)
{
echo '<div>'.$aRow['word_token']."</div>\n";
echo '<div>'.$aRow['word_token'].'</div>';
}
}
@@ -178,13 +194,13 @@
echo '<h2>Address Keywords</h2>';
foreach($aPlaceSearchAddressKeywords as $aRow)
{
echo '<div>'.($aRow['word_token'][0]==' '?'*':'').$aRow['word_token'].'('.$aRow['word_id'].')'."</div>\n";
echo '<div>'.($aRow['word_token'][0]==' '?'*':'').$aRow['word_token'].'('.$aRow['word_id'].')'.'</div>';
}
}
if (sizeof($aParentOfLines))
{
echo "<h2>Parent Of:</h2>\n<div>\n";
echo '<h2>Parent Of (named features only):</h2>';
$aGroupedAddressLines = array();
foreach($aParentOfLines as $aAddressLine)
@@ -195,7 +211,7 @@
foreach($aGroupedAddressLines as $sGroupHeading => $aParentOfLines)
{
$sGroupHeading = ucwords($sGroupHeading);
echo "<h3>$sGroupHeading</h3>\n";
echo "<h3>$sGroupHeading</h3>";
foreach($aParentOfLines as $aAddressLine)
{
$aAddressLine['localname'] = $aAddressLine['localname']?$aAddressLine['localname']:$aAddressLine['housenumber'];
@@ -204,17 +220,17 @@
echo '<div class="line">';
echo '<span class="name">'.(trim($aAddressLine['localname'])?$aAddressLine['localname']:'<span class="noname">No Name</span>').'</span>';
echo ' (';
// echo '<span class="type"><span class="label">Type: </span>'.$aAddressLine['class'].':'.$aAddressLine['type'].'</span>';
// echo ', <span class="adminlevel">'.$aAddressLine['admin_level'].'</span>';
// echo ', <span class="rankaddress">'.$aAddressLine['rank_address'].'</span>';
echo '<span class="area">'.($aAddressLine['isarea']=='t'?'Polygon':'Point').'</span>';
echo ', <span class="distance">~'.(round($aAddressLine['distance']*69,1)).'&nbsp;miles</span>';
if ($sOSMType) echo ', <span class="osm">'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
if ($sOSMType) echo ', <span class="osm"><span class="label"></span>'.$sOSMType.' <a href="http://www.openstreetmap.org/browse/'.$sOSMType.'/'.$aAddressLine['osm_id'].'">'.$aAddressLine['osm_id'].'</a></span>';
echo ', <a href="details.php?place_id='.$aAddressLine['place_id'].'">GOTO</a>';
echo ')';
echo "</div>\n";
echo '</div>';
}
}
if (sizeof($aParentOfLines) >= 500) {
echo '<p>There are more child objects which are not shown.</p>';
}
echo '</div>';
}
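The "Parent Of" listing above prints pre-grouped lines; the loop that fills $aGroupedAddressLines falls between the hunks shown here. A minimal sketch of that kind of grouping, assuming each line is bucketed under a heading string (which field supplies the heading is an assumption made for illustration):

<?php
// Illustrative grouping of child rows under a heading before printing.
$aGroupedAddressLines = array();
foreach($aParentOfLines as $aAddressLine)
{
    $sGroupHeading = $aAddressLine['type'];   // assumption: group by type
    if (!isset($aGroupedAddressLines[$sGroupHeading]))
    {
        $aGroupedAddressLines[$sGroupHeading] = array();
    }
    $aGroupedAddressLines[$sGroupHeading][] = $aAddressLine;
}
foreach($aGroupedAddressLines as $sGroupHeading => $aLines)
{
    echo '<h3>'.ucwords($sGroupHeading).'</h3>';
    // ... render each entry of $aLines as in the template above ...
}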

View File

@@ -1,88 +0,0 @@
<?php
$aOutput = array();
$aOutput['licence'] = "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright";
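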
$aOutput['batch'] = array();
foreach($aBatchResults as $aSearchResults)
{
if (!$aSearchResults) $aSearchResults = array();
$aFilteredPlaces = array();
foreach($aSearchResults as $iResNum => $aPointDetails)
{
$aPlace = array(
'place_id'=>$aPointDetails['place_id'],
);
$sOSMType = ($aPointDetails['osm_type'] == 'N'?'node':($aPointDetails['osm_type'] == 'W'?'way':($aPointDetails['osm_type'] == 'R'?'relation':'')));
if ($sOSMType)
{
$aPlace['osm_type'] = $sOSMType;
$aPlace['osm_id'] = $aPointDetails['osm_id'];
}
if (isset($aPointDetails['aBoundingBox']))
{
$aPlace['boundingbox'] = array(
$aPointDetails['aBoundingBox'][0],
$aPointDetails['aBoundingBox'][1],
$aPointDetails['aBoundingBox'][2],
$aPointDetails['aBoundingBox'][3]);
if (isset($aPointDetails['aPolyPoints']) && $bShowPolygons)
{
$aPlace['polygonpoints'] = $aPointDetails['aPolyPoints'];
}
}
if (isset($aPointDetails['zoom']))
{
$aPlace['zoom'] = $aPointDetails['zoom'];
}
$aPlace['lat'] = $aPointDetails['lat'];
$aPlace['lon'] = $aPointDetails['lon'];
$aPlace['display_name'] = $aPointDetails['name'];
$aPlace['place_rank'] = $aPointDetails['rank_search'];
$aPlace['category'] = $aPointDetails['class'];
$aPlace['type'] = $aPointDetails['type'];
$aPlace['importance'] = $aPointDetails['importance'];
if (isset($aPointDetails['icon']))
{
$aPlace['icon'] = $aPointDetails['icon'];
}
if (isset($aPointDetails['address']) && sizeof($aPointDetails['address'])>0)
{
$aPlace['address'] = $aPointDetails['address'];
}
if (isset($aPointDetails['asgeojson']))
{
$aPlace['geojson'] = json_decode($aPointDetails['asgeojson']);
}
if (isset($aPointDetails['assvg']))
{
$aPlace['svg'] = $aPointDetails['assvg'];
}
if (isset($aPointDetails['astext']))
{
$aPlace['geotext'] = $aPointDetails['astext'];
}
if (isset($aPointDetails['askml']))
{
$aPlace['geokml'] = $aPointDetails['askml'];
}
$aFilteredPlaces[] = $aPlace;
}
$aOutput['batch'][] = $aFilteredPlaces;
}
javascript_renderData($aOutput, array('geojson'));
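One detail worth spelling out: the geometry string returned by the database (for example from ST_AsGeoJSON) is passed through json_decode before being stored under 'geojson', so the final JSON output nests it as a real object instead of a doubly escaped string. A small standalone illustration (the sample geometry is made up):

<?php
// Sample string as it might come back from ST_AsGeoJSON.
$sGeoJSON = '{"type":"Point","coordinates":[8.0,47.0]}';

// Kept as a plain string, the outer encoder escapes every quote:
//   {"geojson":"{\"type\":\"Point\", ... }"}
echo json_encode(array('geojson' => $sGeoJSON))."\n";

// Decoded first, the geometry nests as a proper object:
//   {"geojson":{"type":"Point", ... }}
echo json_encode(array('geojson' => json_decode($sGeoJSON)))."\n";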

View File

@@ -1,40 +1,205 @@
<?php
header("content-type: text/html; charset=UTF-8");
?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<html>
<head>
<title>OpenStreetMap Nominatim: Search</title>
<base href="<?php echo CONST_Website_BaseURL;?>" />
<link href="nominatim.xml" rel="search" title="Nominatim Search" type="application/opensearchdescription+xml" />
<link href="css/search.css" rel="stylesheet" type="text/css" />
<script src="js/OpenLayers.js" type="text/javascript"></script>
<script src="js/tiles.js" type="text/javascript"></script>
<script src="js/prototype-1.6.0.3.js" type="text/javascript"></script>
<script src="js/OpenLayers.js"></script>
<script src="js/tiles.js"></script>
<script src="js/prototype-1.6.0.3.js"></script>
<style>
* {-moz-box-sizing: border-box;}
body {
margin:0px;
padding:0px;
overflow: hidden;
background:#ffffff;
height: 100%;
font: normal 12px/15px arial,sans-serif;
}
#seachheader {
position:absolute;
z-index:5;
top:0px;
left:0px;
width:100%;
height:38px;
background:#F0F7FF;
border-bottom: 2px solid #75ADFF;
}
#q {
width:300px;
}
#seachheaderfade1, #seachheaderfade2, #seachheaderfade3, #seachheaderfade4{
position:absolute;
z-index:4;
top:0px;
left:0px;
width:100%;
opacity: 0.15;
filter: alpha(opacity = 15);
background:#000000;
border: 1px solid #000000;
}
#seachheaderfade1{
height:39px;
}
#seachheaderfade2{
height:40px;
}
#seachheaderfade3{
height:41px;
}
#seachheaderfade4{
height:42px;
}
#searchresultsfade1, #searchresultsfade2, #searchresultsfade3, #searchresultsfade4 {
position:absolute;
z-index:2;
top:0px;
left:200px;
height: 100%;
opacity: 0.2;
filter: alpha(opacity = 20);
background:#ffffff;
border: 1px solid #ffffff;
}
#searchresultsfade1{
width:1px;
}
#searchresultsfade2{
width:2px;
}
#searchresultsfade3{
width:3px;
}
#searchresultsfade4{
width:4px;
}
#searchresults{
position:absolute;
z-index:3;
top:41px;
width:200px;
height: 100%;
background:#ffffff;
border: 1px solid #ffffff;
overflow: auto;
}
#map{
position:absolute;
z-index:1;
top:38px;
left:200px;
width:100%;
height:100%;
background:#eee;
}
#report{
position:absolute;
z-index:2;
top:38px;
left:200px;
width:100%;
height:100%;
background:#eee;
font: normal 12px/15px arial,sans-serif;
padding:20px;
}
#report table {
margin-left:20px;
}
#report th {
vertical-align:top;
text-align:left;
}
#report td.button {
text-align:right;
}
.result {
margin:5px;
margin-bottom:0px;
padding:2px;
padding-left:4px;
padding-right:4px;
border-radius: 5px;
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
background:#F0F7FF;
border: 2px solid #D7E7FF;
font: normal 12px/15px arial,sans-serif;
cursor:pointer;
}
.result img{
float:right;
}
.result .latlon{
display: none;
}
.result .place_id{
display: none;
}
.result .type{
color: #999;
text-align:center;
font: normal 9px/10px arial,sans-serif;
padding-top:4px;
}
.result .details, .result .details a{
color: #999;
text-align:center;
font: normal 9px/10px arial,sans-serif;
padding-top:4px;
}
.noresults{
color: #000;
text-align:center;
font: normal 12px arial,sans-serif;
padding-top:4px;
}
.more{
color: #ccc;
text-align:center;
padding-top:4px;
}
.disclaimer{
color: #ccc;
text-align:center;
font: normal 9px/10px arial,sans-serif;
padding-top:4px;
}
form{
margin:0px;
padding:0px;
}
</style>
<script type="text/javascript">
var map;
function handleResize()
{
if ($('searchresults'))
{
var viewwidth = ((document.documentElement.clientWidth > 0?document.documentElement.clientWidth:document.documentElement.offsetWidth) - 200) + 'px';
$('map').style.width = viewwidth;
$('report').style.width = viewwidth;
$('map').style.width = (document.documentElement.clientWidth > 0?document.documentElement.clientWidth:document.documentElement.offsetWidth) - 200;
$('report').style.width = (document.documentElement.clientWidth > 0?document.documentElement.clientWidth:document.documentElement.offsetWidth) - 200;
}
else
{
$('map').style.width = ((document.documentElement.clientWidth > 0?document.documentElement.clientWidth:document.documentElement.offsetWidth) - 0) + 'px';
$('map').style.left = '0px';
$('map').style.width = (document.documentElement.clientWidth > 0?document.documentElement.clientWidth:document.documentElement.offsetWidth) - 0;
$('map').style.left = 0;
}
if ($('map')) $('map').style.height = ((document.documentElement.clientHeight > 0?document.documentElement.clientHeight:document.documentElement.offsetHeight) - 38) + 'px';
if ($('searchresults')) $('searchresults').style.height = ((document.documentElement.clientHeight > 0?document.documentElement.clientHeight:document.documentElement.offsetHeight) - 38) + 'px';
if ($('report')) $('report').style.height = ((document.documentElement.clientHeight > 0?document.documentElement.clientHeight:document.documentElement.offsetHeight) - 38) + 'px';
if ($('map')) $('map').style.height = (document.documentElement.clientHeight > 0?document.documentElement.clientHeight:document.documentElement.offsetHeight) - 38;
if ($('searchresults')) $('searchresults').style.height = (document.documentElement.clientHeight > 0?document.documentElement.clientHeight:document.documentElement.offsetHeight) - 38;
if ($('report')) $('report').style.height = (document.documentElement.clientHeight > 0?document.documentElement.clientHeight:document.documentElement.offsetHeight) - 38;
}
window.onresize = handleResize;
@@ -51,34 +216,40 @@
map.panTo(lonLat, 10);
}
function panToLatLonBoundingBox(lat,lon,minlat,maxlat,minlon,maxlon,wkt) {
vectorLayer.destroyFeatures();
var proj_EPSG4326 = new OpenLayers.Projection("EPSG:4326");
var proj_map = map.getProjectionObject();
map.zoomToExtent(new OpenLayers.Bounds(minlon,minlat,maxlon,maxlat).transform(proj_EPSG4326, proj_map));
var lonLat = new OpenLayers.LonLat(lon, lat).transform(proj_EPSG4326, proj_map);
function panToLatLonBoundingBox(lat,lon,minlat,maxlat,minlon,maxlon,points) {
var proj_EPSG4326 = new OpenLayers.Projection("EPSG:4326");
var proj_map = map.getProjectionObject();
map.zoomToExtent(new OpenLayers.Bounds(minlon,minlat,maxlon,maxlat).transform(proj_EPSG4326, proj_map));
var lonLat = new OpenLayers.LonLat(lon, lat).transform(new OpenLayers.Projection("EPSG:4326"), map.getProjectionObject());
map.panTo(lonLat, <?php echo $iZoom ?>);
if (wkt)
var pointList = [];
var style = {
strokeColor: "#75ADFF",
fillColor: "#F0F7FF",
strokeWidth: 2,
strokeOpacity: 0.75,
fillOpacity: 0.75
};
var proj_EPSG4326 = new OpenLayers.Projection("EPSG:4326");
var proj_map = map.getProjectionObject();
if (points)
{
var freader = new OpenLayers.Format.WKT({
'internalProjection': proj_map,
'externalProjection': proj_EPSG4326
});
var feature = freader.read(wkt);
if (feature)
{
feature.style = {
strokeColor: "#75ADFF",
fillColor: "#F0F7FF",
strokeWidth: 2,
strokeOpacity: 0.75,
fillOpacity: 0.75,
pointRadius: 100
};
vectorLayer.addFeatures([feature]);
}
points.each(function(p){
pointList.push(new OpenLayers.Geometry.Point(p[0],p[1]));
});
var linearRing = new OpenLayers.Geometry.LinearRing(pointList).transform(proj_EPSG4326, proj_map);;
var polygonFeature = new OpenLayers.Feature.Vector(new OpenLayers.Geometry.Polygon([linearRing]),null,style);
vectorLayer.destroyFeatures();
vectorLayer.addFeatures([polygonFeature]);
}
else
{
var lonLat = new OpenLayers.LonLat(lon, lat).transform(new OpenLayers.Projection("EPSG:4326"), map.getProjectionObject());
var point = new OpenLayers.Geometry.Point(lonLat.lon, lonLat.lat);
var pointFeature = new OpenLayers.Feature.Vector(point,null,style);
vectorLayer.destroyFeatures();
vectorLayer.addFeatures([pointFeature]);
}
}
@@ -111,6 +282,7 @@
controls:[
new OpenLayers.Control.Navigation(),
new OpenLayers.Control.PanZoomBar(),
new OpenLayers.Control.MouseDefaults(),
new OpenLayers.Control.MousePosition(),
new OpenLayers.Control.Attribution()],
maxExtent: new OpenLayers.Bounds(-20037508.34,-20037508.34,20037508.34,20037508.34),
@@ -123,7 +295,8 @@
"moveend": mapEventMove
}
} );
map.addLayer(new OpenLayers.Layer.OSM.<?php echo CONST_Tile_Default;?>("Default"));
map.addLayer(new OpenLayers.Layer.OSM.<?php echo CONST_Tile_Default;?>("Default",
{ attribution : '© <a target="_parent" href="http://www.openstreetmap.org">OpenStreetMap</a> and contributors, under an <a target="_parent" href="http://www.openstreetmap.org/copyright">open license</a>'}));
var layer_style = OpenLayers.Util.extend({}, OpenLayers.Feature.Vector.style['default']);
layer_style.fillOpacity = 0.2;
@@ -148,26 +321,26 @@
<div id="seachheader">
<form accept-charset="UTF-8" action="<?php echo CONST_Website_BaseURL; ?>search.php" method="get">
<table border="0" width="100%" summary="header">
<table border="0" width="100%">
<tr>
<td valign="middle" style="width:30px;"><img alt="logo" src="images/logo.gif" /></td>
<td valign="middle" style="width:400px;"><input id="q" name="q" value="<?php echo htmlspecialchars($sQuery);
?>" style="width:270px;" /><input type="text" id="viewbox" style="width:120px;" name="viewbox" /></td>
<td style="width:80px;"><input type="submit" value="Search"/></td>
<?php if (CONST_Search_AreaPolygons) { ?> <td style="width:100px;"><input type="checkbox" value="1" name="polygon" <?php if ($bAsText) echo "checked='checked'"; ?>/> Highlight</td>
<td valign="center" style="width:30px;"><img src="images/logo.gif"></td>
<td valign="center" style="width:400px;"><input id="q" name="q" value="<?php echo htmlspecialchars($sQuery);
?>" style="width:270px;"><input type="text" id="viewbox" style="width:130px;" name="viewbox"></td>
<td style="width:80px;"><input type="submit" value="Search"></td>
<?php if (CONST_Search_AreaPolygons) { ?> <td style="width:100px;"><input type="checkbox" value="1" name="polygon" <?php if ($bShowPolygons) echo "checked"; ?>> Highlight</td>
<td style="text-align:right;">Data: <?php echo $sDataDate; ?></td>
<td style="text-align:right;">
<a href="http://wiki.openstreetmap.org/wiki/Nominatim" target="_blank">Documentation</a> | <a href="http://wiki.openstreetmap.org/wiki/Nominatim/FAQ"
target="_blank">FAQ</a></td>
<?php } ?> <td style="text-align:right;"><?php if ($sQuery) { ?><input type="button" value="Report Problem With Results" onclick="$('report').style.visibility=($('report').style.visibility=='hidden'?'visible':'hidden')"/><?php } ?></td>
<?php } ?> <td style="text-align:right;"><?php if ($sQuery) { ?><input type="button" value="Report Problem With Results" onclick="$('report').style.visibility=($('report').style.visibility=='hidden'?'visible':'hidden')"><?php } ?></td>
</tr>
</table>
</form>
</div>
<?php
if ($sQuery)
if ($sQuery || sizeof($aSearchResults))
{
?>
<div id="searchresultsfade1"></div><div id="searchresultsfade2"></div><div id="searchresultsfade3"></div><div id="searchresultsfade4"></div>
@@ -186,8 +359,8 @@ target="_blank">FAQ</a></td>
echo ', '.$aResult['aBoundingBox'][1];
echo ', '.$aResult['aBoundingBox'][2];
echo ', '.$aResult['aBoundingBox'][3];
if (isset($aResult['astext'])) echo ', "'.$aResult['astext'].'"';
echo ");'>\n";
if (isset($aResult['aPolyPoints'])) echo ', '.json_encode($aResult['aPolyPoints']);
echo ');\'>';
}
elseif (isset($aResult['zoom']))
{
@@ -198,7 +371,7 @@ target="_blank">FAQ</a></td>
echo '<div class="result" onClick="panToLatLon('.$aResult['lat'].', '.$aResult['lon'].');">';
}
echo (isset($aResult['icon'])?'<img alt="icon" src="'.$aResult['icon'].'"/>':'');
echo (isset($aResult['icon'])?'<img src="'.$aResult['icon'].'">':'');
echo ' <span class="name">'.$aResult['name'].'</span>';
echo ' <span class="latlon">'.round($aResult['lat'],3).','.round($aResult['lon'],3).'</span>';
echo ' <span class="place_id">'.$aResult['place_id'].'</span>';
@@ -213,7 +386,7 @@ target="_blank">FAQ</a></td>
{
if ($sMoreURL)
{
echo '<div class="more"><a href="'.htmlentities($sMoreURL).'">Search for more results</a></div>';
echo '<div class="more"><a href="'.$sMoreURL.'">Search for more results</a></div>';
}
}
else
@@ -223,7 +396,7 @@ target="_blank">FAQ</a></td>
?>
<div class="disclaimer">Addresses and postcodes are approximate
<input type="button" value="Report Problem" onclick="$('report').style.visibility=($('report').style.visibility=='hidden'?'visible':'hidden')"/>
<input type="button" value="Report Problem" onclick="$('report').style.visibility=($('report').style.visibility=='hidden'?'visible':'hidden')">
</div>
</div>
<?php
@@ -238,12 +411,12 @@ href="http://wiki.openstreetmap.org/wiki/Nominatim/FAQ">FAQ</a>. If your proble
to check how the address was generated before reporting a problem.</p>
<p>Please use <a href="http://trac.openstreetmap.org/newticket?component=nominatim">trac.openstreetmap.org</a> to report problems
making sure to set
the component to 'nominatim'. You can search for existing bug reports <a href="http://trac.openstreetmap.org/query?status=new&amp;status=assigned&amp;status=reopened&amp;component=nominatim&amp;order=priority">here</a>.</p>
the component to 'nominatim'. You can search for existing bug reports <a href="http://trac.openstreetmap.org/query?status=new&status=assigned&status=reopened&component=nominatim&order=priority">here</a>.</p>
<p>Please ensure that you include a full description of the problem, including the search query that you used, the problem with the result and, if
the problem relates to missing data, the osm id of the item that is missing. Problems that contain enough detail are likely to get looked at before ones that
require significant research!</p>
</div>
<!--
<p>Please use this form to report problems with the search results. Of particular interest are items missing, but please also use this form to
report any other problems.</p>
@@ -278,7 +451,11 @@ init();
echo ', '.$aResult['aBoundingBox'][1];
echo ', '.$aResult['aBoundingBox'][2];
echo ', '.$aResult['aBoundingBox'][3];
if (isset($aResult['astext'])) echo ", '".$aResult['astext']."'";
if (isset($aResult['aPolyPoints']))
{
echo ', ';
echo javascript_renderData($aResult['aPolyPoints']);
}
echo ');'."\n";
}
else

View File

@@ -1,6 +1,4 @@
<?php
header("content-type: application/json; charset=UTF-8");
$aFilteredPlaces = array();
foreach($aSearchResults as $iResNum => $aPointDetails)
{
@@ -41,10 +39,7 @@
$aPlace['class'] = $aPointDetails['class'];
$aPlace['type'] = $aPointDetails['type'];
$aPlace['importance'] = $aPointDetails['importance'];
if (isset($aPointDetails['icon']) && $aPointDetails['icon'])
if ($aPointDetails['icon'])
{
$aPlace['icon'] = $aPointDetails['icon'];
}
@@ -54,26 +49,6 @@
$aPlace['address'] = $aPointDetails['address'];
}
if (isset($aPointDetails['asgeojson']))
{
$aPlace['geojson'] = json_decode($aPointDetails['asgeojson']);
}
if (isset($aPointDetails['assvg']))
{
$aPlace['svg'] = $aPointDetails['assvg'];
}
if (isset($aPointDetails['astext']))
{
$aPlace['geotext'] = $aPointDetails['astext'];
}
if (isset($aPointDetails['askml']))
{
$aPlace['geokml'] = $aPointDetails['askml'];
}
$aFilteredPlaces[] = $aPlace;
}

View File

@@ -36,14 +36,11 @@
$aPlace['lat'] = $aPointDetails['lat'];
$aPlace['lon'] = $aPointDetails['lon'];
$aPlace['display_name'] = $aPointDetails['name'];
$aPlace['place_rank'] = $aPointDetails['rank_search'];
$aPlace['place_rank'] = $aResult['rank_search'];
$aPlace['category'] = $aPointDetails['class'];
$aPlace['type'] = $aPointDetails['type'];
$aPlace['importance'] = $aPointDetails['importance'];
if (isset($aPointDetails['icon']))
if ($aPointDetails['icon'])
{
$aPlace['icon'] = $aPointDetails['icon'];
}
@@ -53,27 +50,7 @@
$aPlace['address'] = $aPointDetails['address'];
}
if (isset($aPointDetails['asgeojson']))
{
$aPlace['geojson'] = json_decode($aPointDetails['asgeojson']);
}
if (isset($aPointDetails['assvg']))
{
$aPlace['svg'] = $aPointDetails['assvg'];
}
if (isset($aPointDetails['astext']))
{
$aPlace['geotext'] = $aPointDetails['astext'];
}
if (isset($aPointDetails['askml']))
{
$aPlace['geokml'] = $aPointDetails['askml'];
}
$aFilteredPlaces[] = $aPlace;
}
javascript_renderData($aFilteredPlaces, array('geojson'));
javascript_renderData($aFilteredPlaces);

View File

@@ -1,5 +1,6 @@
<?php
header("content-type: text/xml; charset=UTF-8");
header("Access-Control-Allow-Origin: *");
echo "<";
echo "?xml version=\"1.0\" encoding=\"UTF-8\" ?";
@@ -9,7 +10,7 @@
echo " timestamp='".date(DATE_RFC822)."'";
echo " attribution='Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright'";
echo " querystring='".htmlspecialchars($sQuery, ENT_QUOTES)."'";
if ($sViewBox) echo " viewbox='".htmlspecialchars($sViewBox, ENT_QUOTES)."'";
if (isset($sViewBox)) echo " viewbox='".htmlspecialchars($sViewBox, ENT_QUOTES)."'";
echo " polygon='".($bShowPolygons?'true':'false')."'";
if (sizeof($aExcludePlaceIDs))
{
@@ -49,27 +50,6 @@
}
}
if (isset($aResult['asgeojson']))
{
echo ' geojson=\'';
echo $aResult['asgeojson'];
echo '\'';
}
if (isset($aResult['assvg']))
{
echo ' geosvg=\'';
echo $aResult['assvg'];
echo '\'';
}
if (isset($aResult['astext']))
{
echo ' geotext=\'';
echo $aResult['astext'];
echo '\'';
}
if (isset($aResult['zoom']))
{
echo " zoom='".$aResult['zoom']."'";
@@ -80,28 +60,15 @@
echo " display_name='".htmlspecialchars($aResult['name'], ENT_QUOTES)."'";
echo " class='".htmlspecialchars($aResult['class'])."'";
echo " type='".htmlspecialchars($aResult['type'], ENT_QUOTES)."'";
echo " importance='".htmlspecialchars($aResult['importance'])."'";
if (isset($aResult['icon']) && $aResult['icon'])
echo " type='".htmlspecialchars($aResult['type'])."'";
if ($aResult['icon'])
{
echo " icon='".htmlspecialchars($aResult['icon'], ENT_QUOTES)."'";
}
if (isset($aResult['address']) || isset($aResult['askml']))
{
echo ">";
}
if (isset($aResult['askml']))
{
echo "\n<geokml>";
echo $aResult['askml'];
echo "</geokml>";
}
if (isset($aResult['address']))
{
echo "\n";
echo ">";
foreach($aResult['address'] as $sKey => $sValue)
{
$sKey = str_replace(' ','_',$sKey);
@@ -109,10 +76,7 @@
echo htmlspecialchars($sValue);
echo "</$sKey>";
}
}
if (isset($aResult['address']) || isset($aResult['askml']))
{
echo "</place>";
}
else

View File

@@ -1,6 +1,5 @@
MODULES = nominatim
PGXS := @POSTGRESQL_PGXS@
PG_CONFIG := @PG_CONFIG@
include $(PGXS)
all:

View File

@@ -143,12 +143,12 @@ transliteration( PG_FUNCTION_ARGS )
resultdata++;
}
}
/*else
else
{
ereport( WARNING, ( errcode( ERRCODE_SUCCESSFUL_COMPLETION ),
errmsg( "missing char: %i\n", *wchardata )));
}*/
}
wchardata++;
}

View File

@@ -2,45 +2,26 @@
#
# Plugin to monitor the age of the imported data in the rendering db
#
# Can be configured through libpq environment variables, for example
# PGUSER, PGDATABASE, etc. See man page of psql for more information
#
# To configure munin for a default installation, add these lines to
# the file /etc/munin/plugin-conf.d/munin-node or in any file in the
# directory /etc/munin/plugin-conf.d/
#
# [nominatim_*]
# user www-data
# env.PGUSER www-data
# env.PGPORT 5432
# env.PGDATABASE nominatim
# env.age_warning 21600
# env.age_critical 86400
# Parameters:
# Parameters:
#
# config (required)
# autoconf (optional - used by munin-config)
#
. $MUNIN_LIBDIR/plugins/plugin.sh
if [ "$1" = "config" ]; then
echo 'graph_title Data import lag'
echo 'graph_args --base 1000 -l 0'
echo 'graph_vlabel minutes'
echo 'graph_category nominatim'
echo 'age.label DB import age'
echo 'age.type GAUGE'
echo 'graph_title Data import lag'
echo 'graph_args --base 1000 -l 0'
echo 'graph_vlabel minutes'
echo 'graph_category nominatim'
echo 'age.label DB import age'
echo 'age.type GAUGE'
echo 'age.cdef age,60,/'
print_warning age
print_critical age
exit 0
fi
delay=`psql -c "copy (select extract(epoch from timezone('utc', now())-lastimportdate)::int from import_status) to stdout"`
delay=`psql -d nominatim -c "copy (select extract(epoch from timezone('utc', now())-lastimportdate)::int from import_status) to stdout"`
echo "age.value $delay"

View File

@@ -1,121 +0,0 @@
#!/usr/bin/perl -w
# Plugin to monitor response time of search queries.
#
# Based on a plugin by Dalibo <cedric.villemain@dalibo.com> 2007
# Based on a plugin (postgres_block_read_) from Bjørn Ruberg <bjorn@linpro.no>
#
# Licenced under GPL v2.
#
# Usage:
#
# If required, give username, password and/or Postgresql server
# host through environment variables. See man page of psql for
# more info.
#
# Parameters:
#
# config (required)
#
# Magic markers
#%# family=auto
#%# capabilities=autoconf
use strict;
use DBI;
use vars qw ( $debug $configure );
use constant _PGMINI => 70400;
my $dbhost = $ENV{'PGHOST'} || '';
my $dbname = $ENV{'PGDATABASE'} || 'nominatim';
my $dbuser = $ENV{'PGUSER'} || 'postgres';
my $dbport = $ENV{'PGPORT'} || '5432';
my $dsn = "DBI:Pg:dbname=$dbname";
$dsn .=";host=$dbhost;port=$dbport" if $dbhost;
my $pg_server_version;
if (exists $ARGV[0]) {
if ($ARGV[0] eq 'autoconf') {
# Check for DBD::Pg
if (! eval "require DBD::Pg;") {
print "no (DBD::Pg not found)";
exit 1;
}
my $dbh = DBI->connect ($dsn,
$dbuser,
'',
{RaiseError =>1});
if ($dbh) {
$pg_server_version = $dbh->{'pg_server_version'};
if ($pg_server_version < (_PGMINI)) {
$pg_server_version =~ /(\d)(\d){2,2}(\d){2,2}/;
print "PostgreSQL Server version " . (_PGMINI) . " or above is needed. Current is $1.$2.$3 \n";
exit 1;
}
print "yes\n";
exit 0;
} else {
print "no Unable to access Database $dbname on host $dbhost as user $dbuser.\nError returned was: ". $DBI::errstr;
exit 1;
}
} elsif ($ARGV[0] eq 'debug') {
# Set debug flag
$debug = 1;
} elsif ($ARGV[0] eq 'config') {
# Set config flag
$configure = 1;
}
}
print "# $dsn\n" if $debug;
my $dbh = DBI->connect ($dsn,
$dbuser,
'',
{RaiseError =>1});
die ("no Unable to access Database $dbname on host $dbhost as user $dbuser.\nError returned was: ". $DBI::errstr."\n") unless($dbh);
$pg_server_version = $dbh->{'pg_server_version'};
if ($configure) {
print "graph_title Total Nominatim response time\n";
print "graph_vlabel Time to response\n";
print "graph_category Nominatim \n";
print "graph_period minute\n";
print "graph_args --base 1000\n";
print "avg.label Average time to response\n";
print "avg.draw LINE\n";
print "avg.type GAUGE\n";
print "avg.min 0\n";
print "avg.info Moving 5 minute average time to perform search\n";
print "avg.label Average time to response\n";
print "min.label Fastest time to response\n";
print "min.draw LINE\n";
print "min.type GAUGE\n";
print "min.min 0\n";
print "min.info Fastest query in last 5 minutes\n";
print "max.label Slowest time to response\n";
print "max.draw LINE\n";
print "max.type GAUGE\n";
print "max.min 0\n";
print "max.info Slowest query in last 5 minutes\n";
} else {
my $sql = "select TO_CHAR(avg(endtime-starttime),'SS.MS'),TO_CHAR(min(endtime-starttime),'SS.MS'),TO_CHAR(max(endtime-starttime),'SS.MS') from new_query_log where starttime > 'now'::timestamp - '5 minutes'::interval";
print "# $sql\n" if $debug;
my $sth = $dbh->prepare($sql);
$sth->execute();
printf ("# Rows: %d\n", $sth->rows) if $debug;
if ($sth->rows > 0) {
my ($avg, $min, $max) = $sth->fetchrow_array();
print "avg.value $avg\n";
print "min.value $min\n";
print "max.value $max\n";
}
}
exit 0;

View File

@@ -2,9 +2,6 @@
#
# Plugin to monitor the types of requests made to the API
#
# Can be configured through libpq environment variables, for example
# PGUSER, PGDATABASE, etc. See man page of psql for more information.
#
# Parameters:
#
# config (required)
@@ -35,10 +32,10 @@ fi
query="select count(*)/5.0 from new_query_log where starttime > (now() - interval '5 min') and "
reverse=`psql -c "copy ($query type='reverse') to stdout"`
searchy=`psql -c "copy ($query type='search' and results>0) to stdout"`
searchn=`psql -c "copy ($query type='search' and results=0) to stdout"`
details=`psql -c "copy ($query type='details') to stdout"`
reverse=`psql -d nominatim -c "copy ($query type='reverse') to stdout"`
searchy=`psql -d nominatim -c "copy ($query type='search' and results>0) to stdout"`
searchn=`psql -d nominatim -c "copy ($query type='search' and results=0) to stdout"`
details=`psql -d nominatim -c "copy ($query type='details') to stdout"`
echo "z1.value $reverse"
echo "z2.value $searchy"

View File

@@ -1,949 +0,0 @@
#!/usr/bin/perl -w
# mysql2pgsql
# MySQL to PostgreSQL dump file converter
#
# For usage: perl mysql2pgsql.perl --help
#
# ddl statments are changed but none or only minimal real data
# formatting are done.
# data consistency is up to the DBA.
#
# (c) 2004-2007 Jose M Duarte and Joseph Speigle ... gborg
#
# (c) 2000-2004 Maxim Rudensky <fonin@omnistaronline.com>
# (c) 2000 Valentine Danilchuk <valdan@ziet.zhitomir.ua>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. All advertising materials mentioning features or use of this software
# must display the following acknowledgement:
# This product includes software developed by the Max Rudensky
# and its contributors.
# 4. Neither the name of the author nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
use Getopt::Long;
use POSIX;
use strict;
use warnings;
# main sections
# -------------
# 1 variable declarations
# 2 subroutines
# 3 get commandline options and specify help statement
# 4 loop through file and process
# 5. print_plpgsql function prototype
#################################################################
# 1. variable declarations
#################################################################
# command line options
my( $ENC_IN, $ENC_OUT, $PRESERVE_CASE, $HELP, $DEBUG, $SCHEMA, $LOWERCASE, $CHAR2VARCHAR, $NODROP, $SEP_FILE, $opt_debug, $opt_help, $opt_schema, $opt_preserve_case, $opt_char2varchar, $opt_nodrop, $opt_sepfile, $opt_enc_in, $opt_enc_out );
# variables for constructing pre-create-table entities
my $pre_create_sql=''; # comments, 'enum' constraints preceding create table statement
my $auto_increment_seq= ''; # so we can easily substitute it if we need a default value
my $create_sql=''; # all the datatypes in the create table section
my $post_create_sql=''; # create indexes, foreign keys, table comments
my $function_create_sql = ''; # for the set (function,trigger) and CURRENT_TIMESTAMP ( function,trigger )
# constraints
my ($type, $column_valuesStr, @column_values, $value );
my %constraints=(); # holds values constraints used to emulate mysql datatypes (e.g. year, set)
# datatype conversion variables
my ( $index,$seq);
my ( $column_name, $col, $quoted_column);
my ( @year_holder, $year, $constraint_table_name);
my $table=""; # table_name for create sql statements
my $table_no_quotes=""; # table_name for create sql statements
my $sl = '^\s+\w+\s+'; # matches the column name
my $tables_first_timestamp_column= 1; # decision to print warnings about default_timestamp not being in postgres
my $mysql_numeric_datatypes = "TINYINT|SMALLINT|MEDIUMINT|INT|INTEGER|BIGINT|REAL|DOUBLE|FLOAT|DECIMAL|NUMERIC";
my $mysql_datetime_datatypes = "|DATE|TIME|TIMESTAMP|DATETIME|YEAR";
my $mysql_text_datatypes = "CHAR|VARCHAR|BINARY|VARBINARY|TINYBLOB|BLOB|MEDIUMBLOB|LONGBLOB|TINYTEXT|TEXT|MEDIUMTEXT|LONGTEXT|ENUM|SET";
my $mysql_datatypesStr = $mysql_numeric_datatypes . "|". $mysql_datetime_datatypes . "|". $mysql_text_datatypes ;
# handling INSERT INTO statements
my $rowRe = qr{
\( # opening parens
( # (start capture)
(?: # (start group)
' # string start
[^'\\]* # up to string-end or backslash (escape)
(?: # (start group)
\\. # gobble escaped character
[^'\\]* # up to string-end of backslash
)* # (end group, repeat zero or more)
' # string end
| # (OR)
.*? # everything else (not strings)
)* # (end group, repeat zero or more)
) # (end capture)
\) # closing parent
}x;
my ($insert_table, $valueString);
#
########################################################
# 2. subroutines
#
# get_identifier
# print_post_create_sql()
# quote_and_lc()
# make_plpgsql($table,$column_name) -- at end of file
########################################################
# returns an identifier with the given suffix doing controlled
# truncation if necessary
sub get_identifier($$$) {
my ($table, $col, $suffix) = @_;
my $name = '';
$table=~s/\"//g; # make sure that $table doesn't have quotes so we don't end up with redundant quoting
# in the case of multiple columns
my @cols = split(/,/,$col);
$col =~ s/,//g;
# in case all columns together too long we have to truncate them
if (length($col) > 55) {
my $totaltocut = length($col)-55;
my $tocut = ceil($totaltocut / @cols);
@cols = map {substr($_,0,abs(length($_)-$tocut))} @cols;
$col="";
foreach (@cols){
$col.=$_;
}
}
my $max_table_length = 63 - length("_${col}_$suffix");
if (length($table) > $max_table_length) {
$table = substr($table, length($table) - $max_table_length, $max_table_length);
}
return quote_and_lc("${table}_${col}_${suffix}");
}
#
#
# called when we encounter next CREATE TABLE statement
# also called at EOF to print out for last table
# prints comments, indexes, foreign key constraints (the latter 2 possibly to a separate file)
sub print_post_create_sql() {
my ( @create_idx_comments_constraints_commandsArr, $stmts, $table_field_combination);
my %stmts;
# loop to check for duplicates in $post_create_sql
# Needed because of duplicate key declarations ( PRIMARY KEY and KEY), auto_increment columns
@create_idx_comments_constraints_commandsArr = split(';\n?', $post_create_sql);
if ($SEP_FILE) {
open(SEP_FILE, ">>:encoding($ENC_OUT)", $SEP_FILE) or die "Unable to open $SEP_FILE for output: $!\n";
}
foreach (@create_idx_comments_constraints_commandsArr) {
if (m/CREATE INDEX "*(\S+)"*\s/i) { # CREATE INDEX korean_english_wordsize_idx ON korean_english USING btree (wordsize);
$table_field_combination = $1;
# if this particular table_field_combination was already used do not print the statement:
if ($SEP_FILE) {
print SEP_FILE "$_;\n" if !defined($stmts{$table_field_combination});
} else {
print OUT "$_;\n" if !defined($stmts{$table_field_combination});
}
$stmts{$table_field_combination} = 1;
}
elsif (m/COMMENT/i) { # COMMENT ON object IS 'text'; but comment may be part of table name so use 'elsif'
print OUT "$_;\n"
} else { # foreign key constraint or comments (those preceded by -- )
if ($SEP_FILE) {
print SEP_FILE "$_;\n";
} else {
print OUT "$_;\n"
}
}
}
if ($SEP_FILE) {
close SEP_FILE;
}
$post_create_sql='';
# empty %constraints for next " create table" statement
}
# quotes a string or a multicolumn string (comma separated)
# and optionally lowercase (if LOWERCASE is set)
# lowercase .... if user wants default postgres behavior
# quotes .... to preserve keywords and to preserve case when case-sensitive tables are to be used
sub quote_and_lc($)
{
my $col = shift;
if ($LOWERCASE) {
$col = lc($col);
}
if ($col =~ m/,/) {
my @cols = split(/,\s?/, $col);
@cols = map {"\"$_\""} @cols;
return join(', ', @cols);
} else {
return "\"$col\"";
}
}
########################################################
# 3. get commandline options and maybe print help
########################################################
GetOptions("help", "debug"=> \$opt_debug, "schema=s" => \$SCHEMA, "preserve_case" => \$opt_preserve_case, "char2varchar" => \$opt_char2varchar, "nodrop" => \$opt_nodrop, "sepfile=s" => \$opt_sepfile, "enc_in=s" => \$opt_enc_in, "enc_out=s" => \$opt_enc_out );
$HELP = $opt_help || 0;
$DEBUG = $opt_debug || 0;
$PRESERVE_CASE = $opt_preserve_case || 0;
if ($PRESERVE_CASE == 1) { $LOWERCASE = 0; }
else { $LOWERCASE = 1; }
$CHAR2VARCHAR = $opt_char2varchar || 0;
$NODROP = $opt_nodrop || 0;
$SEP_FILE = $opt_sepfile || 0;
$ENC_IN = $opt_enc_in || 'utf8';
$ENC_OUT = $opt_enc_out || 'utf8';
if (($HELP) || ! defined($ARGV[0]) || ! defined($ARGV[1])) {
print "\n\nUsage: perl $0 {--help --debug --preserve_case --char2varchar --nodrop --schema --sepfile --enc_in --enc_out } mysql.sql pg.sql\n";
print "\t* OPTIONS WITHOUT ARGS\n";
print "\t--help: prints this message \n";
print "\t--debug: output the commented-out mysql line above the postgres line in pg.sql \n";
print "\t--preserve_case: prevents automatic case-lowering of column and table names\n";
print "\t\tIf you want to preserve case, you must set this flag. For example,\n";
print "\t\tIf your client application quotes table and column-names and they have cases in them, set this flag\n";
print "\t--char2varchar: converts all char fields to varchar\n";
print "\t--nodrop: strips out DROP TABLE statements\n";
print "\t\totherise harmless warnings are printed by psql when the dropped table does not exist\n";
print "\n\t* OPTIONS WITH ARGS\n";
print "\t--schema: outputs a line into the postgres sql file setting search_path \n";
print "\t--sepfile: output foreign key constraints and indexes to a separate file so that it can be\n";
print "\t\timported after large data set is inserted from another dump file\n";
print "\t--enc_in: encoding of mysql in file (default utf8) \n";
print "\t--enc_out: encoding of postgres out file (default utf8) \n";
print "\n\t* REQUIRED ARGUMENTS\n";
if (defined ($ARGV[0])) {
print "\tmysql.sql ($ARGV[0])\n";
} else {
print "\tmysql.sql (undefined)\n";
}
if (defined ($ARGV[1])) {
print "\tpg.sql ($ARGV[1])\n";
} else {
print "\tpg.sql (undefined)\n";
}
print "\n";
exit 1;
}
########################################################
# 4. process through mysql_dump.sql file
# in a big loop
########################################################
# open in and out files
open(IN,"<:encoding($ENC_IN)", $ARGV[0]) || die "can't open mysql dump file $ARGV[0]";
open(OUT,">:encoding($ENC_OUT)", $ARGV[1]) || die "can't open pg dump file $ARGV[1]";
# output header
print OUT "--\n";
print OUT "-- Generated from mysql2pgsql.perl\n";
print OUT "-- http://gborg.postgresql.org/project/mysql2psql/\n";
print OUT "-- (c) 2001 - 2007 Jose M. Duarte, Joseph Speigle\n";
print OUT "--\n";
print OUT "\n";
print OUT "-- warnings are printed for drop tables if they do not exist\n";
print OUT "-- please see http://archives.postgresql.org/pgsql-novice/2004-10/msg00158.php\n\n";
print OUT "-- ##############################################################\n";
if ($SCHEMA ) {
print OUT "set search_path='" . $SCHEMA . "'\\g\n" ;
}
# loop through mysql file on a per-line basis
while(<IN>) {
############## flow #########################
# (the lines are directed to different string variables at different times)
#
# handle drop table , unlock, connect statements
# if ( start of create table) {
# print out post_create table (indexes, foreign key constraints, comments from previous table)
# add drop table statement if !$NODROP to pre_create_sql
# next;
# }
# else if ( inside create table) {
# add comments in this portion to create_sql
# if ( end of create table) {
# delete mysql-unique CREATE TABLE commands
# print pre_create_sql
# print the constraint tables for set and year datatypes
# print create_sql
# print function_create_sql (this is for the enum columns only)
# next;
# }
# do substitutions
# -- NUMERIC DATATYPES
# -- CHARACTER DATATYPES
# -- DATE AND TIME DATATYPES
# -- KEY AND UNIQUE CREATIONS
# and append them to create_sql
# } else {
# print inserts on-the-spot (this script only changes default timestamp of 0000-00-00)
# }
# LOOP until EOF
#
########################################################
if (!/^\s*insert into/i) { # not inside create table so don't worry about data corruption
s/`//g; # '`pgsql uses no backticks to denote table name (CREATE TABLE `sd`) or around field
# and table names like mysql
# doh! we hope all dashes and special chars are caught by the regular expressions :)
}
if (/^\s*USE\s*([^;]*);/) {
print OUT "\\c ". $1;
next;
}
if (/^(UN)?LOCK TABLES/i || /drop\s+table/i ) {
# skip
# DROP TABLE is added when we see the CREATE TABLE
next;
}
if (/(create\s+table\s+)([-_\w]+)\s/i) { # example: CREATE TABLE `english_english`
print_post_create_sql(); # for last table
$tables_first_timestamp_column= 1; # decision to print warnings about default_timestamp not being in postgres
$create_sql = '';
$table_no_quotes = $2 ;
$table=quote_and_lc($2);
if ( !$NODROP ) { # always print drop table if user doesn't explicitly say not to
# to drop a table that is referenced by a view or a foreign-key constraint of another table,
# CASCADE must be specified. (CASCADE will remove a dependent view entirely, but in the
# in the foreign-key case it will only remove the foreign-key constraint, not the other table entirely.)
# (source: 8.1.3 docs, section "drop table")
warn "table $table will be dropped CASCADE\n";
$pre_create_sql .= "DROP TABLE $table CASCADE\\g\n"; # custom dumps may be missing the 'dump' commands
}
s/(create\s+table\s+)([-_\w]+)\s/$1 $table /i;
if ($DEBUG) {
$create_sql .= '-- ' . $_;
}
$create_sql .= $_;
next;
}
if ($create_sql ne "") { # we are inside create table statement so lets process datatypes
# print out comments or empty lines in context
if ($DEBUG) {
$create_sql .= '-- ' . $_;
}
if (/^#/ || /^$/ || /^\s*--/) {
s/^#/--/; # Two hyphens (--) is the SQL-92 standard indicator for comments
$create_sql.=$_;
next;
}
if (/\).*;/i) { # end of create table squence
s/INSERT METHOD[=\s+][^;\s]+//i;
s/PASSWORD=[^;\s]+//i;
s/ROW_FORMAT=(?:DEFAULT|DYNAMIC|FIXED|COMPRESSED|REDUNDANT|COMPACT)+//i;
s/DELAY KEY WRITE=[^;\s]+//i;
s/INDEX DIRECTORY[=\s+][^;\s]+//i;
s/DATA DIRECTORY=[^;\s]+//i;
s/CONNECTION=[^;\s]+//i;
s/CHECKSUM=[^;\s]+//i;
s/Type=[^;\s]+//i; # ISAM , # older versions
s/COLLATE=[^;\s]+//i; # table's collate
s/COLLATE\s+[^;\s]+//i; # table's collate
# possible AUTO_INCREMENT starting index, it is used in mysql 5.0.26, not sure since which version
if (/AUTO_INCREMENT=(\d+)/i) {
# should take < ---- ) ENGINE=MyISAM AUTO_INCREMENT=16 DEFAULT CHARSET=latin1;
# and should ouput ---> CREATE SEQUENCE "rhm_host_info_id_seq" START WITH 16;
my $start_value = $1;
print $auto_increment_seq . "--\n";
# print $pre_create_sql . "--\n";
$pre_create_sql =~ s/(CREATE SEQUENCE $auto_increment_seq )/$1 START WITH $start_value /;
}
s/AUTO_INCREMENT=\d+//i;
s/PACK_KEYS=\d//i; # mysql 5.0.22
s/DEFAULT CHARSET=[^;\s]+//i; # my mysql version is 4.1.11
s/ENGINE\s*=\s*[^;\s]+//i; # my mysql version is 4.1.11
s/ROW_FORMAT=[^;\s]+//i; # my mysql version is 5.0.22
s/MIN_ROWS=[^;\s]+//i;
s/MAX_ROWS=[^;\s]+//i;
s/AVG_ROW_LENGTH=[^;\s]+//i;
if (/COMMENT='([^']*)'/) { # ) ENGINE=MyISAM DEFAULT CHARSET=utf8 COMMENT='must be country zones';
$post_create_sql.="COMMENT ON TABLE $table IS '$1'\;"; # COMMENT ON table_name IS 'text';
s/COMMENT='[^']*'//i;
}
$create_sql =~ s/,$//g; # strip last , inside create table
# KEY statements are turned into post_create_sql indices and are often the
# last line of the create table, which can leave a 'hanging comma' on the
# preceding column definition, so tidy the commas up here
my @array = split("\n", $create_sql);
for (my $a = $#array; $a >= 0; $a--) { #loop backwards
if ($a == $#array && $array[$a] =~ m/,\s*$/) { # for last line
$array[$a] =~ s/,\s*$//;
next;
}
if ($array[$a] !~ m/create table/i) { # i.e. if there was more than one column in table
if ($a != $#array && $array[$a] !~ m/,\s*$/ ) { # for second to last
$array[$a] =~ s/$/,/;
last;
}
elsif ($a != $#array && $array[$a] =~ m/,\s*$/ ) { # for second to last
last;
}
}
}
$create_sql = join("\n", @array) . "\n";
$create_sql .= $_;
# put comments out first
print OUT $pre_create_sql;
# create a separate table to hold the allowed values of mysql's set (and year)
# datatypes so they can be referenced; it must be created before the
# table definition that uses it
foreach $column_name (keys %constraints) {
$type=$constraints{$column_name}{'type'};
$column_valuesStr = $constraints{$column_name}{'values'};
$constraint_table_name = get_identifier(${table},${column_name} ,"constraint_table");
if ($type eq 'set') {
print OUT qq~DROP TABLE $constraint_table_name CASCADE\\g\n~ ;
print OUT qq~create table $constraint_table_name ( set_values varchar UNIQUE)\\g\n~ ;
$function_create_sql .= make_plpgsql($table,$column_name);
} elsif ($type eq 'year') {
print OUT qq~DROP TABLE $constraint_table_name CASCADE\\g\n~ ;
print OUT qq~create table $constraint_table_name ( year_values varchar UNIQUE)\\g\n~ ;
}
@column_values = split /,/, $column_valuesStr;
foreach $value (@column_values) {
print OUT qq~insert into $constraint_table_name values ( $value )\\g\n~; # add ' for ints and varchars
}
}
$create_sql =~ s/double double/double precision/g;
# print create table and reset create table vars
# when moving from each "create table" to "insert" part of dump
print OUT $create_sql;
print OUT $function_create_sql;
$pre_create_sql="";
$auto_increment_seq="";
$create_sql="";
$function_create_sql='';
%constraints=();
# the post_create_sql for this table is output at the beginning of the next table def
# in case we want to make indexes after doing inserting
next;
}
if (/^\s*(\w+)\s+.*COMMENT\s*'([^']*)'/) { #`zone_country_id` int(11) COMMENT 'column comment here',
$quoted_column=quote_and_lc($1);
$post_create_sql.="COMMENT ON COLUMN $table"."."." $quoted_column IS '$2'\;"; # COMMENT ON table_name.column_name IS 'text';
s/COMMENT\s*'[^']*'//i;
}
# NUMERIC DATATYPES
#
# auto_increment -> sequences
# UNSIGNED conversions
# TINYINT
# SMALLINT
# MEDIUMINT
# INT, INTEGER
# BIGINT
#
# DOUBLE [PRECISION], REAL
# DECIMAL(M,D), NUMERIC(M,D)
# FLOAT(p)
# FLOAT
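# Illustrative examples of what this section aims to produce for
# hypothetical columns (exact quoting depends on quote_and_lc):
#   `data_id` mediumint(8) unsigned NOT NULL auto_increment  ->  "data_id" serial8
#   `qty` int(10) unsigned NOT NULL default '0'              ->  "qty" int CHECK ("qty" >= 0) NOT NULL default '0'
#   `big_id` bigint(20) unsigned NOT NULL                    ->  "big_id" NUMERIC (20,0) CHECK ("big_id" >= 0) NOT NULL
#   `ratio` double(10,2) default NULL                        ->  "ratio" float default NULL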
s/(\w*int)\(\d+\)/$1/g; # hack off the (n) stuff for e.g. mediumint(2) int(3)
if (/^(\s*)(\w+)\s*.*numeric.*auto_increment/i) { # int,auto_increment -> serial
$seq = get_identifier($table, $2, 'seq');
$quoted_column=quote_and_lc($2);
# Smash datatype to int8 and autogenerate the sequence.
s/^(\s*)(\w+)\s*.*NUMERIC(.*)auto_increment([^,]*)/$1 $quoted_column serial8 $4/ig;
$create_sql.=$_;
next;
}
if (/^\s*(\w+)\s+.*int.*auto_increment/i) { # example: data_id mediumint(8) unsigned NOT NULL auto_increment,
$seq = get_identifier($table, $1, 'seq');
$quoted_column=quote_and_lc($1);
s/(\s*)(\w+)\s+.*int.*auto_increment([^,]*)/$1 $quoted_column serial8 $3/ig;
$create_sql.=$_;
next;
}
# convert UNSIGNED to CHECK constraints
if (m/^(\s*)(\w+)\s+((float|double precision|double|real|decimal|numeric))(.*)unsigned/i) {
$quoted_column = quote_and_lc($2);
s/^(\s*)(\w+)\s+((float|double precision|double|real|decimal|numeric))(.*)unsigned/$1 $quoted_column $3 $4 CHECK ($quoted_column >= 0)/i;
}
if (m/^(\s*)(\w+)\s+(bigint.*)unsigned/i) { # bigint unsigned does not fit into postgres' signed bigint
$quoted_column=quote_and_lc($2);
# this check must come before the generic unsigned handling below, otherwise it never matches
# see http://archives.postgresql.org/pgsql-general/2005-07/msg01178.php
# and see http://www.postgresql.org/docs/8.2/interactive/datatype-numeric.html
# see http://dev.mysql.com/doc/refman/5.1/en/numeric-types.html max size == 20 digits
s/^(\s*)(\w+)\s+bigint(.*)unsigned/$1 $quoted_column NUMERIC (20,0) CHECK ($quoted_column >= 0)/i;
}
# example: `wordsize` tinyint(3) unsigned default NULL,
if (m/^(\s+)(\w+)\s+(\w+)\s+unsigned/i) {
$quoted_column=quote_and_lc($2);
s/^(\s+)(\w+)\s+(\w+)\s+unsigned/$1 $quoted_column $3 CHECK ($quoted_column >= 0)/i;
}
# int type conversion
# TINYINT A very small integer. The signed range is -128 to 127. The unsigned range is 0 to 255.
# SMALLINT A small integer. The signed range is -32768 to 32767. The unsigned range is 0 to 65535.
# MEDIUMINT A medium-sized integer. The signed range is -8388608 to 8388607. The unsigned range is 0 to 16777215.
# INT A normal-size integer. The signed range is -2147483648 to 2147483647. The unsigned range is 0 to 4294967295.
# BIGINT The signed range is -9223372036854775808 to 9223372036854775807. The unsigned range is 0 to 18446744073709551615
# for postgres see http://www.postgresql.org/docs/8.2/static/datatype-numeric.html#DATATYPE-INT
s/^(\s+"*\w+"*\s+)tinyint/$1 smallint/i;
s/^(\s+"*\w+"*\s+)mediumint/$1 integer/i;
# the floating point types
# double -> double precision
# double(n,m) -> double precision
# float - no need for conversion
# float(n) - no need for conversion
# float(n,m) -> double precision
s/(^\s*\w+\s+)double(\(\d+,\d+\))?/$1float/i;
s/float(\(\d+,\d+\))/float/i;
#
# CHARACTER TYPES
#
# set
# enum
# binary(M), VARBINARy(M), tinyblob, tinytext,
# bit
# char(M), varchar(M)
# blob -> text
# mediumblob
# longblob, longtext
# text -> text
# mediumtext
# longtext
# mysql docs: A BLOB is a binary large object that can hold a variable amount of data.
# set
# For example, a column specified as SET('one', 'two') NOT NULL can have any of these values:
# ''
# 'one'
# 'two'
# 'one,two'
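# Illustrative sketch for a hypothetical column `au_auth` set('r','w','d')
# in a table `acl` (the exact names of the helper table and trigger function
# come from get_identifier and make_plpgsql):
#   - the column definition is rewritten to:   au_auth varchar ,
#   - a lookup table such as acl_au_auth_constraint_table is created with
#     one row per allowed member ('r', 'w', 'd')
#   - a plpgsql BEFORE INSERT OR UPDATE trigger generated by make_plpgsql()
#     raises an exception when a comma-separated value contains a member
#     that is not in that lookup table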
if (/(\w*)\s+set\(((?:['"]\w+['"]\s*,*)+(?:['"]\w+['"])*)\)(.*)$/i) { # example: `au_auth` set('r','w','d') NOT NULL default '',
$column_name = $1;
$constraints{$column_name}{'values'} = $2; # 'abc','def', ...
$constraints{$column_name}{'type'} = "set"; # 'abc','def', ...
$_ = qq~ $column_name varchar , ~;
$column_name = quote_and_lc($1);
$create_sql.=$_;
next;
}
if (/(\S*)\s+enum\(((?:['"][^'"]+['"]\s*,)+['"][^'"]+['"])\)(.*)$/i) { # enum handling
# example: `test` enum('?','+','-') NOT NULL default '?'
# $2 is the values of the enum 'abc','def', ...
$quoted_column=quote_and_lc($1);
# "test" NOT NULL default '?' CONSTRAINT test_test_constraint CHECK ("test" IN ('?','+','-'))
$_ = qq~ $quoted_column varchar CHECK ($quoted_column IN ( $2 ))$3\n~; # just assume varchar?
$create_sql.=$_;
next;
}
# Take care of "binary" option for char and varchar
# (pre-4.1.2, it indicated a byte array; from 4.1.2, indicates
# a binary collation)
s/(?:var)?char(?:\(\d+\))? (?:byte|binary)/text/i;
if (m/(?:var)?binary\s*\(\d+\)/i) { # c varBINARY(3) in Mysql
warn "WARNING in table '$table' '$_': binary type is converted to bytea (unsized) for Postgres\n";
}
s/(?:var)?binary(?:\(\d+\))?/text/i; # c varBINARY(3) in Mysql
s/bit(?:\(\d+\))?/bytea/i; # bit datatype -> bytea
# large datatypes
s/\w*blob/bytea/gi;
s/tinytext/text/gi;
s/mediumtext/text/gi;
s/longtext/text/gi;
# char -> varchar -- if specified as a command line option
# PostgreSQL would otherwise pad with spaces as opposed
# to MySQL! Your user interface may depend on this!
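# e.g. (illustrative, only when the command-line option sets $CHAR2VARCHAR):
#   `code` char(2) NOT NULL   ->   "code" varchar(2) NOT NULL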
if ($CHAR2VARCHAR) {
s/(^\s+\S+\s+)char/${1}varchar/gi;
}
# nuke column's collate and character set
s/(\S+)\s+character\s+set\s+\w+/$1/gi;
s/(\S+)\s+collate\s+\w+/$1/gi;
#
# DATE AND TIME TYPES
#
# date time
# year
# datetime
# timestamp
# date time
# these are the same types in postgres, just do the replacement of 0000-00-00 date
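# e.g. (illustrative): default '0000-00-00 00:00:00' -> default '1970-01-01 00:00:00'
#      and             default '2007-00-00'          -> default '2007-01-01'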
if (m/default '(\d+)-(\d+)-(\d+)([^']*)'/i) { # we grab the year, month and day
# NOTE: times of 00:00:00 are possible and are okay
my $time = '';
my $year=$1;
my $month= $2;
my $day = $3;
if ($4) {
$time = $4;
}
if ($year eq "0000") { $year = '1970'; }
if ($month eq "00") { $month = '01'; }
if ($day eq "00") { $day = '01'; }
s/default '[^']+'/default '$year-$month-$day$time'/i; # finally we replace with $datetime
}
# convert mysql's year datatype to a constraint
if (/(\w*)\s+year\(4\)(.*)$/i) { # can be integer OR string 1901-2155
$constraint_table_name = get_identifier($table,$1 ,"constraint_table");
$column_name=quote_and_lc($1);
@year_holder = ();
$year='';
for (1901 .. 2155) {
$year = "'$_'";
unless ($year =~ /2155/) { $year .= ','; }
push( @year_holder, $year);
}
$constraints{$column_name}{'values'} = join('','',@year_holder); # '1901','1902', ...
$constraints{$column_name}{'type'} = "year";
$_ = qq~ $column_name varchar CONSTRAINT ${table}_${column_name}_constraint REFERENCES $constraint_table_name ("year_values") $2\n~;
$create_sql.=$_;
next;
} elsif (/(\w*)\s+year\(2\)(.*)$/i) { # same for a 2-integer string
$constraint_table_name = get_identifier($table,$1 ,"constraint_table");
$column_name=quote_and_lc($1);
@year_holder = ();
$year='';
for (1970 .. 2069) { # 2-digit years cover 1970 to 2069 in mysql
$year = "'$_'";
push( @year_holder, $year);
}
push( @year_holder, "'0000'"); # mysql also allows the zero year
$constraints{$column_name}{'values'} = join(',',@year_holder); # '1970','1971', ...
$constraints{$column_name}{'type'} = "year";
$_ = qq~ $column_name varchar CONSTRAINT ${table}_${column_name}_constraint REFERENCES $constraint_table_name ("year_values") $2\n~;
$create_sql.=$_;
next;
}
# datetime
# Default on a dump from MySQL 5.0.22 is in the same form as datetime so let it flow down
# to the timestamp section and deal with it there
s/(${sl})datetime /$1timestamp without time zone /i;
# change not null datetime field to null valid ones
# (to support remapping of "zero time" to null)
# s/($sl)datetime not null/$1timestamp without time zone/i;
# timestamps
#
# nuke datetime representation (not supported in PostgreSQL)
# change default time of 0000-00-00 to 1970-01-01
# we may possibly need to create a trigger to provide
# equal functionality with ON UPDATE CURRENT TIMESTAMP
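# For example (illustrative column, not from a real dump):
#   `changed` timestamp NOT NULL default CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
# keeps its default for INSERTs, while the ON UPDATE part is emulated by the
# BEFORE UPDATE trigger that is appended to $function_create_sql below.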
if (m/${sl}timestamp/i) {
if ( m/ON UPDATE CURRENT_TIMESTAMP/i ) { # the ... default CURRENT_TIMESTAMP only applies for blank inserts, not updates
s/ON UPDATE CURRENT_TIMESTAMP//i ;
m/^\s*(\w+)\s+timestamp/i ;
# automatic trigger creation
$table_no_quotes =~ s/"//g;
$function_create_sql .= " CREATE OR REPLACE FUNCTION update_". $table_no_quotes . "() RETURNS trigger AS '
BEGIN
NEW.$1 := CURRENT_TIMESTAMP;
RETURN NEW;
END;
' LANGUAGE 'plpgsql';
-- before INSERT is handled by 'default CURRENT_TIMESTAMP'
CREATE TRIGGER add_current_date_to_".$table_no_quotes." BEFORE UPDATE ON ". $table . " FOR EACH ROW EXECUTE PROCEDURE
update_".$table_no_quotes."();\n";
}
if ($tables_first_timestamp_column && m/DEFAULT NULL/i) {
# DEFAULT NULL is the same as DEFAULT CURRENT_TIMESTAMP for the first TIMESTAMP column. (MYSQL manual)
s/($sl)(timestamp\s+)default null/$1 $2 DEFAULT CURRENT_TIMESTAMP/i;
}
$tables_first_timestamp_column= 0;
if (m/${sl}timestamp\s*\(\d+\)/i) { # fix for timestamps with width spec not handled (ID: 1628)
warn "WARNING for in table '$table' '$_': your default timestamp width is being ignored for table $table \n";
s/($sl)timestamp(?:\(\d+\))/$1datetime/i;
}
} # end timestamp section
# KEY AND UNIQUE CREATIONS
#
# unique
if ( /^\s+unique\s+\(([^(]+)\)/i ) { # example UNIQUE `name` (`name`), same as UNIQUE KEY
# POSTGRESQL: treat same as mysql unique
$quoted_column = quote_and_lc($1);
s/\s+unique\s+\(([^(]+)\)/ unique ($quoted_column) /i;
$create_sql.=$_;
next;
} elsif ( /^\s+unique\s+key\s*(\w+)\s*\(([^(]+)\)/i ) { # example UNIQUE KEY `name` (`name`)
# MYSQL: unique key: allows null=YES, allows duplicates=NO (*)
# ... new ... UNIQUE KEY `unique_fullname` (`fullname`) in my mysql v. Ver 14.12 Distrib 5.1.7-beta
# POSTGRESQL: treat same as mysql unique
# just quote columns
$quoted_column = quote_and_lc($2);
s/\s+unique\s+key\s*(\w+)\s*\(([^(]+)\)/ unique ($quoted_column) /i;
$create_sql.=$_;
# the index corresponding to the 'key' is automatically created
next;
}
# keys
if ( /^\s+fulltext key\s+/i) { # example: FULLTEXT KEY `commenttext` (`commenttext`)
# FULLTEXT KEY has no direct postgres equivalent; postgres' tsvector
# datatype is made for this kind of thing, but the transformation is left
# to the DBA. For an introduction to tsvector see
# http://www.sai.msu.su/~megera/postgres/gist/tsearch/V2/docs/tsearch-V2-intro.html
warn "dba must do fulltext key transformation for $table\n";
next;
}
if ( /^(\s+)constraint (\S+) foreign key \((\S+)\) references (\S+) \((\S+)\)(.*)/i ) {
$quoted_column =quote_and_lc($3);
$col=quote_and_lc($5);
$post_create_sql .= "ALTER TABLE $table ADD FOREIGN KEY ($quoted_column) REFERENCES " . quote_and_lc($4) . " ($col);\n";
next;
}
if ( /^\s*primary key\s*\(([^)]+)\)([,\s]+)/i ) { # example PRIMARY KEY (`name`)
# MYSQL: primary key: allows null=NO , allows duplicates=NO
# POSTGRESQL: When an index is declared unique, multiple table rows with equal indexed values will not be
# allowed. Null values are not considered equal.
# POSTGRESQL quote's source: 8.1.3 docs section 11.5 "unique indexes"
# so, in postgres, we need to add a NOT NULL to the UNIQUE constraint
# and, primary key (mysql) == primary key (postgres), so we really don't need to change anything
$quoted_column = quote_and_lc($1);
s/(\s*)primary key\s+\(([^)]+)\)([,\s]+)/$1 primary key ($quoted_column)$3/i;
# indexes are automatically created for unique columns
$create_sql.=$_;
next;
} elsif (m/^\s+key\s[-_\s\w]+\((.+)\)/i ) { # example: KEY `idx_mod_english_def_word` (`word`),
# regular key: allows null=YES, allows duplicates=YES
# MYSQL: KEY is normally a synonym for INDEX. http://dev.mysql.com/doc/refman/5.1/en/create-table.html
#
# * MySQL: ALTER TABLE {$table} ADD KEY $column ($column)
# * PostgreSQL: CREATE INDEX {$table}_$column_idx ON {$table}($column) // Please note the _idx "extension"
# PRIMARY KEY (`postid`),
# KEY `ownerid` (`ownerid`)
# create an index for everything which has a key listed for it.
my $col = $1;
# TODO we don't have a translation for the substring syntax in text columns in MySQL (e.g. "KEY my_idx (mytextcol(20))")
# for now just getting rid of the brackets and numbers (the substring specifier):
$col=~s/\(\d+\)//g;
$quoted_column = quote_and_lc($col);
if ($col =~ m/,/) {
$col =~ s/,/_/g; # collapse multi-column keys into one identifier for the index name
}
$index = get_identifier($table, $col, 'idx');
$post_create_sql.="CREATE INDEX $index ON $table USING btree ($quoted_column)\;";
# just create index do not add to create table statement
next;
}
# handle 'key' declared at end of column
if (/\w+.*primary key/i) { # mysql: key is normally just a synonym for index
# just leave as is ( postgres has primary key type)
} elsif (/(\w+\s+(?:$mysql_datatypesStr)\s+.*)key/i) { # mysql: key is normally just a synonym for index
# I can't find a reference for 'key' in a postgres command without using the word 'primary key'
s/$1key/$1/i ;
$index = get_identifier($table, $1, 'idx');
$quoted_column =quote_and_lc($1);
$post_create_sql.="CREATE INDEX $index ON $table USING btree ($quoted_column) \;";
$create_sql.=$_;
}
# do we really need this anymore?
# remap columns whose names clash with existing system attributes
if (/"oid"/i) {
s/"oid"/"_oid"/g;
print STDERR "WARNING: table $table uses column \"oid\" which is renamed to \"_oid\"\nYou should fix application manually! Press return to continue.";
my $wait=<STDIN>;
}
s/oid/_oid/i if (/key/i && /oid/i); # fix oid in key
# FINAL QUOTING OF ALL COLUMNS
# quote column names which were not already quoted
# perhaps they were not quoted because they were not explicitly handled
if (!/^\s*"(\w+)"(\s+)/i) {
/^(\s*)(\w+)(\s+)(.*)$/i ;
$quoted_column= quote_and_lc($2);
s/^(\s*)(\w+)(\s+)(.*)$/$1 $quoted_column $3 $4 /;
}
$create_sql.=$_;
# END of if ($create_sql ne "") i.e. we were inside a create table statement and processed datatypes
}
# add "not in create table" comments or empty lines to pre_create_sql
elsif (/^#/ || /^$/ || /^\s*--/) {
s/^#/--/; # Two hyphens (--) is the SQL-92 standard indicator for comments
$pre_create_sql .= $_ ; # printed above create table statement
next;
}
elsif (/^\s*insert into/i) { # not inside create table and doing insert
# fix mysql's zero/null value for timestamps
s/'0000-00-00/'1970-01-01/gi;
# commented out to fix bug "Field contents interpreted as a timestamp", what was the point of this line anyway?
#s/([12]\d\d\d)([01]\d)([0-3]\d)([0-2]\d)([0-6]\d)([0-6]\d)/'$1-$2-$3 $4:$5:$6'/;
#---- fix data in inserted data: (from MS world)
s!\x96!-!g; # --
s!\x93!"!g; # ``
s!\x94!"!g; # ''
s!\x85!... !g; # \ldots
s!\x92!`!g;
print OUT $pre_create_sql; # print comments preceding the insert section
$pre_create_sql="";
$auto_increment_seq = "";
s/'((?:[^'\\]++|\\.)*+)'(?=[),])/E'$1'/g;
# for the E'' see http://www.postgresql.org/docs/8.2/interactive/release-8-1.html
s!\\\\!\\\\\\\\!g; # replace \\ with \\\\
# split 'extended' INSERT INTO statements to something PostgreSQL can understand
( $insert_table, $valueString) = $_ =~ m/^INSERT\s+INTO\s+['`"]*(.*?)['`"]*\s+VALUES\s*(.*)/i;
$insert_table = quote_and_lc($insert_table);
s/^INSERT INTO.*?\);//i; # hose the statement which is to be replaced whether a run-on or not
# guarantee table names are quoted
print OUT qq(INSERT INTO $insert_table VALUES $valueString \n);
} else {
print OUT $_ ; # example: /*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
}
# keep looping and get next line of IN file
} # END while(<IN>)
print_post_create_sql(); # in case there is extra from the last table
#################################################################
# 5. make_plpgsql function prototype
# emulate the set datatype with the following plpgsql function
# looks ugly so putting at end of file
#################################################################
#
sub make_plpgsql {
my ($table,$column_name) = ($_[0],$_[1]);
$table=~s/\"//g; # make sure that $table doesn't have quotes so we don't end up with redundant quoting
my $constraint_table = get_identifier($table,$column_name ,"constraint_table");
return "
-- this function is called by the insert/update trigger
-- it checks if the INSERT/UPDATE for the 'set' column
-- contains members which comprise a valid mysql set
-- this TRIGGER function therefore acts like a constraint
-- provides limited functionality for mysql's set datatype
-- just verifies and matches for string representations of the set at this point
-- though the set datatype uses bit comparisons, the only supported arguments to our
-- set datatype are VARCHAR arguments
-- to add a member to the set add it to the ".$table."_".$column_name." table
CREATE OR REPLACE FUNCTION check_".$table."_".$column_name."_set( ) RETURNS TRIGGER AS \$\$\n
DECLARE
----
arg_str VARCHAR ;
argx VARCHAR := '';
nobreak INT := 1;
rec_count INT := 0;
psn INT := 0;
str_in VARCHAR := NEW.$column_name;
----
BEGIN
----
IF str_in IS NULL THEN RETURN NEW ; END IF;
arg_str := REGEXP_REPLACE(str_in, '\\',\\'', ','); -- str_in is CONSTANT
arg_str := REGEXP_REPLACE(arg_str, '^\\'', '');
arg_str := REGEXP_REPLACE(arg_str, '\\'\$', '');
-- RAISE NOTICE 'arg_str %',arg_str;
psn := POSITION(',' in arg_str);
IF psn > 0 THEN
psn := psn - 1; -- minus-1 from comma position
-- RAISE NOTICE 'psn %',psn;
argx := SUBSTRING(arg_str FROM 1 FOR psn); -- get one set member
psn := psn + 2; -- go to first starting letter
arg_str := SUBSTRING(arg_str FROM psn); -- hack it off
ELSE
psn := 0; -- no comma found, single member
argx := arg_str;
END IF;
-- RAISE NOTICE 'argx %',argx;
-- RAISE NOTICE 'new arg_str: %',arg_str;
WHILE nobreak LOOP
EXECUTE 'SELECT count(*) FROM $constraint_table WHERE set_values = ' || quote_literal(argx) INTO rec_count;
IF rec_count = 0 THEN RAISE EXCEPTION 'one of the set values was not found';
END IF;
IF psn > 0 THEN
psn := psn - 1; -- minus-1 from comma position
-- RAISE NOTICE 'psn %',psn;
argx := SUBSTRING(arg_str FROM 1 FOR psn); -- get one set member
psn := psn + 2; -- go to first starting letter
arg_str := SUBSTRING(arg_str FROM psn); -- hack it off
psn := POSITION(',' in arg_str);
ELSE nobreak := 0;
END IF;
-- RAISE NOTICE 'next argx % and next arg_str %', argx, arg_str;
END LOOP;
RETURN NEW;
----
END;
\$\$ LANGUAGE 'plpgsql' VOLATILE;
drop trigger set_test ON $table;
-- make a trigger for each set field
-- make trigger and hard-code in column names
-- see http://archives.postgresql.org/pgsql-interfaces/2005-02/msg00020.php
CREATE TRIGGER set_test
BEFORE INSERT OR UPDATE ON $table FOR EACH ROW
EXECUTE PROCEDURE check_".$table."_".$column_name."_set();\n";
} # end sub make_plpgsql();


@@ -308,13 +308,6 @@ void nominatim_index(int rank_min, int rank_max, int num_threads, const char *co
{
nominatim_exportXMLEnd(writer);
}
// Close all connections
for (i = 0; i < num_threads; i++)
{
PQfinish(thread_data[i].conn);
}
PQfinish(conn);
}
void *nominatim_indexThread(void * thread_data_in)


@@ -115,7 +115,7 @@ int main(int argc, char *argv[])
PGconn *conn;
fprintf(stderr, "nominatim version %s\n\n", VERSION);
fprintf(stderr, "nominatim SVN version %s\n\n", VERSION);
while (1)
{


@@ -1,26 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<phpunit backupGlobals="false"
backupStaticAttributes="false"
colors="true"
convertErrorsToExceptions="true"
convertNoticesToExceptions="true"
convertWarningsToExceptions="true"
processIsolation="false"
stopOnFailure="false"
syntaxCheck="true"
bootstrap="tests-php/bootstrap.php"
>
<php>
</php>
<testsuites>
<testsuite name="Nominatim PHP Test Suite">
<directory>./tests-php/Nominatim</directory>
</testsuite>
</testsuites>
<filter>
<whitelist>
<directory>./lib/</directory>
</whitelist>
</filter>
</phpunit>


@@ -4,64 +4,26 @@
// General settings
@define('CONST_Debug', false);
@define('CONST_Database_DSN', 'pgsql://@/nominatim'); // <driver>://<username>:<password>@<host>:<port>/<database>
@define('CONST_Database_DSN', 'pgsql://@/nominatim');
@define('CONST_Max_Word_Frequency', '50000');
@define('CONST_Limit_Reindexing', true);
// Software versions
@define('CONST_Postgresql_Version', '9.1'); // values: 8.3, 8.4, 9.0, 9.1, 9.2
@define('CONST_Postgis_Version', '1.5'); // values: 1.5, 2.0
// Paths
@define('CONST_Postgresql_Version', '9.1');
@define('CONST_Path_Postgresql_Contrib', '/usr/share/postgresql/'.CONST_Postgresql_Version.'/contrib');
@define('CONST_Path_Postgresql_Postgis', CONST_Path_Postgresql_Contrib.'/postgis-'.CONST_Postgis_Version);
@define('CONST_Path_Postgresql_Postgis', CONST_Path_Postgresql_Contrib.'/postgis-1.5');
@define('CONST_Osm2pgsql_Binary', CONST_BasePath.'/osm2pgsql/osm2pgsql');
@define('CONST_Osmosis_Binary', '/usr/bin/osmosis');
// osm2pgsql settings
@define('CONST_Osm2pgsql_Flatnode_File', null);
// Replication settings
@define('CONST_Replication_Url', 'http://planet.openstreetmap.org/replication/minute');
@define('CONST_Replication_MaxInterval', '3600');
@define('CONST_Replication_Update_Interval', '60'); // How often upstream publishes diffs
@define('CONST_Replication_Recheck_Interval', '60'); // How long to sleep if no update found yet
// Connection buckets to rate limit people being nasty
@define('CONST_ConnectionBucket_MemcacheServerAddress', false);
@define('CONST_ConnectionBucket_MemcacheServerPort', 11211);
@define('CONST_ConnectionBucket_MaxBlockList', 100);
@define('CONST_ConnectionBucket_LeakRate', 1);
@define('CONST_ConnectionBucket_BlockLimit', 10);
@define('CONST_ConnectionBucket_WaitLimit', 6);
@define('CONST_ConnectionBucket_MaxSleeping', 10);
@define('CONST_ConnectionBucket_Cost_Reverse', 1);
@define('CONST_ConnectionBucket_Cost_Search', 2);
@define('CONST_ConnectionBucket_Cost_Details', 3);
@define('CONST_ConnectionBucket_Cost_Status', 1);
// Override this function to add an adjustment factor to the cost
// based on server load. e.g. getBlockingProcesses
if (!function_exists('user_busy_cost'))
{
function user_busy_cost()
{
return 0;
}
}
// Website settings
@define('CONST_NoAccessControl', true);
@define('CONST_ClosedForIndexing', false);
@define('CONST_ClosedForIndexingExceptionIPs', '');
@define('CONST_BlockedIPs', '');
@define('CONST_BulkUserIPs', '');
@define('CONST_BlockMessage', ''); // additional info to show for blocked IPs
@define('CONST_Website_BaseURL', 'http://'.php_uname('n').'/');
@define('CONST_Tile_Default', 'Mapnik');
@define('CONST_Default_Language', false);
@define('CONST_Default_Language', 'xx');
@define('CONST_Default_Lat', 20.0);
@define('CONST_Default_Lon', 0.0);
@define('CONST_Default_Zoom', 2);
@@ -69,13 +31,7 @@
@define('CONST_Search_AreaPolygons_Enabled', true);
@define('CONST_Search_AreaPolygons', true);
@define('CONST_Search_BatchMode', false);
@define('CONST_Search_TryDroppedAddressTerms', false);
@define('CONST_Search_NameOnlySearchFrequencyThreshold', false);
// Set to zero to disable polygon output
@define('CONST_PolygonOutput_MaximumTypes', 1);
@define('CONST_Suggestions_Enabled', false);
// Log settings
@define('CONST_Log_DB', true);


@@ -1,33 +1,15 @@
-- Script to build a calculated country grid from existing tables
DROP TABLE IF EXISTS tmp_country_osm_grid;
CREATE TABLE tmp_country_osm_grid as select country_name.country_code,st_union(placex.geometry) as geometry from country_name,
drop table country_osm_grid2;
create table country_osm_grid2 as select country_name.country_code,st_union(placex.geometry) as geometry from country_name,
placex
where (lower(placex.country_code) = country_name.country_code)
and placex.rank_search < 16 and st_area(placex.geometry) > 0
and placex.rank_search < 16 and st_area(placex.geometry)>0
group by country_name.country_code;
ALTER TABLE tmp_country_osm_grid add column area double precision;
UPDATE tmp_country_osm_grid set area = st_area(geometry::geography);
-- compare old and new
select country_code, round, round(log(area)) from (select distinct country_code,round(log(area)) from country_osm_grid order by country_code) as x
left outer join tmp_country_osm_grid using (country_code) where area is null or round(log(area)) != round;
DROP TABLE IF EXISTS new_country_osm_grid;
CREATE TABLE new_country_osm_grid as select country_code,area,quad_split_geometry(geometry,0.5,20) as geometry from tmp_country_osm_grid;
CREATE INDEX new_idx_country_osm_grid_geometry ON new_country_osm_grid USING GIST (geometry);
-- Sometimes there are problems calculating area due to invalid data - optionally recalc
UPDATE new_country_osm_grid set area = sum from (select country_code,sum(case when st_area(geometry::geography) = 'NaN' THEN 0 ELSE st_area(geometry::geography) END)
from new_country_osm_grid group by country_code) as x where x.country_code = new_country_osm_grid.country_code;
-- compare old and new
select country_code, x.round, y.round from (select distinct country_code,round(log(area)) from country_osm_grid order by country_code) as x
left outer join (select distinct country_code,round(log(area)) from new_country_osm_grid order by country_code) as y
using (country_code) where x.round != y.round;
-- Flip the new table in
BEGIN;
DROP TABLE IF EXISTS country_osm_grid;
ALTER TABLE new_country_osm_grid rename to country_osm_grid;
ALTER INDEX new_idx_country_osm_grid_geometry RENAME TO idx_country_osm_grid_geometry;
COMMIT;
alter table country_osm_grid2 add column area double precision;
update country_osm_grid2 set area = st_area(geometry::geography);
drop table country_osm_grid3;
create table country_osm_grid3 as select country_code,area,quad_split_geometry(geometry,0.5,20) as geometry from country_osm_grid2;
drop table country_osm_grid;
alter table country_osm_grid3 rename to country_osm_grid;
CREATE INDEX idx_country_osm_grid_geometry ON country_osm_grid USING GIST (geometry);
update country_osm_grid set area = sum from (select country_code,sum(case when st_area(geometry::geography) = 'NaN' THEN 0 ELSE st_area(geometry::geography) END)
from country_osm_grid group by country_code) as x where x.country_code = country_osm_grid.country_code;

File diff suppressed because it is too large


@@ -1,8 +0,0 @@
CREATE OR REPLACE FUNCTION hstore(k text, v text) RETURNS HSTORE
AS $$
DECLARE
BEGIN
RETURN k => v;
END;
$$
LANGUAGE plpgsql IMMUTABLE;


@@ -18,10 +18,10 @@ CREATE INDEX idx_placex_rank_address ON placex USING BTREE (rank_address);
CREATE INDEX idx_placex_pendingsector ON placex USING BTREE (rank_search,geometry_sector) where indexed_status > 0;
CREATE INDEX idx_placex_parent_place_id ON placex USING BTREE (parent_place_id) where parent_place_id IS NOT NULL;
CREATE INDEX idx_placex_interpolation ON placex USING BTREE (geometry_sector) where indexed_status > 0 and class='place' and type='houses';
CREATE INDEX idx_placex_reverse_geometry ON placex USING gist (geometry) where rank_search != 28 and (name is not null or housenumber is not null) and class not in ('waterway','railway','tunnel','bridge');
CREATE INDEX idx_location_area_country_place_id ON location_area_country USING BTREE (place_id);
CREATE INDEX idx_search_name_country_centroid ON search_name_country USING GIST (centroid);
CREATE INDEX idx_search_name_country_nameaddress_vector ON search_name_country USING GIN (nameaddress_vector) WITH (fastupdate = off);
-- start
CREATE INDEX idx_location_property_-partition-_centroid ON location_property_-partition- USING GIST (centroid);


@@ -60,9 +60,6 @@ create or replace function insertLocationAreaLarge(
in_centroid GEOMETRY, in_geometry GEOMETRY) RETURNS BOOLEAN AS $$
DECLARE
BEGIN
IF in_rank_address = 0 THEN
RETURN TRUE;
END IF;
IF in_rank_search <= 4 THEN
INSERT INTO location_area_country values (in_partition, in_place_id, in_country_code, in_keywords, in_rank_search, in_rank_address, in_estimate, in_centroid, in_geometry);
@@ -114,7 +111,7 @@ END
$$
LANGUAGE plpgsql;
create or replace function getNearestNamedRoadFeature(in_partition INTEGER, point GEOMETRY, isin_token INTEGER[])
create or replace function getNearestNamedRoadFeature(in_partition INTEGER, point GEOMETRY, isin_token INTEGER)
RETURNS setof nearfeature AS $$
DECLARE
r nearfeature%rowtype;
@@ -126,9 +123,9 @@ BEGIN
SELECT place_id, name_vector, address_rank, search_rank,
ST_Distance(centroid, point) as distance, null as isguess
FROM search_name_-partition-
WHERE name_vector @> isin_token
WHERE name_vector @> ARRAY[isin_token]
AND ST_DWithin(centroid, point, 0.01)
AND search_rank between 26 and 27
AND search_rank between 22 and 27
ORDER BY distance ASC limit 1
LOOP
RETURN NEXT r;
@@ -142,35 +139,6 @@ END
$$
LANGUAGE plpgsql;
create or replace function getNearestNamedPlaceFeature(in_partition INTEGER, point GEOMETRY, isin_token INTEGER[])
RETURNS setof nearfeature AS $$
DECLARE
r nearfeature%rowtype;
BEGIN
-- start
IF in_partition = -partition- THEN
FOR r IN
SELECT place_id, name_vector, address_rank, search_rank,
ST_Distance(centroid, point) as distance, null as isguess
FROM search_name_-partition-
WHERE name_vector @> isin_token
AND ST_DWithin(centroid, point, 0.03)
AND search_rank between 16 and 22
ORDER BY distance ASC limit 1
LOOP
RETURN NEXT r;
END LOOP;
RETURN;
END IF;
-- end
RAISE EXCEPTION 'Unknown partition %', in_partition;
END
$$
LANGUAGE plpgsql;
create or replace function getNearestPostcode(in_partition INTEGER, point GEOMETRY)
RETURNS TEXT AS $$
DECLARE
@@ -198,7 +166,7 @@ create or replace function insertSearchName(
in_partition INTEGER, in_place_id BIGINT, in_country_code VARCHAR(2),
in_name_vector INTEGER[], in_nameaddress_vector INTEGER[],
in_rank_search INTEGER, in_rank_address INTEGER, in_importance FLOAT,
in_centroid GEOMETRY, in_geometry GEOMETRY) RETURNS BOOLEAN AS $$
in_centroid GEOMETRY) RETURNS BOOLEAN AS $$
DECLARE
BEGIN
@@ -208,20 +176,16 @@ BEGIN
IF in_rank_search <= 4 THEN
DELETE FROM search_name_country WHERE place_id = in_place_id;
IF in_rank_address > 0 THEN
INSERT INTO search_name_country values (in_place_id, in_rank_search, in_rank_address,
in_name_vector, in_geometry);
END IF;
INSERT INTO search_name_country values (in_place_id, in_rank_search, in_rank_address, in_importance, in_country_code,
in_name_vector, in_nameaddress_vector, in_centroid);
RETURN TRUE;
END IF;
-- start
IF in_partition = -partition- THEN
DELETE FROM search_name_-partition- values WHERE place_id = in_place_id;
IF in_rank_address > 0 THEN
INSERT INTO search_name_-partition- values (in_place_id, in_rank_search, in_rank_address,
in_name_vector, in_geometry);
END IF;
INSERT INTO search_name_-partition- values (in_place_id, in_rank_search, in_rank_address, 0, in_country_code,
in_name_vector, in_nameaddress_vector, in_centroid);
RETURN TRUE;
END IF;
-- end
@@ -334,9 +298,9 @@ BEGIN
RETURN;
END IF;
p1 := ST_LineInterpolatePoint(line,0);
p2 := ST_LineInterpolatePoint(line,0.5);
p3 := ST_LineInterpolatePoint(line,1);
p1 := ST_Line_Interpolate_Point(line,0);
p2 := ST_Line_Interpolate_Point(line,0.5);
p3 := ST_Line_Interpolate_Point(line,1);
-- start
IF in_partition = -partition- THEN


@@ -1,9 +1,9 @@
drop type if exists nearplace cascade;
drop type nearplace cascade;
create type nearplace as (
place_id BIGINT
);
drop type if exists nearfeature cascade;
drop type nearfeature cascade;
create type nearfeature as (
place_id BIGINT,
keywords int[],
@@ -13,7 +13,7 @@ create type nearfeature as (
isguess boolean
);
drop type if exists nearfeaturecentr cascade;
drop type nearfeaturecentr cascade;
create type nearfeaturecentr as (
place_id BIGINT,
keywords int[],
@@ -24,16 +24,6 @@ create type nearfeaturecentr as (
centroid GEOMETRY
);
drop table IF EXISTS search_name_blank CASCADE;
CREATE TABLE search_name_blank (
place_id BIGINT,
search_rank integer,
address_rank integer,
name_vector integer[]
);
SELECT AddGeometryColumn('search_name_blank', 'centroid', 4326, 'GEOMETRY', 2);
CREATE TABLE location_area_country () INHERITS (location_area_large);
CREATE INDEX idx_location_area_country_geometry ON location_area_country USING GIST (geometry);


@@ -32,7 +32,6 @@ CREATE TABLE query_log (
results integer
);
CREATE INDEX idx_query_log ON query_log USING BTREE (starttime);
GRANT SELECT ON query_log TO "www-data" ;
GRANT INSERT ON query_log TO "www-data" ;
GRANT UPDATE ON query_log TO "www-data" ;
@@ -70,6 +69,7 @@ drop table IF EXISTS word;
CREATE TABLE word (
word_id INTEGER,
word_token text,
word_trigram text,
word text,
class text,
type text,
@@ -77,7 +77,9 @@ CREATE TABLE word (
search_name_count INTEGER,
operator TEXT
);
SELECT AddGeometryColumn('word', 'location', 4326, 'GEOMETRY', 2);
CREATE INDEX idx_word_word_token on word USING BTREE (word_token);
--CREATE INDEX idx_word_trigram ON word USING gin(word_trigram gin_trgm_ops) WITH (fastupdate = off);
GRANT SELECT ON word TO "www-data" ;
DROP SEQUENCE seq_word;
CREATE SEQUENCE seq_word start 1;
@@ -121,8 +123,8 @@ CREATE INDEX idx_location_property_tiger_parent_place_id ON location_property_ti
CREATE INDEX idx_location_property_tiger_housenumber_parent_place_id ON location_property_tiger USING BTREE (parent_place_id, housenumber);
GRANT SELECT ON location_property_tiger TO "www-data";
drop table IF EXISTS search_name;
CREATE TABLE search_name (
drop table IF EXISTS search_name_blank CASCADE;
CREATE TABLE search_name_blank (
place_id BIGINT,
search_rank integer,
address_rank integer,
@@ -131,7 +133,10 @@ CREATE TABLE search_name (
name_vector integer[],
nameaddress_vector integer[]
);
SELECT AddGeometryColumn('search_name', 'centroid', 4326, 'GEOMETRY', 2);
SELECT AddGeometryColumn('search_name_blank', 'centroid', 4326, 'GEOMETRY', 2);
drop table IF EXISTS search_name;
CREATE TABLE search_name () INHERITS (search_name_blank);
CREATE INDEX idx_search_name_place_id ON search_name USING BTREE (place_id);
drop table IF EXISTS place_addressline;
@@ -212,9 +217,16 @@ CREATE INDEX idx_placex_adminname on placex USING BTREE (make_standard_name(name
DROP SEQUENCE seq_place;
CREATE SEQUENCE seq_place start 1;
GRANT SELECT on placex to "www-data" ;
GRANT UPDATE ON placex to "www-data" ;
GRANT SELECT ON search_name to "www-data" ;
GRANT DELETE on search_name to "www-data" ;
GRANT INSERT on search_name to "www-data" ;
GRANT SELECT on place_addressline to "www-data" ;
GRANT INSERT ON place_addressline to "www-data" ;
GRANT DELETE on place_addressline to "www-data" ;
GRANT SELECT ON seq_word to "www-data" ;
GRANT UPDATE ON seq_word to "www-data" ;
GRANT INSERT ON word to "www-data" ;
GRANT SELECT ON planet_osm_ways to "www-data" ;
GRANT SELECT ON planet_osm_rels to "www-data" ;
GRANT SELECT on location_area to "www-data" ;
@@ -236,7 +248,17 @@ CREATE TRIGGER place_before_delete BEFORE DELETE ON place
CREATE TRIGGER place_before_insert BEFORE INSERT ON place
FOR EACH ROW EXECUTE PROCEDURE place_insert();
alter table placex add column geometry_sector INTEGER;
alter table placex add column indexed_status INTEGER;
alter table placex add column indexed_date TIMESTAMP;
update placex set geometry_sector = geometry_sector(geometry);
drop index idx_placex_pendingbylatlon;
drop index idx_placex_interpolation;
drop index idx_placex_sector;
CREATE INDEX idx_placex_pendingbylatlon ON placex USING BTREE (geometry_index(geometry_sector,indexed,name),rank_search)
where geometry_index(geometry_sector,indexed,name) IS NOT NULL;
CREATE INDEX idx_placex_interpolation ON placex USING BTREE (geometry_sector) where indexed = false and class='place' and type='houses';
CREATE INDEX idx_placex_sector ON placex USING BTREE (geometry_sector,rank_address,osm_type,osm_id);
DROP SEQUENCE seq_postcodes;
@@ -256,7 +278,6 @@ CREATE TABLE import_polygon_error (
SELECT AddGeometryColumn('import_polygon_error', 'prevgeometry', 4326, 'GEOMETRY', 2);
SELECT AddGeometryColumn('import_polygon_error', 'newgeometry', 4326, 'GEOMETRY', 2);
CREATE INDEX idx_import_polygon_error_osmid ON import_polygon_error USING BTREE (osm_type, osm_id);
GRANT SELECT ON import_polygon_error TO "www-data";
drop table import_polygon_delete;
CREATE TABLE import_polygon_delete (
@@ -266,7 +287,6 @@ CREATE TABLE import_polygon_delete (
type TEXT NOT NULL
);
CREATE INDEX idx_import_polygon_delete_osmid ON import_polygon_delete USING BTREE (osm_type, osm_id);
GRANT SELECT ON import_polygon_delete TO "www-data";
drop sequence file;
CREATE SEQUENCE file start 1;


@@ -53,12 +53,12 @@ BEGIN
END IF;
place_centroid := ST_Centroid(linegeo);
out_partition := get_partition('us');
out_partition := get_partition(place_centroid, 'us');
out_parent_place_id := null;
address_street_word_id := get_name_id(make_standard_name(in_street));
IF address_street_word_id IS NOT NULL THEN
FOR location IN SELECT * from getNearestNamedRoadFeature(out_partition, place_centroid, ARRAY[address_street_word_id]) LOOP
FOR location IN SELECT * from getNearestNamedRoadFeature(out_partition, place_centroid, address_street_word_id) LOOP
out_parent_place_id := location.place_id;
END LOOP;
END IF;
@@ -79,7 +79,7 @@ BEGIN
FOR housenum IN startnumber..endnumber BY stepsize LOOP
insert into location_property_tiger_import (place_id, partition, parent_place_id, housenumber, postcode, centroid)
values (nextval('seq_place'), out_partition, out_parent_place_id, housenum, in_postcode,
ST_LineInterpolatePoint(linegeo, (housenum::float-rangestartnumber::float)/numberrange::float));
ST_Line_Interpolate_Point(linegeo, (housenum::float-rangestartnumber::float)/numberrange::float));
newpoints := newpoints + 1;
END LOOP;


@@ -1,77 +0,0 @@
<?php
namespace Nominatim;
require 'lib/lib.php';
class NominatimTest extends \PHPUnit_Framework_TestCase
{
protected function setUp()
{
}
public function test_addQuotes()
{
// FIXME: not quoting existing quote signs is probably a bug
$this->assertSame("'St. John's'", addQuotes("St. John's"));
$this->assertSame("''", addQuotes(''));
}
public function test_looksLikeLatLonPair()
{
// no coordinates expected
$this->assertNull(looksLikeLatLonPair(''));
$this->assertNull(looksLikeLatLonPair('abc'));
$this->assertNull(looksLikeLatLonPair('12 34'));
$this->assertNull(looksLikeLatLonPair('200.1 89.9')); // because latitude > 180
// coordinates expected
$this->assertNotNull(looksLikeLatLonPair('0.0 -0.0'));
$this->assertEquals(
array( 'lat' => 12.456, 'lon' => -78.90, 'query' => 'abc def'),
looksLikeLatLonPair(' abc 12.456 -78.90 def ')
);
$this->assertEquals(
array( 'lat' => 12.456, 'lon' => -78.90, 'query' => ''),
looksLikeLatLonPair(' [12.456,-78.90] ')
);
// http://en.wikipedia.org/wiki/Geographic_coordinate_conversion
// these all represent the same location
$aQueries = array(
'40 26.767 N 79 58.933 W',
'40° 26.767 N 79° 58.933 W',
"40° 26.767' N 79° 58.933' W",
'N 40 26.767, W 79 58.933',
'N 40°26.767, W 79°58.933',
"N 40°26.767', W 79°58.933'",
'40 26 46 N 79 58 56 W',
'40° 26 46″ N 79° 58 56″ W',
'N 40 26 46 W 79 58 56',
'N 40° 26 46″, W 79° 58 56″',
'N 40° 26\' 46", W 79° 58\' 56"',
'40.446 -79.982',
'40.446,-79.982',
'40.446° N 79.982° W',
'N 40.446° W 79.982°',
'[40.446 -79.982]',
' 40.446 , -79.982 ',
);
foreach($aQueries as $sQuery){
$aRes = looksLikeLatLonPair($sQuery);
$this->assertEquals( 40.446, $aRes['lat'], 'degrees decimal ' . $sQuery, 0.01);
$this->assertEquals(-79.982, $aRes['lon'], 'degrees decimal ' . $sQuery, 0.01);
}
}
}


@@ -1,13 +0,0 @@
Basic unit tests of PHP code. Very low coverage. Doesn't cover interaction
with the webserver/HTTP or database (yet).
You need to have
https://phpunit.de/manual/4.2/en/
installed.
To execute the test suite run
$ phpunit
It will read phpunit.xml which points to the library, test path and bootstrap
script, and sets other parameters.


@@ -1,2 +0,0 @@
<?php


@@ -1,98 +0,0 @@
This directory contains functional tests for the Nominatim API,
for the import/update from osm files and for indexing.
The tests use the lettuce framework (http://lettuce.it/) and
nose (https://nose.readthedocs.org). API tests are meant to be run
against a Nominatim installation with a complete planet-wide
setup based on a fairly recent planet. If you only have an
excerpt, some of the API tests may fail. Database tests can be
run without having a database installed.
Prerequisites
=============
* lettuce framework (http://lettuce.it/)
* nose (https://nose.readthedocs.org)
* pytidylib (http://countergram.com/open-source/pytidylib)
* haversine (https://github.com/mapado/haversine)
Usage
=====
* get prerequisites
[sudo] pip install lettuce nose pytidylib haversine psycopg2
* run the tests
NOMINATIM_SERVER=http://your.nominatim.instance/ lettuce features
The tests can be configured with a set of environment variables (a combined example follows this list):
* `NOMINATIM_SERVER` - URL of the nominatim instance (API tests)
* `NOMINATIM_DIR` - source directory of Nominatim (import tests)
* `TEMPLATE_DB` - name of template database used as a skeleton for
the test databases (db tests)
* `TEST_DB` - name of test database (db tests)
* `NOMINATIM_SETTINGS` - file to write temporary Nominatim settings to (db tests)
* `NOMINATIM_REUSE_TEMPLATE` - if defined, the template database will not be
deleted after the test runs and reused during
the next run. This speeds up tests considerably
but might lead to outdated errors for some
changes in the database layout.
* `NOMINATIM_KEEP_SCENARIO_DB` - if defined, the test database will not be
dropped after a test is finished. Should
only be used if one single scenario is run,
otherwise the result is undefined.
* `LOGLEVEL` - set to 'debug' to get more verbose output (only works properly
when output to a logfile is configured)
* `LOGFILE` - sends debug output to the given file
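For example, a hypothetical database test run (paths and database name are
purely illustrative) might combine several of the variables above:
NOMINATIM_DIR=~/src/Nominatim TEST_DB=test_nominatim LOGLEVEL=debug LOGFILE=/tmp/nominatim-test.log lettuce features/db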
Writing Tests
=============
The following explanation assumes that the reader is familiar with the lettuce
notation of features, scenarios and steps.
All possible steps can be found in the `steps` directory and should ideally
be documented.
API Tests (`features/api`)
--------------------------
These tests are meant to test the different API calls and their parameters.
There are two kinds of steps defined for these tests:
request setup steps (see `steps/api_setup.py`)
and steps for checking results (see `steps/api_result.py`).
Each scenario follows this simple sequence of steps:
1. One or more steps to define parameters and HTTP headers of the request.
These are cumulative, so you can use multiple steps.
2. A single step to call the API. This sends an HTTP request to the configured
server and collects the answer. The cached parameters will be deleted,
to ensure that the setup works properly with scenario outlines.
3. As many result checks as necessary. The result remains cached, so that
multiple tests can be added here.
Indexing Tests (`features/db`)
---------------------------------------------------
These tests check the import and update of the Nominatim database. They do not
test the correctness of osm2pgsql. Each test will write some data into the `place`
table (and optionally the `planet_osm_*` tables if required) and then run
Nominatim's processing functions on that.
These tests need to create their own test databases. By default they will be
called `test_template_nominatim` and `test_nominatim`. Names can be changed with
the environment variables `TEMPLATE_DB` and `TEST_DB`. The user running the tests
needs superuser rights for postgres.
Import Tests (`features/osm2pgsql`)
-----------------------------------
These tests check that data is imported correctly into the place table. They
use the same template database as the Indexing tests, so the same remarks apply.


@@ -1,14 +0,0 @@
Feature: Object details
Check details page for correctness
Scenario Outline: Details via OSM id
When looking up details for <object>
Then the result is valid
Examples:
| object
| 1758375
| N158845944
| W72493656
| R62422


@@ -1,100 +0,0 @@
Feature: Localization of search results
Scenario: Search - default language
When sending json search query "Germany"
Then results contain
| ID | display_name
| 0 | Deutschland.*
Scenario: Search - accept-language first
Given the request parameters
| accept-language
| en,de
When sending json search query "Deutschland"
Then results contain
| ID | display_name
| 0 | Germany.*
Scenario: Search - accept-language missing
Given the request parameters
| accept-language
| xx,fr,en,de
When sending json search query "Deutschland"
Then results contain
| ID | display_name
| 0 | Allemagne.*
Scenario: Search - http accept language header first
Given the HTTP header
| accept-language
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
When sending json search query "Deutschland"
Then results contain
| ID | display_name
| 0 | Allemagne.*
Scenario: Search - http accept language header and accept-language
Given the request parameters
| accept-language
| de,en
Given the HTTP header
| accept-language
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
When sending json search query "Deutschland"
Then results contain
| ID | display_name
| 0 | Deutschland.*
Scenario: Search - http accept language header fallback
Given the HTTP header
| accept-language
| fr-ca,en-ca;q=0.5
When sending json search query "Deutschland"
Then results contain
| ID | display_name
| 0 | Allemagne.*
Scenario: Search - http accept language header fallback (upper case)
Given the HTTP header
| accept-language
| fr-FR;q=0.8,en-ca;q=0.5
When sending json search query "Deutschland"
Then results contain
| ID | display_name
| 0 | Allemagne.*
Scenario: Reverse - default language
When looking up coordinates 48.13921,11.57328
Then result addresses contain
| ID | city
| 0 | München
Scenario: Reverse - accept-language parameter
Given the request parameters
| accept-language
| en,fr
When looking up coordinates 48.13921,11.57328
Then result addresses contain
| ID | city
| 0 | Munich
Scenario: Reverse - HTTP accept language header
Given the HTTP header
| accept-language
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
When looking up coordinates 48.13921,11.57328
Then result addresses contain
| ID | city
| 0 | Munich
Scenario: Reverse - accept-language parameter and HTTP header
Given the request parameters
| accept-language
| it
Given the HTTP header
| accept-language
| fr-ca,fr;q=0.8,en-ca;q=0.5,en;q=0.3
When looking up coordinates 48.13921,11.57328
Then result addresses contain
| ID | city
| 0 | Monaco di Baviera


@@ -1,179 +0,0 @@
Feature: API regression tests
Tests error cases reported in tickets.
@poldi-only
Scenario Outline: github #36
When sending json search query "<query>" with address
Then result addresses contain
| ID | road | city
| 0 | Seegasse | Gemeinde Wieselburg-Land
Examples:
| query
| Seegasse, Gemeinde Wieselburg-Land
| Seegasse, Wieselburg-Land
| Seegasse, Wieselburg
Scenario: trac #2430
When sending json search query "89 River Avenue, Hoddesdon, Hertfordshire, EN11 0JT"
Then at least 1 result is returned
Scenario: trac #2440
When sending json search query "East Harvard Avenue, Denver"
Then more than 2 results are returned
Scenario: trac #2456
When sending xml search query "Borlänge Kommun"
Then results contain
| ID | place_rank
| 0 | 19
Scenario: trac #2530
When sending json search query "Lange Straße, Bamberg" with address
Then result addresses contain
| ID | town
| 0 | Bamberg
Scenario: trac #2541
When sending json search query "pad, germany"
Then results contain
| ID | class | display_name
| 0 | aeroway | Paderborn/Lippstadt,.*
Scenario: trac #2579
When sending json search query "Johnsons Close, hackbridge" with address
Then result addresses contain
| ID | postcode
| 0 | SM5 2LU
@Fail
Scenario Outline: trac #2586
When sending json search query "<query>" with address
Then result addresses contain
| ID | country_code
| 0 | uk
Examples:
| query
| DL7 0SN
| DL70SN
Scenario: trac #2628 (1)
When sending json search query "Adam Kraft Str" with address
Then result addresses contain
| ID | road
| 0 | Adam-Kraft-Straße
Scenario: trac #2628 (2)
When sending json search query "Maxfeldstr. 5, Nürnberg" with address
Then result addresses contain
| ID | house_number | road | city
| 0 | 5 | Maxfeldstraße | Nürnberg
Scenario: trac #2638
When sending json search query "Nöthnitzer Str. 40, 01187 Dresden" with address
Then result addresses contain
| ID | house_number | road | city
| 0 | 40 | Nöthnitzer Straße | Dresden
Scenario Outline: trac #2667
When sending json search query "<query>" with address
Then result addresses contain
| ID | house_number
| 0 | <number>
Examples:
| number | query
| 16 | 16 Woodpecker Way, Cambourne
| 14906 | 14906, 114 Street Northwest, Edmonton, Alberta, Canada
| 14904 | 14904, 114 Street Northwest, Edmonton, Alberta, Canada
| 15022 | 15022, 114 Street Northwest, Edmonton, Alberta, Canada
| 15024 | 15024, 114 Street Northwest, Edmonton, Alberta, Canada
Scenario: trac #2681
When sending json search query "kirchstraße troisdorf Germany"
Then results contain
| ID | display_name
| 0 | .*, Troisdorf, .*
Scenario: trac #2758
When sending json search query "6а, полуботка, чернигов" with address
Then result addresses contain
| ID | house_number
| 0 | 6а
Scenario: trac #2790
When looking up coordinates 49.0942079697809,8.27565898861822
Then result addresses contain
| ID | road | village | country
| 0 | Daimlerstraße | Jockgrim | Deutschland
Scenario: trac #2794
When sending json search query "4008"
Then results contain
| ID | class | type
| 0 | place | postcode
Scenario: trac #2797
When sending json search query "Philippstr.4, 52349 Düren" with address
Then result addresses contain
| ID | road | town
| 0 | Philippstraße | Düren
Scenario: trac #2830
When sending json search query "528, Merkley Drive, K4A 1N5,CA" with address
Then result addresses contain
| ID | house_number | road | postcode | country
| 0 | 528 | Merkley Drive | K4A 1N5 | Canada
Scenario: trac #2830
When sending json search query "K4A 1N5,CA"
Then results contain
| ID | class | type | display_name
| 0 | place | postcode | .*, Canada
Scenario: trac #2845
When sending json search query "Leliestraat 31, Zwolle" with address
Then result addresses contain
| ID | city
| 0 | Zwolle
Scenario: trac #2852
When sending json search query "berlinerstrasse, leipzig" with address
Then result addresses contain
| ID | road
| 0 | Berliner Straße
Scenario: trac #2871
When looking up coordinates -33.906895553,150.99609375
Then result addresses contain
| ID | city | postcode | country
| 0 | [^0-9]* | 2197 | Australia
Scenario: trac #2974
When sending json search query "Azadi Square, Faruj" with address
Then result addresses contain
| ID | road | city
| 0 | ميدان آزادي | فاروج
And results contain
| ID | latlon
| 0 | 37.2323,58.2193 +-1km
Scenario: trac #2981
When sending json search query "Ohmstraße 7, Berlin" with address
Then at least 2 results are returned
And result addresses contain
| house_number | road | state
| 7 | Ohmstraße | Berlin
Scenario: trac #3049
When sending json search query "Soccer City"
Then results contain
| ID | class | type | latlon
| 0 | leisure | stadium | -26.2347261,27.982645 +-50m
Scenario: trac #3130
When sending json search query "Old Way, Frinton"
Then results contain
| ID | class | latlon
| 0 | highway | 51.8324206,1.2447352 +-100m


@@ -1,13 +0,0 @@
Feature: Reverse geocoding
Testing the reverse function
# Make sure country is not overwritten by the postcode
Scenario: Country is returned
Given the request parameters
| accept-language
| de
When looking up coordinates 53.9788769,13.0830313
Then result addresses contain
| ID | country
| 0 | Deutschland


@@ -1,52 +0,0 @@
Feature: Simple Reverse Tests
Simple tests for internal server errors and response format.
These tests should pass on any Nominatim installation.
Scenario Outline: Simple reverse-geocoding
When looking up xml coordinates <lat>,<lon>
Then the result is valid xml
When looking up json coordinates <lat>,<lon>
Then the result is valid json
When looking up jsonv2 coordinates <lat>,<lon>
Then the result is valid json
Examples:
| lat | lon
| 0.0 | 0.0
| 45.3 | 3.5
| -79.34 | 23.5
| 0.23 | -178.555
Scenario Outline: Wrapping of legal jsonp requests
Given the request parameters
| json_callback
| foo
When looking up <format> coordinates 67.3245,0.456
Then the result is valid json
Examples:
| format
| json
| jsonv2
Scenario: Reverse-geocoding without address
Given the request parameters
| addressdetails
| 0
When looking up xml coordinates 36.791966,127.171726
Then the result is valid xml
When looking up json coordinates 36.791966,127.171726
Then the result is valid json
When looking up jsonv2 coordinates 36.791966,127.171726
Then the result is valid json
Scenario: Reverse-geocoding with zoom
Given the request parameters
| zoom
| 10
When looking up xml coordinates 36.791966,127.171726
Then the result is valid xml
When looking up json coordinates 36.791966,127.171726
Then the result is valid json
When looking up jsonv2 coordinates 36.791966,127.171726
Then the result is valid json


@@ -1,74 +0,0 @@
Feature: Search queries
Testing correctness of results
Scenario: UK House number search
When sending json search query "27 Thoresby Road, Broxtowe" with address
Then address of result 0 contains
| type | value
| house_number | 27
| road | Thoresby Road
| city | Broxtowe
| state | England
| country | United Kingdom
| country_code | gb
Scenario: House number search for non-street address
Given the request parameters
| accept-language
| en
When sending json search query "4 Pomocnia, Poland" with address
Then address of result 0 is
| type | value
| house_number | 4
| suburb | Pomocnia
| county | gmina Pokrzywnica
| state | Masovian Voivodeship
| postcode | 06-121
| country | Poland
| country_code | pl
Scenario: House number interpolation even
Given the request parameters
| accept-language
| en
When sending json search query "140 rue Don Bosco, Saguenay" with address
Then address of result 0 contains
| type | value
| house_number | 140
| road | rue Don Bosco
| city | Saguenay
| state | Quebec
| country | Canada
| country_code | ca
Scenario: House number interpolation odd
Given the request parameters
| accept-language
| en
When sending json search query "141 rue Don Bosco, Saguenay" with address
Then address of result 0 contains
| type | value
| house_number | 141
| road | rue Don Bosco
| city | Saguenay
| state | Quebec
| country | Canada
| country_code | ca
Scenario: TIGER house number
When sending json search query "3 West Victory Way, Craig"
Then result 0 has not attributes osm_id,osm_type
Scenario: TIGER house number (road fallback)
When sending json search query "3030 West Victory Way, Craig"
Then result 0 has attributes osm_id,osm_type
Scenario: Expansion of Illinois
Given the request parameters
| accept-language
| en
When sending json search query "il, us"
Then results contain
| ID | display_name
| 0 | Illinois.*


@@ -1,33 +0,0 @@
Feature: Result order for Geocoding
Testing that importance ordering returns sensible results
Scenario Outline: city order in street search
When sending json search query "<street>, <city>" with address
Then address of result 0 contains
| type | value
| <type> | <city>
Examples:
| type | city | street
| city | Zürich | Rigistr
| city | Karlsruhe | Sophienstr
| city | München | Karlstr
| city | Praha | Dlouhá
Scenario Outline: use more important city in street search
When sending json search query "<street>, <city>" with address
Then result addresses contain
| ID | country_code
| 0 | <country>
Examples:
| country | city | street
| gb | London | Main St
| gb | Manchester | Central Street
# https://trac.openstreetmap.org/ticket/5094
Scenario: housenumbers are ordered by complete match first
When sending json search query "4 Докукина Москва" with address
Then result addresses contain
| ID | house_number
| 0 | 4
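
These scenarios rely on results coming back ordered by importance. A small sketch of how that property could be verified on a JSON response; the importance field is the one checked in the attribute scenarios later in this diff, and the query string is just an example.

import requests

r = requests.get("https://nominatim.openstreetmap.org/search",   # assumption: any install
                 params={"q": "Main St, London", "format": "json"},
                 headers={"User-Agent": "doc-example"})
importances = [res["importance"] for res in r.json() if "importance" in res]

# Results should be sorted from most to least important.
assert importances == sorted(importances, reverse=True), importances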

View File

@@ -1,172 +0,0 @@
Feature: Search queries
Testing different queries and parameters
Scenario: Simple XML search
When sending xml search query "Schaan"
Then result 0 has attributes place_id,osm_type,osm_id
And result 0 has attributes place_rank,boundingbox
And result 0 has attributes lat,lon,display_name
And result 0 has attributes class,type,importance,icon
And result 0 has not attributes address
Scenario: Simple JSON search
When sending json search query "Vaduz"
And result 0 has attributes place_id,licence,icon,class,type
And result 0 has attributes osm_type,osm_id,boundingbox
And result 0 has attributes lat,lon,display_name,importance
And result 0 has not attributes address
Scenario: JSON search with addressdetails
When sending json search query "Montevideo" with address
Then address of result 0 is
| type | value
| city | Montevideo
| state | Montevideo
| country | Uruguay
| country_code | uy
Scenario: XML search with addressdetails
When sending xml search query "Inuvik" with address
Then address of result 0 is
| type | value
| town | Inuvik
| state | Northwest Territories
| country | Canada
| country_code | ca
Scenario: Address details with unknown class types
When sending json search query "foobar, Essen" with address
Then results contain
| ID | class | type
| 0 | leisure | hackerspace
And result addresses contain
| ID | address29
| 0 | foobar
And address of result 0 does not contain leisure,hackerspace
Scenario: Disabling deduplication
When sending json search query "Oxford Street, London"
Then there are no duplicates
Given the request parameters
| dedupe
| 0
When sending json search query "Oxford Street, London"
Then there are duplicates
Scenario: Search with bounded viewbox in right area
Given the request parameters
| bounded | viewbox
| 1 | -87.7,41.9,-87.57,41.85
When sending json search query "restaurant" with address
Then result addresses contain
| ID | city
| 0 | Chicago
Scenario: Search with bounded viewboxlbrt in right area
Given the request parameters
| bounded | viewboxlbrt
| 1 | -87.7,41.85,-87.57,41.9
When sending json search query "restaurant" with address
Then result addresses contain
| ID | city
| 0 | Chicago
Scenario: No POI search with unbounded viewbox
Given the request parameters
| viewbox
| -87.7,41.9,-87.57,41.85
When sending json search query "restaurant"
Then results contain
| display_name
| [^,]*(?i)restaurant.*
Scenario: bounded search remains within viewbox, even with no results
Given the request parameters
| bounded | viewbox
| 1 | -5.662003,43.54285,-5.6563282,43.5403125
When sending json search query "restaurant"
Then less than 1 result is returned
Scenario: bounded search remains within viewbox with results
Given the request parameters
| bounded | viewbox
| 1 | -5.662003,43.55,-5.6563282,43.5403125
When sending json search query "restaurant"
| lon | lat
| >= -5.662003 | >= 43.5403125
| <= -5.6563282| <= 43.55
Scenario: Prefer results within viewbox
Given the request parameters
| accept-language
| en
When sending json search query "royan" with address
Then result addresses contain
| ID | country
| 0 | France
Given the request parameters
| accept-language | viewbox
| en | 51.94,36.59,51.99,36.56
When sending json search query "royan" with address
Then result addresses contain
| ID | country
| 0 | Iran
Scenario: Overly large limit number for search results
Given the request parameters
| limit
| 1000
When sending json search query "Neustadt"
Then at most 50 results are returned
Scenario: Limit number of search results
Given the request parameters
| limit
| 4
When sending json search query "Neustadt"
Then exactly 4 results are returned
Scenario: Restrict to feature type country
Given the request parameters
| featureType
| country
When sending xml search query "Monaco"
Then results contain
| place_rank
| 4
Scenario: Restrict to feature type state
When sending xml search query "Berlin"
Then results contain
| ID | place_rank
| 0 | 1[56]
Given the request parameters
| featureType
| state
When sending xml search query "Berlin"
Then results contain
| place_rank
| [78]
Scenario: Restrict to feature type city
Given the request parameters
| featureType
| city
When sending xml search query "Monaco"
Then results contain
| place_rank
| 1[56789]
Scenario: Restrict to feature type settlement
When sending json search query "Everest"
Then results contain
| ID | display_name
| 0 | Mount Everest.*
Given the request parameters
| featureType
| settlement
When sending json search query "Everest"
Then results contain
| ID | display_name
| 0 | Everest.*
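
Several scenarios above combine viewbox with bounded=1 to restrict hits to an area. A hedged sketch of building such a request and checking that every result falls inside the box; the coordinates are the Chicago viewbox used above, and the endpoint URL is an assumption.

import requests

left, top, right, bottom = -87.7, 41.9, -87.57, 41.85   # viewbox from the scenario

r = requests.get("https://nominatim.openstreetmap.org/search",   # assumption: any install
                 params={"q": "restaurant", "format": "json", "bounded": 1,
                         "viewbox": "%s,%s,%s,%s" % (left, top, right, bottom)},
                 headers={"User-Agent": "doc-example"})
for res in r.json():
    lon, lat = float(res["lon"]), float(res["lat"])
    assert left <= lon <= right and bottom <= lat <= top, res["display_name"]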

View File

@@ -1,227 +0,0 @@
Feature: Simple Tests
Simple tests for internal server errors and response format.
These tests should pass on any Nominatim installation.
Scenario Outline: Testing different parameters
Given the request parameters
| <parameter>
| <value>
When sending search query "Manchester"
Then the result is valid html
Given the request parameters
| <parameter>
| <value>
When sending html search query "Manchester"
Then the result is valid html
Given the request parameters
| <parameter>
| <value>
When sending xml search query "Manchester"
Then the result is valid xml
Given the request parameters
| <parameter>
| <value>
When sending json search query "Manchester"
Then the result is valid json
Given the request parameters
| <parameter>
| <value>
When sending jsonv2 search query "Manchester"
Then the result is valid json
Examples:
| parameter | value
| addressdetails | 1
| addressdetails | 0
| polygon | 1
| polygon | 0
| polygon_text | 1
| polygon_text | 0
| polygon_kml | 1
| polygon_kml | 0
| polygon_geojson | 1
| polygon_geojson | 0
| polygon_svg | 1
| polygon_svg | 0
| accept-language | de,en
| countrycodes | uk,ir
| bounded | 1
| bounded | 0
| exclude_place_ids| 385252,1234515
| limit | 1000
| dedupe | 1
| dedupe | 0
Scenario: Search with invalid output format
Given the request parameters
| format
| fd$#
When sending search query "Berlin"
Then the result is valid html
Scenario Outline: Simple Searches
When sending search query "<query>"
Then the result is valid html
When sending html search query "<query>"
Then the result is valid html
When sending xml search query "<query>"
Then the result is valid xml
When sending json search query "<query>"
Then the result is valid json
When sending jsonv2 search query "<query>"
Then the result is valid json
Examples:
| query
| New York, New York
| France
| 12, Main Street, Houston
| München
|
| hotels in nantes
| xywxkrf
| gh; foo()
| %#$@*&l;der#$!
| 234
| 47.4,8.3
Scenario: Empty XML search
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| querystring | xnznxvcx
| polygon | false
| more_url | .*format=xml.*q=xnznxvcx.*
Scenario: Empty XML search with special XML characters
When sending xml search query "xfdghn&zxn"xvbyx<vxx>cssdex"
Then result header contains
| attr | value
| querystring | xfdghn&zxn"xvbyx<vxx>cssdex
| polygon | false
| more_url | .*format=xml.*q=xfdghn&zxn"xvbyx<vxx>cssdex.*
Scenario: Empty XML search with viewbox
Given the request parameters
| viewbox
| 12,45.13,77,33
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| querystring | xnznxvcx
| polygon | false
| viewbox | 12,45.13,77,33
Scenario: Empty XML search with viewboxlbrt
Given the request parameters
| viewboxlbrt
| 12,34.13,77,45
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| querystring | xnznxvcx
| polygon | false
| viewbox | 12,45.13,77,33
Scenario: Empty XML search with viewboxlbrt and viewbox
Given the request parameters
| viewbox | viewboxlbrt
| 12,45.13,77,33 | 1,2,3,4
When sending xml search query "pub"
Then result header contains
| attr | value
| querystring | pub
| polygon | false
| viewbox | 12,45.13,77,33
Scenario Outline: Empty XML search with polygon values
Given the request parameters
| polygon
| <polyval>
When sending xml search query "xnznxvcx"
Then result header contains
| attr | value
| polygon | <result>
Examples:
| result | polyval
| false | 0
| true | 1
| true | True
| true | true
| true | false
| true | FALSE
| true | yes
| true | no
| true | '; delete from foobar; select '
Scenario: Empty XML search with excluded place ids
Given the request parameters
| exclude_place_ids
| 123,76,342565
When sending xml search query "jghrleoxsbwjer"
Then result header contains
| attr | value
| exclude_place_ids | 123,76,342565
Scenario Outline: Wrapping of legal jsonp search requests
Given the request parameters
| json_callback
| <data>
When sending json search query "Tokyo"
Then there is a json wrapper "<data>"
Examples:
| data
| foo
| FOO
| __world
| $me
| m1[4]
| d_r[$d]
Scenario Outline: Wrapping of illegal jsonp search requests
Given the request parameters
| json_callback
| <data>
When sending json search query "Tokyo"
Then a HTTP 400 is returned
Examples:
| data
| 1asd
| bar(foo)
| XXX['bad']
| foo; evil
Scenario: Ignore jsonp parameter for anything but json
Given the request parameters
| json_callback
| 234
When sending json search query "Malibu"
Then a HTTP 400 is returned
Given the request parameters
| json_callback
| 234
When sending xml search query "Malibu"
Then the result is valid xml
Given the request parameters
| json_callback
| 234
When sending html search query "Malibu"
Then the result is valid html
Scenario: Empty JSON search
When sending json search query "YHlERzzx"
Then exactly 0 results are returned
Scenario: Empty JSONv2 search
When sending jsonv2 search query "Flubb XdfESSaZx"
Then exactly 0 results are returned
Scenario: Search for non-existing coordinates
When sending json search query "-21.0,-33.0"
Then exactly 0 results are returned
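
Taken together, the legal and illegal jsonp examples above pin down which callback names are accepted: identifiers built from letters, digits, underscore and dollar (not starting with a digit), optionally followed by simple bracketed indices, with parentheses, quotes and statement separators rejected. The validator below is only an illustration of the rule implied by these examples, not the server's actual implementation.

import re

# Accepts: foo, FOO, __world, $me, m1[4], d_r[$d]
# Rejects: 1asd, bar(foo), XXX['bad'], foo; evil
CALLBACK_RE = re.compile(r"^[A-Za-z_$][A-Za-z0-9_$]*(\[[A-Za-z0-9_$]+\])*$")

def is_valid_callback(name):
    return bool(CALLBACK_RE.match(name))

for good in ("foo", "FOO", "__world", "$me", "m1[4]", "d_r[$d]"):
    assert is_valid_callback(good), good
for bad in ("1asd", "bar(foo)", "XXX['bad']", "foo; evil"):
    assert not is_valid_callback(bad), bad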

View File

@@ -1,41 +0,0 @@
Feature: Structured search queries
Testing correctness of results with
structured queries
Scenario: Country only
When sending json structured query with address
| country
| Canada
Then address of result 0 is
| type | value
| country | Canada
| country_code | ca
Scenario: Postcode only
When sending json structured query with address
| postalcode
| 22547
Then at least 1 result is returned
And results contain
| type
| post(al_)?code
And result addresses contain
| postcode
| 22547
Scenario: Street, postcode and country
When sending xml structured query with address
| street | postalcode | country
| Old Palace Road | GU2 7UP | United Kingdom
Then at least 1 result is returned
Then result header contains
| attr | value
| querystring | Old Palace Road, GU2 7UP, United Kingdom
Scenario: github #176
When sending json structured query with address
| city
| Washington
Then at least 1 result is returned
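
Structured queries replace the free-form q parameter with separate fields. A minimal sketch using the field names from the scenarios above (street, postalcode, country, city); the endpoint URL is again an assumption.

import requests

r = requests.get("https://nominatim.openstreetmap.org/search",   # assumption: any install
                 params={"street": "Old Palace Road", "postalcode": "GU2 7UP",
                         "country": "United Kingdom",
                         "format": "json", "addressdetails": 1},
                 headers={"User-Agent": "doc-example"})
r.raise_for_status()
assert len(r.json()) >= 1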

View File

@@ -1,98 +0,0 @@
@DB
Feature: Linking of places
Tests for correctly determining linked places
Scenario: Waterways are linked when in waterway relations
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | river | Rhein | :w-2
| W | 2 | waterway | river | Rhein | :w-3
| R | 13 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
| R | 23 | waterway | river | Limmat| :w-4a
And the relations
| id | members | tags
| 13 | R23:tributary,W1,W2:main_stream | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| W1 | R13
| W2 | R13
| R13 | None
| R23 | None
When sending query "rhein"
Then results contain
| osm_type
| R
Scenario: Relations are not linked when in waterway relations
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | river | Rhein | :w-2
| W | 2 | waterway | river | Rhein | :w-3
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
| R | 2 | waterway | river | Limmat| :w-4a
And the relations
| id | members | tags
| 1 | R2 | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| W1 | None
| W2 | None
| R1 | None
| R2 | None
Scenario: Empty waterway relations are handled correctly
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
And the relations
| id | members | tags
| 1 | | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| R1 | None
Scenario: Waterways are not linked when waterway types don't match
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | drain | Rhein | :w-2
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
And the relations
| id | members | tags
| 1 | N23,N34,W1,R45 | 'type' : 'multipolygon'
When importing
Then table placex contains
| object | linked_place_id
| W1 | None
| R1 | None
When sending query "rhein"
Then results contain
| ID | osm_type
| 0 | R
| 1 | W
Scenario: Side streams are linked only when they have the same name
Given the scene split-road
And the place ways
| osm_type | osm_id | class | type | name | geometry
| W | 1 | waterway | river | Rhein2 | :w-2
| W | 2 | waterway | river | Rhein | :w-3
| R | 1 | waterway | river | Rhein | :w-1 + :w-2 + :w-3
And the relations
| id | members | tags
| 1 | W1:side_stream,W2:side_stream | 'type' : 'waterway'
When importing
Then table placex contains
| object | linked_place_id
| W1 | None
| W2 | R1
When sending query "rhein2"
Then results contain
| osm_type
| W
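
The "table placex contains" steps above amount to plain SQL checks against the imported database. A sketch of the first expectation (W1 linked to R13) with psycopg2; the database name and credentials are assumptions, while the placex columns are the ones the tables above refer to.

import psycopg2

conn = psycopg2.connect(dbname="nominatim")   # assumption: local test database
cur = conn.cursor()
cur.execute("""SELECT p.osm_type, p.osm_id
                 FROM placex x JOIN placex p ON p.place_id = x.linked_place_id
                WHERE x.osm_type = 'W' AND x.osm_id = 1""")
assert cur.fetchone() == ('R', 13)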

View File

@@ -1,202 +0,0 @@
@DB
Feature: Import and search of names
Tests all naming related issues: normalisation,
abbreviations, internationalisation, etc.
Scenario: Case-insensitivity of search
Given the place nodes
| osm_id | class | type | name
| 1 | place | locality | 'name' : 'FooBar'
When importing
Then table placex contains
| object | class | type | name
| N1 | place | locality | 'name' : 'FooBar'
When sending query "FooBar"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "foobar"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "fOObar"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "FOOBAR"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
Scenario: Multiple spaces in name
Given the place nodes
| osm_id | class | type | name
| 1 | place | locality | 'name' : 'one two three'
When importing
When sending query "one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query " one two three"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
Scenario: Special characters in name
Given the place nodes
| osm_id | class | type | name
| 1 | place | locality | 'name' : 'Jim-Knopf-Str'
| 2 | place | locality | 'name' : 'Smith/Weston'
| 3 | place | locality | 'name' : 'space mountain'
| 4 | place | locality | 'name' : 'space'
| 5 | place | locality | 'name' : 'mountain'
When importing
When sending query "Jim-Knopf-Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim Knopf-Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim Knopf Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim/Knopf-Str"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Jim-Knopfstr"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1
When sending query "Smith/Weston"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 2
When sending query "Smith Weston"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 2
When sending query "Smith-Weston"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 2
When sending query "space mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space-mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space/mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space\mountain"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
When sending query "space(mountain)"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 3
Scenario: No copying name tag if only one name
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name' : 'german' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| object | k | v
| N1 | name | german
Scenario: Copying name tag to default language if it does not exist
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| k | v
| name | german
| name:fi | finnish
| name:de | german
Scenario: Copying default language name tag to name if it does not exist
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name:de' : 'german', 'name:fi' : 'finnish' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| k | v
| name | german
| name:fi | finnish
| name:de | german
Scenario: Do not overwrite default language with name tag
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | locality | 'name' : 'german', 'name:fi' : 'finnish', 'name:de' : 'local' | country:de
When importing
Then table placex contains
| object | calculated_country_code |
| N1 | de
And table placex contains as names for N1
| k | v
| name | german
| name:fi | finnish
| name:de | local
Scenario: Landuse without name are ignored
Given the place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | natural | meadow | (0 0, 1 0, 1 1, 0 1, 0 0)
| R | 2 | landuse | industrial | (0 0, -1 0, -1 -1, 0 -1, 0 0)
When importing
Then table placex has no entry for R1
And table placex has no entry for R2
Scenario: Landuse with name are found
Given the place areas
| osm_type | osm_id | class | type | name | geometry
| R | 1 | natural | meadow | 'name' : 'landuse1' | (0 0, 1 0, 1 1, 0 1, 0 0)
| R | 2 | landuse | industrial | 'name' : 'landuse2' | (0 0, -1 0, -1 -1, 0 -1, 0 0)
When importing
When sending query "landuse1"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
When sending query "landuse2"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 2
Scenario: Postcode boundaries without ref
Given the place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
When sending query "12345"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
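
The special-character scenarios above show which punctuation is treated like a space when matching names (hyphen, slash, backslash, brackets) and that matching is case-insensitive. The toy normaliser below reproduces only those examples; the real term normalisation happens inside the database functions and also handles joined forms such as "Jim-Knopfstr".

import re

def normalise(name):
    # Lower-case, map separator punctuation to spaces, collapse whitespace.
    name = name.lower()
    name = re.sub(r"[-/\\()]", " ", name)
    return " ".join(name.split())

assert normalise("FooBar") == normalise("foobar")
assert normalise("Jim-Knopf-Str") == normalise("Jim/Knopf-Str")
assert normalise("space mountain") == normalise("space(mountain)")
assert normalise("one  two   three") == "one two three"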

View File

@@ -1,458 +0,0 @@
@DB
Feature: Parenting of objects
Tests that the correct parent is chosen
Scenario: Address inherits postcode from its street unless it has a postcode
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | housenumber | geometry
| 1 | place | house | 4 | :p-N1
And the place nodes
| osm_id | class | type | housenumber | postcode | geometry
| 2 | place | house | 5 | 99999 | :p-N1
And the place ways
| osm_id | class | type | name | postcode | geometry
| 1 | highway | residential | galoo | 12345 | :w-north
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
When sending query "4 galoo"
Then results contain
| ID | osm_type | osm_id | langaddress
| 0 | N | 1 | 4, galoo, 12345
When sending query "5 galoo"
Then results contain
| ID | osm_type | osm_id | langaddress
| 0 | N | 2 | 5, galoo, 99999
Scenario: Address without tags, closest street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :p-N1
| 2 | place | house | :p-N2
| 3 | place | house | :p-S1
| 4 | place | house | :p-S2
And the named place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-north
| 2 | highway | residential | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W2
| N4 | W2
Scenario: Address without tags avoids unnamed streets
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :p-N1
| 2 | place | house | :p-N2
| 3 | place | house | :p-S1
| 4 | place | house | :p-S2
And the place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-north
And the named place ways
| osm_id | class | type | geometry
| 2 | highway | residential | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
| N2 | W2
| N3 | W2
| N4 | W2
Scenario: addr:street tag parents to appropriately named street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | street| geometry
| 1 | place | house | south | :p-N1
| 2 | place | house | north | :p-N2
| 3 | place | house | south | :p-S1
| 4 | place | house | north | :p-S2
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | north | :w-north
| 2 | highway | residential | south | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
| N2 | W1
| N3 | W2
| N4 | W1
Scenario: addr:street tag parents to next named street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | street | geometry
| 1 | place | house | abcdef | :p-N1
| 2 | place | house | abcdef | :p-N2
| 3 | place | house | abcdef | :p-S1
| 4 | place | house | abcdef | :p-S2
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | abcdef | :w-north
| 2 | highway | residential | abcdef | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W2
| N4 | W2
Scenario: addr:street tag without appropriately named street
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | street | geometry
| 1 | place | house | abcdef | :p-N1
| 2 | place | house | abcdef | :p-N2
| 3 | place | house | abcdef | :p-S1
| 4 | place | house | abcdef | :p-S2
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | abcde | :w-north
| 2 | highway | residential | abcde | :w-south
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W2
| N4 | W2
Scenario: addr:place address
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | addr_place | geometry
| 1 | place | house | myhamlet | :n-alley
And the place nodes
| osm_id | class | type | name | geometry
| 2 | place | hamlet | myhamlet | :n-main-west
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | myhamlet | :w-main
When importing
Then table placex contains
| object | parent_place_id
| N1 | N2
Scenario: addr:street is preferred over addr:place
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | addr_place | street | geometry
| 1 | place | house | myhamlet | mystreet| :n-alley
And the place nodes
| osm_id | class | type | name | geometry
| 2 | place | hamlet | myhamlet | :n-main-west
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | mystreet | :w-main
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
Scenario: Untagged address in simple associated street relation
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-alley
| 2 | place | house | :n-corner
| 3 | place | house | :n-main-west
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | foo | :w-main
| 2 | highway | service | bar | :w-alley
And the relations
| id | members | tags
| 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W1
Scenario: Avoid unnamed streets in simple associated street relation
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-alley
| 2 | place | house | :n-corner
| 3 | place | house | :n-main-west
And the named place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-main
And the place ways
| osm_id | class | type | geometry
| 2 | highway | residential | :w-alley
And the relations
| id | members | tags
| 1 | N1,N2,N3,W2:street,W1:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
| N2 | W1
| N3 | W1
### Scenario 10
Scenario: Associated street relation overrides addr:street
Given the scene road-with-alley
And the place nodes
| osm_id | class | type | street | geometry
| 1 | place | house | bar | :n-alley
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | foo | :w-main
| 2 | highway | residential | bar | :w-alley
And the relations
| id | members | tags
| 1 | W1:street,N1,N2,N3 | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W1
Scenario: Building without tags, closest street from center point
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | geometry
| 1 | building | yes | :w-building
| 2 | highway | primary | :w-WE
| 3 | highway | residential | :w-NS
When importing
Then table placex contains
| object | parent_place_id
| W1 | W3
Scenario: Building with addr:street tags
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | street | geometry
| 1 | building | yes | bar | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id
| W1 | W2
Scenario: Building with addr:place tags
Given the scene building-on-street-corner
And the place nodes
| osm_id | class | type | name | geometry
| 1 | place | village | bar | :n-outer
And the named place ways
| osm_id | class | type | addr_place | geometry
| 1 | building | yes | bar | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id
| W1 | N1
Scenario: Building in associated street relation
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | geometry
| 1 | building | yes | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | W1:house,W2:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| W1 | W2
Scenario: Building in associated street relation overrides addr:street
Given the scene building-on-street-corner
And the named place ways
| osm_id | class | type | street | geometry
| 1 | building | yes | foo | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | W1:house,W2:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| W1 | W2
Scenario: Wrong member in associated street relation is ignored
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-outer
And the named place ways
| osm_id | class | type | street | geometry
| 1 | building | yes | foo | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | N1:house,W1:street,W3:street | 'type' : 'associatedStreet'
When importing
Then table placex contains
| object | parent_place_id
| N1 | W3
Scenario: POIs in building inherit address
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | amenity | bank | :n-inner
| 2 | shop | bakery | :n-edge-NS
| 3 | shop | supermarket| :n-edge-WE
And the place ways
| osm_id | class | type | street | addr_place | housenumber | geometry
| 1 | building | yes | foo | nowhere | 3 | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id | street | addr_place | housenumber
| W1 | W3 | foo | nowhere | 3
| N1 | W3 | foo | nowhere | 3
| N2 | W3 | foo | nowhere | 3
| N3 | W3 | foo | nowhere | 3
Scenario: POIs don't inherit from streets
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | amenity | bank | :n-inner
And the place ways
| osm_id | class | type | street | addr_place | housenumber | geometry
| 1 | highway | path | foo | nowhere | 3 | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id | street | addr_place | housenumber
| N1 | W3 | None | None | None
Scenario: POIs with own address do not inherit building address
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | street | geometry
| 1 | amenity | bank | bar | :n-inner
And the named place nodes
| osm_id | class | type | housenumber | geometry
| 2 | shop | bakery | 4 | :n-edge-NS
And the named place nodes
| osm_id | class | type | addr_place | geometry
| 3 | shop | supermarket| nowhere | :n-edge-WE
And the place nodes
| osm_id | class | type | name | geometry
| 4 | place | isolated_dwelling | theplace | :n-outer
And the place ways
| osm_id | class | type | addr_place | housenumber | geometry
| 1 | building | yes | theplace | 3 | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
When importing
Then table placex contains
| object | parent_place_id | street | addr_place | housenumber
| W1 | N4 | None | theplace | 3
| N1 | W2 | bar | None | None
| N2 | W3 | None | None | 4
| N3 | W2 | None | nowhere | None
### Scenario 20
Scenario: POIs parent a road if and only if they are attached to it
Given the scene points-on-roads
And the named place nodes
| osm_id | class | type | street | geometry
| 1 | highway | bus_stop | North St | :n-SE
| 2 | highway | bus_stop | South St | :n-NW
| 3 | highway | bus_stop | North St | :n-S-unglued
| 4 | highway | bus_stop | South St | :n-N-unglued
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | secondary | North St | :w-north
| 2 | highway | unclassified | South St | :w-south
And the ways
| id | nodes
| 1 | 100,101,2,103,104
| 2 | 200,201,1,202,203
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
| N2 | W1
| N3 | W1
| N4 | W2
Scenario: POIs do not parent non-roads they are attached to
Given the scene points-on-roads
And the named place nodes
| osm_id | class | type | street | geometry
| 1 | highway | bus_stop | North St | :n-SE
| 2 | highway | bus_stop | South St | :n-NW
And the place ways
| osm_id | class | type | name | geometry
| 1 | landuse | residential | North St | :w-north
| 2 | waterway| river | South St | :w-south
And the ways
| id | nodes
| 1 | 100,101,2,103,104
| 2 | 200,201,1,202,203
When importing
Then table placex contains
| object | parent_place_id
| N1 | 0
| N2 | 0
Scenario: POIs on building outlines inherit associated street relation
Given the scene building-on-street-corner
And the named place nodes
| osm_id | class | type | geometry
| 1 | place | house | :n-edge-NS
And the named place ways
| osm_id | class | type | geometry
| 1 | building | yes | :w-building
And the place ways
| osm_id | class | type | name | geometry
| 2 | highway | primary | bar | :w-WE
| 3 | highway | residential | foo | :w-NS
And the relations
| id | members | tags
| 1 | W1:house,W2:street | 'type' : 'associatedStreet'
And the ways
| id | nodes
| 1 | 100,1,101,102,100
When importing
Then table placex contains
| object | parent_place_id
| N1 | W2
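
Most of the parenting rules above reduce to "attach the object to the closest suitably named street unless an addr:street, addr:place or associatedStreet relation says otherwise". A rough sketch of the nearest-named-street lookup as plain SQL run through psycopg2; table and column names are the ones used in the expectations above, while the distance limit is an arbitrary value chosen for illustration, not the importer's real cut-off.

import psycopg2

conn = psycopg2.connect(dbname="nominatim")   # assumption: local test database
cur = conn.cursor()
cur.execute("""SELECT s.place_id
                 FROM placex s,
                      (SELECT geometry FROM placex
                        WHERE osm_type = 'N' AND osm_id = 1) poi
                WHERE s.class = 'highway' AND s.name IS NOT NULL
                  AND ST_DWithin(s.geometry, poi.geometry, 0.001)
                ORDER BY ST_Distance(s.geometry, poi.geometry)
                LIMIT 1""")
print(cur.fetchone())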

View File

@@ -1,383 +0,0 @@
@DB
Feature: Import into placex
Tests that data in placex is completed correctly.
Scenario: No country code tag is available
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | highway | primary | 'name' : 'A1' | country:us
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| N1 | None | us |
Scenario: Location overwrites country code tag
Given the scene country
And the place nodes
| osm_id | class | type | name | country_code | geometry
| 1 | highway | primary | 'name' : 'A1' | de | :us
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| N1 | de | us |
Scenario: Country code tag overwrites location for countries
Given the place areas
| osm_type | osm_id | class | type | admin_level | name | country_code | geometry
| R | 1 | boundary | administrative | 2 | 'name' : 'foo' | de | (-100 40, -101 40, -101 41, -100 41, -100 40)
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| R1 | de | de |
Scenario: Illegal country code tag for countries is ignored
Given the place areas
| osm_type | osm_id | class | type | admin_level | name | country_code | geometry
| R | 1 | boundary | administrative | 2 | 'name' : 'foo' | xx | (-100 40, -101 40, -101 41, -100 41, -100 40)
When importing
Then table placex contains
| object | country_code | calculated_country_code |
| R1 | xx | us |
Scenario: admin level is copied over
Given the place nodes
| osm_id | class | type | admin_level | name
| 1 | place | state | 3 | 'name' : 'foo'
When importing
Then table placex contains
| object | admin_level |
| N1 | 3 |
Scenario: admin level is default 15
Given the place nodes
| osm_id | class | type | name
| 1 | amenity | prison | 'name' : 'foo'
When importing
Then table placex contains
| object | admin_level |
| N1 | 15 |
Scenario: admin level is never larger than 15
Given the place nodes
| osm_id | class | type | name | admin_level
| 1 | amenity | prison | 'name' : 'foo' | 16
When importing
Then table placex contains
| object | admin_level |
| N1 | 15 |
Scenario: postcode node without postcode is dropped
Given the place nodes
| osm_id | class | type
| 1 | place | postcode
When importing
Then table placex has no entry for N1
Scenario: postcode boundary without postcode is dropped
Given the place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | boundary | postal_code | poly-area:0.1
When importing
Then table placex has no entry for R1
Scenario: search and address ranks for GB post codes correctly assigned
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | E45 2CD | country:gb
| 2 | place | postcode | E45 2 | country:gb
| 3 | place | postcode | Y45 | country:gb
When importing
Then table placex contains
| object | postcode | calculated_country_code | rank_search | rank_address
| N1 | E45 2CD | gb | 25 | 5
| N2 | E45 2 | gb | 23 | 5
| N3 | Y45 | gb | 21 | 5
Scenario: wrongly formatted GB postcodes are down-ranked
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | EA452CD | country:gb
| 2 | place | postcode | E45 23 | country:gb
| 3 | place | postcode | y45 | country:gb
When importing
Then table placex contains
| object | calculated_country_code | rank_search | rank_address
| N1 | gb | 30 | 30
| N2 | gb | 30 | 30
| N3 | gb | 30 | 30
Scenario: search and address rank for DE postcodes correctly assigned
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | 56427 | country:de
| 2 | place | postcode | 5642 | country:de
| 3 | place | postcode | 5642A | country:de
| 4 | place | postcode | 564276 | country:de
When importing
Then table placex contains
| object | calculated_country_code | rank_search | rank_address
| N1 | de | 21 | 11
| N2 | de | 30 | 30
| N3 | de | 30 | 30
| N4 | de | 30 | 30
Scenario: search and address rank for other postcodes are correctly assigned
Given the place nodes
| osm_id | class | type | postcode | geometry
| 1 | place | postcode | 1 | country:ca
| 2 | place | postcode | X3 | country:ca
| 3 | place | postcode | 543 | country:ca
| 4 | place | postcode | 54dc | country:ca
| 5 | place | postcode | 12345 | country:ca
| 6 | place | postcode | 55TT667 | country:ca
| 7 | place | postcode | 123-65 | country:ca
| 8 | place | postcode | 12 445 4 | country:ca
| 9 | place | postcode | A1:bc10 | country:ca
When importing
Then table placex contains
| object | calculated_country_code | rank_search | rank_address
| N1 | ca | 21 | 11
| N2 | ca | 21 | 11
| N3 | ca | 21 | 11
| N4 | ca | 21 | 11
| N5 | ca | 21 | 11
| N6 | ca | 21 | 11
| N7 | ca | 25 | 11
| N8 | ca | 25 | 11
| N9 | ca | 25 | 11
Scenario: search and address ranks for places are correctly assigned
Given the named place nodes
| osm_id | class | type |
| 1 | foo | bar |
| 11 | place | Continent |
| 12 | place | continent |
| 13 | place | sea |
| 14 | place | country |
| 15 | place | state |
| 16 | place | region |
| 17 | place | county |
| 18 | place | city |
| 19 | place | island |
| 20 | place | town |
| 21 | place | village |
| 22 | place | hamlet |
| 23 | place | municipality |
| 24 | place | district |
| 25 | place | unincorporated_area |
| 26 | place | borough |
| 27 | place | suburb |
| 28 | place | croft |
| 29 | place | subdivision |
| 30 | place | isolated_dwelling |
| 31 | place | farm |
| 32 | place | locality |
| 33 | place | islet |
| 34 | place | mountain_pass |
| 35 | place | neighbourhood |
| 36 | place | house |
| 37 | place | building |
| 38 | place | houses |
And the named place nodes
| osm_id | class | type | extratags
| 100 | place | locality | 'locality' : 'townland'
| 101 | place | city | 'capital' : 'yes'
When importing
Then table placex contains
| object | rank_search | rank_address |
| N1 | 30 | 30 |
| N11 | 30 | 30 |
| N12 | 2 | 2 |
| N13 | 2 | 0 |
| N14 | 4 | 4 |
| N15 | 8 | 8 |
| N16 | 18 | 0 |
| N17 | 12 | 12 |
| N18 | 16 | 16 |
| N19 | 17 | 0 |
| N20 | 18 | 16 |
| N21 | 19 | 16 |
| N22 | 19 | 16 |
| N23 | 19 | 16 |
| N24 | 19 | 16 |
| N25 | 19 | 16 |
| N26 | 19 | 16 |
| N27 | 20 | 20 |
| N28 | 20 | 20 |
| N29 | 20 | 20 |
| N30 | 20 | 20 |
| N31 | 20 | 0 |
| N32 | 20 | 0 |
| N33 | 20 | 0 |
| N34 | 20 | 0 |
| N100 | 20 | 20 |
| N101 | 15 | 16 |
| N35 | 22 | 22 |
| N36 | 30 | 30 |
| N37 | 30 | 30 |
| N38 | 28 | 0 |
Scenario: search and address ranks for boundaries are correctly assigned
Given the named place nodes
| osm_id | class | type
| 1 | boundary | administrative
And the named place ways
| osm_id | class | type | geometry
| 10 | boundary | administrative | 10 10, 11 11
And the named place areas
| osm_type | osm_id | class | type | admin_level | geometry
| R | 20 | boundary | administrative | 2 | (1 1, 2 2, 1 2, 1 1)
| R | 21 | boundary | administrative | 32 | (3 3, 4 4, 3 4, 3 3)
| R | 22 | boundary | nature_park | 6 | (0 0, 1 0, 0 1, 0 0)
| R | 23 | boundary | natural_reserve| 10 | (0 0, 1 1, 1 0, 0 0)
When importing
Then table placex has no entry for N1
And table placex has no entry for W10
And table placex contains
| object | rank_search | rank_address
| R20 | 4 | 4
| R21 | 30 | 30
| R22 | 12 | 0
| R23 | 20 | 0
Scenario Outline: minor highways dropped without name, included with name
Given the scene roads-with-pois
And a wiped database
And the place ways
| osm_id | class | type | geometry
| 1 | highway | <type> | :w-south
And the named place ways
| osm_id | class | type | geometry
| 2 | highway | <type> | :w-north
When importing
Then table placex has no entry for W1
And table placex contains
| object | rank_search | rank_address
| W2 | <rank> | <rank>
Examples:
| type | rank
| service | 27
| cycleway | 27
| path | 27
| footway | 27
| steps | 27
| bridleway | 27
| track | 26
| byway | 26
| motorway_link | 27
| primary_link | 27
| trunk_link | 27
| secondary_link| 27
| tertiary_link | 27
Scenario: search and address ranks for highways correctly assigned
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type
| 1 | highway | bus_stop
And the place ways
| osm_id | class | type | geometry
| 1 | highway | primary | :w-south
| 2 | highway | secondary | :w-south
| 3 | highway | tertiary | :w-south
| 4 | highway | residential | :w-north
| 5 | highway | unclassified | :w-north
| 6 | highway | something | :w-north
When importing
Then table placex contains
| object | rank_search | rank_address
| N1 | 30 | 30
| W1 | 26 | 26
| W2 | 26 | 26
| W3 | 26 | 26
| W4 | 26 | 26
| W5 | 26 | 26
| W6 | 26 | 26
Scenario: rank and inclusion of landuses
Given the place nodes
| osm_id | class | type
| 1 | landuse | residential
And the named place nodes
| osm_id | class | type
| 2 | landuse | residential
And the place ways
| osm_id | class | type | geometry
| 1 | landuse | residential | 0 0, 0 1
And the named place ways
| osm_id | class | type | geometry
| 2 | landuse | residential | 1 1, 1 1.1
And the place areas
| osm_type | osm_id | class | type | geometry
| W | 3 | landuse | residential | poly-area:0.1
| R | 1 | landuse | residential | poly-area:0.01
| R | 10 | landuse | residential | poly-area:0.5
And the named place areas
| osm_type | osm_id | class | type | geometry
| W | 4 | landuse | residential | poly-area:0.1
| R | 2 | landuse | residential | poly-area:0.05
When importing
Then table placex has no entry for N1
And table placex has no entry for W1
And table placex has no entry for W3
And table placex has no entry for R1
And table placex has no entry for R10
And table placex contains
| object | rank_search | rank_address
| N2 | 30 | 30
| W2 | 30 | 30
| W4 | 22 | 22
| R2 | 22 | 22
Scenario: rank and inclusion of naturals
Given the place nodes
| osm_id | class | type
| 1 | natural | peak
| 3 | natural | volcano
And the named place nodes
| osm_id | class | type
| 2 | natural | peak
| 4 | natural | volcano
| 5 | natural | foobar
And the place ways
| osm_id | class | type | geometry
| 1 | natural | mountain_range | 10 10,11 11
And the named place ways
| osm_id | class | type | geometry
| 2 | natural | mountain_range | 12 12,11 11
| 3 | natural | foobar | 13 13,13.1 13
| 4 | natural | coastline | 14 14,14.1 14
And the place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | natural | volcano | poly-area:0.1
| R | 2 | natural | volcano | poly-area:1.0
And the named place areas
| osm_type | osm_id | class | type | geometry
| R | 3 | natural | volcano | poly-area:0.1
| R | 4 | natural | foobar | poly-area:0.5
| R | 5 | natural | sea | poly-area:5.0
| R | 6 | natural | sea | poly-area:0.01
| R | 7 | natural | coastline | poly-area:1.0
When importing
Then table placex has no entry for N1
And table placex has no entry for N3
And table placex has no entry for W1
And table placex has no entry for R1
And table placex has no entry for R2
And table placex has no entry for R7
And table placex has no entry for W4
And table placex contains
| object | rank_search | rank_address
| N2 | 18 | 0
| N4 | 18 | 0
| N5 | 30 | 30
| W2 | 18 | 0
| R3 | 18 | 0
| R4 | 22 | 22
| R5 | 4 | 4
| R6 | 4 | 4
| W3 | 30 | 30
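
The GB and DE postcode scenarios fix how the search rank depends on how complete a postcode is. The helper below mirrors just those two tables; it is an illustration of the expected values, not the ranking SQL itself.

import re

def postcode_search_rank(country, postcode):
    # Expected rank_search for the gb/de examples in the scenarios above.
    if country == 'gb':
        if re.match(r'^[A-Z][A-Z]?\d[A-Z\d]? \d[A-Z]{2}$', postcode):
            return 25        # full postcode, e.g. E45 2CD
        if re.match(r'^[A-Z][A-Z]?\d[A-Z\d]? \d$', postcode):
            return 23        # sector, e.g. E45 2
        if re.match(r'^[A-Z][A-Z]?\d[A-Z\d]?$', postcode):
            return 21        # outward code, e.g. Y45
        return 30            # badly formatted, down-ranked
    if country == 'de':
        return 21 if re.match(r'^\d{5}$', postcode) else 30
    raise ValueError("only the gb/de examples are modelled here")

assert postcode_search_rank('gb', 'E45 2CD') == 25
assert postcode_search_rank('gb', 'E45 2') == 23
assert postcode_search_rank('gb', 'Y45') == 21
assert postcode_search_rank('gb', 'EA452CD') == 30
assert postcode_search_rank('de', '56427') == 21
assert postcode_search_rank('de', '5642A') == 30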

View File

@@ -1,28 +0,0 @@
@DB
Feature: Creation of search terms
Tests that search_name table is filled correctly
Scenario: POIs without a name have no search entry
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | geometry
| 1 | place | house | :p-N1
And the place ways
| osm_id | class | type | geometry
| 1 | highway | residential | :w-north
When importing
Then table search_name has no entry for N1
Scenario: Named POIs inherit address from parent
Given the scene roads-with-pois
And the place nodes
| osm_id | class | type | name | geometry
| 1 | place | house | foo | :p-N1
And the place ways
| osm_id | class | type | name | geometry
| 1 | highway | residential | the road | :w-north
When importing
Then search_name table contains
| place_id | name_vector | nameaddress_vector
| N1 | foo | the road
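
The expectation above says the POI's own name ends up in name_vector while the parent street's name goes into nameaddress_vector. A toy illustration of that split; real Nominatim stores word ids rather than strings, so this only shows where each term belongs.

def build_search_entry(own_names, parent_address_names):
    tokenize = lambda s: s.lower().split()
    name_vector = {t for n in own_names for t in tokenize(n)}
    nameaddress_vector = {t for n in parent_address_names for t in tokenize(n)}
    return name_vector, nameaddress_vector

name_vec, addr_vec = build_search_entry(["foo"], ["the road"])
assert name_vec == {"foo"}
assert addr_vec == {"the", "road"}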

View File

@@ -1,17 +0,0 @@
@DB
Feature: Import of simple objects
Testing simple stuff
Scenario: Import place node
Given the place nodes:
| osm_id | class | type | name | geometry
| 1 | place | village | 'name' : 'Foo' | 10.0 -10.0
When importing
Then table placex contains
| object | class | type | name | centroid
| N1 | place | village | 'name' : 'Foo' | 10.0,-10.0 +- 1m
When sending query "Foo"
Then results contain
| ID | osm_type | osm_id
| 0 | N | 1

View File

@@ -1,92 +0,0 @@
@DB
Feature: Updates of linked places
Tests that linked places are correctly added and deleted.
Scenario: Add linked place when linking relation is renamed
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
And the place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| R
When updating place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1
Then table placex contains
| object | linked_place_id
| N1 | None
When sending query "foo" with dups
Then results contain
| osm_type
| N
Scenario: Add linked place when linking relation is removed
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
And the place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| R
When marking for delete R1
Then table placex contains
| object | linked_place_id
| N1 | None
And sending query "foo" with dups
Then results contain
| osm_type
| N
Scenario: Remove linked place when linking relation is added
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| N
When updating place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
Then table placex contains
| object | linked_place_id
| N1 | R1
When sending query "foo" with dups
Then results contain
| osm_type
| R
Scenario: Remove linked place when linking relation is renamed
Given the place nodes
| osm_id | class | type | name | geometry
| 1 | place | city | foo | 0 0
And the place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foobar | 8 | poly-area:0.1
When importing
And sending query "foo" with dups
Then results contain
| osm_type
| N
When updating place areas
| osm_type | osm_id | class | type | name | admin_level | geometry
| R | 1 | boundary | administrative | foo | 8 | poly-area:0.1
Then table placex contains
| object | linked_place_id
| N1 | R1
When sending query "foo" with dups
Then results contain
| osm_type
| R

View File

@@ -1,39 +0,0 @@
@DB
Feature: Update of names in place objects
Test all naming related issues in updates
Scenario: Updating postcode in postcode boundaries without ref
Given the place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
And sending query "12345"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
When updating place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 54321 | (0 0, 1 0, 1 1, 0 1, 0 0)
And sending query "12345"
Then exactly 0 results are returned
When sending query "54321"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
Scenario: Delete postcode from postcode boundaries without ref
Given the place areas
| osm_type | osm_id | class | type | postcode | geometry
| R | 1 | boundary | postal_code | 12345 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
And sending query "12345"
Then results contain
| ID | osm_type | osm_id
| 0 | R | 1
When updating place areas
| osm_type | osm_id | class | type | geometry
| R | 1 | boundary | postal_code | (0 0, 1 0, 1 1, 0 1, 0 0)
Then table placex has no entry for R1

View File

@@ -1,87 +0,0 @@
@DB
Feature: Update of simple objects
Testing simple stuff
Scenario: Remove name from a landuse object
Given the place nodes
| osm_id | class | type | name
| 1 | landuse | wood | 'name' : 'Foo'
When importing
Then table placex contains
| object | class | type | name
| N1 | landuse| wood | 'name' : 'Foo'
When updating place nodes
| osm_id | class | type
| 1 | landuse | wood
Then table placex has no entry for N1
Scenario: Do delete small boundary features
Given the place areas
| osm_type | osm_id | class | type | admin_level | geometry
| R | 1 | boundary | administrative | 3 | (0 0, 1 0, 1 1, 0 1, 0 0)
When importing
Then table placex contains
| object | rank_search
| R1 | 6
When marking for delete R1
Then table placex has no entry for R1
Scenario: Do not delete large boundary features
Given the place areas
| osm_type | osm_id | class | type | admin_level | geometry
| R | 1 | boundary | administrative | 3 | (0 0, 2 0, 2 2.1, 0 2, 0 0)
When importing
Then table placex contains
| object | rank_search
| R1 | 6
When marking for delete R1
Then table placex contains
| object | rank_search
| R1 | 6
Scenario: Do delete large features of low rank
Given the named place areas
| osm_type | osm_id | class | type | geometry
| W | 1 | place | house | (0 0, 2 0, 2 2.1, 0 2, 0 0)
| R | 1 | boundary | national_park | (0 0, 2 0, 2 2.1, 0 2, 0 0)
When importing
Then table placex contains
| object | rank_address
| R1 | 0
| W1 | 30
When marking for delete R1,W1
Then table placex has no entry for W1
Then table placex has no entry for R1
Scenario: type mutation
Given the place nodes
| osm_id | class | type | geometry
| 3 | shop | toys | 1 -1
When importing
Then table placex contains
| object | class | type
| N3 | shop | toys
When updating place nodes
| osm_id | class | type | geometry
| 3 | shop | grocery | 1 -1
Then table placex contains
| object | class | type
| N3 | shop | grocery
Scenario: remove postcode place when house number is added
Given the place nodes
| osm_id | class | type | postcode | geometry
| 3 | place | postcode | 12345 | 1 -1
When importing
Then table placex contains
| object | class | type
| N3 | place | postcode
When updating place nodes
| osm_id | class | type | postcode | housenumber | geometry
| 3 | place | house | 12345 | 13 | 1 -1
Then table placex contains
| object | class | type
| N3 | place | house

View File

@@ -1,13 +0,0 @@
@DB
Feature: Import of relations by osm2pgsql
Testing specific relation problems related to members.
Scenario: Don't import empty waterways
Given the osm nodes:
| id | tags
| 1 | 'amenity' : 'prison', 'name' : 'foo'
And the osm relations:
| id | tags | members
| 1 | 'type' : 'waterway', 'waterway' : 'river', 'name' : 'XZ' | N1
When loading osm data
Then table place has no entry for R1

View File

@@ -1,68 +0,0 @@
@DB
Feature: Import of simple objects by osm2pgsql
Testing basic tagging in osm2pgsql imports.
Scenario: Import simple objects
Given the osm nodes:
| id | tags
| 1 | 'amenity' : 'prison', 'name' : 'foo'
Given the osm nodes:
| id | geometry
| 100 | 0 0
| 101 | 0 0.1
| 102 | 0.1 0.2
| 200 | 0 0
| 201 | 0 1
| 202 | 1 1
| 203 | 1 0
And the osm ways:
| id | tags | nodes
| 1 | 'shop' : 'toys', 'name' : 'tata' | 100 101 102
| 2 | 'ref' : '45' | 200 201 202 203 200
And the osm relations:
| id | tags | members
| 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1,W2
When loading osm data
Then table place contains
| object | class | type | name
| N1 | amenity | prison | 'name' : 'foo'
| W1 | shop | toys | 'name' : 'tata'
| R1 | tourism | hotel | 'name' : 'XZ'
Scenario: Import object with two main tags
Given the osm nodes:
| id | tags
| 1 | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo'
When loading osm data
Then table place contains
| object | class | type | name
| N1:tourism | tourism | hotel | 'name' : 'foo'
| N1:amenity | amenity | restaurant | 'name' : 'foo'
Scenario: Import stand-alone house number with postcode
Given the osm nodes:
| id | tags
| 1 | 'addr:housenumber' : '4', 'addr:postcode' : '3345'
When loading osm data
Then table place contains
| object | class | type
| N1 | place | house
Scenario: Landuses are only imported when named
Given the osm nodes:
| id | geometry
| 100 | 0 0
| 101 | 0 0.1
| 102 | 0.1 0.1
| 200 | 0 0
| 202 | 1 1
| 203 | 1 0
And the osm ways:
| id | tags | nodes
| 1 | 'landuse' : 'residential', 'name' : 'rainbow' | 100 101 102 100
| 2 | 'landuse' : 'residential' | 200 202 203 200
When loading osm data
Then table place contains
| object | class | type
| W1 | landuse | residential
And table place has no entry for W2
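
The "two main tags" scenario shows that osm2pgsql emits one place row per main tag, keyed as N1:tourism and N1:amenity. A toy version of that splitting step; the set of main tags here is a deliberately tiny assumption, just large enough for the example.

MAIN_TAGS = {"amenity", "tourism", "shop", "place", "highway", "landuse"}  # illustrative subset

def place_rows(osm_type, osm_id, tags):
    # Yield one (object, class, type, name) row per main tag, as in the scenario.
    name = tags.get("name")
    for key, value in tags.items():
        if key in MAIN_TAGS:
            yield ("%s%d:%s" % (osm_type, osm_id, key), key, value, name)

rows = list(place_rows("N", 1, {"tourism": "hotel",
                                "amenity": "restaurant",
                                "name": "foo"}))
assert ("N1:tourism", "tourism", "hotel", "foo") in rows
assert ("N1:amenity", "amenity", "restaurant", "foo") in rows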

View File

@@ -1,30 +0,0 @@
@DB
Feature: Update of relations by osm2pgsql
Testing relation update by osm2pgsql.
Scenario: Remove all members of a relation
Given the osm nodes:
| id | tags
| 1 | 'amenity' : 'prison', 'name' : 'foo'
Given the osm nodes:
| id | geometry
| 200 | 0 0
| 201 | 0 0.0001
| 202 | 0.0001 0.0001
| 203 | 0.0001 0
Given the osm ways:
| id | tags | nodes
| 2 | 'ref' : '45' | 200 201 202 203 200
Given the osm relations:
| id | tags | members
| 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | W2
When loading osm data
Then table place contains
| object | class | type | name
| R1 | tourism | hotel | 'name' : 'XZ'
Given the osm relations:
| action | id | tags | members
| M | 1 | 'type' : 'multipolygon', 'tourism' : 'hotel', 'name' : 'XZ' | N1
When updating osm data
Then table place has no entry for R1

View File

@@ -1,22 +0,0 @@
@DB
Feature: Update of simple objects by osm2pgsql
Testing basic update functions of osm2pgsql.
Scenario: Import object with two main tags
Given the osm nodes:
| id | tags
| 1 | 'tourism' : 'hotel', 'amenity' : 'restaurant', 'name' : 'foo'
When loading osm data
Then table place contains
| object | class | type | name
| N1:tourism | tourism | hotel | 'name' : 'foo'
| N1:amenity | amenity | restaurant | 'name' : 'foo'
Given the osm nodes:
| action | id | tags
| M | 1 | 'tourism' : 'hotel', 'name' : 'foo'
When updating osm data
Then table place has no entry for N1:amenity
And table place contains
| object | class | type | name
| N1:tourism | tourism | hotel | 'name' : 'foo'

View File

@@ -1,31 +0,0 @@
#!/bin/bash -e
#
# Regenerates wkts for scenarios.
#
datadir="$( cd "$( dirname "$0" )" && cd ../data && pwd )"
if [! -d "$datadir" ]; then
echo "Cannot find data dir.";
exit -1;
fi
echo "Using datadir $datadir"
pushd $datadir
# remove old wkts
rm $datadir/*.wkt
# create wkts from SQL scripts
for fl in *.sql; do
echo "Processing $fl.."
cat $fl | psql -d nominatim -t -o ${fl/.sql/.wkt}
done
# create wkts from .osm files
for fl in *.osm; do
echo "Processing $fl.."
../bin/osm2wkt $fl
done
popd

View File

@@ -1,101 +0,0 @@
// The code in this file is released into the Public Domain.
#include <iostream>
#include <fstream>
#include <string>
#include <unordered_map>
#include <osmium/area/assembler.hpp>
#include <osmium/area/collector.hpp>
#include <osmium/area/problem_reporter_exception.hpp>
#include <osmium/geom/wkt.hpp>
#include <osmium/handler.hpp>
#include <osmium/handler/node_locations_for_ways.hpp>
#include <osmium/io/any_input.hpp>
#include <osmium/visitor.hpp>
#include <osmium/index/map/stl_map.hpp>
typedef osmium::index::map::StlMap<osmium::unsigned_object_id_type, osmium::Location> index_type;
typedef osmium::handler::NodeLocationsForWays<index_type, index_type> location_handler_type;
class ExportToWKTHandler : public osmium::handler::Handler {
osmium::geom::WKTFactory m_factory;
std::unordered_map<std::string, std::ofstream> m_files;
public:
void node(const osmium::Node& node) {
print_geometry(node.tags(), m_factory.create_point(node));
}
void way(const osmium::Way& way) {
if (!way.is_closed() || !way.tags().get_value_by_key("area"))
print_geometry(way.tags(), m_factory.create_linestring(way));
}
void area(const osmium::Area& area) {
if (!area.from_way() || area.tags().get_value_by_key("area"))
print_geometry(area.tags(), m_factory.create_multipolygon(area));
}
void close() {
for (auto& fd : m_files)
fd.second.close();
}
private:
void print_geometry(const osmium::TagList& tags, const std::string& wkt) {
const char* scenario = tags.get_value_by_key("test:section");
const char* id = tags.get_value_by_key("test:id");
if (scenario && id) {
auto& fd = m_files[std::string(scenario)];
if (!fd.is_open())
fd.open(std::string(scenario) + ".wkt");
fd << id << " | " << wkt << "\n";
}
}
}; // class ExportToWKTHandler
int main(int argc, char* argv[]) {
if (argc != 2) {
std::cerr << "Usage: " << argv[0] << " OSMFILE\n";
exit(1);
}
std::string input_filename {argv[1]};
typedef osmium::area::Assembler area_assembler_type;
osmium::area::ProblemReporterException problem_reporter;
area_assembler_type assembler(&problem_reporter);
osmium::area::Collector<area_assembler_type> collector(assembler);
std::cerr << "Pass 1...\n";
osmium::io::Reader reader1(input_filename);
collector.read_relations(reader1);
std::cerr << "Pass 1 done\n";
index_type index_pos;
index_type index_neg;
location_handler_type location_handler(index_pos, index_neg);
std::cerr << "Pass 2...\n";
ExportToWKTHandler export_handler;
osmium::io::Reader reader2(input_filename);
osmium::apply(reader2, location_handler, export_handler, collector.handler());
reader2.close();
osmium::apply(collector, export_handler);
export_handler.close();
std::cerr << "Pass 2 done\n";
google::protobuf::ShutdownProtobufLibrary();
}
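osm2wkt writes one '<test:id> | <wkt>' line per tagged object into a file named after its test:section value; these are the scene files that the database test steps further down load via world.load_scene. A small sketch of reading such a file back, assuming nothing beyond the line format produced above:

    def load_scene_file(path):
        # map object id -> WKT string for one scenes/data/<name>.wkt file
        scene = {}
        with open(path) as fd:
            for line in fd:
                if '|' not in line:
                    continue
                obj_id, wkt = line.split('|', 1)
                scene[obj_id.strip()] = wkt.strip()
        return scene

    # e.g. load_scene_file('points-on-roads.wkt')['n-NE']
    # -> 'POINT(1.0050661 2.0006118)'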

View File

@@ -1,7 +0,0 @@
n-edge-NS | POINT(1.0040019 2.000324)
n-inner | POINT(1.0039385 2.0003548)
n-outer | POINT(1.0039478 2.0004676)
n-edge-WE | POINT(1.0039599 2.0002345)
w-WE | LINESTRING(1.0031759 2.0002316,1.0040361 2.0002211,1.0042735 2.0002264)
w-NS | LINESTRING(1.0040414 2.0001051,1.0040361 2.0002211,1.0040364 2.0006377)
w-building | MULTIPOLYGON(((1.0040019 2.000324,1.0040016 2.0002344,1.0039599 2.0002345,1.0039037 2.0002347,1.0039043 2.0004389,1.0040023 2.0004386,1.0040019 2.000324)))

View File

@@ -1 +0,0 @@
select country_code, st_astext(st_pointonsurface(st_collect(geometry))) from country_osm_grid group by country_code order by country_code
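This query produces the kind of country point list shown in the next file. A hedged sketch of running it from Python and writing the same 'code | POINT(...)' layout, assuming a psycopg2 connection to a database that already contains country_osm_grid:

    import psycopg2

    def dump_country_points(dbname, outfile):
        conn = psycopg2.connect(database=dbname)
        cur = conn.cursor()
        cur.execute("""SELECT country_code,
                              st_astext(st_pointonsurface(st_collect(geometry)))
                         FROM country_osm_grid
                     GROUP BY country_code ORDER BY country_code""")
        with open(outfile, 'w') as fd:
            for code, wkt in cur:
                fd.write('%s | %s\n' % (code, wkt))
        conn.close()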

View File

@@ -1,250 +0,0 @@
ad | POINT(1.58972361752509 42.54241545)
ae | POINT(54.6158905029297 24.8243131637573)
af | POINT(65.9026412963867 34.8470859527588)
ag | POINT(-61.7243069800293 17.069)
ai | POINT(-63.1057155298182 18.2546197)
al | POINT(19.8494176864624 40.2123275624912)
am | POINT(44.6422958374023 40.3782157897949)
ao | POINT(16.2192406654358 -12.7701482772827)
aq | POINT(44.999999975 -75.6569557189941)
ar | POINT(-61.1075973510742 -34.3761558532715)
as | POINT(-170.684700024275 -14.2930755)
at | POINT(14.2574706077576 47.3654232025146)
au | POINT(138.231559753418 -23.7206888198853)
aw | POINT(-69.98255055 12.555)
ax | POINT(19.9183956313477 59.81682435)
az | POINT(48.385555267334 40.6163997650146)
ba | POINT(17.1851491928101 44.2558269500732)
bb | POINT(-59.53342165 13.19)
bd | POINT(89.759895324707 24.3420524597168)
be | POINT(4.90078139305115 50.3468225048828)
bf | POINT(-0.567435041069984 11.9047117233276)
bg | POINT(24.8061628341675 43.0985908508301)
bh | POINT(50.5203291219829 25.94685735)
bi | POINT(29.5456137866089 -2.99057915)
bj | POINT(2.70062518119812 10.0279288291931)
bl | POINT(-62.7934947763772 17.907)
bm | POINT(-64.7740692745195 32.30199165)
bn | POINT(114.521968608887 4.2863885)
bo | POINT(-62.0247344970703 -17.7772369384766)
bq | POINT(-63.1432235610045 17.566)
br | POINT(-45.7706508636475 -9.5868501663208)
bs | POINT(-77.6091675884277 23.8745)
bt | POINT(90.0135078430176 27.281379699707)
bv | POINT(3.35744155625 -54.4215)
bw | POINT(23.5150556564331 -23.4839134216309)
by | POINT(26.7725925445557 53.1588516235352)
bz | POINT(-88.6348991394043 16.3395160487277)
ca | POINT(-107.74817276001 67.1261215209961)
cc | POINT(96.8442066294247 -12.0173443)
cd | POINT(24.0954418182373 -1.67713665962219)
cf | POINT(22.5870132446289 5.98438787460327)
cg | POINT(15.7887516021729 0.403886616230011)
ch | POINT(7.65705513954163 46.5744686126709)
ci | POINT(-6.31190967559814 6.6278383731842)
ck | POINT(-159.778351359569 -21.23349585)
cl | POINT(-70.4179039001465 -53.7718944549561)
cm | POINT(13.260226726532 5.94519567489624)
cn | POINT(96.4428558349609 38.0426063537598)
co | POINT(-72.5295104980469 2.45174860954285)
cr | POINT(-83.8331413269043 9.935142993927)
cu | POINT(-80.8167381286621 21.8885278701782)
cv | POINT(-24.508106575 14.929)
cw | POINT(-68.9640918594077 12.1845)
cx | POINT(105.624119513558 -10.48417)
cy | POINT(32.959223486499 35.37010195)
cz | POINT(16.3209805488586 49.5069274902344)
de | POINT(9.30716800689697 50.2128944396973)
dj | POINT(42.969040422876 11.41542855)
dk | POINT(9.18490123748779 55.5634002685547)
dm | POINT(-61.0035801928854 15.6547055)
do | POINT(-69.6285591125488 18.5884169089722)
dz | POINT(4.24749487638474 25.797215461731)
ec | POINT(-77.4583168029785 -0.982844322919846)
ee | POINT(23.9428863525391 58.439525604248)
eg | POINT(28.952935218811 28.1771860122681)
eh | POINT(-13.6903142929077 25.0124177932739)
er | POINT(39.0122375488281 14.960337638855)
es | POINT(-2.59110307693481 38.7935485839844)
et | POINT(38.6169757843018 7.71399855613708)
fi | POINT(26.8979873657227 63.5619449615479)
fj | POINT(177.918533325195 -17.7423753738403)
fk | POINT(-60.0855102539062 -51.6555919647217)
fm | POINT(151.9535889125 8.5045)
fo | POINT(-6.60483694084778 62.10000995)
fr | POINT(0.284105718135834 47.5104522705078)
ga | POINT(10.8107047080994 -0.0742915570735931)
gb | POINT(-0.928231082856655 52.0161876678467)
gd | POINT(-61.6452430375 12.191)
ge | POINT(44.1666488647461 42.0038585662842)
gf | POINT(-53.4652481079102 3.56188893318176)
gg | POINT(-2.50580395030125 49.5854381)
gh | POINT(-0.463488027453423 7.16051578521729)
gi | POINT(-5.32053155848457 36.1106663)
gl | POINT(-33.8551120758057 74.6635551452637)
gm | POINT(-16.4096023535368 13.25)
gn | POINT(-13.839409828186 10.9629158973694)
gp | POINT(-61.6871265247053 16.23049055)
gq | POINT(10.2397356033325 1.43119311332703)
gr | POINT(23.1785039901733 39.0620670318604)
gs | POINT(-36.4943086948773 -54.4306784)
gt | POINT(-90.7436828613281 15.2042865753174)
gu | POINT(144.733626445767 13.444138)
gw | POINT(-14.8352527618408 11.9248690605164)
gy | POINT(-58.4516773223877 5.73698806762695)
hk | POINT(114.18577775 22.3492361)
hm | POINT(73.6823082266602 -53.22105985)
hn | POINT(-86.9541435241699 15.2382001876831)
hr | POINT(17.499662399292 45.5268955230713)
ht | POINT(-73.5192565917969 18.3249206691162)
hu | POINT(20.3536291122437 47.5172100067139)
id | POINT(123.345050811768 -0.837919592857361)
ie | POINT(-9.00520038604736 52.8772506713867)
il | POINT(35.4631499949707 32.86165655)
im | POINT(-4.86740773691101 54.023)
in | POINT(88.6762087020508 27.86155515)
io | POINT(71.4274391359073 -6.14349685)
iq | POINT(42.5810985565186 34.2610359191895)
ir | POINT(51.268892288208 34.1931705474854)
is | POINT(-17.5178508758545 64.7168769836426)
it | POINT(10.4263944625854 44.8790493011475)
je | POINT(-2.19261599848299 49.1245833)
jm | POINT(-76.8402003547852 18.3935)
jo | POINT(36.5555210113525 30.7574186325073)
jp | POINT(138.725311279297 35.9209995269775)
ke | POINT(36.9060287475586 1.08512867614627)
kg | POINT(76.1557197570801 41.6649742126465)
kh | POINT(104.319019317627 12.9555516242981)
ki | POINT(173.633537933333 0.139)
km | POINT(44.3147485207764 -12.241)
kn | POINT(-62.6937987175 17.2555)
kp | POINT(126.655757904053 39.6457576751709)
kr | POINT(127.277404785156 36.4138870239258)
kw | POINT(47.3068407840576 29.6918055)
ky | POINT(-81.0745526670982 19.2994923579778)
kz | POINT(72.008113861084 49.8885555267334)
la | POINT(102.443916320801 19.8160953521729)
lb | POINT(35.4846443715483 33.4176673878926)
lc | POINT(-60.978944125 13.891)
li | POINT(9.54693948514429 47.15934115)
lk | POINT(80.3852043151855 8.41649961471558)
lr | POINT(-11.169605255127 4.04122126102448)
ls | POINT(28.6698419546997 -29.9453849)
lt | POINT(24.5173501968384 55.4929389953613)
lu | POINT(6.08649672997471 49.81533445)
lv | POINT(23.5103368759155 56.6714401245117)
ly | POINT(15.3684158325195 28.1217727661133)
ma | POINT(-4.0306156873703 33.2169628143311)
mc | POINT(7.47743150426578 43.62917385)
md | POINT(29.6172503477783 46.6651745)
me | POINT(19.7229134314941 43.02441345)
mf | POINT(-63.0666651534257 18.0810209)
mg | POINT(45.8637886047363 -20.5024528503418)
mh | POINT(171.949820566667 5.983)
mk | POINT(21.421085357666 41.0898007597656)
ml | POINT(-1.93310506641865 16.4699301719666)
mm | POINT(95.5462455749512 21.0962018966675)
mn | POINT(99.8113822937012 48.1861572265625)
mo | POINT(113.564416766761 22.16209625)
mp | POINT(145.213452483189 14.1490205)
mq | POINT(-60.8112834227783 14.43706925)
mr | POINT(-9.42324566841125 22.5925149917603)
ms | POINT(-62.1945521583333 16.745)
mt | POINT(14.3836306158583 35.9446731)
mu | POINT(57.551211475 -20.41)
mv | POINT(73.3929214477539 4.19375014305115)
mw | POINT(33.9572296142578 -12.2821822166443)
mx | POINT(-105.892219543457 25.8682699203491)
my | POINT(112.711540222168 2.10098683834076)
mz | POINT(37.5868968963623 -15.5801844596863)
na | POINT(16.6856970787048 -21.4657220840454)
nc | POINT(164.953224182129 -20.3888988494873)
ne | POINT(10.060417175293 19.0827360153198)
nf | POINT(167.95718166875 -29.0645)
ng | POINT(10.1778125762939 10.1780409812927)
ni | POINT(-85.8797492980957 13.2171587944031)
nl | POINT(-68.5706209441406 12.041)
no | POINT(23.1155624389648 70.0993499755859)
np | POINT(83.3625984191895 28.1310758590698)
nr | POINT(166.934792270833 -0.5275)
nu | POINT(-169.848737911905 -19.05305275)
nz | POINT(167.972099304199 -45.1305675506592)
om | POINT(56.8605518341064 20.4741315841675)
pa | POINT(-79.4016036987305 8.80656003952026)
pe | POINT(-78.6654052734375 -7.54711985588074)
pf | POINT(-145.057191213086 -16.7086236)
pg | POINT(146.646003723145 -7.37427568435669)
ph | POINT(121.483592987061 15.0996527671814)
pk | POINT(72.1134796142578 31.1462965011597)
pl | POINT(17.8813629150391 52.771821975708)
pm | POINT(-56.1951589074841 46.7832469)
pn | POINT(-130.106425528029 -25.0695595)
pr | POINT(-65.8875553967285 18.3716905)
ps | POINT(35.3980153741943 32.24773475)
pt | POINT(-8.45743942260742 40.1115436553955)
pw | POINT(134.496454875 7.3245)
py | POINT(-59.5178718566895 -22.4128150939941)
qa | POINT(51.4990362304443 24.9981677)
re | POINT(55.7734550547607 -21.3638828)
ro | POINT(26.3763284683228 45.3612003326416)
rs | POINT(20.4037199020386 44.5641384124756)
ru | POINT(116.440608978271 59.0678024291992)
rw | POINT(29.5788261333252 -1.6240443)
sa | POINT(47.7316932678223 22.4379062652588)
sb | POINT(164.638946533203 -10.2360653877258)
sc | POINT(46.3656697 -9.454)
sd | POINT(28.1472072601318 14.5642309188843)
se | POINT(15.6866798400879 60.3556804656982)
sg | POINT(103.84187219299 1.304)
sh | POINT(-12.2815573611979 -37.11546755)
si | POINT(14.0473856628607 46.390855)
sj | POINT(15.2755260467529 79.2336540222168)
sk | POINT(20.416033744812 48.869701385498)
sl | POINT(-11.4777312278748 8.78156280517578)
sm | POINT(12.4606268797657 43.9427969)
sn | POINT(-15.3711128234863 14.9947791099548)
so | POINT(46.9338359832764 9.34094429016113)
sr | POINT(-56.4855213165283 4.5773549079895)
ss | POINT(28.1357345581055 8.50933408737183)
st | POINT(6.61025854583333 0.2215)
sv | POINT(-89.3666543301004 13.4307287)
sx | POINT(-63.1539330807882 17.9345)
sy | POINT(38.1551322937012 35.3422107696533)
sz | POINT(31.782634398523 -26.14244365)
tc | POINT(-71.325541342334 21.35)
td | POINT(17.4209251403809 13.4622311592102)
tf | POINT(137.5 -67.5)
tg | POINT(1.0698350071907 7.87677597999573)
th | POINT(102.008777618408 16.4231028556824)
tj | POINT(71.9134941101074 39.0152739312988)
tk | POINT(-171.826039878679 -9.209903)
tl | POINT(126.225208282471 -8.72636747360229)
tm | POINT(57.7160358428955 39.9253444671631)
tn | POINT(9.04958724975586 34.8419933319092)
to | POINT(-176.993202209473 -23.1110429763794)
tr | POINT(32.8200283050537 39.8635063171387)
tt | POINT(-60.70793924375 11.1385)
tv | POINT(178.774993896484 -9.41685771942139)
tw | POINT(120.300746917725 23.1700229644775)
tz | POINT(33.5389289855957 -5.01840615272522)
ua | POINT(33.4433536529541 49.3061904907227)
ug | POINT(32.9652328491211 2.08584922552109)
um | POINT(-169.509930872296 16.74605815)
us | POINT(-116.395355224609 40.7137908935547)
uy | POINT(-56.4650554656982 -33.6265888214111)
uz | POINT(61.3552989959717 42.9610729217529)
va | POINT(12.3319785703086 42.0493197)
vc | POINT(-61.0990541737305 13.316)
ve | POINT(-64.8832321166992 7.69849991798401)
vg | POINT(-64.6247911940199 18.419)
vi | POINT(-64.8895090795187 18.3226325)
vn | POINT(104.201791331787 10.27644235)
vu | POINT(167.319198608398 -15.8868751525879)
wf | POINT(-176.207816222208 -13.28535775)
ws | POINT(-172.109667323427 -13.850938)
ye | POINT(45.945629119873 16.1633830070496)
yt | POINT(44.9377459760742 -12.6088246)
za | POINT(23.1948881149292 -30.4327602386475)
zm | POINT(26.3861808776855 -14.3996663093567)
zw | POINT(30.1241998672485 -19.8690795898438)

View File

@@ -1,8 +0,0 @@
n-N-unglued | POINT(1.004922 2.0005155)
n-S-unglued | POINT(1.0046259 2.0002949)
n-NE | POINT(1.0050661 2.0006118)
n-SE | POINT(1.0051339 2.0003349)
n-NW | POINT(1.0047583 2.0004087)
n-SW | POINT(1.0047275 2.0003564)
w-north | LINESTRING(1.0044996 2.0004302,1.0046259 2.0003841,1.0047583 2.0004087,1.004922 2.0005155,1.0050661 2.0006118,1.0053155 2.0006241)
w-south | LINESTRING(1.0045243 2.0002241,1.0046259 2.0002949,1.0047275 2.0003564,1.004826 2.0002918,1.0049368 2.0002641,1.0051339 2.0003349,1.0053278 2.0003687)

View File

@@ -1,11 +0,0 @@
0.0001 | MULTIPOLYGON(((0.001 0,0 0,0 0.1,0.001 0.1,0.001 0)))
0.0005 | MULTIPOLYGON(((0.005 0,0 0,0 0.1,0.005 0.1,0.005 0)))
0.001 | MULTIPOLYGON(((0.01 0,0 0,0 0.1,0.01 0.1,0.01 0)))
0.005 | MULTIPOLYGON(((0.05 0,0 0,0 0.1,0.05 0.1,0.05 0)))
0.01 | MULTIPOLYGON(((0.1 0,0 0,0 0.1,0.1 0.1,0.1 0)))
0.05 | MULTIPOLYGON(((0.5 0,0 0,0 0.1,0.5 0.1,0.5 0)))
0.1 | MULTIPOLYGON(((0.1 0,0 0,0 1,0.1 1,0.1 0)))
0.5 | MULTIPOLYGON(((0.5 0,0 0,0 1,0.5 1,0.5 0)))
1.0 | MULTIPOLYGON(((1 0,0 0,0 1,1 1,1 0)))
2.0 | MULTIPOLYGON(((2 0,0 0,0 1,2 1,2 0)))
5.0 | MULTIPOLYGON(((5 0,0 0,0 1,5 1,5 0)))

View File

@@ -1,168 +0,0 @@
<?xml version='1.0' encoding='UTF-8'?>
<osm version='0.6'>
<node id="100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="101" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
<node id="102" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.001" />
<node id="103" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.001" />
<way id="100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="100" />
<nd ref="101" />
<nd ref="102" />
<nd ref="103" />
<nd ref="100" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.0001"/>
<tag k="area" v="yes"/>
</way>
<node id="200" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="201" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
<node id="202" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.005" />
<node id="203" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.005" />
<way id="200" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="200" />
<nd ref="201" />
<nd ref="202" />
<nd ref="203" />
<nd ref="200" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.0005"/>
<tag k="area" v="yes"/>
</way>
<node id="300" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="301" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
<node id="302" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.01" />
<node id="303" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.01" />
<way id="300" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="300" />
<nd ref="301" />
<nd ref="302" />
<nd ref="303" />
<nd ref="300" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.001"/>
<tag k="area" v="yes"/>
</way>
<node id="400" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="401" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
<node id="402" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.05" />
<node id="403" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.05" />
<way id="400" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="400" />
<nd ref="401" />
<nd ref="402" />
<nd ref="403" />
<nd ref="400" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.005"/>
<tag k="area" v="yes"/>
</way>
<node id="500" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="501" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
<node id="502" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.1" />
<node id="503" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.1" />
<way id="500" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="500" />
<nd ref="501" />
<nd ref="502" />
<nd ref="503" />
<nd ref="500" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.01"/>
<tag k="area" v="yes"/>
</way>
<node id="600" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="601" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.0" />
<node id="602" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.1" lon="0.5" />
<node id="603" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.5" />
<way id="600" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="600" />
<nd ref="601" />
<nd ref="602" />
<nd ref="603" />
<nd ref="600" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.05"/>
<tag k="area" v="yes"/>
</way>
<node id="700" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="701" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
<node id="702" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.1" />
<node id="703" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.1" />
<way id="700" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="700" />
<nd ref="701" />
<nd ref="702" />
<nd ref="703" />
<nd ref="700" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.1"/>
<tag k="area" v="yes"/>
</way>
<node id="800" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="801" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
<node id="802" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.5" />
<node id="803" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.5" />
<way id="800" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="800" />
<nd ref="801" />
<nd ref="802" />
<nd ref="803" />
<nd ref="800" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="0.5"/>
<tag k="area" v="yes"/>
</way>
<node id="900" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="901" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
<node id="902" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="1.0" />
<node id="903" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="1.0" />
<way id="900" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="900" />
<nd ref="901" />
<nd ref="902" />
<nd ref="903" />
<nd ref="900" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="1.0"/>
<tag k="area" v="yes"/>
</way>
<node id="1000" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="1001" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
<node id="1002" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="2.0" />
<node id="1003" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="2.0" />
<way id="1000" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="1000" />
<nd ref="1001" />
<nd ref="1002" />
<nd ref="1003" />
<nd ref="1000" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="2.0"/>
<tag k="area" v="yes"/>
</way>
<node id="1100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="0.0" />
<node id="1101" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="0.0" />
<node id="1102" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="1.0" lon="5.0" />
<node id="1103" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1" lat="0.0" lon="5.0" />
<way id="1100" version="1" timestamp="2014-01-01T00:00:00Z" uid="1" user="test" changeset="1">
<nd ref="1100" />
<nd ref="1101" />
<nd ref="1102" />
<nd ref="1103" />
<nd ref="1100" />
<tag k="test:section" v="poly-area"/>
<tag k="test:id" v="5.0"/>
<tag k="area" v="yes"/>
</way>
</osm>

View File

@@ -1,6 +0,0 @@
n-main-east | POINT(1.0024481 2.0003542)
n-main-west | POINT(1.001552 2.0002662)
n-alley | POINT(1.0019235 2.0005463)
n-corner | POINT(1.0019235 2.0003542)
w-alley | LINESTRING(1.0019594 2.0003086,1.0019594 2.0005756)
w-main | LINESTRING(1.0013435 2.0003118,1.0016759 2.0003053,1.0019594 2.0003086,1.0021255 2.0003151,1.0023699 2.0003118,1.0026078 2.0002988)

View File

@@ -1,6 +0,0 @@
p-N2 | POINT(1.0003904 2.0003399)
p-S1 | POINT(1.0008104 2.0002927)
p-N1 | POINT(1.0005321 2.0005288)
p-S2 | POINT(1.0006398 2.0001064)
w-north | LINESTRING(1.0001174 2.0004055,1.0004298 2.0003976,1.0006608 2.0004579,1.0010624 2.0005419)
w-south | LINESTRING(1.0001384 2.0001903,1.0007212 2.0001982,1.0010677 2.0002192)

View File

@@ -1,300 +0,0 @@
<?xml version='1.0' encoding='UTF-8'?>
<osm version='0.6' upload='false' generator='JOSM'>
<node id='-204' action='modify' visible='true' lat='2.000651586300547' lon='1.005746444902722'>
<tag k='name' v='split-road' />
</node>
<node id='-189' action='modify' visible='true' lat='2.0005350827634585' lon='1.0057490943252971' />
<node id='-187' action='modify' visible='true' lat='2.0005430261867043' lon='1.0058974619895207' />
<node id='-185' action='modify' visible='true' lat='2.0005430261867043' lon='1.0060511284988949' />
<node id='-183' action='modify' visible='true' lat='2.0005324349557014' lon='1.0061306111761574' />
<node id='-174' action='modify' visible='true' lat='2.0005244915324045' lon='1.00551064629351' />
<node id='-172' action='modify' visible='true' lat='2.0005668564562127' lon='1.0056086749288005' />
<node id='-170' action='modify' visible='true' lat='2.000561560840796' lon='1.0056855081834875' />
<node id='-168' action='modify' visible='true' lat='2.0005006612622807' lon='1.0057358472124205' />
<node id='-166' action='modify' visible='true' lat='2.000505956877894' lon='1.0059107091023978' />
<node id='-164' action='modify' visible='true' lat='2.0005112524934896' lon='1.0060537779214704' />
<node id='-162' action='modify' visible='true' lat='2.0004953656466506' lon='1.006135910021308' />
<node id='-160' action='modify' visible='true' lat='2.000513900301281' lon='1.0062842776855314' />
<node id='-158' action='modify' visible='true' lat='2.000471535376104' lon='1.0063717086305204' />
<node id='-156' action='modify' visible='true' lat='2.000296780048186' lon='1.0063584615176433' />
<node id='-154' action='modify' visible='true' lat='2.000262358542008' lon='1.006281628262956' />
<node id='-152' action='modify' visible='true' lat='2.000339144977876' lon='1.0061994961631182' />
<node id='-150' action='modify' visible='true' lat='2.0003576796342712' lon='1.0061041169504032' />
<node id='-148' action='modify' visible='true' lat='2.0003497362101275' lon='1.0060034388925374' />
<node id='-146' action='modify' visible='true' lat='2.000325905937466' lon='1.0058868642992191' />
<node id='-144' action='modify' visible='true' lat='2.000280893199271' lon='1.0057941345090795' />
<node id='-142' action='modify' visible='true' lat='2.0002782453911037' lon='1.0056351691545544' />
<node id='-141' action='modify' visible='true' lat='2.0003603274423103' lon='1.005470904954879' />
<node id='-139' action='modify' visible='true' lat='2.0007265421773432' lon='1.0047054313390744'>
<tag k='name' v='points-on-road' />
</node>
<node id='-112' action='modify' visible='true' lat='2.0005155253763816' lon='1.0049220138358423' />
<node id='-110' action='modify' visible='true' lat='2.0005155253763816' lon='1.0049220138358423'>
<tag k='test:id' v='n-N-unglued' />
<tag k='test:section' v='points-on-roads' />
</node>
<node id='-108' action='modify' visible='true' lat='2.0002948553437463' lon='1.0046258759080025'>
<tag k='test:id' v='n-S-unglued' />
<tag k='test:section' v='points-on-roads' />
</node>
<node id='-106' action='modify' visible='true' lat='2.0002948553437463' lon='1.0046258759080025' />
<node id='-104' action='modify' visible='true' lat='2.0006117740392657' lon='1.0050661381425037'>
<tag k='test:id' v='n-NE' />
<tag k='test:section' v='points-on-roads' />
</node>
<node id='-102' action='modify' visible='true' lat='2.000334854794143' lon='1.0051338707939657'>
<tag k='test:id' v='n-SE' />
<tag k='test:section' v='points-on-roads' />
</node>
<node id='-100' action='modify' visible='true' lat='2.0006240815601832' lon='1.0053155174501582' />
<node id='-98' action='modify' visible='true' lat='2.0004086999307416' lon='1.0047582624540412'>
<tag k='test:id' v='n-NW' />
<tag k='test:section' v='points-on-roads' />
</node>
<node id='-96' action='modify' visible='true' lat='2.0003840848855767' lon='1.0046258759080025' />
<node id='-94' action='modify' visible='true' lat='2.0004302380949586' lon='1.004499646875733' />
<node id='-92' action='modify' visible='true' lat='2.0003687004821606' lon='1.0053278324776966' />
<node id='-90' action='modify' visible='true' lat='2.0002640865351053' lon='1.0049368303533495' />
<node id='-88' action='modify' visible='true' lat='2.000291778462916' lon='1.004825995105503' />
<node id='-86' action='modify' visible='true' lat='2.0003563929593238' lon='1.004727474885195'>
<tag k='test:id' v='n-SW' />
<tag k='test:section' v='points-on-roads' />
</node>
<node id='-84' action='modify' visible='true' lat='2.0002240870829975' lon='1.00452427693081' />
<node id='-82' action='modify' visible='true' lat='2.000715618411992' lon='1.0018322734314236'>
<tag k='name' v='road-with-alley' />
</node>
<node id='-80' action='modify' visible='true' lat='2.0007286441385155' lon='1.0004669962874884'>
<tag k='name' v='roads-with-pois' />
</node>
<node id='-78' action='modify' visible='true' lat='2.000354154459697' lon='1.002448114434296'>
<tag k='test:id' v='n-main-east' />
<tag k='test:section' v='road-with-alley' />
</node>
<node id='-76' action='modify' visible='true' lat='2.000266230783582' lon='1.0015520494830263'>
<tag k='test:id' v='n-main-west' />
<tag k='test:section' v='road-with-alley' />
</node>
<node id='-74' action='modify' visible='true' lat='2.000546283957771' lon='1.0019235091355527'>
<tag k='test:id' v='n-alley' />
<tag k='test:section' v='road-with-alley' />
</node>
<node id='-72' action='modify' visible='true' lat='2.000354154459697' lon='1.0019235091355527'>
<tag k='test:id' v='n-corner' />
<tag k='test:section' v='road-with-alley' />
</node>
<node id='-70' action='modify' visible='true' lat='2.0005755918453296' lon='1.0019593517336036' />
<node id='-68' action='modify' visible='true' lat='2.000298795108618' lon='1.0026077769165225' />
<node id='-66' action='modify' visible='true' lat='2.000311820838452' lon='1.0023699124021854' />
<node id='-64' action='modify' visible='true' lat='2.0003150772708946' lon='1.0021255310518389' />
<node id='-62' action='modify' visible='true' lat='2.0003085644060037' lon='1.0019593517336036' />
<node id='-60' action='modify' visible='true' lat='2.000305307973548' lon='1.0016758693672019' />
<node id='-58' action='modify' visible='true' lat='2.000311820838452' lon='1.0013435107307307' />
<node id='-56' action='modify' visible='true' lat='2.0004054696330322' lon='1.0001173628501097'>
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-54' action='modify' visible='true' lat='2.000397598928471' lon='1.0004297578433892'>
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-52' action='modify' visible='true' lat='2.0004579409958114' lon='1.0006607726283354'>
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-50' action='modify' visible='true' lat='2.0005418951727663' lon='1.0010624233339804'>
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-48' action='modify' visible='true' lat='2.000190337028091' lon='1.0001383641941959'>
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-46' action='modify' visible='true' lat='2.000198207733647' lon='1.000721151492583'>
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-44' action='modify' visible='true' lat='2.000219196281612' lon='1.0010676736700022'>
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-42' action='modify' visible='true' lat='2.000339880427198' lon='1.000390380323228'>
<tag k='test:id' v='p-N2' />
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-40' action='modify' visible='true' lat='2.000292656197374' lon='1.0008104072049482'>
<tag k='test:id' v='p-S1' />
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-38' action='modify' visible='true' lat='2.0005287773329004' lon='1.0005321393958087'>
<tag k='test:id' v='p-N1' />
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-36' action='modify' visible='true' lat='2.000106382833144' lon='1.0006397712842492'>
<tag k='test:id' v='p-S2' />
<tag k='test:section' v='roads-with-pois' />
</node>
<node id='-34' action='modify' visible='true' lat='2.0007211692424525' lon='1.0035828658185688'>
<tag k='name' v='building-on-street-corner' />
</node>
<node id='-32' action='modify' visible='true' lat='2.000231635335803' lon='1.0031759205058477' />
<node id='-30' action='modify' visible='true' lat='2.000221087674047' lon='1.0040360790429201' />
<node id='-28' action='modify' visible='true' lat='2.0002263615049336' lon='1.0042735461237067' />
<node id='-26' action='modify' visible='true' lat='2.000105063390253' lon='1.0040413560891597' />
<node id='-24' action='modify' visible='true' lat='2.0006377202618473' lon='1.0040363991494512' />
<node id='-22' action='modify' visible='true' lat='2.0004388569487612' lon='1.0039042871025967' />
<node id='-20' action='modify' visible='true' lat='2.0004385547672516' lon='1.0040022536164286' />
<node id='-18' action='modify' visible='true' lat='2.0002343878022306' lon='1.0040016230872442' />
<node id='-16' action='modify' visible='true' lat='2.000234689983778' lon='1.0039036565734125' />
<node id='-14' action='modify' visible='true' lat='2.0003240388594246' lon='1.0040018999567464'>
<tag k='test:id' v='n-edge-NS' />
<tag k='test:section' v='building-on-street-corner' />
</node>
<node id='-12' action='modify' visible='true' lat='2.000354798021768' lon='1.0039384858598128'>
<tag k='test:id' v='n-inner' />
<tag k='test:section' v='building-on-street-corner' />
</node>
<node id='-10' action='modify' visible='true' lat='2.0004676051467527' lon='1.0039478144477645'>
<tag k='test:id' v='n-outer' />
<tag k='test:section' v='building-on-street-corner' />
</node>
<node id='-8' action='modify' visible='true' lat='2.000234516370527' lon='1.0039599415620857'>
<tag k='test:id' v='n-edge-WE' />
<tag k='test:section' v='building-on-street-corner' />
</node>
<node id='100000' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='2.0' lon='1.0' />
<node id='100001' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='3.0' lon='1.0' />
<node id='100002' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='3.0' lon='2.0' />
<node id='100003' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1' lat='2.0' lon='2.0' />
<way id='-195' action='modify' visible='true'>
<nd ref='-170' />
<nd ref='-172' />
<nd ref='-174' />
<tag k='test:id' v='w-5' />
<tag k='test:section' v='split-road' />
</way>
<way id='-194' action='modify' visible='true'>
<nd ref='-160' />
<nd ref='-162' />
<nd ref='-164' />
<nd ref='-166' />
<nd ref='-168' />
<nd ref='-170' />
<tag k='test:id' v='w-4a' />
<tag k='test:section' v='split-road' />
</way>
<way id='-193' action='modify' visible='true'>
<nd ref='-152' />
<nd ref='-154' />
<nd ref='-156' />
<nd ref='-158' />
<nd ref='-160' />
<tag k='test:id' v='w-3' />
<tag k='test:section' v='split-road' />
</way>
<way id='-192' action='modify' visible='true'>
<nd ref='-144' />
<nd ref='-146' />
<nd ref='-148' />
<nd ref='-150' />
<nd ref='-152' />
<tag k='test:id' v='w-2' />
<tag k='test:section' v='split-road' />
</way>
<way id='-184' action='modify' visible='true'>
<nd ref='-160' />
<nd ref='-183' />
<nd ref='-185' />
<nd ref='-187' />
<nd ref='-189' />
<nd ref='-170' />
<tag k='test:id' v='w-4b' />
<tag k='test:section' v='split-road' />
</way>
<way id='-143' action='modify' visible='true'>
<nd ref='-141' />
<nd ref='-142' />
<nd ref='-144' />
<tag k='test:id' v='w-1' />
<tag k='test:section' v='split-road' />
</way>
<way id='-134' action='modify' visible='true'>
<nd ref='-94' />
<nd ref='-96' />
<nd ref='-98' />
<nd ref='-112' />
<nd ref='-104' />
<nd ref='-100' />
<tag k='test:id' v='w-north' />
<tag k='test:section' v='points-on-roads' />
</way>
<way id='-132' action='modify' visible='true'>
<nd ref='-84' />
<nd ref='-106' />
<nd ref='-86' />
<nd ref='-88' />
<nd ref='-90' />
<nd ref='-102' />
<nd ref='-92' />
<tag k='test:id' v='w-south' />
<tag k='test:section' v='points-on-roads' />
</way>
<way id='-130' action='modify' visible='true'>
<nd ref='-62' />
<nd ref='-70' />
<tag k='test:id' v='w-alley' />
<tag k='test:section' v='road-with-alley' />
</way>
<way id='-128' action='modify' visible='true'>
<nd ref='-58' />
<nd ref='-60' />
<nd ref='-62' />
<nd ref='-64' />
<nd ref='-66' />
<nd ref='-68' />
<tag k='test:id' v='w-main' />
<tag k='test:section' v='road-with-alley' />
</way>
<way id='-126' action='modify' visible='true'>
<nd ref='-56' />
<nd ref='-54' />
<nd ref='-52' />
<nd ref='-50' />
<tag k='test:id' v='w-north' />
<tag k='test:section' v='roads-with-pois' />
</way>
<way id='-124' action='modify' visible='true'>
<nd ref='-48' />
<nd ref='-46' />
<nd ref='-44' />
<tag k='test:id' v='w-south' />
<tag k='test:section' v='roads-with-pois' />
</way>
<way id='-122' action='modify' visible='true'>
<nd ref='-32' />
<nd ref='-30' />
<nd ref='-28' />
<tag k='test:id' v='w-WE' />
<tag k='test:section' v='building-on-street-corner' />
</way>
<way id='-120' action='modify' visible='true'>
<nd ref='-26' />
<nd ref='-30' />
<nd ref='-24' />
<tag k='test:id' v='w-NS' />
<tag k='test:section' v='building-on-street-corner' />
</way>
<way id='-118' action='modify' visible='true'>
<nd ref='-22' />
<nd ref='-20' />
<nd ref='-14' />
<nd ref='-18' />
<nd ref='-8' />
<nd ref='-16' />
<nd ref='-22' />
<tag k='area' v='yes' />
<tag k='test:id' v='w-building' />
<tag k='test:section' v='building-on-street-corner' />
</way>
<way id='100000' timestamp='2014-01-01T00:00:00Z' uid='1' user='test' visible='true' version='1' changeset='1'>
<nd ref='100000' />
<nd ref='100001' />
<nd ref='100002' />
<nd ref='100003' />
<nd ref='100000' />
<tag k='note' v='test area, do not leave' />
</way>
</osm>

View File

@@ -1,6 +0,0 @@
w-5 | LINESTRING(1.0056855 2.0005616,1.0056087 2.0005669,1.0055106 2.0005245)
w-4a | LINESTRING(1.0062843 2.0005139,1.0061359 2.0004954,1.0060538 2.0005113,1.0059107 2.000506,1.0057358 2.0005007,1.0056855 2.0005616)
w-3 | LINESTRING(1.0061995 2.0003391,1.0062816 2.0002624,1.0063585 2.0002968,1.0063717 2.0004715,1.0062843 2.0005139)
w-2 | LINESTRING(1.0057941 2.0002809,1.0058869 2.0003259,1.0060034 2.0003497,1.0061041 2.0003577,1.0061995 2.0003391)
w-4b | LINESTRING(1.0062843 2.0005139,1.0061306 2.0005324,1.0060511 2.000543,1.0058975 2.000543,1.0057491 2.0005351,1.0056855 2.0005616)
w-1 | LINESTRING(1.0054709 2.0003603,1.0056352 2.0002782,1.0057941 2.0002809)

View File

@@ -1,232 +0,0 @@
""" Steps for checking the results of queries.
"""
from nose.tools import *
from lettuce import *
from tidylib import tidy_document
from collections import OrderedDict
import json
import logging
import re
from xml.dom.minidom import parseString
logger = logging.getLogger(__name__)
def _parse_xml():
""" Puts the DOM structure into more convenient python
with a similar structure as the json document, so
that the same semantics can be used. It does not
check if the content is valid (or at least not more than
necessary to transform it into a dict structure).
"""
page = parseString(world.page).documentElement
# header info
world.result_header = OrderedDict(page.attributes.items())
logger.debug('Result header: %r' % (world.result_header))
world.results = []
# results
if page.nodeName == 'searchresults':
for node in page.childNodes:
if node.nodeName != "#text":
assert_equals(node.nodeName, 'place', msg="Unexpected element '%s'" % node.nodeName)
newresult = OrderedDict(node.attributes.items())
assert_not_in('address', newresult)
assert_not_in('geokml', newresult)
address = OrderedDict()
for sub in node.childNodes:
if sub.nodeName == 'geokml':
newresult['geokml'] = sub.childNodes[0].toxml()
elif sub.nodeName == '#text':
pass
else:
address[sub.nodeName] = sub.firstChild.nodeValue.strip()
if address:
newresult['address'] = address
world.results.append(newresult)
elif page.nodeName == 'reversegeocode':
haserror = False
address = {}
for node in page.childNodes:
if node.nodeName == 'result':
assert_equals(len(world.results), 0)
assert (not haserror)
world.results.append(OrderedDict(node.attributes.items()))
assert_not_in('display_name', world.results[0])
assert_not_in('address', world.results[0])
world.results[0]['display_name'] = node.firstChild.nodeValue.strip()
elif node.nodeName == 'error':
assert_equals(len(world.results), 0)
haserror = True
elif node.nodeName == 'addressparts':
assert (not haserror)
address = OrderedDict()
for sub in node.childNodes:
address[sub.nodeName] = sub.firstChild.nodeValue.strip()
world.results[0]['address'] = address
elif node.nodeName == "#text":
pass
else:
assert False, "Unknown content '%s' in XML" % node.nodeName
else:
assert False, "Unknown document node name %s in XML" % page.nodeName
logger.debug("The following was parsed out of XML:")
logger.debug(world.results)
@step(u'a HTTP (\d+) is returned')
def api_result_http_error(step, error):
assert_equals(world.returncode, int(error))
@step(u'the result is valid( \w+)?')
def api_result_is_valid(step, fmt):
assert_equals(world.returncode, 200)
if world.response_format == 'html':
document, errors = tidy_document(world.page,
options={'char-encoding' : 'utf8'})
assert(len(errors) == 0), "Errors found in HTML document:\n%s" % errors
world.results = document
elif world.response_format == 'xml':
_parse_xml()
elif world.response_format == 'json':
world.results = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(world.page)
else:
assert False, "Unknown page format: %s" % (world.response_format)
if fmt:
assert_equals (fmt.strip(), world.response_format)
def compare(operator, op1, op2):
if operator == 'less than':
return op1 < op2
elif operator == 'more than':
return op1 > op2
elif operator == 'exactly':
return op1 == op2
elif operator == 'at least':
return op1 >= op2
elif operator == 'at most':
return op1 <= op2
else:
raise Exception("unknown operator '%s'" % operator)
@step(u'(less than|more than|exactly|at least|at most) (\d+) results? (?:is|are) returned')
def validate_result_number(step, operator, number):
step.given('the result is valid')
numres = len(world.results)
assert compare(operator, numres, int(number)), \
"Bad number of results: expected %s %s, got %d." % (operator, number, numres)
@step(u'result (\d+) has( not)? attributes (\S+)')
def search_check_for_result_attribute(step, num, invalid, attrs):
num = int(num)
step.given('at least %d results are returned' % (num + 1))
res = world.results[num]
for attr in attrs.split(','):
if invalid:
assert_not_in(attr.strip(), res)
else:
assert_in(attr.strip(),res)
@step(u'there is a json wrapper "([^"]*)"')
def api_result_check_json_wrapper(step, wrapper):
step.given('the result is valid json')
assert_equals(world.json_callback, wrapper)
@step(u'result header contains')
def api_result_header_contains(step):
step.given('the result is valid')
for line in step.hashes:
assert_in(line['attr'], world.result_header)
m = re.match("%s$" % (line['value'],), world.result_header[line['attr']])
@step(u'results contain$')
def api_result_contains(step):
step.given('at least 1 result is returned')
for line in step.hashes:
if 'ID' in line:
reslist = (world.results[int(line['ID'])],)
else:
reslist = world.results
for k,v in line.iteritems():
if k == 'latlon':
for curres in reslist:
world.match_geometry((float(curres['lat']), float(curres['lon'])), v)
elif k != 'ID':
for curres in reslist:
assert_in(k, curres)
if v[0] in '<>=':
# mathematical operation
evalexp = '%s %s' % (curres[k], v)
res = eval(evalexp)
logger.debug('Evaluating: %s = %s' % (res, evalexp))
assert_true(res, "Evaluation failed: %s" % (evalexp, ))
else:
# regex match
m = re.match("%s$" % (v,), curres[k])
assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))
@step(u'result addresses contain$')
def api_result_address_contains(step):
step.given('the result is valid')
for line in step.hashes:
if 'ID' in line:
reslist = (world.results[int(line['ID'])],)
else:
reslist = world.results
for k,v in line.iteritems():
if k != 'ID':
for res in reslist:
curres = res['address']
assert_in(k, curres)
m = re.match("%s$" % (v,), curres[k])
assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))
@step(u'address of result (\d+) contains')
def api_result_address_exact(step, resid):
resid = int(resid)
step.given('at least %d results are returned' % (resid + 1))
addr = world.results[resid]['address']
for line in step.hashes:
assert_in(line['type'], addr)
assert_equals(line['value'], addr[line['type']])
@step(u'address of result (\d+) does not contain (.*)')
def api_result_address_details_missing(step, resid, types):
resid = int(resid)
step.given('at least %d results are returned' % (resid + 1))
addr = world.results[resid]['address']
for t in types.split(','):
assert_not_in(t.strip(), addr)
@step(u'address of result (\d+) is')
def api_result_address_is(step, resid):
resid = int(resid)
step.given('at least %d results are returned' % (resid + 1))
result = world.results[resid]
linenr = 0
assert_equals(len(step.hashes), len(result['address']))
for k,v in result['address'].iteritems():
assert_equals(step.hashes[linenr]['type'], k)
assert_equals(step.hashes[linenr]['value'], v)
linenr += 1
@step('there are( no)? duplicates')
def api_result_check_for_duplicates(step, nodups=None):
step.given('at least 1 result is returned')
resarr = []
for res in world.results:
resarr.append((res['osm_type'], res['class'],
res['type'], res['display_name']))
if nodups is None:
assert len(resarr) > len(set(resarr))
else:
assert_equal(len(resarr), len(set(resarr)))
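A note on the matching convention used by the 'results contain' step above: an expected value that starts with '<', '>' or '=' is appended to the actual field value and evaluated as a comparison, anything else is treated as a regular expression anchored at the end of the value. A tiny self-contained illustration with made-up values:

    import re

    def field_matches(actual, expected):
        # mimic the two matching modes of the 'results contain' step
        if expected[0] in '<>=':
            return eval('%s %s' % (actual, expected))   # e.g. '0.75' and '> 0.5'
        return re.match('%s$' % expected, actual) is not None

    assert field_matches('0.75', '> 0.5')
    assert field_matches('W243', r'W\d+')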

View File

@@ -1,114 +0,0 @@
""" Steps for setting up and sending API requests.
"""
from nose.tools import *
from lettuce import *
import urllib
import urllib2
import logging
logger = logging.getLogger(__name__)
def api_call(requesttype):
world.json_callback = None
data = urllib.urlencode(world.params)
url = "%s/%s?%s" % (world.config.base_url, requesttype, data)
req = urllib2.Request(url=url, headers=world.header)
try:
fd = urllib2.urlopen(req)
world.page = fd.read()
world.returncode = 200
except urllib2.HTTPError, ex:
world.returncode = ex.code
world.page = None
return
pageinfo = fd.info()
assert_equal('utf-8', pageinfo.getparam('charset').lower())
pagetype = pageinfo.gettype()
fmt = world.params.get('format')
if fmt == 'html':
assert_equals('text/html', pagetype)
world.response_format = fmt
elif fmt == 'xml':
assert_equals('text/xml', pagetype)
world.response_format = fmt
elif fmt in ('json', 'jsonv2'):
if 'json_callback' in world.params:
world.json_callback = world.params['json_callback']
assert world.page.startswith(world.json_callback + '(')
assert world.page.endswith(')')
world.page = world.page[(len(world.json_callback)+1):-1]
assert_equals('application/javascript', pagetype)
else:
assert_equals('application/json', pagetype)
world.response_format = 'json'
else:
if requesttype == 'reverse':
assert_equals('text/xml', pagetype)
world.response_format = 'xml'
else:
assert_equals('text/html', pagetype)
world.response_format = 'html'
logger.debug("Page received (%s):" % world.response_format)
logger.debug(world.page)
api_setup_prepare_params(None)
@before.each_scenario
def api_setup_prepare_params(scenario):
world.results = []
world.params = {}
world.header = {}
@step(u'the request parameters$')
def api_setup_parameters(step):
"""Define the parameters of the request as a hash.
Resets parameter list.
"""
world.params = step.hashes[0]
@step(u'the HTTP header$')
def api_setup_header(step):
"""Define additional HTTP header parameters as a hash.
Resets parameter list.
"""
world.header = step.hashes[0]
@step(u'sending( \w+)? search query "([^"]*)"( with address)?')
def api_setup_search(step, fmt, query, doaddr):
world.params['q'] = query.encode('utf8')
if doaddr:
world.params['addressdetails'] = 1
if fmt:
world.params['format'] = fmt.strip()
api_call('search')
@step(u'sending( \w+)? structured query( with address)?$')
def api_setup_structured_search(step, fmt, doaddr):
world.params.update(step.hashes[0])
if doaddr:
world.params['addressdetails'] = 1
if fmt:
world.params['format'] = fmt.strip()
api_call('search')
@step(u'looking up (\w+ )?coordinates ([-\d.]+),([-\d.]+)')
def api_setup_reverse(step, fmt, lat, lon):
world.params['lat'] = lat
world.params['lon'] = lon
if fmt and fmt.strip():
world.params['format'] = fmt.strip()
api_call('reverse')
@step(u'looking up details for ([NRW]?\d+)')
def api_setup_details(step, obj):
if obj[0] in ('N', 'R', 'W'):
# an osm id
world.params['osmtype'] = obj[0]
world.params['osmid'] = obj[1:]
else:
world.params['place_id'] = obj
api_call('details')
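All of these setup steps funnel into api_call(), which URL-encodes the collected parameters onto the configured base URL. As a rough illustration, a json search query for "Berlin" with address details issues a request along these lines (the base URL here is a placeholder; in the tests it comes from world.config.base_url):

    import urllib
    import urllib2

    base_url = 'http://localhost/nominatim'   # placeholder
    params = {'q': 'Berlin', 'format': 'json', 'addressdetails': 1}

    url = '%s/search?%s' % (base_url, urllib.urlencode(params))
    page = urllib2.urlopen(urllib2.Request(url=url, headers={})).read()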

View File

@@ -1,124 +0,0 @@
""" Steps for checking the DB after import and update tests.
There are two groups of test here. The first group tests
the contents of db tables directly, the second checks
query results by using the command line query tool.
"""
from nose.tools import *
from lettuce import *
import psycopg2
import psycopg2.extensions
import psycopg2.extras
import os
import subprocess
import random
import json
import re
import logging
from collections import OrderedDict
logger = logging.getLogger(__name__)
@step(u'table placex contains as names for (N|R|W)(\d+)')
def check_placex_names(step, osmtyp, osmid):
""" Check for the exact content of the name hstaore in placex.
"""
cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
cur.execute('SELECT name FROM placex where osm_type = %s and osm_id =%s', (osmtyp, int(osmid)))
for line in cur:
names = dict(line['name'])
for name in step.hashes:
assert_in(name['k'], names)
assert_equals(names[name['k']], name['v'])
del names[name['k']]
assert_equals(len(names), 0)
@step(u'table ([a-z_]+) contains$')
def check_placex_content(step, tablename):
""" check that the given lines are in the given table
Entries are searched by osm_type/osm_id and then all
given columns are tested. If there is more than one
line for an OSM object, they must match in these columns.
"""
cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
for line in step.hashes:
osmtype, osmid, cls = world.split_id(line['object'])
q = 'SELECT *'
if tablename == 'placex':
q = q + ", ST_X(centroid) as clat, ST_Y(centroid) as clon"
q = q + ' FROM %s where osm_type = %%s and osm_id = %%s' % (tablename,)
if cls is None:
params = (osmtype, osmid)
else:
q = q + ' and class = %s'
params = (osmtype, osmid, cls)
cur.execute(q, params)
assert(cur.rowcount > 0)
for res in cur:
for k,v in line.iteritems():
if not k == 'object':
assert_in(k, res)
if type(res[k]) is dict:
val = world.make_hash(v)
assert_equals(res[k], val)
elif k in ('parent_place_id', 'linked_place_id'):
pid = world.get_placeid(v)
assert_equals(pid, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, pid, res[k]))
elif k == 'centroid':
world.match_geometry((res['clat'], res['clon']), v)
else:
assert_equals(str(res[k]), v, "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, str(res[k]), v))
@step(u'table (placex?) has no entry for (N|R|W)(\d+)(:\w+)?')
def check_placex_missing(step, tablename, osmtyp, osmid, placeclass):
cur = world.conn.cursor()
q = 'SELECT count(*) FROM %s where osm_type = %%s and osm_id = %%s' % (tablename, )
args = [osmtyp, int(osmid)]
if placeclass is not None:
q = q + ' and class = %s'
args.append(placeclass[1:])
cur.execute(q, args)
numres = cur.fetchone()[0]
assert_equals (numres, 0)
@step(u'search_name table contains$')
def check_search_name_content(step):
cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
for line in step.hashes:
placeid = world.get_placeid(line['place_id'])
cur.execute('SELECT * FROM search_name WHERE place_id = %s', (placeid,))
assert(cur.rowcount > 0)
for res in cur:
for k,v in line.iteritems():
if k in ('search_rank', 'address_rank'):
assert_equals(int(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k]))
elif k == 'importance':
assert_equals(float(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k]))
elif k in ('name_vector', 'nameaddress_vector'):
terms = [x.strip().replace('#', ' ') for x in v.split(',')]
cur.execute('SELECT word_id, word_token FROM word, (SELECT unnest(%s) as term) t WHERE word_token = make_standard_name(t.term)', (terms,))
assert cur.rowcount >= len(terms)
for wid in cur:
assert_in(wid['word_id'], res[k], "Missing term for %s/%s: %s" % (line['place_id'], k, wid['word_token']))
elif k == 'country_code':
assert_equals(v, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['place_id'], k, v, res[k]))
elif k == 'place_id':
pass
else:
raise Exception("Cannot handle field %s in search_name table" % (k, ))
@step(u'table search_name has no entry for (.*)')
def check_search_name_missing(step, osmid):
""" Checks if there is an entry in the search index for the
given place object.
"""
cur = world.conn.cursor()
placeid = world.get_placeid(osmid)
cur.execute('SELECT count(*) FROM search_name WHERE place_id =%s', (placeid,))
numres = cur.fetchone()[0]
assert_equals (numres, 0)
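The search_name check above validates name_vector and nameaddress_vector by pushing each expected term through make_standard_name and looking up its word id. A condensed sketch of that verification for a single term, relying only on the word table and the make_standard_name function used in the step; the helper name and example values are made up:

    def term_is_indexed(cur, term, id_vector):
        # id_vector is the integer array read from search_name.name_vector
        cur.execute("""SELECT word_id FROM word
                        WHERE word_token = make_standard_name(%s)""", (term,))
        return any(row[0] in id_vector for row in cur)

    # hypothetical use: term_is_indexed(cur, 'rose street', res['name_vector'])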

View File

@@ -1,272 +0,0 @@
""" Steps for setting up a test database with imports and updates.
There are two ways to state geometries for test data: with coordinates
and via scenes.
Coordinates should be given as a wkt without the enclosing type name.
Scenes are prepared geometries which can be found in the scenes/data/
directory. Each scene is saved in a .wkt file with its name, which
contains a list of id/wkt pairs. A scene can be set globally
for a scenario by using the step `the scene <scene name>`. Then each
object should be referred to as `:<object id>`. A geometry can also
be referred to without loading the scene by explicitly stating the
scene: `<scene name>:<object id>`.
"""
from nose.tools import *
from lettuce import *
import psycopg2
import psycopg2.extensions
import psycopg2.extras
import os
import subprocess
import random
import base64
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
@before.each_scenario
def setup_test_database(scenario):
""" Creates a new test database from the template database
that was set up earlier in terrain.py. Will be done only
for scenarios whose feature is tagged with 'DB'.
"""
if scenario.feature.tags is not None and 'DB' in scenario.feature.tags:
world.db_template_setup()
world.write_nominatim_config(world.config.test_db)
conn = psycopg2.connect(database=world.config.template_db)
conn.set_isolation_level(0)
cur = conn.cursor()
cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
conn.close()
world.conn = psycopg2.connect(database=world.config.test_db)
psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
@step('a wiped database')
def db_setup_wipe_db(step):
"""Explicit DB scenario setup only needed
to work around a bug where scenario outlines don't call
before_each_scenario correctly.
"""
if hasattr(world, 'conn'):
world.conn.close()
conn = psycopg2.connect(database=world.config.template_db)
conn.set_isolation_level(0)
cur = conn.cursor()
cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
conn.close()
world.conn = psycopg2.connect(database=world.config.test_db)
psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
@after.each_scenario
def tear_down_test_database(scenario):
""" Drops any previously created test database.
"""
if hasattr(world, 'conn'):
world.conn.close()
if scenario.feature.tags is not None and 'DB' in scenario.feature.tags and not world.config.keep_scenario_db:
conn = psycopg2.connect(database=world.config.template_db)
conn.set_isolation_level(0)
cur = conn.cursor()
cur.execute('DROP DATABASE %s' % (world.config.test_db,))
conn.close()
def _format_placex_cols(cols, geomtype, force_name):
if 'name' in cols:
if cols['name'].startswith("'"):
cols['name'] = world.make_hash(cols['name'])
else:
cols['name'] = { 'name' : cols['name'] }
elif force_name:
cols['name'] = { 'name' : base64.urlsafe_b64encode(os.urandom(int(random.random()*30))) }
if 'extratags' in cols:
cols['extratags'] = world.make_hash(cols['extratags'])
if 'admin_level' not in cols:
cols['admin_level'] = 100
if 'geometry' in cols:
coords = world.get_scene_geometry(cols['geometry'])
if coords is None:
coords = "'%s(%s)'::geometry" % (geomtype, cols['geometry'])
else:
coords = "'%s'::geometry" % coords.wkt
cols['geometry'] = coords
def _insert_place_table_nodes(places, force_name):
cur = world.conn.cursor()
for line in places:
cols = dict(line)
cols['osm_type'] = 'N'
_format_placex_cols(cols, 'POINT', force_name)
if 'geometry' in cols:
coords = cols.pop('geometry')
else:
coords = "ST_Point(%f, %f)" % (random.random()*360 - 180, random.random()*180 - 90)
query = 'INSERT INTO place (%s,geometry) values(%s, ST_SetSRID(%s, 4326))' % (
','.join(cols.iterkeys()),
','.join(['%s' for x in range(len(cols))]),
coords
)
cur.execute(query, cols.values())
world.conn.commit()
def _insert_place_table_objects(places, geomtype, force_name):
cur = world.conn.cursor()
for line in places:
cols = dict(line)
if 'osm_type' not in cols:
cols['osm_type'] = 'W'
_format_placex_cols(cols, geomtype, force_name)
coords = cols.pop('geometry')
query = 'INSERT INTO place (%s, geometry) values(%s, ST_SetSRID(%s, 4326))' % (
','.join(cols.iterkeys()),
','.join(['%s' for x in range(len(cols))]),
coords
)
cur.execute(query, cols.values())
world.conn.commit()
@step(u'the scene (.*)')
def import_set_scene(step, scene):
world.load_scene(scene)
@step(u'the (named )?place (node|way|area)s')
def import_place_table_nodes(step, named, osmtype):
"""Insert a list of nodes into the placex table.
Expects a table where columns are named in the same way as placex.
"""
cur = world.conn.cursor()
cur.execute('ALTER TABLE place DISABLE TRIGGER place_before_insert')
if osmtype == 'node':
_insert_place_table_nodes(step.hashes, named is not None)
elif osmtype == 'way' :
_insert_place_table_objects(step.hashes, 'LINESTRING', named is not None)
elif osmtype == 'area' :
_insert_place_table_objects(step.hashes, 'POLYGON', named is not None)
cur.execute('ALTER TABLE place ENABLE TRIGGER place_before_insert')
cur.close()
world.conn.commit()
@step(u'the relations')
def import_fill_planet_osm_rels(step):
"""Adds a raw relation to the osm2pgsql table.
Three columns need to be supplied: id, tags, members.
"""
cur = world.conn.cursor()
for line in step.hashes:
members = []
parts = { 'n' : [], 'w' : [], 'r' : [] }
if line['members'].strip():
for mem in line['members'].split(','):
memparts = mem.strip().split(':', 2)
memid = memparts[0].lower()
parts[memid[0]].append(int(memid[1:]))
members.append(memid)
if len(memparts) == 2:
members.append(memparts[1])
else:
members.append('')
tags = []
for k,v in world.make_hash(line['tags']).iteritems():
tags.extend((k,v))
if not members:
members = None
cur.execute("""INSERT INTO planet_osm_rels
(id, way_off, rel_off, parts, members, tags, pending)
VALUES (%s, %s, %s, %s, %s, %s, false)""",
(line['id'], len(parts['n']), len(parts['n']) + len(parts['w']),
parts['n'] + parts['w'] + parts['r'], members, tags))
world.conn.commit()
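# Illustrative example (assumption, not in the original): a members cell of
# "N1:label,W2" is collected as parts {'n': [1], 'w': [2], 'r': []} and a flat
# members array ['n1', 'label', 'w2', ''], i.e. alternating id/role entries as
# expected in the osm2pgsql planet_osm_rels layout.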
@step(u'the ways')
def import_fill_planet_osm_ways(step):
cur = world.conn.cursor()
for line in step.hashes:
if 'tags' in line:
tags = world.make_hash(line['tags'])
else:
tags = None
nodes = [int(x.strip()) for x in line['nodes'].split(',')]
cur.execute("""INSERT INTO planet_osm_ways
(id, nodes, tags, pending)
VALUES (%s, %s, %s, false)""",
(line['id'], nodes, tags))
world.conn.commit()
############### import and update steps #######################################
@step(u'importing')
def import_database(step):
""" Runs the actual indexing. """
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
cur = world.conn.cursor()
cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
housenumber, street, addr_place, isin, postcode, country_code, extratags,
geometry) select * from place""")
world.conn.commit()
world.run_nominatim_script('setup', 'index', 'index-noanalyse')
#world.db_dump_table('placex')
@step(u'updating place (node|way|area)s')
def update_place_table_nodes(step, osmtype):
""" Replace a geometry in place by reinsertion and reindex database.
"""
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
if osmtype == 'node':
_insert_place_table_nodes(step.hashes, False)
elif osmtype == 'way':
_insert_place_table_objects(step.hashes, 'LINESTRING', False)
elif osmtype == 'area':
_insert_place_table_objects(step.hashes, 'POLYGON', False)
world.run_nominatim_script('update', 'index')
@step(u'marking for delete (.*)')
def update_delete_places(step, places):
""" Remove an entry from place and reindex database.
"""
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
cur = world.conn.cursor()
for place in places.split(','):
osmtype, osmid, cls = world.split_id(place)
if cls is None:
q = "delete from place where osm_type = %s and osm_id = %s"
params = (osmtype, osmid)
else:
q = "delete from place where osm_type = %s and osm_id = %s and class = %s"
params = (osmtype, osmid, cls)
cur.execute(q, params)
world.conn.commit()
#world.db_dump_table('placex')
world.run_nominatim_script('update', 'index')
@step(u'sending query "(.*)"( with dups)?$')
def query_cmd(step, query, with_dups):
""" Results in standard query output. The same tests as for API queries
can be used.
"""
cmd = [os.path.join(world.config.source_dir, 'utils', 'query.php'),
'--search', query]
if with_dups is not None:
cmd.append('--nodedupe')
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(outp, err) = proc.communicate()
assert (proc.returncode == 0), "query.php failed with message: %s" % err
world.page = outp
world.response_format = 'json'
world.returncode = 200

@@ -1,212 +0,0 @@
""" Steps for setting up a test database for osm2pgsql import.
Note that osm2pgsql features need a database and therefore need
to be tagged with @DB.
"""
from nose.tools import *
from lettuce import *
import logging
import random
import tempfile
import os
import subprocess
logger = logging.getLogger(__name__)
@before.each_scenario
def osm2pgsql_setup_test(scenario):
world.osm2pgsql = []
@step(u'the osm nodes:')
def osm2pgsql_import_nodes(step):
""" Define a list of OSM nodes to be imported, given as a table.
Each line describes one node with all its attributes.
'id' is mandatory, all other fields are filled with random values
when not given. If 'tags' is missing an empty tag list is assumed.
For updates, a mandatory 'action' column needs to contain 'C' (create),
'M' (modify) or 'D' (delete).
"""
for line in step.hashes:
node = { 'type' : 'N', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
}
node.update(line)
node['id'] = int(node['id'])
if 'geometry' in node:
lat, lon = node['geometry'].split(' ')
node['lat'] = float(lat)
node['lon'] = float(lon)
else:
node['lon'] = random.random()*360 - 180
node['lat'] = random.random()*180 - 90
if 'tags' in node:
node['tags'] = world.make_hash(line['tags'])
else:
node['tags'] = {}
world.osm2pgsql.append(node)
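# Illustrative example (assumption, not in the original): a row such as
#   | id | geometry  | tags              |
#   | 1  | 43.7 7.41 | 'amenity' : 'bar' |
# yields a node with id=1, lat=43.7, lon=7.41 and tags {'amenity': 'bar'};
# rows without a geometry column get random coordinates.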
@step(u'the osm ways:')
def osm2pgsql_import_ways(step):
""" Define a list of OSM ways to be imported.
"""
for line in step.hashes:
way = { 'type' : 'W', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
}
way.update(line)
way['id'] = int(way['id'])
if 'tags' in way:
way['tags'] = world.make_hash(line['tags'])
else:
way['tags'] = None
way['nodes'] = way['nodes'].strip().split()
world.osm2pgsql.append(way)
membertype = { 'N' : 'node', 'W' : 'way', 'R' : 'relation' }
@step(u'the osm relations:')
def osm2pgsql_import_rels(step):
""" Define a list of OSM relation to be imported.
"""
for line in step.hashes:
rel = { 'type' : 'R', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
}
rel.update(line)
rel['id'] = int(rel['id'])
if 'tags' in rel:
rel['tags'] = world.make_hash(line['tags'])
else:
rel['tags'] = {}
members = []
if rel['members'].strip():
for mem in line['members'].split(','):
memparts = mem.strip().split(':', 2)
memid = memparts[0].upper()
members.append((membertype[memid[0]],
memid[1:],
memparts[1] if len(memparts) == 2 else ''
))
rel['members'] = members
world.osm2pgsql.append(rel)
def _sort_xml_entries(x, y):
if x['type'] == y['type']:
return cmp(x['id'], y['id'])
else:
return cmp('NWR'.find(x['type']), 'NWR'.find(y['type']))
def write_osm_obj(fd, obj):
if obj['type'] == 'N':
fd.write('<node id="%(id)d" lat="%(lat).8f" lon="%(lon).8f" version="%(version)s" timestamp="%(timestamp)s" changeset="%(changeset)s" uid="%(uid)s" user="%(user)s"'% obj)
if obj['tags'] is None:
fd.write('/>\n')
else:
fd.write('>\n')
for k,v in obj['tags'].iteritems():
fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
fd.write('</node>\n')
elif obj['type'] == 'W':
fd.write('<way id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
for nd in obj['nodes']:
fd.write('<nd ref="%s" />\n' % (nd,))
for k,v in obj['tags'].iteritems():
fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
fd.write('</way>\n')
elif obj['type'] == 'R':
fd.write('<relation id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
for mem in obj['members']:
fd.write(' <member type="%s" ref="%s" role="%s"/>\n' % mem)
for k,v in obj['tags'].iteritems():
fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
fd.write('</relation>\n')
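# Illustrative output sketch (assumption, not in the original): a node dict
# {'id': 1, 'lat': 43.7, 'lon': 7.4, 'tags': {'amenity': 'bar'}, ...} is
# serialised roughly as
#   <node id="1" lat="43.70000000" lon="7.40000000" ...>
#    <tag k="amenity" v="bar"/>
#   </node>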
@step(u'loading osm data')
def osm2pgsql_load_place(step):
"""Imports the previously defined OSM data into a fresh copy of a
Nominatim test database.
"""
world.osm2pgsql.sort(cmp=_sort_xml_entries)
# create a OSM file in /tmp
with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
fname = fd.name
fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
fd.write('<osm version="0.6" generator="test-nominatim" timestamp="2014-08-26T20:22:02Z">\n')
fd.write('\t<bounds minlat="43.72335" minlon="7.409205" maxlat="43.75169" maxlon="7.448637"/>\n')
for obj in world.osm2pgsql:
write_osm_obj(fd, obj)
fd.write('</osm>\n')
logger.debug( "Filename: %s" % fname)
cmd = [os.path.join(world.config.source_dir, 'utils', 'setup.php')]
cmd.extend(['--osm-file', fname, '--import-data'])
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(outp, outerr) = proc.communicate()
assert (proc.returncode == 0), "OSM data import failed:\n%s\n%s\n" % (outp, outerr)
### reintroduce the triggers/indexes we've lost by having osm2pgsql set up place again
cur = world.conn.cursor()
cur.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place
FOR EACH ROW EXECUTE PROCEDURE place_delete()""")
cur.execute("""CREATE TRIGGER place_before_insert BEFORE INSERT ON place
FOR EACH ROW EXECUTE PROCEDURE place_insert()""")
cur.execute("""CREATE UNIQUE INDEX idx_place_osm_unique on place using btree(osm_id,osm_type,class,type)""")
world.conn.commit()
os.remove(fname)
world.osm2pgsql = []
actiontypes = { 'C' : 'create', 'M' : 'modify', 'D' : 'delete' }
@step(u'updating osm data')
def osm2pgsql_update_place(step):
"""Creates an osc file from the previously defined data and imports it
into the database.
"""
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
cur = world.conn.cursor()
cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
housenumber, street, addr_place, isin, postcode, country_code, extratags,
geometry) select * from place""")
world.conn.commit()
world.run_nominatim_script('setup', 'index', 'index-noanalyse')
world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
fname = fd.name
fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
fd.write('<osmChange version="0.6" generator="Osmosis 0.43.1">\n')
for obj in world.osm2pgsql:
fd.write('<%s>\n' % (actiontypes[obj['action']], ))
write_osm_obj(fd, obj)
fd.write('</%s>\n' % (actiontypes[obj['action']], ))
fd.write('</osmChange>\n')
logger.debug( "Filename: %s" % fname)
cmd = [os.path.join(world.config.source_dir, 'utils', 'update.php')]
cmd.extend(['--import-diff', fname])
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(outp, outerr) = proc.communicate()
assert (proc.returncode == 0), "OSM data update failed:\n%s\n%s\n" % (outp, outerr)
os.remove(fname)
world.osm2pgsql = []

@@ -1,246 +0,0 @@
from lettuce import *
from nose.tools import *
import logging
import os
import subprocess
import psycopg2
import psycopg2.extras
import re
from haversine import haversine
from shapely.wkt import loads as wkt_load
from shapely.ops import linemerge
logger = logging.getLogger(__name__)
class NominatimConfig:
def __init__(self):
# logging setup
loglevel = getattr(logging, os.environ.get('LOGLEVEL','info').upper())
if 'LOGFILE' in os.environ:
logging.basicConfig(filename=os.environ.get('LOGFILE','run.log'),
level=loglevel)
else:
logging.basicConfig(level=loglevel)
# Nominatim test setup
self.base_url = os.environ.get('NOMINATIM_SERVER', 'http://localhost/nominatim')
self.source_dir = os.path.abspath(os.environ.get('NOMINATIM_DIR', '..'))
self.template_db = os.environ.get('TEMPLATE_DB', 'test_template_nominatim')
self.test_db = os.environ.get('TEST_DB', 'test_nominatim')
self.local_settings_file = os.environ.get('NOMINATIM_SETTINGS', '/tmp/nominatim_settings.php')
self.reuse_template = 'NOMINATIM_REUSE_TEMPLATE' in os.environ
self.keep_scenario_db = 'NOMINATIM_KEEP_SCENARIO_DB' in os.environ
os.environ['NOMINATIM_SETTINGS'] = self.local_settings_file
scriptpath = os.path.dirname(os.path.abspath(__file__))
self.scene_path = os.environ.get('SCENE_PATH',
os.path.join(scriptpath, '..', 'scenes', 'data'))
def __str__(self):
return 'Server URL: %s\nSource dir: %s\n' % (self.base_url, self.source_dir)
world.config = NominatimConfig()
@world.absorb
def write_nominatim_config(dbname):
f = open(world.config.local_settings_file, 'w')
f.write("<?php\n @define('CONST_Database_DSN', 'pgsql://@/%s');\n" % dbname)
f.close()
@world.absorb
def run_nominatim_script(script, *args):
cmd = [os.path.join(world.config.source_dir, 'utils', '%s.php' % script)]
cmd.extend(['--%s' % x for x in args])
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(outp, outerr) = proc.communicate()
assert (proc.returncode == 0), "Script '%s' failed:\n%s\n%s\n" % (script, outp, outerr)
@world.absorb
def make_hash(inp):
return eval('{' + inp + '}')
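# Minimal usage sketch (not part of the original file):
#   world.make_hash("'amenity' : 'restaurant', 'name' : 'Foo'")
# evaluates to {'amenity': 'restaurant', 'name': 'Foo'}; the input must be a
# valid Python dict body because it is passed straight to eval().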
@world.absorb
def split_id(oid):
""" Splits a unique identifier for places into its components.
As place_ids cannot be used for testing, we use a unique
identifier instead that is of the form <osmtype><osmid>[:class].
"""
oid = oid.strip()
if oid == 'None':
return None, None, None
osmtype = oid[0]
assert_in(osmtype, ('R','N','W'))
if ':' in oid:
osmid, cls = oid[1:].split(':')
return (osmtype, int(osmid), cls)
else:
return (osmtype, int(oid[1:]), None)
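# Illustrative examples (assumption, not in the original):
#   world.split_id('N3335')      == ('N', 3335, None)
#   world.split_id('W4:highway') == ('W', 4, 'highway')
#   world.split_id('None')       == (None, None, None)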
@world.absorb
def get_placeid(oid):
""" Tries to retrive the place_id for a unique identifier. """
if oid[0].isdigit():
return int(oid)
osmtype, osmid, cls = world.split_id(oid)
if osmtype is None:
return None
cur = world.conn.cursor()
if cls is None:
q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s'
params = (osmtype, osmid)
else:
q = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s and class = %s'
params = (osmtype, osmid, cls)
cur.execute(q, params)
assert_equals (cur.rowcount, 1)
return cur.fetchone()[0]
@world.absorb
def match_geometry(coord, matchstring):
m = re.match(r'([-0-9.]+),\s*([-0-9.]+)\s*(?:\+-([0-9.]+)([a-z]+)?)?', matchstring)
assert_is_not_none(m, "Invalid match string")
logger.debug("Distmatch: %s/%s %s %s" % (m.group(1), m.group(2), m.group(3), m.group(4) ))
dist = haversine(coord, (float(m.group(1)), float(m.group(2))))
if m.group(3) is not None:
expdist = float(m.group(3))
if m.group(4) is not None:
if m.group(4) == 'm':
expdist = expdist/1000
elif m.group(4) == 'km':
pass
else:
raise Exception("Unknown unit '%s' in geometry match" % (m.group(4), ))
else:
expdist = 0
logger.debug("Distances expected: %f, got: %f" % (expdist, dist))
assert dist <= expdist, "Geometry too far away, expected: %f, got: %f" % (expdist, dist)
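# Illustrative example (assumption, not in the original): a match string of
# "52.51, 13.40 +-200m" asserts that coord lies within 0.2 km (haversine
# distance) of (52.51, 13.40); without the "+-" part an exact position is
# expected.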
@world.absorb
def db_dump_table(table):
cur = world.conn.cursor()
cur.execute('SELECT * FROM %s' % table)
print '<<<<<<< BEGIN OF TABLE DUMP %s' % table
for res in cur:
print res
print '<<<<<<< END OF TABLE DUMP %s' % table
@world.absorb
def db_drop_database(name):
conn = psycopg2.connect(database='postgres')
conn.set_isolation_level(0)
cur = conn.cursor()
cur.execute('DROP DATABASE IF EXISTS %s' % (name, ))
conn.close()
world.is_template_set_up = False
@world.absorb
def db_template_setup():
""" Set up a template database, containing all tables
but not yet any functions.
"""
if world.is_template_set_up:
return
world.is_template_set_up = True
world.write_nominatim_config(world.config.template_db)
if world.config.reuse_template:
# check that the template is there
conn = psycopg2.connect(database='postgres')
cur = conn.cursor()
cur.execute('select count(*) from pg_database where datname = %s',
(world.config.template_db,))
if cur.fetchone()[0] == 1:
return
else:
# just in case... make sure a previous table has been dropped
world.db_drop_database(world.config.template_db)
# call the first part of database setup
world.run_nominatim_script('setup', 'create-db', 'setup-db')
# remove external data to speed up indexing for tests
conn = psycopg2.connect(database=world.config.template_db)
psycopg2.extras.register_hstore(conn, globally=False, unicode=True)
cur = conn.cursor()
for table in ('gb_postcode', 'us_postcode', 'us_state', 'us_statecounty'):
cur.execute('TRUNCATE TABLE %s' % (table,))
conn.commit()
conn.close()
# execute osm2pgsql on an empty file to get the right tables
osm2pgsql = os.path.join(world.config.source_dir, 'osm2pgsql', 'osm2pgsql')
proc = subprocess.Popen([osm2pgsql, '-lsc', '-O', 'gazetteer', '-d', world.config.template_db, '-'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
[outstr, errstr] = proc.communicate(input='<osm version="0.6"></osm>')
world.run_nominatim_script('setup', 'create-functions', 'create-tables', 'create-partition-tables', 'create-partition-functions', 'load-data', 'create-search-indices')
# Leave the table around so it can be reused again after a non-reuse test round.
#@after.all
def db_template_teardown(total):
""" Set up a template database, containing all tables
but not yet any functions.
"""
if world.is_template_set_up:
# remove template DB
if not world.config.reuse_template:
world.db_drop_database(world.config.template_db)
try:
os.remove(world.config.local_settings_file)
except OSError:
pass # ignore missing file
##########################################################################
#
# Data scene handling
#
world.scenes = {}
world.current_scene = None
@world.absorb
def load_scene(name):
if name in world.scenes:
world.current_scene = world.scenes[name]
else:
with open(os.path.join(world.config.scene_path, "%s.wkt" % name), 'r') as fd:
scene = {}
for line in fd:
if line.strip():
obj, wkt = line.split('|', 2)
wkt = wkt.strip()
scene[obj.strip()] = wkt_load(wkt)
world.scenes[name] = scene
world.current_scene = scene
@world.absorb
def get_scene_geometry(name):
if not ':' in name:
# Not a scene description
return None
geoms = []
for obj in name.split('+'):
oname = obj.strip()
if oname.startswith(':'):
geoms.append(world.current_scene[oname[1:]])
else:
scene, obj = oname.split(':', 2)
oldscene = world.current_scene
world.load_scene(scene)
wkt = world.current_scene[obj]
world.current_scene = oldscene
geoms.append(wkt)
if len(geoms) == 1:
return geoms[0]
else:
return linemerge(geoms)
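# Illustrative example (assumption, not in the original): a geometry value of
# ":building" looks up the object 'building' in the currently loaded scene,
# while "roads:main + :side" loads the extra scene 'roads' for its 'main'
# object and merges it with 'side' from the current scene via linemerge().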

@@ -1,52 +0,0 @@
#!/usr/bin/php -Cq
<?php
require_once(dirname(dirname(__FILE__)).'/lib/init-cmd.php');
ini_set('memory_limit', '800M');
$aCMDOptions = array(
"Manage service blocks / restrictions",
array('help', 'h', 0, 1, 0, 0, false, 'Show Help'),
array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
array('list', 'l', 0, 1, 0, 0, 'bool', 'List recent blocks'),
array('delete', 'd', 0, 1, 0, 0, 'bool', 'Clear recent blocks list'),
array('flush', '', 0, 1, 0, 0, 'bool', 'Flush all blocks / stats'),
);
getCmdOpt($_SERVER['argv'], $aCMDOptions, $aResult, true, true);
$m = getBucketMemcache();
if (!$m)
{
echo "ERROR: Bucket memcache is not configured\n";
exit;
}
if ($aResult['list'])
{
$iCurrentSleeping = $m->get('sleepCounter');
echo "\n Sleeping blocks count: $iCurrentSleeping\n";
$aBlocks = getBucketBlocks();
echo "\n";
printf(" %-40s | %12s | %7s | %13s | %31s | %8s\n", "Key", "Total Blocks", "Current", "Still Blocked", "Last Block Time", "Sleeping");
printf(" %'--40s-|-%'-12s-|-%'-7s-|-%'-13s-|-%'-31s-|-%'-8s\n", "", "", "", "", "", "");
foreach($aBlocks as $sKey => $aDetails)
{
printf(" %-40s | %12s | %7s | %13s | %31s | %8s\n", $sKey, $aDetails['totalBlocks'],
(int)$aDetails['currentBucketSize'], $aDetails['currentlyBlocked']?'Y':'N',
date("r", $aDetails['lastBlockTimestamp']), $aDetails['isSleeping']?'Y':'N');
}
echo "\n";
}
if ($aResult['delete'])
{
$m->set('sleepCounter', 0);
clearBucketBlocks();
}
if ($aResult['flush'])
{
$m->flush();
}

@@ -1,34 +0,0 @@
#!/usr/bin/php -Cq
<?php
require_once(dirname(dirname(__FILE__)).'/lib/init-cmd.php');
ini_set('memory_limit', '800M');
ini_set('display_errors', 'stderr');
$aCMDOptions = array(
"Import country language data from osm wiki",
array('help', 'h', 0, 1, 0, 0, false, 'Show Help'),
array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
);
getCmdOpt($_SERVER['argv'], $aCMDOptions, $aCMDResult, true, true);
include(CONST_BasePath.'/settings/phrase_settings.php');
if (true)
{
$sURL = 'http://wiki.openstreetmap.org/wiki/Special:Export/Nominatim/Country_Codes';
$sWikiPageXML = file_get_contents($sURL);
if (preg_match_all('#\\| ([a-z]{2}) \\|\\| [^|]+\\|\\| ([a-z,]+)#', $sWikiPageXML, $aMatches, PREG_SET_ORDER))
{
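// Illustrative match (assumption, not in the original): a wiki table row such
// as "| de || Germany || de,en" yields country code "de" and languages "de,en".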
foreach($aMatches as $aMatch)
{
$aLanguages = explode(',', $aMatch[2]);
foreach($aLanguages as $i => $s)
{
$aLanguages[$i] = '"'.pg_escape_string($s).'"';
}
echo "UPDATE country_name set country_default_language_codes = '{".join(',',$aLanguages)."}' where country_code = '".pg_escape_string($aMatch[1])."';\n";
}
}
}

@@ -1,593 +0,0 @@
#!/usr/bin/php -Cq
<?php
require_once(dirname(dirname(__FILE__)).'/lib/init-cmd.php');
ini_set('memory_limit', '800M');
$aCMDOptions = array(
"Create and setup nominatim search system",
array('help', 'h', 0, 1, 0, 0, false, 'Show Help'),
array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
array('create-tables', '', 0, 1, 0, 0, 'bool', 'Create wikipedia tables'),
array('parse-articles', '', 0, 1, 0, 0, 'bool', 'Parse wikipedia articles'),
array('link', '', 0, 1, 0, 0, 'bool', 'Try to link to existing OSM ids'),
);
getCmdOpt($_SERVER['argv'], $aCMDOptions, $aCMDResult, true, true);
/*
$sTestPageText = <<<EOD
{{Coord|47|N|2|E|type:country_region:FR|display=title}}
{{ Infobox Amusement park
| name = Six Flags Great Adventure
| image = [[File:SixFlagsGreatAdventure logo.png]]
| caption = Six Flags Great Adventure logo
| location = [[Jackson, New Jersey|Jackson]]
| location2 = New Jersey
| location3 = United States
| address = 1 Six Flags Boulevard<ref name="drivedir"/>
| season = March/April through October/November
| opening_date = July 1, 1974
| previous_names = Great Adventure
| area_acre = 2200
| rides = 45 park admission rides
| coasters = 12
| water_rides = 2
| owner = [[Six Flags]]
| general_manager =
| homepage = [http://www.sixflags.com/parks/greatadventure/ Six Flags Great Adventure]
}}
EOD;
var_dump(_templatesToProperties(_parseWikipediaContent($sTestPageText)));
exit;
//| coordinates = {{Coord|40|08|16.65|N|74|26|26.69|W|region:US-NJ_type:landmark|display=inline,title}}
*/
/*
$a = array();
$a[] = 'test';
$oDB &= getDB();
if ($aCMDResult['drop-tables'])
{
$oDB->query('DROP TABLE wikipedia_article');
$oDB->query('DROP TABLE wikipedia_link');
}
*/
if ($aCMDResult['create-tables'])
{
$sSQL = <<<'EOD'
CREATE TABLE wikipedia_article (
language text NOT NULL,
title text NOT NULL,
langcount integer,
othercount integer,
totalcount integer,
lat double precision,
lon double precision,
importance double precision,
title_en text,
osm_type character(1),
osm_id bigint,
infobox_type text,
population bigint,
website text
);
EOD;
$oDB->query($sSQL);
$oDB->query("SELECT AddGeometryColumn('wikipedia_article', 'location', 4326, 'GEOMETRY', 2)");
$sSQL = <<<'EOD'
CREATE TABLE wikipedia_link (
from_id INTEGER,
to_name text
);
EOD;
$oDB->query($sSQL);
}
function degreesAndMinutesToDecimal($iDegrees, $iMinutes=0, $fSeconds=0, $sNSEW='N')
{
$sNSEW = strtoupper($sNSEW);
return ($sNSEW == 'S' || $sNSEW == 'W'?-1:1) * ((float)$iDegrees + (float)$iMinutes/60 + (float)$fSeconds/3600);
}
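// Worked example (not part of the original file):
// degreesAndMinutesToDecimal(52, 30, 0, 'S') == -52.5 (52 + 30/60, negated for S/W)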
function _parseWikipediaContent($sPageText)
{
$sPageText = str_replace("\n", ' ', $sPageText);
$sPageText = preg_replace('#<!--.*?-->#m', '', $sPageText);
$sPageText = preg_replace('#<math>.*?<\\/math>#m', '', $sPageText);
$aPageText = preg_split('#({{|}}|\\[\\[|\\]\\]|[|])#', $sPageText, -1, PREG_SPLIT_DELIM_CAPTURE);
$aPageProperties = array();
$sPageBody = '';
$aTemplates = array();
$aLinks = array();
$aTemplateStack = array();
$aState = array('body');
foreach($aPageText as $i => $sPart)
{
switch($sPart)
{
case '{{':
array_unshift($aTemplateStack, array('', array()));
array_unshift($aState, 'template');
break;
case '}}':
if ($aState[0] == 'template' || $aState[0] == 'templateparam')
{
$aTemplate = array_shift($aTemplateStack);
array_shift($aState);
$aTemplates[] = $aTemplate;
}
break;
case '[[':
$sLinkPage = '';
$sLinkSyn = '';
array_unshift($aState, 'link');
break;
case ']]':
if ($aState[0] == 'link' || $aState[0] == 'linksynonim')
{
if (!$sLinkSyn) $sLinkSyn = $sLinkPage;
if (substr($sLinkPage, 0, 6) == 'Image:') $sLinkSyn = substr($sLinkPage, 6);
$aLinks[] = array($sLinkPage, $sLinkSyn);
array_shift($aState);
switch($aState[0])
{
case 'template':
$aTemplateStack[0][0] .= trim($sPart);
break;
case 'templateparam':
$aTemplateStack[0][1][0] .= $sLinkSyn;
break;
case 'link':
$sLinkPage .= trim($sPart);
break;
case 'linksynonim':
$sLinkSyn .= $sPart;
break;
case 'body':
$sPageBody .= $sLinkSyn;
break;
default:
var_dump($aState, $sPageName, $aTemplateStack, $sPart, $aPageText);
fail('unknown state');
}
}
break;
case '|':
if ($aState[0] == 'template' || $aState[0] == 'templateparam')
{
// Create a new template parameter
$aState[0] = 'templateparam';
array_unshift($aTemplateStack[0][1], '');
}
if ($aState[0] == 'link') $aState[0] = 'linksynonim';
break;
default:
switch($aState[0])
{
case 'template':
$aTemplateStack[0][0] .= trim($sPart);
break;
case 'templateparam':
$aTemplateStack[0][1][0] .= $sPart;
break;
case 'link':
$sLinkPage .= trim($sPart);
break;
case 'linksynonim':
$sLinkSyn .= $sPart;
break;
case 'body':
$sPageBody .= $sPart;
break;
default:
var_dump($aState, $aPageText);
fail('unknown state');
}
break;
}
}
return $aTemplates;
}
function _templatesToProperties($aTemplates)
{
$aPageProperties = array();
foreach($aTemplates as $iTemplate => $aTemplate)
{
$aParams = array();
foreach(array_reverse($aTemplate[1]) as $iParam => $sParam)
{
if (($iPos = strpos($sParam, '=')) === FALSE)
{
$aParams[] = trim($sParam);
}
else
{
$aParams[trim(substr($sParam, 0, $iPos))] = trim(substr($sParam, $iPos+1));
}
}
$aTemplates[$iTemplate][1] = $aParams;
if (!isset($aPageProperties['sOfficialName']) && isset($aParams['official_name']) && $aParams['official_name']) $aPageProperties['sOfficialName'] = $aParams['official_name'];
if (!isset($aPageProperties['iPopulation']) && isset($aParams['population']) && $aParams['population'] && preg_match('#^[0-9.,]+#', $aParams['population']))
{
$aPageProperties['iPopulation'] = (int)str_replace(array(',','.'), '', $aParams['population']);
}
if (!isset($aPageProperties['iPopulation']) && isset($aParams['population_total']) && $aParams['population_total'] && preg_match('#^[0-9.,]+#', $aParams['population_total']))
{
$aPageProperties['iPopulation'] = (int)str_replace(array(',','.'), '', $aParams['population_total']);
}
if (!isset($aPageProperties['iPopulation']) && isset($aParams['population_urban']) && $aParams['population_urban'] && preg_match('#^[0-9.,]+#', $aParams['population_urban']))
{
$aPageProperties['iPopulation'] = (int)str_replace(array(',','.'), '', $aParams['population_urban']);
}
if (!isset($aPageProperties['iPopulation']) && isset($aParams['population_estimate']) && $aParams['population_estimate'] && preg_match('#^[0-9.,]+#', $aParams['population_estimate']))
{
$aPageProperties['iPopulation'] = (int)str_replace(array(',','.'), '', $aParams['population_estimate']);
}
if (!isset($aPageProperties['sWebsite']) && isset($aParams['website']) && $aParams['website'])
{
if (preg_match('#^\\[?([^ \\]]+)[^\\]]*\\]?$#', $aParams['website'], $aMatch))
{
$aPageProperties['sWebsite'] = $aMatch[1];
if (strpos($aPageProperties['sWebsite'],':/'.'/') === FALSE)
{
$aPageProperties['sWebsite'] = 'http:/'.'/'.$aPageProperties['sWebsite'];
}
}
}
if (!isset($aPageProperties['sTopLevelDomain']) && isset($aParams['cctld']) && $aParams['cctld'])
{
$aPageProperties['sTopLevelDomain'] = str_replace(array('[',']','.'),'', $aParams['cctld']);
}
if (!isset($aPageProperties['sInfoboxType']) && strtolower(substr($aTemplate[0],0,7)) == 'infobox')
{
$aPageProperties['sInfoboxType'] = trim(substr($aTemplate[0],8));
// $aPageProperties['aInfoboxParams'] = $aParams;
}
// Assume the first template with lots of params is the type (fallback for infobox)
if (!isset($aPageProperties['sPossibleInfoboxType']) && sizeof($aParams) > 10)
{
$aPageProperties['sPossibleInfoboxType'] = trim($aTemplate[0]);
// $aPageProperties['aInfoboxParams'] = $aParams;
}
// do we have a lat/lon
if (!isset($aPageProperties['fLat']))
{
if (isset($aParams['latd']) && isset($aParams['longd']))
{
$aPageProperties['fLat'] = degreesAndMinutesToDecimal($aParams['latd'], @$aParams['latm'], @$aParams['lats'], @$aParams['latNS']);
$aPageProperties['fLon'] = degreesAndMinutesToDecimal($aParams['longd'], @$aParams['longm'], @$aParams['longs'], @$aParams['longEW']);
}
if (isset($aParams['lat_degrees']) && isset($aParams['long_degrees']))
{
$aPageProperties['fLat'] = degreesAndMinutesToDecimal($aParams['lat_degrees'], @$aParams['lat_minutes'], @$aParams['lat_seconds'], @$aParams['lat_direction']);
$aPageProperties['fLon'] = degreesAndMinutesToDecimal($aParams['long_degrees'], @$aParams['long_minutes'], @$aParams['long_seconds'], @$aParams['long_direction']);
}
if (isset($aParams['latitude']) && isset($aParams['longitude']))
{
if (preg_match('#[0-9.]+#', $aParams['latitude']) && preg_match('#[0-9.]+#', $aParams['longitude']))
{
$aPageProperties['fLat'] = (float)$aParams['latitude'];
$aPageProperties['fLon'] = (float)$aParams['longitude'];
}
}
if (strtolower($aTemplate[0]) == 'coord')
{
if (isset($aParams[3]) && (strtoupper($aParams[3]) == 'N' || strtoupper($aParams[3]) == 'S'))
{
$aPageProperties['fLat'] = degreesAndMinutesToDecimal($aParams[0], $aParams[1], $aParams[2], $aParams[3]);
$aPageProperties['fLon'] = degreesAndMinutesToDecimal($aParams[4], $aParams[5], $aParams[6], $aParams[7]);
}
elseif (isset($aParams[0]) && isset($aParams[1]) && isset($aParams[2]) && (strtoupper($aParams[2]) == 'N' || strtoupper($aParams[2]) == 'S'))
{
$aPageProperties['fLat'] = degreesAndMinutesToDecimal($aParams[0], $aParams[1], 0, $aParams[2]);
$aPageProperties['fLon'] = degreesAndMinutesToDecimal($aParams[3], $aParams[4], 0, $aParams[5]);
}
else if (isset($aParams[0]) && isset($aParams[1]) && (strtoupper($aParams[1]) == 'N' || strtoupper($aParams[1]) == 'S'))
{
$aPageProperties['fLat'] = (strtoupper($aParams[1]) == 'N'?1:-1) * (float)$aParams[0];
$aPageProperties['fLon'] = (strtoupper($aParams[3]) == 'E'?1:-1) * (float)$aParams[2];
}
else if (isset($aParams[0]) && is_numeric($aParams[0]) && isset($aParams[1]) && is_numeric($aParams[1]))
{
$aPageProperties['fLat'] = (float)$aParams[0];
$aPageProperties['fLon'] = (float)$aParams[1];
}
}
if (isset($aParams['Latitude']) && isset($aParams['Longitude']))
{
$aParams['Latitude'] = str_replace('&nbsp;',' ',$aParams['Latitude']);
$aParams['Longitude'] = str_replace('&nbsp;',' ',$aParams['Longitude']);
if (preg_match('#^([0-9]+)°(([0-9]+))? ([NS]) to ([0-9]+)°(([0-9]+))? ([NS])#', $aParams['Latitude'], $aMatch))
{
$aPageProperties['fLat'] =
(degreesAndMinutesToDecimal($aMatch[1], $aMatch[3], 0, $aMatch[4])
+degreesAndMinutesToDecimal($aMatch[5], $aMatch[7], 0, $aMatch[8])) / 2;
}
else if (preg_match('#^([0-9]+)°(([0-9]+))? ([NS])#', $aParams['Latitude'], $aMatch))
{
$aPageProperties['fLat'] = degreesAndMinutesToDecimal($aMatch[1], $aMatch[3], 0, $aMatch[4]);
}
if (preg_match('#^([0-9]+)°(([0-9]+))? ([EW]) to ([0-9]+)°(([0-9]+))? ([EW])#', $aParams['Longitude'], $aMatch))
{
$aPageProperties['fLon'] =
(degreesAndMinutesToDecimal($aMatch[1], $aMatch[3], 0, $aMatch[4])
+degreesAndMinutesToDecimal($aMatch[5], $aMatch[7], 0, $aMatch[8])) / 2;
}
else if (preg_match('#^([0-9]+)°(([0-9]+))? ([EW])#', $aParams['Longitude'], $aMatch))
{
$aPageProperties['fLon'] = degreesAndMinutesToDecimal($aMatch[1], $aMatch[3], 0, $aMatch[4]);
}
}
}
}
if (isset($aPageProperties['sPossibleInfoboxType']))
{
if (!isset($aPageProperties['sInfoboxType'])) $aPageProperties['sInfoboxType'] = '#'.$aPageProperties['sPossibleInfoboxType'];
unset($aPageProperties['sPossibleInfoboxType']);
}
return $aPageProperties;
}
if (isset($aCMDResult['parse-wikipedia']))
{
$oDB =& getDB();
$aArticleNames = $oDB->getCol('select page_title from content where page_namespace = 0 and page_id %10 = '.$aCMDResult['parse-wikipedia'].' and (page_content ilike \'%{{Coord%\' or (page_content ilike \'%lat%\' and page_content ilike \'%lon%\'))');
// $aArticleNames = $oDB->getCol($sSQL = 'select page_title from content where page_namespace = 0 and (page_content ilike \'%{{Coord%\' or (page_content ilike \'%lat%\' and page_content ilike \'%lon%\')) and page_title in (\'Virginia\')');
foreach($aArticleNames as $sArticleName)
{
$sPageText = $oDB->getOne('select page_content from content where page_namespace = 0 and page_title = \''.pg_escape_string($sArticleName).'\'');
$aP = _templatesToProperties(_parseWikipediaContent($sPageText));
if (isset($aP['sInfoboxType']))
{
$aP['sInfoboxType'] = preg_replace('#\\s+#',' ',$aP['sInfoboxType']);
$sSQL = 'update wikipedia_article set ';
$sSQL .= 'infobox_type = \''.pg_escape_string($aP['sInfoboxType']).'\'';
$sSQL .= ' where language = \'en\' and title = \''.pg_escape_string($sArticleName).'\';';
$oDB->query($sSQL);
}
if (isset($aP['iPopulation']))
{
$sSQL = 'update wikipedia_article set ';
$sSQL .= 'population = \''.pg_escape_string($aP['iPopulation']).'\'';
$sSQL .= ' where language = \'en\' and title = \''.pg_escape_string($sArticleName).'\';';
$oDB->query($sSQL);
}
if (isset($aP['sWebsite']))
{
$sSQL = 'update wikipedia_article set ';
$sSQL .= 'website = \''.pg_escape_string($aP['sWebsite']).'\'';
$sSQL .= ' where language = \'en\' and title = \''.pg_escape_string($sArticleName).'\';';
$oDB->query($sSQL);
}
if (isset($aP['fLat']) && ($aP['fLat']!='-0' || $aP['fLon']!='-0'))
{
if (!isset($aP['sInfoboxType'])) $aP['sInfoboxType'] = '';
echo $sArticleName.'|'.$aP['sInfoboxType'].'|'.$aP['fLat'].'|'.$aP['fLon'] ."\n";
$sSQL = 'update wikipedia_article set ';
$sSQL .= 'lat = \''.pg_escape_string($aP['fLat']).'\',';
$sSQL .= 'lon = \''.pg_escape_string($aP['fLon']).'\'';
$sSQL .= ' where language = \'en\' and title = \''.pg_escape_string($sArticleName).'\';';
$oDB->query($sSQL);
}
}
}
function nominatimXMLStart($hParser, $sName, $aAttr)
{
global $aNominatRecords;
switch($sName)
{
case 'PLACE':
$aNominatRecords[] = $aAttr;
break;
}
}
function nominatimXMLEnd($hParser, $sName)
{
}
if (isset($aCMDResult['link']))
{
$oDB =& getDB();
$aWikiArticles = $oDB->getAll("select * from wikipedia_article where language = 'en' and lat is not null and osm_type is null and totalcount < 31 order by importance desc limit 200000");
// If you point this script at production OSM you will be blocked
$sNominatimBaseURL = 'http://SERVERNAME/search.php';
foreach($aWikiArticles as $aRecord)
{
$aRecord['name'] = str_replace('_',' ',$aRecord['title']);
$sURL = $sNominatimBaseURL.'?format=xml&accept-language=en';
echo "\n-- ".$aRecord['name'].", ".$aRecord['infobox_type']."\n";
$fMaxDist = 0.0000001;
$bUnknown = false;
switch(strtolower($aRecord['infobox_type']))
{
case 'former country':
continue 2;
case 'sea':
$fMaxDist = 60; // effectively turn it off
$sURL .= "&viewbox=".($aRecord['lon']-$fMaxDist).",".($aRecord['lat']+$fMaxDist).",".($aRecord['lon']+$fMaxDist).",".($aRecord['lat']-$fMaxDist);
break;
case 'country':
case 'island':
case 'islands':
case 'continent':
$fMaxDist = 60; // effectively turn it off
$sURL .= "&featuretype=country";
$sURL .= "&viewbox=".($aRecord['lon']-$fMaxDist).",".($aRecord['lat']+$fMaxDist).",".($aRecord['lon']+$fMaxDist).",".($aRecord['lat']-$fMaxDist);
break;
case 'prefecture japan':
$aRecord['name'] = trim(str_replace(' Prefecture',' ', $aRecord['name']));
case 'state':
case '#us state':
case 'county':
case 'u.s. state':
case 'u.s. state symbols':
case 'german state':
case 'province or territory of canada':
case 'indian jurisdiction':
case 'province':
case 'french region':
case 'region of italy':
case 'kommune':
case '#australia state or territory':
case 'russian federal subject':
$fMaxDist = 4;
$sURL .= "&featuretype=state";
$sURL .= "&viewbox=".($aRecord['lon']-$fMaxDist).",".($aRecord['lat']+$fMaxDist).",".($aRecord['lon']+$fMaxDist).",".($aRecord['lat']-$fMaxDist);
break;
case 'protected area':
$fMaxDist = 1;
$sURL .= "&nearlat=".$aRecord['lat'];
$sURL .= "&nearlon=".$aRecord['lon'];
$sURL .= "&viewbox=".($aRecord['lon']-$fMaxDist).",".($aRecord['lat']+$fMaxDist).",".($aRecord['lon']+$fMaxDist).",".($aRecord['lat']-$fMaxDist);
break;
case 'settlement':
$bUnknown = true;
case 'french commune':
case 'italian comune':
case 'uk place':
case 'australian place':
case 'german place':
case '#geobox':
case 'u.s. county':
case 'municipality':
case 'city japan':
case 'russian inhabited locality':
case 'finnish municipality/land area':
case 'england county':
case 'israel municipality':
case 'russian city':
case 'city':
$fMaxDist = 0.2;
$sURL .= "&featuretype=settlement";
$sURL .= "&viewbox=".($aRecord['lon']-0.5).",".($aRecord['lat']+0.5).",".($aRecord['lon']+0.5).",".($aRecord['lat']-0.5);
break;
case 'mountain':
case 'mountain pass':
case 'river':
case 'lake':
case 'airport':
$fMaxDist = 0.2;
$sURL .= "&viewbox=".($aRecord['lon']-0.5).",".($aRecord['lat']+0.5).",".($aRecord['lon']+0.5).",".($aRecord['lat']-0.5);
case 'ship begin':
$fMaxDist = 0.1;
$aTypes = array('wreck');
$sURL .= "&viewbox=".($aRecord['lon']-0.01).",".($aRecord['lat']+0.01).",".($aRecord['lon']+0.01).",".($aRecord['lat']-0.01);
$sURL .= "&nearlat=".$aRecord['lat'];
$sURL .= "&nearlon=".$aRecord['lon'];
break;
case 'road':
case 'university':
case 'company':
case 'department':
$fMaxDist = 0.005;
$sURL .= "&viewbox=".($aRecord['lon']-0.01).",".($aRecord['lat']+0.01).",".($aRecord['lon']+0.01).",".($aRecord['lat']-0.01);
$sURL .= "&bounded=1";
$sURL .= "&nearlat=".$aRecord['lat'];
$sURL .= "&nearlon=".$aRecord['lon'];
break;
default:
$bUnknown = true;
$fMaxDist = 0.005;
$sURL .= "&viewbox=".($aRecord['lon']-0.01).",".($aRecord['lat']+0.01).",".($aRecord['lon']+0.01).",".($aRecord['lat']-0.01);
// $sURL .= "&bounded=1";
$sURL .= "&nearlat=".$aRecord['lat'];
$sURL .= "&nearlon=".$aRecord['lon'];
echo "-- Unknown: ".$aRecord['infobox_type']."\n";
break;
}
$sNameURL = $sURL.'&q='.urlencode($aRecord['name']);
var_dump($sNameURL);
$sXML = file_get_contents($sNameURL);
$aNominatRecords = array();
$hXMLParser = xml_parser_create();
xml_set_element_handler($hXMLParser, 'nominatimXMLStart', 'nominatimXMLEnd');
xml_parse($hXMLParser, $sXML, true);
xml_parser_free($hXMLParser);
if (!isset($aNominatRecords[0]))
{
$aNameParts = preg_split('#[(,]#',$aRecord['name']);
if (sizeof($aNameParts) > 1)
{
$sNameURL = $sURL.'&q='.urlencode(trim($aNameParts[0]));
var_dump($sNameURL);
$sXML = file_get_contents($sNameURL);
$aNominatRecords = array();
$hXMLParser = xml_parser_create();
xml_set_element_handler($hXMLParser, 'nominatimXMLStart', 'nominatimXMLEnd');
xml_parse($hXMLParser, $sXML, true);
xml_parser_free($hXMLParser);
}
}
// assume first is best/right
for($i = 0; $i < sizeof($aNominatRecords); $i++)
{
$fDiff = ($aRecord['lat']-$aNominatRecords[$i]['LAT']) * ($aRecord['lat']-$aNominatRecords[$i]['LAT']);
$fDiff += ($aRecord['lon']-$aNominatRecords[$i]['LON']) * ($aRecord['lon']-$aNominatRecords[$i]['LON']);
$fDiff = sqrt($fDiff);
if ($bUnknown) {
// If it was an unknown type base it on the rank of the found result
$iRank = (int)$aNominatRecords[$i]['PLACE_RANK'];
if ($iRank <= 4) $fMaxDist = 2;
elseif ($iRank <= 8) $fMaxDist = 1;
elseif ($iRank <= 10) $fMaxDist = 0.8;
elseif ($iRank <= 12) $fMaxDist = 0.6;
elseif ($iRank <= 17) $fMaxDist = 0.2;
elseif ($iRank <= 18) $fMaxDist = 0.1;
elseif ($iRank <= 22) $fMaxDist = 0.02;
elseif ($iRank <= 26) $fMaxDist = 0.001;
else $fMaxDist = 0.001;
}
echo "-- FOUND \"".substr($aNominatRecords[$i]['DISPLAY_NAME'],0,50)."\", ".$aNominatRecords[$i]['CLASS'].", ".$aNominatRecords[$i]['TYPE'].", ".$aNominatRecords[$i]['PLACE_RANK'].", ".$aNominatRecords[$i]['OSM_TYPE']." (dist:$fDiff, max:$fMaxDist)\n";
if ($fDiff > $fMaxDist)
{
echo "-- Diff too big $fDiff (max: $fMaxDist)".$aRecord['lat'].','.$aNominatRecords[$i]['LAT'].' & '.$aRecord['lon'].','.$aNominatRecords[$i]['LON']." \n";
}
else
{
$sSQL = "update wikipedia_article set osm_type=";
switch($aNominatRecords[$i]['OSM_TYPE'])
{
case 'relation': $sSQL .= "'R'"; break;
case 'way': $sSQL .= "'W'"; break;
case 'node': $sSQL .= "'N'"; break;
}
$sSQL .= ", osm_id=".$aNominatRecords[$i]['OSM_ID']." where language = '".pg_escape_string($aRecord['language'])."' and title = '".pg_escape_string($aRecord['title'])."'";
$oDB->query($sSQL);
break;
}
}
}
}

@@ -1,53 +0,0 @@
#!/bin/bash
psqlcmd="psql wikipedia2013"
mysql2pgsqlcmd="./mysql2pgsql.perl /dev/stdin /dev/stdout"
language=( "ar" "bg" "ca" "cs" "da" "de" "en" "es" "eo" "eu" "fa" "fr" "ko" "hi" "hr" "id" "it" "he" "lt" "hu" "ms" "nl" "ja" "no" "pl" "pt" "kk" "ro" "ru" "sk" "sl" "sr" "fi" "sv" "tr" "uk" "vi" "vo" "war" "zh" )
# wikipedia pages and links
echo "CREATE TABLE linkcounts (language text, title text, count integer, sumcount integer, lat double, lon double );" | $psqlcmd
echo "CREATE TABLE wikipedia_redirect (language text, from_title text, to_title text );" | $psqlcmd
for i in "${language[@]}"
do
wget http://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-page.sql.gz
wget http://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-pagelinks.sql.gz
wget http://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-langlinks.sql.gz
wget http://dumps.wikimedia.org/${i}wiki/latest/${i}wiki-latest-redirect.sql.gz
done
for i in "${language[@]}"
do
gzip -dc ${i}wiki-latest-pagelinks.sql.gz | sed "s/\`pagelinks\`/\`${i}pagelinks\`/g" | $mysql2pgsqlcmd | $psqlcmd
gzip -dc ${i}wiki-latest-page.sql.gz | sed "s/\`page\`/\`${i}page\`/g" | $mysql2pgsqlcmd | $psqlcmd
gzip -dc ${i}wiki-latest-langlinks.sql.gz | sed "s/\`langlinks\`/\`${i}langlinks\`/g" | $mysql2pgsqlcmd | $psqlcmd
gzip -dc ${i}wiki-latest-redirect.sql.gz | sed "s/\`redirect\`/\`${i}redirect\`/g" | $mysql2pgsqlcmd | $psqlcmd
done
for i in "${language[@]}"
do
echo "create table ${i}pagelinkcount as select pl_title as title,count(*) as count from ${i}pagelinks where pl_namespace = 0 group by pl_title;" | $psqlcmd
echo "insert into linkcounts select '${i}',pl_title,count(*) from ${i}pagelinks where pl_namespace = 0 group by pl_title;" | $psqlcmd
echo "insert into wikipedia_redirect select '${i}',page_title,rd_title from ${i}redirect join ${i}page on (rd_from = page_id) where page_namespace = 0 and rd_namespace = 0;" | $psqlcmd
echo "alter table ${i}pagelinkcount add column othercount integer;" | $psqlcmd
echo "update ${i}pagelinkcount set othercount = 0;" | $psqlcmd
for j in "${language[@]}"
do
echo "update ${i}pagelinkcount set othercount = ${i}pagelinkcount.othercount + x.count from (select page_title as title,count from ${i}langlinks join ${i}page on (ll_from = page_id) join ${j}pagelinkcount on (ll_lang = '${j}' and ll_title = title)) as x where x.title = ${i}pagelinkcount.title;" | $psqlcmd
done
echo "insert into wikipedia_article select '${i}', title, count, othercount, count+othercount from ${i}pagelinkcount;" | $psqlcmd
done
echo "update wikipedia_article set importance = log(totalcount)/log((select max(totalcount) from wikipedia_article))" | $psqlcmd
# precalculated lat,lon from dbpedia
wget http://downloads.dbpedia.org/current/en/geo_coordinates_en.nq.bz2
bzip2 -dc geo_coordinates_en.nq.bz2 | grep http://www.georss.org/georss/point | sed 's|<http://dbpedia.org/resource/[^>]*> *<http://www.georss.org/georss/point> "\(-\?[-0-9.E]\+\) \(-\?[-0-9.E]\+\)"@en <http://\([a-z][a-z]\).wikipedia.org/wiki/\([^#]\+\)#> .|update pagelinks set lat=\1, lon=\2 where language = '"'"'\3'"'"' and title = decode_url_part('"'"'\4'"'"');|g' | $psqlcmd
# media wiki dumper
wget https://github.com/bcollier/mwdumper/blob/master/build/mwdumper.jar
# latest english wikipedia articles
wget http://dumps.wikimedia.org/enwiki/latest/enwiki-latest-pages-articles.xml.bz2
java -jar mwdumper.jar --format=sql:1.5 enwiki-latest-pages-articles.xml.bz2 | ./mysql2pgsql.perl /dev/stdin /dev/stdout | sed 's/"text (/text ("/g' | sed 's/"old_flags)"/"old_flags")/g' | sed 's/"revision (/revision ("/g' | sed 's/"rev_deleted)"/"rev_deleted")/g' | sed 's/"page (/page ("/g' | sed 's/"page_len)"/"page_len")/g' | sed "s/DATE_ADD(E'1970-01-01', INTERVAL UNIX_TIMESTAMP() SECOND)[+]//g" | sed 's/RAND()/0/g' | $psqlcmd

@@ -11,7 +11,7 @@
array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
array('parse-tiger', '', 0, 1, 1, 1, 'realpath', 'Convert tiger edge files to nominatim sql import'),
array('parse-tiger-2011', '', 0, 1, 1, 1, 'realpath', 'Convert tiger edge files to nominatim sql import - datafiles from 2011 or later (source: edges directory of tiger data)'),
array('parse-tiger-2011', '', 0, 1, 1, 1, 'realpath', 'Convert tiger edge files to nominatim sql import (source: edges directory of tiger data)'),
);
getCmdOpt($_SERVER['argv'], $aCMDOptions, $aCMDResult, true, true);
@@ -98,7 +98,7 @@
$bDidSomething = true;
foreach(glob($aCMDResult['parse-tiger-2011'].'/tl_20??_?????_edges.zip', 0) as $sImportFile)
foreach(glob($aCMDResult['parse-tiger-2011'].'/tl_2011_?????_edges.zip', 0) as $sImportFile)
{
set_time_limit(30);
preg_match('#([0-9]{5})_(.*)#',basename($sImportFile), $aMatch);

@@ -1,55 +0,0 @@
#!/usr/bin/php -Cq
<?php
require_once(dirname(dirname(__FILE__)).'/lib/init-cmd.php');
require_once(CONST_BasePath.'/lib/Geocode.php');
ini_set('memory_limit', '800M');
$aCMDOptions = array(
"Query database from command line. Returns search result as JSON.",
array('help', 'h', 0, 1, 0, 0, false, 'Show Help'),
array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
array('search', '', 0, 1, 1, 1, 'string', 'Search for given term or coordinate'),
array('accept-language', '', 0, 1, 1, 1, 'string', 'Preferred language order for showing search results'),
array('bounded', '', 0, 1, 0, 0, 'bool', 'Restrict results to given viewbox'),
array('nodedupe', '', 0, 1, 0, 0, 'bool', 'Do not remove duplicate results'),
array('limit', '', 0, 1, 1, 1, 'int', 'Maximum number of results returned (default: 10)'),
array('exclude_place_ids', '', 0, 1, 1, 1, 'string', 'Comma-separated list of place ids to exclude from results'),
array('featureType', '', 0, 1, 1, 1, 'string', 'Restrict results to certain features (country, state, city, settlement)'),
array('countrycodes', '', 0, 1, 1, 1, 'string', 'Comma-separated list of countries to restrict search to'),
array('viewbox', '', 0, 1, 1, 1, 'string', 'Prefer results in given view box')
);
getCmdOpt($_SERVER['argv'], $aCMDOptions, $aCMDResult, true, true);
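// Illustrative invocation (assumption, not in the original):
//   ./utils/query.php --search "Berlin" --accept-language de --limit 5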
$oDB =& getDB();
if (isset($aCMDResult['search']) && $aCMDResult['search'])
{
if (isset($aCMDResult['bounded'])) $aCMDResult['bounded'] = 'true';
if (isset($aCMDResult['nodedupe'])) $aCMDResult['dedupe'] = 'false';
$oGeocode =& new Geocode($oDB);
if (isset($aCMDResult['accept-language']) && $aCMDResult['accept-language'])
$oGeocode->setLanguagePreference(getPreferredLanguages($aCMDResult['accept-language']));
else
$oGeocode->setLanguagePreference(getPreferredLanguages());
$oGeocode->loadParamArray($aCMDResult);
$oGeocode->setQuery($aCMDResult['search']);
$aSearchResults = $oGeocode->lookup();
if (version_compare(phpversion(), "5.4.0", '<'))
echo json_encode($aSearchResults);
else
echo json_encode($aSearchResults, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES | JSON_UNESCAPED_UNICODE)."\n";
}
else
{
showUsage($aCMDOptions, true);
}

@@ -1,78 +0,0 @@
#!/usr/bin/php -Cq
<?php
// Apache log file
$sFile = "sample.log.txt";
$sHost1 = 'http://mq-open-search-lm02.ihost.aol.com:8000/nominatim/v1';
$sHost2 = 'http://mq-open-search-lm03.ihost.aol.com:8000/nominatim/v1';
$sHost1Escaped = str_replace('/', '\\/', $sHost1);
$sHost2Escaped = str_replace('/', '\\/', $sHost2);
$aToDo = array(251, 293, 328, 399.1, 455.1, 479, 496, 499, 574, 609, 702, 790, 846, 865, 878, 894, 902, 961, 980);
$hFile = @fopen($sFile, "r");
if (!$hFile)
{
echo "Unable to open file: $sFile\n";
exit;
}
$i = 0;
while (($sLine = fgets($hFile, 10000)) !== false)
{
$i++;
if (!in_array($i, $aToDo)) continue;
if (preg_match('#"GET (.*) HTTP/1.[01]"#', $sLine, $aResult))
{
$sURL1 = $sHost1.$aResult[1];
$sURL2 = $sHost2.$aResult[1];
$sRes1 = '';
$k = 0;
while(!$sRes1 && $k < 10)
{
$sRes1 = file_get_contents($sURL1);
$k++;
if (!$sRes1) sleep(10);
}
$sRes2 = file_get_contents($sURL2);
// Strip out the things that will always change
$sRes1 = preg_replace('# timestamp=\'[^\']*\'#', '', $sRes1);
$sRes1 = str_replace($sHost1, '', $sRes1);
$sRes1 = str_replace($sHost1Escaped, '', $sRes1);
$sRes2 = preg_replace('# timestamp=\'[^\']*\'#', '', $sRes2);
$sRes2 = str_replace($sHost2, '', $sRes2);
$sRes2 = str_replace($sHost2Escaped, '', $sRes2);
if ($sRes1 != $sRes2)
{
echo "$i:\n";
var_dump($sURL1, $sURL2);
$sRes = $sURL1.":\n";
for ($j = 0; $j < strlen($sRes1); $j+=40)
{
$sRes .= substr($sRes1, $j, 40)."\n";
}
file_put_contents('log/'.$i.'.1', $sRes);
$sRes = $sURL2.":\n";
for ($j = 0; $j < strlen($sRes2); $j+=40)
{
$sRes .= substr($sRes2, $j, 40)."\n";
}
file_put_contents('log/'.$i.'.2', $sRes);
}
echo ".\n";
}
else
{
var_dump($sLine);
}
}
fclose($hFile);

Some files were not shown because too many files have changed in this diff.