forked from hans/Nominatim
Merge remote-tracking branch 'origin/master' into osmline
Conflicts:
	lib/Geocode.php
	lib/PlaceLookup.php
	sql/functions.sql
	sql/tables.sql
	utils/setup.php
2930 data/us_state.sql
File diff suppressed because one or more lines are too long
132 lib/Geocode.php
@@ -228,11 +228,9 @@
function loadParamArray($aParams)
{
    if (isset($aParams['addressdetails'])) $this->bIncludeAddressDetails = (bool)$aParams['addressdetails'];
    if ((float) CONST_Postgresql_Version > 9.2)
    {
        if (isset($aParams['extratags'])) $this->bIncludeExtraTags = (bool)$aParams['extratags'];
        if (isset($aParams['namedetails'])) $this->bIncludeNameDetails = (bool)$aParams['namedetails'];
    }
    if (isset($aParams['extratags'])) $this->bIncludeExtraTags = (bool)$aParams['extratags'];
    if (isset($aParams['namedetails'])) $this->bIncludeNameDetails = (bool)$aParams['namedetails'];

    if (isset($aParams['bounded'])) $this->bBoundedSearch = (bool)$aParams['bounded'];
    if (isset($aParams['dedupe'])) $this->bDeDupe = (bool)$aParams['dedupe'];
@@ -433,18 +431,41 @@
|
||||
|
||||
if (30 >= $this->iMinAddressRank && 30 <= $this->iMaxAddressRank)
|
||||
{
|
||||
//query also location_property_osmline and location_property_tiger and location_property_aux
|
||||
//Tiger search only if a housenumber was searched and if it was found (i.e. aPlaceIDs[placeID] = housenumber != -1) (realized through a join)
|
||||
//only Tiger housenumbers need to be interpolated, because they are saved as lines with start- and endnumber, the common osm housenumbers are usually saved as points
|
||||
$sHousenumbers = "";
|
||||
$i = 0;
|
||||
$length = count($aPlaceIDs);
|
||||
foreach($aPlaceIDs as $placeID => $housenumber)
|
||||
if (CONST_Use_US_Tiger_Data)
|
||||
{
|
||||
$i++;
|
||||
$sHousenumbers .= "(".$placeID.", ".$housenumber.")";
|
||||
if($i<$length)
|
||||
$sHousenumbers .= ", ";
|
||||
//query also location_property_tiger and location_property_aux
|
||||
//Tiger search only if a housenumber was searched and if it was found (i.e. aPlaceIDs[placeID] = housenumber != -1) (realized through a join)
|
||||
//only Tiger housenumbers need to be interpolated, because they are saved as lines with start- and endnumber, the common osm housenumbers are usually saved as points
|
||||
$sHousenumbers = "";
|
||||
$i = 0;
|
||||
$length = count($aPlaceIDs);
|
||||
foreach($aPlaceIDs as $placeID => $housenumber)
|
||||
{
|
||||
$i++;
|
||||
$sHousenumbers .= "(".$placeID.", ".$housenumber.")";
|
||||
if($i<$length)
|
||||
$sHousenumbers .= ", ";
|
||||
}
|
||||
|
||||
$sSQL .= "union ";
|
||||
$sSQL .= "select 'T' as osm_type, place_id as osm_id, 'place' as class, 'house' as type, null as admin_level, 30 as rank_search, 30 as rank_address, min(place_id) as place_id, min(parent_place_id) as parent_place_id, 'us' as country_code";
|
||||
$sSQL .= ", get_address_by_language(place_id, housenumber_for_place, $sLanguagePrefArraySQL) as langaddress ";
|
||||
$sSQL .= ", null as placename";
|
||||
$sSQL .= ", null as ref";
|
||||
if ($this->bIncludeExtraTags) $sSQL .= ", null as extra";
|
||||
if ($this->bIncludeNameDetails) $sSQL .= ", null as names";
|
||||
$sSQL .= ", avg(st_x(centroid)) as lon, avg(st_y(centroid)) as lat,";
|
||||
$sSQL .= $sImportanceSQL."-1.15 as importance ";
|
||||
$sSQL .= ", (select max(p.importance*(p.rank_address+2)) from place_addressline s, placex p where s.place_id = min(blub.parent_place_id) and p.place_id = s.address_place_id and s.isaddress and p.importance is not null) as addressimportance ";
|
||||
$sSQL .= ", null as extra_place ";
|
||||
$sSQL .= " from (select place_id";
|
||||
//interpolate the Tiger housenumbers here
|
||||
$sSQL .= ", ST_LineInterpolatePoint(linegeo, (housenumber_for_place-startnumber::float)/(endnumber-startnumber)::float) as centroid, parent_place_id, housenumber_for_place ";
|
||||
$sSQL .= "from (location_property_tiger ";
|
||||
$sSQL .= " join (values ".$sHousenumbers.") as housenumbers(place_id, housenumber_for_place) using(place_id)) ";
|
||||
$sSQL .= " where housenumber_for_place>=0 and 30 between $this->iMinAddressRank and $this->iMaxAddressRank) as blub"; //postgres wants an alias here
|
||||
$sSQL .= " group by place_id, housenumber_for_place"; //is this group by really needed?, place_id + housenumber (in combination) are unique
|
||||
if (!$this->bDeDupe) $sSQL .= ", place_id ";
|
||||
}
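The comments above explain the why: a Tiger record only stores a start and an end number along linegeo, so the centroid for a concrete house number is taken at the matching fraction of that line, with the line's midpoint as the fallback when start and end coincide. A minimal PHP sketch of the fraction the SQL computes (the function name is illustrative, not an actual helper in this codebase):

    function tigerInterpolationFraction($iHousenumber, $iStartNumber, $iEndNumber)
    {
        // Degenerate line: start and end are the same number, take the middle.
        if ($iEndNumber == $iStartNumber) return 0.5;
        $fFraction = ($iHousenumber - $iStartNumber) / ($iEndNumber - $iStartNumber);
        // Keep within ST_LineInterpolatePoint's 0..1 domain.
        return max(0.0, min(1.0, $fFraction));
    }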
|
||||
// osmline, osm_type is 'I' for Interpolation Line
|
||||
$sSQL .= " union ";
|
||||
@@ -469,46 +490,25 @@
|
||||
$sSQL .= " group by place_id, housenumber_for_place, calculated_country_code "; //is this group by really needed?, place_id + housenumber (in combination) are unique
|
||||
if (!$this->bDeDupe) $sSQL .= ", place_id ";
|
||||
|
||||
// tiger
|
||||
$sSQL .= " union ";
|
||||
$sSQL .= "select 'T' as osm_type, place_id as osm_id, 'place' as class, 'house' as type, null as admin_level, 30 as rank_search, 30 as rank_address, min(place_id) as place_id, min(parent_place_id) as parent_place_id, 'us' as country_code";
|
||||
$sSQL .= ", get_address_by_language(place_id, housenumber_for_place, $sLanguagePrefArraySQL) as langaddress ";
|
||||
$sSQL .= ", null as placename";
|
||||
$sSQL .= ", null as ref";
|
||||
if ($this->bIncludeExtraTags) $sSQL .= ", null as extra";
|
||||
if ($this->bIncludeNameDetails) $sSQL .= ", null as names";
|
||||
$sSQL .= ", avg(st_x(centroid)) as lon, avg(st_y(centroid)) as lat,";
|
||||
$sSQL .= $sImportanceSQL."-1.15 as importance ";
|
||||
$sSQL .= ", (select max(p.importance*(p.rank_address+2)) from place_addressline s, placex p where s.place_id = min(blub.parent_place_id) and p.place_id = s.address_place_id and s.isaddress and p.importance is not null) as addressimportance ";
|
||||
$sSQL .= ", null as extra_place ";
|
||||
$sSQL .= " from (select place_id";
|
||||
//interpolate the Tiger housenumbers here
|
||||
$sSQL .= ", CASE WHEN startnumber != endnumber THEN ST_LineInterpolatePoint(linegeo, (housenumber_for_place-startnumber::float)/(endnumber-startnumber)::float) ";
|
||||
$sSQL .= " ELSE ST_LineInterpolatePoint(linegeo, 0.5) END as centroid";
|
||||
$sSQL .= ", parent_place_id, housenumber_for_place ";
|
||||
$sSQL .= " from (location_property_tiger ";
|
||||
$sSQL .= " join (values ".$sHousenumbers.") as housenumbers(place_id, housenumber_for_place) using(place_id)) ";
|
||||
$sSQL .= " where housenumber_for_place>=0 and 30 between $this->iMinAddressRank and $this->iMaxAddressRank) as blub"; //postgres wants an alias here
|
||||
$sSQL .= " group by place_id, housenumber_for_place"; //is this group by really needed?, place_id + housenumber (in combination) are unique
|
||||
if (!$this->bDeDupe) $sSQL .= ", place_id ";
|
||||
|
||||
// aux
|
||||
$sSQL .= " union ";
|
||||
$sSQL .= "select 'L' as osm_type, place_id as osm_id, 'place' as class, 'house' as type, null as admin_level, 0 as rank_search, 0 as rank_address, min(place_id) as place_id, min(parent_place_id) as parent_place_id, 'us' as country_code, ";
|
||||
$sSQL .= "get_address_by_language(place_id, -1, $sLanguagePrefArraySQL) as langaddress, ";
|
||||
$sSQL .= "null as placename, ";
|
||||
$sSQL .= "null as ref, ";
|
||||
if ($this->bIncludeExtraTags) $sSQL .= "null as extra, ";
|
||||
if ($this->bIncludeNameDetails) $sSQL .= "null as names, ";
|
||||
$sSQL .= "avg(ST_X(centroid)) as lon, avg(ST_Y(centroid)) as lat, ";
|
||||
$sSQL .= $sImportanceSQL."-1.10 as importance, ";
|
||||
$sSQL .= "(select max(p.importance*(p.rank_address+2)) from place_addressline s, placex p where s.place_id = min(location_property_aux.parent_place_id) and p.place_id = s.address_place_id and s.isaddress and p.importance is not null) as addressimportance, ";
|
||||
$sSQL .= "null as extra_place ";
|
||||
$sSQL .= "from location_property_aux where place_id in ($sPlaceIDs) ";
|
||||
$sSQL .= "and 30 between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
$sSQL .= "group by place_id";
|
||||
if (!$this->bDeDupe) $sSQL .= ", place_id";
|
||||
$sSQL .= ", get_address_by_language(place_id, -1, $sLanguagePrefArraySQL) ";
|
||||
if (CONST_Use_Aux_Location_data)
|
||||
{
|
||||
$sSQL .= " union ";
|
||||
$sSQL .= "select 'L' as osm_type, place_id as osm_id, 'place' as class, 'house' as type, null as admin_level, 0 as rank_search, 0 as rank_address, min(place_id) as place_id, min(parent_place_id) as parent_place_id, 'us' as country_code, ";
|
||||
$sSQL .= "get_address_by_language(place_id, -1, $sLanguagePrefArraySQL) as langaddress, ";
|
||||
$sSQL .= "null as placename, ";
|
||||
$sSQL .= "null as ref, ";
|
||||
if ($this->bIncludeExtraTags) $sSQL .= "null as extra, ";
|
||||
if ($this->bIncludeNameDetails) $sSQL .= "null as names, ";
|
||||
$sSQL .= "avg(ST_X(centroid)) as lon, avg(ST_Y(centroid)) as lat, ";
|
||||
$sSQL .= $sImportanceSQL."-1.10 as importance, ";
|
||||
$sSQL .= "(select max(p.importance*(p.rank_address+2)) from place_addressline s, placex p where s.place_id = min(location_property_aux.parent_place_id) and p.place_id = s.address_place_id and s.isaddress and p.importance is not null) as addressimportance, ";
|
||||
$sSQL .= "null as extra_place ";
|
||||
$sSQL .= "from location_property_aux where place_id in ($sPlaceIDs) ";
|
||||
$sSQL .= "and 30 between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
$sSQL .= "group by place_id";
|
||||
if (!$this->bDeDupe) $sSQL .= ", place_id";
|
||||
$sSQL .= ", get_address_by_language(place_id, -1, $sLanguagePrefArraySQL) ";
|
||||
}
|
||||
}
|
||||
|
||||
$sSQL .= " order by importance desc";
|
||||
@@ -1487,7 +1487,7 @@
|
||||
}
|
||||
|
||||
// If nothing found try the aux fallback table
|
||||
if (!sizeof($aPlaceIDs))
|
||||
if (CONST_Use_Aux_Location_data && !sizeof($aPlaceIDs))
|
||||
{
|
||||
$sSQL = "select place_id from location_property_aux where parent_place_id in (".$sPlaceIDs.") and housenumber = '".pg_escape_string($aSearch['sHouseNumber'])."'";
|
||||
if (sizeof($this->aExcludePlaceIDs))
|
||||
@@ -1498,8 +1498,10 @@
|
||||
if (CONST_Debug) var_dump($sSQL);
|
||||
$aPlaceIDs = $this->oDB->getCol($sSQL);
|
||||
}
|
||||
//if nothing found search in Tiger data for this housenumber(location_property_tiger)
|
||||
if (!sizeof($aPlaceIDs))
|
||||
|
||||
//if nothing was found in placex or location_property_aux, then search in Tiger data for this housenumber(location_property_tiger)
|
||||
$searchedHousenumber = intval($aSearch['sHouseNumber']);
|
||||
if (CONST_Use_US_Tiger_Data && !sizeof($aPlaceIDs))
|
||||
{
|
||||
//new query for lines, not housenumbers anymore
|
||||
if($searchedHousenumber%2 == 0){
|
||||
@@ -1672,11 +1674,13 @@
|
||||
$sSQL .= "and (placex.rank_address between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
if (14 >= $this->iMinAddressRank && 14 <= $this->iMaxAddressRank) $sSQL .= " OR (extratags->'place') = 'city'";
|
||||
if ($this->aAddressRankList) $sSQL .= " OR placex.rank_address in (".join(',',$this->aAddressRankList).")";
|
||||
$sSQL .= ") UNION select place_id from location_property_tiger where place_id in (".join(',',array_keys($aResultPlaceIDs)).") ";
|
||||
$sSQL .= "and (30 between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
if ($this->aAddressRankList) $sSQL .= " OR 30 in (".join(',',$this->aAddressRankList).")";
|
||||
$sSQL .= ") UNION select place_id from location_property_osmline where place_id in (".join(',',array_keys($aResultPlaceIDs)).") ";
|
||||
$sSQL .= "and (30 between $this->iMinAddressRank and $this->iMaxAddressRank)";
|
||||
if (CONST_Use_US_Tiger_Data)
|
||||
{
|
||||
$sSQL .= ") UNION select place_id from location_property_tiger where place_id in (".join(',',array_keys($aResultPlaceIDs)).") ";
|
||||
$sSQL .= "and (30 between $this->iMinAddressRank and $this->iMaxAddressRank ";
|
||||
if ($this->aAddressRankList) $sSQL .= " OR 30 in (".join(',',$this->aAddressRankList).")";
|
||||
}
|
||||
$sSQL .= ")";
|
||||
if (CONST_Debug) var_dump($sSQL);
|
||||
$aFilteredPlaceIDs = $this->oDB->getCol($sSQL);
|
||||
$tempIDs = array();
|
||||
|
||||
@@ -42,18 +42,12 @@
|
||||
|
||||
function setIncludeExtraTags($bExtraTags = false)
|
||||
{
|
||||
if ((float) CONST_Postgresql_Version > 9.2)
|
||||
{
|
||||
$this->bExtraTags = $bExtraTags;
|
||||
}
|
||||
$this->bExtraTags = $bExtraTags;
|
||||
}
|
||||
|
||||
function setIncludeNameDetails($bNameDetails = false)
|
||||
{
|
||||
if ((float) CONST_Postgresql_Version > 9.2)
|
||||
{
|
||||
$this->bNameDetails = $bNameDetails;
|
||||
}
|
||||
$this->bNameDetails = $bNameDetails;
|
||||
}
|
||||
|
||||
|
||||
@@ -128,7 +122,7 @@
|
||||
|
||||
$sLanguagePrefArraySQL = "ARRAY[".join(',',array_map("getDBQuoted", $this->aLangPrefOrder))."]";
|
||||
|
||||
if ($this->sType == 'tiger')
|
||||
if (CONST_Use_US_Tiger_Data && $this->sType == 'tiger')
|
||||
{
|
||||
$sSQL = "select place_id,partition, 'T' as osm_type, place_id as osm_id, 'place' as class, 'house' as type, null as admin_level, housenumber, null as street, null as isin, postcode,";
|
||||
$sSQL .= " 'us' as country_code, parent_place_id, null as linked_place_id, 30 as rank_address, 30 as rank_search,";
|
||||
@@ -193,7 +187,7 @@
|
||||
|
||||
if ($this->bAddressDetails)
|
||||
{
|
||||
if ($this->sType == 'tiger' || $this->sType == 'interpolation') // to get addressdetails for interpolation lines and tiger data, the housenumber is needed
|
||||
if(CONST_Use_US_Tiger_Data && $this->sType == 'tiger') // to get addressdetails for tiger data, the housenumber is needed
|
||||
$aAddress = $this->getAddressNames($aPlace['housenumber']);
|
||||
else
|
||||
$aAddress = $this->getAddressNames();
|
||||
|
||||
@@ -259,7 +259,7 @@
|
||||
}
|
||||
|
||||
// Only street found? If it's in the US we can check TIGER data for nearest housenumber
|
||||
if ($bIsInUnitedStates && $iMaxRank_orig >= 28 && $iPlaceID && ($aPlace['rank_search'] == 26 || $aPlace['rank_search'] == 27 ))
|
||||
if (CONST_Use_US_Tiger_Data && $bIsInUnitedStates && $iMaxRank_orig >= 28 && $iPlaceID && ($aPlace['rank_search'] == 26 || $aPlace['rank_search'] == 27 ))
|
||||
{
|
||||
$fSearchDiam = 0.001;
|
||||
$sSQL = 'SELECT place_id,parent_place_id,30 as rank_search, ST_line_locate_point(linegeo,'.$sPointSQL.') as fraction';
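The fraction from ST_line_locate_point is the position of the query point along the matched Tiger line; turning it back into a displayable house number is roughly the inverse of the interpolation used for search. A simplified sketch, assuming integer start and end numbers on the matched line (not the exact code used here):

    function housenumberFromFraction($fFraction, $iStartNumber, $iEndNumber)
    {
        // Nearest number at the projected position along the line.
        return (int) round($iStartNumber + $fFraction * ($iEndNumber - $iStartNumber));
    }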
|
||||
|
||||
13 lib/db.php
@@ -24,3 +24,16 @@
return "'".pg_escape_string($s)."'";
}

function getPostgresVersion(&$oDB)
{
    $sVersionString = $oDB->getOne('select version()');
    preg_match('#PostgreSQL ([0-9]+)[.]([0-9]+)[^0-9]#', $sVersionString, $aMatches);
    return (float) ($aMatches[1].'.'.$aMatches[2]);
}

function getPostgisVersion(&$oDB)
{
    $sVersionString = $oDB->getOne('select postgis_full_version()');
    preg_match('#POSTGIS="([0-9]+)[.]([0-9]+)[.]([0-9]+)( r([0-9]+))?"#', $sVersionString, $aMatches);
    return (float) ($aMatches[1].'.'.$aMatches[2]);
}
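The two helpers above return the server versions as plain floats, which later code compares against minimum requirements. A minimal usage sketch, assuming a connected PEAR DB handle $oDB and the fail()/pgsqlRunScript() helpers from setup.php:

    $oDB =& getDB();
    $fPostgresVersion = getPostgresVersion($oDB);
    $fPostgisVersion  = getPostgisVersion($oDB);
    if ($fPostgresVersion < 9.1) fail('Minimum supported version of Postgresql is 9.1.');
    if ($fPostgisVersion < 2.1)
    {
        // Pre-2.1 PostGIS still uses the old st_line_interpolate_point name.
        pgsqlRunScript('ALTER FUNCTION st_line_interpolate_point(geometry, double precision) RENAME TO ST_LineInterpolatePoint');
    }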
@@ -12,12 +12,6 @@
|
||||
}
|
||||
if ($_SERVER['REQUEST_METHOD'] == 'OPTIONS') exit;
|
||||
|
||||
if (CONST_ClosedForIndexing && strpos(CONST_ClosedForIndexingExceptionIPs, ','.$_SERVER["REMOTE_ADDR"].',') === false)
|
||||
{
|
||||
echo "Closed for re-indexing...";
|
||||
exit;
|
||||
}
|
||||
|
||||
$aBucketKeys = array();
|
||||
|
||||
if (isset($_SERVER["HTTP_REFERER"])) $aBucketKeys[] = str_replace('www.','',strtolower(parse_url($_SERVER["HTTP_REFERER"], PHP_URL_HOST)));
|
||||
|
||||
112 lib/lib.php
@@ -55,17 +55,6 @@
|
||||
}
|
||||
|
||||
|
||||
function getBlockingProcesses()
|
||||
{
|
||||
$sStats = file_get_contents('/proc/stat');
|
||||
if (preg_match('/procs_blocked ([0-9]+)/i', $sStats, $aMatches))
|
||||
{
|
||||
return (int)$aMatches[1];
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
function getLoadAverage()
|
||||
{
|
||||
$sLoadAverage = file_get_contents('/proc/loadavg');
|
||||
@@ -110,16 +99,7 @@
|
||||
{
|
||||
if ($a['importance'] != $b['importance'])
|
||||
return ($a['importance'] > $b['importance']?-1:1);
|
||||
/*
|
||||
if ($a['aPointPolygon']['numfeatures'] != $b['aPointPolygon']['numfeatures'])
|
||||
return ($a['aPointPolygon']['numfeatures'] > $b['aPointPolygon']['numfeatures']?-1:1);
|
||||
if ($a['aPointPolygon']['area'] != $b['aPointPolygon']['area'])
|
||||
return ($a['aPointPolygon']['area'] > $b['aPointPolygon']['area']?-1:1);
|
||||
// if ($a['levenshtein'] != $b['levenshtein'])
|
||||
// return ($a['levenshtein'] < $b['levenshtein']?-1:1);
|
||||
if ($a['rank_search'] != $b['rank_search'])
|
||||
return ($a['rank_search'] < $b['rank_search']?-1:1);
|
||||
*/
|
||||
|
||||
return ($a['foundorder'] < $b['foundorder']?-1:1);
|
||||
}
|
||||
|
||||
@@ -229,7 +209,6 @@
|
||||
{
|
||||
$aTokens[' '.$sWord] = ' '.$sWord;
|
||||
$aTokens[$sWord] = $sWord;
|
||||
//if (!strpos($sWord,' ')) $aTokens[$sWord] = $sWord;
|
||||
}
|
||||
}
|
||||
return $aTokens;
|
||||
@@ -240,34 +219,6 @@
|
||||
GB Postcode functions
|
||||
*/
|
||||
|
||||
function gbPostcodeAlphaDifference($s1, $s2)
|
||||
{
|
||||
$aValues = array(
|
||||
'A'=>0,
|
||||
'B'=>1,
|
||||
'D'=>2,
|
||||
'E'=>3,
|
||||
'F'=>4,
|
||||
'G'=>5,
|
||||
'H'=>6,
|
||||
'J'=>7,
|
||||
'L'=>8,
|
||||
'N'=>9,
|
||||
'O'=>10,
|
||||
'P'=>11,
|
||||
'Q'=>12,
|
||||
'R'=>13,
|
||||
'S'=>14,
|
||||
'T'=>15,
|
||||
'U'=>16,
|
||||
'W'=>17,
|
||||
'X'=>18,
|
||||
'Y'=>19,
|
||||
'Z'=>20);
|
||||
return abs(($aValues[$s1[0]]*21+$aValues[$s1[1]]) - ($aValues[$s2[0]]*21+$aValues[$s2[1]]));
|
||||
}
|
||||
|
||||
|
||||
function gbPostcodeCalculate($sPostcode, $sPostcodeSector, $sPostcodeEnd, &$oDB)
|
||||
{
|
||||
// Try an exact match on the gb_postcode table
|
||||
@@ -294,66 +245,6 @@
|
||||
}
|
||||
|
||||
|
||||
function usPostcodeCalculate($sPostcode, &$oDB)
|
||||
{
|
||||
$iZipcode = (int)$sPostcode;
|
||||
|
||||
// Try an exact match on the us_zippostcode table
|
||||
$sSQL = 'select zipcode, ST_X(ST_Centroid(geometry)) as lon,ST_Y(ST_Centroid(geometry)) as lat from us_zipcode where zipcode = '.$iZipcode;
|
||||
$aNearPostcodes = $oDB->getAll($sSQL);
|
||||
if (PEAR::IsError($aNearPostcodes))
|
||||
{
|
||||
var_dump($sSQL, $aNearPostcodes);
|
||||
exit;
|
||||
}
|
||||
|
||||
if (!sizeof($aNearPostcodes))
|
||||
{
|
||||
$sSQL = 'select zipcode,ST_X(ST_Centroid(geometry)) as lon,ST_Y(ST_Centroid(geometry)) as lat from us_zipcode where zipcode between '.($iZipcode-100).' and '.($iZipcode+100).' order by abs(zipcode - '.$iZipcode.') asc limit 5';
|
||||
$aNearPostcodes = $oDB->getAll($sSQL);
|
||||
if (PEAR::IsError($aNearPostcodes))
|
||||
{
|
||||
var_dump($sSQL, $aNearPostcodes);
|
||||
exit;
|
||||
}
|
||||
}
|
||||
|
||||
if (!sizeof($aNearPostcodes))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
$fTotalLat = 0;
|
||||
$fTotalLon = 0;
|
||||
$fTotalFac = 0;
|
||||
foreach($aNearPostcodes as $aPostcode)
|
||||
{
|
||||
$iDiff = abs($aPostcode['zipcode'] - $iZipcode) + 1;
|
||||
if ($iDiff == 0)
|
||||
$fFac = 1;
|
||||
else
|
||||
$fFac = 1/($iDiff*$iDiff);
|
||||
|
||||
$fTotalFac += $fFac;
|
||||
$fTotalLat += $aPostcode['lat'] * $fFac;
|
||||
$fTotalLon += $aPostcode['lon'] * $fFac;
|
||||
}
|
||||
if ($fTotalFac)
|
||||
{
|
||||
$fLat = $fTotalLat / $fTotalFac;
|
||||
$fLon = $fTotalLon / $fTotalFac;
|
||||
return array(array('lat' => $fLat, 'lon' => $fLon, 'radius' => 0.2));
|
||||
}
|
||||
return false;
|
||||
|
||||
/*
|
||||
$fTotalFac is a surprisingly good indicator of accuracy
|
||||
$iZoom = 18 + round(log($fTotalFac,32));
|
||||
$iZoom = max(13,min(18,$iZoom));
|
||||
*/
|
||||
}
|
||||
|
||||
|
||||
function getClassTypes()
|
||||
{
|
||||
return array(
|
||||
@@ -701,7 +592,6 @@
|
||||
|
||||
function javascript_renderData($xVal, $iOptions = 0)
|
||||
{
|
||||
header("Access-Control-Allow-Origin: *");
|
||||
if (defined('PHP_VERSION_ID') && PHP_VERSION_ID > 50400)
|
||||
$iOptions |= JSON_UNESCAPED_UNICODE;
|
||||
$jsonout = json_encode($xVal, $iOptions);
|
||||
|
||||
82 lib/log.php
@@ -2,68 +2,59 @@
|
||||
|
||||
function logStart(&$oDB, $sType = '', $sQuery = '', $aLanguageList = array())
|
||||
{
|
||||
$aStartTime = explode('.',microtime(true));
|
||||
$fStartTime = microtime(true);
|
||||
$aStartTime = explode('.', $fStartTime);
|
||||
if (!isset($aStartTime[1])) $aStartTime[1] = '0';
|
||||
|
||||
$sOutputFormat = '';
|
||||
if (isset($_GET['format'])) $sOutputFormat = $_GET['format'];
|
||||
|
||||
if ($sType == 'reverse')
|
||||
{
|
||||
$sOutQuery = (isset($_GET['lat'])?$_GET['lat']:'').'/';
|
||||
if (isset($_GET['lon'])) $sOutQuery .= $_GET['lon'];
|
||||
if (isset($_GET['zoom'])) $sOutQuery .= '/'.$_GET['zoom'];
|
||||
}
|
||||
else
|
||||
$sOutQuery = $sQuery;
|
||||
|
||||
$hLog = array(
|
||||
date('Y-m-d H:i:s',$aStartTime[0]).'.'.$aStartTime[1],
|
||||
$_SERVER["REMOTE_ADDR"],
|
||||
$_SERVER['QUERY_STRING'],
|
||||
$sQuery
|
||||
$sOutQuery,
|
||||
$sType,
|
||||
$fStartTime
|
||||
);
|
||||
|
||||
if (CONST_Log_DB)
|
||||
{
|
||||
// Log
|
||||
if ($sType == 'search')
|
||||
{
|
||||
$oDB->query('insert into query_log values ('.getDBQuoted($hLog[0]).','.getDBQuoted($hLog[3]).','.getDBQuoted($hLog[1]).')');
|
||||
}
|
||||
|
||||
$sSQL = 'insert into new_query_log (type,starttime,query,ipaddress,useragent,language,format)';
|
||||
if (isset($_GET['email']))
|
||||
$sUserAgent = $_GET['email'];
|
||||
elseif (isset($_SERVER['HTTP_REFERER']))
|
||||
$sUserAgent = $_SERVER['HTTP_REFERER'];
|
||||
elseif (isset($_SERVER['HTTP_USER_AGENT']))
|
||||
$sUserAgent = $_SERVER['HTTP_USER_AGENT'];
|
||||
else
|
||||
$sUserAgent = '';
|
||||
$sSQL = 'insert into new_query_log (type,starttime,query,ipaddress,useragent,language,format,searchterm)';
|
||||
$sSQL .= ' values ('.getDBQuoted($sType).','.getDBQuoted($hLog[0]).','.getDBQuoted($hLog[2]);
|
||||
$sSQL .= ','.getDBQuoted($hLog[1]).','.getDBQuoted($_SERVER['HTTP_USER_AGENT']).','.getDBQuoted(join(',',$aLanguageList)).','.getDBQuoted($sOutputFormat).')';
|
||||
$sSQL .= ','.getDBQuoted($hLog[1]).','.getDBQuoted($sUserAgent).','.getDBQuoted(join(',',$aLanguageList)).','.getDBQuoted($sOutputFormat).','.getDBQuoted($hLog[3]).')';
|
||||
$oDB->query($sSQL);
|
||||
}
|
||||
|
||||
if (CONST_Log_File && CONST_Log_File_ReverseLog != '')
|
||||
{
|
||||
if ($sType == 'reverse')
|
||||
{
|
||||
$aStartTime = explode('.',$hLog[0]);
|
||||
file_put_contents(CONST_Log_File_ReverseLog,
|
||||
$aStartTime[0].','.$aStartTime[1].','.
|
||||
php_uname('n').','.
|
||||
'"'.addslashes(isset($_SERVER['HTTP_REFERER'])?$_SERVER['HTTP_REFERER']:'').'",'.
|
||||
'"'.addslashes($hLog[1]).'",'.
|
||||
$_GET['lat'].','.
|
||||
$_GET['lon'].','.
|
||||
$_GET['zoom'].','.
|
||||
'"'.addslashes($_SERVER['HTTP_USER_AGENT']).'",'.
|
||||
'"'.addslashes($sOutputFormat).'"'."\n",
|
||||
FILE_APPEND);
|
||||
}
|
||||
}
|
||||
|
||||
return $hLog;
|
||||
}
|
||||
|
||||
function logEnd(&$oDB, $hLog, $iNumResults)
|
||||
{
|
||||
$aEndTime = explode('.',microtime(true));
|
||||
if (!$aEndTime[1]) $aEndTime[1] = '0';
|
||||
$sEndTime = date('Y-m-d H:i:s',$aEndTime[0]).'.'.$aEndTime[1];
|
||||
$fEndTime = microtime(true);
|
||||
|
||||
if (CONST_Log_DB)
|
||||
{
|
||||
$sSQL = 'update query_log set endtime = '.getDBQuoted($sEndTime).', results = '.$iNumResults;
|
||||
$sSQL .= ' where starttime = '.getDBQuoted($hLog[0]);
|
||||
$sSQL .= ' and ipaddress = '.getDBQuoted($hLog[1]);
|
||||
$sSQL .= ' and query = '.getDBQuoted($hLog[3]);
|
||||
$oDB->query($sSQL);
|
||||
$aEndTime = explode('.', $fEndTime);
|
||||
if (!$aEndTime[1]) $aEndTime[1] = '0';
|
||||
$sEndTime = date('Y-m-d H:i:s',$aEndTime[0]).'.'.$aEndTime[1];
|
||||
|
||||
$sSQL = 'update new_query_log set endtime = '.getDBQuoted($sEndTime).', results = '.$iNumResults;
|
||||
$sSQL .= ' where starttime = '.getDBQuoted($hLog[0]);
|
||||
@@ -72,19 +63,12 @@
|
||||
$oDB->query($sSQL);
|
||||
}
|
||||
|
||||
if (CONST_Log_File && CONST_Log_File_SearchLog != '')
|
||||
if (CONST_Log_File)
|
||||
{
|
||||
$aStartTime = explode('.',$hLog[0]);
|
||||
file_put_contents(CONST_Log_File_SearchLog,
|
||||
$aStartTime[0].','.$aStartTime[1].','.
|
||||
php_uname('n').','.
|
||||
'"'.addslashes(isset($_SERVER['HTTP_REFERER'])?$_SERVER['HTTP_REFERER']:'').'",'.
|
||||
'"'.addslashes($hLog[1]).'",'.
|
||||
'"'.addslashes($hLog[3]).'",'.
|
||||
'"'.addslashes($_SERVER['HTTP_USER_AGENT']).'",'.
|
||||
'"'.addslashes((isset($_GET['format']))?$_GET['format']:'').'",'.
|
||||
$iNumResults."\n",
|
||||
FILE_APPEND);
|
||||
$aOutdata = sprintf("[%s] %.4f %d %s \"%s\"\n",
|
||||
$hLog[0], $fEndTime-$hLog[5], $iNumResults,
|
||||
$hLog[4], $hLog[2]);
|
||||
file_put_contents(CONST_Log_File, $aOutdata, FILE_APPEND | LOCK_EX);
|
||||
}
|
||||
|
||||
}
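With this change the file log becomes a single printf-style line per request instead of the old CSV row. A small sketch of one resulting entry, with made-up values:

    $aOutdata = sprintf("[%s] %.4f %d %s \"%s\"\n",
                        '2015-05-04 11:42:13.1234', 0.0871, 10, 'search', 'pilkington avenue, birmingham');
    // yields: [2015-05-04 11:42:13.1234] 0.0871 10 search "pilkington avenue, birmingham"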
|
||||
|
||||
@@ -1,165 +0,0 @@
|
||||
amenity airport
|
||||
amenity arts_centre
|
||||
amenity atm
|
||||
amenity auditorium
|
||||
amenity bank
|
||||
amenity bar
|
||||
amenity bench
|
||||
amenity bicycle_parking
|
||||
amenity bicycle_rental
|
||||
amenity brothel
|
||||
amenity bureau_de_change
|
||||
amenity bus_station
|
||||
amenity cafe
|
||||
amenity car_rental
|
||||
amenity car_wash
|
||||
amenity casino
|
||||
amenity cinema
|
||||
amenity clinic
|
||||
amenity club
|
||||
amenity college
|
||||
amenity community_centre
|
||||
amenity courthouse
|
||||
amenity crematorium
|
||||
amenity dentist
|
||||
amenity doctors
|
||||
amenity dormitory
|
||||
amenity drinking_water
|
||||
amenity driving_school
|
||||
amenity embassy
|
||||
amenity emergency_phone
|
||||
amenity fast_food
|
||||
amenity ferry_terminal
|
||||
amenity fire_hydrant
|
||||
amenity fire_station
|
||||
amenity fountain
|
||||
amenity fuel
|
||||
amenity grave_yard
|
||||
amenity hall
|
||||
amenity health_centre
|
||||
amenity hospital
|
||||
amenity hotel
|
||||
amenity hunting_stand
|
||||
amenity ice_cream
|
||||
amenity kindergarten
|
||||
amenity library
|
||||
amenity market
|
||||
amenity marketplace
|
||||
amenity nightclub
|
||||
amenity nursery
|
||||
amenity nursing_home
|
||||
amenity office
|
||||
amenity park
|
||||
amenity parking
|
||||
amenity pharmacy
|
||||
amenity place_of_worship
|
||||
amenity police
|
||||
amenity post_box
|
||||
amenity post_office
|
||||
amenity preschool
|
||||
amenity prison
|
||||
amenity pub
|
||||
amenity public_building
|
||||
amenity public_market
|
||||
amenity reception_area
|
||||
amenity restaurant
|
||||
amenity retirement_home
|
||||
amenity sauna
|
||||
amenity school
|
||||
amenity shelter
|
||||
amenity shop
|
||||
amenity shopping
|
||||
amenity social_club
|
||||
amenity studio
|
||||
amenity supermarket
|
||||
amenity taxi
|
||||
amenity telephone
|
||||
amenity theatre
|
||||
amenity toilets
|
||||
amenity townhall
|
||||
amenity university
|
||||
amenity veterinary
|
||||
amenity waste_basket
|
||||
amenity wifi
|
||||
amenity youth_centre
|
||||
boundary administrative
|
||||
building apartments
|
||||
building block
|
||||
building bunker
|
||||
building chapel
|
||||
building church
|
||||
building commercial
|
||||
building dormitory
|
||||
building entrance
|
||||
building faculty
|
||||
building farm
|
||||
building flats
|
||||
building garage
|
||||
building hospital
|
||||
building hotel
|
||||
building house
|
||||
building industrial
|
||||
building office
|
||||
building public
|
||||
building residential
|
||||
building retail
|
||||
building school
|
||||
building shop
|
||||
building stadium
|
||||
building store
|
||||
building terrace
|
||||
building tower
|
||||
building train_station
|
||||
building university
|
||||
highway bridleway
|
||||
highway bus_stop
|
||||
highway construction
|
||||
highway cycleway
|
||||
highway distance_marker
|
||||
highway emergency_access_point
|
||||
highway footway
|
||||
highway gate
|
||||
highway motorway_junction
|
||||
highway path
|
||||
highway pedestrian
|
||||
highway platform
|
||||
highway primary
|
||||
highway primary_link
|
||||
highway raceway
|
||||
highway road
|
||||
highway secondary
|
||||
highway secondary_link
|
||||
highway services
|
||||
highway steps
|
||||
highway tertiary
|
||||
highway track
|
||||
highway trail
|
||||
highway trunk
|
||||
highway trunk_link
|
||||
highway unsurfaced
|
||||
historic archaeological_site
|
||||
historic battlefield
|
||||
historic building
|
||||
historic castle
|
||||
historic church
|
||||
historic house
|
||||
historic icon
|
||||
historic manor
|
||||
historic memorial
|
||||
historic mine
|
||||
historic monument
|
||||
historic museum
|
||||
historic ruins
|
||||
historic tower
|
||||
historic wayside_cross
|
||||
historic wayside_shrine
|
||||
historic wreck
|
||||
landuse cemetery
|
||||
landuse commercial
|
||||
landuse construction
|
||||
landuse farm
|
||||
landuse farmland
|
||||
landuse farmyard
|
||||
landuse forest
|
||||
landuse grass
|
||||
landuse industrial
|
||||
@@ -11,6 +11,18 @@
@define('CONST_Database_Web_User', 'www-data');
@define('CONST_Max_Word_Frequency', '50000');
@define('CONST_Limit_Reindexing', true);
// Set to false to avoid importing extra postcodes for the US.
@define('CONST_Use_Extra_US_Postcodes', true);
// Set to true after importing Tiger house number data for the US.
// Note: The tables must already exist or queries will throw errors.
// After changing this setting run ./utils/setup --create-functions
// again.
@define('CONST_Use_US_Tiger_Data', false);
// Set to true after importing other external house number data.
// Note: the aux tables must already exist or queries will throw errors.
// After changing this setting run ./utils/setup --create-functions
// again.
@define('CONST_Use_Aux_Location_data', false);
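Both flags are plain @define defaults, so a settings file that is loaded first wins; the PHP and SQL code then only touches location_property_tiger or location_property_aux when the flag says the tables exist. A hedged sketch of such an override (the file name and include order are assumptions, not part of this change):

    // settings/local.php (hypothetical), loaded before these defaults
    @define('CONST_Use_US_Tiger_Data', true);      // Tiger house numbers have been imported
    @define('CONST_Use_Aux_Location_data', false); // no aux table present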
// Proxy settings
@define('CONST_HTTP_Proxy', false);
@@ -19,13 +31,7 @@
|
||||
@define('CONST_HTTP_Proxy_Login', '');
|
||||
@define('CONST_HTTP_Proxy_Password', '');
|
||||
|
||||
// Software versions
|
||||
@define('CONST_Postgresql_Version', '9.3'); // values: 9.0, ... , 9.4
|
||||
@define('CONST_Postgis_Version', '2.1'); // values: 1.5, 2.0, 2.1
|
||||
|
||||
// Paths
|
||||
@define('CONST_Path_Postgresql_Contrib', '/usr/share/postgresql/'.CONST_Postgresql_Version.'/contrib');
|
||||
@define('CONST_Path_Postgresql_Postgis', CONST_Path_Postgresql_Contrib.'/postgis-'.CONST_Postgis_Version);
|
||||
@define('CONST_Osm2pgsql_Binary', CONST_InstallPath.'/osm2pgsql/osm2pgsql');
|
||||
@define('CONST_Osmosis_Binary', '/usr/bin/osmosis');
|
||||
@define('CONST_Tiger_Data_Path', CONST_BasePath.'/data/tiger');
|
||||
@@ -81,23 +87,22 @@
|
||||
|
||||
// Website settings
|
||||
@define('CONST_NoAccessControl', true);
|
||||
@define('CONST_ClosedForIndexing', false);
|
||||
@define('CONST_ClosedForIndexingExceptionIPs', '');
|
||||
@define('CONST_BlockedIPs', '');
|
||||
@define('CONST_BulkUserIPs', '');
|
||||
@define('CONST_BlockMessage', ''); // additional info to show for blocked IPs
|
||||
|
||||
@define('CONST_Website_BaseURL', 'http://'.php_uname('n').'/');
|
||||
@define('CONST_Tile_Default', 'Mapnik');
|
||||
|
||||
// Language to assume when none is supplied with the query.
|
||||
// When set to false, the local language (i.e. the name tag without suffix)
|
||||
// will be used.
|
||||
@define('CONST_Default_Language', false);
|
||||
// Appearance of the map in the debug interface.
|
||||
@define('CONST_Default_Lat', 20.0);
|
||||
@define('CONST_Default_Lon', 0.0);
|
||||
@define('CONST_Default_Zoom', 2);
|
||||
@define('CONST_Map_Tile_URL', 'http://{s}.tile.osm.org/{z}/{x}/{y}.png');
|
||||
@define('CONST_Map_Tile_Attribution', ''); // Set if tile source isn't osm.org
|
||||
|
||||
@define('CONST_Search_AreaPolygons_Enabled', true);
|
||||
@define('CONST_Search_AreaPolygons', true);
|
||||
|
||||
@define('CONST_Search_BatchMode', false);
|
||||
@@ -108,16 +113,19 @@
|
||||
// When set to false, only selected languages allow reverse search.
|
||||
@define('CONST_Search_ReversePlanForAll', true);
|
||||
|
||||
// Maximum number of OSM ids that may be queried at once
|
||||
// for the places endpoint.
|
||||
@define('CONST_Places_Max_ID_count', 50);
|
||||
|
||||
// Set to zero to disable polygon output
|
||||
// Number of different geometry formats that may be queried in parallel.
|
||||
// Set to zero to disable polygon output.
|
||||
@define('CONST_PolygonOutput_MaximumTypes', 1);
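In practice this limits how many polygon_* output formats a single request may combine. A hypothetical guard in the website code (the parameter names and the userError() helper are assumptions here, not shown in this diff):

    $iWantedTypes = 0;
    foreach (array('polygon_geojson', 'polygon_kml', 'polygon_svg', 'polygon_text') as $sParam)
    {
        if (isset($_GET[$sParam]) && $_GET[$sParam]) $iWantedTypes++;
    }
    if (CONST_PolygonOutput_MaximumTypes && $iWantedTypes > CONST_PolygonOutput_MaximumTypes)
    {
        userError('Too many polygon output options selected.');
    }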
|
||||
|
||||
// Log settings
|
||||
@define('CONST_Log_DB', true);
|
||||
// Set to true to log into new_query_log table.
|
||||
// You should set up a cron job that regularly clears out this table.
|
||||
@define('CONST_Log_DB', false);
|
||||
// Set to a file name to enable logging to a file.
|
||||
@define('CONST_Log_File', false);
|
||||
@define('CONST_Log_File_Format', 'TODO'); // Currently hard coded
|
||||
@define('CONST_Log_File_SearchLog', '');
|
||||
@define('CONST_Log_File_ReverseLog', '');
|
||||
|
||||
|
||||
|
||||
6 sql/aux_tables.sql Normal file
@@ -0,0 +1,6 @@
CREATE TABLE location_property_aux () INHERITS (location_property);
CREATE INDEX idx_location_property_aux_place_id ON location_property_aux USING BTREE (place_id);
CREATE INDEX idx_location_property_aux_parent_place_id ON location_property_aux USING BTREE (parent_place_id);
CREATE INDEX idx_location_property_aux_housenumber_parent_place_id ON location_property_aux USING BTREE (parent_place_id, housenumber);
GRANT SELECT ON location_property_aux TO "{www-user}";
@@ -1,3 +1,15 @@
-- Splits the line at the given point and returns the two parts
-- in a multilinestring.
CREATE OR REPLACE FUNCTION split_line_on_node(line GEOMETRY, point GEOMETRY)
RETURNS GEOMETRY
AS $$
BEGIN
  RETURN ST_Split(ST_Snap(line, point, 0.0005), point);
END;
$$
LANGUAGE plpgsql;
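A quick way to sanity-check the helper is to call it on literal geometries; a hypothetical check from PHP (no tables involved, $oDB is a connected DB handle):

    $sSQL  = "select ST_AsText(split_line_on_node(";
    $sSQL .= "ST_GeomFromText('LINESTRING(0 0,10 0)',4326),";
    $sSQL .= "ST_SetSRID(ST_Point(4,0),4326)))";
    $sParts = $oDB->getOne($sSQL);
    // expected: a collection of the two parts, LINESTRING(0 0,4 0) and LINESTRING(4 0,10 0)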
CREATE OR REPLACE FUNCTION geometry_sector(partition INTEGER, place geometry) RETURNS INTEGER
|
||||
AS $$
|
||||
DECLARE
|
||||
@@ -990,7 +1002,7 @@ BEGIN
|
||||
|
||||
--DEBUG: RAISE WARNING 'placex_insert:END: % % % %',NEW.osm_type,NEW.osm_id,NEW.class,NEW.type;
|
||||
|
||||
RETURN NEW; -- @DIFFUPDATES@ The following is not needed until doing diff updates, and slows the main index process down
|
||||
RETURN NEW; -- %DIFFUPDATES% The following is not needed until doing diff updates, and slows the main index process down
|
||||
|
||||
IF NEW.rank_address > 0 THEN
|
||||
IF (ST_GeometryType(NEW.geometry) in ('ST_Polygon','ST_MultiPolygon') AND ST_IsValid(NEW.geometry)) THEN
|
||||
@@ -1606,6 +1618,7 @@ BEGIN
|
||||
NEW.parent_place_id = 0;
|
||||
parent_place_id_rank = 0;
|
||||
|
||||
|
||||
-- convert isin to array of tokenids
|
||||
isin_tokens := '{}'::int[];
|
||||
IF NEW.isin IS NOT NULL THEN
|
||||
@@ -1645,6 +1658,7 @@ BEGIN
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
-- %NOTIGERDATA% IF 0 THEN
|
||||
-- for the USA we have an additional address table. Merge in zip codes from there too
|
||||
IF NEW.rank_search = 26 AND NEW.calculated_country_code = 'us' THEN
|
||||
FOR location IN SELECT distinct postcode from location_property_tiger where parent_place_id = NEW.place_id LOOP
|
||||
@@ -1657,6 +1671,7 @@ BEGIN
|
||||
nameaddress_vector := array_merge(nameaddress_vector, ARRAY[address_street_word_id]);
|
||||
END LOOP;
|
||||
END IF;
|
||||
-- %NOTIGERDATA% END IF;
|
||||
|
||||
-- RAISE WARNING 'ISIN: %', isin_tokens;
|
||||
|
||||
@@ -2315,7 +2330,6 @@ DECLARE
|
||||
countryname HSTORE;
|
||||
hadcountry BOOLEAN;
|
||||
BEGIN
|
||||
|
||||
-- first query osmline (interpolation lines)
|
||||
select parent_place_id, calculated_country_code, 30, postcode, null, 'place', 'house' from location_property_osmline
|
||||
WHERE place_id = in_place_id AND in_housenumber>=startnumber AND in_housenumber <= endnumber
|
||||
@@ -2323,8 +2337,9 @@ BEGIN
|
||||
IF for_place_id IS NOT NULL THEN
|
||||
searchhousenumber = in_housenumber::text;
|
||||
END IF;
|
||||
|
||||
|
||||
--then query tiger data
|
||||
-- %NOTIGERDATA% IF 0 THEN
|
||||
IF for_place_id IS NULL THEN
|
||||
select parent_place_id,'us', 30, postcode, null, 'place', 'house' from location_property_tiger
|
||||
WHERE place_id = in_place_id AND in_housenumber>=startnumber AND in_housenumber <= endnumber
|
||||
@@ -2333,13 +2348,16 @@ BEGIN
|
||||
searchhousenumber = in_housenumber::text;
|
||||
END IF;
|
||||
END IF;
|
||||
|
||||
-- %NOTIGERDATA% END IF;
|
||||
|
||||
-- %NOAUXDATA% IF 0 THEN
|
||||
IF for_place_id IS NULL THEN
|
||||
select parent_place_id,'us', housenumber, 30, postcode, null, 'place', 'house' from location_property_aux
|
||||
WHERE place_id = in_place_id
|
||||
INTO for_place_id,searchcountrycode, searchhousenumber, searchrankaddress, searchpostcode, searchhousename, searchclass, searchtype;
|
||||
END IF;
|
||||
|
||||
-- %NOAUXDATA% END IF;
|
||||
|
||||
IF for_place_id IS NULL THEN
|
||||
select parent_place_id, calculated_country_code, housenumber, rank_search, postcode, name, class, type from placex
|
||||
WHERE place_id = in_place_id and rank_address = 30
|
||||
|
||||
@@ -19,10 +19,6 @@ CREATE INDEX idx_location_area_country_place_id ON location_area_country USING B
|
||||
|
||||
CREATE INDEX idx_search_name_country_centroid ON search_name_country USING GIST (centroid) {ts:address-index};
|
||||
|
||||
-- start
|
||||
CREATE INDEX idx_location_property_-partition-_centroid ON location_property_-partition- USING GIST (centroid) {ts:address-index};
|
||||
-- end
|
||||
|
||||
DROP INDEX IF EXISTS place_id_idx;
|
||||
CREATE UNIQUE INDEX idx_place_osm_unique on place using btree(osm_id,osm_type,class,type) {ts:address-index};
|
||||
|
||||
|
||||
@@ -51,11 +51,6 @@ CREATE INDEX idx_search_name_-partition-_place_id ON search_name_-partition- USI
|
||||
CREATE INDEX idx_search_name_-partition-_centroid ON search_name_-partition- USING GIST (centroid) {ts:address-index};
|
||||
CREATE INDEX idx_search_name_-partition-_name_vector ON search_name_-partition- USING GIN (name_vector) WITH (fastupdate = off) {ts:address-index};
|
||||
|
||||
CREATE TABLE location_property_-partition- () INHERITS (location_property) {ts:aux-data};
|
||||
CREATE INDEX idx_location_property_-partition-_place_id ON location_property_-partition- USING BTREE (place_id) {ts:aux-index};
|
||||
CREATE INDEX idx_location_property_-partition-_parent_place_id ON location_property_-partition- USING BTREE (parent_place_id) {ts:aux-index};
|
||||
CREATE INDEX idx_location_property_-partition-_housenumber_parent_place_id ON location_property_-partition- USING BTREE (parent_place_id, housenumber) {ts:aux-index};
|
||||
|
||||
CREATE TABLE location_road_-partition- (
|
||||
partition integer,
|
||||
place_id BIGINT,
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
-- Splits the line at the given point and returns the two parts
|
||||
-- in a multilinestring.
|
||||
CREATE OR REPLACE FUNCTION split_line_on_node(line GEOMETRY, point GEOMETRY)
|
||||
RETURNS GEOMETRY
|
||||
AS $$
|
||||
DECLARE
|
||||
frac FLOAT;
|
||||
BEGIN
|
||||
frac := ST_Line_Locate_Point(line, point);
|
||||
RETURN ST_Collect(ST_Line_Substring(line, 0, frac),
|
||||
ST_Line_Substring(line, frac, 1));
|
||||
END
|
||||
$$
|
||||
LANGUAGE plpgsql;
|
||||
@@ -1,10 +0,0 @@
|
||||
-- Splits the line at the given point and returns the two parts
|
||||
-- in a multilinestring.
|
||||
CREATE OR REPLACE FUNCTION split_line_on_node(line GEOMETRY, point GEOMETRY)
|
||||
RETURNS GEOMETRY
|
||||
AS $$
|
||||
BEGIN
|
||||
RETURN ST_Split(ST_Snap(line, point, 0.0005), point);
|
||||
END;
|
||||
$$
|
||||
LANGUAGE plpgsql;
|
||||
@@ -1,155 +0,0 @@
|
||||
drop table import_npi_log;
|
||||
CREATE TABLE import_npi_log (
|
||||
npiid integer,
|
||||
batchend timestamp,
|
||||
batchsize integer,
|
||||
starttime timestamp,
|
||||
endtime timestamp,
|
||||
event text
|
||||
);
|
||||
|
||||
drop table IF EXISTS word;
|
||||
CREATE TABLE word (
|
||||
word_id INTEGER,
|
||||
word_token text,
|
||||
word_trigram text,
|
||||
word text,
|
||||
class text,
|
||||
type text,
|
||||
country_code varchar(2),
|
||||
search_name_count INTEGER,
|
||||
operator TEXT
|
||||
);
|
||||
SELECT AddGeometryColumn('word', 'location', 4326, 'GEOMETRY', 2);
|
||||
CREATE INDEX idx_word_word_id on word USING BTREE (word_id);
|
||||
CREATE INDEX idx_word_word_token on word USING BTREE (word_token);
|
||||
GRANT SELECT ON word TO "www-data" ;
|
||||
DROP SEQUENCE seq_word;
|
||||
CREATE SEQUENCE seq_word start 1;
|
||||
|
||||
drop table IF EXISTS location_property CASCADE;
|
||||
CREATE TABLE location_property (
|
||||
place_id BIGINT,
|
||||
partition integer,
|
||||
parent_place_id BIGINT,
|
||||
housenumber TEXT,
|
||||
postcode TEXT
|
||||
);
|
||||
SELECT AddGeometryColumn('location_property', 'centroid', 4326, 'POINT', 2);
|
||||
|
||||
CREATE TABLE location_property_aux () INHERITS (location_property);
|
||||
CREATE INDEX idx_location_property_aux_place_id ON location_property_aux USING BTREE (place_id);
|
||||
CREATE INDEX idx_location_property_aux_parent_place_id ON location_property_aux USING BTREE (parent_place_id);
|
||||
CREATE INDEX idx_location_property_aux_housenumber_parent_place_id ON location_property_aux USING BTREE (parent_place_id, housenumber);
|
||||
|
||||
CREATE TABLE location_property_tiger () INHERITS (location_property);
|
||||
CREATE INDEX idx_location_property_tiger_place_id ON location_property_tiger USING BTREE (place_id);
|
||||
CREATE INDEX idx_location_property_tiger_parent_place_id ON location_property_tiger USING BTREE (parent_place_id);
|
||||
CREATE INDEX idx_location_property_tiger_housenumber_parent_place_id ON location_property_tiger USING BTREE (parent_place_id, housenumber);
|
||||
|
||||
drop table IF EXISTS search_name_blank CASCADE;
|
||||
CREATE TABLE search_name_blank (
|
||||
place_id BIGINT,
|
||||
search_rank integer,
|
||||
address_rank integer,
|
||||
importance FLOAT,
|
||||
country_code varchar(2),
|
||||
name_vector integer[],
|
||||
nameaddress_vector integer[]
|
||||
);
|
||||
SELECT AddGeometryColumn('search_name_blank', 'centroid', 4326, 'GEOMETRY', 2);
|
||||
|
||||
drop table IF EXISTS search_name;
|
||||
CREATE TABLE search_name () INHERITS (search_name_blank);
|
||||
CREATE INDEX search_name_name_vector_idx ON search_name USING GIN (name_vector gin__int_ops) WITH (fastupdate = off);
|
||||
CREATE INDEX searchnameplacesearch_search_nameaddress_vector_idx ON search_name USING GIN (nameaddress_vector gin__int_ops) WITH (fastupdate = off);
|
||||
CREATE INDEX idx_search_name_centroid ON search_name USING GIST (centroid);
|
||||
CREATE INDEX idx_search_name_place_id ON search_name USING BTREE (place_id);
|
||||
|
||||
drop table IF EXISTS place_addressline;
|
||||
CREATE TABLE place_addressline (
|
||||
place_id BIGINT,
|
||||
address_place_id BIGINT,
|
||||
fromarea boolean,
|
||||
isaddress boolean,
|
||||
distance float,
|
||||
cached_rank_address integer
|
||||
);
|
||||
CREATE INDEX idx_place_addressline_place_id on place_addressline USING BTREE (place_id);
|
||||
CREATE INDEX idx_place_addressline_address_place_id on place_addressline USING BTREE (address_place_id);
|
||||
|
||||
drop table IF EXISTS place_boundingbox CASCADE;
|
||||
CREATE TABLE place_boundingbox (
|
||||
place_id BIGINT,
|
||||
minlat float,
|
||||
maxlat float,
|
||||
minlon float,
|
||||
maxlon float,
|
||||
numfeatures integer,
|
||||
area float
|
||||
);
|
||||
CREATE INDEX idx_place_boundingbox_place_id on place_boundingbox USING BTREE (place_id);
|
||||
SELECT AddGeometryColumn('place_boundingbox', 'outline', 4326, 'GEOMETRY', 2);
|
||||
CREATE INDEX idx_place_boundingbox_outline ON place_boundingbox USING GIST (outline);
|
||||
GRANT SELECT on place_boundingbox to "www-data" ;
|
||||
GRANT INSERT on place_boundingbox to "www-data" ;
|
||||
|
||||
drop table country;
|
||||
CREATE TABLE country (
|
||||
country_code varchar(2),
|
||||
country_name hstore,
|
||||
country_default_language_code varchar(2)
|
||||
);
|
||||
SELECT AddGeometryColumn('country', 'geometry', 4326, 'POLYGON', 2);
|
||||
insert into country select iso3166::varchar(2), 'name:en'->cntry_name, null,
|
||||
ST_Transform(geometryn(the_geom, generate_series(1, numgeometries(the_geom))), 4326) from worldboundaries;
|
||||
CREATE INDEX idx_country_country_code ON country USING BTREE (country_code);
|
||||
CREATE INDEX idx_country_geometry ON country USING GIST (geometry);
|
||||
|
||||
drop table placex;
|
||||
CREATE TABLE placex (
|
||||
place_id BIGINT NOT NULL,
|
||||
partition integer,
|
||||
osm_type char(1),
|
||||
osm_id INTEGER,
|
||||
class TEXT NOT NULL,
|
||||
type TEXT NOT NULL,
|
||||
name HSTORE,
|
||||
admin_level INTEGER,
|
||||
housenumber TEXT,
|
||||
street TEXT,
|
||||
isin TEXT,
|
||||
postcode TEXT,
|
||||
country_code varchar(2),
|
||||
extratags HSTORE,
|
||||
parent_place_id BIGINT,
|
||||
linked_place_id BIGINT,
|
||||
rank_address INTEGER,
|
||||
rank_search INTEGER,
|
||||
importance FLOAT,
|
||||
indexed_status INTEGER,
|
||||
indexed_date TIMESTAMP,
|
||||
geometry_sector INTEGER
|
||||
);
|
||||
SELECT AddGeometryColumn('placex', 'geometry', 4326, 'GEOMETRY', 2);
|
||||
CREATE UNIQUE INDEX idx_place_id ON placex USING BTREE (place_id);
|
||||
CREATE INDEX idx_placex_osmid ON placex USING BTREE (osm_type, osm_id);
|
||||
CREATE INDEX idx_placex_rank_search ON placex USING BTREE (rank_search);
|
||||
CREATE INDEX idx_placex_rank_address ON placex USING BTREE (rank_address);
|
||||
CREATE INDEX idx_placex_geometry ON placex USING GIST (geometry);
|
||||
CREATE INDEX idx_placex_parent_place_id ON placex USING BTREE (parent_place_id) where parent_place_id IS NOT NULL;
|
||||
|
||||
DROP SEQUENCE seq_place;
|
||||
CREATE SEQUENCE seq_place start 1;
|
||||
GRANT SELECT on placex to "www-data" ;
|
||||
GRANT UPDATE ON placex to "www-data" ;
|
||||
GRANT SELECT ON search_name to "www-data" ;
|
||||
GRANT DELETE on search_name to "www-data" ;
|
||||
GRANT INSERT on search_name to "www-data" ;
|
||||
GRANT SELECT on place_addressline to "www-data" ;
|
||||
GRANT INSERT ON place_addressline to "www-data" ;
|
||||
GRANT DELETE on place_addressline to "www-data" ;
|
||||
GRANT SELECT ON seq_word to "www-data" ;
|
||||
GRANT UPDATE ON seq_word to "www-data" ;
|
||||
GRANT INSERT ON word to "www-data" ;
|
||||
GRANT SELECT on country to "www-data" ;
|
||||
@@ -13,29 +13,6 @@ CREATE TABLE import_osmosis_log (
|
||||
event text
|
||||
);
|
||||
|
||||
drop table if exists import_npi_log;
|
||||
CREATE TABLE import_npi_log (
|
||||
npiid integer,
|
||||
batchend timestamp,
|
||||
batchsize integer,
|
||||
starttime timestamp,
|
||||
endtime timestamp,
|
||||
event text
|
||||
);
|
||||
|
||||
--drop table IF EXISTS query_log;
|
||||
CREATE TABLE query_log (
|
||||
starttime timestamp,
|
||||
query text,
|
||||
ipaddress text,
|
||||
endtime timestamp,
|
||||
results integer
|
||||
);
|
||||
CREATE INDEX idx_query_log ON query_log USING BTREE (starttime);
|
||||
GRANT SELECT ON query_log TO "{www-user}" ;
|
||||
GRANT INSERT ON query_log TO "{www-user}" ;
|
||||
GRANT UPDATE ON query_log TO "{www-user}" ;
|
||||
|
||||
CREATE TABLE new_query_log (
|
||||
type text,
|
||||
starttime timestamp,
|
||||
@@ -43,6 +20,7 @@ CREATE TABLE new_query_log (
|
||||
useragent text,
|
||||
language text,
|
||||
query text,
|
||||
searchterm text,
|
||||
endtime timestamp,
|
||||
results integer,
|
||||
format text,
|
||||
@@ -56,9 +34,6 @@ GRANT SELECT ON new_query_log TO "{www-user}" ;
|
||||
GRANT SELECT ON TABLE country_name TO "{www-user}";
|
||||
GRANT SELECT ON TABLE gb_postcode TO "{www-user}";
|
||||
|
||||
create view vw_search_query_log as SELECT substr(query, 1, 50) AS query, starttime, endtime - starttime AS duration, substr(useragent, 1, 20) as
|
||||
useragent, language, results, ipaddress FROM new_query_log WHERE type = 'search' ORDER BY starttime DESC;
|
||||
|
||||
drop table IF EXISTS word;
|
||||
CREATE TABLE word (
|
||||
word_id INTEGER,
|
||||
@@ -129,7 +104,6 @@ CREATE UNIQUE INDEX idx_osmline_place_id ON location_property_osmline (place_id)
|
||||
CREATE INDEX idx_osmline_parent_place_id ON location_property_osmline (parent_place_id) {ts:search-index};
|
||||
GRANT SELECT ON location_property_osmline TO "{www-user}";
|
||||
|
||||
|
||||
drop table IF EXISTS search_name;
|
||||
CREATE TABLE search_name (
|
||||
place_id BIGINT,
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
\a
|
||||
\t
|
||||
\o /tmp/bigintupdate.sql
|
||||
select 'alter table "'||relname||'" alter '||attname||' type bigint;' from pg_attribute join pg_class on
|
||||
(attrelid = oid) where attname like '%place_id%' and attnum > 0 and relkind = 'r'::"char" and atttypid = 23
|
||||
and not relname::text ~ '^.*_[0-9]+$' order by 'alter table "'||relname||'" alter '||attname||' type
|
||||
bigint;';
|
||||
\o
|
||||
\i /tmp/bigintupdate.sql
|
||||
@@ -175,8 +175,10 @@ def db_template_setup():
|
||||
conn = psycopg2.connect(database=world.config.template_db)
|
||||
psycopg2.extras.register_hstore(conn, globally=False, unicode=True)
|
||||
cur = conn.cursor()
|
||||
for table in ('gb_postcode', 'us_postcode', 'us_state', 'us_statecounty'):
|
||||
cur.execute('TRUNCATE TABLE %s' % (table,))
|
||||
for table in ('gb_postcode', 'us_postcode'):
|
||||
cur.execute("select * from pg_tables where tablename = '%s'" % (table, ))
|
||||
if cur.rowcount > 0:
|
||||
cur.execute('TRUNCATE TABLE %s' % (table,))
|
||||
conn.commit()
|
||||
conn.close()
|
||||
# execute osm2pgsql on an empty file to get the right tables
|
||||
|
||||
172 utils/setup.php
@@ -24,7 +24,6 @@
|
||||
array('enable-diff-updates', '', 0, 1, 0, 0, 'bool', 'Turn on the code required to make diff updates work'),
|
||||
array('enable-debug-statements', '', 0, 1, 0, 0, 'bool', 'Include debug warning statements in pgsql commands'),
|
||||
array('ignore-errors', '', 0, 1, 0, 0, 'bool', 'Continue import even when errors in SQL are present (EXPERT)'),
|
||||
array('create-minimal-tables', '', 0, 1, 0, 0, 'bool', 'Create minimal main tables'),
|
||||
array('create-tables', '', 0, 1, 0, 0, 'bool', 'Create main tables'),
|
||||
array('create-partition-tables', '', 0, 1, 0, 0, 'bool', 'Create required partition tables'),
|
||||
array('create-partition-functions', '', 0, 1, 0, 0, 'bool', 'Create required partition triggers'),
|
||||
@@ -37,7 +36,6 @@
|
||||
array('osmosis-init', '', 0, 1, 0, 0, 'bool', 'Generate default osmosis configuration'),
|
||||
array('index', '', 0, 1, 0, 0, 'bool', 'Index the data'),
|
||||
array('index-noanalyse', '', 0, 1, 0, 0, 'bool', 'Do not perform analyse operations during index (EXPERT)'),
|
||||
array('index-output', '', 0, 1, 1, 1, 'string', 'File to dump index information to'),
|
||||
array('create-search-indices', '', 0, 1, 0, 0, 'bool', 'Create additional indices required for search and update'),
|
||||
array('create-website', '', 0, 1, 1, 1, 'realpath', 'Create symlinks to setup web directory'),
|
||||
array('drop', '', 0, 1, 0, 0, 'bool', 'Drop tables needed for updates, making the database readonly (EXPERIMENTAL)'),
|
||||
@@ -92,8 +90,6 @@
|
||||
$aDSNInfo = DB::parseDSN(CONST_Database_DSN);
|
||||
if (!isset($aDSNInfo['port']) || !$aDSNInfo['port']) $aDSNInfo['port'] = 5432;
|
||||
|
||||
$fPostgisVersion = (float) CONST_Postgis_Version;
|
||||
|
||||
if ($aCMDResult['create-db'] || $aCMDResult['all'])
|
||||
{
|
||||
echo "Create DB\n";
|
||||
@@ -114,40 +110,39 @@
|
||||
|
||||
$oDB =& getDB();
|
||||
|
||||
$sVersionString = $oDB->getOne('select version()');
|
||||
preg_match('#PostgreSQL ([0-9]+)[.]([0-9]+)[^0-9]#', $sVersionString, $aMatches);
|
||||
if (CONST_Postgresql_Version != $aMatches[1].'.'.$aMatches[2])
|
||||
$fPostgresVersion = getPostgresVersion($oDB);
|
||||
echo 'Postgres version found: '.$fPostgresVersion."\n";
|
||||
|
||||
if ($fPostgresVersion < 9.1)
|
||||
{
|
||||
echo "ERROR: PostgreSQL version is not correct. Expected ".CONST_Postgresql_Version." found ".$aMatches[1].'.'.$aMatches[2]."\n";
|
||||
exit;
|
||||
fail("Minimum supported version of Postgresql is 9.1.");
|
||||
}
|
||||
|
||||
passthru('createlang plpgsql -p '.$aDSNInfo['port'].' '.$aDSNInfo['database']);
|
||||
$pgver = (float) CONST_Postgresql_Version;
|
||||
if ($pgver < 9.1) {
|
||||
pgsqlRunScriptFile(CONST_Path_Postgresql_Contrib.'/hstore.sql');
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/sql/hstore_compatability_9_0.sql');
|
||||
} else {
|
||||
pgsqlRunScript('CREATE EXTENSION hstore');
|
||||
pgsqlRunScript('CREATE EXTENSION IF NOT EXISTS hstore');
|
||||
pgsqlRunScript('CREATE EXTENSION IF NOT EXISTS postgis');
|
||||
|
||||
// For extratags and namedetails the hstore_to_json converter is
|
||||
// needed which is only available from Postgresql 9.3+. For older
|
||||
// versions add a dummy function that returns nothing.
|
||||
$iNumFunc = $oDB->getOne("select count(*) from pg_proc where proname = 'hstore_to_json'");
|
||||
if (PEAR::isError($iNumFunc))
|
||||
{
|
||||
fail("Cannot query stored procedures.", $iNumFunc);
|
||||
}
|
||||
if ($iNumFunc == 0)
|
||||
{
|
||||
pgsqlRunScript("create function hstore_to_json(dummy hstore) returns text AS 'select null::text' language sql immutable");
|
||||
echo "WARNING: Postgresql is too old. extratags and namedetails API not available.";
|
||||
}
|
||||
|
||||
if ($fPostgisVersion < 2.0) {
|
||||
pgsqlRunScriptFile(CONST_Path_Postgresql_Postgis.'/postgis.sql');
|
||||
pgsqlRunScriptFile(CONST_Path_Postgresql_Postgis.'/spatial_ref_sys.sql');
|
||||
} else {
|
||||
pgsqlRunScript('CREATE EXTENSION IF NOT EXISTS postgis');
|
||||
}
|
||||
if ($fPostgisVersion < 2.1) {
|
||||
$fPostgisVersion = getPostgisVersion($oDB);
|
||||
echo 'Postgis version found: '.$fPostgisVersion."\n";
|
||||
|
||||
if ($fPostgisVersion < 2.1)
|
||||
{
|
||||
// Function was renamed in 2.1 and throws an annoying deprecation warning
|
||||
pgsqlRunScript('ALTER FUNCTION st_line_interpolate_point(geometry, double precision) RENAME TO ST_LineInterpolatePoint');
|
||||
}
|
||||
$sVersionString = $oDB->getOne('select postgis_full_version()');
|
||||
preg_match('#POSTGIS="([0-9]+)[.]([0-9]+)[.]([0-9]+)( r([0-9]+))?"#', $sVersionString, $aMatches);
|
||||
if (CONST_Postgis_Version != $aMatches[1].'.'.$aMatches[2])
|
||||
{
|
||||
echo "ERROR: PostGIS version is not correct. Expected ".CONST_Postgis_Version." found ".$aMatches[1].'.'.$aMatches[2]."\n";
|
||||
exit;
|
||||
}
|
||||
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/data/country_name.sql');
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/data/country_naturalearthdata.sql');
|
||||
@@ -161,9 +156,10 @@
|
||||
{
|
||||
echo "WARNING: external UK postcode table not found.\n";
|
||||
}
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/data/us_statecounty.sql');
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/data/us_state.sql');
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/data/us_postcode.sql');
|
||||
if (CONST_Use_Extra_US_Postcodes)
|
||||
{
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/data/us_postcode.sql');
|
||||
}
|
||||
|
||||
if ($aCMDResult['no-partitions'])
|
||||
{
|
||||
@@ -221,51 +217,7 @@
|
||||
echo "Functions\n";
|
||||
$bDidSomething = true;
|
||||
if (!file_exists(CONST_InstallPath.'/module/nominatim.so')) fail("nominatim module not built");
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/functions.sql');
|
||||
$sTemplate = str_replace('{modulepath}', CONST_InstallPath.'/module', $sTemplate);
|
||||
if ($aCMDResult['enable-diff-updates']) $sTemplate = str_replace('RETURN NEW; -- @DIFFUPDATES@', '--', $sTemplate);
|
||||
if ($aCMDResult['enable-debug-statements']) $sTemplate = str_replace('--DEBUG:', '', $sTemplate);
|
||||
if (CONST_Limit_Reindexing) $sTemplate = str_replace('--LIMIT INDEXING:', '', $sTemplate);
|
||||
pgsqlRunScript($sTemplate);
|
||||
|
||||
if ($fPostgisVersion < 2.0) {
|
||||
echo "Helper functions for postgis < 2.0\n";
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/postgis_15_aux.sql');
|
||||
} else {
|
||||
echo "Helper functions for postgis >= 2.0\n";
|
||||
$sTemplate = file_get_contents(CONST_BasePath.'/sql/postgis_20_aux.sql');
|
||||
}
|
||||
pgsqlRunScript($sTemplate);
|
||||
}
|
||||
|
||||
if ($aCMDResult['create-minimal-tables'])
|
||||
{
|
||||
echo "Minimal Tables\n";
|
||||
$bDidSomething = true;
|
||||
pgsqlRunScriptFile(CONST_BasePath.'/sql/tables-minimal.sql');
|
||||
|
||||
$sScript = '';
|
||||
|
||||
// Backstop the import process - earliest possible import id
|
||||
$sScript .= "insert into import_npi_log values (18022);\n";
|
||||
|
||||
$hFile = @fopen(CONST_BasePath.'/settings/partitionedtags.def', "r");
|
||||
if (!$hFile) fail('unable to open list of partitions: '.CONST_BasePath.'/settings/partitionedtags.def');
|
||||
|
||||
while (($sLine = fgets($hFile, 4096)) !== false && $sLine && substr($sLine,0,1) !='#')
|
||||
{
|
||||
list($sClass, $sType) = explode(' ', trim($sLine));
|
||||
$sScript .= "create table place_classtype_".$sClass."_".$sType." as ";
|
||||
$sScript .= "select place_id as place_id,geometry as centroid from placex limit 0;\n";
|
||||
|
||||
$sScript .= "CREATE INDEX idx_place_classtype_".$sClass."_".$sType."_centroid ";
|
||||
$sScript .= "ON place_classtype_".$sClass."_".$sType." USING GIST (centroid);\n";
|
||||
|
||||
$sScript .= "CREATE INDEX idx_place_classtype_".$sClass."_".$sType."_place_id ";
|
||||
$sScript .= "ON place_classtype_".$sClass."_".$sType." USING btree(place_id);\n";
|
||||
}
|
||||
fclose($hFile);
|
||||
pgsqlRunScript($sScript);
|
||||
create_sql_functions($aCMDResult);
|
||||
}
|
||||
|
||||
if ($aCMDResult['create-tables'] || $aCMDResult['all'])
|
||||
@@ -291,10 +243,7 @@
// re-run the functions
echo "Functions\n";
$sTemplate = file_get_contents(CONST_BasePath.'/sql/functions.sql');
$sTemplate = str_replace('{modulepath}',
CONST_InstallPath.'/module', $sTemplate);
pgsqlRunScript($sTemplate);
create_sql_functions($aCMDResult);
}
if ($aCMDResult['create-partition-tables'] || $aCMDResult['all'])

@@ -597,10 +546,14 @@
$sSQL .= "avg(st_x(st_centroid(geometry))) as x,avg(st_y(st_centroid(geometry))) as y ";
$sSQL .= "from placex where postcode is not null group by calculated_country_code,postcode) as x";
if (!pg_query($oDB->connection, $sSQL)) fail(pg_last_error($oDB->connection));
$sSQL = "insert into placex (osm_type,osm_id,class,type,postcode,calculated_country_code,geometry) ";
$sSQL .= "select 'P',nextval('seq_postcodes'),'place','postcode',postcode,'us',";
$sSQL .= "ST_SetSRID(ST_Point(x,y),4326) as geometry from us_postcode";
if (!pg_query($oDB->connection, $sSQL)) fail(pg_last_error($oDB->connection));
if (CONST_Use_Extra_US_Postcodes)
{
$sSQL = "insert into placex (osm_type,osm_id,class,type,postcode,calculated_country_code,geometry) ";
$sSQL .= "select 'P',nextval('seq_postcodes'),'place','postcode',postcode,'us',";
$sSQL .= "ST_SetSRID(ST_Point(x,y),4326) as geometry from us_postcode";
if (!pg_query($oDB->connection, $sSQL)) fail(pg_last_error($oDB->connection));
}
}
if ($aCMDResult['osmosis-init'] || ($aCMDResult['all'] && !$aCMDResult['drop'])) // no use doing osmosis-init when dropping update tables

@@ -701,7 +654,6 @@
{
$bDidSomething = true;
$sOutputFile = '';
if (isset($aCMDResult['index-output'])) $sOutputFile = ' -F '.$aCMDResult['index-output'];
$sBaseCmd = CONST_InstallPath.'/nominatim/nominatim -i -d '.$aDSNInfo['database'].' -P '.$aDSNInfo['port'].' -t '.$iInstances.$sOutputFile;
passthruCheckReturn($sBaseCmd.' -R 4');
if (!$aCMDResult['index-noanalyse']) pgsqlRunScript('ANALYSE');

@@ -714,14 +666,6 @@
{
echo "Search indices\n";
$bDidSomething = true;
$oDB =& getDB();
$sSQL = 'select distinct partition from country_name';
$aPartitions = $oDB->getCol($sSQL);
if (PEAR::isError($aPartitions))
{
fail($aPartitions->getMessage());
}
if (!$aCMDResult['no-partitions']) $aPartitions[] = 0;
$sTemplate = file_get_contents(CONST_BasePath.'/sql/indices.src.sql');
$sTemplate = replace_tablespace('{ts:address-index}',

@@ -730,16 +674,6 @@
CONST_Tablespace_Search_Index, $sTemplate);
$sTemplate = replace_tablespace('{ts:aux-index}',
CONST_Tablespace_Aux_Index, $sTemplate);
preg_match_all('#^-- start(.*?)^-- end#ms', $sTemplate, $aMatches, PREG_SET_ORDER);
foreach($aMatches as $aMatch)
{
$sResult = '';
foreach($aPartitions as $sPartitionName)
{
$sResult .= str_replace('-partition-', $sPartitionName, $aMatch[1]);
}
$sTemplate = str_replace($aMatch[0], $sResult, $sTemplate);
}
pgsqlRunScript($sTemplate);
}

@@ -1011,3 +945,31 @@
return $sSql;
}
function create_sql_functions($aCMDResult)
{
$sTemplate = file_get_contents(CONST_BasePath.'/sql/functions.sql');
$sTemplate = str_replace('{modulepath}', CONST_InstallPath.'/module', $sTemplate);
if ($aCMDResult['enable-diff-updates'])
{
$sTemplate = str_replace('RETURN NEW; -- %DIFFUPDATES%', '--', $sTemplate);
}
if ($aCMDResult['enable-debug-statements'])
{
$sTemplate = str_replace('--DEBUG:', '', $sTemplate);
}
if (CONST_Limit_Reindexing)
{
$sTemplate = str_replace('--LIMIT INDEXING:', '', $sTemplate);
}
if (!CONST_Use_US_Tiger_Data)
{
$sTemplate = str_replace('-- %NOTIGERDATA% ', '', $sTemplate);
}
if (!CONST_Use_Aux_Location_data)
{
$sTemplate = str_replace('-- %NOAUXDATA% ', '', $sTemplate);
}
pgsqlRunScript($sTemplate);
}
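For context on the new create_sql_functions() helper above: optional statements in sql/functions.sql sit behind comment markers such as '-- %NOTIGERDATA% ', and the helper activates them by stripping the marker prefix before the template is run. A minimal, self-contained sketch of that marker mechanism (the template string and the DROP TABLE statement below are illustrative assumptions, not lines taken from the Nominatim sources):

<?php
// Hypothetical two-line template: the first statement is hidden behind the
// %NOTIGERDATA% marker, so by default it is just a SQL comment.
$sTemplate = "-- %NOTIGERDATA% DROP TABLE IF EXISTS location_property_tiger;\n"
           . "SELECT 'always executed';\n";

$bUseTigerData = false;   // stands in for CONST_Use_US_Tiger_Data

if (!$bUseTigerData)
{
    // Same str_replace() pattern as create_sql_functions(): removing the
    // marker prefix turns the guarded line into live SQL.
    $sTemplate = str_replace('-- %NOTIGERDATA% ', '', $sTemplate);
}

echo $sTemplate;

With the marker stripped, the guarded statement becomes active; when CONST_Use_US_Tiger_Data is enabled the prefix stays in place and the line remains an ordinary SQL comment that the server skips.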

433
utils/update.php
@@ -11,18 +11,11 @@
array('quiet', 'q', 0, 1, 0, 0, 'bool', 'Quiet output'),
array('verbose', 'v', 0, 1, 0, 0, 'bool', 'Verbose output'),
array('max-load', '', 0, 1, 1, 1, 'float', 'Maximum load average - indexing is paused if this is exceeded'),
array('max-blocking', '', 0, 1, 1, 1, 'int', 'Maximum blocking processes - indexing is aborted / paused if this is exceeded'),
array('import-osmosis', '', 0, 1, 0, 0, 'bool', 'Import using osmosis'),
array('import-osmosis-all', '', 0, 1, 0, 0, 'bool', 'Import using osmosis forever'),
array('no-npi', '', 0, 1, 0, 0, 'bool', 'Do not write npi index files'),
array('no-npi', '', 0, 1, 0, 0, 'bool', '(obsolete)'),
array('no-index', '', 0, 1, 0, 0, 'bool', 'Do not index the new data'),
array('import-npi-all', '', 0, 1, 0, 0, 'bool', 'Import npi pre-indexed files'),
array('import-hourly', '', 0, 1, 0, 0, 'bool', 'Import hourly diffs'),
array('import-daily', '', 0, 1, 0, 0, 'bool', 'Import daily diffs'),
array('import-all', '', 0, 1, 0, 0, 'bool', 'Import all available files'),
array('import-file', '', 0, 1, 1, 1, 'realpath', 'Re-import data from an OSM file'),

@@ -37,34 +30,14 @@
array('index', '', 0, 1, 0, 0, 'bool', 'Index'),
array('index-rank', '', 0, 1, 1, 1, 'int', 'Rank to start indexing from'),
array('index-instances', '', 0, 1, 1, 1, 'int', 'Number of indexing instances (threads)'),
array('index-estrate', '', 0, 1, 1, 1, 'int', 'Estimated indexed items per second (def:30)'),
array('deduplicate', '', 0, 1, 0, 0, 'bool', 'Deduplicate tokens'),
);
getCmdOpt($_SERVER['argv'], $aCMDOptions, $aResult, true, true);
if ($aResult['import-hourly'] + $aResult['import-daily'] + isset($aResult['import-diff']) > 1)
{
showUsage($aCMDOptions, true, 'Select either import of hourly or daily');
}
if (!isset($aResult['index-instances'])) $aResult['index-instances'] = 1;
if (!isset($aResult['index-rank'])) $aResult['index-rank'] = 0;
/*
// Lock to prevent multiple copies running
if (exec('/bin/ps uww | grep '.basename(__FILE__).' | grep -v /dev/null | grep -v grep -c', $aOutput2, $iResult) > 1)
{
fail("Copy already running\n");
}
if (!isset($aResult['max-load'])) $aResult['max-load'] = 1.9;
if (!isset($aResult['max-blocking'])) $aResult['max-blocking'] = 3;
if (getBlockingProcesses() > $aResult['max-blocking'])
{
fail("Too many blocking processes for import\n");
}
*/
date_default_timezone_set('Etc/UTC');
$oDB =& getDB();

@@ -86,153 +59,112 @@
}
$bFirst = true;
$bContinue = $aResult['import-all'];
while ($bContinue || $bFirst)
if (isset($aResult['import-diff']))
{
$bFirst = false;
if ($aResult['import-hourly'])
// import diff directly (e.g. from osmosis --rri)
$sNextFile = $aResult['import-diff'];
if (!file_exists($sNextFile))
{
// Mirror the hourly diffs
exec('wget --quiet --mirror -l 1 -P '.$sMirrorDir.' http://planet.openstreetmap.org/hourly');
$sNextFile = $oDB->getOne('select TO_CHAR(lastimportdate,\'YYYYMMDDHH24\')||\'-\'||TO_CHAR(lastimportdate+\'1 hour\'::interval,\'YYYYMMDDHH24\')||\'.osc.gz\' from import_status');
$sNextFile = $sMirrorDir.'planet.openstreetmap.org/hourly/'.$sNextFile;
$sUpdateSQL = 'update import_status set lastimportdate = lastimportdate+\'1 hour\'::interval';
fail("Cannot open $sNextFile\n");
}
if ($aResult['import-daily'])
// Import the file
$sCMD = $sOsm2pgsqlCmd.' '.$sNextFile;
echo $sCMD."\n";
exec($sCMD, $sJunk, $iErrorLevel);
if ($iErrorLevel)
{
// Mirror the daily diffs
exec('wget --quiet --mirror -l 1 -P '.$sMirrorDir.' http://planet.openstreetmap.org/daily');
$sNextFile = $oDB->getOne('select TO_CHAR(lastimportdate,\'YYYYMMDD\')||\'-\'||TO_CHAR(lastimportdate+\'1 day\'::interval,\'YYYYMMDD\')||\'.osc.gz\' from import_status');
$sNextFile = $sMirrorDir.'planet.openstreetmap.org/daily/'.$sNextFile;
$sUpdateSQL = 'update import_status set lastimportdate = lastimportdate::date + 1';
}
if (isset($aResult['import-diff']))
{
// import diff directly (e.g. from osmosis --rri)
$sNextFile = $aResult['import-diff'];
if (!file_exists($sNextFile))
{
fail("Cannot open $sNextFile\n");
}
// Don't update the import status - we don't know what this file contains
$sUpdateSQL = 'update import_status set lastimportdate = now() where false';
fail("Error from osm2pgsql, $iErrorLevel\n");
}
// Missing file is not an error - it might not be created yet
if (($aResult['import-hourly'] || $aResult['import-daily'] || isset($aResult['import-diff'])) && file_exists($sNextFile))
{
// Import the file
$sCMD = $sOsm2pgsqlCmd.' '.$sNextFile;
echo $sCMD."\n";
exec($sCMD, $sJunk, $iErrorLevel);
if ($iErrorLevel)
{
fail("Error from osm2pgsql, $iErrorLevel\n");
}
// Move the date onwards
$oDB->query($sUpdateSQL);
}
else
{
$bContinue = false;
}
// Don't update the import status - we don't know what this file contains
}
$bModifyXML = false;
$sModifyXMLstr = '';
$bUseOSMApi = isset($aResult['import-from-main-api']) && $aResult['import-from-main-api'];
$sTemporaryFile = CONST_BasePath.'/data/osmosischange.osc';
$bHaveDiff = false;
if (isset($aResult['import-file']) && $aResult['import-file'])
{
$bModifyXML = true;
$bHaveDiff = true;
$sCMD = CONST_Osmosis_Binary.' --read-xml \''.$aResult['import-file'].'\' --read-empty --derive-change --write-xml-change '.$sTemporaryFile;
echo $sCMD."\n";
exec($sCMD, $sJunk, $iErrorLevel);
if ($iErrorLevel)
{
fail("Error converting osm to osc, osmosis returned: $iErrorLevel\n");
}
}
$bUseOSMApi = isset($aResult['import-from-main-api']) && $aResult['import-from-main-api'];
$sContentURL = '';
if (isset($aResult['import-node']) && $aResult['import-node'])
{
$bModifyXML = true;
if ($bUseOSMApi)
{
$sModifyXMLstr = file_get_contents('http://www.openstreetmap.org/api/0.6/node/'.$aResult['import-node']);
$sContentURL = 'http://www.openstreetmap.org/api/0.6/node/'.$aResult['import-node'];
}
else
{
$sModifyXMLstr = file_get_contents('http://overpass-api.de/api/interpreter?data=node('.$aResult['import-node'].');out%20meta;');
$sContentURL = 'http://overpass-api.de/api/interpreter?data=node('.$aResult['import-node'].');out%20meta;';
}
}
if (isset($aResult['import-way']) && $aResult['import-way'])
{
$bModifyXML = true;
if ($bUseOSMApi)
{
$sCmd = 'http://www.openstreetmap.org/api/0.6/way/'.$aResult['import-way'].'/full';
$sContentURL = 'http://www.openstreetmap.org/api/0.6/way/'.$aResult['import-way'].'/full';
}
else
{
$sCmd = 'http://overpass-api.de/api/interpreter?data=(way('.$aResult['import-way'].');node(w););out%20meta;';
$sContentURL = 'http://overpass-api.de/api/interpreter?data=(way('.$aResult['import-way'].');node(w););out%20meta;';
}
$sModifyXMLstr = file_get_contents($sCmd);
}
if (isset($aResult['import-relation']) && $aResult['import-relation'])
{
$bModifyXML = true;
if ($bUseOSMApi)
{
$sModifyXMLstr = file_get_contents('http://www.openstreetmap.org/api/0.6/relation/'.$aResult['import-relation'].'/full');
$sContentURL = 'http://www.openstreetmap.org/api/0.6/relation/'.$aResult['import-relation'].'/full';
}
else
{
$sModifyXMLstr = file_get_contents('http://overpass-api.de/api/interpreter?data=((rel('.$aResult['import-relation'].');way(r);node(w));node(r));out%20meta;');
$sContentURL = 'http://overpass-api.de/api/interpreter?data=((rel('.$aResult['import-relation'].');way(r);node(w));node(r));out%20meta;';
}
}
if ($bModifyXML)
if ($sContentURL)
{
// derive change from normal osm file with osmosis
$sTemporaryFile = CONST_BasePath.'/data/osmosischange.osc';
if (isset($aResult['import-file']) && $aResult['import-file'])
{
$sCMD = CONST_Osmosis_Binary.' --read-xml \''.$aResult['import-file'].'\' --read-empty --derive-change --write-xml-change '.$sTemporaryFile;
echo $sCMD."\n";
exec($sCMD, $sJunk, $iErrorLevel);
if ($iErrorLevel)
{
fail("Error converting osm to osc, osmosis returned: $iErrorLevel\n");
}
}
else
{
$aSpec = array(
0 => array("pipe", "r"), // stdin
1 => array("pipe", "w"), // stdout
2 => array("pipe", "w") // stderr
);
$sCMD = CONST_Osmosis_Binary.' --read-xml - --read-empty --derive-change --write-xml-change '.$sTemporaryFile;
echo $sCMD."\n";
$hProc = proc_open($sCMD, $aSpec, $aPipes);
if (!is_resource($hProc))
{
fail("Error converting osm to osc, osmosis failed\n");
}
fwrite($aPipes[0], $sModifyXMLstr);
fclose($aPipes[0]);
$sOut = stream_get_contents($aPipes[1]);
if ($aResult['verbose']) echo $sOut;
fclose($aPipes[1]);
$sErrors = stream_get_contents($aPipes[2]);
if ($aResult['verbose']) echo $sErrors;
fclose($aPipes[2]);
if ($iError = proc_close($hProc))
{
echo "Error converting osm to osc, osmosis returned: $iError\n";
echo $sOut;
echo $sErrors;
exit(-1);
}
}
$sModifyXMLstr = file_get_contents($sContentURL);
$bHaveDiff = true;
$aSpec = array(
0 => array("pipe", "r"), // stdin
1 => array("pipe", "w"), // stdout
2 => array("pipe", "w") // stderr
);
$sCMD = CONST_Osmosis_Binary.' --read-xml - --read-empty --derive-change --write-xml-change '.$sTemporaryFile;
echo $sCMD."\n";
$hProc = proc_open($sCMD, $aSpec, $aPipes);
if (!is_resource($hProc))
{
fail("Error converting osm to osc, osmosis failed\n");
}
fwrite($aPipes[0], $sModifyXMLstr);
fclose($aPipes[0]);
$sOut = stream_get_contents($aPipes[1]);
if ($aResult['verbose']) echo $sOut;
fclose($aPipes[1]);
$sErrors = stream_get_contents($aPipes[2]);
if ($aResult['verbose']) echo $sErrors;
fclose($aPipes[2]);
if ($iError = proc_close($hProc))
{
echo $sOut;
echo $sErrors;
fail("Error converting osm to osc, osmosis returned: $iError\n");
}
}
if ($bHaveDiff)
{
// import generated change file
$sCMD = $sOsm2pgsqlCmd.' '.$sTemporaryFile;
echo $sCMD."\n";
@@ -246,19 +178,19 @@
if ($aResult['deduplicate'])
{
$pgver = (float) CONST_Postgresql_Version;
if ($pgver < 9.3) {
if (getPostgresVersion() < 9.3)
{
fail("ERROR: deduplicate is only currently supported in postgresql 9.3");
}
$oDB =& getDB();
$sSQL = 'select partition from country_name order by country_code';
$aPartitions = $oDB->getCol($sSQL);
if (PEAR::isError($aPartitions))
{
fail($aPartitions->getMessage());
}
$aPartitions[] = 0;
$oDB =& getDB();
$sSQL = 'select partition from country_name order by country_code';
$aPartitions = $oDB->getCol($sSQL);
if (PEAR::isError($aPartitions))
{
fail($aPartitions->getMessage());
}
$aPartitions[] = 0;
$sSQL = "select word_token,count(*) from word where substr(word_token, 1, 1) = ' ' and class is null and type is null and country_code is null group by word_token having count(*) > 1 order by word_token";
$aDuplicateTokens = $oDB->getAll($sSQL);

@@ -341,7 +273,6 @@
exit(1);
}
}
}
}

@@ -358,92 +289,81 @@
}
$sImportFile = CONST_BasePath.'/data/osmosischange.osc';
$sOsmosisCMD = CONST_Osmosis_Binary;
$sOsmosisConfigDirectory = CONST_InstallPath.'/settings';
$sCMDDownload = $sOsmosisCMD.' --read-replication-interval workingDirectory='.$sOsmosisConfigDirectory.' --simplify-change --write-xml-change '.$sImportFile;
$sCMDCheckReplicationLag = $sOsmosisCMD.' -q --read-replication-lag workingDirectory='.$sOsmosisConfigDirectory;
$sCMDDownload = CONST_Osmosis_Binary.' --read-replication-interval workingDirectory='.$sOsmosisConfigDirectory.' --simplify-change --write-xml-change '.$sImportFile;
$sCMDCheckReplicationLag = CONST_Osmosis_Binary.' -q --read-replication-lag workingDirectory='.$sOsmosisConfigDirectory;
$sCMDImport = $sOsm2pgsqlCmd.' '.$sImportFile;
$sCMDIndex = CONST_InstallPath.'/nominatim/nominatim -i -d '.$aDSNInfo['database'].' -P '.$aDSNInfo['port'].' -t '.$aResult['index-instances'];
if (!$aResult['no-npi']) {
$sCMDIndex .= '-F ';
}
while(true)
{
$fStartTime = time();
$iFileSize = 1001;
// Logic behind this is that osm2pgsql locks the database quite a bit
// So it is better to import lots of small files
// But indexing works most efficiently on large amounts of data
// So do lots of small imports and a BIG index (see the loop sketch after this file's diff)
// while($aResult['import-osmosis-all'] && $iFileSize > 1000)
// {
if (!file_exists($sImportFile))
if (!file_exists($sImportFile))
{
// First check if there are new updates published (except for minutelies - there's always new diffs to process)
if ( CONST_Replication_Update_Interval > 60 )
{
// First check if there are new updates published (except for minutelies - there's always new diffs to process)
if ( CONST_Replication_Update_Interval > 60 )
{
unset($aReplicationLag);
exec($sCMDCheckReplicationLag, $aReplicationLag, $iErrorLevel);
while ($iErrorLevel > 0 || $aReplicationLag[0] < 1)
{
if ($iErrorLevel)
{
echo "Error: $iErrorLevel. ";
echo "Re-trying: ".$sCMDCheckReplicationLag." in ".CONST_Replication_Recheck_Interval." secs\n";
}
else
{
echo ".";
}
sleep(CONST_Replication_Recheck_Interval);
unset($aReplicationLag);
exec($sCMDCheckReplicationLag, $aReplicationLag, $iErrorLevel);
while ($iErrorLevel > 0 || $aReplicationLag[0] < 1)
{
if ($iErrorLevel)
{
echo "Error: $iErrorLevel. ";
echo "Re-trying: ".$sCMDCheckReplicationLag." in ".CONST_Replication_Recheck_Interval." secs\n";
}
else
{
echo ".";
}
sleep(CONST_Replication_Recheck_Interval);
unset($aReplicationLag);
exec($sCMDCheckReplicationLag, $aReplicationLag, $iErrorLevel);
}
// There are new replication files - use osmosis to download the file
echo "\n".date('Y-m-d H:i:s')." Replication Delay is ".$aReplicationLag[0]."\n";
}
$fStartTime = time();
$fCMDStartTime = time();
echo $sCMDDownload."\n";
exec($sCMDDownload, $sJunk, $iErrorLevel);
while ($iErrorLevel > 0)
{
echo "Error: $iErrorLevel\n";
sleep(60);
echo 'Re-trying: '.$sCMDDownload."\n";
exec($sCMDDownload, $sJunk, $iErrorLevel);
}
$iFileSize = filesize($sImportFile);
$sBatchEnd = getosmosistimestamp($sOsmosisConfigDirectory);
$sSQL = "INSERT INTO import_osmosis_log values ('$sBatchEnd',$iFileSize,'".date('Y-m-d H:i:s',$fCMDStartTime)."','".date('Y-m-d H:i:s')."','osmosis')";
var_Dump($sSQL);
$oDB->query($sSQL);
echo date('Y-m-d H:i:s')." Completed osmosis step for $sBatchEnd in ".round((time()-$fCMDStartTime)/60,2)." minutes\n";
// There are new replication files - use osmosis to download the file
echo "\n".date('Y-m-d H:i:s')." Replication Delay is ".$aReplicationLag[0]."\n";
}
$iFileSize = filesize($sImportFile);
$sBatchEnd = getosmosistimestamp($sOsmosisConfigDirectory);
// Import the file
$fStartTime = time();
$fCMDStartTime = time();
echo $sCMDImport."\n";
exec($sCMDImport, $sJunk, $iErrorLevel);
if ($iErrorLevel)
echo $sCMDDownload."\n";
exec($sCMDDownload, $sJunk, $iErrorLevel);
while ($iErrorLevel > 0)
{
echo "Error: $iErrorLevel\n";
exit($iErrorLevel);
sleep(60);
echo 'Re-trying: '.$sCMDDownload."\n";
exec($sCMDDownload, $sJunk, $iErrorLevel);
}
$sSQL = "INSERT INTO import_osmosis_log values ('$sBatchEnd',$iFileSize,'".date('Y-m-d H:i:s',$fCMDStartTime)."','".date('Y-m-d H:i:s')."','osm2pgsql')";
$iFileSize = filesize($sImportFile);
$sBatchEnd = getosmosistimestamp($sOsmosisConfigDirectory);
$sSQL = "INSERT INTO import_osmosis_log values ('$sBatchEnd',$iFileSize,'".date('Y-m-d H:i:s',$fCMDStartTime)."','".date('Y-m-d H:i:s')."','osmosis')";
var_Dump($sSQL);
$oDB->query($sSQL);
echo date('Y-m-d H:i:s')." Completed osm2pgsql step for $sBatchEnd in ".round((time()-$fCMDStartTime)/60,2)." minutes\n";
echo date('Y-m-d H:i:s')." Completed osmosis step for $sBatchEnd in ".round((time()-$fCMDStartTime)/60,2)." minutes\n";
}
// Archive for debug?
unlink($sImportFile);
// }
$iFileSize = filesize($sImportFile);
$sBatchEnd = getosmosistimestamp($sOsmosisConfigDirectory);
// Import the file
$fCMDStartTime = time();
echo $sCMDImport."\n";
exec($sCMDImport, $sJunk, $iErrorLevel);
if ($iErrorLevel)
{
echo "Error: $iErrorLevel\n";
exit($iErrorLevel);
}
$sSQL = "INSERT INTO import_osmosis_log values ('$sBatchEnd',$iFileSize,'".date('Y-m-d H:i:s',$fCMDStartTime)."','".date('Y-m-d H:i:s')."','osm2pgsql')";
var_Dump($sSQL);
$oDB->query($sSQL);
echo date('Y-m-d H:i:s')." Completed osm2pgsql step for $sBatchEnd in ".round((time()-$fCMDStartTime)/60,2)." minutes\n";
// Archive for debug?
unlink($sImportFile);
$sBatchEnd = getosmosistimestamp($sOsmosisConfigDirectory);

@@ -451,31 +371,6 @@
$sThisIndexCmd = $sCMDIndex;
$fCMDStartTime = time();
if (!$aResult['no-npi'])
{
$iFileID = $oDB->getOne('select nextval(\'file\')');
if (PEAR::isError($iFileID))
{
echo $iFileID->getMessage()."\n";
exit(-1);
}
$sFileDir = CONST_BasePath.'/export/diff/';
$sFileDir .= str_pad(floor($iFileID/1000000), 3, '0', STR_PAD_LEFT);
$sFileDir .= '/'.str_pad(floor($iFileID/1000) % 1000, 3, '0', STR_PAD_LEFT);
if (!is_dir($sFileDir)) mkdir($sFileDir, 0777, true);
$sThisIndexCmd .= $sFileDir;
$sThisIndexCmd .= '/'.str_pad($iFileID % 1000, 3, '0', STR_PAD_LEFT);
$sThisIndexCmd .= ".npi.out";
preg_match('#^([0-9]{4})-([0-9]{2})-([0-9]{2})#', $sBatchEnd, $aBatchMatch);
$sFileDir = CONST_BasePath.'/export/index/';
$sFileDir .= $aBatchMatch[1].'/'.$aBatchMatch[2];
if (!is_dir($sFileDir)) mkdir($sFileDir, 0777, true);
file_put_contents($sFileDir.'/'.$aBatchMatch[3].'.idx', "$sBatchEnd\t$iFileID\n", FILE_APPEND);
}
if (!$aResult['no-index'])
{
echo "$sThisIndexCmd\n";

@@ -485,25 +380,6 @@
echo "Error: $iErrorLevel\n";
exit($iErrorLevel);
}
if (!$aResult['no-npi'])
{
$sFileDir = CONST_BasePath.'/export/diff/';
$sFileDir .= str_pad(floor($iFileID/1000000), 3, '0', STR_PAD_LEFT);
$sFileDir .= '/'.str_pad(floor($iFileID/1000) % 1000, 3, '0', STR_PAD_LEFT);
$sThisIndexCmd = 'bzip2 -z9 '.$sFileDir.'/'.str_pad($iFileID % 1000, 3, '0', STR_PAD_LEFT).".npi.out";
echo "$sThisIndexCmd\n";
exec($sThisIndexCmd, $sJunk, $iErrorLevel);
if ($iErrorLevel)
{
echo "Error: $iErrorLevel\n";
exit($iErrorLevel);
}
rename($sFileDir.'/'.str_pad($iFileID % 1000, 3, '0', STR_PAD_LEFT).".npi.out.bz2",
$sFileDir.'/'.str_pad($iFileID % 1000, 3, '0', STR_PAD_LEFT).".npi.bz2");
}
}
$sSQL = "INSERT INTO import_osmosis_log values ('$sBatchEnd',$iFileSize,'".date('Y-m-d H:i:s',$fCMDStartTime)."','".date('Y-m-d H:i:s')."','index')";

@@ -529,55 +405,6 @@
echo date('Y-m-d H:i:s')." Sleeping $iSleep seconds\n";
sleep($iSleep);
}
}
if ($aResult['import-npi-all'])
{
$iNPIID = $oDB->getOne('select max(npiid) from import_npi_log');
if (PEAR::isError($iNPIID))
{
var_dump($iNPIID);
exit(1);
}
$sConfigDirectory = CONST_InstallPath.'/settings';
$sCMDImportTemplate = CONST_InstallPath.'/nominatim/nominatim -d gazetteer -P 5433 -I -T '.CONST_BasePath.'/settings/partitionedtags.def -F ';
while(true)
{
$fStartTime = time();
$iNPIID++;
$sImportFile = CONST_BasePath.'/export/diff/';
$sImportFile .= str_pad(floor($iNPIID/1000000), 3, '0', STR_PAD_LEFT);
$sImportFile .= '/'.str_pad(floor($iNPIID/1000) % 1000, 3, '0', STR_PAD_LEFT);
$sImportFile .= '/'.str_pad($iNPIID % 1000, 3, '0', STR_PAD_LEFT);
$sImportFile .= ".npi";
while(!file_exists($sImportFile) && !file_exists($sImportFile.'.bz2'))
{
echo "sleep (waiting for $sImportFile)\n";
sleep(10);
}
if (file_exists($sImportFile.'.bz2')) $sImportFile .= '.bz2';
$iFileSize = filesize($sImportFile);
// Import the file
$fCMDStartTime = time();
$sCMDImport = $sCMDImportTemplate . $sImportFile;
echo $sCMDImport."\n";
exec($sCMDImport, $sJunk, $iErrorLevel);
if ($iErrorLevel)
{
fail("Error: $iErrorLevel\n");
}
$sBatchEnd = $iNPIID;
echo "Completed for $sBatchEnd in ".round((time()-$fCMDStartTime)/60,2)." minutes\n";
$sSQL = "INSERT INTO import_npi_log values ($iNPIID, null, $iFileSize,'".date('Y-m-d H:i:s',$fCMDStartTime)."','".date('Y-m-d H:i:s')."','import')";
var_Dump($sSQL);
$oDB->query($sSQL);
}
}
function getosmosistimestamp($sOsmosisConfigDirectory)
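The replication loop in this file follows the reasoning in the "Logic behind this ..." comment above: osm2pgsql locks the database heavily, so many small diffs are applied one after another, while indexing is deferred into a single large batch where it is most efficient. A condensed sketch of that control flow, assuming three hypothetical helpers (downloadDiff, importDiff, runIndexer) in place of the real osmosis, osm2pgsql and nominatim invocations:

<?php
// Illustrative stand-ins for the external commands called by utils/update.php.
function downloadDiff($sFile) { /* osmosis --read-replication-interval ... */ return file_exists($sFile); }
function importDiff($sFile)   { /* osm2pgsql --append ... */ }
function runIndexer()         { /* nominatim -i -d <database> -t <instances> ... */ }

$sImportFile = '/tmp/osmosischange.osc';   // hypothetical path for the downloaded diff

while (true)
{
    // Small imports: osm2pgsql takes heavy locks, so each downloaded diff
    // is applied immediately instead of being batched up.
    if (downloadDiff($sImportFile))
    {
        importDiff($sImportFile);
        unlink($sImportFile);
    }

    // Big index: one indexing pass per cycle, since indexing works best
    // on large amounts of data.
    runIndexer();

    sleep(60);   // wait before polling for the next replication diff
}

This is only a sketch of the loop structure; the real script additionally records each osmosis, osm2pgsql and indexing step in import_osmosis_log and honours the replication-lag check shown above.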

@@ -56,10 +56,17 @@
$iPlaceID = (int)$_GET['place_id'];
$iParentPlaceID = $oDB->getOne('select parent_place_id from location_property_tiger where place_id = '.$iPlaceID);
if ($iParentPlaceID) $iPlaceID = $iParentPlaceID;
$iParentPlaceID = $oDB->getOne('select parent_place_id from location_property_aux where place_id = '.$iPlaceID);
if ($iParentPlaceID) $iPlaceID = $iParentPlaceID;
if (CONST_Use_US_Tiger_Data)
{
$iParentPlaceID = $oDB->getOne('select parent_place_id from location_property_tiger where place_id = '.$iPlaceID);
if ($iParentPlaceID) $iPlaceID = $iParentPlaceID;
}
if (CONST_Use_Aux_Location_data)
{
$iParentPlaceID = $oDB->getOne('select parent_place_id from location_property_aux where place_id = '.$iPlaceID);
if ($iParentPlaceID) $iPlaceID = $iParentPlaceID;
}
$hLog = logStart($oDB, 'details', $_SERVER['QUERY_STRING'], $aLangPrefOrder);

@@ -47,13 +47,13 @@
$iPlaceID = (int)$_GET['place_id'];
$sAuxHouseNumber = false;
$iParentPlaceID = $oDB->getOne('select parent_place_id from location_property_tiger where place_id = '.$iPlaceID);
if ($iParentPlaceID)
if (CONST_Use_US_Tiger_Data)
{
$iPlaceID = $iParentPlaceID;
$iParentPlaceID = $oDB->getOne('select parent_place_id from location_property_tiger where place_id = '.$iPlaceID);
if ($iParentPlaceID) $iPlaceID = $iParentPlaceID;
}
else
if (CONST_Use_Aux_Location_data)
{
$iParentPlaceID = $oDB->getOne('select parent_place_id from location_property_aux where place_id = '.$iPlaceID);
if ($iParentPlaceID) $iPlaceID = $iParentPlaceID;