add functional tests

These are the tests that were formerly located at
https://github.com/lonvia/test-nominatim
This commit is contained in:
Sarah Hoffmann
2014-09-07 17:02:10 +02:00
parent e7ee9c2d84
commit 86d90bc460
43 changed files with 4601 additions and 0 deletions

232
tests/steps/api_result.py Normal file
View File

@@ -0,0 +1,232 @@
""" Steps for checking the results of queries.
"""
from nose.tools import *
from lettuce import *
from tidylib import tidy_document
from collections import OrderedDict
import json
import logging
import re
from xml.dom.minidom import parseString
logger = logging.getLogger(__name__)
def _parse_xml():
    """ Puts the DOM structure into a more convenient python form
        with a similar structure as the json document, so that the
        same semantics can be used for checking. It does not check
        if the content is valid (or at least not more than necessary
        to transform it into a dict structure).

        Fills world.result_header with the root element's attributes
        and world.results with one OrderedDict per result.
    """
    page = parseString(world.page).documentElement
    # header info
    world.result_header = OrderedDict(page.attributes.items())
    logger.debug('Result header: %r' % (world.result_header))
    world.results = []
    # results
    if page.nodeName == 'searchresults':
        # forward search: one <place> element per result
        for node in page.childNodes:
            if node.nodeName != "#text":
                assert_equals(node.nodeName, 'place', msg="Unexpected element '%s'" % node.nodeName)
                newresult = OrderedDict(node.attributes.items())
                # these keys are filled from child elements below and must
                # not clash with attributes of the same name
                assert_not_in('address', newresult)
                assert_not_in('geokml', newresult)
                address = OrderedDict()
                for sub in node.childNodes:
                    if sub.nodeName == 'geokml':
                        # keep the raw KML markup
                        newresult['geokml'] = sub.childNodes[0].toxml()
                    elif sub.nodeName == '#text':
                        pass
                    else:
                        # any other child element is an address part
                        address[sub.nodeName] = sub.firstChild.nodeValue.strip()
                if address:
                    newresult['address'] = address
                world.results.append(newresult)
    elif page.nodeName == 'reversegeocode':
        # reverse search: at most one <result> or one <error> element
        haserror = False
        address = {}
        for node in page.childNodes:
            if node.nodeName == 'result':
                assert_equals(len(world.results), 0)
                assert (not haserror)
                world.results.append(OrderedDict(node.attributes.items()))
                assert_not_in('display_name', world.results[0])
                assert_not_in('address', world.results[0])
                # the element text is the display name
                world.results[0]['display_name'] = node.firstChild.nodeValue.strip()
            elif node.nodeName == 'error':
                assert_equals(len(world.results), 0)
                haserror = True
            elif node.nodeName == 'addressparts':
                # address details must belong to a previous <result>
                assert (not haserror)
                address = OrderedDict()
                for sub in node.childNodes:
                    address[sub.nodeName] = sub.firstChild.nodeValue.strip()
                world.results[0]['address'] = address
            elif node.nodeName == "#text":
                pass
            else:
                assert False, "Unknown content '%s' in XML" % node.nodeName
    else:
        assert False, "Unknown document node name %s in XML" % page.nodeName
    logger.debug("The following was parsed out of XML:")
    logger.debug(world.results)
@step(u'a HTTP (\d+) is returned')
def api_result_http_error(step, error):
    """ Check that the request ended with the given HTTP status code. """
    expected = int(error)
    assert_equals(world.returncode, expected)
@step(u'the result is valid( \w+)?')
def api_result_is_valid(step, fmt):
    """ Check that the request succeeded and parse the page according to
        the detected response format. Optionally checks that the detected
        format matches the expected one.
    """
    assert_equals(world.returncode, 200)
    response_fmt = world.response_format
    if response_fmt == 'html':
        document, errors = tidy_document(world.page,
                                         options={'char-encoding' : 'utf8'})
        assert(len(errors) == 0), "Errors found in HTML document:\n%s" % errors
        world.results = document
    elif response_fmt == 'xml':
        _parse_xml()
    elif response_fmt == 'json':
        decoder = json.JSONDecoder(object_pairs_hook=OrderedDict)
        world.results = decoder.decode(world.page)
    else:
        assert False, "Unknown page format: %s" % (response_fmt)
    if fmt:
        assert_equals(fmt.strip(), world.response_format)
def compare(operator, op1, op2):
    """ Apply the comparison named by *operator* ('less than', 'more than',
        'exactly', 'at least', 'at most') to op1 and op2 and return the
        boolean result. Raises for an unknown operator name.
    """
    checks = {
        'less than': lambda a, b: a < b,
        'more than': lambda a, b: a > b,
        'exactly':   lambda a, b: a == b,
        'at least':  lambda a, b: a >= b,
        'at most':   lambda a, b: a <= b,
    }
    try:
        check = checks[operator]
    except KeyError:
        raise Exception("unknown operator '%s'" % operator)
    return check(op1, op2)
@step(u'(less than|more than|exactly|at least|at most) (\d+) results? (?:is|are) returned')
def validate_result_number(step, operator, number):
    """ Check the number of returned results against an expected count
        using the given comparison operator.
    """
    step.given('the result is valid')
    expected = int(number)
    numres = len(world.results)
    assert compare(operator, numres, expected), \
        "Bad number of results: expected %s %s, got %d." % (operator, number, numres)
@step(u'result (\d+) has( not)? attributes (\S+)')
def search_check_for_result_attribute(step, num, invalid, attrs):
    """ Check that result *num* has (or, with 'not', does not have) each
        attribute in the given comma-separated list.
    """
    num = int(num)
    step.given('at least %d results are returned' % (num + 1))
    res = world.results[num]
    check = assert_not_in if invalid else assert_in
    for attr in attrs.split(','):
        check(attr.strip(), res)
@step(u'there is a json wrapper "([^"]*)"')
def api_result_check_json_wrapper(step, wrapper):
    """ Check that the JSON response was wrapped in the given jsonp
        callback function.
    """
    step.given('the result is valid json')
    expected = wrapper
    assert_equals(world.json_callback, expected)
@step(u'result header contains')
def api_result_header_contains(step):
    """ Check that the result header contains the given attributes and
        that their values fully match the given regular expressions.
    """
    step.given('the result is valid')
    for line in step.hashes:
        assert_in(line['attr'], world.result_header)
        value = world.result_header[line['attr']]
        m = re.match("%s$" % (line['value'],), value)
        # fix: the match result was previously computed but never checked,
        # so a mismatching header value silently passed this step
        assert_is_not_none(m, msg="attribute '%s' does not match: %s$ != %s."
                                  % (line['attr'], line['value'], value))
@step(u'results contain$')
def api_result_contains(step):
    """ Check that results contain the given field values. If an 'ID'
        column is present, only the result with that index is checked,
        otherwise all results must match. A value starting with one of
        '<', '>' or '=' is evaluated as a numeric expression against the
        result field; any other value is treated as a regular expression
        that must match the complete field. 'latlon' compares the result
        coordinates via world.match_geometry.
    """
    step.given('at least 1 result is returned')
    for line in step.hashes:
        if 'ID' in line:
            reslist = (world.results[int(line['ID'])],)
        else:
            reslist = world.results
        for k,v in line.iteritems():
            if k == 'latlon':
                for curres in reslist:
                    world.match_geometry((float(curres['lat']), float(curres['lon'])), v)
            elif k != 'ID':
                for curres in reslist:
                    assert_in(k, curres)
                    if v[0] in '<>=':
                        # mathematical operation
                        # NOTE(security): eval() of strings from the test
                        # definitions only -- never feed external data here
                        evalexp = '%s %s' % (curres[k], v)
                        res = eval(evalexp)
                        logger.debug('Evaluating: %s = %s' % (res, evalexp))
                        assert_true(res, "Evaluation failed: %s" % (evalexp, ))
                    else:
                        # regex match against the complete field value
                        m = re.match("%s$" % (v,), curres[k])
                        assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))
@step(u'result addresses contain$')
def api_result_address_contains(step):
    """ Check that the address parts of results match the given regular
        expressions. If an 'ID' column is present, only the result with
        that index is checked, otherwise all results must match.
    """
    step.given('the result is valid')
    for line in step.hashes:
        if 'ID' in line:
            reslist = (world.results[int(line['ID'])],)
        else:
            reslist = world.results
        for k,v in line.iteritems():
            if k != 'ID':
                for res in reslist:
                    curres = res['address']
                    assert_in(k, curres)
                    # full regex match against the address part
                    m = re.match("%s$" % (v,), curres[k])
                    assert_is_not_none(m, msg="field %s does not match: %s$ != %s." % (k, v, curres[k]))
@step(u'address of result (\d+) contains')
def api_result_address_exact(step, resid):
    """ Check that the address of the given result contains the expected
        type/value pairs. Additional address parts may be present.
    """
    resid = int(resid)
    step.given('at least %d results are returned' % (resid + 1))
    addr = world.results[resid]['address']
    for line in step.hashes:
        assert_in(line['type'], addr)
        assert_equals(line['value'], addr[line['type']])
@step(u'address of result (\d+) does not contain (.*)')
def api_result_address_details_missing(step, resid, types):
    """ Check that none of the given comma-separated address types appear
        in the address of the given result.
    """
    resid = int(resid)
    step.given('at least %d results are returned' % (resid + 1))
    addr = world.results[resid]['address']
    for part in types.split(','):
        assert_not_in(part.strip(), addr)
@step(u'address of result (\d+) is')
def api_result_address_is_exact(step, resid):
    """ Check that the address of the given result consists of exactly
        the given type/value pairs in exactly the given order.
    """
    # fix: this function was previously also named api_result_address_exact,
    # redefining the function for 'address of result (N) contains'. The step
    # registration still worked, but the redefinition shadowed the first
    # function in the module namespace and confused linters and readers.
    resid = int(resid)
    step.given('at least %d results are returned' % (resid + 1))
    result = world.results[resid]
    linenr = 0
    # same number of address lines expected as given
    assert_equals(len(step.hashes), len(result['address']))
    for k,v in result['address'].iteritems():
        assert_equals(step.hashes[linenr]['type'], k)
        assert_equals(step.hashes[linenr]['value'], v)
        linenr += 1
@step('there are( no)? duplicates')
def api_result_check_for_duplicates(step, nodups=None):
    """ Check whether the result list contains duplicates, where two
        results count as duplicates when osm_type, class, type and
        display_name all agree.
    """
    step.given('at least 1 result is returned')
    seen = [(res['osm_type'], res['class'], res['type'], res['display_name'])
            for res in world.results]
    if nodups is None:
        # duplicates expected
        assert len(seen) > len(set(seen))
    else:
        # every entry must be unique
        assert_equal(len(seen), len(set(seen)))

114
tests/steps/api_setup.py Normal file
View File

@@ -0,0 +1,114 @@
""" Steps for setting up and sending API requests.
"""
from nose.tools import *
from lettuce import *
import urllib
import urllib2
import logging
logger = logging.getLogger(__name__)
def api_call(requesttype):
    """ Send the currently collected parameters and headers to the API
        endpoint *requesttype* (search, reverse, details, ...).

        Sets world.page to the response body, world.returncode to the
        HTTP status and world.response_format to the detected format.
        For jsonp requests the callback wrapper is stripped from the
        page and remembered in world.json_callback. On an HTTP error
        only the status code is recorded.
    """
    world.json_callback = None
    data = urllib.urlencode(world.params)
    url = "%s/%s?%s" % (world.config.base_url, requesttype, data)
    req = urllib2.Request(url=url, headers=world.header)
    try:
        fd = urllib2.urlopen(req)
        world.page = fd.read()
        world.returncode = 200
    except urllib2.HTTPError, ex:
        # remember the error code so 'a HTTP NNN is returned' can check it
        world.returncode = ex.code
        world.page = None
        return
    pageinfo = fd.info()
    assert_equal('utf-8', pageinfo.getparam('charset').lower())
    pagetype = pageinfo.gettype()
    # derive the response format from the requested format parameter and
    # check that the content type of the response matches it
    fmt = world.params.get('format')
    if fmt == 'html':
        assert_equals('text/html', pagetype)
        world.response_format = fmt
    elif fmt == 'xml':
        assert_equals('text/xml', pagetype)
        world.response_format = fmt
    elif fmt in ('json', 'jsonv2'):
        if 'json_callback' in world.params:
            # jsonp: strip the callback wrapper from the page
            world.json_callback = world.params['json_callback']
            assert world.page.startswith(world.json_callback + '(')
            assert world.page.endswith(')')
            world.page = world.page[(len(world.json_callback)+1):-1]
            assert_equals('application/javascript', pagetype)
        else:
            assert_equals('application/json', pagetype)
        world.response_format = 'json'
    else:
        # no explicit format requested: fall back to the API defaults
        if requesttype == 'reverse':
            assert_equals('text/xml', pagetype)
            world.response_format = 'xml'
        else:
            assert_equals('text/html', pagetype)
            world.response_format = 'html'
    logger.debug("Page received (%s):" % world.response_format)
    logger.debug(world.page)
    # reset the request state so the next request starts from a clean
    # slate (the forward reference is resolved at call time, so referring
    # to api_setup_prepare_params defined below is fine)
    api_setup_prepare_params(None)
@before.each_scenario
def api_setup_prepare_params(scenario):
    """ Reset the collected request state before each scenario. """
    world.header = {}
    world.params = {}
    world.results = []
@step(u'the request parameters$')
def api_setup_parameters(step):
    """Define the query parameters of the request as a hash.
       Replaces any previously set parameters.
    """
    world.params = step.hashes[0]
@step(u'the HTTP header$')
def api_setup_header(step):
    """Define additional HTTP header fields for the request as a hash.
       Replaces any previously set headers.
    """
    # fix: this function was previously also named api_setup_parameters,
    # redefining the step function for 'the request parameters'. The step
    # registration still worked, but the redefinition shadowed the first
    # function in the module namespace.
    world.header = step.hashes[0]
@step(u'sending( \w+)? search query "([^"]*)"( with address)?')
def api_setup_search(step, fmt, query, doaddr):
    """ Issue a search request for the given query string, optionally
        requesting a specific format and address details.
    """
    world.params['q'] = query.encode('utf8')
    if fmt:
        world.params['format'] = fmt.strip()
    if doaddr:
        world.params['addressdetails'] = 1
    api_call('search')
@step(u'sending( \w+)? structured query( with address)?$')
def api_setup_structured_search(step, fmt, doaddr):
    """ Issue a structured search request with the parameters given as a
        hash, optionally requesting a specific format and address details.
    """
    world.params.update(step.hashes[0])
    if fmt:
        world.params['format'] = fmt.strip()
    if doaddr:
        world.params['addressdetails'] = 1
    api_call('search')
@step(u'looking up (\w+ )?coordinates ([-\d.]+),([-\d.]+)')
def api_setup_reverse(step, fmt, lat, lon):
    """ Issue a reverse geocoding request for the given coordinates,
        optionally requesting a specific format.
    """
    world.params.update({'lat': lat, 'lon': lon})
    if fmt and fmt.strip():
        world.params['format'] = fmt.strip()
    api_call('reverse')
@step(u'looking up details for ([NRW]?\d+)')
def api_setup_details(step, obj):
    """ Issue a details request, either by OSM id (identifier prefixed
        with N, R or W) or by plain place id.
    """
    if obj[0] in 'NRW':
        # an OSM id: type letter followed by the numeric id
        world.params['osmtype'] = obj[0]
        world.params['osmid'] = obj[1:]
    else:
        world.params['place_id'] = obj
    api_call('details')

124
tests/steps/db_results.py Normal file
View File

@@ -0,0 +1,124 @@
""" Steps for checking the DB after import and update tests.
There are two groups of test here. The first group tests
the contents of db tables directly, the second checks
query results by using the command line query tool.
"""
from nose.tools import *
from lettuce import *
import psycopg2
import psycopg2.extensions
import psycopg2.extras
import os
import subprocess
import random
import json
import re
import logging
from collections import OrderedDict
logger = logging.getLogger(__name__)
@step(u'table placex contains as names for (N|R|W)(\d+)')
def check_placex_names(step, osmtyp, osmid):
    """ Check for the exact content of the name hstore in placex.
        The step table gives the expected key/value pairs; every matching
        row must contain exactly these entries and no others.
    """
    cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    cur.execute('SELECT name FROM placex where osm_type = %s and osm_id =%s', (osmtyp, int(osmid)))
    for line in cur:
        names = dict(line['name'])
        for name in step.hashes:
            assert_in(name['k'], names)
            assert_equals(names[name['k']], name['v'])
            # consume the entry so leftovers can be detected below
            del names[name['k']]
        # nothing may be left over after all expected names were removed
        assert_equals(len(names), 0)
@step(u'table ([a-z_]+) contains$')
def check_placex_content(step, tablename):
    """ Check that the given lines are in the given table.
        Entries are searched by osm_type/osm_id and then all
        given columns are tested. If there is more than one
        line for an OSM object, they must all match in these columns.
    """
    cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    for line in step.hashes:
        osmtype, osmid, cls = world.split_id(line['object'])
        q = 'SELECT *'
        if tablename == 'placex':
            # also expose the centroid as plain coordinates for comparison
            # NOTE(review): ST_X is longitude and ST_Y latitude, so the
            # 'clat'/'clon' aliases look swapped -- confirm against the
            # coordinate order used in the centroid match strings
            q = q + ", ST_X(centroid) as clat, ST_Y(centroid) as clon"
        # tablename comes from the step definition, not from external
        # input, so interpolating the identifier is acceptable here
        q = q + ' FROM %s where osm_type = %%s and osm_id = %%s' % (tablename,)
        if cls is None:
            params = (osmtype, osmid)
        else:
            q = q + ' and class = %s'
            params = (osmtype, osmid, cls)
        cur.execute(q, params)
        assert(cur.rowcount > 0)
        for res in cur:
            for k,v in line.iteritems():
                if not k == 'object':
                    assert_in(k, res)
                    if type(res[k]) is dict:
                        # hstore column: expected value given in hash notation
                        val = world.make_hash(v)
                        assert_equals(res[k], val)
                    elif k in ('parent_place_id', 'linked_place_id'):
                        # place ids cannot be stated literally in tests,
                        # resolve the unique identifier first
                        pid = world.get_placeid(v)
                        assert_equals(pid, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, pid, res[k]))
                    elif k == 'centroid':
                        world.match_geometry((res['clat'], res['clon']), v)
                    else:
                        assert_equals(str(res[k]), v, "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['object'], k, str(res[k]), v))
@step(u'table (placex?) has no entry for (N|R|W)(\d+)(:\w+)?')
def check_placex_missing(step, tablename, osmtyp, osmid, placeclass):
    """ Check that the given table has no entry for the OSM object,
        optionally restricted to a single class.
    """
    cur = world.conn.cursor()
    query = 'SELECT count(*) FROM %s where osm_type = %%s and osm_id = %%s' % (tablename, )
    args = [osmtyp, int(osmid)]
    if placeclass is not None:
        query += ' and class = %s'
        args.append(placeclass[1:])
    cur.execute(query, args)
    assert_equals(cur.fetchone()[0], 0)
@step(u'search_name table contains$')
def check_search_name_content(step):
    """ Check the contents of the search_name table. Each line of the
        step table identifies a row by its place_id (in unique-identifier
        form, see world.split_id) and checks the listed columns.
        name_vector/nameaddress_vector entries are given as comma-separated
        search terms, where '#' stands for a space inside a term.
    """
    cur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # fix: the word lookup below previously reused `cur`, discarding the
    # outer result set while it was still being iterated; use a separate
    # cursor for the nested query
    subcur = world.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    for line in step.hashes:
        placeid = world.get_placeid(line['place_id'])
        cur.execute('SELECT * FROM search_name WHERE place_id = %s', (placeid,))
        assert(cur.rowcount > 0)
        for res in cur:
            for k,v in line.iteritems():
                if k in ('search_rank', 'address_rank'):
                    assert_equals(int(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%d'" % (line['place_id'], k, v, res[k]))
                elif k in ('importance',):
                    # fix: was `k in ('importance')`, a substring test on the
                    # string 'importance' instead of tuple membership; the
                    # message also used %d, truncating the float value
                    assert_equals(float(v), res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['place_id'], k, v, res[k]))
                elif k in ('name_vector', 'nameaddress_vector'):
                    terms = [x.strip().replace('#', ' ') for x in v.split(',')]
                    subcur.execute('SELECT word_id, word_token FROM word, (SELECT unnest(%s) as term) t WHERE word_token = make_standard_name(t.term)', (terms,))
                    assert subcur.rowcount >= len(terms)
                    for wid in subcur:
                        assert_in(wid['word_id'], res[k], "Missing term for %s/%s: %s" % (line['place_id'], k, wid['word_token']))
                elif k in ('country_code',):
                    # fix: was `k in ('country_code')` (substring test) and the
                    # message used %d on a string value, which raises TypeError
                    # when the assertion actually fails
                    assert_equals(v, res[k], "Results for '%s'/'%s' differ: '%s' != '%s'" % (line['place_id'], k, v, res[k]))
                elif k == 'place_id':
                    pass
                else:
                    raise Exception("Cannot handle field %s in search_name table" % (k, ))
@step(u'table search_name has no entry for (.*)')
def check_search_name_missing(step, osmid):
    """ Checks that there is no entry in the search index for the
        given place object.
    """
    # fix: this function was previously also named check_placex_missing,
    # redefining the step function for 'table ... has no entry for ...'.
    # The step registration still worked, but the redefinition shadowed
    # the first function in the module namespace.
    cur = world.conn.cursor()
    placeid = world.get_placeid(osmid)
    cur.execute('SELECT count(*) FROM search_name WHERE place_id =%s', (placeid,))
    numres = cur.fetchone()[0]
    assert_equals (numres, 0)

272
tests/steps/db_setup.py Normal file
View File

@@ -0,0 +1,272 @@
""" Steps for setting up a test database with imports and updates.
There are two ways to state geometries for test data: with coordinates
and via scenes.
Coordinates should be given as a wkt without the enclosing type name.
Scenes are prepared geometries which can be found in the scenes/data/
directory. Each scene is saved in a .wkt file with its name, which
contains a list of id/wkt pairs. A scene can be set globally
for a scenario by using the step `the scene <scene name>`. Then each
object should be referred to as `:<object id>`. A geometry can also
be referred to without loading the scene by explicitly stating the
scene: `<scene name>:<object id>`.
"""
from nose.tools import *
from lettuce import *
import psycopg2
import psycopg2.extensions
import psycopg2.extras
import os
import subprocess
import random
import base64
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
@before.each_scenario
def setup_test_database(scenario):
    """ Creates a new test database from the template database
        that was set up earlier in terrain.py. Will be done only
        for scenarios whose feature is tagged with 'DB'.
    """
    if scenario.feature.tags is not None and 'DB' in scenario.feature.tags:
        world.db_template_setup()
        world.write_nominatim_config(world.config.test_db)
        conn = psycopg2.connect(database=world.config.template_db)
        # autocommit mode: CREATE/DROP DATABASE cannot run in a transaction
        conn.set_isolation_level(0)
        cur = conn.cursor()
        # database names cannot be passed as query parameters; they come
        # from the test configuration, not from external input
        cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
        cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
        conn.close()
        world.conn = psycopg2.connect(database=world.config.test_db)
        psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
@step('a wiped database')
def db_setup_wipe_db(step):
    """Explicit DB scenario setup, only needed
       to work around a bug where scenario outlines don't call
       before_each_scenario correctly. Recreates the test database
       from the template, just like setup_test_database does.
    """
    if hasattr(world, 'conn'):
        world.conn.close()
    conn = psycopg2.connect(database=world.config.template_db)
    # autocommit mode: CREATE/DROP DATABASE cannot run in a transaction
    conn.set_isolation_level(0)
    cur = conn.cursor()
    cur.execute('DROP DATABASE IF EXISTS %s' % (world.config.test_db, ))
    cur.execute('CREATE DATABASE %s TEMPLATE = %s' % (world.config.test_db, world.config.template_db))
    conn.close()
    world.conn = psycopg2.connect(database=world.config.test_db)
    psycopg2.extras.register_hstore(world.conn, globally=False, unicode=True)
@after.each_scenario
def tear_down_test_database(scenario):
    """ Close the scenario connection and drop the test database again,
        unless the user asked to keep it for inspection.
    """
    if hasattr(world, 'conn'):
        world.conn.close()
    tags = scenario.feature.tags
    if tags is not None and 'DB' in tags and not world.config.keep_scenario_db:
        conn = psycopg2.connect(database=world.config.template_db)
        # autocommit mode is required for DROP DATABASE
        conn.set_isolation_level(0)
        cur = conn.cursor()
        cur.execute('DROP DATABASE %s' % (world.config.test_db,))
        conn.close()
def _format_placex_cols(cols, geomtype, force_name):
    """ Normalise a column hash for insertion into the place table.
        Converts name and extratags values into hstore dicts, generates
        a random name when force_name is set, defaults admin_level and
        turns the geometry column into an SQL geometry expression, either
        from a scene reference or from plain wkt coordinates.
        Modifies *cols* in place.
    """
    if 'name' in cols:
        if cols['name'].startswith("'"):
            # quoted value: full key/value hash notation
            cols['name'] = world.make_hash(cols['name'])
        else:
            # plain value: use it as the default 'name' tag
            cols['name'] = { 'name' : cols['name'] }
    elif force_name:
        # generate a random name so the object becomes searchable
        cols['name'] = { 'name' : base64.urlsafe_b64encode(os.urandom(int(random.random()*30))) }
    if 'extratags' in cols:
        cols['extratags'] = world.make_hash(cols['extratags'])
    if 'admin_level' not in cols:
        cols['admin_level'] = 100
    if 'geometry' in cols:
        coords = world.get_scene_geometry(cols['geometry'])
        if coords is None:
            # not a scene reference: interpret as plain wkt coordinates
            coords = "'%s(%s)'::geometry" % (geomtype, cols['geometry'])
        else:
            coords = "'%s'::geometry" % coords.wkt
        cols['geometry'] = coords
def _insert_place_table_nodes(places, force_name):
    """ Insert the given node rows into the place table, using a random
        location when no geometry is given.
    """
    cur = world.conn.cursor()
    for line in places:
        cols = dict(line)
        cols['osm_type'] = 'N'
        _format_placex_cols(cols, 'POINT', force_name)
        if 'geometry' in cols:
            coords = cols.pop('geometry')
        else:
            # no geometry given: place the node at a random point
            coords = "ST_Point(%f, %f)" % (random.random()*360 - 180, random.random()*180 - 90)
        # the geometry is already an SQL expression and must be interpolated;
        # all other columns are passed as query parameters
        query = 'INSERT INTO place (%s,geometry) values(%s, ST_SetSRID(%s, 4326))' % (
            ','.join(cols.iterkeys()),
            ','.join(['%s' for x in range(len(cols))]),
            coords
        )
        cur.execute(query, cols.values())
    world.conn.commit()
def _insert_place_table_objects(places, geomtype, force_name):
    """ Insert the given way/area rows into the place table. Unlike for
        nodes, a geometry column is mandatory here.
    """
    cur = world.conn.cursor()
    for line in places:
        cols = dict(line)
        if 'osm_type' not in cols:
            cols['osm_type'] = 'W'
        _format_placex_cols(cols, geomtype, force_name)
        coords = cols.pop('geometry')
        # the geometry is an SQL expression and must be interpolated;
        # all other columns are passed as query parameters
        query = 'INSERT INTO place (%s, geometry) values(%s, ST_SetSRID(%s, 4326))' % (
            ','.join(cols.iterkeys()),
            ','.join(['%s' for x in range(len(cols))]),
            coords
        )
        cur.execute(query, cols.values())
    world.conn.commit()
@step(u'the scene (.*)')
def import_set_scene(step, scene):
    """ Set the global scene used by subsequent geometry references. """
    world.load_scene(scene)
@step(u'the (named )?place (node|way|area)s')
def import_place_table_nodes(step, named, osmtype):
    """Insert a list of objects into the place table.
       Expects a table where columns are named in the same way as placex.
       With 'named', objects without a name get a random one.
    """
    force_name = named is not None
    cur = world.conn.cursor()
    # raw insert: keep the place triggers out of the way
    cur.execute('ALTER TABLE place DISABLE TRIGGER place_before_insert')
    if osmtype == 'node':
        _insert_place_table_nodes(step.hashes, force_name)
    else:
        geom = 'LINESTRING' if osmtype == 'way' else 'POLYGON'
        _insert_place_table_objects(step.hashes, geom, force_name)
    cur.execute('ALTER TABLE place ENABLE TRIGGER place_before_insert')
    cur.close()
    world.conn.commit()
@step(u'the relations')
def import_fill_planet_osm_rels(step):
    """Adds a raw relation to the osm2pgsql table.
       Three columns need to be supplied: id, tags, members.
       Members are given as a comma-separated list of
       "<type letter><id>[:<role>]" entries.
    """
    cur = world.conn.cursor()
    for line in step.hashes:
        members = []
        # member ids bucketed per OSM type, for the osm2pgsql parts array
        parts = { 'n' : [], 'w' : [], 'r' : [] }
        if line['members'].strip():
            for mem in line['members'].split(','):
                memparts = mem.strip().split(':', 2)
                memid = memparts[0].lower()
                parts[memid[0]].append(int(memid[1:]))
                members.append(memid)
                if len(memparts) == 2:
                    # explicit member role
                    members.append(memparts[1])
                else:
                    # no role given
                    members.append('')
        # tags are stored as a flat key/value list
        tags = []
        for k,v in world.make_hash(line['tags']).iteritems():
            tags.extend((k,v))
        if not members:
            members = None
        # way_off/rel_off are the offsets of the way and relation sections
        # within the concatenated parts array
        cur.execute("""INSERT INTO planet_osm_rels
                       (id, way_off, rel_off, parts, members, tags, pending)
                       VALUES (%s, %s, %s, %s, %s, %s, false)""",
                    (line['id'], len(parts['n']), len(parts['n']) + len(parts['w']),
                     parts['n'] + parts['w'] + parts['r'], members, tags))
    world.conn.commit()
@step(u'the ways')
def import_fill_planet_osm_ways(step):
    """ Add raw ways to the osm2pgsql table. Expects columns 'id',
        'nodes' (comma-separated node ids) and optionally 'tags'.
    """
    cur = world.conn.cursor()
    for line in step.hashes:
        tags = world.make_hash(line['tags']) if 'tags' in line else None
        nodes = [int(nd.strip()) for nd in line['nodes'].split(',')]
        cur.execute("""INSERT INTO planet_osm_ways
                       (id, nodes, tags, pending)
                       VALUES (%s, %s, %s, false)""",
                    (line['id'], nodes, tags))
    world.conn.commit()
############### import and update steps #######################################
@step(u'importing')
def import_database(step):
    """ Runs the actual indexing: install the SQL functions, copy the
        raw data from place into placex and index it.
    """
    world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
    cur = world.conn.cursor()
    cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
                   housenumber, street, addr_place, isin, postcode, country_code, extratags,
                   geometry) select * from place""")
    world.conn.commit()
    world.run_nominatim_script('setup', 'index', 'index-noanalyse')
    #world.db_dump_table('placex')
@step(u'updating place (node|way|area)s')
def update_place_table_nodes(step, osmtype):
    """ Replace a geometry in place by reinsertion and reindex database.
    """
    world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
    if osmtype == 'node':
        _insert_place_table_nodes(step.hashes, False)
    else:
        geom = 'LINESTRING' if osmtype == 'way' else 'POLYGON'
        _insert_place_table_objects(step.hashes, geom, False)
    world.run_nominatim_script('update', 'index')
@step(u'marking for delete (.*)')
def update_delete_places(step, places):
    """ Remove entries from place and reindex the database. Places are
        given as a comma-separated list of unique identifiers
        (see world.split_id).
    """
    world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
    cur = world.conn.cursor()
    for place in places.split(','):
        osmtype, osmid, cls = world.split_id(place)
        if cls is None:
            q = "delete from place where osm_type = %s and osm_id = %s"
            params = (osmtype, osmid)
        else:
            q = "delete from place where osm_type = %s and osm_id = %s and class = %s"
            params = (osmtype, osmid, cls)
        cur.execute(q, params)
    world.conn.commit()
    #world.db_dump_table('placex')
    world.run_nominatim_script('update', 'index')
@step(u'sending query "(.*)"( with dups)?$')
def query_cmd(step, query, with_dups):
    """ Run a search via the command-line query tool and store the output
        as a json page, so the same checks as for API queries can be used.
    """
    cmd = [os.path.join(world.config.source_dir, 'utils', 'query.php'),
           '--search', query]
    if with_dups is not None:
        cmd.append('--nodedupe')
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    outp, err = proc.communicate()
    assert (proc.returncode == 0), "query.php failed with message: %s" % err
    world.returncode = 200
    world.response_format = 'json'
    world.page = outp

View File

@@ -0,0 +1,212 @@
""" Steps for setting up a test database for osm2pgsql import.
Note that osm2pgsql features need a database and therefore need
to be tagged with @DB.
"""
from nose.tools import *
from lettuce import *
import logging
import random
import tempfile
import os
import subprocess
logger = logging.getLogger(__name__)
@before.each_scenario
def osm2pgsql_setup_test(scenario):
    # start every scenario with an empty list of pending OSM objects
    world.osm2pgsql = []
@step(u'the osm nodes:')
def osm2pgsql_import_nodes(step):
    """ Define a list of OSM nodes to be imported, given as a table.
        Each line describes one node with all its attributes.
        'id' is mandatory, all other fields are filled with random or
        boilerplate values when not given. If 'tags' is missing an empty
        tag list is assumed.
        For updates, a mandatory 'action' column needs to contain
        'C' (create), 'M' (modify) or 'D' (delete), matching the
        actiontypes mapping below.
    """
    for line in step.hashes:
        # boilerplate attributes that every OSM XML object needs
        node = { 'type' : 'N', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
                 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
               }
        node.update(line)
        node['id'] = int(node['id'])
        if 'geometry' in node:
            # geometry column is given as "<lat> <lon>"
            lat, lon = node['geometry'].split(' ')
            node['lat'] = float(lat)
            node['lon'] = float(lon)
        else:
            # no location given: pick a random one
            node['lon'] = random.random()*360 - 180
            node['lat'] = random.random()*180 - 90
        if 'tags' in node:
            node['tags'] = world.make_hash(line['tags'])
        else:
            node['tags'] = {}
        world.osm2pgsql.append(node)
@step(u'the osm ways:')
def osm2pgsql_import_ways(step):
    """ Define a list of OSM ways to be imported, given as a table with
        columns 'id', 'nodes' (whitespace-separated node ids) and
        optionally 'tags'.
    """
    defaults = { 'type' : 'W', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
                 'changeset' : "11470653", 'uid' : "122294", 'user' : "foo" }
    for line in step.hashes:
        way = dict(defaults)
        way.update(line)
        way['id'] = int(way['id'])
        if 'tags' in way:
            way['tags'] = world.make_hash(line['tags'])
        else:
            way['tags'] = None
        way['nodes'] = way['nodes'].strip().split()
        world.osm2pgsql.append(way)
# Maps the OSM type letter used in test identifiers to the member type
# name expected in OSM XML <member type="..."> attributes.
membertype = { 'N' : 'node', 'W' : 'way', 'R' : 'relation' }
@step(u'the osm relations:')
def osm2pgsql_import_rels(step):
    """ Define a list of OSM relations to be imported. Members are given
        as a comma-separated list of "<type letter><id>[:<role>]".
    """
    for line in step.hashes:
        # boilerplate attributes that every OSM XML object needs
        rel = { 'type' : 'R', 'version' : '1', 'timestamp': "2012-05-01T15:06:20Z",
                'changeset' : "11470653", 'uid' : "122294", 'user' : "foo"
              }
        rel.update(line)
        rel['id'] = int(rel['id'])
        if 'tags' in rel:
            rel['tags'] = world.make_hash(line['tags'])
        else:
            rel['tags'] = {}
        members = []
        if rel['members'].strip():
            for mem in line['members'].split(','):
                memparts = mem.strip().split(':', 2)
                memid = memparts[0].upper()
                # store as (member type name, id, role) tuples
                members.append((membertype[memid[0]],
                                memid[1:],
                                memparts[1] if len(memparts) == 2 else ''
                              ))
        rel['members'] = members
        world.osm2pgsql.append(rel)
def _sort_xml_entries(x, y):
    """ Python 2 cmp-style comparator: orders OSM objects by type
        (nodes before ways before relations), then by id, as required
        for a sorted OSM XML file.
    """
    if x['type'] == y['type']:
        return cmp(x['id'], y['id'])
    else:
        return cmp('NWR'.find(x['type']), 'NWR'.find(y['type']))
def write_osm_obj(fd, obj):
    """ Serialise a single OSM object dict (node, way or relation, as
        assembled by the osm import steps above) as OSM XML to the open
        file *fd*.
    """
    if obj['type'] == 'N':
        # fix: the timestamp placeholder previously read "%(timestamp)%",
        # which raises ValueError at format time; it must be "%(timestamp)s"
        fd.write('<node id="%(id)d" lat="%(lat).8f" lon="%(lon).8f" version="%(version)s" timestamp="%(timestamp)s" changeset="%(changeset)s" uid="%(uid)s" user="%(user)s"'% obj)
        if obj['tags'] is None:
            # no tags at all: self-closing element
            fd.write('/>\n')
        else:
            fd.write('>\n')
            for k, v in obj['tags'].items():
                fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
            fd.write('</node>\n')
    elif obj['type'] == 'W':
        fd.write('<way id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
        for nd in obj['nodes']:
            fd.write('<nd ref="%s" />\n' % (nd,))
        for k, v in obj['tags'].items():
            fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
        fd.write('</way>\n')
    elif obj['type'] == 'R':
        fd.write('<relation id="%(id)d" version="%(version)s" changeset="%(changeset)s" timestamp="%(timestamp)s" user="%(user)s" uid="%(uid)s">\n' % obj)
        for mem in obj['members']:
            fd.write(' <member type="%s" ref="%s" role="%s"/>\n' % mem)
        for k, v in obj['tags'].items():
            fd.write(' <tag k="%s" v="%s"/>\n' % (k, v))
        fd.write('</relation>\n')
@step(u'loading osm data')
def osm2pgsql_load_place(step):
    """Imports the previously defined OSM data into a fresh copy of a
       Nominatim test database.
    """
    # OSM input files must be sorted: nodes, then ways, then relations
    world.osm2pgsql.sort(cmp=_sort_xml_entries)
    # create a OSM file in /tmp
    with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
        fname = fd.name
        fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
        fd.write('<osm version="0.6" generator="test-nominatim" timestamp="2014-08-26T20:22:02Z">\n')
        fd.write('\t<bounds minlat="43.72335" minlon="7.409205" maxlat="43.75169" maxlon="7.448637"/>\n')
        for obj in world.osm2pgsql:
            write_osm_obj(fd, obj)
        fd.write('</osm>\n')
    logger.debug( "Filename: %s" % fname)
    cmd = [os.path.join(world.config.source_dir, 'utils', 'setup.php')]
    cmd.extend(['--osm-file', fname, '--import-data'])
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (outp, outerr) = proc.communicate()
    assert (proc.returncode == 0), "OSM data import failed:\n%s\n%s\n" % (outp, outerr)
    ### reintroduce the triggers/indexes we've lost by having osm2pgsql set up place again
    cur = world.conn.cursor()
    cur.execute("""CREATE TRIGGER place_before_delete BEFORE DELETE ON place
                   FOR EACH ROW EXECUTE PROCEDURE place_delete()""")
    cur.execute("""CREATE TRIGGER place_before_insert BEFORE INSERT ON place
                   FOR EACH ROW EXECUTE PROCEDURE place_insert()""")
    cur.execute("""CREATE UNIQUE INDEX idx_place_osm_unique on place using btree(osm_id,osm_type,class,type)""")
    world.conn.commit()
    # clean up the temporary file and the consumed object list
    os.remove(fname)
    world.osm2pgsql = []
# Maps the 'action' column letter of update tables to the element name
# used in osmChange (osc) files.
actiontypes = { 'C' : 'create', 'M' : 'modify', 'D' : 'delete' }
@step(u'updating osm data')
def osm2pgsql_update_place(step):
    """Creates an osc file from the previously defined data and imports it
       into the database.
    """
    # first finish the initial import and index it
    world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions')
    cur = world.conn.cursor()
    cur.execute("""insert into placex (osm_type, osm_id, class, type, name, admin_level,
                   housenumber, street, addr_place, isin, postcode, country_code, extratags,
                   geometry) select * from place""")
    world.conn.commit()
    world.run_nominatim_script('setup', 'index', 'index-noanalyse')
    world.run_nominatim_script('setup', 'create-functions', 'create-partition-functions', 'enable-diff-updates')
    # write the pending objects into an osmChange file, each wrapped in
    # its create/modify/delete action element
    with tempfile.NamedTemporaryFile(dir='/tmp', delete=False) as fd:
        fname = fd.name
        fd.write("<?xml version='1.0' encoding='UTF-8'?>\n")
        fd.write('<osmChange version="0.6" generator="Osmosis 0.43.1">\n')
        for obj in world.osm2pgsql:
            fd.write('<%s>\n' % (actiontypes[obj['action']], ))
            write_osm_obj(fd, obj)
            fd.write('</%s>\n' % (actiontypes[obj['action']], ))
        fd.write('</osmChange>\n')
    logger.debug( "Filename: %s" % fname)
    cmd = [os.path.join(world.config.source_dir, 'utils', 'update.php')]
    cmd.extend(['--import-diff', fname])
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    (outp, outerr) = proc.communicate()
    assert (proc.returncode == 0), "OSM data update failed:\n%s\n%s\n" % (outp, outerr)
    # clean up the temporary file and the consumed object list
    os.remove(fname)
    world.osm2pgsql = []

246
tests/steps/terrain.py Normal file
View File

@@ -0,0 +1,246 @@
from lettuce import *
from nose.tools import *
import logging
import os
import subprocess
import psycopg2
import re
from haversine import haversine
from shapely.wkt import loads as wkt_load
from shapely.ops import linemerge
logger = logging.getLogger(__name__)
class NominatimConfig:
    """ Collects the test configuration from environment variables and
        sets up logging accordingly.
    """

    def __init__(self):
        # logging setup
        loglevel = getattr(logging, os.environ.get('LOGLEVEL','info').upper())
        if 'LOGFILE' in os.environ:
            logging.basicConfig(filename=os.environ.get('LOGFILE','run.log'),
                                level=loglevel)
        else:
            logging.basicConfig(level=loglevel)
        # Nominatim test setup
        self.base_url = os.environ.get('NOMINATIM_SERVER', 'http://localhost/nominatim')
        self.source_dir = os.path.abspath(os.environ.get('NOMINATIM_DIR', '../Nominatim'))
        self.template_db = os.environ.get('TEMPLATE_DB', 'test_template_nominatim')
        self.test_db = os.environ.get('TEST_DB', 'test_nominatim')
        self.local_settings_file = os.environ.get('NOMINATIM_SETTINGS', '/tmp/nominatim_settings.php')
        self.reuse_template = 'NOMINATIM_REUSE_TEMPLATE' in os.environ
        self.keep_scenario_db = 'NOMINATIM_KEEP_SCENARIO_DB' in os.environ
        # make sure the Nominatim tools pick up the same settings file
        os.environ['NOMINATIM_SETTINGS'] = '/tmp/nominatim_settings.php'
        scriptpath = os.path.dirname(os.path.abspath(__file__))
        self.scene_path = os.environ.get('SCENE_PATH',
                                         os.path.join(scriptpath, '..', 'scenes', 'data'))

    def __str__(self):
        return 'Server URL: %s\nSource dir: %s\n' % (self.base_url, self.source_dir)
world.config = NominatimConfig()
@world.absorb
def write_nominatim_config(dbname):
    """ Write a minimal PHP settings file that points Nominatim at the
        given test database via its DSN constant.
    """
    # Context manager guarantees the file is flushed and closed even if
    # the write raises (the original leaked the handle on error).
    with open(world.config.local_settings_file, 'w') as f:
        f.write("<?php\n @define('CONST_Database_DSN', 'pgsql://@/%s');\n" % dbname)
@world.absorb
def run_nominatim_script(script, *args):
    """ Run one of the Nominatim PHP utility scripts from the source
        tree's utils directory, passing each extra argument as a
        '--<arg>' switch. Fails the test if the script exits non-zero.
    """
    cmdline = [os.path.join(world.config.source_dir, 'utils', '%s.php' % script)]
    cmdline.extend('--%s' % arg for arg in args)
    child = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = child.communicate()
    assert (child.returncode == 0), "Script '%s' failed:\n%s\n%s\n" % (script, stdout, stderr)
@world.absorb
def make_hash(inp):
    """ Build a dict from a string containing the inner part of a
        Python dict literal (comma-separated 'key : value' pairs).

        NOTE(review): this eval()s scenario text directly, so feature
        files must be trusted input - any expression in them executes.
    """
    return eval('{' + inp + '}')
@world.absorb
def split_id(oid):
    """ Splits a unique identifier for places into its components.
        As place_ids cannot be used for testing, we use a unique
        identifier instead that is of the form <osmtype><osmid>[:class].
    """
    oid = oid.strip()
    if oid == 'None':
        return None, None, None

    osmtype = oid[0]
    assert_in(osmtype, ('R','N','W'))

    rest = oid[1:]
    if ':' not in rest:
        return (osmtype, int(rest), None)

    osmid, cls = rest.split(':')
    return (osmtype, int(osmid), cls)
@world.absorb
def get_placeid(oid):
    """ Tries to retrieve the place_id for a unique identifier
        (either a raw numeric place_id or an id in split_id() format).
    """
    if oid[0].isdigit():
        # Already a plain numeric place_id.
        return int(oid)

    osmtype, osmid, cls = world.split_id(oid)
    if osmtype is None:
        return None

    # Build the lookup incrementally; the class restriction is optional.
    query = 'SELECT place_id FROM placex where osm_type = %s and osm_id = %s'
    params = [osmtype, osmid]
    if cls is not None:
        query += ' and class = %s'
        params.append(cls)

    cur = world.conn.cursor()
    cur.execute(query, params)
    assert_equals (cur.rowcount, 1)
    return cur.fetchone()[0]
@world.absorb
def match_geometry(coord, matchstring):
    """ Assert that coord lies within the tolerance given by a match
        string of the form '<lat>, <lon> [+-<dist>[m|km]]'. Without an
        explicit tolerance an exact match is required; without a unit
        the distance is taken as kilometres.
    """
    parsed = re.match(r'([-0-9.]+),\s*([-0-9.]+)\s*(?:\+-([0-9.]+)([a-z]+)?)?', matchstring)
    assert_is_not_none(parsed, "Invalid match string")

    lat, lon, tol, unit = parsed.group(1), parsed.group(2), parsed.group(3), parsed.group(4)
    logger.debug("Distmatch: %s/%s %s %s" % (lat, lon, tol, unit))

    # haversine returns kilometres, so normalise the tolerance to km too.
    dist = haversine(coord, (float(lat), float(lon)))
    if tol is None:
        expdist = 0
    else:
        expdist = float(tol)
        if unit == 'm':
            expdist = expdist/1000
        elif unit not in (None, 'km'):
            raise Exception("Unknown unit '%s' in geometry match" % (unit, ))

    logger.debug("Distances expected: %f, got: %f" % (expdist, dist))
    assert dist <= expdist, "Geometry too far away, expected: %f, got: %f" % (expdist, dist)
@world.absorb
def db_dump_table(table):
cur = world.conn.cursor()
cur.execute('SELECT * FROM %s' % table)
print '<<<<<<< BEGIN OF TABLE DUMP %s' % table
for res in cur:
print res
print '<<<<<<< END OF TABLE DUMP %s' % table
@world.absorb
def db_drop_database(name):
    """ Remove the database with the given name, if it exists. """
    conn = psycopg2.connect(database='postgres')
    # DROP DATABASE may not run inside a transaction block, so switch
    # the connection to autocommit mode first.
    conn.set_isolation_level(0)
    cursor = conn.cursor()
    cursor.execute('DROP DATABASE IF EXISTS %s' % (name, ))
    conn.close()
world.is_template_set_up = False
@world.absorb
def db_template_setup():
    """ Set up a template database, containing all tables
        but not yet any functions.

        The template is built at most once per run. With
        NOMINATIM_REUSE_TEMPLATE set, an already existing template
        database from a previous run is reused as-is.
    """
    if world.is_template_set_up:
        return

    world.is_template_set_up = True
    world.write_nominatim_config(world.config.template_db)
    if world.config.reuse_template:
        # check that the template is there
        conn = psycopg2.connect(database='postgres')
        cur = conn.cursor()
        cur.execute('select count(*) from pg_database where datname = %s',
                     (world.config.template_db,))
        if cur.fetchone()[0] == 1:
            return
    else:
        # just in case... make sure a previous table has been dropped
        world.db_drop_database(world.config.template_db)
    # call the first part of database setup
    world.run_nominatim_script('setup', 'create-db', 'setup-db')
    # remove external data to speed up indexing for tests
    conn = psycopg2.connect(database=world.config.template_db)
    # requires 'import psycopg2.extras' at the top of the file - the
    # submodule is not pulled in by a plain 'import psycopg2'
    psycopg2.extras.register_hstore(conn, globally=False, unicode=True)
    cur = conn.cursor()
    for table in ('gb_postcode', 'us_postcode', 'us_state', 'us_statecounty'):
        cur.execute('TRUNCATE TABLE %s' % (table,))
    conn.commit()
    conn.close()
    # execute osm2pgsql on an empty file to get the right tables
    osm2pgsql = os.path.join(world.config.source_dir, 'osm2pgsql', 'osm2pgsql')
    proc = subprocess.Popen([osm2pgsql, '-lsc', '-O', 'gazetteer', '-d', world.config.template_db, '-'],
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    [outstr, errstr] = proc.communicate(input='<osm version="0.6"></osm>')
    # Fail early if table creation went wrong; previously the return
    # code was silently ignored and later steps failed obscurely.
    assert (proc.returncode == 0), "osm2pgsql failed:\n%s\n%s\n" % (outstr, errstr)
    world.run_nominatim_script('setup', 'create-functions', 'create-tables', 'create-partition-tables', 'create-partition-functions', 'load-data', 'create-search-indices')
# Leave the table around so it can be reused again after a non-reuse test round.
#@after.all
def db_template_teardown(total):
    """ Remove the template database and the generated settings file
        at the end of the run, unless template reuse is requested.

        Currently disabled: the @after.all decorator above is commented
        out, so the template survives for reuse in later runs.
    """
    if world.is_template_set_up:
        # remove template DB
        if not world.config.reuse_template:
            world.db_drop_database(world.config.template_db)
        try:
            os.remove(world.config.local_settings_file)
        except OSError:
            pass # ignore missing file
##########################################################################
#
# Data scene handling
#
# Cache of all scenes loaded so far, keyed by scene name.
world.scenes = {}
# The scene most recently activated via load_scene().
world.current_scene = None
@world.absorb
def load_scene(name):
    """ Make the scene with the given name the current scene, loading
        and caching its geometries on first use.

        Scene files live at <scene_path>/<name>.wkt with one
        '<object name>|<WKT geometry>' pair per line.
    """
    if name in world.scenes:
        world.current_scene = world.scenes[name]
    else:
        with open(os.path.join(world.config.scene_path, "%s.wkt" % name), 'r') as fd:
            scene = {}
            for line in fd:
                if line.strip():
                    # maxsplit=1: only the first '|' separates the name
                    # from the geometry. (The previous maxsplit of 2
                    # made the two-name unpack raise ValueError on any
                    # line containing a second '|'.)
                    obj, wkt = line.split('|', 1)
                    wkt = wkt.strip()
                    scene[obj.strip()] = wkt_load(wkt)
            world.scenes[name] = scene
            world.current_scene = scene
@world.absorb
def get_scene_geometry(name):
    """ Resolve a scene geometry description into a shapely geometry.

        *name* is a '+'-separated list of object references, each either
        ':obj' (an object of the current scene) or 'scene:obj' (an
        object of another scene, which is loaded temporarily). Returns
        None if the string is not a scene description, the geometry
        itself for a single object and the merged line for several.
    """
    if ':' not in name:
        # Not a scene description
        return None
    geoms = []
    for obj in name.split('+'):
        oname = obj.strip()
        if oname.startswith(':'):
            geoms.append(world.current_scene[oname[1:]])
        else:
            # maxsplit=1: only the first ':' separates the scene name
            # from the object name. (maxsplit of 2 would make the
            # two-name unpack fail if the object name contains a ':'.)
            scene, obj = oname.split(':', 1)
            oldscene = world.current_scene
            world.load_scene(scene)
            wkt = world.current_scene[obj]
            world.current_scene = oldscene
            geoms.append(wkt)

    if len(geoms) == 1:
        return geoms[0]
    else:
        return linemerge(geoms)