more unit tests for tokenizers

This commit is contained in:
Sarah Hoffmann
2021-12-02 15:46:36 +01:00
parent 7617a9316e
commit 14a78f55cd
5 changed files with 333 additions and 31 deletions

View File

@@ -513,7 +513,9 @@ class _TokenInfo:
         with conn.cursor() as cur:
             return cur.scalar("SELECT word_ids_from_name(%s)::text", (name, ))
 
-        self.data['street'] = self.cache.streets.get(street, _get_street)
+        tokens = self.cache.streets.get(street, _get_street)
+        if tokens:
+            self.data['street'] = tokens
 
     def add_place(self, conn, place):
@@ -542,9 +544,12 @@ class _TokenInfo:
         tokens = {}
         for key, value in terms:
-            tokens[key] = self.cache.address_terms.get(value, _get_address_term)
+            items = self.cache.address_terms.get(value, _get_address_term)
+            if items[0] or items[1]:
+                tokens[key] = items
 
-        self.data['addr'] = tokens
+        if tokens:
+            self.data['addr'] = tokens
 
 class _LRU: