Add new command for cleaning word tokens

For the moment it only collects outdated housenumbers; nothing is removed yet.
This commit is contained in:
Sarah Hoffmann
2022-01-20 20:05:15 +01:00
parent 86588419fb
commit 344a2bfc1a
4 changed files with 53 additions and 0 deletions

View File

@@ -209,6 +209,13 @@ class AbstractTokenizer(ABC):
"""
@abstractmethod
def update_word_tokens(self) -> None:
    """ Do house-keeping on the tokenizer's internal data structures.
        Remove unused word tokens, resort data etc.
        Implementations that have no maintenance to do may simply log
        and return.
    """
@abstractmethod
def name_analyzer(self) -> AbstractAnalyzer:
""" Create a new analyzer for tokenizing names and queries

View File

@@ -112,6 +112,39 @@ class LegacyICUTokenizer(AbstractTokenizer):
conn.commit()
def _cleanup_housenumbers(self) -> None:
    """ Remove unused house numbers.

        Collects all house-number word entries (type 'H') that no longer
        appear in any search_name vector, then discards those that are
        still present on a placex row. For the moment the result is only
        reported via the log; actual deletion is not yet implemented.
    """
    with connect(self.dsn) as conn:
        with conn.cursor(name="hnr_counter") as cur:
            # Raw string so the '\d' reaches Postgres verbatim as a
            # SIMILAR TO pattern; a non-raw literal is an invalid Python
            # escape sequence (DeprecationWarning, later SyntaxError).
            cur.execute(r"""SELECT word_id, word_token FROM word
                            WHERE type = 'H'
                              AND NOT EXISTS(SELECT * FROM search_name
                                             WHERE ARRAY[word.word_id] && name_vector)
                              AND (char_length(word_token) > 6
                                   OR word_token not similar to '\d+')
                         """)
            # token -> word_id for every candidate that looks removable.
            candidates = {token: wid for wid, token in cur}
        with conn.cursor(name="hnr_counter") as cur:
            cur.execute(r"""SELECT housenumber FROM placex
                            WHERE housenumber is not null
                              AND (char_length(housenumber) > 6
                                   OR housenumber not similar to '\d+')
                         """)
            for row in cur:
                # placex may hold several house numbers separated by ';'.
                for hnr in row[0].split(';'):
                    candidates.pop(hnr, None)
        LOG.info("There are %s outdated housenumbers.", len(candidates))
def update_word_tokens(self) -> None:
    """ Remove unused tokens.

        Currently limited to house-number clean-up; delegates the actual
        work to _cleanup_housenumbers().
    """
    LOG.info("Cleaning up housenumber tokens.")
    self._cleanup_housenumbers()
    LOG.info("Tokenizer house-keeping done.")
def name_analyzer(self):
""" Create a new analyzer for tokenizing names and queries
using this tokinzer. Analyzers are context managers and should

View File

@@ -211,6 +211,13 @@ class LegacyTokenizer(AbstractTokenizer):
cur.drop_table("word_frequencies")
conn.commit()
def update_word_tokens(self) -> None:
    """ No house-keeping implemented for the legacy tokenizer.

        Only logs that nothing was done, so the command still succeeds.
    """
    LOG.info("No tokenizer clean-up available.")
def name_analyzer(self):
""" Create a new analyzer for tokenizing names and queries
using this tokinzer. Analyzers are context managers and should