move index creation for word table to tokenizer

This introduces a finalization routine for the tokenizer
where it can post-process the import if necessary.
This commit is contained in:
Sarah Hoffmann
2021-04-30 17:28:34 +02:00
parent 20891abe1c
commit 388ebcbae2
6 changed files with 31 additions and 11 deletions

View File

@@ -119,6 +119,15 @@ class LegacyTokenizer:
self.normalization = properties.get_property(conn, DBCFG_NORMALIZATION)
def finalize_import(self, config):
    """ Run the post-import steps that make the tokenizer data usable,
        i.e. create the required database indices.
    """
    with connect(self.dsn) as conn:
        preprocessor = SQLPreprocessor(conn, config)
        preprocessor.run_sql_file(conn, 'tokenizer/legacy_tokenizer_indices.sql')
def update_sql_functions(self, config):
""" Reimport the SQL functions for this tokenizer.
"""