Merge pull request #3955 from AmmarYasser455/fix/typos

docs: fix multiple typos in documentation and source code
Sarah Hoffmann
2026-02-01 10:05:34 +01:00
committed by GitHub
9 changed files with 17 additions and 17 deletions

View File

@@ -4,7 +4,7 @@
 Bugs can be reported at https://github.com/openstreetmap/Nominatim/issues.
 Please always open a separate issue for each problem. In particular, do
-not add your bugs to closed issues. They may looks similar to you but
+not add your bugs to closed issues. They may look similar to you but
 often are completely different from the maintainer's point of view.
 ## Workflow for Pull Requests
@@ -21,7 +21,7 @@ that you are responsible for your pull requests. You should be prepared
 to get change requests because as the maintainers we have to make sure
 that your contribution fits well with the rest of the code. Please make
 sure that you have time to react to these comments and amend the code or
-engage in a conversion. Do not expect that others will pick up your code,
+engage in a conversation. Do not expect that others will pick up your code,
 it will almost never happen.
 Please open a separate pull request for each issue you want to address.
@@ -38,7 +38,7 @@ description or in documentation need to
 1. clearly mark the AI-generated sections as such, for example, by
 mentioning all use of AI in the PR description, and
 2. include proof that you have run the generated code on an actual
-installation of Nominatim. Adding and excuting tests will not be
+installation of Nominatim. Adding and executing tests will not be
 sufficient. You need to show that the code actually solves the problem
 the PR claims to solve.

View File

@@ -17,7 +17,7 @@ CREATE TABLE location_property_tiger_import (
 -- Lookup functions for tiger import when update
--- informations are dropped (see gh-issue #2463)
+-- tables are dropped (see gh-issue #2463)
 CREATE OR REPLACE FUNCTION getNearestNamedRoadPlaceIdSlow(in_centroid GEOMETRY,
 in_token_info JSONB)
 RETURNS BIGINT

View File

@@ -43,7 +43,7 @@ class FormatDispatcher:
 return decorator
 def error_format_func(self, func: ErrorFormatFunc) -> ErrorFormatFunc:
-""" Decorator for a function that formats error messges.
+""" Decorator for a function that formats error messages.
 There is only one error formatter per dispatcher. Using
 the decorator repeatedly will overwrite previous functions.
 """
@@ -79,7 +79,7 @@ class FormatDispatcher:
 def set_content_type(self, fmt: str, content_type: str) -> None:
 """ Set the content type for the given format. This is the string
 that will be returned in the Content-Type header of the HTML
-response, when the given format is choosen.
+response, when the given format is chosen.
 """
 self.content_types[fmt] = content_type

View File

@@ -22,7 +22,7 @@ class CountedTokenIDs:
""" A list of token IDs with their respective counts, sorted """ A list of token IDs with their respective counts, sorted
from least frequent to most frequent. from least frequent to most frequent.
If a token count is one, then statistics are likely to be unavaible If a token count is one, then statistics are likely to be unavailable
and a relatively high count is assumed instead. and a relatively high count is assumed instead.
""" """

View File

@@ -17,7 +17,7 @@ import dataclasses
 # The x value for the regression computation will be the position of the
 # token in the query. Thus we know the x values will be [0, query length).
 # As the denominator only depends on the x values, we can pre-compute here
-# the denominatior to use for a given query length.
+# the denominator to use for a given query length.
 # Note that query length of two or less is special cased and will not use
 # the values from this array. Thus it is not a problem that they are 0.
 LINFAC = [i * (sum(si * si for si in range(i)) - (i - 1) * i * (i - 1) / 4)
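
For context on the corrected comment: the LINFAC values appear to be the pre-computed denominator of an ordinary least-squares slope. With x values 0, 1, ..., n-1, that denominator works out to

    \[
    n\sum_{x=0}^{n-1} x^{2} - \Bigl(\sum_{x=0}^{n-1} x\Bigr)^{2}
      = n\Bigl(\sum_{x=0}^{n-1} x^{2} - \frac{n(n-1)^{2}}{4}\Bigr),
    \qquad\text{using}\quad \sum_{x=0}^{n-1} x = \frac{n(n-1)}{2},
    \]

which matches i * (sum(si * si for si in range(i)) - (i - 1) * i * (i - 1) / 4) with n = i.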
@@ -129,7 +129,7 @@ class Token(ABC):
 @abstractmethod
 def get_country(self) -> str:
-""" Return the country code this tojen is associated with
+""" Return the country code this token is associated with
 (currently for country tokens only).
 """
@@ -231,7 +231,7 @@ class QueryNode:
 return max(0, -self.penalty)
 def name_address_ratio(self) -> float:
-""" Return the propability that the partial token belonging to
+""" Return the probability that the partial token belonging to
 this node forms part of a name (as opposed of part of the address).
 """
 if self.partial is None:
@@ -275,7 +275,7 @@ class QueryStruct:
 directed acyclic graph.
 A query also has a direction penalty 'dir_penalty'. This describes
-the likelyhood if the query should be read from left-to-right or
+the likelihood if the query should be read from left-to-right or
 vice versa. A negative 'dir_penalty' should be read as a penalty on
 right-to-left reading, while a positive value represents a penalty
 for left-to-right reading. The default value is 0, which is equivalent
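
As a quick illustration of the sign convention in that docstring, here is a minimal sketch; the helper function is hypothetical, only the 'dir_penalty' value itself comes from the code:

    def reading_penalties(dir_penalty: float) -> tuple[float, float]:
        # Returns (penalty for left-to-right reading, penalty for right-to-left reading).
        # A positive dir_penalty penalises left-to-right reading, a negative one
        # penalises right-to-left reading, and 0 treats both directions equally.
        if dir_penalty >= 0:
            return dir_penalty, 0.0
        return 0.0, -dir_penalty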

View File

@@ -136,7 +136,7 @@ def _print_output(formatter: napi.FormatDispatcher, result: Any,
 json.dump(json.loads(output), sys.stdout, indent=4, ensure_ascii=False)
 except json.decoder.JSONDecodeError as err:
 # Catch the error here, so that data can be debugged,
-# when people are developping custom result formatters.
+# when people are developing custom result formatters.
 LOG.fatal("Parsing json failed: %s\nUnformatted output:\n%s", err, output)
 else:
 sys.stdout.write(output)

View File

@@ -177,7 +177,7 @@ class Indexer:
 `total_tuples` may contain the total number of rows to process.
 When not supplied, the value will be computed using the
-approriate runner function.
+appropriate runner function.
 """
 LOG.warning("Starting %s (using batch size %s)", runner.name(), batch)

View File

@@ -225,7 +225,7 @@ async def load_data(dsn: str, threads: int) -> None:
 total=pysql.Literal(placex_threads),
 mod=pysql.Literal(imod)), None)
-# Interpolations need to be copied seperately
+# Interpolations need to be copied separately
 await pool.put_query("""
 INSERT INTO location_property_osmline (osm_id, address, linegeo)
 SELECT osm_id, address, geometry FROM place

View File

@@ -29,7 +29,7 @@ _MIGRATION_FUNCTIONS: List[Tuple[NominatimVersion, Callable[..., None]]] = []
 def migrate(config: Configuration, paths: Any) -> int:
 """ Check for the current database version and execute migrations,
-if necesssary.
+if necessary.
 """
 with connect(config.get_libpq_dsn()) as conn:
 register_hstore(conn)
@@ -143,7 +143,7 @@ def create_placex_entrance_table(conn: Connection, config: Configuration, **_: A
 @_migration(5, 1, 99, 1)
 def create_place_entrance_table(conn: Connection, config: Configuration, **_: Any) -> None:
-""" Add the place_entrance table to store incomming entrance nodes
+""" Add the place_entrance table to store incoming entrance nodes
 """
 if not table_exists(conn, 'place_entrance'):
 with conn.cursor() as cur:
@@ -252,7 +252,7 @@ def create_place_postcode_table(conn: Connection, config: Configuration, **_: An
""") """)
sqlp.run_string(conn, sqlp.run_string(conn,
'GRANT SELECT ON location_postcodes TO "{{config.DATABASE_WEBUSER}}"') 'GRANT SELECT ON location_postcodes TO "{{config.DATABASE_WEBUSER}}"')
# remove postcodes from the various auxillary tables # remove postcodes from the various auxiliary tables
cur.execute( cur.execute(
""" """
DELETE FROM place_addressline DELETE FROM place_addressline