Coverage for an_website/utils/utils.py: 71.466%
382 statements
coverage.py v7.6.10, created at 2025-01-22 15:59 +0000
1# This program is free software: you can redistribute it and/or modify
2# it under the terms of the GNU Affero General Public License as
3# published by the Free Software Foundation, either version 3 of the
4# License, or (at your option) any later version.
5#
6# This program is distributed in the hope that it will be useful,
7# but WITHOUT ANY WARRANTY; without even the implied warranty of
8# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9# GNU Affero General Public License for more details.
10#
11# You should have received a copy of the GNU Affero General Public License
12# along with this program. If not, see <https://www.gnu.org/licenses/>.
14"""A module with many useful things used by other modules."""
16from __future__ import annotations
18import argparse
19import asyncio
20import bisect
21import contextlib
22import logging
23import os.path
24import pathlib
25import random
26import sys
27import time
28from base64 import b85encode
29from collections.abc import (
30 Awaitable,
31 Callable,
32 Collection,
33 Generator,
34 Iterable,
35 Mapping,
36 Set,
37)
38from dataclasses import dataclass, field
39from datetime import datetime, timezone
40from enum import IntFlag
41from functools import cache, partial
42from hashlib import sha1
43from importlib.resources.abc import Traversable
44from ipaddress import IPv4Address, IPv6Address, ip_address, ip_network
45from pathlib import Path
46from typing import (
47 IO,
48 TYPE_CHECKING,
49 Any,
50 Final,
51 Literal,
52 TypeAlias,
53 Union,
54 cast,
55 get_args,
56)
57from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit, urlunsplit
59import elasticapm
60import regex
61from blake3 import blake3
62from elastic_transport import ApiError, TransportError
63from elasticsearch import AsyncElasticsearch
64from geoip import geolite2 # type: ignore[import-untyped]
65from openmoji_dist import VERSION as OPENMOJI_VERSION
66from rapidfuzz.distance.Levenshtein import distance
67from redis.asyncio import Redis
68from tornado.web import HTTPError, RequestHandler
69from typed_stream import Stream
70from UltraDict import UltraDict # type: ignore[import-untyped]
72from .. import DIR as ROOT_DIR
74if TYPE_CHECKING:
75 from .background_tasks import BackgroundTask
77LOGGER: Final = logging.getLogger(__name__)
79# pylint: disable=consider-alternative-union-syntax
80type Handler = Union[
81 tuple[str, type[RequestHandler]],
82 tuple[str, type[RequestHandler], dict[str, Any]],
83 tuple[str, type[RequestHandler], dict[str, Any], str],
84]
86type OpenMojiValue = Literal[False, "img", "glyf_colr1", "glyf_colr0"]
87BumpscosityValue: TypeAlias = Literal[0, 1, 12, 50, 76, 100, 1000]
88BUMPSCOSITY_VALUES: Final[tuple[BumpscosityValue, ...]] = get_args(
89 BumpscosityValue
90)
92PRINT = int.from_bytes((ROOT_DIR / "primes.bin").read_bytes(), "big")
94IP_HASH_SALT: Final = {
95 "date": datetime.now(timezone.utc).date(),
96 "hasher": blake3(
97 blake3(
98 datetime.now(timezone.utc).date().isoformat().encode("ASCII")
99 ).digest()
100 ),
101}
103SUS_PATHS: Final[Set[str]] = {
104 "/-profiler/phpinfo",
105 "/.aws/credentials",
106 "/.env",
107 "/.env.bak",
108 "/.ftpconfig",
109 "/admin/controller/extension/extension",
110 "/assets/filemanager/dialog",
111 "/assets/vendor/server/php",
112 "/aws.yml",
113 "/boaform/admin/formlogin",
114 "/phpinfo",
115 "/public/assets/jquery-file-upload/server/php",
116 "/root",
117 "/settings/aws.yml",
118 "/uploads",
119 "/vendor/phpunit/phpunit/src/util/php/eval-stdin",
120 "/wordpress",
121 "/wp",
122 "/wp-admin",
123 "/wp-admin/css",
124 "/wp-includes",
125 "/wp-login",
126 "/wp-upload",
127}
130class ArgparseNamespace(argparse.Namespace):
131 """A class to fake type hints for argparse.Namespace."""
133 # pylint: disable=too-few-public-methods
134 __slots__ = ("config", "save_config_to")
136 config: list[pathlib.Path]
137 save_config_to: pathlib.Path | None
140class AwaitableValue[T](Awaitable[T]): # pylint: disable=undefined-variable
141 # pylint: disable=too-few-public-methods
142 """An awaitable that always returns the same value."""
144 def __await__(self) -> Generator[None, None, T]:
145 """Return the value."""
146 yield
147 return self._value
149 def __init__(self, value: T) -> None:
150 """Set the value."""
151 self._value = value
154class Permission(IntFlag):
155 """Permissions for accessing restricted stuff."""
157 RATELIMITS = 1
158 TRACEBACK = 2
159 BACKDOOR = 4
160 UPDATE = 8
161 REPORTING = 16
162 SHORTEN = 32
163 UPLOAD = 64
166class Timer:
167 """Timer class used for timing stuff."""
169 __slots__ = ("_execution_time", "_start_time")
171 _execution_time: int
173 def __init__(self) -> None:
174 """Start the timer."""
175 self._start_time = time.perf_counter_ns()
177 def get(self) -> float:
178 """Get the execution time in seconds."""
179 return self.get_ns() / 1_000_000_000
181 def get_ns(self) -> int:
182 """Get the execution time in nanoseconds."""
183 assert hasattr(self, "_execution_time"), "Timer not stopped yet"
184 return self._execution_time
186 def stop(self) -> float:
187 """Stop the timer and get the execution time in seconds."""
188 return self.stop_ns() / 1_000_000_000
190 def stop_ns(self) -> int:
191 """Stop the timer and get the execution time in nanoseconds."""
192 assert not hasattr(self, "_execution_time"), "Timer already stopped"
193 self._execution_time = time.perf_counter_ns() - self._start_time
194 return self._execution_time
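A minimal usage sketch for Timer, assuming the package is installed so this module imports as an_website.utils.utils; stop_ns() records the duration once, after which get() returns it without stopping again:

from an_website.utils.utils import Timer

timer = Timer()
sum(range(1_000_000))  # the work being timed
elapsed_ns = timer.stop_ns()  # stops the timer, returns nanoseconds
print(timer.get())  # same duration, converted to seconds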
197@cache
198def add_args_to_url(url: str | SplitResult, **kwargs: object) -> str:
199 """Add query arguments to a URL."""
200 if isinstance(url, str):
201 url = urlsplit(url)
203 if not kwargs:
204 return url.geturl()
206 url_args: dict[str, str] = dict(
207 parse_qsl(url.query, keep_blank_values=True)
208 )
210 for key, value in kwargs.items():
211 if value is None:
212 if key in url_args:
213 del url_args[key]
214 # pylint: disable-next=confusing-consecutive-elif
215 elif isinstance(value, bool):
216 url_args[key] = bool_to_str(value)
217 else:
218 url_args[key] = str(value)
220 return urlunsplit(
221 (
222 url.scheme,
223 url.netloc,
224 url.path,
225 urlencode(url_args),
226 url.fragment,
227 )
228 )
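A short sketch of add_args_to_url, assuming the module imports as an_website.utils.utils: boolean values are serialized with bool_to_str and passing None removes an existing query argument.

from an_website.utils.utils import add_args_to_url

# "page" is removed, "pretty" is added as "sure"
print(add_args_to_url("/search?q=test&page=2", page=None, pretty=True))
# expected output: /search?q=test&pretty=sure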
231def anonymize_ip[ # noqa: D103
232 A: (str, None, str | None)
233](address: A, *, ignore_invalid: bool = False) -> A:
234 """Anonymize an IP address."""
235 if address is None:
236 return None
238 address = address.strip()
240 try:
241 version = ip_address(address).version
242 except ValueError:
243 if ignore_invalid:
244 return address
245 raise
247 if version == 4:
248 return str(ip_network(address + "/24", strict=False).network_address)
249 if version == 6:
250 return str(ip_network(address + "/48", strict=False).network_address)
252 raise HTTPError(reason="ERROR: -41")
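A minimal sketch of anonymize_ip, assuming the module imports as an_website.utils.utils: the host part is zeroed to a /24 for IPv4 and a /48 for IPv6.

from an_website.utils.utils import anonymize_ip

print(anonymize_ip("203.0.113.42"))  # 203.0.113.0
print(anonymize_ip("2001:db8:abcd:12::1"))  # 2001:db8:abcd::
print(anonymize_ip("not an ip", ignore_invalid=True))  # returned unchanged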
255ansi_replace = partial(regex.sub, "\033" + r"\[-?\d+[a-zA-Z]", "")
256ansi_replace.__doc__ = "Remove ANSI escape sequences from a string."
259def apm_anonymization_processor(
260 client: elasticapm.Client, # pylint: disable=unused-argument
261 event: dict[str, Any],
262) -> dict[str, Any]:
263 """Anonymize an APM event."""
264 if "context" in event and "request" in event["context"]:
265 request = event["context"]["request"]
266 if "url" in request and "pathname" in request["url"]:
267 path = request["url"]["pathname"]
268 if path == "/robots.txt" or path.lower() in SUS_PATHS:
269 return event
270 if "socket" in request and "remote_address" in request["socket"]:
271 request["socket"]["remote_address"] = anonymize_ip(
272 request["socket"]["remote_address"]
273 )
274 if "headers" in request:
275 headers = request["headers"]
276 if "X-Forwarded-For" in headers:
277 headers["X-Forwarded-For"] = ", ".join(
278 anonymize_ip(ip.strip(), ignore_invalid=True)
279 for ip in headers["X-Forwarded-For"].split(",")
280 )
281 for header in headers:
282 if "ip" in header.lower().split("-"):
283 headers[header] = anonymize_ip(
284 headers[header], ignore_invalid=True
285 )
286 return event
289def apply[V, Ret](value: V, fun: Callable[[V], Ret]) -> Ret: # noqa: D103
290 """Apply a function to a value and return the result."""
291 return fun(value)
294backspace_replace = partial(regex.sub, ".?\x08", "")
295backspace_replace.__doc__ = "Remove backspaces from a string."
298def bool_to_str(val: bool) -> str:
299 """Convert a boolean to sure/nope."""
300 return "sure" if val else "nope"
303def bounded_edit_distance(s1: str, s2: str, /, k: int) -> int:
304 """Return a bounded edit distance between two strings.
306 k is the maximum distance that will be returned
307 """
308 if (dist := distance(s1, s2, score_cutoff=k)) == k + 1:
309 return k
310 return dist
313def country_code_to_flag(code: str) -> str:
314 """Convert a two-letter ISO country code to a flag emoji."""
315 return "".join(chr(ord(char) + 23 * 29 * 191) for char in code.upper())
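The offset 23 * 29 * 191 equals 127397 (0x1F1A5), which maps ASCII "A"–"Z" onto the regional indicator symbols, so a two-letter code becomes a flag emoji. A small check, assuming the module imports as an_website.utils.utils:

from an_website.utils.utils import country_code_to_flag

assert country_code_to_flag("de") == "\U0001F1E9\U0001F1EA"  # regional indicators D + E, rendered as the German flag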
318def create_argument_parser() -> argparse.ArgumentParser:
319 """Parse command line arguments."""
320 parser = argparse.ArgumentParser()
321 parser.add_argument(
322 "-c",
323 "--config",
324 default=[pathlib.Path("config.ini")],
325 help="the path to the config file",
326 metavar="PATH",
327 nargs="*",
328 type=pathlib.Path,
329 )
330 parser.add_argument(
331 "--save-config-to",
332 default=None,
333 help="save the configuration to a file",
334 metavar="PATH",
335 nargs="?",
336 type=pathlib.Path,
337 )
338 return parser
341def emoji2html(emoji: str) -> str:
342 """Convert an emoji to HTML."""
343 return f"<img src={emoji2url(emoji)!r} alt={emoji!r} class='emoji'>"
346def emoji2url(emoji: str) -> str:
347 """Convert an emoji to an URL."""
348 if len(emoji) == 2:
349 emoji = emoji.removesuffix("\uFE0F")
350 code = "-".join(f"{ord(c):04x}" for c in emoji)
351 return f"/static/openmoji/svg/{code.upper()}.svg?v={OPENMOJI_VERSION}"
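A sketch of emoji2url, assuming the module imports as an_website.utils.utils: code points are joined as uppercase hex, and a trailing variation selector (U+FE0F) is dropped for two-character emoji; the v query parameter is the installed OpenMoji version.

from an_website.utils.utils import emoji2url

print(emoji2url("\u2705"))  # /static/openmoji/svg/2705.svg?v=<OPENMOJI_VERSION>
print(emoji2url("\u2764\uFE0F"))  # variation selector stripped: /static/openmoji/svg/2764.svg?v=<OPENMOJI_VERSION>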
354def emojify(string: str) -> str:
355 """Emojify a given string."""
356 string = regex.sub(
357 r"[a-zA-Z]+",
358 lambda match: "\u200C".join(country_code_to_flag(match[0])),
359 replace_umlauts(string),
360 )
361 string = regex.sub(
362 r"[0-9#*]+", lambda match: f"{'⃣'.join(match[0])}⃣", string
363 )
364 return (
365 string.replace("!?", "⁉")
366 .replace("!!", "‼")
367 .replace("?", "❓")
368 .replace("!", "❗")
369 .replace("-", "➖")
370 .replace("+", "➕")
371 )
374async def geoip(
375 ip: None | str,
376 database: str = "GeoLite2-City.mmdb",
377 elasticsearch: None | AsyncElasticsearch = None,
378 *,
379 allow_fallback: bool = True,
380 caches: dict[str, dict[str, dict[str, Any]]] = UltraDict(), # noqa: B008
381) -> None | dict[str, Any]:
382 """Get GeoIP information."""
383 # pylint: disable=too-complex
384 if not ip:
385 return None
387 cache = caches.get(ip, {}) # pylint: disable=redefined-outer-name
388 if database not in cache:
389 if not elasticsearch:
390 if allow_fallback and database in {
391 "GeoLite2-City.mmdb",
392 "GeoLite2-Country.mmdb",
393 }:
394 return geoip_fallback(
395 ip, country=database == "GeoLite2-Country.mmdb"
396 )
397 return None
399 properties: None | tuple[str, ...]
400 if database == "GeoLite2-City.mmdb":
401 properties = (
402 "continent_name",
403 "country_iso_code",
404 "country_name",
405 "region_iso_code",
406 "region_name",
407 "city_name",
408 "location",
409 "timezone",
410 )
411 elif database == "GeoLite2-Country.mmdb":
412 properties = (
413 "continent_name",
414 "country_iso_code",
415 "country_name",
416 )
417 elif database == "GeoLite2-ASN.mmdb":
418 properties = ("asn", "network", "organization_name")
419 else:
420 properties = None
422 try:
423 cache[database] = (
424 await elasticsearch.ingest.simulate(
425 pipeline={
426 "processors": [
427 {
428 "geoip": {
429 "field": "ip",
430 "database_file": database,
431 "properties": properties,
432 }
433 }
434 ]
435 },
436 docs=[{"_source": {"ip": ip}}],
437 filter_path="docs.doc._source",
438 )
439 )["docs"][0]["doc"]["_source"].get("geoip", {})
440 except (ApiError, TransportError):
441 if allow_fallback and database in {
442 "GeoLite2-City.mmdb",
443 "GeoLite2-Country.mmdb",
444 }:
445 return geoip_fallback(
446 ip, country=database == "GeoLite2-Country.mmdb"
447 )
448 raise
450 if "country_iso_code" in cache[database]:
451 cache[database]["country_flag"] = country_code_to_flag(
452 cache[database]["country_iso_code"]
453 )
455 caches[ip] = cache
456 return cache[database]
459def geoip_fallback(ip: str, country: bool = False) -> None | dict[str, Any]:
460 """Get GeoIP information without using Elasticsearch."""
461 if not (info := geolite2.lookup(ip)):
462 return None
464 info_dict = info.get_info_dict()
466 continent_name = info_dict.get("continent", {}).get("names", {}).get("en")
467 country_iso_code = info_dict.get("country", {}).get("iso_code")
468 country_name = info_dict.get("country", {}).get("names", {}).get("en")
470 data = {
471 "continent_name": continent_name,
472 "country_iso_code": country_iso_code,
473 "country_name": country_name,
474 }
476 if data["country_iso_code"]:
477 data["country_flag"] = country_code_to_flag(data["country_iso_code"])
479 if country:
480 for key, value in tuple(data.items()):
481 if not value:
482 del data[key]
484 return data
486 latitude = info_dict.get("location", {}).get("latitude")
487 longitude = info_dict.get("location", {}).get("longitude")
488 location = (latitude, longitude) if latitude and longitude else None
489 time_zone = info_dict.get("location", {}).get("time_zone")
491 data.update({"location": location, "timezone": time_zone})
493 for key, value in tuple(data.items()):
494 if not value:
495 del data[key]
497 return data
500def get_arguments_without_help() -> tuple[str, ...]:
501 """Get arguments without help."""
502 return tuple(arg for arg in sys.argv[1:] if arg not in {"-h", "--help"})
505def get_close_matches( # based on difflib.get_close_matches
506 word: str,
507 possibilities: Iterable[str],
508 count: int = 3,
509 cutoff: float = 0.5,
510) -> tuple[str, ...]:
511 """Use normalized_distance to return list of the best "good enough" matches.
513 word is a sequence for which close matches are desired (typically a string).
515 possibilities is a list of sequences against which to match word
516 (typically a list of strings).
518 Optional arg count (default 3) is the maximum number of close matches to
519 return. count must be > 0.
521 Optional arg cutoff (default 0.5) is a float in [0, 1]. Possibilities
522 that don't score at least that similar to word are ignored.
524 The best (no more than count) matches among the possibilities are returned
525 in a tuple, sorted by similarity score, most similar first.
526 """
527 if count <= 0:
528 raise ValueError(f"count must be > 0: {count}")
529 if not 0.0 <= cutoff <= 1.0:
530 raise ValueError(f"cutoff must be in [0.0, 1.0]: {cutoff}")
531 word_len = len(word)
532 if not word_len:
533 if cutoff < 1.0:
534 return ()
535 return Stream(possibilities).limit(count).collect(tuple)
536 result: list[tuple[float, str]] = []
537 for possibility in possibilities:
538 if max_dist := max(word_len, len(possibility)):
539 dist = bounded_edit_distance(
540 possibility, word, 1 + int(cutoff * max_dist)
541 )
542 if (ratio := dist / max_dist) <= cutoff:
543 bisect.insort(result, (ratio, possibility))
544 if len(result) > count:
545 result.pop(-1)
546 # Strip scores for the best count matches
547 return tuple(word for score, word in result)
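A usage sketch, assuming the module imports as an_website.utils.utils: cutoff is a bound on the normalized edit distance, so smaller ratios mean closer matches and the closest ones come first.

from an_website.utils.utils import get_close_matches

print(get_close_matches("helo", ["hello", "help", "hilo", "python"], count=2))
# expected output: ('hello', 'help')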
550def hash_bytes(*args: bytes, hasher: Any = None, size: int = 32) -> str:
551 """Hash bytes and return the Base85 representation."""
552 digest: bytes
553 if not hasher:
554 hasher = blake3()
555 for arg in args:
556 hasher.update(arg)
557 digest = (
558 hasher.digest(size)
559 if isinstance(hasher, blake3)
560 else hasher.digest()[:size]
561 )
562 return b85encode(digest).decode("ASCII")
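A sketch of hash_bytes, assuming the module imports as an_website.utils.utils: by default a BLAKE3 digest of the requested size is taken, while other hashers are truncated to size bytes; the result is Base85-encoded.

from hashlib import sha256

from an_website.utils.utils import hash_bytes

print(hash_bytes(b"spam", b"eggs"))  # 32-byte BLAKE3 digest, Base85-encoded (40 characters)
print(hash_bytes(b"spam", hasher=sha256(), size=16))  # first 16 bytes of a SHA-256 digest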
565def hash_ip(
566 address: None | str | IPv4Address | IPv6Address, size: int = 32
567) -> str:
568 """Hash an IP address."""
569 if isinstance(address, str):
570 address = ip_address(address)
571 if IP_HASH_SALT["date"] != (date := datetime.now(timezone.utc).date()):
572 IP_HASH_SALT["hasher"] = blake3(
573 blake3(date.isoformat().encode("ASCII")).digest()
574 )
575 IP_HASH_SALT["date"] = date
576 return hash_bytes(
577 address.packed if address else b"",
578 hasher=IP_HASH_SALT["hasher"].copy(), # type: ignore[attr-defined]
579 size=size,
580 )
583def is_in_european_union(ip: None | str) -> None | bool:
584 """Return whether the specified address is in the EU."""
585 if not (ip and (info := geolite2.lookup(ip))):
586 return None
588 return cast(bool, info.get_info_dict().get("is_in_european_union", False))
591def is_prime(number: int) -> bool:
592 """Return whether the specified number is prime."""
593 if not number % 2:
594 return number == 2
595 return bool(PRINT & (1 << (number // 2)))
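is_prime reads primes.bin as a bitmap over odd numbers: bit n // 2 is set when the odd number n is prime, and even numbers are handled separately. A self-contained sketch of the same indexing scheme with a locally built bitmap (illustrative only, not how primes.bin is generated):

def build_odd_prime_bitmap(limit: int) -> int:
    """Set bit n // 2 for every odd prime n below limit (trial division sketch)."""
    bitmap = 0
    for n in range(3, limit, 2):
        if all(n % d for d in range(3, int(n**0.5) + 1, 2)):
            bitmap |= 1 << (n // 2)
    return bitmap

SMALL_PRIMES = build_odd_prime_bitmap(100)

def is_prime_small(number: int) -> bool:
    """Mirror the lookup done by is_prime, but against the local bitmap."""
    if not number % 2:
        return number == 2
    return bool(SMALL_PRIMES & (1 << (number // 2)))

assert [n for n in range(2, 30) if is_prime_small(n)] == [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]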
598def length_of_match(match: regex.Match[Any]) -> int:
599 """Calculate the length of the regex match and return it."""
600 return match.end() - match.start()
603def n_from_set[T](set_: Set[T], n: int) -> set[T]: # noqa: D103
604 """Get and return n elements of the set as a new set."""
605 new_set = set()
606 for i, element in enumerate(set_):
607 if i >= n:
608 break
609 new_set.add(element)
610 return new_set
613def name_to_id(val: str) -> str:
614 """Replace umlauts and whitespaces in a string to get a valid HTML id."""
615 return regex.sub(
616 r"[^a-z0-9]+",
617 "-",
618 replace_umlauts(val).lower(),
619 ).strip("-")
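A quick sketch of name_to_id, assuming the module imports as an_website.utils.utils: umlauts are transliterated first, then everything outside [a-z0-9] collapses to hyphens.

from an_website.utils.utils import name_to_id

print(name_to_id("Äpfel & Öl"))  # aepfel-oel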
622def none_to_default[T, D](value: None | T, default: D) -> D | T: # noqa: D103
623 """Like ?? in ECMAScript."""
624 return default if value is None else value
627def parse_bumpscosity(value: str | int | None) -> BumpscosityValue:
628 """Parse a string to a valid bumpscosity value."""
629 if isinstance(value, str):
630 with contextlib.suppress(ValueError):
631 value = int(value, base=0)
632 if value in BUMPSCOSITY_VALUES:
633 return cast(BumpscosityValue, value)
634 return random.Random(repr(value)).choice(BUMPSCOSITY_VALUES)
637def parse_openmoji_arg(value: str, default: OpenMojiValue) -> OpenMojiValue:
638 """Parse the openmoji arg into a Literal."""
639 value = value.lower()
640 if value == "glyf_colr0":
641 return "glyf_colr0"
642 if value == "glyf_colr1":
643 return "glyf_colr1"
644 if value in {"i", "img"}:
645 return "img"
646 if value in {"n", "nope"}:
647 return False
648 return default
651async def ratelimit( # pylint: disable=too-many-arguments
652 redis: Redis[str],
653 redis_prefix: str,
654 remote_ip: str,
655 *,
656 bucket: None | str,
657 max_burst: int,
658 count_per_period: int,
659 period: int,
660 tokens: int,
661) -> tuple[bool, dict[str, str]]:
662 """Take b1nzy to space using Redis."""
663 remote_ip = hash_bytes(remote_ip.encode("ASCII"))
664 key = f"{redis_prefix}:ratelimit:{remote_ip}"
665 if bucket:
666 key = f"{key}:{bucket}"
668 # see: https://github.com/brandur/redis-cell#usage
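# reply layout per the redis-cell README: [limited (0 or 1), limit, remaining, retry_after, reset_after]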
669 result = await redis.execute_command(
670 # type: ignore[no-untyped-call]
671 "CL.THROTTLE",
672 key,
673 max_burst,
674 count_per_period,
675 period,
676 tokens,
677 )
679 now = time.time()
681 headers: dict[str, str] = {}
683 if result[0]:
684 headers["Retry-After"] = str(result[3])
685 if not bucket:
686 headers["X-RateLimit-Global"] = "true"
688 if bucket:
689 headers["X-RateLimit-Limit"] = str(result[1])
690 headers["X-RateLimit-Remaining"] = str(result[2])
691 headers["X-RateLimit-Reset"] = str(now + result[4])
692 headers["X-RateLimit-Reset-After"] = str(result[4])
693 headers["X-RateLimit-Bucket"] = hash_bytes(bucket.encode("ASCII"))
695 return bool(result[0]), headers
698def remove_suffix_ignore_case(string: str, suffix: str) -> str:
699 """Remove a suffix without caring about the case."""
700 if suffix and string.lower().endswith(suffix.lower()):
701 return string[: -len(suffix)]
702 return string
705def replace_umlauts(string: str) -> str:
706 """Replace Ä, Ö, Ü, ẞ, ä, ö, ü, ß in string."""
707 if string.isupper():
708 return (
709 string.replace("Ä", "AE")
710 .replace("Ö", "OE")
711 .replace("Ü", "UE")
712 .replace("ẞ", "SS")
713 )
714 if " " in string:
715 return " ".join(replace_umlauts(word) for word in string.split(" "))
716 return (
717 string.replace("ä", "ae")
718 .replace("ö", "oe")
719 .replace("ü", "ue")
720 .replace("ß", "ss")
721 .replace("Ä", "Ae")
722 .replace("Ö", "Oe")
723 .replace("Ü", "Ue")
724 .replace("ẞ", "SS")
725 )
728def recurse_directory(
729 root: Traversable,
730 # pylint: disable-next=redefined-builtin
731 filter: Callable[[Traversable], bool] = lambda _: True,
732) -> Iterable[str]:
733 """Recursively iterate over entries in a directory."""
734 dirs: list[str] = ["."]
735 while dirs: # pylint: disable=while-used
736 curr_dir = dirs.pop()
737 for path in (root if curr_dir == "." else root / curr_dir).iterdir():
738 current: str = (
739 path.name
740 if curr_dir == "."
741 else os.path.join(curr_dir, path.name)
742 )
743 if path.is_dir():
744 dirs.append(current)
745 if filter(path):
746 yield current
749async def run(
750 program: str,
751 *args: str,
752 stdin: int | IO[Any] = asyncio.subprocess.DEVNULL,
753 stdout: None | int | IO[Any] = asyncio.subprocess.PIPE,
754 stderr: None | int | IO[Any] = asyncio.subprocess.PIPE,
755 **kwargs: Any,
756) -> tuple[None | int, bytes, bytes]:
757 """Run a programm and return the exit code, stdout and stderr as tuple."""
758 proc = await asyncio.create_subprocess_exec(
759 program,
760 *args,
761 stdin=stdin,
762 stdout=stdout,
763 stderr=stderr,
764 **kwargs,
765 )
766 output = await proc.communicate()
767 return proc.returncode, *output
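A usage sketch of run, assuming the module imports as an_website.utils.utils and an echo binary is on PATH:

import asyncio

from an_website.utils.utils import run

returncode, stdout, stderr = asyncio.run(run("echo", "spam"))
print(returncode, stdout)  # 0 b'spam\n'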
770def size_of_file(file: Traversable) -> int:
771 """Calculate the size of a file."""
772 if isinstance(file, Path):
773 return file.stat().st_size
775 with file.open("rb") as data:
776 return sum(map(len, data)) # pylint: disable=bad-builtin
779def str_to_bool(val: None | str | bool, default: None | bool = None) -> bool:
780 """Convert a string representation of truth to True or False."""
781 if isinstance(val, bool):
782 return val
783 if isinstance(val, str):
784 val = val.lower()
785 if val in {
786 "1",
787 "a",
788 "accept",
789 "e",
790 "enabled",
791 "on",
792 "s",
793 "sure",
794 "t",
795 "true",
796 "y",
797 "yes",
798 }:
799 return True
800 if val in {
801 "0",
802 "d",
803 "disabled",
804 "f",
805 "false",
806 "n",
807 "no",
808 "nope",
809 "off",
810 "r",
811 "reject",
812 }:
813 return False
814 if val in {"idc", "maybe", "random"}:
815 return bool(random.randrange(2)) # nosec: B311
816 if default is None:
817 raise ValueError(f"Invalid bool value: {val!r}")
818 return default
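A sketch of str_to_bool, assuming the module imports as an_website.utils.utils: unknown values raise ValueError unless a default is given, and "idc", "maybe" and "random" pick a random answer.

from an_website.utils.utils import str_to_bool

print(str_to_bool("sure"))  # True
print(str_to_bool("off"))  # False
print(str_to_bool("spam", default=True))  # True (falls back to the default)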
821def str_to_set(string: str) -> set[str]:
822 """Convert a string to a set of strings."""
823 return {part.strip() for part in string.split(",") if part.strip()}
826def strangle(string: str) -> float:
827 """Convert a string to an angle."""
828 hasher = sha1(string.encode("UTF-8"), usedforsecurity=False)
829 return int.from_bytes(hasher.digest()[:2], "little") / (1 << 16) * 360
832def time_function[ # noqa: D103
833 T, **P # pylint: disable=invalid-name
834](function: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> tuple[
835 T, float
836]:
837 """Run the function and return the result and the time it took in seconds."""
838 timer = Timer()
839 return function(*args, **kwargs), timer.stop()
842def time_to_str(spam: float) -> str:
843 """Convert the time into a string with second precision."""
844 int_time = int(spam)
845 div_60 = int(int_time / 60)
846 div_60_60 = int(div_60 / 60)
848 return (
849 f"{int(div_60_60 / 24)}d "
850 f"{div_60_60 % 24}h "
851 f"{div_60 % 60}min "
852 f"{int_time % 60}s"
853 )
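Two worked examples for time_to_str; the fractional part of the input is truncated.

from an_website.utils.utils import time_to_str

print(time_to_str(90061))  # 1d 1h 1min 1s
print(time_to_str(3723.9))  # 0d 1h 2min 3s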
856@dataclass(order=True, frozen=True, slots=True)
857class PageInfo:
858 """The PageInfo class that is used for the subpages of a ModuleInfo."""
860 name: str
861 description: str
862 path: None | str = None
863 # keywords that can be used for searching
864 keywords: tuple[str, ...] = field(default_factory=tuple)
865 hidden: bool = False # whether to hide this page info on the page
866 short_name: None | str = None # short name for the page
869@dataclass(order=True, frozen=True, slots=True)
870class ModuleInfo(PageInfo):
871 """
872 The ModuleInfo class adds handlers and subpages to the PageInfo.
874 This gets created by every module to add the handlers.
875 """
877 handlers: tuple[Handler, ...] = field(default_factory=tuple[Handler, ...])
878 sub_pages: tuple[PageInfo, ...] = field(default_factory=tuple)
879 aliases: tuple[str, ...] | Mapping[str, str] = field(default_factory=tuple)
880 required_background_tasks: Collection[BackgroundTask] = field(
881 default_factory=frozenset
882 )
884 def get_keywords_as_str(self, path: str) -> str:
885 """Get the keywords as comma-seperated string."""
886 page_info = self.get_page_info(path)
887 if self != page_info:
888 return ", ".join((*self.keywords, *page_info.keywords))
890 return ", ".join(self.keywords)
892 def get_page_info(self, path: str) -> PageInfo:
893 """Get the PageInfo of the specified path."""
894 if self.path == path:
895 return self
897 for page_info in self.sub_pages:
898 if page_info.path == path:
899 return page_info
901 return self
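A sketch of how a module could describe itself with ModuleInfo, assuming the module imports as an_website.utils.utils; ExampleHandler and the paths are hypothetical, for illustration only.

from tornado.web import RequestHandler

from an_website.utils.utils import ModuleInfo, PageInfo

class ExampleHandler(RequestHandler):  # hypothetical handler, for illustration only
    pass

EXAMPLE_MODULE_INFO = ModuleInfo(
    name="Example",
    description="An example module",
    path="/example",
    keywords=("example",),
    handlers=(("/example", ExampleHandler),),
    sub_pages=(
        PageInfo(
            name="Example sub-page",
            description="A sub-page of the example module",
            path="/example/sub",
            keywords=("sub",),
        ),
    ),
)

assert EXAMPLE_MODULE_INFO.get_page_info("/example/sub").name == "Example sub-page"
assert EXAMPLE_MODULE_INFO.get_keywords_as_str("/example/sub") == "example, sub"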