Coverage for an_website/utils/utils.py: 70.732%
369 statements
« prev ^ index » next coverage.py v7.6.4, created at 2024-11-16 19:56 +0000
1# This program is free software: you can redistribute it and/or modify
2# it under the terms of the GNU Affero General Public License as
3# published by the Free Software Foundation, either version 3 of the
4# License, or (at your option) any later version.
5#
6# This program is distributed in the hope that it will be useful,
7# but WITHOUT ANY WARRANTY; without even the implied warranty of
8# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9# GNU Affero General Public License for more details.
10#
11# You should have received a copy of the GNU Affero General Public License
12# along with this program. If not, see <https://www.gnu.org/licenses/>.
14"""A module with many useful things used by other modules."""
16from __future__ import annotations
18import argparse
19import asyncio
20import contextlib
21import heapq
22import logging
23import os.path
24import pathlib
25import random
26import sys
27import time
28from base64 import b85encode
29from collections.abc import (
30 Awaitable,
31 Callable,
32 Collection,
33 Generator,
34 Iterable,
35 Mapping,
36 Set,
37)
38from dataclasses import dataclass, field
39from datetime import datetime, timezone
40from enum import IntFlag
from functools import cache, lru_cache, partial
42from hashlib import sha1
43from importlib.resources.abc import Traversable
44from ipaddress import IPv4Address, IPv6Address, ip_address, ip_network
45from pathlib import Path
46from typing import (
47 IO,
48 TYPE_CHECKING,
49 Any,
50 Final,
51 Literal,
52 TypeAlias,
53 Union,
54 cast,
55 get_args,
56)
57from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit, urlunsplit
59import elasticapm
60import regex
61from blake3 import blake3
62from elastic_transport import ApiError, TransportError
63from elasticsearch import AsyncElasticsearch
64from geoip import geolite2 # type: ignore[import-untyped]
65from openmoji_dist import VERSION as OPENMOJI_VERSION
66from rapidfuzz.distance.Levenshtein import normalized_distance
67from redis.asyncio import Redis
68from tornado.web import HTTPError, RequestHandler
69from UltraDict import UltraDict # type: ignore[import-untyped]
71from .. import DIR as ROOT_DIR
73if TYPE_CHECKING:
74 from .background_tasks import BackgroundTask
76LOGGER: Final = logging.getLogger(__name__)
78# pylint: disable=consider-alternative-union-syntax
79type Handler = Union[
80 tuple[str, type[RequestHandler]],
81 tuple[str, type[RequestHandler], dict[str, Any]],
82 tuple[str, type[RequestHandler], dict[str, Any], str],
83]
85type OpenMojiValue = Literal[False, "img", "glyf_colr1", "glyf_colr0"]
86BumpscosityValue: TypeAlias = Literal[0, 1, 12, 50, 76, 100, 1000]
87BUMPSCOSITY_VALUES: Final[tuple[BumpscosityValue, ...]] = get_args(
88 BumpscosityValue
89)
91PRINT = int.from_bytes((ROOT_DIR / "primes.bin").read_bytes(), "big")
93IP_HASH_SALT: Final = {
94 "date": datetime.now(timezone.utc).date(),
95 "hasher": blake3(
96 blake3(
97 datetime.now(timezone.utc).date().isoformat().encode("ASCII")
98 ).digest()
99 ),
100}
102SUS_PATHS: Final[Set[str]] = {
103 "/-profiler/phpinfo",
104 "/.aws/credentials",
105 "/.env",
106 "/.env.bak",
107 "/.ftpconfig",
108 "/admin/controller/extension/extension",
109 "/assets/filemanager/dialog",
110 "/assets/vendor/server/php",
111 "/aws.yml",
112 "/boaform/admin/formlogin",
113 "/phpinfo",
114 "/public/assets/jquery-file-upload/server/php",
115 "/root",
116 "/settings/aws.yml",
117 "/uploads",
118 "/vendor/phpunit/phpunit/src/util/php/eval-stdin",
119 "/wordpress",
120 "/wp",
121 "/wp-admin",
122 "/wp-admin/css",
123 "/wp-includes",
124 "/wp-login",
125 "/wp-upload",
126}
129class ArgparseNamespace(argparse.Namespace):
130 """A class to fake type hints for argparse.Namespace."""
132 # pylint: disable=too-few-public-methods
133 __slots__ = ("config", "save_config_to")
135 config: list[pathlib.Path]
136 save_config_to: pathlib.Path | None
139class AwaitableValue[T](Awaitable[T]): # pylint: disable=undefined-variable
140 # pylint: disable=too-few-public-methods
141 """An awaitable that always returns the same value."""
143 def __await__(self) -> Generator[None, None, T]:
144 """Return the value."""
145 yield
146 return self._value
148 def __init__(self, value: T) -> None:
149 """Set the value."""
150 self._value = value
153class Permission(IntFlag):
154 """Permissions for accessing restricted stuff."""
156 RATELIMITS = 1
157 TRACEBACK = 2
158 BACKDOOR = 4
159 UPDATE = 8
160 REPORTING = 16
161 SHORTEN = 32
162 UPLOAD = 64
165class Timer:
166 """Timer class used for timing stuff."""
168 __slots__ = ("_execution_time", "_start_time")
170 _execution_time: int
172 def __init__(self) -> None:
173 """Start the timer."""
174 self._start_time = time.perf_counter_ns()
176 def get(self) -> float:
177 """Get the execution time in seconds."""
178 return self.get_ns() / 1_000_000_000
180 def get_ns(self) -> int:
181 """Get the execution time in nanoseconds."""
182 assert hasattr(self, "_execution_time"), "Timer not stopped yet"
183 return self._execution_time
185 def stop(self) -> float:
186 """Stop the timer and get the execution time in seconds."""
187 return self.stop_ns() / 1_000_000_000
189 def stop_ns(self) -> int:
190 """Stop the timer and get the execution time in nanoseconds."""
191 assert not hasattr(self, "_execution_time"), "Timer already stopped"
192 self._execution_time = time.perf_counter_ns() - self._start_time
193 return self._execution_time
196@cache
197def add_args_to_url(url: str | SplitResult, **kwargs: object) -> str:
198 """Add query arguments to a URL."""
199 if isinstance(url, str):
200 url = urlsplit(url)
202 if not kwargs:
203 return url.geturl()
205 url_args: dict[str, str] = dict(
206 parse_qsl(url.query, keep_blank_values=True)
207 )
209 for key, value in kwargs.items():
210 if value is None:
211 if key in url_args:
212 del url_args[key]
213 # pylint: disable-next=confusing-consecutive-elif
214 elif isinstance(value, bool):
215 url_args[key] = bool_to_str(value)
216 else:
217 url_args[key] = str(value)
219 return urlunsplit(
220 (
221 url.scheme,
222 url.netloc,
223 url.path,
224 urlencode(url_args),
225 url.fragment,
226 )
227 )
230def anonymize_ip[ # noqa: D103
231 A: (str, None, str | None)
232](address: A, *, ignore_invalid: bool = False) -> A:
233 """Anonymize an IP address."""
234 if address is None:
235 return None
237 address = address.strip()
239 try:
240 version = ip_address(address).version
241 except ValueError:
242 if ignore_invalid:
243 return address
244 raise
246 if version == 4:
247 return str(ip_network(address + "/24", strict=False).network_address)
248 if version == 6:
249 return str(ip_network(address + "/48", strict=False).network_address)
251 raise HTTPError(reason="ERROR: -41")
254ansi_replace = partial(regex.sub, "\033" + r"\[-?\d+[a-zA-Z]", "")
255ansi_replace.__doc__ = "Remove ANSI escape sequences from a string."
258def apm_anonymization_processor(
259 client: elasticapm.Client, # pylint: disable=unused-argument
260 event: dict[str, Any],
261) -> dict[str, Any]:
262 """Anonymize an APM event."""
263 if "context" in event and "request" in event["context"]:
264 request = event["context"]["request"]
265 if "url" in request and "pathname" in request["url"]:
266 path = request["url"]["pathname"]
267 if path == "/robots.txt" or path.lower() in SUS_PATHS:
268 return event
269 if "socket" in request and "remote_address" in request["socket"]:
270 request["socket"]["remote_address"] = anonymize_ip(
271 request["socket"]["remote_address"]
272 )
273 if "headers" in request:
274 headers = request["headers"]
275 if "X-Forwarded-For" in headers:
276 headers["X-Forwarded-For"] = ", ".join(
277 anonymize_ip(ip.strip(), ignore_invalid=True)
278 for ip in headers["X-Forwarded-For"].split(",")
279 )
280 for header in headers:
281 if "ip" in header.lower().split("-"):
282 headers[header] = anonymize_ip(
283 headers[header], ignore_invalid=True
284 )
285 return event
288def apply[V, Ret](value: V, fun: Callable[[V], Ret]) -> Ret: # noqa: D103
289 """Apply a function to a value and return the result."""
290 return fun(value)
293backspace_replace = partial(regex.sub, ".?\x08", "")
294backspace_replace.__doc__ = "Remove backspaces from a string."
297def bool_to_str(val: bool) -> str:
298 """Convert a boolean to sure/nope."""
299 return "sure" if val else "nope"
302def country_code_to_flag(code: str) -> str:
303 """Convert a two-letter ISO country code to a flag emoji."""
304 return "".join(chr(ord(char) + 23 * 29 * 191) for char in code.upper())
307def create_argument_parser() -> argparse.ArgumentParser:
308 """Parse command line arguments."""
309 parser = argparse.ArgumentParser()
310 parser.add_argument(
311 "-c",
312 "--config",
313 default=[pathlib.Path("config.ini")],
314 help="the path to the config file",
315 metavar="PATH",
316 nargs="*",
317 type=pathlib.Path,
318 )
319 parser.add_argument(
320 "--save-config-to",
321 default=None,
322 help="save the configuration to a file",
323 metavar="Path",
324 nargs="?",
325 type=pathlib.Path,
326 )
327 return parser
330def emoji2html(emoji: str) -> str:
331 """Convert an emoji to HTML."""
332 return f"<img src={emoji2url(emoji)!r} alt={emoji!r} class='emoji'>"
335def emoji2url(emoji: str) -> str:
336 """Convert an emoji to an URL."""
337 if len(emoji) == 2:
338 emoji = emoji.removesuffix("\uFE0F")
339 code = "-".join(f"{ord(c):04x}" for c in emoji)
340 return f"/static/openmoji/svg/{code.upper()}.svg?v={OPENMOJI_VERSION}"
343def emojify(string: str) -> str:
344 """Emojify a given string."""
345 string = regex.sub(
346 r"[a-zA-Z]+",
347 lambda match: "\u200C".join(country_code_to_flag(match[0])),
348 replace_umlauts(string),
349 )
350 string = regex.sub(
351 r"[0-9#*]+", lambda match: f"{'⃣'.join(match[0])}⃣", string
352 )
353 return (
354 string.replace("!?", "⁉")
355 .replace("!!", "‼")
356 .replace("?", "❓")
357 .replace("!", "❗")
358 .replace("-", "➖")
359 .replace("+", "➕")
360 )
363async def geoip(
364 ip: None | str,
365 database: str = "GeoLite2-City.mmdb",
366 elasticsearch: None | AsyncElasticsearch = None,
367 *,
368 allow_fallback: bool = True,
369 caches: dict[str, dict[str, dict[str, Any]]] = UltraDict(), # noqa: B008
370) -> None | dict[str, Any]:
371 """Get GeoIP information."""
372 # pylint: disable=too-complex
373 if not ip:
374 return None
376 cache = caches.get(ip, {}) # pylint: disable=redefined-outer-name
377 if database not in cache:
378 if not elasticsearch:
379 if allow_fallback and database in {
380 "GeoLite2-City.mmdb",
381 "GeoLite2-Country.mmdb",
382 }:
383 return geoip_fallback(
384 ip, country=database == "GeoLite2-City.mmdb"
385 )
386 return None
388 properties: None | tuple[str, ...]
389 if database == "GeoLite2-City.mmdb":
390 properties = (
391 "continent_name",
392 "country_iso_code",
393 "country_name",
394 "region_iso_code",
395 "region_name",
396 "city_name",
397 "location",
398 "timezone",
399 )
400 elif database == "GeoLite2-Country.mmdb":
401 properties = (
402 "continent_name",
403 "country_iso_code",
404 "country_name",
405 )
406 elif database == "GeoLite2-ASN.mmdb":
407 properties = ("asn", "network", "organization_name")
408 else:
409 properties = None
411 try:
412 cache[database] = (
413 await elasticsearch.ingest.simulate(
414 pipeline={
415 "processors": [
416 {
417 "geoip": {
418 "field": "ip",
419 "database_file": database,
420 "properties": properties,
421 }
422 }
423 ]
424 },
425 docs=[{"_source": {"ip": ip}}],
426 filter_path="docs.doc._source",
427 )
428 )["docs"][0]["doc"]["_source"].get("geoip", {})
429 except (ApiError, TransportError):
430 if allow_fallback and database in {
431 "GeoLite2-City.mmdb",
432 "GeoLite2-Country.mmdb",
433 }:
434 return geoip_fallback(
435 ip, country=database == "GeoLite2-City.mmdb"
436 )
437 raise
439 if "country_iso_code" in cache[database]:
440 cache[database]["country_flag"] = country_code_to_flag(
441 cache[database]["country_iso_code"]
442 )
444 caches[ip] = cache
445 return cache[database]
448def geoip_fallback(ip: str, country: bool = False) -> None | dict[str, Any]:
449 """Get GeoIP information without using Elasticsearch."""
450 if not (info := geolite2.lookup(ip)):
451 return None
453 info_dict = info.get_info_dict()
455 continent_name = info_dict.get("continent", {}).get("names", {}).get("en")
456 country_iso_code = info_dict.get("country", {}).get("iso_code")
457 country_name = info_dict.get("country", {}).get("names", {}).get("en")
459 data = {
460 "continent_name": continent_name,
461 "country_iso_code": country_iso_code,
462 "country_name": country_name,
463 }
465 if data["country_iso_code"]:
466 data["country_flag"] = country_code_to_flag(data["country_iso_code"])
468 if country:
469 for key, value in tuple(data.items()):
470 if not value:
471 del data[key]
473 return data
475 latitude = info_dict.get("location", {}).get("latitude")
476 longitude = info_dict.get("location", {}).get("longitude")
477 location = (latitude, longitude) if latitude and longitude else None
478 time_zone = info_dict.get("location", {}).get("time_zone")
480 data.update({"location": location, "timezone": time_zone})
482 for key, value in tuple(data.items()):
483 if not value:
484 del data[key]
486 return data
489def get_arguments_without_help() -> tuple[str, ...]:
490 """Get arguments without help."""
491 return tuple(arg for arg in sys.argv[1:] if arg not in {"-h", "--help"})
494def get_close_matches( # based on difflib.get_close_matches
495 word: str,
496 possibilities: Iterable[str],
497 count: int = 3,
498 cutoff: float = 0.5,
499) -> tuple[str, ...]:
500 """Use normalized_distance to return list of the best "good enough" matches.
502 word is a sequence for which close matches are desired (typically a string).
504 possibilities is a list of sequences against which to match word
505 (typically a list of strings).
507 Optional arg count (default 3) is the maximum number of close matches to
508 return. count must be > 0.
510 Optional arg cutoff (default 0.5) is a float in [0, 1]. Possibilities
511 that don't score at least that similar to word are ignored.
513 The best (no more than count) matches among the possibilities are returned
514 in a tuple, sorted by similarity score, most similar first.
515 """
516 if count <= 0:
517 raise ValueError(f"count must be > 0: {count}")
518 if not 0.0 <= cutoff <= 1.0:
519 raise ValueError(f"cutoff must be in [0.0, 1.0]: {cutoff}")
520 result: list[tuple[float, str]] = []
521 for possibility in possibilities:
522 ratio: float = normalized_distance(possibility, word)
523 if ratio <= cutoff:
524 result.append((ratio, possibility))
525 # Strip scores for the best count matches
526 return tuple(word for score, word in heapq.nsmallest(count, result))
529def hash_bytes(*args: bytes, hasher: Any = None, size: int = 32) -> str:
530 """Hash bytes and return the Base85 representation."""
531 digest: bytes
532 if not hasher:
533 hasher = blake3()
534 for arg in args:
535 hasher.update(arg)
536 digest = (
537 hasher.digest(size)
538 if isinstance(hasher, blake3)
539 else hasher.digest()[:size]
540 )
541 return b85encode(digest).decode("ASCII")
544def hash_ip(
545 address: None | str | IPv4Address | IPv6Address, size: int = 32
546) -> str:
547 """Hash an IP address."""
548 if isinstance(address, str):
549 address = ip_address(address)
550 if IP_HASH_SALT["date"] != (date := datetime.now(timezone.utc).date()):
551 IP_HASH_SALT["hasher"] = blake3(
552 blake3(date.isoformat().encode("ASCII")).digest()
553 )
554 IP_HASH_SALT["date"] = date
555 return hash_bytes(
556 address.packed if address else b"",
557 hasher=IP_HASH_SALT["hasher"].copy(), # type: ignore[attr-defined]
558 size=size,
559 )
562def is_in_european_union(ip: None | str) -> None | bool:
563 """Return whether the specified address is in the EU."""
564 if not (ip and (info := geolite2.lookup(ip))):
565 return None
567 return cast(bool, info.get_info_dict().get("is_in_european_union", False))
570def is_prime(number: int) -> bool:
571 """Return whether the specified number is prime."""
572 if not number % 2:
573 return number == 2
574 return bool(PRINT & (1 << (number // 2)))
577def length_of_match(match: regex.Match[Any]) -> int:
578 """Calculate the length of the regex match and return it."""
579 return match.end() - match.start()
582def n_from_set[T](set_: Set[T], n: int) -> set[T]: # noqa: D103
583 """Get and return n elements of the set as a new set."""
584 new_set = set()
585 for i, element in enumerate(set_):
586 if i >= n:
587 break
588 new_set.add(element)
589 return new_set
592def name_to_id(val: str) -> str:
593 """Replace umlauts and whitespaces in a string to get a valid HTML id."""
594 return regex.sub(
595 r"[^a-z0-9]+",
596 "-",
597 replace_umlauts(val).lower(),
598 ).strip("-")
601def none_to_default[T, D](value: None | T, default: D) -> D | T: # noqa: D103
602 """Like ?? in ECMAScript."""
603 return default if value is None else value
606def parse_bumpscosity(value: str | int | None) -> BumpscosityValue:
607 """Parse a string to a valid bumpscosity value."""
608 if isinstance(value, str):
609 with contextlib.suppress(ValueError):
610 value = int(value, base=0)
611 if value in BUMPSCOSITY_VALUES:
612 return cast(BumpscosityValue, value)
613 return random.Random(repr(value)).choice(BUMPSCOSITY_VALUES)
616def parse_openmoji_arg(value: str, default: OpenMojiValue) -> OpenMojiValue:
617 """Parse the openmoji arg into a Literal."""
618 value = value.lower()
619 if value == "glyf_colr0":
620 return "glyf_colr0"
621 if value == "glyf_colr1":
622 return "glyf_colr1"
623 if value in {"i", "img"}:
624 return "img"
625 if value in {"n", "nope"}:
626 return False
627 return default
630async def ratelimit( # pylint: disable=too-many-arguments
631 redis: Redis[str],
632 redis_prefix: str,
633 remote_ip: str,
634 *,
635 bucket: None | str,
636 max_burst: int,
637 count_per_period: int,
638 period: int,
639 tokens: int,
640) -> tuple[bool, dict[str, str]]:
641 """Take b1nzy to space using Redis."""
642 remote_ip = hash_bytes(remote_ip.encode("ASCII"))
643 key = f"{redis_prefix}:ratelimit:{remote_ip}"
644 if bucket:
645 key = f"{key}:{bucket}"
647 # see: https://github.com/brandur/redis-cell#usage
648 result = await redis.execute_command(
649 # type: ignore[no-untyped-call]
650 "CL.THROTTLE",
651 key,
652 max_burst,
653 count_per_period,
654 period,
655 tokens,
656 )
658 now = time.time()
660 headers: dict[str, str] = {}
662 if result[0]:
663 headers["Retry-After"] = str(result[3])
664 if not bucket:
665 headers["X-RateLimit-Global"] = "true"
667 if bucket:
668 headers["X-RateLimit-Limit"] = str(result[1])
669 headers["X-RateLimit-Remaining"] = str(result[2])
670 headers["X-RateLimit-Reset"] = str(now + result[4])
671 headers["X-RateLimit-Reset-After"] = str(result[4])
672 headers["X-RateLimit-Bucket"] = hash_bytes(bucket.encode("ASCII"))
674 return bool(result[0]), headers
677def remove_suffix_ignore_case(string: str, suffix: str) -> str:
678 """Remove a suffix without caring about the case."""
679 if string.lower().endswith(suffix.lower()):
680 return string[: -len(suffix)]
681 return string
684def replace_umlauts(string: str) -> str:
685 """Replace Ä, Ö, Ü, ẞ, ä, ö, ü, ß in string."""
686 if string.isupper():
687 return (
688 string.replace("Ä", "AE")
689 .replace("Ö", "OE")
690 .replace("Ü", "UE")
691 .replace("ẞ", "SS")
692 )
693 if " " in string:
694 return " ".join(replace_umlauts(word) for word in string.split(" "))
695 return (
696 string.replace("ä", "ae")
697 .replace("ö", "oe")
698 .replace("ü", "ue")
699 .replace("ß", "ss")
700 .replace("Ä", "Ae")
701 .replace("Ö", "Oe")
702 .replace("Ü", "Ue")
703 .replace("ẞ", "SS")
704 )
707def recurse_directory(
708 root: Traversable,
709 # pylint: disable-next=redefined-builtin
710 filter: Callable[[Traversable], bool] = lambda _: True,
711) -> Iterable[str]:
712 """Recursively iterate over entries in a directory."""
713 dirs: list[str] = ["."]
714 while dirs: # pylint: disable=while-used
715 curr_dir = dirs.pop()
716 for path in (root if curr_dir == "." else root / curr_dir).iterdir():
717 current: str = (
718 path.name
719 if curr_dir == "."
720 else os.path.join(curr_dir, path.name)
721 )
722 if path.is_dir():
723 dirs.append(current)
724 if filter(path):
725 yield current
728async def run(
729 program: str,
730 *args: str,
731 stdin: int | IO[Any] = asyncio.subprocess.DEVNULL,
732 stdout: None | int | IO[Any] = asyncio.subprocess.PIPE,
733 stderr: None | int | IO[Any] = asyncio.subprocess.PIPE,
734 **kwargs: Any,
735) -> tuple[None | int, bytes, bytes]:
736 """Run a programm and return the exit code, stdout and stderr as tuple."""
737 proc = await asyncio.create_subprocess_exec(
738 program,
739 *args,
740 stdin=stdin,
741 stdout=stdout,
742 stderr=stderr,
743 **kwargs,
744 )
745 output = await proc.communicate()
746 return proc.returncode, *output
749def size_of_file(file: Traversable) -> int:
750 """Calculate the size of a file."""
751 if isinstance(file, Path):
752 return file.stat().st_size
754 with file.open("rb") as data:
755 return sum(map(len, data)) # pylint: disable=bad-builtin
758def str_to_bool(val: None | str | bool, default: None | bool = None) -> bool:
759 """Convert a string representation of truth to True or False."""
760 if isinstance(val, bool):
761 return val
762 if isinstance(val, str):
763 val = val.lower()
764 if val in {
765 "1",
766 "a",
767 "accept",
768 "e",
769 "enabled",
770 "on",
771 "s",
772 "sure",
773 "t",
774 "true",
775 "y",
776 "yes",
777 }:
778 return True
779 if val in {
780 "0",
781 "d",
782 "disabled",
783 "f",
784 "false",
785 "n",
786 "no",
787 "nope",
788 "off",
789 "r",
790 "reject",
791 }:
792 return False
793 if val in {"idc", "maybe", "random"}:
794 return bool(random.randrange(2)) # nosec: B311
795 if default is None:
796 raise ValueError(f"Invalid bool value: {val!r}")
797 return default
800def str_to_set(string: str) -> set[str]:
801 """Convert a string to a set of strings."""
802 return {part.strip() for part in string.split(",") if part.strip()}
805def strangle(string: str) -> float:
806 """Convert a string to an angle."""
807 hasher = sha1(string.encode("UTF-8"), usedforsecurity=False)
808 return int.from_bytes(hasher.digest()[:2], "little") / (1 << 16) * 360
811def time_function[ # noqa: D103
812 T, **P # pylint: disable=invalid-name
813](function: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> tuple[
814 T, float
815]:
816 """Run the function and return the result and the time it took in seconds."""
817 timer = Timer()
818 return function(*args, **kwargs), timer.stop()
821def time_to_str(spam: float) -> str:
822 """Convert the time into a string with second precision."""
823 int_time = int(spam)
824 div_60 = int(int_time / 60)
825 div_60_60 = int(div_60 / 60)
827 return (
828 f"{int(div_60_60 / 24)}d "
829 f"{div_60_60 % 24}h "
830 f"{div_60 % 60}min "
831 f"{int_time % 60}s"
832 )
835@dataclass(order=True, frozen=True, slots=True)
836class PageInfo:
837 """The PageInfo class that is used for the subpages of a ModuleInfo."""
839 name: str
840 description: str
841 path: None | str = None
842 # keywords that can be used for searching
843 keywords: tuple[str, ...] = field(default_factory=tuple)
844 hidden: bool = False # whether to hide this page info on the page
845 short_name: None | str = None # short name for the page
848@dataclass(order=True, frozen=True, slots=True)
849class ModuleInfo(PageInfo):
850 """
851 The ModuleInfo class adds handlers and subpages to the PageInfo.
853 This gets created by every module to add the handlers.
854 """
856 handlers: tuple[Handler, ...] = field(default_factory=tuple[Handler, ...])
857 sub_pages: tuple[PageInfo, ...] = field(default_factory=tuple)
858 aliases: tuple[str, ...] | Mapping[str, str] = field(default_factory=tuple)
859 required_background_tasks: Collection[BackgroundTask] = field(
860 default_factory=frozenset
861 )
863 def get_keywords_as_str(self, path: str) -> str:
864 """Get the keywords as comma-seperated string."""
865 page_info = self.get_page_info(path)
866 if self != page_info:
867 return ", ".join((*self.keywords, *page_info.keywords))
869 return ", ".join(self.keywords)
871 def get_page_info(self, path: str) -> PageInfo:
872 """Get the PageInfo of the specified path."""
873 if self.path == path:
874 return self
876 for page_info in self.sub_pages:
877 if page_info.path == path:
878 return page_info
880 return self