Coverage for an_website / utils / utils.py: 70.866%
381 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-22 18:49 +0000
1# This program is free software: you can redistribute it and/or modify
2# it under the terms of the GNU Affero General Public License as
3# published by the Free Software Foundation, either version 3 of the
4# License, or (at your option) any later version.
5#
6# This program is distributed in the hope that it will be useful,
7# but WITHOUT ANY WARRANTY; without even the implied warranty of
8# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9# GNU Affero General Public License for more details.
10#
11# You should have received a copy of the GNU Affero General Public License
12# along with this program. If not, see <https://www.gnu.org/licenses/>.
14"""A module with many useful things used by other modules."""
17import argparse
18import asyncio
19import bisect
20import contextlib
21import logging
22import random
23import sys
24import time
25from base64 import b85encode
26from collections.abc import (
27 Awaitable,
28 Callable,
29 Collection,
30 Generator,
31 Iterable,
32 Mapping,
33 Set,
34)
35from dataclasses import dataclass, field
36from datetime import datetime, timezone
37from enum import IntFlag
38from functools import cache, partial
39from hashlib import sha1
40from importlib.resources.abc import Traversable
41from ipaddress import IPv4Address, IPv6Address, ip_address, ip_network
42from pathlib import Path
43from typing import (
44 IO,
45 TYPE_CHECKING,
46 Any,
47 Final,
48 Literal,
49 TypeAlias,
50 cast,
51 get_args,
52)
53from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit, urlunsplit
55import elasticapm
56import regex
57from blake3 import blake3
58from elastic_transport import ApiError, TransportError
59from elasticsearch import AsyncElasticsearch
60from geoip import geolite2 # type: ignore[import-untyped]
61from openmoji_dist import VERSION as OPENMOJI_VERSION
62from rapidfuzz.distance.Levenshtein import distance
63from redis.asyncio import Redis
64from tornado.web import HTTPError, RequestHandler
65from typed_stream import Stream
66from UltraDict import UltraDict # type: ignore[import-untyped]
68from .. import DIR as ROOT_DIR, pytest_is_running
70if TYPE_CHECKING:
71 from .background_tasks import BackgroundTask
LOGGER: Final = logging.getLogger(__name__)

# A Tornado handler spec: (pattern, handler[, init kwargs[, name]]).
type Handler = (
    tuple[str, type[RequestHandler]]
    | tuple[str, type[RequestHandler], dict[str, Any]]
    | tuple[str, type[RequestHandler], dict[str, Any], str]
)

# How OpenMoji should be rendered: disabled, as images, or as COLR font glyphs.
type OpenMojiValue = Literal[False, "img", "glyf_colr1", "glyf_colr0"]
BumpscosityValue: TypeAlias = Literal[0, 1, 12, 50, 76, 100, 1000]
# All valid bumpscosity values, extracted from the Literal alias above.
BUMPSCOSITY_VALUES: Final[tuple[BumpscosityValue, ...]] = get_args(
    BumpscosityValue
)

# Primality bitmask for odd numbers loaded from primes.bin: bit (n // 2) is
# set iff n is prime (consumed by is_prime below).
# NOTE(review): the name "PRINT" looks like a typo for "PRIMES" — confirm
# other modules before renaming.
PRINT = int.from_bytes((ROOT_DIR / "primes.bin").read_bytes(), "big")

# Daily-rotating salt state for hash_ip: a BLAKE3 hasher keyed on the
# current UTC date; hash_ip refreshes it when the date changes.
IP_HASH_SALT: Final = {
    "date": datetime.now(timezone.utc).date(),
    "hasher": blake3(
        blake3(
            datetime.now(timezone.utc).date().isoformat().encode("ASCII")
        ).digest()
    ),
}

# Request paths commonly probed by vulnerability scanners; requests to
# these paths are treated as suspicious (e.g. the APM processor keeps
# their addresses unanonymized).
SUS_PATHS: Final[Set[str]] = {
    "/-profiler/phpinfo",
    "/.aws/credentials",
    "/.env",
    "/.env.bak",
    "/.ftpconfig",
    "/admin/controller/extension/extension",
    "/assets/filemanager/dialog",
    "/assets/vendor/server/php",
    "/aws.yml",
    "/boaform/admin/formlogin",
    "/phpinfo",
    "/public/assets/jquery-file-upload/server/php",
    "/root",
    "/settings/aws.yml",
    "/uploads",
    "/vendor/phpunit/phpunit/src/util/php/eval-stdin",
    "/wordpress",
    "/wp",
    "/wp-admin",
    "/wp-admin/css",
    "/wp-includes",
    "/wp-login",
    "/wp-upload",
}
125class ArgparseNamespace(argparse.Namespace):
126 """A class to fake type hints for argparse.Namespace."""
128 # pylint: disable=too-few-public-methods
129 __slots__ = ("config", "save_config_to", "version", "verbose")
131 config: list[Path]
132 save_config_to: Path | None
133 version: bool
134 verbose: int
137class AwaitableValue[T](Awaitable[T]):
138 # pylint: disable=too-few-public-methods
139 """An awaitable that always returns the same value."""
141 def __await__(self) -> Generator[None, None, T]:
142 """Return the value."""
143 yield
144 return self._value
146 def __init__(self, value: T) -> None:
147 """Set the value."""
148 self._value = value
class Permission(IntFlag):
    """Permissions for accessing restricted stuff."""

    RATELIMITS = 1 << 0  # view rate-limit state
    TRACEBACK = 1 << 1  # see full tracebacks
    BACKDOOR = 1 << 2  # use the backdoor
    UPDATE = 1 << 3  # trigger updates
    REPORTING = 1 << 4  # access reporting
    SHORTEN = 1 << 5  # shorten URLs
    UPLOAD = 1 << 6  # upload files
class Timer:
    """Measure elapsed wall-clock time with nanosecond resolution."""

    __slots__ = ("_execution_time", "_start_time")

    _execution_time: int  # set once by stop_ns()

    def __init__(self) -> None:
        """Start the timer."""
        self._start_time = time.perf_counter_ns()

    def get(self) -> float:
        """Get the execution time in seconds."""
        return self.get_ns() / 1_000_000_000

    def get_ns(self) -> int:
        """Get the execution time in nanoseconds."""
        assert hasattr(self, "_execution_time"), "Timer not stopped yet"
        return self._execution_time

    def stop(self) -> float:
        """Stop the timer and get the execution time in seconds."""
        return self.stop_ns() / 1_000_000_000

    def stop_ns(self) -> int:
        """Stop the timer and get the execution time in nanoseconds."""
        assert not hasattr(self, "_execution_time"), "Timer already stopped"
        elapsed = time.perf_counter_ns() - self._start_time
        self._execution_time = elapsed
        return elapsed
194@cache
195def add_args_to_url(url: str | SplitResult, **kwargs: object) -> str:
196 """Add query arguments to a URL."""
197 if isinstance(url, str):
198 url = urlsplit(url)
200 if not kwargs:
201 return url.geturl()
203 url_args: dict[str, str] = dict(
204 parse_qsl(url.query, keep_blank_values=True)
205 )
207 for key, value in kwargs.items():
208 if value is None:
209 if key in url_args:
210 del url_args[key]
211 # pylint: disable-next=confusing-consecutive-elif
212 elif isinstance(value, bool):
213 url_args[key] = bool_to_str(value)
214 else:
215 url_args[key] = str(value)
217 return urlunsplit(
218 (
219 url.scheme,
220 url.netloc,
221 url.path,
222 urlencode(url_args),
223 url.fragment,
224 )
225 )
228def anonymize_ip[ # noqa: D103
229 A: (str, None, str | None)
230](address: A, *, ignore_invalid: bool = False) -> A:
231 """Anonymize an IP address."""
232 if address is None:
233 return None
235 address = address.strip()
237 try:
238 version = ip_address(address).version
239 except ValueError:
240 if ignore_invalid:
241 return address
242 raise
244 if version == 4:
245 return str(ip_network(address + "/24", strict=False).network_address)
246 if version == 6:
247 return str(ip_network(address + "/48", strict=False).network_address)
249 raise HTTPError(reason="ERROR: -41")
# Callable that strips ANSI CSI escape sequences (e.g. colour codes) from a
# string; built with partial so it is called as ansi_replace(string).
ansi_replace = partial(regex.sub, "\033" + r"\[-?\d+[a-zA-Z]", "")
ansi_replace.__doc__ = "Remove ANSI escape sequences from a string."
def apm_anonymization_processor(
    # pylint: disable-next=unused-argument
    client: elasticapm.Client,
    event: dict[str, Any],
) -> dict[str, Any]:
    """Anonymize an APM event in place and return it.

    Replaces IP addresses in the event's request data (the socket remote
    address, X-Forwarded-For, and any header whose name contains an "ip"
    component) with anonymized versions.  Events for /robots.txt or paths
    in SUS_PATHS are returned unchanged — presumably so abusive requests
    keep their full addresses; confirm intent with the maintainers.
    """
    if "context" in event and "request" in event["context"]:
        request = event["context"]["request"]
        if "url" in request and "pathname" in request["url"]:
            path = request["url"]["pathname"]
            # suspicious requests keep their original addresses
            if path == "/robots.txt" or path.lower() in SUS_PATHS:
                return event
        if "socket" in request and "remote_address" in request["socket"]:
            request["socket"]["remote_address"] = anonymize_ip(
                request["socket"]["remote_address"]
            )
        if "headers" in request:
            headers = request["headers"]
            if "X-Forwarded-For" in headers:
                # anonymize each hop of the forwarding chain separately
                headers["X-Forwarded-For"] = ", ".join(
                    anonymize_ip(ip.strip(), ignore_invalid=True)
                    for ip in headers["X-Forwarded-For"].split(",")
                )
            # catch other IP-carrying headers, e.g. X-Real-IP
            for header in headers:
                if "ip" in header.lower().split("-"):
                    headers[header] = anonymize_ip(
                        headers[header], ignore_invalid=True
                    )
    return event
287def apply[V, Ret](value: V, fun: Callable[[V], Ret]) -> Ret: # noqa: D103
288 """Apply a function to a value and return the result."""
289 return fun(value)
# Callable that applies backspaces: deletes each \x08 together with the
# character (if any) immediately before it.
backspace_replace = partial(regex.sub, ".?\x08", "")
backspace_replace.__doc__ = "Remove backspaces from a string."
def bool_to_str(val: bool) -> str:
    """Convert a boolean to sure/nope."""
    if val:
        return "sure"
    return "nope"
def bounded_edit_distance(s1: str, s2: str, /, k: int) -> int:
    """Return a bounded edit distance between two strings.

    k is the maximum number returned
    """
    # rapidfuzz's distance() returns score_cutoff + 1 as a sentinel when
    # the real distance exceeds the cutoff; clamp it back down to k
    if (dist := distance(s1, s2, score_cutoff=k)) == k + 1:
        return k
    return dist
def country_code_to_flag(code: str) -> str:
    """Convert a two-letter ISO country code to a flag emoji."""
    # 23 * 29 * 191 == 127397 == U+1F1E6 (REGIONAL INDICATOR A) - ord("A")
    offset = 23 * 29 * 191
    return "".join(chr(ord(char) + offset) for char in code.upper())
def create_argument_parser() -> argparse.ArgumentParser:
    """Create the command-line argument parser.

    Understands --version, --verbose (countable), -c/--config (one or
    more paths, default config.ini) and --save-config-to.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--version",
        help="show the version of the website",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--verbose",
        help="increase the verbosity (can be given multiple times)",
        action="count",
        default=0,
    )
    parser.add_argument(
        "-c",
        "--config",
        default=[Path("config.ini")],
        help="the path to the config file",
        metavar="PATH",
        nargs="*",
        type=Path,
    )
    parser.add_argument(
        "--save-config-to",
        default=None,
        help="save the configuration to a file",
        # fixed: metavar was "Path", inconsistent with --config's "PATH"
        metavar="PATH",
        nargs="?",
        type=Path,
    )
    return parser
def emoji2html(emoji: str) -> str:
    """Render an emoji as an <img> tag pointing at its OpenMoji SVG."""
    url = emoji2url(emoji)
    return f"<img src={url!r} alt={emoji!r} class='emoji'>"
def emoji2url(emoji: str) -> str:
    """Convert an emoji to an URL."""
    if len(emoji) == 2:
        # drop a trailing variation selector from two-character emojis
        emoji = emoji.removesuffix("\uFE0F")
    code = "-".join(format(ord(char), "04x") for char in emoji)
    return f"/static/openmoji/svg/{code.upper()}.svg?v={OPENMOJI_VERSION}"
if sys.flags.dev_mode and not pytest_is_running():
    # fixed: the saved-function name was misspelled "__origignal_emoji2url"
    __original_emoji2url = emoji2url

    def emoji2url(emoji: str) -> str:  # pylint: disable=function-redefined
        """Convert an emoji to an URL, validating input in dev mode."""
        # pylint: disable=import-outside-toplevel
        import openmoji_dist
        from emoji import is_emoji

        assert is_emoji(emoji), f"{emoji} needs to be emoji"
        result = __original_emoji2url(emoji)
        # the generated URL must point at an existing OpenMoji SVG file
        file = (
            openmoji_dist.get_openmoji_data()
            / result.removeprefix("/static/openmoji/").split("?")[0]
        )
        assert file.is_file(), f"{file} needs to exist"
        return result
# Characters that get a fixed replacement emoji in emojify(); keys that
# already are emojis map to themselves.
# NOTE(review): some values may originally carry a U+FE0F variation
# selector that this copy does not show — confirm against the repository.
EMOJI_MAPPING: Final[Mapping[str, str]] = {
    "⁉": "⁉",
    "‼": "‼",
    "?": "❓",
    "!": "❗",
    "-": "➖",
    "+": "➕",
    "\U0001F51F": "\U0001F51F",
}
def emojify(string: str) -> Iterable[str]:
    """Emojify a given string."""
    preprocessed = (
        replace_umlauts(string)
        .replace("!?", "⁉")
        .replace("!!", "‼")
        .replace("10", "\U0001F51F")
    )
    buffer: list[str] = []  # run of characters with no emoji replacement
    for char in preprocessed:
        replacement: str | None = None
        if char.isascii():
            if char.isdigit() or char in "#*":
                # keycap emoji: digit + variation selector + combining keycap
                replacement = f"{char}\uFE0F\u20E3"
            elif char.isalpha():
                replacement = country_code_to_flag(char)
        replacement = EMOJI_MAPPING.get(char, replacement)

        if replacement is None:
            buffer.append(char)
            continue
        if buffer:
            yield "".join(buffer)
            buffer.clear()
        yield replacement

    if buffer:
        yield "".join(buffer)
async def geoip(
    ip: None | str,
    database: str = "GeoLite2-City.mmdb",
    elasticsearch: None | AsyncElasticsearch = None,
    *,
    allow_fallback: bool = True,
    caches: dict[str, dict[str, dict[str, Any]]] = UltraDict(),  # noqa: B008
) -> None | dict[str, Any]:
    """Get GeoIP information for the given IP address.

    Queries the Elasticsearch geoip ingest processor for the requested
    database and caches results per IP.  Without an Elasticsearch client
    (or when it errors) and allow_fallback is true, the City/Country
    databases fall back to the local geolite2 lookup.  The shared mutable
    default for ``caches`` is deliberate: one process-wide cache.
    """
    # pylint: disable=too-complex
    if not ip:
        return None

    # pylint: disable-next=redefined-outer-name
    cache = caches.get(ip, {})
    if database not in cache:
        if not elasticsearch:
            if allow_fallback and database in {
                "GeoLite2-City.mmdb",
                "GeoLite2-Country.mmdb",
            }:
                # fixed: country-only data is wanted when the *Country*
                # database was requested (the condition tested for City)
                return geoip_fallback(
                    ip, country=database == "GeoLite2-Country.mmdb"
                )
            return None

        # the properties the ingest processor should return per database
        properties: None | tuple[str, ...]
        if database == "GeoLite2-City.mmdb":
            properties = (
                "continent_name",
                "country_iso_code",
                "country_name",
                "region_iso_code",
                "region_name",
                "city_name",
                "location",
                "timezone",
            )
        elif database == "GeoLite2-Country.mmdb":
            properties = (
                "continent_name",
                "country_iso_code",
                "country_name",
            )
        elif database == "GeoLite2-ASN.mmdb":
            properties = ("asn", "network", "organization_name")
        else:
            properties = None

        try:
            # simulate a one-processor ingest pipeline to run geoip
            cache[database] = (
                await elasticsearch.ingest.simulate(
                    pipeline={
                        "processors": [
                            {
                                "geoip": {
                                    "field": "ip",
                                    "database_file": database,
                                    "properties": properties,
                                }
                            }
                        ]
                    },
                    docs=[{"_source": {"ip": ip}}],
                    filter_path="docs.doc._source",
                )
            )["docs"][0]["doc"]["_source"].get("geoip", {})
        except (ApiError, TransportError):
            if allow_fallback and database in {
                "GeoLite2-City.mmdb",
                "GeoLite2-Country.mmdb",
            }:
                # same fix as above: country-only for the Country database
                return geoip_fallback(
                    ip, country=database == "GeoLite2-Country.mmdb"
                )
            raise

        if "country_iso_code" in cache[database]:
            cache[database]["country_flag"] = country_code_to_flag(
                cache[database]["country_iso_code"]
            )

        # reassign so shared-dict implementations pick up the change
        caches[ip] = cache
    return cache[database]
def geoip_fallback(ip: str, country: bool = False) -> None | dict[str, Any]:
    """Get GeoIP information without using Elasticsearch."""
    info = geolite2.lookup(ip)
    if not info:
        return None

    info_dict = info.get_info_dict()

    data: dict[str, Any] = {
        "continent_name": (
            info_dict.get("continent", {}).get("names", {}).get("en")
        ),
        "country_iso_code": info_dict.get("country", {}).get("iso_code"),
        "country_name": (
            info_dict.get("country", {}).get("names", {}).get("en")
        ),
    }

    if data["country_iso_code"]:
        data["country_flag"] = country_code_to_flag(data["country_iso_code"])

    if not country:
        # add city-level location data
        location_info = info_dict.get("location", {})
        latitude = location_info.get("latitude")
        longitude = location_info.get("longitude")
        data["location"] = (
            (latitude, longitude) if latitude and longitude else None
        )
        data["timezone"] = location_info.get("time_zone")

    # drop entries without a value
    return {key: value for key, value in data.items() if value}
def get_arguments_without_help() -> tuple[str, ...]:
    """Get the command-line arguments, dropping -h/--help."""
    help_flags = frozenset({"-h", "--help"})
    return tuple(arg for arg in sys.argv[1:] if arg not in help_flags)
def get_close_matches(  # based on difflib.get_close_matches
    word: str,
    possibilities: Iterable[str],
    count: int = 3,
    cutoff: float = 0.5,
) -> tuple[str, ...]:
    """Use normalized_distance to return list of the best "good enough" matches.

    word is a sequence for which close matches are desired (typically a string).

    possibilities is a list of sequences against which to match word
    (typically a list of strings).

    Optional arg count (default 3) is the maximum number of close matches to
    return. count must be > 0.

    Optional arg cutoff (default 0.5) is a float in [0, 1]. Possibilities
    that don't score at least that similar to word are ignored.

    The best (no more than count) matches among the possibilities are returned
    in a tuple, sorted by similarity score, most similar first.
    """
    if count <= 0:
        raise ValueError(f"count must be > 0: {count}")
    if not 0.0 <= cutoff <= 1.0:
        raise ValueError(f"cutoff must be in [0.0, 1.0]: {cutoff}")
    word_len = len(word)
    if not word_len:
        # the empty word has normalized distance 1 to every non-empty
        # possibility, so only cutoff == 1.0 lets anything through
        if cutoff < 1.0:
            return ()
        return Stream(possibilities).limit(count).collect(tuple)
    result: list[tuple[float, str]] = []
    for possibility in possibilities:
        # max_dist == 0 only if both strings are empty (word isn't here)
        if max_dist := max(word_len, len(possibility)):
            dist = bounded_edit_distance(
                possibility, word, 1 + int(cutoff * max_dist)
            )
            # ratio is a normalized distance: lower means more similar
            if (ratio := dist / max_dist) <= cutoff:
                bisect.insort(result, (ratio, possibility))
                if len(result) > count:
                    result.pop(-1)
    # Strip scores for the best count matches
    # (NOTE: "word" here deliberately shadows the parameter)
    return tuple(word for score, word in result)
def hash_bytes(*args: bytes, hasher: Any = None, size: int = 32) -> str:
    """Hash bytes and return the Base85 representation."""
    if not hasher:
        hasher = blake3()

    for chunk in args:
        hasher.update(chunk)

    # blake3 supports variable-length digests natively; other hashers
    # get their digest truncated instead
    if isinstance(hasher, blake3):
        digest: bytes = hasher.digest(size)
    else:
        digest = hasher.digest()[:size]

    return b85encode(digest).decode("ASCII")
def hash_ip(
    address: None | str | IPv4Address | IPv6Address, size: int = 32
) -> str:
    """Hash an IP address with a salt that rotates daily.

    The salt hasher is derived from the current UTC date (see
    IP_HASH_SALT), so the same address hashes differently on different
    days.  Falsy addresses hash the empty byte string.
    """
    if isinstance(address, str):
        address = ip_address(address)
    # refresh the shared salt when the UTC date has changed
    if IP_HASH_SALT["date"] != (date := datetime.now(timezone.utc).date()):
        IP_HASH_SALT["hasher"] = blake3(
            blake3(date.isoformat().encode("ASCII")).digest()
        )
        IP_HASH_SALT["date"] = date
    return hash_bytes(
        address.packed if address else b"",
        # copy so the shared salt hasher is never mutated
        hasher=IP_HASH_SALT["hasher"].copy(),  # type: ignore[attr-defined]
        size=size,
    )
def is_in_european_union(ip: None | str) -> None | bool:
    """Return whether the specified address is in the EU."""
    if not ip:
        return None
    info = geolite2.lookup(ip)
    if not info:
        return None
    return cast(bool, info.get_info_dict().get("is_in_european_union", False))
def is_prime(number: int) -> bool:
    """Return whether the specified number is prime."""
    if number % 2 == 0:
        # 2 is the only even prime
        return number == 2
    # odd numbers are looked up in the precomputed bitmask
    return bool(PRINT & (1 << (number // 2)))
def length_of_match(match: regex.Match[Any]) -> int:
    """Calculate the length of the regex match and return it."""
    start, end = match.span()
    return end - start
651def n_from_set[T](set_: Set[T], n: int) -> set[T]: # noqa: D103
652 """Get and return n elements of the set as a new set."""
653 new_set = set()
654 for i, element in enumerate(set_):
655 if i >= n:
656 break
657 new_set.add(element)
658 return new_set
def name_to_id(val: str) -> str:
    """Replace umlauts and whitespaces in a string to get a valid HTML id."""
    normalized = replace_umlauts(val).lower()
    # collapse every run of non-alphanumerics to a single dash
    return regex.sub(r"[^a-z0-9]+", "-", normalized).strip("-")
670def none_to_default[T, D](value: None | T, default: D) -> D | T: # noqa: D103
671 """Like ?? in ECMAScript."""
672 return default if value is None else value
def parse_bumpscosity(value: str | int | None) -> BumpscosityValue:
    """Parse a string to a valid bumpscosity value."""
    if isinstance(value, str):
        try:
            # base=0 accepts 0x/0o/0b prefixes as well as decimal
            value = int(value, base=0)
        except ValueError:
            pass
    if value in BUMPSCOSITY_VALUES:
        return cast(BumpscosityValue, value)
    # invalid input picks a deterministic pseudo-random valid value
    return random.Random(repr(value)).choice(BUMPSCOSITY_VALUES)
685def parse_openmoji_arg(value: str, default: OpenMojiValue) -> OpenMojiValue:
686 """Parse the openmoji arg into a Literal."""
687 value = value.lower()
688 if value == "glyf_colr0":
689 return "glyf_colr0"
690 if value == "glyf_colr1":
691 return "glyf_colr1"
692 if value in {"i", "img"}:
693 return "img"
694 if value in {"n", "nope"}:
695 return False
696 return default
# pylint: disable-next=too-many-arguments
async def ratelimit(
    redis: Redis[str],
    redis_prefix: str,
    remote_ip: str,
    *,
    bucket: None | str,
    max_burst: int,
    count_per_period: int,
    period: int,
    tokens: int,
) -> tuple[bool, dict[str, str]]:
    """Take b1nzy to space using Redis.

    Rate-limits the (hashed) remote IP, optionally per bucket, via the
    redis-cell CL.THROTTLE command, and returns a tuple of
    (rate-limited?, HTTP headers to send to the client).
    """
    # only the hash of the IP is used as part of the Redis key
    remote_ip = hash_bytes(remote_ip.encode("ASCII"))
    key = f"{redis_prefix}:ratelimit:{remote_ip}"
    if bucket:
        key = f"{key}:{bucket}"

    # see: https://github.com/brandur/redis-cell#usage
    # result fields: [limited (0/1), limit, remaining,
    #                 retry-after (s), reset-after (s)]
    result = await redis.execute_command(
        # type: ignore[no-untyped-call]
        "CL.THROTTLE",
        key,
        max_burst,
        count_per_period,
        period,
        tokens,
    )

    now = time.time()

    headers: dict[str, str] = {}

    if result[0]:  # the request got rate-limited
        headers["Retry-After"] = str(result[3])
        if not bucket:
            headers["X-RateLimit-Global"] = "true"

    if bucket:
        headers["X-RateLimit-Limit"] = str(result[1])
        headers["X-RateLimit-Remaining"] = str(result[2])
        headers["X-RateLimit-Reset"] = str(now + result[4])
        headers["X-RateLimit-Reset-After"] = str(result[4])
        headers["X-RateLimit-Bucket"] = hash_bytes(bucket.encode("ASCII"))

    return bool(result[0]), headers
def remove_suffix_ignore_case(string: str, suffix: str) -> str:
    """Remove a suffix without caring about the case.

    Returns the string unchanged when the suffix does not match.
    An empty suffix leaves the string unchanged (previously the empty
    suffix returned "" because of the string[:-0] slice).
    """
    if suffix and string.lower().endswith(suffix.lower()):
        return string[: -len(suffix)]
    return string
def replace_umlauts(string: str) -> str:
    """Replace Ä, Ö, Ü, ẞ, ä, ö, ü, ß in string."""
    if string.isupper():
        # all-caps input gets all-caps transliterations
        return string.translate(
            str.maketrans({"Ä": "AE", "Ö": "OE", "Ü": "UE", "ẞ": "SS"})
        )
    if " " in string:
        # recurse per word so the isupper() check applies word by word
        return " ".join(map(replace_umlauts, string.split(" ")))
    return string.translate(
        str.maketrans(
            {
                "ä": "ae",
                "ö": "oe",
                "ü": "ue",
                "ß": "ss",
                "Ä": "Ae",
                "Ö": "Oe",
                "Ü": "Ue",
                "ẞ": "SS",
            }
        )
    )
777async def run(
778 program: str,
779 *args: str,
780 stdin: int | IO[Any] = asyncio.subprocess.DEVNULL,
781 stdout: None | int | IO[Any] = asyncio.subprocess.PIPE,
782 stderr: None | int | IO[Any] = asyncio.subprocess.PIPE,
783 **kwargs: Any,
784) -> tuple[None | int, bytes, bytes]:
785 """Run a programm and return the exit code, stdout and stderr as tuple."""
786 proc = await asyncio.create_subprocess_exec(
787 program,
788 *args,
789 stdin=stdin,
790 stdout=stdout,
791 stderr=stderr,
792 **kwargs,
793 )
794 output = await proc.communicate()
795 return proc.returncode, *output
798def size_of_file(file: Traversable) -> int:
799 """Calculate the size of a file."""
800 if isinstance(file, Path):
801 return file.stat().st_size
803 with file.open("rb") as data:
804 return sum(map(len, data)) # pylint: disable=bad-builtin
807def str_to_bool(val: None | str | bool, default: None | bool = None) -> bool:
808 """Convert a string representation of truth to True or False."""
809 if isinstance(val, bool):
810 return val
811 if isinstance(val, str):
812 val = val.lower()
813 if val in {
814 "1",
815 "a",
816 "accept",
817 "e",
818 "enabled",
819 "on",
820 "s",
821 "sure",
822 "t",
823 "true",
824 "y",
825 "yes",
826 }:
827 return True
828 if val in {
829 "0",
830 "d",
831 "disabled",
832 "f",
833 "false",
834 "n",
835 "no",
836 "nope",
837 "off",
838 "r",
839 "reject",
840 }:
841 return False
842 if val in {"idc", "maybe", "random"}:
843 return bool(random.randrange(2)) # nosec: B311
844 if default is None:
845 raise ValueError(f"Invalid bool value: {val!r}")
846 return default
def str_to_set(string: str) -> set[str]:
    """Convert a comma-separated string to a set of stripped strings."""
    stripped = (part.strip() for part in string.split(","))
    return {part for part in stripped if part}
def strangle(string: str) -> float:
    """Convert a string to an angle in [0, 360)."""
    digest = sha1(string.encode("UTF-8"), usedforsecurity=False).digest()
    # interpret the first two digest bytes as a little-endian 16-bit value
    value = int.from_bytes(digest[:2], "little")
    return value / 65536 * 360
860def time_function[ # noqa: D103
861 T, **P # fmt: skip
862](
863 function: Callable[P, T], *args: P.args, **kwargs: P.kwargs
864) -> tuple[T, float]:
865 """Run the function and return the result and the time it took in seconds."""
866 timer = Timer()
867 return function(*args, **kwargs), timer.stop()
def time_to_str(spam: float) -> str:
    """Convert the time into a string with second precision."""
    total_seconds = int(spam)
    total_minutes = int(total_seconds / 60)
    total_hours = int(total_minutes / 60)
    days = int(total_hours / 24)
    return (
        f"{days}d {total_hours % 24}h "
        f"{total_minutes % 60}min {total_seconds % 60}s"
    )
@dataclass(order=True, frozen=True, slots=True)
class PageInfo:
    """The PageInfo class that is used for the subpages of a ModuleInfo."""

    name: str  # human-readable page name
    description: str  # short description of the page
    path: None | str = None  # URL path of the page, if any
    # keywords that can be used for searching
    keywords: tuple[str, ...] = field(default_factory=tuple)
    hidden: bool = False  # whether to hide this page info on the page
    short_name: None | str = None  # short name for the page
@dataclass(order=True, frozen=True, slots=True)
class ModuleInfo(PageInfo):
    """
    The ModuleInfo class adds handlers and subpages to the PageInfo.

    This gets created by every module to add the handlers.
    """

    handlers: tuple[Handler, ...] = field(default_factory=tuple[Handler, ...])
    sub_pages: tuple[PageInfo, ...] = field(default_factory=tuple)
    aliases: tuple[str, ...] | Mapping[str, str] = field(default_factory=tuple)
    required_background_tasks: Collection[BackgroundTask] = field(
        default_factory=frozenset
    )

    def get_keywords_as_str(self, path: str) -> str:
        """Get the keywords as a comma-separated string."""
        page_info = self.get_page_info(path)
        keywords = self.keywords
        if self != page_info:
            # include the keywords of the matched sub-page as well
            keywords = (*keywords, *page_info.keywords)
        return ", ".join(keywords)

    def get_page_info(self, path: str) -> PageInfo:
        """Get the PageInfo of the specified path."""
        if self.path == path:
            return self
        return next(
            (info for info in self.sub_pages if info.path == path), self
        )