Coverage for an_website / utils / utils.py: 71.465%
389 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-04 20:05 +0000
1# This program is free software: you can redistribute it and/or modify
2# it under the terms of the GNU Affero General Public License as
3# published by the Free Software Foundation, either version 3 of the
4# License, or (at your option) any later version.
5#
6# This program is distributed in the hope that it will be useful,
7# but WITHOUT ANY WARRANTY; without even the implied warranty of
8# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
9# GNU Affero General Public License for more details.
10#
11# You should have received a copy of the GNU Affero General Public License
12# along with this program. If not, see <https://www.gnu.org/licenses/>.
14"""A module with many useful things used by other modules."""
16from __future__ import annotations
18import argparse
19import asyncio
20import bisect
21import contextlib
22import logging
23import random
24import sys
25import time
26from base64 import b85encode
27from collections.abc import (
28 Awaitable,
29 Callable,
30 Collection,
31 Generator,
32 Iterable,
33 Mapping,
34 Set,
35)
36from dataclasses import dataclass, field
37from datetime import datetime, timezone
38from enum import IntFlag
39from functools import cache, partial
40from hashlib import sha1
41from importlib.resources.abc import Traversable
42from ipaddress import IPv4Address, IPv6Address, ip_address, ip_network
43from pathlib import Path
44from typing import (
45 IO,
46 TYPE_CHECKING,
47 Any,
48 Final,
49 Literal,
50 TypeAlias,
51 cast,
52 get_args,
53)
54from urllib.parse import SplitResult, parse_qsl, urlencode, urlsplit, urlunsplit
56import elasticapm
57import regex
58from blake3 import blake3
59from elastic_transport import ApiError, TransportError
60from elasticsearch import AsyncElasticsearch
61from geoip import geolite2 # type: ignore[import-untyped]
62from openmoji_dist import VERSION as OPENMOJI_VERSION
63from rapidfuzz.distance.Levenshtein import distance
64from redis.asyncio import Redis
65from tornado.web import HTTPError, RequestHandler
66from typed_stream import Stream
67from UltraDict import UltraDict # type: ignore[import-untyped]
69from .. import DIR as ROOT_DIR, pytest_is_running
71if TYPE_CHECKING:
72 from .background_tasks import BackgroundTask
LOGGER: Final = logging.getLogger(__name__)

# shape of the handler tuples that get passed to the Tornado application
type Handler = (
    tuple[str, type[RequestHandler]]
    | tuple[str, type[RequestHandler], dict[str, Any]]
    | tuple[str, type[RequestHandler], dict[str, Any], str]
)

# how OpenMoji should be rendered (False disables OpenMoji)
type OpenMojiValue = Literal[False, "img", "glyf_colr1", "glyf_colr0"]
BumpscosityValue: TypeAlias = Literal[0, 1, 12, 50, 76, 100, 1000]
# all valid bumpscosity values, derived from the Literal above
BUMPSCOSITY_VALUES: Final[tuple[BumpscosityValue, ...]] = get_args(
    BumpscosityValue
)

# big integer bitmap read by is_prime(); bit i presumably marks 2 * i + 1
# as prime — TODO confirm against primes.bin generator.
# NOTE(review): the name "PRINT" looks like a typo of "PRIMES"
PRINT = int.from_bytes((ROOT_DIR / "primes.bin").read_bytes(), "big")

# daily-rotating salt used by hash_ip(); hash_ip() refreshes it when the
# UTC date changes, so IP hashes are not linkable across days
IP_HASH_SALT: Final = {
    "date": datetime.now(timezone.utc).date(),
    "hasher": blake3(
        blake3(
            datetime.now(timezone.utc).date().isoformat().encode("ASCII")
        ).digest()
    ),
}

# lower-case request paths typically probed by vulnerability scanners;
# requests for these are treated as suspicious (e.g. not anonymized in APM)
SUS_PATHS: Final[Set[str]] = {
    "/-profiler/phpinfo",
    "/.aws/credentials",
    "/.env",
    "/.env.bak",
    "/.ftpconfig",
    "/admin/controller/extension/extension",
    "/assets/filemanager/dialog",
    "/assets/vendor/server/php",
    "/aws.yml",
    "/boaform/admin/formlogin",
    "/phpinfo",
    "/public/assets/jquery-file-upload/server/php",
    "/root",
    "/settings/aws.yml",
    "/uploads",
    "/vendor/phpunit/phpunit/src/util/php/eval-stdin",
    "/wordpress",
    "/wp",
    "/wp-admin",
    "/wp-admin/css",
    "/wp-includes",
    "/wp-login",
    "/wp-upload",
}
class ArgparseNamespace(argparse.Namespace):
    """A class to fake type hints for argparse.Namespace."""

    # pylint: disable=too-few-public-methods
    __slots__ = ("config", "save_config_to", "version", "verbose")

    # the attributes mirror the options added in create_argument_parser()
    config: list[Path]  # paths given via -c/--config
    save_config_to: Path | None  # target of --save-config-to (None: don't save)
    version: bool  # True when --version was given
    verbose: int  # count of --verbose flags
138class AwaitableValue[T](Awaitable[T]):
139 # pylint: disable=too-few-public-methods
140 """An awaitable that always returns the same value."""
142 def __await__(self) -> Generator[None, None, T]:
143 """Return the value."""
144 yield
145 return self._value
147 def __init__(self, value: T) -> None:
148 """Set the value."""
149 self._value = value
class Permission(IntFlag):
    """Permissions for accessing restricted stuff."""

    # each member is a distinct bit, so permissions combine with | and
    # can be tested with &
    RATELIMITS = 1
    TRACEBACK = 2
    BACKDOOR = 4
    UPDATE = 8
    REPORTING = 16
    SHORTEN = 32
    UPLOAD = 64
class Timer:
    """Measure elapsed wall-clock time with nanosecond resolution."""

    __slots__ = ("_execution_time", "_start_time")

    _execution_time: int  # set by stop_ns(); absence means "still running"

    def __init__(self) -> None:
        """Start the timer by recording the current timestamp."""
        self._start_time = time.perf_counter_ns()

    def get(self) -> float:
        """Return the measured time in seconds (timer must be stopped)."""
        return self.get_ns() / 1_000_000_000

    def get_ns(self) -> int:
        """Return the measured time in nanoseconds (timer must be stopped)."""
        assert hasattr(self, "_execution_time"), "Timer not stopped yet"
        return self._execution_time

    def stop(self) -> float:
        """Stop the timer and return the elapsed seconds."""
        return self.stop_ns() / 1_000_000_000

    def stop_ns(self) -> int:
        """Stop the timer and return the elapsed nanoseconds."""
        assert not hasattr(self, "_execution_time"), "Timer already stopped"
        elapsed = time.perf_counter_ns() - self._start_time
        self._execution_time = elapsed
        return elapsed
195@cache
196def add_args_to_url(url: str | SplitResult, **kwargs: object) -> str:
197 """Add query arguments to a URL."""
198 if isinstance(url, str):
199 url = urlsplit(url)
201 if not kwargs:
202 return url.geturl()
204 url_args: dict[str, str] = dict(
205 parse_qsl(url.query, keep_blank_values=True)
206 )
208 for key, value in kwargs.items():
209 if value is None:
210 if key in url_args:
211 del url_args[key]
212 # pylint: disable-next=confusing-consecutive-elif
213 elif isinstance(value, bool):
214 url_args[key] = bool_to_str(value)
215 else:
216 url_args[key] = str(value)
218 return urlunsplit(
219 (
220 url.scheme,
221 url.netloc,
222 url.path,
223 urlencode(url_args),
224 url.fragment,
225 )
226 )
229def anonymize_ip[ # noqa: D103
230 A: (str, None, str | None)
231](address: A, *, ignore_invalid: bool = False) -> A:
232 """Anonymize an IP address."""
233 if address is None:
234 return None
236 address = address.strip()
238 try:
239 version = ip_address(address).version
240 except ValueError:
241 if ignore_invalid:
242 return address
243 raise
245 if version == 4:
246 return str(ip_network(address + "/24", strict=False).network_address)
247 if version == 6:
248 return str(ip_network(address + "/48", strict=False).network_address)
250 raise HTTPError(reason="ERROR: -41")
# matches CSI escape sequences with one (optionally negative) numeric
# parameter, e.g. "\033[31m" or "\033[-1A"
ansi_replace = partial(regex.sub, "\033" + r"\[-?\d+[a-zA-Z]", "")
ansi_replace.__doc__ = "Remove ANSI escape sequences from a string."
257def apm_anonymization_processor(
258 # pylint: disable-next=unused-argument
259 client: elasticapm.Client,
260 event: dict[str, Any],
261) -> dict[str, Any]:
262 """Anonymize an APM event."""
263 if "context" in event and "request" in event["context"]:
264 request = event["context"]["request"]
265 if "url" in request and "pathname" in request["url"]:
266 path = request["url"]["pathname"]
267 if path == "/robots.txt" or path.lower() in SUS_PATHS:
268 return event
269 if "socket" in request and "remote_address" in request["socket"]:
270 request["socket"]["remote_address"] = anonymize_ip(
271 request["socket"]["remote_address"]
272 )
273 if "headers" in request:
274 headers = request["headers"]
275 if "X-Forwarded-For" in headers:
276 headers["X-Forwarded-For"] = ", ".join(
277 anonymize_ip(ip.strip(), ignore_invalid=True)
278 for ip in headers["X-Forwarded-For"].split(",")
279 )
280 for header in headers:
281 if "ip" in header.lower().split("-"):
282 headers[header] = anonymize_ip(
283 headers[header], ignore_invalid=True
284 )
285 return event
288def apply[V, Ret](value: V, fun: Callable[[V], Ret]) -> Ret: # noqa: D103
289 """Apply a function to a value and return the result."""
290 return fun(value)
# removes a backspace together with the character before it
# (or a lone backspace at the start of the string)
backspace_replace = partial(regex.sub, ".?\x08", "")
backspace_replace.__doc__ = "Remove backspaces from a string."
def bool_to_str(val: bool) -> str:
    """Convert a boolean to sure/nope."""
    if val:
        return "sure"
    return "nope"
def bounded_edit_distance(s1: str, s2: str, /, k: int) -> int:
    """Return a bounded edit distance between two strings.

    k is the maximum number returned
    """
    # rapidfuzz returns k + 1 when the cutoff was exceeded; clamp to k
    dist = distance(s1, s2, score_cutoff=k)
    return k if dist == k + 1 else dist
def country_code_to_flag(code: str) -> str:
    """Convert a two-letter ISO country code to a flag emoji."""
    # regional indicator symbols (🇦..🇿) start this many code points
    # after the ASCII letters; 23 * 29 * 191 == 127397
    offset = 23 * 29 * 191
    return "".join(chr(ord(char) + offset) for char in code.upper())
def create_argument_parser() -> argparse.ArgumentParser:
    """Create the command line argument parser for the website.

    The resulting namespace matches the attributes of ArgparseNamespace.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--version",
        help="show the version of the website",
        action="store_true",
        default=False,
    )
    parser.add_argument(
        "--verbose",
        action="count",
        default=0,
    )
    parser.add_argument(
        "-c",
        "--config",
        default=[Path("config.ini")],
        help="the path to the config file",
        metavar="PATH",
        nargs="*",
        type=Path,
    )
    parser.add_argument(
        "--save-config-to",
        default=None,
        help="save the configuration to a file",
        metavar="PATH",  # fixed: was "Path", inconsistent with --config
        nargs="?",
        type=Path,
    )
    return parser
def emoji2html(emoji: str) -> str:
    """Convert an emoji to HTML."""
    url = emoji2url(emoji)
    return f"<img src={url!r} alt={emoji!r} class='emoji'>"
def emoji2url(emoji: str) -> str:
    """Convert an emoji to an URL."""
    if len(emoji) == 2:
        # drop a trailing variation selector before building the name
        emoji = emoji.removesuffix("\uFE0F")
    code = "-".join(f"{ord(char):04x}" for char in emoji).upper()
    return f"/static/openmoji/svg/{code}.svg?v={OPENMOJI_VERSION}"
if sys.flags.dev_mode and not pytest_is_running():
    __original_emoji2url = emoji2url

    def emoji2url(emoji: str) -> str:  # pylint: disable=function-redefined
        """Convert an emoji to an URL, validating the input in dev mode."""
        import openmoji_dist  # pylint: disable=import-outside-toplevel
        from emoji import is_emoji  # pylint: disable=import-outside-toplevel

        assert is_emoji(emoji), f"{emoji} needs to be emoji"
        result = __original_emoji2url(emoji)
        relative = result.removeprefix("/static/openmoji/").split("?")[0]
        file = openmoji_dist.get_openmoji_data() / relative
        assert file.is_file(), f"{file} needs to exist"
        return result
# character → emoji replacements used by emojify(); keys that map to
# themselves are already emojis and are passed through unchanged
EMOJI_MAPPING: Final[Mapping[str, str]] = {
    "⁉": "⁉",
    "‼": "‼",
    "?": "❓",
    "!": "❗",
    "-": "➖",
    "+": "➕",
    "\U0001F51F": "\U0001F51F",
}
def emojify(string: str) -> Iterable[str]:
    """Emojify a given string, yielding emojis and runs of other text."""
    pending: list[str] = []
    prepared = (
        replace_umlauts(string)
        .replace("!?", "⁉")
        .replace("!!", "‼")
        .replace("10", "\U0001F51F")
    )
    for char in prepared:
        emoji: str | None = None
        if char.isascii():
            if char.isdigit() or char in "#*":
                emoji = f"{char}\uFE0F\u20E3"
            elif char.isalpha():
                emoji = country_code_to_flag(char)
        emoji = EMOJI_MAPPING.get(char, emoji)

        if emoji is None:
            pending.append(char)
            continue
        if pending:
            yield "".join(pending)
            pending.clear()
        yield emoji

    if pending:
        yield "".join(pending)
async def geoip(
    ip: None | str,
    database: str = "GeoLite2-City.mmdb",
    elasticsearch: None | AsyncElasticsearch = None,
    *,
    allow_fallback: bool = True,
    caches: dict[str, dict[str, dict[str, Any]]] = UltraDict(),  # noqa: B008
) -> None | dict[str, Any]:
    """Get GeoIP information for an IP address.

    Results are cached per IP and database in caches (the mutable default
    is intentional: it is the shared process-wide cache). Without an
    Elasticsearch client (or when it errors) the local geolite2 fallback
    is used for the City and Country databases if allow_fallback is true.
    """
    # pylint: disable=too-complex
    if not ip:
        return None

    # pylint: disable-next=redefined-outer-name
    cache = caches.get(ip, {})
    if database not in cache:
        if not elasticsearch:
            if allow_fallback and database in {
                "GeoLite2-City.mmdb",
                "GeoLite2-Country.mmdb",
            }:
                # country=True means "country-level data only", which
                # matches the Country database (the condition was
                # previously inverted and returned country-only data
                # for City lookups)
                return geoip_fallback(
                    ip, country=database == "GeoLite2-Country.mmdb"
                )
            return None

        # properties the Elasticsearch geoip processor should extract
        properties: None | tuple[str, ...]
        if database == "GeoLite2-City.mmdb":
            properties = (
                "continent_name",
                "country_iso_code",
                "country_name",
                "region_iso_code",
                "region_name",
                "city_name",
                "location",
                "timezone",
            )
        elif database == "GeoLite2-Country.mmdb":
            properties = (
                "continent_name",
                "country_iso_code",
                "country_name",
            )
        elif database == "GeoLite2-ASN.mmdb":
            properties = ("asn", "network", "organization_name")
        else:
            properties = None

        try:
            cache[database] = (
                await elasticsearch.ingest.simulate(
                    pipeline={
                        "processors": [
                            {
                                "geoip": {
                                    "field": "ip",
                                    "database_file": database,
                                    "properties": properties,
                                }
                            }
                        ]
                    },
                    docs=[{"_source": {"ip": ip}}],
                    filter_path="docs.doc._source",
                )
            )["docs"][0]["doc"]["_source"].get("geoip", {})
        except (ApiError, TransportError):
            if allow_fallback and database in {
                "GeoLite2-City.mmdb",
                "GeoLite2-Country.mmdb",
            }:
                return geoip_fallback(
                    ip, country=database == "GeoLite2-Country.mmdb"
                )
            raise

        if "country_iso_code" in cache[database]:
            cache[database]["country_flag"] = country_code_to_flag(
                cache[database]["country_iso_code"]
            )

        # reassign so shared-memory caches (UltraDict) see the update
        caches[ip] = cache
    return cache[database]
def geoip_fallback(ip: str, country: bool = False) -> None | dict[str, Any]:
    """Get GeoIP information without using Elasticsearch.

    With country=True only country-level fields are returned.
    Falsy fields are dropped from the result.
    """
    if not (info := geolite2.lookup(ip)):
        return None

    info_dict = info.get_info_dict()
    country_info = info_dict.get("country", {})

    data: dict[str, Any] = {
        "continent_name": info_dict.get("continent", {})
        .get("names", {})
        .get("en"),
        "country_iso_code": country_info.get("iso_code"),
        "country_name": country_info.get("names", {}).get("en"),
    }

    if data["country_iso_code"]:
        data["country_flag"] = country_code_to_flag(data["country_iso_code"])

    if not country:
        location_info = info_dict.get("location", {})
        latitude = location_info.get("latitude")
        longitude = location_info.get("longitude")
        data["location"] = (
            (latitude, longitude) if latitude and longitude else None
        )
        data["timezone"] = location_info.get("time_zone")

    return {key: value for key, value in data.items() if value}
def get_arguments_without_help() -> tuple[str, ...]:
    """Get the command line arguments, with help flags filtered out."""
    help_flags = {"-h", "--help"}
    return tuple(arg for arg in sys.argv[1:] if arg not in help_flags)
def get_close_matches(  # based on difflib.get_close_matches
    word: str,
    possibilities: Iterable[str],
    count: int = 3,
    cutoff: float = 0.5,
) -> tuple[str, ...]:
    """Return the best "good enough" matches using normalized edit distance.

    word is the string for which close matches are desired.

    possibilities is an iterable of candidate strings to match against.

    count (default 3) is the maximum number of matches returned; must be > 0.

    cutoff (default 0.5) is a float in [0, 1]; candidates scoring worse
    than it are ignored.

    The matches are returned as a tuple, most similar first.
    """
    if count <= 0:
        raise ValueError(f"count must be > 0: {count}")
    if not 0.0 <= cutoff <= 1.0:
        raise ValueError(f"cutoff must be in [0.0, 1.0]: {cutoff}")
    word_len = len(word)
    if not word_len:
        # an empty word matches everything (ratio 0) unless cutoff is 1.0
        if cutoff < 1.0:
            return ()
        return Stream(possibilities).limit(count).collect(tuple)
    matches: list[tuple[float, str]] = []
    for candidate in possibilities:
        max_dist = max(word_len, len(candidate))
        if not max_dist:
            continue
        dist = bounded_edit_distance(
            candidate, word, 1 + int(cutoff * max_dist)
        )
        ratio = dist / max_dist
        if ratio <= cutoff:
            # keep matches sorted by ratio; drop the worst beyond count
            bisect.insort(matches, (ratio, candidate))
            if len(matches) > count:
                matches.pop()
    return tuple(candidate for _, candidate in matches)
def hash_bytes(*args: bytes, hasher: Any = None, size: int = 32) -> str:
    """Hash bytes and return the Base85 representation."""
    if not hasher:
        hasher = blake3()
    for chunk in args:
        hasher.update(chunk)
    if isinstance(hasher, blake3):
        digest = hasher.digest(size)
    else:
        # other hashers have a fixed digest size; truncate instead
        digest = hasher.digest()[:size]
    return b85encode(digest).decode("ASCII")
def hash_ip(
    address: None | str | IPv4Address | IPv6Address, size: int = 32
) -> str:
    """Hash an IP address.

    The hash is salted with a value derived from the current UTC date,
    so hashes of the same address are not linkable across days.
    """
    if isinstance(address, str):
        address = ip_address(address)
    # rotate the module-level salt when the UTC date has changed
    if IP_HASH_SALT["date"] != (date := datetime.now(timezone.utc).date()):
        IP_HASH_SALT["hasher"] = blake3(
            blake3(date.isoformat().encode("ASCII")).digest()
        )
        IP_HASH_SALT["date"] = date
    # copy the salted hasher so the shared one stays unmodified
    return hash_bytes(
        address.packed if address else b"",
        hasher=IP_HASH_SALT["hasher"].copy(),  # type: ignore[attr-defined]
        size=size,
    )
def is_in_european_union(ip: None | str) -> None | bool:
    """Return whether the specified address is in the EU."""
    if not ip:
        return None
    info = geolite2.lookup(ip)
    if not info:
        return None
    return cast(bool, info.get_info_dict().get("is_in_european_union", False))
def is_prime(number: int) -> bool:
    """Return whether the specified number is prime."""
    if number % 2 == 0:
        # 2 is the only even prime
        return number == 2
    # look the odd number up in the precomputed PRINT bitmap
    return bool(PRINT & (1 << (number // 2)))
647def length_of_match(match: regex.Match[Any]) -> int:
648 """Calculate the length of the regex match and return it."""
649 return match.end() - match.start()
652def n_from_set[T](set_: Set[T], n: int) -> set[T]: # noqa: D103
653 """Get and return n elements of the set as a new set."""
654 new_set = set()
655 for i, element in enumerate(set_):
656 if i >= n:
657 break
658 new_set.add(element)
659 return new_set
def name_to_id(val: str) -> str:
    """Replace umlauts and whitespaces in a string to get a valid HTML id."""
    normalized = replace_umlauts(val).lower()
    return regex.sub(r"[^a-z0-9]+", "-", normalized).strip("-")
671def none_to_default[T, D](value: None | T, default: D) -> D | T: # noqa: D103
672 """Like ?? in ECMAScript."""
673 return default if value is None else value
def parse_bumpscosity(value: str | int | None) -> BumpscosityValue:
    """Parse a string to a valid bumpscosity value."""
    if isinstance(value, str):
        # base=0 also accepts 0x/0o/0b prefixed numbers
        with contextlib.suppress(ValueError):
            value = int(value, base=0)
    if value in BUMPSCOSITY_VALUES:
        return cast(BumpscosityValue, value)
    # invalid input: pick deterministically based on its repr
    rng = random.Random(repr(value))
    return rng.choice(BUMPSCOSITY_VALUES)
686def parse_openmoji_arg(value: str, default: OpenMojiValue) -> OpenMojiValue:
687 """Parse the openmoji arg into a Literal."""
688 value = value.lower()
689 if value == "glyf_colr0":
690 return "glyf_colr0"
691 if value == "glyf_colr1":
692 return "glyf_colr1"
693 if value in {"i", "img"}:
694 return "img"
695 if value in {"n", "nope"}:
696 return False
697 return default
# pylint: disable-next=too-many-arguments
async def ratelimit(
    redis: Redis[str],
    redis_prefix: str,
    remote_ip: str,
    *,
    bucket: None | str,
    max_burst: int,
    count_per_period: int,
    period: int,
    tokens: int,
) -> tuple[bool, dict[str, str]]:
    """Take b1nzy to space using Redis.

    Rate limit requests from remote_ip with the redis-cell CL.THROTTLE
    command and return (is_ratelimited, headers_for_the_response).
    """
    # hash the IP so raw addresses never end up as Redis keys
    remote_ip = hash_bytes(remote_ip.encode("ASCII"))
    key = f"{redis_prefix}:ratelimit:{remote_ip}"
    if bucket:
        key = f"{key}:{bucket}"

    # see: https://github.com/brandur/redis-cell#usage
    result = await redis.execute_command(
        # type: ignore[no-untyped-call]
        "CL.THROTTLE",
        key,
        max_burst,
        count_per_period,
        period,
        tokens,
    )
    # result fields (per redis-cell): 0 = limited (0/1), 1 = total limit,
    # 2 = remaining, 3 = retry-after seconds, 4 = reset-after seconds

    now = time.time()

    headers: dict[str, str] = {}

    if result[0]:
        headers["Retry-After"] = str(result[3])
        if not bucket:
            headers["X-RateLimit-Global"] = "true"

    if bucket:
        headers["X-RateLimit-Limit"] = str(result[1])
        headers["X-RateLimit-Remaining"] = str(result[2])
        headers["X-RateLimit-Reset"] = str(now + result[4])
        headers["X-RateLimit-Reset-After"] = str(result[4])
        headers["X-RateLimit-Bucket"] = hash_bytes(bucket.encode("ASCII"))

    return bool(result[0]), headers
def remove_suffix_ignore_case(string: str, suffix: str) -> str:
    """Remove a suffix without caring about the case.

    Returns string unchanged when it does not end with suffix or when
    suffix is empty (previously an empty suffix wiped the whole string,
    because string[:-0] is the empty string).
    """
    if suffix and string.lower().endswith(suffix.lower()):
        return string[: -len(suffix)]
    return string
def replace_umlauts(string: str) -> str:
    """Replace Ä, Ö, Ü, ẞ, ä, ö, ü, ß in string."""
    if string.isupper():
        # all-caps words get all-caps replacements
        return string.translate(
            str.maketrans({"Ä": "AE", "Ö": "OE", "Ü": "UE", "ẞ": "SS"})
        )
    if " " in string:
        # handle each word on its own, so all-caps words stay all-caps
        return " ".join(replace_umlauts(word) for word in string.split(" "))
    return string.translate(
        str.maketrans(
            {
                "ä": "ae",
                "ö": "oe",
                "ü": "ue",
                "ß": "ss",
                "Ä": "Ae",
                "Ö": "Oe",
                "Ü": "Ue",
                "ẞ": "SS",
            }
        )
    )
778async def run(
779 program: str,
780 *args: str,
781 stdin: int | IO[Any] = asyncio.subprocess.DEVNULL,
782 stdout: None | int | IO[Any] = asyncio.subprocess.PIPE,
783 stderr: None | int | IO[Any] = asyncio.subprocess.PIPE,
784 **kwargs: Any,
785) -> tuple[None | int, bytes, bytes]:
786 """Run a programm and return the exit code, stdout and stderr as tuple."""
787 proc = await asyncio.create_subprocess_exec(
788 program,
789 *args,
790 stdin=stdin,
791 stdout=stdout,
792 stderr=stderr,
793 **kwargs,
794 )
795 output = await proc.communicate()
796 return proc.returncode, *output
799def size_of_file(file: Traversable) -> int:
800 """Calculate the size of a file."""
801 if isinstance(file, Path):
802 return file.stat().st_size
804 with file.open("rb") as data:
805 return sum(map(len, data)) # pylint: disable=bad-builtin
808def str_to_bool(val: None | str | bool, default: None | bool = None) -> bool:
809 """Convert a string representation of truth to True or False."""
810 if isinstance(val, bool):
811 return val
812 if isinstance(val, str):
813 val = val.lower()
814 if val in {
815 "1",
816 "a",
817 "accept",
818 "e",
819 "enabled",
820 "on",
821 "s",
822 "sure",
823 "t",
824 "true",
825 "y",
826 "yes",
827 }:
828 return True
829 if val in {
830 "0",
831 "d",
832 "disabled",
833 "f",
834 "false",
835 "n",
836 "no",
837 "nope",
838 "off",
839 "r",
840 "reject",
841 }:
842 return False
843 if val in {"idc", "maybe", "random"}:
844 return bool(random.randrange(2)) # nosec: B311
845 if default is None:
846 raise ValueError(f"Invalid bool value: {val!r}")
847 return default
def str_to_set(string: str) -> set[str]:
    """Convert a comma-separated string to a set of stripped strings."""
    stripped = (part.strip() for part in string.split(","))
    return {part for part in stripped if part}
def strangle(string: str) -> float:
    """Convert a string to an angle in [0, 360)."""
    digest = sha1(string.encode("UTF-8"), usedforsecurity=False).digest()
    # map the first two digest bytes (little endian) onto the circle
    return int.from_bytes(digest[:2], "little") / (1 << 16) * 360
861def time_function[ # noqa: D103
862 T, **P # fmt: skip
863](
864 function: Callable[P, T], *args: P.args, **kwargs: P.kwargs
865) -> tuple[T, float]:
866 """Run the function and return the result and the time it took in seconds."""
867 timer = Timer()
868 return function(*args, **kwargs), timer.stop()
def time_to_str(spam: float) -> str:
    """Convert the time into a string with second precision."""
    # int() truncation (not floor division) is kept deliberately,
    # matching the original behavior for negative inputs
    total_seconds = int(spam)
    total_minutes = int(total_seconds / 60)
    total_hours = int(total_minutes / 60)
    days = int(total_hours / 24)
    return (
        f"{days}d "
        f"{total_hours % 24}h "
        f"{total_minutes % 60}min "
        f"{total_seconds % 60}s"
    )
@dataclass(order=True, frozen=True, slots=True)
class PageInfo:
    """The PageInfo class that is used for the subpages of a ModuleInfo."""

    name: str  # human-readable name of the page
    description: str  # short description of the page
    path: None | str = None  # URL path of the page
    # keywords that can be used for searching
    keywords: tuple[str, ...] = field(default_factory=tuple)
    hidden: bool = False  # whether to hide this page info on the page
    short_name: None | str = None  # short name for the page
@dataclass(order=True, frozen=True, slots=True)
class ModuleInfo(PageInfo):
    """
    The ModuleInfo class adds handlers and subpages to the PageInfo.

    This gets created by every module to add the handlers.
    """

    handlers: tuple[Handler, ...] = field(default_factory=tuple[Handler, ...])
    sub_pages: tuple[PageInfo, ...] = field(default_factory=tuple)
    aliases: tuple[str, ...] | Mapping[str, str] = field(default_factory=tuple)
    required_background_tasks: Collection[BackgroundTask] = field(
        default_factory=frozenset
    )

    def get_keywords_as_str(self, path: str) -> str:
        """Get the keywords as comma-separated string."""
        page_info = self.get_page_info(path)
        if page_info == self:
            return ", ".join(self.keywords)
        # combine the module's keywords with the subpage's keywords
        return ", ".join((*self.keywords, *page_info.keywords))

    def get_page_info(self, path: str) -> PageInfo:
        """Get the PageInfo of the specified path."""
        if self.path == path:
            return self

        for sub_page in self.sub_pages:
            if sub_page.path == path:
                return sub_page

        # fall back to the module itself for unknown paths
        return self