From 0b4dd47558c6fc8c2ea166de686b85532202bae6 Mon Sep 17 00:00:00 2001
From: Kristian Krsnik <git@krsnik.at>
Date: Thu, 2 Jan 2025 18:18:57 +0100
Subject: [PATCH] initial rewrite

---
 .gitignore                             |  28 ++-
 flake.lock                             | 142 ++---------
 flake.nix                              | 198 ++++++++++++---
 nix/module.nix                         |  19 +-
 poetry.lock                            | 323 -------------------------
 pyproject.toml                         |  56 +++--
 src/main.py                            | 114 ---------
 src/test/test_imports.py               |   2 +
 src/test/test_testdata.py              | 116 +++++++++
 src/testdata/__init__.py               |   3 +
 src/testdata/__main__.py               |   3 +
 src/testdata/logger/__init__.py        |   3 +
 src/testdata/logger/logger.py          | 171 +++++++++++++
 src/testdata/main.py                   |  28 +++
 src/{__init__.py => testdata/py.typed} |   0
 src/testdata/testdata.py               | 134 ++++++++++
 src/testdata/utils.py                  |  40 +++
 src/utils.py                           |  54 -----
 18 files changed, 756 insertions(+), 678 deletions(-)
 delete mode 100644 poetry.lock
 delete mode 100644 src/main.py
 create mode 100644 src/test/test_imports.py
 create mode 100644 src/test/test_testdata.py
 create mode 100644 src/testdata/__init__.py
 create mode 100644 src/testdata/__main__.py
 create mode 100644 src/testdata/logger/__init__.py
 create mode 100644 src/testdata/logger/logger.py
 create mode 100644 src/testdata/main.py
 rename src/{__init__.py => testdata/py.typed} (100%)
 create mode 100644 src/testdata/testdata.py
 create mode 100644 src/testdata/utils.py
 delete mode 100644 src/utils.py

diff --git a/.gitignore b/.gitignore
index 231dc67..123a7da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,30 @@
+# Python #
+# Virtual Environment
+/.venv/
+
+# Cache
+__pycache__/
+
+# Build
+/dist/
+*.egg-info/
+
+# Tools
+/.pytest_cache/
+/.mypy_cache/
+
+# Nix #
+
+# Build
 /result
 
+# MicroVM
+/var.img
+/control.socket
+
+# Direnv #
 /.direnv/
 
-*.json
-*.bin
+# Project specific files #
+config.json
+db.json
diff --git a/flake.lock b/flake.lock
index 3db2152..611ac39 100644
--- a/flake.lock
+++ b/flake.lock
@@ -5,29 +5,11 @@
         "systems": "systems"
       },
       "locked": {
-        "lastModified": 1705309234,
-        "narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=",
+        "lastModified": 1731533236,
+        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "flake-utils_2": {
-      "inputs": {
-        "systems": "systems_2"
-      },
-      "locked": {
-        "lastModified": 1710146030,
-        "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
         "type": "github"
       },
       "original": {
@@ -45,11 +27,11 @@
         "spectrum": "spectrum"
       },
       "locked": {
-        "lastModified": 1720034501,
-        "narHash": "sha256-fzZpuVnhw5uOtA4OuXw3a+Otpy8C+QV0Uu5XfhGEPSg=",
+        "lastModified": 1735074045,
+        "narHash": "sha256-CeYsC8J2dNiV2FCQOxK1oZ/jNpOF2io7aCEFHmfi95U=",
         "owner": "astro",
         "repo": "microvm.nix",
-        "rev": "a808af7775f508a2afedd1e4940a382fe1194f21",
+        "rev": "2ae08de8e8068b00193b9cfbc0acc9dfdda03181",
         "type": "github"
       },
       "original": {
@@ -58,34 +40,13 @@
         "type": "github"
       }
     },
-    "nix-github-actions": {
-      "inputs": {
-        "nixpkgs": [
-          "poetry2nix-lib",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1703863825,
-        "narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=",
-        "owner": "nix-community",
-        "repo": "nix-github-actions",
-        "rev": "5163432afc817cf8bd1f031418d1869e4c9d5547",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "nix-github-actions",
-        "type": "github"
-      }
-    },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1720031269,
-        "narHash": "sha256-rwz8NJZV+387rnWpTYcXaRNvzUSnnF9aHONoJIYmiUQ=",
+        "lastModified": 1735471104,
+        "narHash": "sha256-0q9NGQySwDQc7RhAV2ukfnu7Gxa5/ybJ2ANT8DQrQrs=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "9f4128e00b0ae8ec65918efeba59db998750ead6",
+        "rev": "88195a94f390381c6afcdaa933c2f6ff93959cb4",
         "type": "github"
       },
       "original": {
@@ -95,45 +56,20 @@
         "type": "github"
       }
     },
-    "poetry2nix-lib": {
-      "inputs": {
-        "flake-utils": "flake-utils_2",
-        "nix-github-actions": "nix-github-actions",
-        "nixpkgs": [
-          "nixpkgs"
-        ],
-        "systems": "systems_3",
-        "treefmt-nix": "treefmt-nix"
-      },
-      "locked": {
-        "lastModified": 1719850884,
-        "narHash": "sha256-UU/lVTHFx0GpEkihoLJrMuM9DcuhZmNe3db45vshSyI=",
-        "owner": "nix-community",
-        "repo": "poetry2nix",
-        "rev": "42262f382c68afab1113ebd1911d0c93822d756e",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "poetry2nix",
-        "type": "github"
-      }
-    },
     "root": {
       "inputs": {
         "microvm": "microvm",
-        "nixpkgs": "nixpkgs",
-        "poetry2nix-lib": "poetry2nix-lib"
+        "nixpkgs": "nixpkgs"
       }
     },
     "spectrum": {
       "flake": false,
       "locked": {
-        "lastModified": 1708358594,
-        "narHash": "sha256-e71YOotu2FYA67HoC/voJDTFsiPpZNRwmiQb4f94OxQ=",
+        "lastModified": 1733308308,
+        "narHash": "sha256-+RcbMAjSxV1wW5UpS9abIG1lFZC8bITPiFIKNnE7RLs=",
         "ref": "refs/heads/main",
-        "rev": "6d0e73864d28794cdbd26ab7b37259ab0e1e044c",
-        "revCount": 614,
+        "rev": "80c9e9830d460c944c8f730065f18bb733bc7ee2",
+        "revCount": 792,
         "type": "git",
         "url": "https://spectrum-os.org/git/spectrum"
       },
@@ -156,56 +92,6 @@
         "repo": "default",
         "type": "github"
       }
-    },
-    "systems_2": {
-      "locked": {
-        "lastModified": 1681028828,
-        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-        "owner": "nix-systems",
-        "repo": "default",
-        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-systems",
-        "repo": "default",
-        "type": "github"
-      }
-    },
-    "systems_3": {
-      "locked": {
-        "lastModified": 1681028828,
-        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-        "owner": "nix-systems",
-        "repo": "default",
-        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-        "type": "github"
-      },
-      "original": {
-        "id": "systems",
-        "type": "indirect"
-      }
-    },
-    "treefmt-nix": {
-      "inputs": {
-        "nixpkgs": [
-          "poetry2nix-lib",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1719749022,
-        "narHash": "sha256-ddPKHcqaKCIFSFc/cvxS14goUhCOAwsM1PbMr0ZtHMg=",
-        "owner": "numtide",
-        "repo": "treefmt-nix",
-        "rev": "8df5ff62195d4e67e2264df0b7f5e8c9995fd0bd",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "treefmt-nix",
-        "type": "github"
-      }
     }
   },
   "root": "root",
diff --git a/flake.nix b/flake.nix
index a853b7e..ac8521a 100644
--- a/flake.nix
+++ b/flake.nix
@@ -1,61 +1,189 @@
 {
-  description = "A webserver to create files for tetsing purposes";
+  description = "A webserver to create files for testing purposes";
 
   inputs = {
     nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
-    poetry2nix-lib = {
-      url = "github:nix-community/poetry2nix";
-      inputs.nixpkgs.follows = "nixpkgs";
-    };
+
     microvm = {
       url = "github:astro/microvm.nix";
       inputs.nixpkgs.follows = "nixpkgs";
     };
   };
 
-  outputs = {self, ...} @ inputs: let
+  outputs = {
+    self,
+    nixpkgs,
+    ...
+  } @ inputs: let
     supportedSystems = ["x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin"];
-    forAllSystems = inputs.nixpkgs.lib.genAttrs supportedSystems;
-    pkgs = forAllSystems (system: inputs.nixpkgs.legacyPackages.${system});
-    poetry2nix = forAllSystems (system: inputs.poetry2nix-lib.lib.mkPoetry2Nix {pkgs = pkgs.${system};});
+    forAllSystems = nixpkgs.lib.genAttrs supportedSystems;
+    pkgs = forAllSystems (system: nixpkgs.legacyPackages.${system}.extend overlay);
+
+    overlay = final: prev: rec {
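+      # packageNameToDrv resolves a PEP 508 requirement string (e.g. "fastapi==0.115.*") to the matching attribute in nixpkgs' python3Packages.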
+      python3Packages = prev.python3Packages.overrideScope (pfinal: pprev: {
+        packageNameToDrv = x: builtins.getAttr (cleanPythonPackageName x) final.python3Packages;
+      });
+
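+      # Strip version specifiers and extras so only the bare package name remains, e.g. "pytest==8.3.*" -> "pytest".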
+      cleanPythonPackageName = x: let
+        cleanName = builtins.match "([a-zA-Z0-9_-]+).*" x;
+      in
+        if cleanName != null
+        then builtins.elemAt cleanName 0
+        else builtins.warn "Could not determine package name from '${x}'" null;
+    };
+
+    pyproject = builtins.fromTOML (builtins.readFile ./pyproject.toml);
+
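+    # Resolve the build, runtime and optional dependency sets declared in pyproject.toml to nixpkgs derivations, per system.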
+    buildDependencies = forAllSystems (system: builtins.map pkgs.${system}.python3Packages.packageNameToDrv pyproject.build-system.requires);
+    runtimeDependencies = forAllSystems (system: builtins.map pkgs.${system}.python3Packages.packageNameToDrv pyproject.project.dependencies);
+    optionalDependencies = forAllSystems (system: builtins.mapAttrs (name: value: builtins.map pkgs.${system}.python3Packages.packageNameToDrv value) pyproject.project.optional-dependencies);
   in {
     # `nix build`
-    packages = forAllSystems (system: {
-      default = poetry2nix.${system}.mkPoetryApplication {
-        projectDir = self;
-      };
-      vm = self.nixosConfigurations.vm.config.microvm.declaredRunner;
-    });
+    packages = forAllSystems (system: let
+      buildTestdata = {skipCheck ? false}:
+        pkgs.${system}.python3Packages.buildPythonPackage {
+          pname = pyproject.project.name;
+          version = pyproject.project.version;
+          src = ./.;
 
-    # `nix run`
-    apps = forAllSystems (system: {
-      default = {
-        program = "${self.packages.${system}.default}/bin/testdata";
-        type = "app";
-      };
+          pyproject = true;
+
+          build-system = buildDependencies.${system};
+
+          dependencies = runtimeDependencies.${system};
+
+          optional-dependencies = optionalDependencies.${system};
+
+          nativeCheckInputs = optionalDependencies.${system}.dev;
+
+          checkPhase = let
+            dev = builtins.map (x: x.pname) optionalDependencies.${system}.dev;
+          in ''
+            ${
+              if builtins.elem "pytest" dev && !skipCheck
+              then "pytest src"
+              else ""
+            }
+            ${
+              if builtins.elem "mypy" dev && !skipCheck
+              then "mypy src"
+              else ""
+            }
+            ${
+              if builtins.elem "pylint" dev && !skipCheck
+              then "pylint src"
+              else ""
+            }
+          '';
+        };
+    in rec {
+      default = testdata;
+      testdata = buildTestdata {skipCheck = false;};
+      quick = buildTestdata {skipCheck = true;};
+      vm = self.nixosConfigurations.vm.config.microvm.declaredRunner;
     });
 
     # `nix fmt`
     formatter = forAllSystems (system: pkgs.${system}.alejandra);
 
     # `nix develop`
-    devShells = forAllSystems (system: {
-      default = let
-        poetryEnv =
-          if builtins.pathExists ./poetry.lock
-          then poetry2nix.${system}.mkPoetryEnv {projectDir = self;}
-          else null;
-      in
-        pkgs.${system}.mkShellNoCC {
-          packages = with pkgs.${system};
-            [
-              poetry
-            ]
-            ++ [poetryEnv];
-        };
+    devShells = forAllSystems (system: rec {
+      default = venv;
+
+      venv = pkgs.${system}.mkShell {
+        shellHook = ''
+          if [ ! -d .venv/ ]; then
+            echo "Creating Virtual Environment..."
+            ${pkgs.${system}.python3}/bin/python3 -m venv .venv
+          fi
+
+          alias activate='source .venv/bin/activate'
+
+          echo "Entering Virtual Environment..."
+          source .venv/bin/activate
+        '';
+      };
     });
 
     # NixOS Module
     nixosModules.default = import ./nix/module.nix inputs;
+
+    # nixos definition for a microvm to test nixosModules
+    nixosConfigurations = let
+      system = "x86_64-linux";
+    in {
+      vm = nixpkgs.lib.nixosSystem {
+        inherit system;
+
+        modules = [
+          inputs.microvm.nixosModules.microvm
+          ({config, ...}: {
+            system.stateVersion = config.system.nixos.version;
+
+            networking.hostName = "vm";
+            users.users.root.password = "";
+
+            microvm = {
+              # volumes = [
+              #   {
+              #     mountPoint = "/var";
+              #     image = "var.img";
+              #     size = 256;
+              #   }
+              # ];
+              shares = [
+                {
+                  # use proto = "virtiofs" for MicroVMs that are started by systemd
+                  proto = "9p";
+                  tag = "ro-store";
+                  # a host's /nix/store will be picked up so that no
+                  # squashfs/erofs will be built for it.
+                  source = "/nix/store";
+                  mountPoint = "/nix/.ro-store";
+                }
+              ];
+
+              interfaces = [
+                {
+                  type = "user";
+                  id = "qemu";
+                  mac = "02:00:00:01:01:01";
+                }
+              ];
+
+              forwardPorts = [
+                {
+                  host.port = config.services.testdata.port;
+                  guest.port = config.services.testdata.port;
+                }
+              ];
+
+              # "qemu" has 9p built-in!
+              hypervisor = "qemu";
+              socket = "control.socket";
+            };
+          })
+
+          self.nixosModules.default
+          rec {
+            networking.firewall.allowedTCPPorts = [services.testdata.port];
+            services.testdata = {
+              enable = true;
+
+              host = "0.0.0.0";
+              port = 1234;
+
+              settings = {
+                "keys" = ["one" "two" "three"];
+                "max-size" = "1GB";
+                "max-data" = "100GB";
+                "buffer-size" = "12MiB";
+                "database" = "/root/testdata_state.json";
+              };
+            };
+          }
+        ];
+      };
+    };
   };
 }
diff --git a/nix/module.nix b/nix/module.nix
index 4fcb760..5f2ea32 100644
--- a/nix/module.nix
+++ b/nix/module.nix
@@ -2,17 +2,16 @@ inputs: {
   config,
   lib,
   pkgs,
-  system,
   ...
 }: let
-  cfg = config.testdata;
+  cfg = config.services.testdata;
   package = inputs.self.packages.${pkgs.stdenv.hostPlatform.system}.default;
   inherit (lib) mkIf mkEnableOption mkOption types;
 
   format = pkgs.formats.json {};
   configFile = format.generate "config.json" cfg.settings;
 in {
-  options.testdata = {
+  options.services.testdata = {
     enable = mkEnableOption "testdata";
 
     settings = mkOption {
@@ -29,7 +28,17 @@ in {
         ]);
       in
         valueType;
-      default = throw "Please specify testdata.settings";
+      default = throw "Please specify services.testdata.settings";
+    };
+
+    host = mkOption {
+      type = types.str;
+      default = throw "Please specify a services.testdata.port";
+    };
+
+    port = mkOption {
+      type = types.int;
+      default = throw "Please specify a services.testdata.port";
     };
   };
 
@@ -41,7 +50,7 @@ in {
 
       serviceConfig = {
         Type = "simple";
-        ExecStart = "${package}/bin/testdata --config ${configFile}";
+        ExecStart = "${package}/bin/testdata --config ${configFile} --listen ${cfg.host} --port ${builtins.toString cfg.port}";
       };
 
       wantedBy = ["multi-user.target"];
diff --git a/poetry.lock b/poetry.lock
deleted file mode 100644
index 05b4642..0000000
--- a/poetry.lock
+++ /dev/null
@@ -1,323 +0,0 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
-
-[[package]]
-name = "annotated-types"
-version = "0.6.0"
-description = "Reusable constraint types to use with typing.Annotated"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"},
-    {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"},
-]
-
-[[package]]
-name = "anyio"
-version = "4.3.0"
-description = "High level compatibility layer for multiple asynchronous event loop implementations"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"},
-    {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"},
-]
-
-[package.dependencies]
-idna = ">=2.8"
-sniffio = ">=1.1"
-
-[package.extras]
-doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (>=0.23)"]
-
-[[package]]
-name = "fastapi"
-version = "0.110.1"
-description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc"},
-    {file = "fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad"},
-]
-
-[package.dependencies]
-pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
-starlette = ">=0.37.2,<0.38.0"
-typing-extensions = ">=4.8.0"
-
-[package.extras]
-all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.7)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
-
-[[package]]
-name = "h11"
-version = "0.14.0"
-description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
-    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
-]
-
-[[package]]
-name = "h2"
-version = "4.1.0"
-description = "HTTP/2 State-Machine based protocol implementation"
-optional = false
-python-versions = ">=3.6.1"
-files = [
-    {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"},
-    {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"},
-]
-
-[package.dependencies]
-hpack = ">=4.0,<5"
-hyperframe = ">=6.0,<7"
-
-[[package]]
-name = "hpack"
-version = "4.0.0"
-description = "Pure-Python HPACK header compression"
-optional = false
-python-versions = ">=3.6.1"
-files = [
-    {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"},
-    {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"},
-]
-
-[[package]]
-name = "hypercorn"
-version = "0.16.0"
-description = "A ASGI Server based on Hyper libraries and inspired by Gunicorn"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "hypercorn-0.16.0-py3-none-any.whl", hash = "sha256:929e45c4acde3fbf7c58edf55336d30a009d2b4cb1f1eb96e6a515d61b663f58"},
-    {file = "hypercorn-0.16.0.tar.gz", hash = "sha256:3b17d1dcf4992c1f262d9f9dd799c374125d0b9a8e40e1e2d11e2938b0adfe03"},
-]
-
-[package.dependencies]
-h11 = "*"
-h2 = ">=3.1.0"
-priority = "*"
-wsproto = ">=0.14.0"
-
-[package.extras]
-docs = ["pydata_sphinx_theme", "sphinxcontrib_mermaid"]
-h3 = ["aioquic (>=0.9.0,<1.0)"]
-trio = ["exceptiongroup (>=1.1.0)", "trio (>=0.22.0)"]
-uvloop = ["uvloop"]
-
-[[package]]
-name = "hyperframe"
-version = "6.0.1"
-description = "HTTP/2 framing layer for Python"
-optional = false
-python-versions = ">=3.6.1"
-files = [
-    {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"},
-    {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"},
-]
-
-[[package]]
-name = "idna"
-version = "3.6"
-description = "Internationalized Domain Names in Applications (IDNA)"
-optional = false
-python-versions = ">=3.5"
-files = [
-    {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
-    {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
-]
-
-[[package]]
-name = "ipaddress"
-version = "1.0.23"
-description = "IPv4/IPv6 manipulation library"
-optional = false
-python-versions = "*"
-files = [
-    {file = "ipaddress-1.0.23-py2.py3-none-any.whl", hash = "sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc"},
-    {file = "ipaddress-1.0.23.tar.gz", hash = "sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"},
-]
-
-[[package]]
-name = "priority"
-version = "2.0.0"
-description = "A pure-Python implementation of the HTTP/2 priority tree"
-optional = false
-python-versions = ">=3.6.1"
-files = [
-    {file = "priority-2.0.0-py3-none-any.whl", hash = "sha256:6f8eefce5f3ad59baf2c080a664037bb4725cd0a790d53d59ab4059288faf6aa"},
-    {file = "priority-2.0.0.tar.gz", hash = "sha256:c965d54f1b8d0d0b19479db3924c7c36cf672dbf2aec92d43fbdaf4492ba18c0"},
-]
-
-[[package]]
-name = "pydantic"
-version = "2.6.4"
-description = "Data validation using Python type hints"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pydantic-2.6.4-py3-none-any.whl", hash = "sha256:cc46fce86607580867bdc3361ad462bab9c222ef042d3da86f2fb333e1d916c5"},
-    {file = "pydantic-2.6.4.tar.gz", hash = "sha256:b1704e0847db01817624a6b86766967f552dd9dbf3afba4004409f908dcc84e6"},
-]
-
-[package.dependencies]
-annotated-types = ">=0.4.0"
-pydantic-core = "2.16.3"
-typing-extensions = ">=4.6.1"
-
-[package.extras]
-email = ["email-validator (>=2.0.0)"]
-
-[[package]]
-name = "pydantic-core"
-version = "2.16.3"
-description = ""
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pydantic_core-2.16.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:75b81e678d1c1ede0785c7f46690621e4c6e63ccd9192af1f0bd9d504bbb6bf4"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c865a7ee6f93783bd5d781af5a4c43dadc37053a5b42f7d18dc019f8c9d2bd1"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:162e498303d2b1c036b957a1278fa0899d02b2842f1ff901b6395104c5554a45"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f583bd01bbfbff4eaee0868e6fc607efdfcc2b03c1c766b06a707abbc856187"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b926dd38db1519ed3043a4de50214e0d600d404099c3392f098a7f9d75029ff8"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:716b542728d4c742353448765aa7cdaa519a7b82f9564130e2b3f6766018c9ec"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4ad7f7ee1a13d9cb49d8198cd7d7e3aa93e425f371a68235f784e99741561f"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bd87f48924f360e5d1c5f770d6155ce0e7d83f7b4e10c2f9ec001c73cf475c99"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0df446663464884297c793874573549229f9eca73b59360878f382a0fc085979"},
-    {file = "pydantic_core-2.16.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4df8a199d9f6afc5ae9a65f8f95ee52cae389a8c6b20163762bde0426275b7db"},
-    {file = "pydantic_core-2.16.3-cp310-none-win32.whl", hash = "sha256:456855f57b413f077dff513a5a28ed838dbbb15082ba00f80750377eed23d132"},
-    {file = "pydantic_core-2.16.3-cp310-none-win_amd64.whl", hash = "sha256:732da3243e1b8d3eab8c6ae23ae6a58548849d2e4a4e03a1924c8ddf71a387cb"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:519ae0312616026bf4cedc0fe459e982734f3ca82ee8c7246c19b650b60a5ee4"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b3992a322a5617ded0a9f23fd06dbc1e4bd7cf39bc4ccf344b10f80af58beacd"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d62da299c6ecb04df729e4b5c52dc0d53f4f8430b4492b93aa8de1f541c4aac"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2acca2be4bb2f2147ada8cac612f8a98fc09f41c89f87add7256ad27332c2fda"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b662180108c55dfbf1280d865b2d116633d436cfc0bba82323554873967b340"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7c6ed0dc9d8e65f24f5824291550139fe6f37fac03788d4580da0d33bc00c97"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1bb0827f56654b4437955555dc3aeeebeddc47c2d7ed575477f082622c49e"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e56f8186d6210ac7ece503193ec84104da7ceb98f68ce18c07282fcc2452e76f"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:936e5db01dd49476fa8f4383c259b8b1303d5dd5fb34c97de194560698cc2c5e"},
-    {file = "pydantic_core-2.16.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:33809aebac276089b78db106ee692bdc9044710e26f24a9a2eaa35a0f9fa70ba"},
-    {file = "pydantic_core-2.16.3-cp311-none-win32.whl", hash = "sha256:ded1c35f15c9dea16ead9bffcde9bb5c7c031bff076355dc58dcb1cb436c4721"},
-    {file = "pydantic_core-2.16.3-cp311-none-win_amd64.whl", hash = "sha256:d89ca19cdd0dd5f31606a9329e309d4fcbb3df860960acec32630297d61820df"},
-    {file = "pydantic_core-2.16.3-cp311-none-win_arm64.whl", hash = "sha256:6162f8d2dc27ba21027f261e4fa26f8bcb3cf9784b7f9499466a311ac284b5b9"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f56ae86b60ea987ae8bcd6654a887238fd53d1384f9b222ac457070b7ac4cff"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9bd22a2a639e26171068f8ebb5400ce2c1bc7d17959f60a3b753ae13c632975"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4204e773b4b408062960e65468d5346bdfe139247ee5f1ca2a378983e11388a2"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f651dd19363c632f4abe3480a7c87a9773be27cfe1341aef06e8759599454120"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aaf09e615a0bf98d406657e0008e4a8701b11481840be7d31755dc9f97c44053"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8e47755d8152c1ab5b55928ab422a76e2e7b22b5ed8e90a7d584268dd49e9c6b"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:500960cb3a0543a724a81ba859da816e8cf01b0e6aaeedf2c3775d12ee49cade"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf6204fe865da605285c34cf1172879d0314ff267b1c35ff59de7154f35fdc2e"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d33dd21f572545649f90c38c227cc8631268ba25c460b5569abebdd0ec5974ca"},
-    {file = "pydantic_core-2.16.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:49d5d58abd4b83fb8ce763be7794d09b2f50f10aa65c0f0c1696c677edeb7cbf"},
-    {file = "pydantic_core-2.16.3-cp312-none-win32.whl", hash = "sha256:f53aace168a2a10582e570b7736cc5bef12cae9cf21775e3eafac597e8551fbe"},
-    {file = "pydantic_core-2.16.3-cp312-none-win_amd64.whl", hash = "sha256:0d32576b1de5a30d9a97f300cc6a3f4694c428d956adbc7e6e2f9cad279e45ed"},
-    {file = "pydantic_core-2.16.3-cp312-none-win_arm64.whl", hash = "sha256:ec08be75bb268473677edb83ba71e7e74b43c008e4a7b1907c6d57e940bf34b6"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1f6f5938d63c6139860f044e2538baeee6f0b251a1816e7adb6cbce106a1f01"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a1ef6a36fdbf71538142ed604ad19b82f67b05749512e47f247a6ddd06afdc7"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704d35ecc7e9c31d48926150afada60401c55efa3b46cd1ded5a01bdffaf1d48"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d937653a696465677ed583124b94a4b2d79f5e30b2c46115a68e482c6a591c8a"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9803edf8e29bd825f43481f19c37f50d2b01899448273b3a7758441b512acf8"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72282ad4892a9fb2da25defeac8c2e84352c108705c972db82ab121d15f14e6d"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f752826b5b8361193df55afcdf8ca6a57d0232653494ba473630a83ba50d8c9"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4384a8f68ddb31a0b0c3deae88765f5868a1b9148939c3f4121233314ad5532c"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4b2bf78342c40b3dc830880106f54328928ff03e357935ad26c7128bbd66ce8"},
-    {file = "pydantic_core-2.16.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:13dcc4802961b5f843a9385fc821a0b0135e8c07fc3d9949fd49627c1a5e6ae5"},
-    {file = "pydantic_core-2.16.3-cp38-none-win32.whl", hash = "sha256:e3e70c94a0c3841e6aa831edab1619ad5c511199be94d0c11ba75fe06efe107a"},
-    {file = "pydantic_core-2.16.3-cp38-none-win_amd64.whl", hash = "sha256:ecdf6bf5f578615f2e985a5e1f6572e23aa632c4bd1dc67f8f406d445ac115ed"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:bda1ee3e08252b8d41fa5537413ffdddd58fa73107171a126d3b9ff001b9b820"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:21b888c973e4f26b7a96491c0965a8a312e13be108022ee510248fe379a5fa23"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be0ec334369316fa73448cc8c982c01e5d2a81c95969d58b8f6e272884df0074"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b5b6079cc452a7c53dd378c6f881ac528246b3ac9aae0f8eef98498a75657805"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee8d5f878dccb6d499ba4d30d757111847b6849ae07acdd1205fffa1fc1253c"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7233d65d9d651242a68801159763d09e9ec96e8a158dbf118dc090cd77a104c9"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6119dc90483a5cb50a1306adb8d52c66e447da88ea44f323e0ae1a5fcb14256"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:578114bc803a4c1ff9946d977c221e4376620a46cf78da267d946397dc9514a8"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8f99b147ff3fcf6b3cc60cb0c39ea443884d5559a30b1481e92495f2310ff2b"},
-    {file = "pydantic_core-2.16.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4ac6b4ce1e7283d715c4b729d8f9dab9627586dafce81d9eaa009dd7f25dd972"},
-    {file = "pydantic_core-2.16.3-cp39-none-win32.whl", hash = "sha256:e7774b570e61cb998490c5235740d475413a1f6de823169b4cf94e2fe9e9f6b2"},
-    {file = "pydantic_core-2.16.3-cp39-none-win_amd64.whl", hash = "sha256:9091632a25b8b87b9a605ec0e61f241c456e9248bfdcf7abdf344fdb169c81cf"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:36fa178aacbc277bc6b62a2c3da95226520da4f4e9e206fdf076484363895d2c"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:dcca5d2bf65c6fb591fff92da03f94cd4f315972f97c21975398bd4bd046854a"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a72fb9963cba4cd5793854fd12f4cfee731e86df140f59ff52a49b3552db241"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b60cc1a081f80a2105a59385b92d82278b15d80ebb3adb200542ae165cd7d183"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cbcc558401de90a746d02ef330c528f2e668c83350f045833543cd57ecead1ad"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:fee427241c2d9fb7192b658190f9f5fd6dfe41e02f3c1489d2ec1e6a5ab1e04a"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4cb85f693044e0f71f394ff76c98ddc1bc0953e48c061725e540396d5c8a2e1"},
-    {file = "pydantic_core-2.16.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b29eeb887aa931c2fcef5aa515d9d176d25006794610c264ddc114c053bf96fe"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a425479ee40ff021f8216c9d07a6a3b54b31c8267c6e17aa88b70d7ebd0e5e5b"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5c5cbc703168d1b7a838668998308018a2718c2130595e8e190220238addc96f"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b6add4c0b39a513d323d3b93bc173dac663c27b99860dd5bf491b240d26137"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f76ee558751746d6a38f89d60b6228fa174e5172d143886af0f85aa306fd89"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:00ee1c97b5364b84cb0bd82e9bbf645d5e2871fb8c58059d158412fee2d33d8a"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:287073c66748f624be4cef893ef9174e3eb88fe0b8a78dc22e88eca4bc357ca6"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed25e1835c00a332cb10c683cd39da96a719ab1dfc08427d476bce41b92531fc"},
-    {file = "pydantic_core-2.16.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:86b3d0033580bd6bbe07590152007275bd7af95f98eaa5bd36f3da219dcd93da"},
-    {file = "pydantic_core-2.16.3.tar.gz", hash = "sha256:1cac689f80a3abab2d3c0048b29eea5751114054f032a941a32de4c852c59cad"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
-
-[[package]]
-name = "sniffio"
-version = "1.3.1"
-description = "Sniff out which async library your code is running under"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
-    {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
-]
-
-[[package]]
-name = "starlette"
-version = "0.37.2"
-description = "The little ASGI library that shines."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee"},
-    {file = "starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823"},
-]
-
-[package.dependencies]
-anyio = ">=3.4.0,<5"
-
-[package.extras]
-full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"]
-
-[[package]]
-name = "typing-extensions"
-version = "4.11.0"
-description = "Backported and Experimental Type Hints for Python 3.8+"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"},
-    {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"},
-]
-
-[[package]]
-name = "wsproto"
-version = "1.2.0"
-description = "WebSockets state-machine based protocol implementation"
-optional = false
-python-versions = ">=3.7.0"
-files = [
-    {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"},
-    {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"},
-]
-
-[package.dependencies]
-h11 = ">=0.9.0,<1"
-
-[metadata]
-lock-version = "2.0"
-python-versions = "^3.11"
-content-hash = "a4a91c84503735a2120b16b6cd0a4c672588c4d58dc1fd410be0cdf8921982d2"
diff --git a/pyproject.toml b/pyproject.toml
index 94c4a1b..4e6b546 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,21 +1,43 @@
-[tool.poetry]
-name = "main"
-version = "0.1.0"
-description = ""
-authors = ["Your Name <you@example.com>"]
-readme = "README.md"
-packages = [{ include = "src" }]
+[project]
+name = "testdata"
+version = "1.1.0"
+requires-python = ">=3.11, <4"
+dependencies = [
+    "fastapi==0.115.*",
+    "uvicorn==0.32.*",
+    "pydantic==2.10.*",
+]
 
-[tool.poetry.dependencies]
-python = "^3.11"
-fastapi = "^0.110.1"
-hypercorn = "^0.16.0"
-pydantic = "^2.6.4"
-ipaddress = "^1.0.23"
+[project.optional-dependencies]
+dev = [
+    "pytest==8.3.*",
+    "mypy==1.13.*",
+    "pylint==3.3.*",
+    "requests==2.32.*",
+    "types-requests==2.32.*"
+]
 
-[tool.poetry.scripts]
-testdata = "src.main:main"
+[project.scripts]
+testdata = "testdata.main:main"
 
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["setuptools==75.*"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools.packages.find]
+where = ["src"]
+
+[tool.setuptools.package-data]
+testdata = ["py.typed"]
+
+[tool.autopep8]
+max_line_length = 150
+
+[tool.pylint.'MESSAGES CONTROL']
+disable = [
+    "line-too-long",
+    "missing-module-docstring",
+    "missing-class-docstring",
+    "missing-function-docstring",
+    "too-few-public-methods"
+]
diff --git a/src/main.py b/src/main.py
deleted file mode 100644
index 54a405d..0000000
--- a/src/main.py
+++ /dev/null
@@ -1,114 +0,0 @@
-import sys
-import asyncio
-import argparse
-import json
-from os.path import exists
-
-from fastapi import FastAPI, Request, HTTPException, Query
-from fastapi.responses import StreamingResponse
-from fastapi import status
-from hypercorn.config import Config
-from hypercorn.asyncio import serve
-import ipaddress
-
-from .utils import convert_to_bytes, generate_data, load_database, save_database
-
-# Setup Parser
-parser = argparse.ArgumentParser()
-parser.add_argument('-c', '--config', type=argparse.FileType('r'),
-                    default='./config.json', help='Path to config file in JSON format.')
-
-args = parser.parse_args(sys.argv[1:])
-
-# Load Config
-CONFIG = json.load(args.config)
-BUFFER_SIZE = convert_to_bytes(CONFIG['buffer-size'])
-MAX_SIZE = convert_to_bytes(CONFIG['max-size'])
-MAX_DATA = convert_to_bytes(CONFIG['max-data'])
-AUTHORIZED_KEYS = CONFIG['keys']
-if isinstance(AUTHORIZED_KEYS, str):
-    with open(AUTHORIZED_KEYS) as file:
-        AUTHORIZED_KEYS = list(
-            filter(lambda x: x.strip() != '', file.read().splitlines()))
-DATABASE = CONFIG['database']
-
-if not exists(DATABASE):
-    save_database(DATABASE, {'data-used': 0})
-
-
-api = FastAPI(docs_url=None, redoc_url=None)
-
-
-class MaxSizePerRequestError(Exception):
-    pass
-
-
-class MinSizePerRequestError(Exception):
-    pass
-
-
-@api.get('/zeros')
-async def test_data(api_key: str, size: str) -> StreamingResponse:
-    try:
-        if api_key not in AUTHORIZED_KEYS:
-            raise HTTPException(
-                status_code=status.HTTP_401_UNAUTHORIZED,
-                detail='Invalid API Key.'
-            )
-
-        try:
-            size = convert_to_bytes(size)
-        except ValueError as err:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail='Invalid format format for size.'
-            ) from err
-
-        if size < 0:
-            raise MinSizePerRequestError
-        elif MAX_SIZE < size:
-            raise MaxSizePerRequestError
-
-        database = load_database(DATABASE)
-        if MAX_DATA <= database['data-used'] + size:
-            raise HTTPException(
-                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-                detail='Service not available.'
-            )
-        database['data-used'] += size
-
-        save_database(DATABASE, database)
-
-        return StreamingResponse(
-            status_code=status.HTTP_200_OK,
-            content=generate_data(size, BUFFER_SIZE),
-            media_type='application/octet-stream',
-            headers={
-                'Content-Length': str(size)
-            }
-        )
-
-    except MinSizePerRequestError as err:
-        raise HTTPException(
-            status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
-            detail='Size has to be not-negative.'
-        ) from err
-    except MaxSizePerRequestError as err:
-        raise HTTPException(
-            status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
-            detail=f'Exceeded max size per request of {MAX_SIZE} Bytes.'
-        ) from err
-
-
-def main():
-    asyncio.run(serve(
-        api,
-        Config().from_mapping(
-            bind=CONFIG['binds'],
-            accesslog='-'
-        )
-    ))
-
-
-if __name__ == '__main__':
-    main()
diff --git a/src/test/test_imports.py b/src/test/test_imports.py
new file mode 100644
index 0000000..deb3f92
--- /dev/null
+++ b/src/test/test_imports.py
@@ -0,0 +1,2 @@
+def test_import_testdata():
+    import testdata  # pylint: disable=unused-import,import-outside-toplevel
diff --git a/src/test/test_testdata.py b/src/test/test_testdata.py
new file mode 100644
index 0000000..b417f93
--- /dev/null
+++ b/src/test/test_testdata.py
@@ -0,0 +1,116 @@
+import json
+import time
+import tempfile
+import asyncio
+from multiprocessing import Process
+from typing import Generator
+
+import pytest
+import requests
+
+import testdata
+
+
+PROTOCOL = 'http'
+HOST = 'localhost'
+PORT = 1234
+TIMEOUT = 1  # seconds
+
+
+@pytest.fixture(scope='function')
+def _server(request) -> Generator[str, None, None]:
+
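+    # Run the API in a separate process against a temporary database file, yield the database path, then tear the server down.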
+    with tempfile.NamedTemporaryFile() as tmpfile:
+        request.param['database'] = tmpfile.name
+        config = testdata.Testdata.Config.model_validate_json(json.dumps(request.param))
+        server = testdata.Testdata(config)
+
+        def run_server():
+            asyncio.run(server.run(HOST, PORT))
+
+        process = Process(target=run_server)
+        process.start()
+
+        # Wait until webserver becomes available
+        start = time.time()
+        while (time.time() - start) < TIMEOUT:
+            try:
+                requests.get(f'{PROTOCOL}://{HOST}:{PORT}', timeout=TIMEOUT)
+                break
+            except requests.exceptions.ConnectionError:
+                pass
+
+        yield tmpfile.name
+
+        process.terminate()
+
+        # Wait until webserver is completely shut down
+        start = time.time()
+        while (time.time() - start) < TIMEOUT:
+            try:
+                requests.get(f'{PROTOCOL}://{HOST}:{PORT}', timeout=TIMEOUT)
+            except requests.exceptions.ConnectionError:
+                break
+
+
+@pytest.mark.parametrize('_server', [({
+    'keys': ['one', 'two', 'three'],
+    'max-size': '100',
+    'max-data': 1234,
+    'buffer-size': '12MiB',
+})], indirect=['_server'])
+def test_request_size_lower_bound(_server):
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=-1', timeout=TIMEOUT)
+    assert response.status_code == 416
+
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=0', timeout=TIMEOUT)
+    assert response.status_code == 200
+    assert response.content == b''
+
+
+@pytest.mark.parametrize('_server', [({
+    'keys': ['one', 'two', 'three'],
+    'max-size': '100',
+    'max-data': 1234,
+    'buffer-size': '12MiB',
+})], indirect=['_server'])
+def test_request_size_upper_bound(_server):
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100', timeout=TIMEOUT)
+    assert response.status_code == 200
+    assert response.content == b'\0' * 100
+
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=101', timeout=TIMEOUT)
+    assert response.status_code == 416
+
+
+@pytest.mark.parametrize('_server', [({
+    'keys': ['one', 'two', 'three'],
+    'max-size': '100',
+    'max-data': 1234,
+    'buffer-size': '12MiB',
+})], indirect=['_server'])
+def test_invalid_api_key(_server):
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=four&size=100', timeout=TIMEOUT)
+    assert response.status_code == 401
+
+
+@pytest.mark.parametrize('_server', [({
+    'keys': ['one', 'two', 'three'],
+    'max-size': '1KB',
+    'max-data': '1KB',
+    'buffer-size': '12MiB',
+    'update-database-interval': 0.1
+})], indirect=['_server'])
+def test_check_database_update(_server):
+    database = _server
+
+    with open(database, 'r', encoding='utf-8') as file:
+        file.seek(0)
+        assert json.load(file) == {'data-used': 0}
+
+        response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100', timeout=TIMEOUT)
+        assert response.status_code == 200
+
+        time.sleep(0.1)
+        file.seek(0)
+        assert json.load(file) == {'data-used': 100}
diff --git a/src/testdata/__init__.py b/src/testdata/__init__.py
new file mode 100644
index 0000000..95ead44
--- /dev/null
+++ b/src/testdata/__init__.py
@@ -0,0 +1,3 @@
+from .testdata import Testdata
+from .utils import convert_to_bytes
+from .main import run
diff --git a/src/testdata/__main__.py b/src/testdata/__main__.py
new file mode 100644
index 0000000..5d6a810
--- /dev/null
+++ b/src/testdata/__main__.py
@@ -0,0 +1,3 @@
+from .main import main
+
+main()
diff --git a/src/testdata/logger/__init__.py b/src/testdata/logger/__init__.py
new file mode 100644
index 0000000..5b241af
--- /dev/null
+++ b/src/testdata/logger/__init__.py
@@ -0,0 +1,3 @@
+from logging import getLogger
+
+from .logger import setup_logging
diff --git a/src/testdata/logger/logger.py b/src/testdata/logger/logger.py
new file mode 100644
index 0000000..9bf320e
--- /dev/null
+++ b/src/testdata/logger/logger.py
@@ -0,0 +1,171 @@
+import logging.config
+import logging.handlers
+import sys
+import json
+import logging
+import atexit
+
+from datetime import datetime, timezone
+from typing_extensions import override
+
+LOG_RECORD_BUILTIN_ATTRS = {
+    "args",
+    "asctime",
+    "created",
+    "exc_info",
+    "exc_text",
+    "filename",
+    "funcName",
+    "levelname",
+    "levelno",
+    "lineno",
+    "module",
+    "msecs",
+    "message",
+    "msg",
+    "name",
+    "pathname",
+    "process",
+    "processName",
+    "relativeCreated",
+    "stack_info",
+    "thread",
+    "threadName",
+    "taskName",
+}
+
+
+class JSONFormatter(logging.Formatter):
+
+    def __init__(self, *, fmt_keys: dict[str, str] | None = None):
+        super().__init__()
+        self.fmt_keys = fmt_keys if fmt_keys is not None else {}
+
+    @override
+    def format(self, record: logging.LogRecord) -> str:
+        message = self._prepare_log_dict(record)
+        return json.dumps(message, default=str)
+
+    def _prepare_log_dict(self, record: logging.LogRecord) -> dict:
+        always_fields = {
+            'message': record.getMessage(),
+            'timestamp': datetime.fromtimestamp(
+                record.created, tz=timezone.utc
+            ).isoformat()
+        }
+
+        if record.exc_info is not None:
+            always_fields['exc_info'] = self.formatException(record.exc_info)
+
+        if record.stack_info is not None:
+            always_fields['stack_info'] = self.formatStack(record.stack_info)
+
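+        # For each configured output key, prefer the precomputed fields (message, timestamp, exc_info, stack_info); otherwise read the attribute straight off the record.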
+        message = {
+            key: msg_value
+            if (msg_value := always_fields.pop(value, None)) is not None
+            else getattr(record, value)
+            for key, value in self.fmt_keys.items()
+        }
+
+        message.update(always_fields)
+
+        for key, value in record.__dict__.items():
+            if key not in LOG_RECORD_BUILTIN_ATTRS:
+                message[key] = value
+
+        return message
+
+
+class NonErrorFilter(logging.Filter):
+    @override
+    def filter(self, record: logging.LogRecord) -> bool | logging.LogRecord:
+        return record.levelno <= logging.INFO
+
+
+def generate_log_config(log_path: str | None = None) -> dict:
+    logger_config = {
+        'version': 1,
+        'disable_existing_loggers': False,
+        'filters': {
+            'no_errors': {
+                "()": NonErrorFilter
+            }
+        },
+        'formatters': {
+            'simple': {
+                'format': '[%(asctime)s][%(levelname)s] %(message)s',
+                'datefmt': '%Y-%m-%d %H:%M:%S'
+            },
+            'detailed': {
+                'format': '[%(asctime)s][%(levelname)s] %(message)s',
+                'datefmt': '%Y-%m-%dT%H:%M:%S%z'  # ISO-8601 Timestamp
+            },
+            'json': {
+                '()': JSONFormatter,
+                'fmt_keys': {
+                    'timestamp': 'timestamp',
+                    'level': 'levelname',
+                    'message': 'message',
+                    'logger': 'name',
+                    'module': 'module',
+                    'function': 'funcName',
+                    'line': 'lineno',
+                    'thread_name': 'threadName'
+                },
+            }
+        },
+        'handlers': {
+            'stdout': {
+                'class': logging.StreamHandler,
+                'filters': ['no_errors'],
+                'formatter': 'simple',
+                'stream': 'ext://sys.stdout'
+            },
+            'stderr': {
+                'class': logging.StreamHandler,
+                'level': 'WARNING',
+                'formatter': 'simple',
+                'stream': 'ext://sys.stderr'
+            },
+        } | ({'file': {
+            'class': logging.handlers.RotatingFileHandler,
+            'level': 'DEBUG',
+            'formatter': 'json',
+            'filename': log_path,
+            'maxBytes': 1024 * 1024 * 10,  # 10 MB
+            'backupCount': 3
+        }} if log_path is not None else {}),
+        'loggers': {
+            'root': {
+                'level': 'DEBUG',
+                'handlers': [
+                    'stdout',
+                    'stderr'
+                ] + (['file'] if log_path is not None else []),
+            }
+        }
+    }
+
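+    # On Python 3.12+ dictConfig can attach the other handlers to a QueueHandler, so log I/O runs on a background listener thread instead of the calling thread.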
+    if sys.version_info >= (3, 12):  # Python 3.12+
+        logger_config['handlers']['queue_handler'] = {
+            'class': logging.handlers.QueueHandler,
+            'handlers': [
+                'stdout',
+                'stderr'
+            ] + (['file'] if log_path is not None else [])
+        }
+
+        logger_config['loggers']['root']['handlers'] = ['queue_handler']
+
+    return logger_config
+
+
+def setup_logging(log_path: str = '-') -> None:
+    log_config = generate_log_config(log_path)
+    logging.config.dictConfig(log_config)
+
+    if sys.version_info >= (3, 12):  # Python 3.12+
+        queue_handler = logging.getHandlerByName('queue_handler')
+        if queue_handler is not None:
+            queue_handler.listener.start()
+            atexit.register(queue_handler.listener.stop)
diff --git a/src/testdata/main.py b/src/testdata/main.py
new file mode 100644
index 0000000..daa378d
--- /dev/null
+++ b/src/testdata/main.py
@@ -0,0 +1,28 @@
+import sys
+import argparse
+import asyncio
+
+from .testdata import Testdata
+
+def parse_args(args: list[str]):
+    parser = argparse.ArgumentParser()
+    parser.add_argument('-c', '--config', type=argparse.FileType('r'), default='./config.json', help='Path to config file in JSON format.')
+    parser.add_argument('-l', '--listen', type=str, default='0.0.0.0', help='IP on which to listen.')
+    parser.add_argument('-p', '--port', type=int, default=8080, help='Port on which to serve the webserver.')
+
+    return parser.parse_args(args)
+
+
+def run(argv: list[str]) -> None:
+    # Parse command-line parameters
+    args = parse_args(argv)
+
+    # Load Config
+    config = Testdata.Config.model_validate_json(args.config.read())
+
+    # Run webserver
+    asyncio.run(Testdata(config).run(args.listen, args.port))
+
+
+def main() -> None:
+    run(sys.argv[1:])
diff --git a/src/__init__.py b/src/testdata/py.typed
similarity index 100%
rename from src/__init__.py
rename to src/testdata/py.typed
diff --git a/src/testdata/testdata.py b/src/testdata/testdata.py
new file mode 100644
index 0000000..c0c7792
--- /dev/null
+++ b/src/testdata/testdata.py
@@ -0,0 +1,134 @@
+import os
+import json
+import asyncio
+
+from typing_extensions import Annotated
+import uvicorn
+from fastapi import FastAPI, status, HTTPException
+from fastapi.responses import StreamingResponse
+from pydantic import BaseModel, ConfigDict, Field, BeforeValidator
+
+from .utils import convert_to_bytes, generate_data
+
+
+class MaxSizePerRequestError(Exception):
+    pass
+
+
+class MinSizePerRequestError(Exception):
+    pass
+
+
+class Testdata:
+
+    class Config(BaseModel):
+
+        model_config = ConfigDict(extra='forbid')
+
+        @staticmethod
+        def to_bytes(value: int | str) -> int:
+            try:
+                return convert_to_bytes(value)
+            except Exception as err:
+                raise ValueError(f'Cannot convert {value!r} to a byte count') from err
+
+        @staticmethod
+        def is_authorized_keys(value: set[str] | str) -> set[str]:
+            if isinstance(value, str):
+                with open(value, encoding='utf-8') as file:
+                    return set(filter(lambda x: x.strip() != '', file.read().splitlines()))
+            return value
+
+        authorized_keys: Annotated[set[str], BeforeValidator(is_authorized_keys)] = Field(alias='keys')
+        max_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-size')
+        max_data: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-data')
+        buffer_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='buffer-size')
+        database: str
+        update_database_interval: float = Field(alias='update-database-interval', default=5)
+
+    _config: Config
+    _api: FastAPI
+    _state: dict[str, int]
+
+    def __init__(self, config: Config):
+        self._config = config
+        self._api = FastAPI(docs_url=None, redoc_url=None)
+
+        # Store internal state
+        self._state = {'data-used': 0}
+
+        @self._api.get('/zeros')
+        async def zeros(api_key: str, size: int | str) -> StreamingResponse:
+            try:
+                if api_key not in config.authorized_keys:
+                    raise HTTPException(
+                        status_code=status.HTTP_401_UNAUTHORIZED,
+                        detail='Invalid API Key.'
+                    )
+                try:
+                    size = convert_to_bytes(size)
+                except ValueError as err:
+                    raise HTTPException(
+                        status_code=status.HTTP_400_BAD_REQUEST,
+                        detail='Invalid format for size.'
+                    ) from err
+
+                if size < 0:
+                    raise MinSizePerRequestError
+                if config.max_size < size:
+                    raise MaxSizePerRequestError
+
+                # update internal state
+                if config.max_data < self._state['data-used'] + size:
+                    raise HTTPException(
+                        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                        detail='Service not available.'
+                    )
+                self._state['data-used'] += size
+
+                return StreamingResponse(
+                    status_code=status.HTTP_200_OK,
+                    content=generate_data(size, config.buffer_size),
+                    media_type='application/octet-stream',
+                    headers={
+                        'Content-Length': str(size)
+                    }
+                )
+
+            except MinSizePerRequestError as err:
+                raise HTTPException(
+                    status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
+                    detail='Size has to be not-negative.'
+                ) from err
+            except MaxSizePerRequestError as err:
+                raise HTTPException(
+                    status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
+                    detail=f'Exceeded max size per request of {config.max_size} Bytes.'
+                ) from err
+
+    async def _update_state(self):
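+        # Load any previously saved state, then periodically flush the in-memory counters back to the database file.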
+        if os.path.exists(self._config.database):
+            mode = 'r+'
+        else:
+            mode = 'w+'
+
+        with open(self._config.database, mode, encoding='utf-8') as file:
+            try:
+                self._state = json.load(file)
+            except json.JSONDecodeError:
+                pass
+
+            while True:
+                file.seek(0)
+                json.dump(self._state, file)
+                file.truncate()
+                await asyncio.sleep(self._config.update_database_interval)
+
+    async def run(self, host: str, port: int) -> None:
+        try:
+            await asyncio.gather(
+                asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve()),
+                self._update_state()
+            )
+        except asyncio.exceptions.CancelledError:
+            pass
diff --git a/src/testdata/utils.py b/src/testdata/utils.py
new file mode 100644
index 0000000..0a75e1a
--- /dev/null
+++ b/src/testdata/utils.py
@@ -0,0 +1,40 @@
+import asyncio
+from typing import AsyncGenerator
+
+def convert_to_bytes(size: int | str) -> int:
+    if isinstance(size, int):
+        return size
+    if isinstance(size, str):
+        try:
+            return int(size)
+        except ValueError as err:
+            units = {
+                'TB': 1000 ** 4, 'TiB': 1024 ** 4,
+                'GB': 1000 ** 3, 'GiB': 1024 ** 3,
+                'MB': 1000 ** 2, 'MiB': 1024 ** 2,
+                'KB': 1000, 'KiB': 1024,
+                'B': 1
+            }
+
+            for unit, value in units.items():
+                if size.endswith(unit):
+                    return int(float(size.removesuffix(unit)) * value)
+
+            raise ValueError from err
+
+    else:
+        raise ValueError
+
+
+async def generate_data(size: int, buffer_size: int = 4 * 1024) -> AsyncGenerator[bytes, None]:
+    # https://github.com/tiangolo/fastapi/issues/5183
+    # https://github.com/encode/starlette/discussions/1776#discussioncomment-3207518
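+    # Stream zero-filled chunks of at most buffer_size bytes, yielding to the event loop between chunks so other requests are not starved.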
+
+    size_left = size
+    while size_left > buffer_size:
+        size_left -= buffer_size
+        yield b'\0' * buffer_size
+        await asyncio.sleep(0)
+
+    yield b'\0' * size_left
+    await asyncio.sleep(0)
diff --git a/src/utils.py b/src/utils.py
deleted file mode 100644
index 8a9a09d..0000000
--- a/src/utils.py
+++ /dev/null
@@ -1,54 +0,0 @@
-import json
-import asyncio
-
-def convert_to_bytes(size: int | str) -> int:
-    try:
-        return int(size)
-    except ValueError:  # treat as string
-        units = {
-            'TB': 1000 ** 4, 'TiB': 1024 ** 4,
-            'GB': 1000 ** 3, 'GiB': 1024 ** 3,
-            'MB': 1000 ** 2, 'MiB': 1024 ** 2,
-            'KB': 1000, 'KiB': 1024,
-            'B': 1
-        }
-
-        for unit in units:
-            if size.endswith(unit):
-                return int(float(size.removesuffix(unit)) * units[unit])
-                break
-
-        raise ValueError
-
-
-async def generate_data(size: int, buffer_size: int = 4 * 1024) -> bytes:
-    size_left = size
-
-    # https://github.com/tiangolo/fastapi/issues/5183
-    # https://github.com/encode/starlette/discussions/1776#discussioncomment-3207518
-
-    try:
-        while size_left > buffer_size:
-            size_left -= buffer_size
-            yield b'\0' * buffer_size
-            await asyncio.sleep(0)
-        else:
-            yield b'\0' * size_left
-            await asyncio.sleep(0)
-    except asyncio.CancelledError:
-        raise GeneratorExit
-
-
-def check_policies(ip: str) -> None:
-    network = ipaddress.ip_network(ip)
-    print(network)
-
-
-def load_database(path: str) -> dict:
-    with open(path, 'r') as file:
-        return json.load(file)
-
-
-def save_database(path: str, database: dict) -> None:
-    with open(path, 'w') as file:
-        json.dump(database, file, indent=2)