6 Commits

SHA1        Message                         Date
394b5f13f8  updated                         2025-05-29 17:57:06 +02:00
2c14192d95  changed dependency versions     2025-01-05 21:06:07 +01:00
2414e01f8b  added docker as an example      2025-01-04 20:05:49 +01:00
0901edf8eb  added testcase                  2025-01-04 18:51:50 +01:00
d29cac2130  fixed logfile creation issue    2025-01-04 18:48:20 +01:00
3f74df5355  reworked api definition         2025-01-04 17:12:40 +01:00

12 changed files with 424 additions and 271 deletions

.gitignore (18 lines changed)

@@ -1,3 +1,9 @@
+# Nix #
+/result
+
+# Direnv #
+/.direnv/
+
 # Python #
 # Virtual Environment
 /.venv/
@@ -13,18 +19,6 @@ __pycache__/
 /.pytest_cache/
 /.mypy_cache/
-
-# Nix #
-# Build
-/result
-
-# MicroVM
-/var.img
-/control.socket
-
-# Direnv #
-/.direnv/
-
 # Project specific files #
 config.json
 db.json

Dockerfile (new file, 28 lines)

@@ -0,0 +1,28 @@
+# Nix builder
+FROM nixos/nix:latest AS builder
+
+# Copy our source and setup our working dir.
+COPY . /tmp/build
+WORKDIR /tmp/build
+
+# Build our Nix environment
+RUN nix \
+    --extra-experimental-features "nix-command flakes" \
+    --option filter-syscalls false \
+    build
+
+# Copy the Nix store closure into a directory. The Nix store closure is the
+# entire set of Nix store values that we need for our build.
+RUN mkdir /tmp/nix-store-closure
+RUN cp -r $(nix-store -qR result/) /tmp/nix-store-closure
+
+# Final image is based on scratch. We copy a bunch of Nix dependencies
+# but they're fully self-contained so we don't need Nix anymore.
+FROM scratch
+
+WORKDIR /app
+
+# Copy /nix/store
+COPY --from=builder /tmp/nix-store-closure /nix/store
+COPY --from=builder /tmp/build/result /app
+CMD ["/app/bin/testdata"]

docker-compose.yaml (new file, 15 lines)

@@ -0,0 +1,15 @@
+services:
+  testdata:
+    image: result/latest
+    build:
+      dockerfile: ./Dockerfile
+
+    environment:
+      TESTDATA_HOST: 0.0.0.0
+      TESTDATA_PORT: 1234
+      TESTDATA_CONFIG: ./config.json
+
+    volumes:
+      - ./config.json:/app/config.json
+      - ./db.json:/app/db.json
+      - ./log.jsonl:/app/log.jsonl

flake.lock (generated, 26 lines changed)

@@ -27,11 +27,11 @@
         "spectrum": "spectrum"
       },
       "locked": {
-        "lastModified": 1735074045,
-        "narHash": "sha256-CeYsC8J2dNiV2FCQOxK1oZ/jNpOF2io7aCEFHmfi95U=",
+        "lastModified": 1748464257,
+        "narHash": "sha256-PdnQSE2vPfql9WEjunj2qQnDpuuvk7HH+4djgXJSwFs=",
         "owner": "astro",
         "repo": "microvm.nix",
-        "rev": "2ae08de8e8068b00193b9cfbc0acc9dfdda03181",
+        "rev": "e238645b6f0447a2eb1d538d300d5049d4006f9f",
         "type": "github"
       },
       "original": {
@@ -42,17 +42,17 @@
     },
     "nixpkgs": {
      "locked": {
-        "lastModified": 1733759999,
-        "narHash": "sha256-463SNPWmz46iLzJKRzO3Q2b0Aurff3U1n0nYItxq7jU=",
-        "owner": "nixos",
+        "lastModified": 1748370509,
+        "narHash": "sha256-QlL8slIgc16W5UaI3w7xHQEP+Qmv/6vSNTpoZrrSlbk=",
+        "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
+        "rev": "4faa5f5321320e49a78ae7848582f684d64783e9",
         "type": "github"
       },
       "original": {
-        "owner": "nixos",
+        "owner": "NixOS",
+        "ref": "nixos-unstable",
         "repo": "nixpkgs",
-        "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
         "type": "github"
       }
     },
@@ -65,11 +65,11 @@
     "spectrum": {
       "flake": false,
       "locked": {
-        "lastModified": 1733308308,
-        "narHash": "sha256-+RcbMAjSxV1wW5UpS9abIG1lFZC8bITPiFIKNnE7RLs=",
+        "lastModified": 1746869549,
+        "narHash": "sha256-BKZ/yZO/qeLKh9YqVkKB6wJiDQJAZNN5rk5NsMImsWs=",
         "ref": "refs/heads/main",
-        "rev": "80c9e9830d460c944c8f730065f18bb733bc7ee2",
-        "revCount": 792,
+        "rev": "d927e78530892ec8ed389e8fae5f38abee00ad87",
+        "revCount": 862,
         "type": "git",
         "url": "https://spectrum-os.org/git/spectrum"
       },

flake.nix (216 lines changed)

@@ -2,7 +2,7 @@
   description = "A webserver to create files for testing purposes";
 
   inputs = {
-    nixpkgs.url = "github:nixos/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56";
+    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
 
     microvm = {
       url = "github:astro/microvm.nix";
@@ -10,163 +10,92 @@
     };
   };
 
-  outputs = {
-    self,
-    nixpkgs,
-    ...
-  } @ inputs: let
-    supportedSystems = ["x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin"];
-    forAllSystems = nixpkgs.lib.genAttrs supportedSystems;
-    pkgs = forAllSystems (system: nixpkgs.legacyPackages.${system}.extend overlay);
-
-    overlay = final: prev: rec {
-      python3Packages = prev.python3Packages.overrideScope (pfinal: pprev: {
-        packageNameToDrv = x: builtins.getAttr (cleanPythonPackageName x) final.python3Packages;
-      });
-
-      cleanPythonPackageName = x: let
-        cleanName = builtins.match "([a-z,A-Z,0-9,_,-]+).*" x;
-      in
-        if cleanName != null
-        then builtins.elemAt cleanName 0
-        else builtins.warn "Could not determine package name from '${x}'" null;
-    };
-
-    pyproject = builtins.fromTOML (builtins.readFile ./pyproject.toml);
-
-    buildDependencies = forAllSystems (system: builtins.map pkgs.${system}.python3Packages.packageNameToDrv pyproject.build-system.requires);
-    runtimeDependencies = forAllSystems (system: builtins.map pkgs.${system}.python3Packages.packageNameToDrv pyproject.project.dependencies);
-    optionalDependencies = forAllSystems (system: builtins.mapAttrs (name: value: builtins.map pkgs.${system}.python3Packages.packageNameToDrv value) pyproject.project.optional-dependencies);
-  in {
-    # `nix build`
-    packages = forAllSystems (system: let
-      buildTestdata = {skipCheck ? false}:
-        pkgs.${system}.python3Packages.buildPythonPackage {
-          pname = pyproject.project.name;
-          version = pyproject.project.version;
-          src = ./.;
-
-          pyproject = true;
-
-          build-system = buildDependencies.${system};
-          dependencies = runtimeDependencies.${system};
-          optional-dependencies = optionalDependencies.${system};
-
-          nativeCheckInputs = optionalDependencies.${system}.dev;
-
-          checkPhase = let
-            dev = builtins.map (x: x.pname) optionalDependencies.${system}.dev;
-          in ''
-            ${
-              if builtins.elem "pytest" dev && !skipCheck
-              then "pytest src tests"
-              else ""
-            }
-            ${
-              if builtins.elem "mypy" dev && !skipCheck
-              then "mypy src tests"
-              else ""
-            }
-            ${
-              if builtins.elem "pylint" dev && !skipCheck
-              then "pylint src tests"
-              else ""
-            }
-          '';
-        };
-    in rec {
-      default = testdata;
-      testdata = buildTestdata {skipCheck = false;};
-      quick = buildTestdata {skipCheck = true;};
-      vm = self.nixosConfigurations.vm.config.microvm.declaredRunner;
-    });
-
-    # `nix fmt`
-    formatter = forAllSystems (system: pkgs.${system}.alejandra);
-
-    # `nix develop`
-    devShells = forAllSystems (system: rec {
-      default = venv;
-
-      venv = pkgs.${system}.mkShell {
-        shellHook = ''
-          if [ ! -d .venv/ ]; then
-            echo "Creating Virtual Environment..."
-            ${pkgs.${system}.python3}/bin/python3 -m venv .venv
-          fi
-
-          alias activate='source .venv/bin/activate'
-
-          echo "Entering Virtual Environment..."
-          source .venv/bin/activate
-        '';
-      };
-    });
-
-    # NixOS Module
-    nixosModules.default = import ./nix/module.nix inputs;
-
-    # nixos definition for a microvm to test nixosModules
-    nixosConfigurations = let
-      system = "x86_64-linux";
-    in {
-      vm = nixpkgs.lib.nixosSystem {
-        inherit system;
-
-        modules = [
-          inputs.microvm.nixosModules.microvm
-
-          ({config, ...}: {
-            system.stateVersion = config.system.nixos.version;
-
-            networking.hostName = "vm";
-            users.users.root.password = "";
-
-            microvm = {
-              # volumes = [
-              #   {
-              #     mountPoint = "/var";
-              #     image = "var.img";
-              #     size = 256;
-              #   }
-              # ];
-
-              shares = [
-                {
-                  # use proto = "virtiofs" for MicroVMs that are started by systemd
-                  proto = "9p";
-                  tag = "ro-store";
-                  # a host's /nix/store will be picked up so that no
-                  # squashfs/erofs will be built for it.
-                  source = "/nix/store";
-                  mountPoint = "/nix/.ro-store";
-                }
-              ];
-
-              interfaces = [
-                {
-                  type = "user";
-                  id = "qemu";
-                  mac = "02:00:00:01:01:01";
-                }
-              ];
-
-              forwardPorts = [
-                {
-                  host.port = config.services.testdata.port;
-                  guest.port = config.services.testdata.port;
-                }
-              ];
-
-              # "qemu" has 9p built-in!
-              hypervisor = "qemu";
-              socket = "control.socket";
-            };
-          })
-
-          self.nixosModules.default
-
-          rec {
-            networking.firewall.allowedTCPPorts = [services.testdata.port];
-
-            services.testdata = {
-              enable = true;
+  outputs =
+    {
+      self,
+      nixpkgs,
+      ...
+    }@inputs:
+    let
+      supportedSystems = [ "x86_64-linux" ];
+      forAllSystems = nixpkgs.lib.genAttrs supportedSystems;
+      pkgs = forAllSystems (system: nixpkgs.legacyPackages.${system});
+    in
+    {
+      # `nix build`
+      packages = forAllSystems (system: rec {
+        default = testdata;
+        testdata = pkgs.${system}.callPackage ./nix/package.nix { src = ./.; };
+        vm = self.nixosConfigurations.vm.config.microvm.declaredRunner;
+      });
+
+      # `nix develop`
+      devShells = forAllSystems (system: rec {
+        default = venv;
+
+        venv = pkgs.${system}.mkShell {
+          shellHook = ''
+            if [ ! -d .venv/ ]; then
+              echo "Creating Virtual Environment..."
+              ${pkgs.${system}.python3}/bin/python3 -m venv .venv
+            fi
+
+            alias activate='source .venv/bin/activate'
+
+            echo "Entering Virtual Environment..."
+            source .venv/bin/activate
+          '';
+        };
+      });
+
+      # NixOS Module
+      nixosModules.default = import ./nix/module.nix inputs;
+
+      # NixOS definition for a microvm to test nixosModules
+      nixosConfigurations."vm" = nixpkgs.lib.nixosSystem {
+        system = "x86_64-linux";
+
+        modules = [
+          inputs.microvm.nixosModules.microvm
+
+          (
+            { config, ... }:
+            {
+              services.getty.autologinUser = "root";
+
+              microvm = {
+                hypervisor = "qemu";
+
+                shares = [
+                  {
+                    # Host's /nix/store will be picked up so that no squashfs/erofs will be built for it.
+                    tag = "ro-store";
+                    source = "/nix/store";
+                    mountPoint = "/nix/.ro-store";
+                  }
+                ];
+
+                interfaces = [
+                  {
+                    type = "user";
+                    id = "qemu";
+                    # Locally administered have one of 2/6/A/E in the second nibble.
+                    mac = "02:00:00:01:01:01";
+                  }
+                ];
+
+                forwardPorts = [
+                  {
+                    host.port = config.services.testdata.port;
+                    guest.port = config.services.testdata.port;
+                  }
+                ];
+              };
+            }
+          )
+
+          self.nixosModules.default
+
+          rec {
+            networking.firewall.allowedTCPPorts = [ services.testdata.port ];
+
+            services.testdata = {
+              enable = true;
@@ -174,11 +103,15 @@
               port = 1234;
 
               settings = {
-                keys = ["one" "two" "three"];
+                keys = [
+                  "one"
+                  "two"
+                  "three"
+                ];
                 max-size = "1GB";
                 max-data = "100GB";
                 buffer-size = "12MiB";
-                database = "/root/testdata_state.json";
+                database = "/root/testdata-state.json";
                 database-update-interval = 5.0;
                 log = "/root/log.jsonl";
               };
@@ -187,5 +120,4 @@
           ];
         };
       };
-    };
 }

nix/module.nix

@@ -1,32 +1,41 @@
-inputs: {
+inputs:
+{
   config,
   lib,
   pkgs,
   ...
-}: let
+}:
+let
   cfg = config.services.testdata;
   package = inputs.self.packages.${pkgs.stdenv.hostPlatform.system}.default;
 
-  inherit (lib) mkIf mkEnableOption mkOption types;
+  inherit (lib)
+    mkIf
+    mkEnableOption
+    mkOption
+    types
+    ;
 
-  format = pkgs.formats.json {};
+  format = pkgs.formats.json { };
   configFile = format.generate "config.json" cfg.settings;
-in {
+in
+{
   options.services.testdata = {
     enable = mkEnableOption "testdata";
 
     settings = mkOption {
-      type = with types; let
-        valueType = nullOr (oneOf [
-          # TODO: restrict type to actual config file structure
-          bool
-          int
-          float
-          str
-          path
-          (attrsOf valueType)
-          (listOf valueType)
-        ]);
-      in
+      type =
+        with types;
+        let
+          valueType = nullOr (oneOf [
+            bool
+            int
+            float
+            str
+            path
+            (attrsOf valueType)
+            (listOf valueType)
+          ]);
+        in
         valueType;
       default = throw "Please specify services.testdata.settings";
     };
@@ -43,7 +52,7 @@ in {
   };
 
   config = mkIf cfg.enable {
-    environment.systemPackages = [package];
+    environment.systemPackages = [ package ];
 
     systemd.services.testdata = {
       enable = true;
@@ -53,7 +62,7 @@ in {
        ExecStart = "${package}/bin/testdata --config ${configFile} --listen ${cfg.host} --port ${builtins.toString cfg.port}";
      };
 
-      wantedBy = ["multi-user.target"];
+      wantedBy = [ "multi-user.target" ];
    };
  };
}

nix/package.nix (new file, 46 lines)

@@ -0,0 +1,46 @@
+{
+  src,
+  python3Packages,
+}:
+let
+  inherit (python3Packages)
+    setuptools
+    fastapi
+    uvicorn
+    pydantic
+    pytest
+    requests
+    mypy
+    pylint
+    ;
+
+  project = (builtins.fromTOML (builtins.readFile "${src}/pyproject.toml")).project;
+  pname = project.name;
+  version = project.version;
+in
+python3Packages.buildPythonPackage {
+  inherit pname version src;
+  pyproject = true;
+
+  build-system = [ setuptools ];
+
+  dependencies = [
+    fastapi
+    uvicorn
+    pydantic
+  ];
+
+  nativeCheckInputs = [
+    pytest
+    requests
+    mypy
+    pylint
+  ];
+
+  checkPhase = ''
+    pytest tests
+    mypy src
+    pylint src
+  '';
+}

pyproject.toml

@@ -1,27 +1,23 @@
 [project]
 name = "testdata"
-version = "1.1.0"
+version = "1.2.2"
 requires-python = "~=3.12, <4"
-dependencies = [
-    "fastapi~=0.115.3",
-    "uvicorn~=0.32.0",
-    "pydantic~=2.9.2",
-]
+dependencies = ["fastapi~=0.115", "uvicorn~=0.32", "pydantic~=2.9"]
 
 [project.optional-dependencies]
 dev = [
-    "pytest~=8.3.4",
-    "mypy~=1.13.0",
-    "pylint~=3.3.3",
-    "requests~=2.32.3",
-    "types-requests~=2.32.0"
+    "pytest~=8.3",
+    "mypy~=1.13",
+    "pylint~=3.3",
+    "requests~=2.32",
+    "types-requests~=2.32",
 ]
 
 [project.scripts]
 testdata = "testdata.main:main"
 
 [build-system]
-requires = ["setuptools~=75.1.1"]
+requires = ["setuptools~=78.1"]
 build-backend = "setuptools.build_meta"
 
 [tool.setuptools.packages.find]
@@ -40,5 +36,5 @@ disable = [
     "missing-class-docstring",
     "missing-function-docstring",
     "too-few-public-methods",
-    "broad-exception-caught"
+    "broad-exception-caught",
 ]

[logging configuration module]

@@ -132,7 +132,7 @@ def generate_log_config(log_path: str | None = None) -> dict:
                 'class': logging.handlers.RotatingFileHandler,
                 'level': 'DEBUG',
                 'formatter': 'json',
-                'filename': 'log.jsonl',
+                'filename': log_path,
                 'maxBytes': 1024 * 1024 * 10, # 10 MiB
                 'backupCount': 3
             }} if log_path is not None else {}),
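
Note (illustration, not part of the commit): the one-line fix makes the rotating file handler honour the log_path argument instead of always writing to ./log.jsonl. A minimal check, assuming the module is importable as testdata.logger and that the returned dict follows the standard logging.config.dictConfig layout with a top-level 'handlers' mapping:

    from testdata import logger  # assumed import path

    # A file handler is only present when a path is given, and it points at that path.
    config = logger.generate_log_config('/tmp/testdata/log.jsonl')
    assert any(
        isinstance(handler, dict) and handler.get('filename') == '/tmp/testdata/log.jsonl'
        for handler in config.get('handlers', {}).values()
    )

    # Without a path, no file handler (and hence no 'filename' key) is configured.
    assert not any(
        isinstance(handler, dict) and 'filename' in handler
        for handler in logger.generate_log_config().get('handlers', {}).values()
    )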

src/testdata/main.py (26 lines changed)

@@ -1,14 +1,32 @@
+import os
 import sys
 import argparse
 import asyncio
+import shutil
 
 from .testdata import Testdata
 
 
 def parse_args(args: list[str]):
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-c', '--config', type=argparse.FileType('r'), default='./config.json', help='Path to config file in JSON format.')
-    parser.add_argument('-l', '--listen', type=str, default='0.0.0.0', help='IP on which to listen.')
-    parser.add_argument('-p', '--port', type=int, default='8080', help='Port on which to serve the webserver.')
+    def formatter(prog):
+        return argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=shutil.get_terminal_size().columns)
+
+    parser = argparse.ArgumentParser(formatter_class=formatter)
+    parser.add_argument(
+        '-c', '--config', type=argparse.FileType('r'),
+        default=os.environ['TESTDATA_CONFIG'] if 'TESTDATA_CONFIG' in os.environ else './config.json',
+        help='Path to config file in JSON format.'
+    )
+    parser.add_argument(
+        '-l', '--listen', type=str,
+        default=os.environ['TESTDATA_HOST'] if 'TESTDATA_HOST' in os.environ else '0.0.0.0',
+        help='IP on which to listen.'
+    )
+    parser.add_argument(
+        '-p', '--port', type=int,
+        default=os.environ['TESTDATA_PORT'] if 'TESTDATA_PORT' in os.environ else 8080,
+        help='Port on which to serve the webserver.'
+    )
 
     return parser.parse_args(args)
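
Note (illustration, not from the repository): the new defaults let the same entry point be configured from the environment, which is what docker-compose.yaml relies on. Precedence is flags over TESTDATA_* variables over the built-in defaults; the values below are made up, and because --config uses argparse.FileType('r') with a ./config.json default, that file must exist whenever no config path is passed:

    import os
    from testdata.main import parse_args

    # Built-in default (requires ./config.json to be present, see note above).
    assert parse_args([]).port == 8080

    # Environment variable overrides the default; the string is still run through type=int.
    os.environ['TESTDATA_PORT'] = '1234'
    assert parse_args([]).port == 1234

    # An explicit flag wins over the environment.
    assert parse_args(['--port', '9999']).port == 9999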

src/testdata/testdata.py

@@ -1,10 +1,16 @@
 import os
 import json
 import asyncio
+import inspect
+import functools
+import random
+import importlib.metadata
+from datetime import datetime
 
-from typing_extensions import Annotated
 import uvicorn
-from fastapi import FastAPI, Request, status, HTTPException
+from typing_extensions import Annotated
+from fastapi import FastAPI, Request, Security, status, HTTPException
+from fastapi.security import APIKeyHeader, APIKeyQuery
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel, ConfigDict, Field, BeforeValidator, ValidationError
@@ -49,77 +55,158 @@ class Testdata:
     _config: Config
     _api: FastAPI
-    _state: dict[str, int]
+    _state: dict
     _logger: logger.Logger
 
     def __init__(self, config: Config):
         self._config = config
-        self._api = FastAPI(docs_url=None, redoc_url=None)
         self._logger = logger.getLogger('testdata')
+        self._api = self._setup_api()
 
         # Store internal state
-        self._state = {'data-used': 0}
-
-        @self._api.get('/zeros')
-        async def zeros(api_key: str, size: int | str, request: Request) -> StreamingResponse:
-            try:
-                extra = {'api_key': api_key, 'ip': request.client.host if request.client is not None else None, 'size': size}
-                self._logger.debug('Initiated request.', extra=extra)
-
-                if api_key not in config.authorized_keys:
-                    raise HTTPException(
-                        status_code=status.HTTP_401_UNAUTHORIZED,
-                        detail='Invalid API Key.'
-                    )
-
-                try:
-                    size = convert_to_bytes(size)
-                except ValueError as err:
-                    self._logger.warning('Invalid format for size.', extra=extra)
-                    raise HTTPException(
-                        status_code=status.HTTP_400_BAD_REQUEST,
-                        detail='Invalid format for size.'
-                    ) from err
-
-                if size < 0:
-                    raise MinSizePerRequestError
-                if config.max_size < size:
-                    raise MaxSizePerRequestError
-
-                # update internal state
-                if config.max_data < self._state['data-used'] + size:
-                    raise HTTPException(
-                        status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-                        detail='Service not available.'
-                    )
-                self._state['data-used'] += size
-
-                self._logger.debug('Successfully processed request.', extra=extra)
-                return StreamingResponse(
-                    status_code=status.HTTP_200_OK,
-                    content=generate_data(size, config.buffer_size),
-                    media_type='application/octet-stream',
-                    headers={
-                        'Content-Length': str(size)
-                    }
-                )
-
-            except MinSizePerRequestError as err:
-                self._logger.warning('Size if negative.', extra=extra)
-                raise HTTPException(
-                    status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
-                    detail='Size has to be non-negative.'
-                ) from err
-            except MaxSizePerRequestError as err:
-                self._logger.warning('Exceeded max size per request.', extra=extra)
-                raise HTTPException(
-                    status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
-                    detail=f'Exceeded max size per request of {config.max_size} Bytes.'
-                ) from err
-            except Exception as err:
-                self._logger.exception(err)
-                raise err
-
-    async def _update_state(self):
+        self._state = {
+            'version': importlib.metadata.version('testdata'),  # For future compatibility
+            'data-used': {f'{(today := datetime.today()).year}-{today.month:02}': 0}  # math each months data usage
+        }
+
+    def _setup_api(self) -> FastAPI:
+        api = FastAPI(docs_url='/', redoc_url=None)
+
+        # Security
+        def get_api_key(
+            api_key_query: str = Security(APIKeyQuery(name="api_key", auto_error=False)),
+            api_key_header: str = Security(APIKeyHeader(name="x-api-key", auto_error=False))
+        ) -> str:
+            # https://joshdimella.com/blog/adding-api-key-auth-to-fast-api
+
+            if api_key_query in self._config.authorized_keys:
+                return api_key_query
+            if api_key_header in self._config.authorized_keys:
+                return api_key_header
+
+            raise HTTPException(
+                status_code=status.HTTP_401_UNAUTHORIZED,
+                detail='Invalid or missing API Key'
+            )
+
+        # A wrapper to set the function signature to accept the api key dependency
+        def secure(func):
+            # Get old signature
+            positional_only, positional_or_keyword, variadic_positional, keyword_only, variadic_keyword = [], [], [], [], []
+            for value in inspect.signature(func).parameters.values():
+                if value.kind == inspect.Parameter.POSITIONAL_ONLY:
+                    positional_only.append(value)
+                elif value.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
+                    positional_or_keyword.append(value)
+                elif value.kind == inspect.Parameter.VAR_POSITIONAL:
+                    variadic_positional.append(value)
+                elif value.kind == inspect.Parameter.KEYWORD_ONLY:
+                    keyword_only.append(value)
+                elif value.kind == inspect.Parameter.VAR_KEYWORD:
+                    variadic_keyword.append(value)
+
+            # Avoid passing an unrecognized keyword
+            if inspect.iscoroutinefunction(func):
+                async def wrapper(*args, **kwargs):
+                    if len(variadic_keyword) == 0:
+                        if 'api_key' in kwargs:
+                            del kwargs['api_key']
+
+                    return await func(*args, **kwargs)
+            else:
+                def wrapper(*args, **kwargs):
+                    if len(variadic_keyword) == 0:
+                        if 'api_key' in kwargs:
+                            del kwargs['api_key']
+
+                    return func(*args, **kwargs)
+
+            # Override signature
+            wrapper.__signature__ = inspect.signature(func).replace(
+                parameters=(
+                    *positional_only,
+                    *positional_or_keyword,
+                    *variadic_positional,
+                    *keyword_only,
+                    inspect.Parameter('api_key', inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Security(get_api_key)),
+                    *variadic_keyword
+                )
+            )
+
+            return functools.wraps(func)(wrapper)
+
+        # Routes
+        api.get('/zeros')(secure(self._zeros))
+
+        return api
+
+    async def _zeros(self, size: int | str, request: Request, filename: str = 'zeros.bin') -> StreamingResponse:
+        try:
+            extra = {'id': f'{random.randint(0, 2 ** 32 - 1):08X}'}
+            self._logger.debug(
+                'Initiated request.',
+                extra=extra | {
+                    'ip': request.client.host if request.client is not None else None,
+                    'query-params': dict(request.query_params),
+                    'headers': dict(request.headers)
+                }
+            )
+
+            try:
+                size = convert_to_bytes(size)
+            except ValueError as err:
+                self._logger.warning('Invalid format for size.', extra=extra)
+                raise HTTPException(
+                    status_code=status.HTTP_400_BAD_REQUEST,
+                    detail='Invalid format for size.'
+                ) from err
+
+            if size < 0:
+                raise MinSizePerRequestError
+            if self._config.max_size < size:
+                raise MaxSizePerRequestError
+
+            # update internal state
+            current_date = f'{(today := datetime.today()).year}-{today.month:02}'
+            if current_date not in self._state['data-used']:
+                self._state['data-used'][current_date] = 0
+            if self._config.max_data < self._state['data-used'][current_date] + size:
+                raise HTTPException(
+                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
+                    detail='Service not available.'
+                )
+            self._state['data-used'][current_date] += size
+
+            self._logger.debug('Successfully processed request.', extra=extra)
+            return StreamingResponse(
+                status_code=status.HTTP_200_OK,
+                content=generate_data(size, self._config.buffer_size),
+                media_type='application/octet-stream',
+                headers={
+                    'Content-Length': str(size),
+                    'Content-Disposition': f'attachment; filename="{filename}"'
+                }
+            )
+
+        except MinSizePerRequestError as err:
+            self._logger.warning('Size if negative.', extra=extra)
+            raise HTTPException(
+                status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
+                detail='Size has to be non-negative.'
+            ) from err
+        except MaxSizePerRequestError as err:
+            self._logger.warning('Exceeded max size per request.', extra=extra)
+            raise HTTPException(
+                status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
+                detail=f'Exceeded max size per request of {self._config.max_size} Bytes.'
+            ) from err
+        except Exception as err:
+            self._logger.exception(err)
+            raise err
+
+    async def _update_state(self) -> None:
+        assert self._config.database is not None
+
         mode = 'r+' if os.path.exists(self._config.database) else 'w+'
         with open(self._config.database, mode, encoding='utf-8') as file:
@@ -130,7 +217,7 @@ class Testdata:
             while True:
                 file.seek(0)
-                json.dump(self._state, file)
+                json.dump(self._state, file, indent=2)
                 file.truncate()
 
                 await asyncio.sleep(self._config.database_update_interval)
@@ -142,7 +229,7 @@ class Testdata:
         self._logger = logger.getLogger('testdata')
         self._logger.info('Server started.')
 
-        coroutines = [asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve())]
+        coroutines = [uvicorn.Server(uvicorn.Config(self._api, host, port)).serve()]
 
         if self._config.database is not None:
             coroutines.append(self._update_state())
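
Note (illustration, not from the repository): secure() works by advertising an extra api_key parameter on the wrapper's __signature__ so that FastAPI resolves the Security(get_api_key) dependency, while the wrapped handler never receives the value. A self-contained sketch of that signature trick, with toy names and no FastAPI involved:

    import inspect
    from inspect import Parameter

    def handler(size: int, filename: str = 'zeros.bin') -> None:
        ...

    def wrapper(*args, **kwargs):
        kwargs.pop('api_key', None)  # strip the injected value before delegating
        return handler(*args, **kwargs)

    # Advertise an extra parameter so a signature-inspecting framework sees it.
    wrapper.__signature__ = inspect.signature(handler).replace(
        parameters=[
            *inspect.signature(handler).parameters.values(),
            Parameter('api_key', Parameter.POSITIONAL_OR_KEYWORD, default='<dependency>'),
        ]
    )

    print(inspect.signature(wrapper))
    # roughly: (size: int, filename: str = 'zeros.bin', api_key='<dependency>') -> None

From a client's point of view the key is accepted either as the api_key query parameter or as the x-api-key header, and the new filename parameter feeds the Content-Disposition header; the host, port and key below are assumptions:

    import requests

    BASE = 'http://127.0.0.1:8080'  # assumed --listen/--port
    KEY = 'one'                     # must be one of the configured keys

    r = requests.get(f'{BASE}/zeros', params={'api_key': KEY, 'size': '10KB'}, timeout=10)
    assert r.status_code == 200

    r = requests.get(f'{BASE}/zeros', params={'size': '10KB', 'filename': 'sample.bin'},
                     headers={'x-api-key': KEY}, timeout=10)
    assert r.headers['Content-Disposition'] == 'attachment; filename="sample.bin"'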

[test suite]

@@ -53,6 +53,17 @@ def _server(request) -> Generator[str, None, None]:
             break
 
 
+@pytest.mark.parametrize('_server', [({
+    'keys': ['one', 'two', 'three'],
+    'max-size': '100',
+    'max-data': 1234,
+    'buffer-size': '12MiB',
+})], indirect=['_server'])
+def test_invalid_api_key(_server):
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=four&size=100', timeout=TIMEOUT)
+    assert response.status_code == 401
+
+
 @pytest.mark.parametrize('_server', [({
     'keys': ['one', 'two', 'three'],
     'max-size': '100',
@@ -85,13 +96,16 @@ def test_request_size_upper_bound(_server):
 
 @pytest.mark.parametrize('_server', [({
     'keys': ['one', 'two', 'three'],
-    'max-size': '100',
-    'max-data': 1234,
+    'max-size': '100KB',
+    'max-data': '100KB',
     'buffer-size': '12MiB',
 })], indirect=['_server'])
-def test_invalid_api_key(_server):
-    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=four&size=100', timeout=TIMEOUT)
-    assert response.status_code == 401
+def test_request_max_data_used(_server):
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100KB', timeout=TIMEOUT)
+    assert response.status_code == 200
+
+    response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=1', timeout=TIMEOUT)
+    assert response.status_code == 500
 
 
 @pytest.mark.parametrize('_server', [({
@@ -102,15 +116,29 @@ def test_invalid_api_key(_server):
     'database-update-interval': 0.1
 })], indirect=['_server'])
 def test_check_database_update(_server):
+    import importlib.metadata
+    from datetime import datetime
+
     database = _server
 
     with open(database, 'r', encoding='utf-8') as file:
         file.seek(0)
-        assert json.load(file) == {'data-used': 0}
+        today = datetime.today()
+        assert json.load(file) == {
+            'version': importlib.metadata.version('testdata'),
+            'data-used': {
+                f'{today.year}-{today.month:02}': 0
+            }
+        }
 
         response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100', timeout=TIMEOUT)
         assert response.status_code == 200
 
         time.sleep(0.1)
         file.seek(0)
-        assert json.load(file) == {'data-used': 100}
+        assert json.load(file) == {
+            'version': importlib.metadata.version('testdata'),
+            'data-used': {
+                f'{today.year}-{today.month:02}': 100
+            }
+        }
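
Note (illustration, not from the repository): the state file asserted above now carries a version string and groups usage per calendar month; the key format matches the f-string used in Testdata.__init__ and Testdata._zeros:

    import json
    from datetime import datetime

    today = datetime.today()
    month_key = f'{today.year}-{today.month:02}'   # e.g. '2025-01'

    example_state = {
        'version': '1.2.2',              # importlib.metadata.version('testdata')
        'data-used': {month_key: 100},   # bytes served in that month
    }
    print(json.dumps(example_state, indent=2))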