6 Commits

SHA1 Message Date
394b5f13f8 updated 2025-05-29 17:57:06 +02:00
2c14192d95 changed dependency versions 2025-01-05 21:06:07 +01:00
2414e01f8b added docker as an example 2025-01-04 20:05:49 +01:00
0901edf8eb added testcase 2025-01-04 18:51:50 +01:00
d29cac2130 fixed logfile creation issue 2025-01-04 18:48:20 +01:00
3f74df5355 reworked api definition 2025-01-04 17:12:40 +01:00
12 changed files with 424 additions and 271 deletions

.gitignore (vendored, 18 lines changed)

@@ -1,3 +1,9 @@
# Nix #
/result
# Direnv #
/.direnv/
# Python #
# Virtual Environment
/.venv/
@@ -13,18 +19,6 @@ __pycache__/
/.pytest_cache/
/.mypy_cache/
# Nix #
# Build
/result
# MicroVM
/var.img
/control.socket
# Direnv #
/.direnv/
# Project specific files #
config.json
db.json

Dockerfile (new file, 28 lines)

@@ -0,0 +1,28 @@
# Nix builder
FROM nixos/nix:latest AS builder
# Copy our source and setup our working dir.
COPY . /tmp/build
WORKDIR /tmp/build
# Build our Nix environment
RUN nix \
--extra-experimental-features "nix-command flakes" \
--option filter-syscalls false \
build
# Copy the Nix store closure into a directory. The Nix store closure is the
# entire set of Nix store values that we need for our build.
RUN mkdir /tmp/nix-store-closure
RUN cp -r $(nix-store -qR result/) /tmp/nix-store-closure
# Final image is based on scratch. We copy a bunch of Nix dependencies
# but they're fully self-contained so we don't need Nix anymore.
FROM scratch
WORKDIR /app
# Copy /nix/store
COPY --from=builder /tmp/nix-store-closure /nix/store
COPY --from=builder /tmp/build/result /app
CMD ["/app/bin/testdata"]

docker-compose.yaml (new file, 15 lines)

@@ -0,0 +1,15 @@
services:
testdata:
image: result/latest
build:
dockerfile: ./Dockerfile
environment:
TESTDATA_HOST: 0.0.0.0
TESTDATA_PORT: 1234
TESTDATA_CONFIG: ./config.json
volumes:
- ./config.json:/app/config.json
- ./db.json:/app/db.json
- ./log.jsonl:/app/log.jsonl

flake.lock (generated, 26 lines changed)

@@ -27,11 +27,11 @@
"spectrum": "spectrum"
},
"locked": {
"lastModified": 1735074045,
"narHash": "sha256-CeYsC8J2dNiV2FCQOxK1oZ/jNpOF2io7aCEFHmfi95U=",
"lastModified": 1748464257,
"narHash": "sha256-PdnQSE2vPfql9WEjunj2qQnDpuuvk7HH+4djgXJSwFs=",
"owner": "astro",
"repo": "microvm.nix",
"rev": "2ae08de8e8068b00193b9cfbc0acc9dfdda03181",
"rev": "e238645b6f0447a2eb1d538d300d5049d4006f9f",
"type": "github"
},
"original": {
@@ -42,17 +42,17 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1733759999,
"narHash": "sha256-463SNPWmz46iLzJKRzO3Q2b0Aurff3U1n0nYItxq7jU=",
"owner": "nixos",
"lastModified": 1748370509,
"narHash": "sha256-QlL8slIgc16W5UaI3w7xHQEP+Qmv/6vSNTpoZrrSlbk=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
"rev": "4faa5f5321320e49a78ae7848582f684d64783e9",
"type": "github"
},
"original": {
"owner": "nixos",
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
"type": "github"
}
},
@@ -65,11 +65,11 @@
"spectrum": {
"flake": false,
"locked": {
"lastModified": 1733308308,
"narHash": "sha256-+RcbMAjSxV1wW5UpS9abIG1lFZC8bITPiFIKNnE7RLs=",
"lastModified": 1746869549,
"narHash": "sha256-BKZ/yZO/qeLKh9YqVkKB6wJiDQJAZNN5rk5NsMImsWs=",
"ref": "refs/heads/main",
"rev": "80c9e9830d460c944c8f730065f18bb733bc7ee2",
"revCount": 792,
"rev": "d927e78530892ec8ed389e8fae5f38abee00ad87",
"revCount": 862,
"type": "git",
"url": "https://spectrum-os.org/git/spectrum"
},

flake.nix (216 lines changed)

@@ -2,7 +2,7 @@
description = "A webserver to create files for testing purposes";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56";
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
microvm = {
url = "github:astro/microvm.nix";
@@ -10,163 +10,92 @@
};
};
outputs = {
self,
nixpkgs,
...
} @ inputs: let
supportedSystems = ["x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarch64-darwin"];
forAllSystems = nixpkgs.lib.genAttrs supportedSystems;
pkgs = forAllSystems (system: nixpkgs.legacyPackages.${system}.extend overlay);
overlay = final: prev: rec {
python3Packages = prev.python3Packages.overrideScope (pfinal: pprev: {
packageNameToDrv = x: builtins.getAttr (cleanPythonPackageName x) final.python3Packages;
outputs =
{
self,
nixpkgs,
...
}@inputs:
let
supportedSystems = [ "x86_64-linux" ];
forAllSystems = nixpkgs.lib.genAttrs supportedSystems;
pkgs = forAllSystems (system: nixpkgs.legacyPackages.${system});
in
{
# `nix build`
packages = forAllSystems (system: rec {
default = testdata;
testdata = pkgs.${system}.callPackage ./nix/package.nix { src = ./.; };
vm = self.nixosConfigurations.vm.config.microvm.declaredRunner;
});
cleanPythonPackageName = x: let
cleanName = builtins.match "([a-z,A-Z,0-9,_,-]+).*" x;
in
if cleanName != null
then builtins.elemAt cleanName 0
else builtins.warn "Could not determine package name from '${x}'" null;
};
# `nix develop`
devShells = forAllSystems (system: rec {
default = venv;
pyproject = builtins.fromTOML (builtins.readFile ./pyproject.toml);
venv = pkgs.${system}.mkShell {
shellHook = ''
if [ ! -d .venv/ ]; then
echo "Creating Virtual Environment..."
${pkgs.${system}.python3}/bin/python3 -m venv .venv
fi
buildDependencies = forAllSystems (system: builtins.map pkgs.${system}.python3Packages.packageNameToDrv pyproject.build-system.requires);
runtimeDependencies = forAllSystems (system: builtins.map pkgs.${system}.python3Packages.packageNameToDrv pyproject.project.dependencies);
optionalDependencies = forAllSystems (system: builtins.mapAttrs (name: value: builtins.map pkgs.${system}.python3Packages.packageNameToDrv value) pyproject.project.optional-dependencies);
in {
# `nix build`
packages = forAllSystems (system: let
buildTestdata = {skipCheck ? false}:
pkgs.${system}.python3Packages.buildPythonPackage {
pname = pyproject.project.name;
version = pyproject.project.version;
src = ./.;
alias activate='source .venv/bin/activate'
pyproject = true;
build-system = buildDependencies.${system};
dependencies = runtimeDependencies.${system};
optional-dependencies = optionalDependencies.${system};
nativeCheckInputs = optionalDependencies.${system}.dev;
checkPhase = let
dev = builtins.map (x: x.pname) optionalDependencies.${system}.dev;
in ''
${
if builtins.elem "pytest" dev && !skipCheck
then "pytest src tests"
else ""
}
${
if builtins.elem "mypy" dev && !skipCheck
then "mypy src tests"
else ""
}
${
if builtins.elem "pylint" dev && !skipCheck
then "pylint src tests"
else ""
}
echo "Entering Virtual Environment..."
source .venv/bin/activate
'';
};
in rec {
default = testdata;
testdata = buildTestdata {skipCheck = false;};
quick = buildTestdata {skipCheck = true;};
vm = self.nixosConfigurations.vm.config.microvm.declaredRunner;
});
});
# `nix fmt`
formatter = forAllSystems (system: pkgs.${system}.alejandra);
# NixOS Module
nixosModules.default = import ./nix/module.nix inputs;
# `nix develop`
devShells = forAllSystems (system: rec {
default = venv;
venv = pkgs.${system}.mkShell {
shellHook = ''
if [ ! -d .venv/ ]; then
echo "Creating Virtual Environment..."
${pkgs.${system}.python3}/bin/python3 -m venv .venv
fi
alias activate='source .venv/bin/activate'
echo "Entering Virtual Environment..."
source .venv/bin/activate
'';
};
});
# NixOS Module
nixosModules.default = import ./nix/module.nix inputs;
# nixos definition for a microvm to test nixosModules
nixosConfigurations = let
system = "x86_64-linux";
in {
vm = nixpkgs.lib.nixosSystem {
inherit system;
# NixOS definition for a microvm to test nixosModules
nixosConfigurations."vm" = nixpkgs.lib.nixosSystem {
system = "x86_64-linux";
modules = [
inputs.microvm.nixosModules.microvm
({config, ...}: {
system.stateVersion = config.system.nixos.version;
(
{ config, ... }:
{
services.getty.autologinUser = "root";
networking.hostName = "vm";
users.users.root.password = "";
microvm = {
hypervisor = "qemu";
microvm = {
# volumes = [
# {
# mountPoint = "/var";
# image = "var.img";
# size = 256;
# }
# ];
shares = [
{
# use proto = "virtiofs" for MicroVMs that are started by systemd
proto = "9p";
tag = "ro-store";
# a host's /nix/store will be picked up so that no
# squashfs/erofs will be built for it.
source = "/nix/store";
mountPoint = "/nix/.ro-store";
}
];
shares = [
{
# Host's /nix/store will be picked up so that no squashfs/erofs will be built for it.
tag = "ro-store";
source = "/nix/store";
mountPoint = "/nix/.ro-store";
}
];
interfaces = [
{
type = "user";
id = "qemu";
mac = "02:00:00:01:01:01";
}
];
interfaces = [
{
type = "user";
id = "qemu";
# Locally administered MAC addresses have one of 2/6/A/E in the second nibble.
mac = "02:00:00:01:01:01";
}
];
forwardPorts = [
{
host.port = config.services.testdata.port;
guest.port = config.services.testdata.port;
}
];
# "qemu" has 9p built-in!
hypervisor = "qemu";
socket = "control.socket";
};
})
forwardPorts = [
{
host.port = config.services.testdata.port;
guest.port = config.services.testdata.port;
}
];
};
}
)
self.nixosModules.default
rec {
networking.firewall.allowedTCPPorts = [services.testdata.port];
networking.firewall.allowedTCPPorts = [ services.testdata.port ];
services.testdata = {
enable = true;
@@ -174,11 +103,15 @@
port = 1234;
settings = {
keys = ["one" "two" "three"];
keys = [
"one"
"two"
"three"
];
max-size = "1GB";
max-data = "100GB";
buffer-size = "12MiB";
database = "/root/testdata_state.json";
database = "/root/testdata-state.json";
database-update-interval = 5.0;
log = "/root/log.jsonl";
};
@@ -187,5 +120,4 @@
];
};
};
};
}

nix/module.nix

@@ -1,32 +1,41 @@
inputs: {
inputs:
{
config,
lib,
pkgs,
...
}: let
}:
let
cfg = config.services.testdata;
package = inputs.self.packages.${pkgs.stdenv.hostPlatform.system}.default;
inherit (lib) mkIf mkEnableOption mkOption types;
inherit (lib)
mkIf
mkEnableOption
mkOption
types
;
format = pkgs.formats.json {};
format = pkgs.formats.json { };
configFile = format.generate "config.json" cfg.settings;
in {
in
{
options.services.testdata = {
enable = mkEnableOption "testdata";
settings = mkOption {
type = with types; let
valueType = nullOr (oneOf [
# TODO: restrict type to actual config file structure
bool
int
float
str
path
(attrsOf valueType)
(listOf valueType)
]);
in
type =
with types;
let
valueType = nullOr (oneOf [
bool
int
float
str
path
(attrsOf valueType)
(listOf valueType)
]);
in
valueType;
default = throw "Please specify services.testdata.settings";
};
@@ -43,7 +52,7 @@ in {
};
config = mkIf cfg.enable {
environment.systemPackages = [package];
environment.systemPackages = [ package ];
systemd.services.testdata = {
enable = true;
@@ -53,7 +62,7 @@ in {
ExecStart = "${package}/bin/testdata --config ${configFile} --listen ${cfg.host} --port ${builtins.toString cfg.port}";
};
wantedBy = ["multi-user.target"];
wantedBy = [ "multi-user.target" ];
};
};
}

nix/package.nix (new file, 46 lines)

@@ -0,0 +1,46 @@
{
src,
python3Packages,
}:
let
inherit (python3Packages)
setuptools
fastapi
uvicorn
pydantic
pytest
requests
mypy
pylint
;
project = (builtins.fromTOML (builtins.readFile "${src}/pyproject.toml")).project;
pname = project.name;
version = project.version;
in
python3Packages.buildPythonPackage {
inherit pname version src;
pyproject = true;
build-system = [ setuptools ];
dependencies = [
fastapi
uvicorn
pydantic
];
nativeCheckInputs = [
pytest
requests
mypy
pylint
];
checkPhase = ''
pytest tests
mypy src
pylint src
'';
}

pyproject.toml

@@ -1,27 +1,23 @@
[project]
name = "testdata"
version = "1.1.0"
version = "1.2.2"
requires-python = "~=3.12, <4"
dependencies = [
"fastapi~=0.115.3",
"uvicorn~=0.32.0",
"pydantic~=2.9.2",
]
dependencies = ["fastapi~=0.115", "uvicorn~=0.32", "pydantic~=2.9"]
[project.optional-dependencies]
dev = [
"pytest~=8.3.4",
"mypy~=1.13.0",
"pylint~=3.3.3",
"requests~=2.32.3",
"types-requests~=2.32.0"
"pytest~=8.3",
"mypy~=1.13",
"pylint~=3.3",
"requests~=2.32",
"types-requests~=2.32",
]
[project.scripts]
testdata = "testdata.main:main"
[build-system]
requires = ["setuptools~=75.1.1"]
requires = ["setuptools~=78.1"]
build-backend = "setuptools.build_meta"
[tool.setuptools.packages.find]
@@ -40,5 +36,5 @@ disable = [
"missing-class-docstring",
"missing-function-docstring",
"too-few-public-methods",
"broad-exception-caught"
"broad-exception-caught",
]

src/testdata/logger.py

@@ -132,7 +132,7 @@ def generate_log_config(log_path: str | None = None) -> dict:
'class': logging.handlers.RotatingFileHandler,
'level': 'DEBUG',
'formatter': 'json',
'filename': 'log.jsonl',
'filename': log_path,
'maxBytes': 1024 * 1024 * 10, # 10 MiB
'backupCount': 3
}} if log_path is not None else {}),
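This is the change behind the "fixed logfile creation issue" commit: the rotating handler previously wrote to a hard-coded log.jsonl regardless of the log_path argument. A minimal sketch of applying the generated config, assuming the function lives in src/testdata/logger.py and that the returned dict follows the usual logging.config.dictConfig schema:

import logging.config

from testdata.logger import generate_log_config  # module path assumed from the package layout

# With the fix, the RotatingFileHandler is created at the path passed in,
# not at a hard-coded ./log.jsonl in the current working directory.
logging.config.dictConfig(generate_log_config('testdata-log.jsonl'))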

src/testdata/main.py (vendored, 26 lines changed)

@@ -1,14 +1,32 @@
import os
import sys
import argparse
import asyncio
import shutil
from .testdata import Testdata
def parse_args(args: list[str]):
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', type=argparse.FileType('r'), default='./config.json', help='Path to config file in JSON format.')
parser.add_argument('-l', '--listen', type=str, default='0.0.0.0', help='IP on which to listen.')
parser.add_argument('-p', '--port', type=int, default='8080', help='Port on which to serve the webserver.')
def formatter(prog):
return argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=shutil.get_terminal_size().columns)
parser = argparse.ArgumentParser(formatter_class=formatter)
parser.add_argument(
'-c', '--config', type=argparse.FileType('r'),
default=os.environ['TESTDATA_CONFIG'] if 'TESTDATA_CONFIG' in os.environ else './config.json',
help='Path to config file in JSON format.'
)
parser.add_argument(
'-l', '--listen', type=str,
default=os.environ['TESTDATA_HOST'] if 'TESTDATA_HOST' in os.environ else '0.0.0.0',
help='IP on which to listen.'
)
parser.add_argument(
'-p', '--port', type=int,
default=os.environ['TESTDATA_PORT'] if 'TESTDATA_PORT' in os.environ else 8080,
help='Port on which to serve the webserver.'
)
return parser.parse_args(args)
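These defaults are what make the docker-compose environment variables (TESTDATA_CONFIG, TESTDATA_HOST, TESTDATA_PORT) work: an explicit flag wins, the environment comes second, and the hard-coded value is the fallback; argparse applies the type conversion to string defaults, so TESTDATA_PORT arrives as an int. A quick check of that precedence, assuming the package is importable (e.g. after pip install -e .):

import os
import tempfile

from testdata.main import parse_args

os.environ['TESTDATA_PORT'] = '1234'  # what docker-compose.yaml would inject

# --config has a FileType default, so point it at a real (empty) JSON file for the demo.
with tempfile.NamedTemporaryFile('w', suffix='.json') as cfg:
    cfg.write('{}')
    cfg.flush()
    print(parse_args(['--config', cfg.name]).port)                   # 1234, taken from the environment
    print(parse_args(['--config', cfg.name, '--port', '80']).port)   # 80, an explicit flag still wins
    print(parse_args(['--config', cfg.name]).listen)                 # 0.0.0.0, built-in fallback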

src/testdata/testdata.py

@@ -1,10 +1,16 @@
import os
import json
import asyncio
import inspect
import functools
import random
import importlib.metadata
from datetime import datetime
from typing_extensions import Annotated
import uvicorn
from fastapi import FastAPI, Request, status, HTTPException
from typing_extensions import Annotated
from fastapi import FastAPI, Request, Security, status, HTTPException
from fastapi.security import APIKeyHeader, APIKeyQuery
from fastapi.responses import StreamingResponse
from pydantic import BaseModel, ConfigDict, Field, BeforeValidator, ValidationError
@@ -49,77 +55,158 @@ class Testdata:
_config: Config
_api: FastAPI
_state: dict[str, int]
_state: dict
_logger: logger.Logger
def __init__(self, config: Config):
self._config = config
self._api = FastAPI(docs_url=None, redoc_url=None)
self._logger = logger.getLogger('testdata')
self._api = self._setup_api()
# Store internal state
self._state = {'data-used': 0}
self._state = {
'version': importlib.metadata.version('testdata'), # For future compatibility
'data-used': {f'{(today := datetime.today()).year}-{today.month:02}': 0}  # track data usage for each month
}
@self._api.get('/zeros')
async def zeros(api_key: str, size: int | str, request: Request) -> StreamingResponse:
try:
extra = {'api_key': api_key, 'ip': request.client.host if request.client is not None else None, 'size': size}
self._logger.debug('Initiated request.', extra=extra)
def _setup_api(self) -> FastAPI:
api = FastAPI(docs_url='/', redoc_url=None)
if api_key not in config.authorized_keys:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Invalid API Key.'
)
try:
size = convert_to_bytes(size)
except ValueError as err:
self._logger.warning('Invalid format for size.', extra=extra)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail='Invalid format for size.'
) from err
# Security
def get_api_key(
api_key_query: str = Security(APIKeyQuery(name="api_key", auto_error=False)),
api_key_header: str = Security(APIKeyHeader(name="x-api-key", auto_error=False))
) -> str:
# https://joshdimella.com/blog/adding-api-key-auth-to-fast-api
if size < 0:
raise MinSizePerRequestError
if config.max_size < size:
raise MaxSizePerRequestError
if api_key_query in self._config.authorized_keys:
return api_key_query
if api_key_header in self._config.authorized_keys:
return api_key_header
# update internal state
if config.max_data < self._state['data-used'] + size:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail='Service not available.'
)
self._state['data-used'] += size
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Invalid or missing API Key'
)
self._logger.debug('Successfully processed request.', extra=extra)
return StreamingResponse(
status_code=status.HTTP_200_OK,
content=generate_data(size, config.buffer_size),
media_type='application/octet-stream',
headers={
'Content-Length': str(size)
}
# A wrapper to set the function signature to accept the api key dependency
def secure(func):
# Get old signature
positional_only, positional_or_keyword, variadic_positional, keyword_only, variadic_keyword = [], [], [], [], []
for value in inspect.signature(func).parameters.values():
if value.kind == inspect.Parameter.POSITIONAL_ONLY:
positional_only.append(value)
elif value.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
positional_or_keyword.append(value)
elif value.kind == inspect.Parameter.VAR_POSITIONAL:
variadic_positional.append(value)
elif value.kind == inspect.Parameter.KEYWORD_ONLY:
keyword_only.append(value)
elif value.kind == inspect.Parameter.VAR_KEYWORD:
variadic_keyword.append(value)
# Avoid passing an unrecognized keyword
if inspect.iscoroutinefunction(func):
async def wrapper(*args, **kwargs):
if len(variadic_keyword) == 0:
if 'api_key' in kwargs:
del kwargs['api_key']
return await func(*args, **kwargs)
else:
def wrapper(*args, **kwargs):
if len(variadic_keyword) == 0:
if 'api_key' in kwargs:
del kwargs['api_key']
return func(*args, **kwargs)
# Override signature
wrapper.__signature__ = inspect.signature(func).replace(
parameters=(
*positional_only,
*positional_or_keyword,
*variadic_positional,
*keyword_only,
inspect.Parameter('api_key', inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Security(get_api_key)),
*variadic_keyword
)
)
except MinSizePerRequestError as err:
self._logger.warning('Size is negative.', extra=extra)
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail='Size has to be non-negative.'
) from err
except MaxSizePerRequestError as err:
self._logger.warning('Exceeded max size per request.', extra=extra)
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail=f'Exceeded max size per request of {config.max_size} Bytes.'
) from err
except Exception as err:
self._logger.exception(err)
raise err
return functools.wraps(func)(wrapper)
# Routes
api.get('/zeros')(secure(self._zeros))
return api
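The __signature__ override is what makes secure() work: FastAPI derives an endpoint's query and header dependencies from inspect.signature(), so appending an api_key parameter whose default is Security(get_api_key) forces key validation on every wrapped route without the handler declaring it. A standalone sketch of that mechanism with a hypothetical handler (no FastAPI needed to see the effect):

import inspect

def handler(size: int) -> str:      # hypothetical route handler, standing in for _zeros
    return f'{size} bytes'

def wrapper(*args, **kwargs):
    kwargs.pop('api_key', None)     # discard the injected value, as the wrapper in secure() does
    return handler(*args, **kwargs)

api_key = inspect.Parameter('api_key', inspect.Parameter.POSITIONAL_OR_KEYWORD,
                            default=None)  # Security(get_api_key) in the real code
wrapper.__signature__ = inspect.signature(handler).replace(
    parameters=(*inspect.signature(handler).parameters.values(), api_key)
)

print(inspect.signature(wrapper))        # (size: int, api_key=None) -> str
print(wrapper(512, api_key='ignored'))   # 512 bytes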
async def _zeros(self, size: int | str, request: Request, filename: str = 'zeros.bin') -> StreamingResponse:
try:
extra = {'id': f'{random.randint(0, 2 ** 32 - 1):08X}'}
self._logger.debug(
'Initiated request.',
extra=extra | {
'ip': request.client.host if request.client is not None else None,
'query-params': dict(request.query_params),
'headers': dict(request.headers)
}
)
try:
size = convert_to_bytes(size)
except ValueError as err:
self._logger.warning('Invalid format for size.', extra=extra)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail='Invalid format for size.'
) from err
if size < 0:
raise MinSizePerRequestError
if self._config.max_size < size:
raise MaxSizePerRequestError
# update internal state
current_date = f'{(today := datetime.today()).year}-{today.month:02}'
if current_date not in self._state['data-used']:
self._state['data-used'][current_date] = 0
if self._config.max_data < self._state['data-used'][current_date] + size:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail='Service not available.'
)
self._state['data-used'][current_date] += size
self._logger.debug('Successfully processed request.', extra=extra)
return StreamingResponse(
status_code=status.HTTP_200_OK,
content=generate_data(size, self._config.buffer_size),
media_type='application/octet-stream',
headers={
'Content-Length': str(size),
'Content-Disposition': f'attachment; filename="{filename}"'
}
)
except MinSizePerRequestError as err:
self._logger.warning('Size is negative.', extra=extra)
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail='Size has to be non-negative.'
) from err
except MaxSizePerRequestError as err:
self._logger.warning('Exceeded max size per request.', extra=extra)
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail=f'Exceeded max size per request of {self._config.max_size} Bytes.'
) from err
except Exception as err:
self._logger.exception(err)
raise err
async def _update_state(self) -> None:
assert self._config.database is not None
async def _update_state(self):
mode = 'r+' if os.path.exists(self._config.database) else 'w+'
with open(self._config.database, mode, encoding='utf-8') as file:
@@ -130,7 +217,7 @@ class Testdata:
while True:
file.seek(0)
json.dump(self._state, file)
json.dump(self._state, file, indent=2)
file.truncate()
await asyncio.sleep(self._config.database_update_interval)
@@ -142,7 +229,7 @@ class Testdata:
self._logger = logger.getLogger('testdata')
self._logger.info('Server started.')
coroutines = [asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve())]
coroutines = [uvicorn.Server(uvicorn.Config(self._api, host, port)).serve()]
if self._config.database is not None:
coroutines.append(self._update_state())
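With the reworked definition the key is accepted either as an api_key query parameter or as an x-api-key header (see get_api_key above), and /zeros responses now carry a Content-Disposition filename; size takes a byte count or a unit string such as '100KB'. A quick manual check against a running instance, assuming the port from docker-compose.yaml and a configured key named 'one':

import requests

BASE = 'http://127.0.0.1:1234'  # host/port assumed from docker-compose.yaml

# Key as a query parameter, as the existing tests do:
r = requests.get(f'{BASE}/zeros', params={'api_key': 'one', 'size': 1024}, timeout=10)
print(r.status_code, len(r.content), r.headers.get('Content-Disposition'))  # 200 1024 attachment; filename="zeros.bin"

# Same request, authenticating via the x-api-key header instead:
r = requests.get(f'{BASE}/zeros', params={'size': 1024}, headers={'x-api-key': 'one'}, timeout=10)
print(r.status_code, len(r.content))  # 200 1024

# A missing or unknown key is rejected:
print(requests.get(f'{BASE}/zeros', params={'size': 1}, timeout=10).status_code)  # 401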

tests/ (pytest test module)

@@ -53,6 +53,17 @@ def _server(request) -> Generator[str, None, None]:
break
@pytest.mark.parametrize('_server', [({
'keys': ['one', 'two', 'three'],
'max-size': '100',
'max-data': 1234,
'buffer-size': '12MiB',
})], indirect=['_server'])
def test_invalid_api_key(_server):
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=four&size=100', timeout=TIMEOUT)
assert response.status_code == 401
@pytest.mark.parametrize('_server', [({
'keys': ['one', 'two', 'three'],
'max-size': '100',
@@ -85,13 +96,16 @@ def test_request_size_upper_bound(_server):
@pytest.mark.parametrize('_server', [({
'keys': ['one', 'two', 'three'],
'max-size': '100',
'max-data': 1234,
'max-size': '100KB',
'max-data': '100KB',
'buffer-size': '12MiB',
})], indirect=['_server'])
def test_invalid_api_key(_server):
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=four&size=100', timeout=TIMEOUT)
assert response.status_code == 401
def test_request_max_data_used(_server):
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100KB', timeout=TIMEOUT)
assert response.status_code == 200
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=1', timeout=TIMEOUT)
assert response.status_code == 500
@pytest.mark.parametrize('_server', [({
@@ -102,15 +116,29 @@ def test_invalid_api_key(_server):
'database-update-interval': 0.1
})], indirect=['_server'])
def test_check_database_update(_server):
import importlib.metadata
from datetime import datetime
database = _server
with open(database, 'r', encoding='utf-8') as file:
file.seek(0)
assert json.load(file) == {'data-used': 0}
today = datetime.today()
assert json.load(file) == {
'version': importlib.metadata.version('testdata'),
'data-used': {
f'{today.year}-{today.month:02}': 0
}
}
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100', timeout=TIMEOUT)
assert response.status_code == 200
time.sleep(0.1)
file.seek(0)
assert json.load(file) == {'data-used': 100}
assert json.load(file) == {
'version': importlib.metadata.version('testdata'),
'data-used': {
f'{today.year}-{today.month:02}': 100
}
}