Compare commits


1 commit

SHA1        Message          Date
d9d8759600  initial rewrite  2025-01-03 23:04:20 +01:00
10 changed files with 122 additions and 296 deletions


@@ -1,28 +0,0 @@
# Nix builder
FROM nixos/nix:latest AS builder
# Copy our source and setup our working dir.
COPY . /tmp/build
WORKDIR /tmp/build
# Build our Nix environment
RUN nix \
--extra-experimental-features "nix-command flakes" \
--option filter-syscalls false \
build
# Copy the Nix store closure into a directory. The Nix store closure is the
# entire set of Nix store values that we need for our build.
RUN mkdir /tmp/nix-store-closure
RUN cp -r $(nix-store -qR result/) /tmp/nix-store-closure
# Final image is based on scratch. We copy a bunch of Nix dependencies
# but they're fully self-contained so we don't need Nix anymore.
FROM scratch
WORKDIR /app
# Copy /nix/store
COPY --from=builder /tmp/nix-store-closure /nix/store
COPY --from=builder /tmp/build/result /app
CMD ["/app/bin/testdata"]


@@ -1,15 +1,19 @@
# Simple Testdata Generator
# Simple Test Data Generator
## Example Config
```json
{
"keys": ["TESTKEY1", "TESTKEY2", "TESTKEY3"],
"max-size": "1GiB",
"max-data": "1TiB",
"buffer-size": "12MiB",
"database": "./db.json",
"database-update-interval": 5.0,
"log": "./log.jsonl"
"binds": [
"127.0.0.1:9250"
],
"log": "-",
"buffer-size": "4KiB",
"max-size": "2GB",
"keys": [
"TESTKEY"
],
"max-data": "10GB",
"database": "database.json"
}
```
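
For orientation, a minimal client sketch against the example config above. It assumes the server listens on the configured bind address and that /zeros takes api_key and size query parameters, which matches the tests later in this diff:

```python
import requests

# Bind address and key taken from the example config above.
resp = requests.get(
    'http://127.0.0.1:9250/zeros',
    params={'api_key': 'TESTKEY', 'size': 100},  # request 100 zero bytes
    timeout=5,
)
resp.raise_for_status()
assert len(resp.content) == 100
```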


@@ -1,15 +0,0 @@
services:
testdata:
image: result/latest
build:
dockerfile: ./Dockerfile
environment:
TESTDATA_HOST: 0.0.0.0
TESTDATA_PORT: 1234
TESTDATA_CONFIG: ./config.json
volumes:
- ./config.json:/app/config.json
- ./db.json:/app/db.json
- ./log.jsonl:/app/log.jsonl

flake.lock (generated, 8 lines changed)

@@ -42,17 +42,17 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1733759999,
"narHash": "sha256-463SNPWmz46iLzJKRzO3Q2b0Aurff3U1n0nYItxq7jU=",
"lastModified": 1735471104,
"narHash": "sha256-0q9NGQySwDQc7RhAV2ukfnu7Gxa5/ybJ2ANT8DQrQrs=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
"rev": "88195a94f390381c6afcdaa933c2f6ff93959cb4",
"type": "github"
},
"original": {
"owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
"type": "github"
}
},


@@ -2,7 +2,7 @@
description = "A webserver to create files for testing purposes";
inputs = {
nixpkgs.url = "github:nixos/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56";
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
microvm = {
url = "github:astro/microvm.nix";
@@ -61,17 +61,17 @@
in ''
${
if builtins.elem "pytest" dev && !skipCheck
then "pytest tests"
then "pytest src tests"
else ""
}
${
if builtins.elem "mypy" dev && !skipCheck
then "mypy src"
then "mypy src tests"
else ""
}
${
if builtins.elem "pylint" dev && !skipCheck
then "pylint src"
then "pylint src tests"
else ""
}
'';
@@ -174,13 +174,11 @@
port = 1234;
settings = {
keys = ["one" "two" "three"];
max-size = "1GB";
max-data = "100GB";
buffer-size = "12MiB";
database = "/root/testdata_state.json";
database-update-interval = 5.0;
log = "/root/log.jsonl";
"keys" = ["one" "two" "three"];
"max-size" = "1GB";
"max-data" = "100GB";
"buffer-size" = "12MiB";
"database" = "/root/testdata_state.json";
};
};
}


@@ -1,27 +1,27 @@
[project]
name = "testdata"
version = "1.2.1"
requires-python = "~=3.12, <4"
version = "1.1.0"
requires-python = ">=3.11, <4"
dependencies = [
"fastapi~=0.115",
"uvicorn~=0.32",
"pydantic~=2.9",
"fastapi==0.115.*",
"uvicorn==0.32.*",
"pydantic==2.10.*",
]
[project.optional-dependencies]
dev = [
"pytest~=8.3",
"mypy~=1.13",
"pylint~=3.3",
"requests~=2.32",
"types-requests~=2.32"
"pytest==8.3.*",
"mypy==1.13.*",
"pylint==3.3.*",
"requests==2.32.*",
"types-requests==2.32.*"
]
[project.scripts]
testdata = "testdata.main:main"
[build-system]
requires = ["setuptools~=75.1"]
requires = ["setuptools==75.*"]
build-backend = "setuptools.build_meta"
[tool.setuptools.packages.find]


@@ -132,7 +132,7 @@ def generate_log_config(log_path: str | None = None) -> dict:
'class': logging.handlers.RotatingFileHandler,
'level': 'DEBUG',
'formatter': 'json',
'filename': log_path,
'filename': 'log.jsonl',
'maxBytes': 1024 * 1024 * 10, # 10 MiB
'backupCount': 3
}} if log_path is not None else {}),
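The fragment above is part of a logging dict config. A self-contained sketch of how such a config is applied, with illustrative keys and the 'class' given as the dotted string that logging.config.dictConfig resolves:

```python
import logging
import logging.config

log_config = {
    'version': 1,
    'handlers': {
        'file': {
            'class': 'logging.handlers.RotatingFileHandler',
            'level': 'DEBUG',
            'filename': 'log.jsonl',
            'maxBytes': 1024 * 1024 * 10,  # 10 MiB, matching the diff above
            'backupCount': 3,
        },
    },
    'root': {'level': 'DEBUG', 'handlers': ['file']},
}

logging.config.dictConfig(log_config)
logging.getLogger('testdata').debug('logging configured')
```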

src/testdata/main.py (vendored, 26 lines changed)

@@ -1,32 +1,14 @@
import os
import sys
import argparse
import asyncio
import shutil
from .testdata import Testdata
def parse_args(args: list[str]):
def formatter(prog):
return argparse.ArgumentDefaultsHelpFormatter(prog, max_help_position=shutil.get_terminal_size().columns)
parser = argparse.ArgumentParser(formatter_class=formatter)
parser.add_argument(
'-c', '--config', type=argparse.FileType('r'),
default=os.environ['TESTDATA_CONFIG'] if 'TESTDATA_CONFIG' in os.environ else './config.json',
help='Path to config file in JSON format.'
)
parser.add_argument(
'-l', '--listen', type=str,
default=os.environ['TESTDATA_HOST'] if 'TESTDATA_HOST' in os.environ else '0.0.0.0',
help='IP on which to listen.'
)
parser.add_argument(
'-p', '--port', type=int,
default=os.environ['TESTDATA_PORT'] if 'TESTDATA_PORT' in os.environ else 8080,
help='Port on which to serve the webserver.'
)
parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config', type=argparse.FileType('r'), default='./config.json', help='Path to config file in JSON format.')
parser.add_argument('-l', '--listen', type=str, default='0.0.0.0', help='IP on which to listen.')
parser.add_argument('-p', '--port', type=int, default='8080', help='Port on which to serve the webserver.')
return parser.parse_args(args)
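
One thing worth knowing about the simplified parser: argparse.FileType('r') opens the config file at parse time, so the default ./config.json must exist even when only other flags are given. A usage sketch (the import path follows src/testdata/main.py above):

```python
import sys
from testdata.main import parse_args

# e.g. sys.argv[1:] == ['-c', 'config.json', '-l', '127.0.0.1', '-p', '9250']
args = parse_args(sys.argv[1:])
print(args.config.name, args.listen, args.port)  # args.config is an open file
```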


@@ -1,16 +1,10 @@
import os
import json
import asyncio
import inspect
import functools
import random
import importlib.metadata
from datetime import datetime
import uvicorn
from typing_extensions import Annotated
from fastapi import FastAPI, Request, Security, status, HTTPException
from fastapi.security import APIKeyHeader, APIKeyQuery
import uvicorn
from fastapi import FastAPI, status, HTTPException
from fastapi.responses import StreamingResponse
from pydantic import BaseModel, ConfigDict, Field, BeforeValidator, ValidationError
@@ -49,165 +43,85 @@ class Testdata:
max_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-size')
max_data: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-data')
buffer_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='buffer-size')
database: str | None = None
log: str | None = Field(alias='log', default=None)
database_update_interval: float = Field(alias='database-update-interval', default=5)
database: str
log_path: str | None = Field(alias='log-path', default=None)
update_database_interval: float = Field(alias='update-database-interval', default=5)
_config: Config
_api: FastAPI
_state: dict
_state: dict[str, int]
_logger: logger.Logger
def __init__(self, config: Config):
self._config = config
self._api = FastAPI(docs_url=None, redoc_url=None)
self._logger = logger.getLogger('testdata')
self._api = self._setup_api()
# Store internal state
self._state = {
'version': importlib.metadata.version('testdata'), # For future compatibility
'data-used': {f'{(today := datetime.today()).year}-{today.month:02}': 0} # track each month's data usage
}
def _setup_api(self) -> FastAPI:
api = FastAPI(docs_url='/', redoc_url=None)
# Security
def get_api_key(
api_key_query: str = Security(APIKeyQuery(name="api_key", auto_error=False)),
api_key_header: str = Security(APIKeyHeader(name="x-api-key", auto_error=False))
) -> str:
# https://joshdimella.com/blog/adding-api-key-auth-to-fast-api
if api_key_query in self._config.authorized_keys:
return api_key_query
if api_key_header in self._config.authorized_keys:
return api_key_header
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Invalid or missing API Key'
)
# A wrapper to set the function signature to accept the api key dependency
def secure(func):
# Get old signature
positional_only, positional_or_keyword, variadic_positional, keyword_only, variadic_keyword = [], [], [], [], []
for value in inspect.signature(func).parameters.values():
if value.kind == inspect.Parameter.POSITIONAL_ONLY:
positional_only.append(value)
elif value.kind == inspect.Parameter.POSITIONAL_OR_KEYWORD:
positional_or_keyword.append(value)
elif value.kind == inspect.Parameter.VAR_POSITIONAL:
variadic_positional.append(value)
elif value.kind == inspect.Parameter.KEYWORD_ONLY:
keyword_only.append(value)
elif value.kind == inspect.Parameter.VAR_KEYWORD:
variadic_keyword.append(value)
# Avoid passing an unrecognized keyword
if inspect.iscoroutinefunction(func):
async def wrapper(*args, **kwargs):
if len(variadic_keyword) == 0:
if 'api_key' in kwargs:
del kwargs['api_key']
return await func(*args, **kwargs)
else:
def wrapper(*args, **kwargs):
if len(variadic_keyword) == 0:
if 'api_key' in kwargs:
del kwargs['api_key']
return func(*args, **kwargs)
# Override signature
wrapper.__signature__ = inspect.signature(func).replace(
parameters=(
*positional_only,
*positional_or_keyword,
*variadic_positional,
*keyword_only,
inspect.Parameter('api_key', inspect.Parameter.POSITIONAL_OR_KEYWORD, default=Security(get_api_key)),
*variadic_keyword
)
)
return functools.wraps(func)(wrapper)
# Routes
api.get('/zeros')(secure(self._zeros))
return api
async def _zeros(self, size: int | str, request: Request, filename: str = 'zeros.bin') -> StreamingResponse:
try:
extra = {'id': f'{random.randint(0, 2 ** 32 - 1):08X}'}
self._logger.debug(
'Initiated request.',
extra=extra | {
'ip': request.client.host if request.client is not None else None,
'query-params': dict(request.query_params),
'headers': dict(request.headers)
}
)
self._state = {'data-used': 0}
@self._api.get('/zeros')
async def zeros(api_key: str, size: int | str) -> StreamingResponse:
try:
size = convert_to_bytes(size)
except ValueError as err:
self._logger.warning('Invalid format for size.', extra=extra)
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail='Invalid format for size.'
) from err
self._logger.debug('', extra={'api_key': api_key, 'size': size})
if size < 0:
raise MinSizePerRequestError
if self._config.max_size < size:
raise MaxSizePerRequestError
if api_key not in config.authorized_keys:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail='Invalid API Key.'
)
try:
size = convert_to_bytes(size)
except ValueError as err:
self._logger.warning('Invalid format for size.', extra={'api_key': api_key, 'size': size})
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail='Invalid format for size.'
) from err
# update internal state
current_date = f'{(today := datetime.today()).year}-{today.month:02}'
if current_date not in self._state['data-used']:
self._state['data-used'][current_date] = 0
if self._config.max_data < self._state['data-used'][current_date] + size:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail='Service not available.'
if size < 0:
raise MinSizePerRequestError
if config.max_size < size:
raise MaxSizePerRequestError
# update internal state
if config.max_data < self._state['data-used'] + size:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail='Service not available.'
)
self._state['data-used'] += size
return StreamingResponse(
status_code=status.HTTP_200_OK,
content=generate_data(size, config.buffer_size),
media_type='application/octet-stream',
headers={
'Content-Length': str(size)
}
)
self._state['data-used'][current_date] += size
self._logger.debug('Successfully processed request.', extra=extra)
return StreamingResponse(
status_code=status.HTTP_200_OK,
content=generate_data(size, self._config.buffer_size),
media_type='application/octet-stream',
headers={
'Content-Length': str(size),
'Content-Disposition': f'attachment; filename="{filename}"'
}
)
except MinSizePerRequestError as err:
self._logger.warning('Size is negative.', extra={'api_key': api_key, 'size': size})
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail='Size has to be non-negative.'
) from err
except MaxSizePerRequestError as err:
self._logger.warning('Exceeded max size per request.', extra={'api_key': api_key, 'size': size})
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail=f'Exceeded max size per request of {config.max_size} Bytes.'
) from err
except Exception as err:
self._logger.exception(err)
raise err
except MinSizePerRequestError as err:
self._logger.warning('Size is negative.', extra=extra)
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail='Size has to be non-negative.'
) from err
except MaxSizePerRequestError as err:
self._logger.warning('Exceeded max size per request.', extra=extra)
raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail=f'Exceeded max size per request of {self._config.max_size} Bytes.'
) from err
except Exception as err:
self._logger.exception(err)
raise err
async def _update_state(self) -> None:
assert self._config.database is not None
mode = 'r+' if os.path.exists(self._config.database) else 'w+'
async def _update_state(self):
if os.path.exists(self._config.database):
mode = 'r+'
else:
mode = 'w+'
with open(self._config.database, mode, encoding='utf-8') as file:
try:
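
The removed secure() helper above relies on one non-obvious FastAPI behavior: dependencies are collected from a route function's signature, so overriding __signature__ lets a wrapper inject an api_key Security dependency into handlers that never declare it. A trimmed, standalone sketch of the same idea, not the project's exact code (it assumes async handlers only):

```python
import inspect
import functools
from fastapi import FastAPI, Security
from fastapi.security import APIKeyQuery

app = FastAPI()
api_key_query = APIKeyQuery(name='api_key')  # rejects requests without the key

def require_key(func):
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        kwargs.pop('api_key', None)  # the handler itself never sees the key
        return await func(*args, **kwargs)

    # Append an api_key parameter so FastAPI treats it as a dependency.
    params = list(inspect.signature(func).parameters.values())
    params.append(inspect.Parameter(
        'api_key', inspect.Parameter.KEYWORD_ONLY,
        default=Security(api_key_query),
    ))
    wrapper.__signature__ = inspect.signature(func).replace(parameters=params)
    return wrapper

@app.get('/ping')
@require_key
async def ping() -> dict:
    return {'ok': True}
```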
@@ -217,23 +131,22 @@ class Testdata:
while True:
file.seek(0)
json.dump(self._state, file, indent=2)
json.dump(self._state, file)
file.truncate()
await asyncio.sleep(self._config.database_update_interval)
await asyncio.sleep(self._config.update_database_interval)
async def run(self, host: str, port: int) -> None:
try:
if self._config.log is not None:
logger.setup_logging(self._config.log)
if self._config.log_path is not None:
logger.setup_logging(self._config.log_path)
self._logger = logger.getLogger('testdata')
# self._logger = logger.getLogger('testdata')
self._logger.info('Server started.')
coroutines = [uvicorn.Server(uvicorn.Config(self._api, host, port)).serve()]
if self._config.database is not None:
coroutines.append(self._update_state())
await asyncio.gather(*coroutines)
await asyncio.gather(
asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve()),
self._update_state()
)
except asyncio.exceptions.CancelledError:
self._logger.info('Server stopped.')
except Exception as err:

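The reworked run() above drops the conditional coroutine list in favor of a fixed gather of the uvicorn server and the state updater. A standalone sketch of that pattern, with an illustrative host and port:

```python
import asyncio
import uvicorn
from fastapi import FastAPI

app = FastAPI()

async def update_state_periodically(interval: float = 5.0) -> None:
    while True:
        # Persist state here; the real code json.dump()s self._state to disk.
        await asyncio.sleep(interval)

async def main() -> None:
    server = uvicorn.Server(uvicorn.Config(app, host='127.0.0.1', port=9250))
    try:
        # Serve the API and update state concurrently until cancelled.
        await asyncio.gather(server.serve(), update_state_periodically())
    except asyncio.CancelledError:
        pass  # server stopped

asyncio.run(main())
```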

@@ -53,17 +53,6 @@ def _server(request) -> Generator[str, None, None]:
break
@pytest.mark.parametrize('_server', [({
'keys': ['one', 'two', 'three'],
'max-size': '100',
'max-data': 1234,
'buffer-size': '12MiB',
})], indirect=['_server'])
def test_invalid_api_key(_server):
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=four&size=100', timeout=TIMEOUT)
assert response.status_code == 401
@pytest.mark.parametrize('_server', [({
'keys': ['one', 'two', 'three'],
'max-size': '100',
@@ -96,16 +85,13 @@ def test_request_size_upper_bound(_server):
@pytest.mark.parametrize('_server', [({
'keys': ['one', 'two', 'three'],
'max-size': '100KB',
'max-data': '100KB',
'max-size': '100',
'max-data': 1234,
'buffer-size': '12MiB',
})], indirect=['_server'])
def test_request_max_data_used(_server):
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100KB', timeout=TIMEOUT)
assert response.status_code == 200
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=1', timeout=TIMEOUT)
assert response.status_code == 500
def test_invalid_api_key(_server):
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=four&size=100', timeout=TIMEOUT)
assert response.status_code == 401
@pytest.mark.parametrize('_server', [({
@@ -113,32 +99,18 @@ def test_request_max_data_used(_server):
'max-size': '1KB',
'max-data': '1KB',
'buffer-size': '12MiB',
'database-update-interval': 0.1
'update-database-interval': 0.1
})], indirect=['_server'])
def test_check_database_update(_server):
import importlib.metadata
from datetime import datetime
database = _server
with open(database, 'r', encoding='utf-8') as file:
file.seek(0)
today = datetime.today()
assert json.load(file) == {
'version': importlib.metadata.version('testdata'),
'data-used': {
f'{today.year}-{today.month:02}': 0
}
}
assert json.load(file) == {'data-used': 0}
response = requests.get(f'{PROTOCOL}://{HOST}:{PORT}/zeros?api_key=one&size=100', timeout=TIMEOUT)
assert response.status_code == 200
time.sleep(0.1)
file.seek(0)
assert json.load(file) == {
'version': importlib.metadata.version('testdata'),
'data-used': {
f'{today.year}-{today.month:02}': 100
}
}
assert json.load(file) == {'data-used': 100}
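
The deleted assertions document the pre-rewrite database format, a per-month ledger keyed by 'YYYY-MM'; a short sketch of that keying, taken directly from the removed code, for reference:

```python
from datetime import datetime

today = datetime.today()
month_key = f'{today.year}-{today.month:02}'   # e.g. '2025-01'

state = {'data-used': {month_key: 0}}
state['data-used'][month_key] += 100  # accounting after a 100-byte request
print(state)
```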