1 Commit

Author SHA1 Message Date
d9d8759600 initial rewrite 2025-01-03 23:04:20 +01:00
6 changed files with 54 additions and 52 deletions

View File

@@ -1,15 +1,19 @@
# Simple Testdata Generator # Simple Test Data Generator
## Example Config ## Example Config
```json ```json
{ {
"keys": ["TESTKEY1", "TESTKEY2", "TESTKEY3"], "binds": [
"max-size": "1GiB", "127.0.0.1:9250"
"max-data": "1TiB", ],
"buffer-size": "12MiB", "log": "-",
"database": "./db.json", "buffer-size": "4KiB",
"database-update-interval": 5.0, "max-size": "2GB",
"log": "./log.jsonl" "keys": [
"TESTKEY"
],
"max-data": "10GB",
"database": "database.json"
} }
``` ```

8
flake.lock generated
View File

@@ -42,17 +42,17 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1733759999, "lastModified": 1735471104,
"narHash": "sha256-463SNPWmz46iLzJKRzO3Q2b0Aurff3U1n0nYItxq7jU=", "narHash": "sha256-0q9NGQySwDQc7RhAV2ukfnu7Gxa5/ybJ2ANT8DQrQrs=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56", "rev": "88195a94f390381c6afcdaa933c2f6ff93959cb4",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "nixos", "owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
"type": "github" "type": "github"
} }
}, },

View File

@@ -2,7 +2,7 @@
description = "A webserver to create files for testing purposes"; description = "A webserver to create files for testing purposes";
inputs = { inputs = {
nixpkgs.url = "github:nixos/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56"; nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
microvm = { microvm = {
url = "github:astro/microvm.nix"; url = "github:astro/microvm.nix";
@@ -174,13 +174,11 @@
port = 1234; port = 1234;
settings = { settings = {
keys = ["one" "two" "three"]; "keys" = ["one" "two" "three"];
max-size = "1GB"; "max-size" = "1GB";
max-data = "100GB"; "max-data" = "100GB";
buffer-size = "12MiB"; "buffer-size" = "12MiB";
database = "/root/testdata_state.json"; "database" = "/root/testdata_state.json";
database-update-interval = 5.0;
log = "/root/log.jsonl";
}; };
}; };
} }

View File

@@ -1,27 +1,27 @@
[project] [project]
name = "testdata" name = "testdata"
version = "1.1.0" version = "1.1.0"
requires-python = "~=3.12, <4" requires-python = ">=3.11, <4"
dependencies = [ dependencies = [
"fastapi~=0.115.3", "fastapi==0.115.*",
"uvicorn~=0.32.0", "uvicorn==0.32.*",
"pydantic~=2.9.2", "pydantic==2.10.*",
] ]
[project.optional-dependencies] [project.optional-dependencies]
dev = [ dev = [
"pytest~=8.3.4", "pytest==8.3.*",
"mypy~=1.13.0", "mypy==1.13.*",
"pylint~=3.3.3", "pylint==3.3.*",
"requests~=2.32.3", "requests==2.32.*",
"types-requests~=2.32.0" "types-requests==2.32.*"
] ]
[project.scripts] [project.scripts]
testdata = "testdata.main:main" testdata = "testdata.main:main"
[build-system] [build-system]
requires = ["setuptools~=75.1.1"] requires = ["setuptools==75.*"]
build-backend = "setuptools.build_meta" build-backend = "setuptools.build_meta"
[tool.setuptools.packages.find] [tool.setuptools.packages.find]

View File

@@ -4,7 +4,7 @@ import asyncio
from typing_extensions import Annotated from typing_extensions import Annotated
import uvicorn import uvicorn
from fastapi import FastAPI, Request, status, HTTPException from fastapi import FastAPI, status, HTTPException
from fastapi.responses import StreamingResponse from fastapi.responses import StreamingResponse
from pydantic import BaseModel, ConfigDict, Field, BeforeValidator, ValidationError from pydantic import BaseModel, ConfigDict, Field, BeforeValidator, ValidationError
@@ -43,9 +43,9 @@ class Testdata:
max_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-size') max_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-size')
max_data: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-data') max_data: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-data')
buffer_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='buffer-size') buffer_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='buffer-size')
database: str | None = None database: str
log: str | None = Field(alias='log', default=None) log_path: str | None = Field(alias='log-path', default=None)
database_update_interval: float = Field(alias='database-update-interval', default=5) update_database_interval: float = Field(alias='update-database-interval', default=5)
_config: Config _config: Config
_api: FastAPI _api: FastAPI
@@ -61,10 +61,9 @@ class Testdata:
self._state = {'data-used': 0} self._state = {'data-used': 0}
@self._api.get('/zeros') @self._api.get('/zeros')
async def zeros(api_key: str, size: int | str, request: Request) -> StreamingResponse: async def zeros(api_key: str, size: int | str) -> StreamingResponse:
try: try:
extra = {'api_key': api_key, 'ip': request.client.host if request.client is not None else None, 'size': size} self._logger.debug('', extra={'api_key': api_key, 'size': size})
self._logger.debug('Initiated request.', extra=extra)
if api_key not in config.authorized_keys: if api_key not in config.authorized_keys:
raise HTTPException( raise HTTPException(
@@ -74,7 +73,7 @@ class Testdata:
try: try:
size = convert_to_bytes(size) size = convert_to_bytes(size)
except ValueError as err: except ValueError as err:
self._logger.warning('Invalid format for size.', extra=extra) self._logger.warning('Invalid format for size.', extra={'api_key': api_key, 'size': size})
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail='Invalid format for size.' detail='Invalid format for size.'
@@ -93,7 +92,6 @@ class Testdata:
) )
self._state['data-used'] += size self._state['data-used'] += size
self._logger.debug('Successfully processed request.', extra=extra)
return StreamingResponse( return StreamingResponse(
status_code=status.HTTP_200_OK, status_code=status.HTTP_200_OK,
content=generate_data(size, config.buffer_size), content=generate_data(size, config.buffer_size),
@@ -104,13 +102,13 @@ class Testdata:
) )
except MinSizePerRequestError as err: except MinSizePerRequestError as err:
self._logger.warning('Size if negative.', extra=extra) self._logger.warning('Size if negative.', extra={'api_key': api_key, 'size': size})
raise HTTPException( raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE, status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail='Size has to be non-negative.' detail='Size has to be non-negative.'
) from err ) from err
except MaxSizePerRequestError as err: except MaxSizePerRequestError as err:
self._logger.warning('Exceeded max size per request.', extra=extra) self._logger.warning('Exceeded max size per request.', extra={'api_key': api_key, 'size': size})
raise HTTPException( raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE, status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail=f'Exceeded max size per request of {config.max_size} Bytes.' detail=f'Exceeded max size per request of {config.max_size} Bytes.'
@@ -120,7 +118,10 @@ class Testdata:
raise err raise err
async def _update_state(self): async def _update_state(self):
mode = 'r+' if os.path.exists(self._config.database) else 'w+' if os.path.exists(self._config.database):
mode = 'r+'
else:
mode = 'w+'
with open(self._config.database, mode, encoding='utf-8') as file: with open(self._config.database, mode, encoding='utf-8') as file:
try: try:
@@ -132,21 +133,20 @@ class Testdata:
file.seek(0) file.seek(0)
json.dump(self._state, file) json.dump(self._state, file)
file.truncate() file.truncate()
await asyncio.sleep(self._config.database_update_interval) await asyncio.sleep(self._config.update_database_interval)
async def run(self, host: str, port: int) -> None: async def run(self, host: str, port: int) -> None:
try: try:
if self._config.log is not None: if self._config.log_path is not None:
logger.setup_logging(self._config.log) logger.setup_logging(self._config.log_path)
self._logger = logger.getLogger('testdata') # self._logger = logger.getLogger('testdata')
self._logger.info('Server started.') self._logger.info('Server started.')
coroutines = [asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve())] await asyncio.gather(
if self._config.database is not None: asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve()),
coroutines.append(self._update_state()) self._update_state()
)
await asyncio.gather(*coroutines)
except asyncio.exceptions.CancelledError: except asyncio.exceptions.CancelledError:
self._logger.info('Server stopped.') self._logger.info('Server stopped.')
except Exception as err: except Exception as err:

View File

@@ -99,7 +99,7 @@ def test_invalid_api_key(_server):
'max-size': '1KB', 'max-size': '1KB',
'max-data': '1KB', 'max-data': '1KB',
'buffer-size': '12MiB', 'buffer-size': '12MiB',
'database-update-interval': 0.1 'update-database-interval': 0.1
})], indirect=['_server']) })], indirect=['_server'])
def test_check_database_update(_server): def test_check_database_update(_server):
database = _server database = _server