3 Commits

Author SHA1 Message Date
5050996547 updated dependency versions 2025-01-04 01:05:40 +01:00
590d92d191 formatting 2025-01-04 00:47:21 +01:00
626b736626 Rewrite 2025-01-04 00:27:28 +01:00
6 changed files with 52 additions and 54 deletions

View File

@@ -1,19 +1,15 @@
# Simple Test Data Generator # Simple Testdata Generator
## Example Config ## Example Config
```json ```json
{ {
"binds": [ "keys": ["TESTKEY1", "TESTKEY2", "TESTKEY3"],
"127.0.0.1:9250" "max-size": "1GiB",
], "max-data": "1TiB",
"log": "-", "buffer-size": "12MiB",
"buffer-size": "4KiB", "database": "./db.json",
"max-size": "2GB", "database-update-interval": 5.0,
"keys": [ "log": "./log.jsonl"
"TESTKEY"
],
"max-data": "10GB",
"database": "database.json"
} }
``` ```

8
flake.lock generated
View File

@@ -42,17 +42,17 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1735471104, "lastModified": 1733759999,
"narHash": "sha256-0q9NGQySwDQc7RhAV2ukfnu7Gxa5/ybJ2ANT8DQrQrs=", "narHash": "sha256-463SNPWmz46iLzJKRzO3Q2b0Aurff3U1n0nYItxq7jU=",
"owner": "nixos", "owner": "nixos",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "88195a94f390381c6afcdaa933c2f6ff93959cb4", "rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
"type": "github" "type": "github"
}, },
"original": { "original": {
"owner": "nixos", "owner": "nixos",
"ref": "nixos-unstable",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "a73246e2eef4c6ed172979932bc80e1404ba2d56",
"type": "github" "type": "github"
} }
}, },

View File

@@ -2,7 +2,7 @@
description = "A webserver to create files for testing purposes"; description = "A webserver to create files for testing purposes";
inputs = { inputs = {
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable"; nixpkgs.url = "github:nixos/nixpkgs?rev=a73246e2eef4c6ed172979932bc80e1404ba2d56";
microvm = { microvm = {
url = "github:astro/microvm.nix"; url = "github:astro/microvm.nix";
@@ -174,11 +174,13 @@
port = 1234; port = 1234;
settings = { settings = {
"keys" = ["one" "two" "three"]; keys = ["one" "two" "three"];
"max-size" = "1GB"; max-size = "1GB";
"max-data" = "100GB"; max-data = "100GB";
"buffer-size" = "12MiB"; buffer-size = "12MiB";
"database" = "/root/testdata_state.json"; database = "/root/testdata_state.json";
database-update-interval = 5.0;
log = "/root/log.jsonl";
}; };
}; };
} }

View File

@@ -1,27 +1,27 @@
[project] [project]
name = "testdata" name = "testdata"
version = "1.1.0" version = "1.1.0"
requires-python = ">=3.11, <4" requires-python = "~=3.12, <4"
dependencies = [ dependencies = [
"fastapi==0.115.*", "fastapi~=0.115.3",
"uvicorn==0.32.*", "uvicorn~=0.32.0",
"pydantic==2.10.*", "pydantic~=2.9.2",
] ]
[project.optional-dependencies] [project.optional-dependencies]
dev = [ dev = [
"pytest==8.3.*", "pytest~=8.3.4",
"mypy==1.13.*", "mypy~=1.13.0",
"pylint==3.3.*", "pylint~=3.3.3",
"requests==2.32.*", "requests~=2.32.3",
"types-requests==2.32.*" "types-requests~=2.32.0"
] ]
[project.scripts] [project.scripts]
testdata = "testdata.main:main" testdata = "testdata.main:main"
[build-system] [build-system]
requires = ["setuptools==75.*"] requires = ["setuptools~=75.1.1"]
build-backend = "setuptools.build_meta" build-backend = "setuptools.build_meta"
[tool.setuptools.packages.find] [tool.setuptools.packages.find]

View File

@@ -4,7 +4,7 @@ import asyncio
from typing_extensions import Annotated from typing_extensions import Annotated
import uvicorn import uvicorn
from fastapi import FastAPI, status, HTTPException from fastapi import FastAPI, Request, status, HTTPException
from fastapi.responses import StreamingResponse from fastapi.responses import StreamingResponse
from pydantic import BaseModel, ConfigDict, Field, BeforeValidator, ValidationError from pydantic import BaseModel, ConfigDict, Field, BeforeValidator, ValidationError
@@ -43,9 +43,9 @@ class Testdata:
max_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-size') max_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-size')
max_data: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-data') max_data: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='max-data')
buffer_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='buffer-size') buffer_size: Annotated[int, BeforeValidator(to_bytes)] = Field(alias='buffer-size')
database: str database: str | None = None
log_path: str | None = Field(alias='log-path', default=None) log: str | None = Field(alias='log', default=None)
update_database_interval: float = Field(alias='update-database-interval', default=5) database_update_interval: float = Field(alias='database-update-interval', default=5)
_config: Config _config: Config
_api: FastAPI _api: FastAPI
@@ -61,9 +61,10 @@ class Testdata:
self._state = {'data-used': 0} self._state = {'data-used': 0}
@self._api.get('/zeros') @self._api.get('/zeros')
async def zeros(api_key: str, size: int | str) -> StreamingResponse: async def zeros(api_key: str, size: int | str, request: Request) -> StreamingResponse:
try: try:
self._logger.debug('', extra={'api_key': api_key, 'size': size}) extra = {'api_key': api_key, 'ip': request.client.host if request.client is not None else None, 'size': size}
self._logger.debug('Initiated request.', extra=extra)
if api_key not in config.authorized_keys: if api_key not in config.authorized_keys:
raise HTTPException( raise HTTPException(
@@ -73,7 +74,7 @@ class Testdata:
try: try:
size = convert_to_bytes(size) size = convert_to_bytes(size)
except ValueError as err: except ValueError as err:
self._logger.warning('Invalid format for size.', extra={'api_key': api_key, 'size': size}) self._logger.warning('Invalid format for size.', extra=extra)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST, status_code=status.HTTP_400_BAD_REQUEST,
detail='Invalid format for size.' detail='Invalid format for size.'
@@ -92,6 +93,7 @@ class Testdata:
) )
self._state['data-used'] += size self._state['data-used'] += size
self._logger.debug('Successfully processed request.', extra=extra)
return StreamingResponse( return StreamingResponse(
status_code=status.HTTP_200_OK, status_code=status.HTTP_200_OK,
content=generate_data(size, config.buffer_size), content=generate_data(size, config.buffer_size),
@@ -102,13 +104,13 @@
) )
except MinSizePerRequestError as err: except MinSizePerRequestError as err:
self._logger.warning('Size if negative.', extra={'api_key': api_key, 'size': size}) self._logger.warning('Size if negative.', extra=extra)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE, status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail='Size has to be non-negative.' detail='Size has to be non-negative.'
) from err ) from err
except MaxSizePerRequestError as err: except MaxSizePerRequestError as err:
self._logger.warning('Exceeded max size per request.', extra={'api_key': api_key, 'size': size}) self._logger.warning('Exceeded max size per request.', extra=extra)
raise HTTPException( raise HTTPException(
status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE, status_code=status.HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE,
detail=f'Exceeded max size per request of {config.max_size} Bytes.' detail=f'Exceeded max size per request of {config.max_size} Bytes.'
@@ -118,10 +120,7 @@ class Testdata:
raise err raise err
async def _update_state(self): async def _update_state(self):
if os.path.exists(self._config.database): mode = 'r+' if os.path.exists(self._config.database) else 'w+'
mode = 'r+'
else:
mode = 'w+'
with open(self._config.database, mode, encoding='utf-8') as file: with open(self._config.database, mode, encoding='utf-8') as file:
try: try:
@@ -133,20 +132,21 @@
file.seek(0) file.seek(0)
json.dump(self._state, file) json.dump(self._state, file)
file.truncate() file.truncate()
await asyncio.sleep(self._config.update_database_interval) await asyncio.sleep(self._config.database_update_interval)
async def run(self, host: str, port: int) -> None: async def run(self, host: str, port: int) -> None:
try: try:
if self._config.log_path is not None: if self._config.log is not None:
logger.setup_logging(self._config.log_path) logger.setup_logging(self._config.log)
# self._logger = logger.getLogger('testdata') self._logger = logger.getLogger('testdata')
self._logger.info('Server started.') self._logger.info('Server started.')
await asyncio.gather( coroutines = [asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve())]
asyncio.create_task(uvicorn.Server(uvicorn.Config(self._api, host, port)).serve()), if self._config.database is not None:
self._update_state() coroutines.append(self._update_state())
)
await asyncio.gather(*coroutines)
except asyncio.exceptions.CancelledError: except asyncio.exceptions.CancelledError:
self._logger.info('Server stopped.') self._logger.info('Server stopped.')
except Exception as err: except Exception as err:

View File

@@ -99,7 +99,7 @@ def test_invalid_api_key(_server):
'max-size': '1KB', 'max-size': '1KB',
'max-data': '1KB', 'max-data': '1KB',
'buffer-size': '12MiB', 'buffer-size': '12MiB',
'update-database-interval': 0.1 'database-update-interval': 0.1
})], indirect=['_server']) })], indirect=['_server'])
def test_check_database_update(_server): def test_check_database_update(_server):
database = _server database = _server