4 Commits
0.1.0 ... 0.2.1

Author SHA1 Message Date
29bdad09bf reduced Docker image size
All checks were successful
CI / Build Pip package (push) Successful in 16s
CI / Build Docker image (push) Successful in 3m6s
2024-10-25 07:25:13 +08:00
16dbd3a82a added nginx configuration file
All checks were successful
CI / Build Pip package (push) Successful in 16s
CI / Build Docker image (push) Successful in 3m33s
2024-10-24 23:55:21 +08:00
33a3858b02 added cli 2024-10-24 23:26:06 +08:00
e2e4083321 switch from aiohttp to httpx 2024-10-24 23:18:23 +08:00
13 changed files with 387 additions and 330 deletions

View File

@@ -9,10 +9,10 @@ RUN adduser -D luser
USER luser
WORKDIR /home/luser
COPY --chown=luser:users ./requirements-dev.txt ./requirements-dev.txt
COPY --chown=luser:users ./requirements-dev.txt ./requirements-run.txt
COPY --chown=luser:users ./requirements-run.txt ./requirements-run.txt
WORKDIR /home/luser/
RUN python -m venv .venv
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel -r requirements-dev.txt pygraphviz
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel pygraphviz
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip install -r requirements-dev.txt /home/luser/wheel/*.whl
COPY --chown=luser:users . /home/luser/bugis
WORKDIR /home/luser/bugis
@@ -21,12 +21,12 @@ RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 /home/lus
FROM base AS release
RUN mkdir /srv/http
RUN adduser -D -h /var/bugis -u 1000 bugis
RUN adduser -D -h /var/lib/bugis -u 1000 bugis
USER bugis
WORKDIR /var/bugis
WORKDIR /var/lib/bugis
COPY --chown=bugis:users conf/pip.conf ./.pip/pip.conf
RUN python -m venv .venv
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/dist,target=/dist .venv/bin/pip install /dist/*.whl
VOLUME /srv/http
WORKDIR /srv/http
@@ -37,6 +37,6 @@ ENV GRANIAN_INTERFACE=asgi
ENV GRANIAN_LOOP=asyncio
ENV GRANIAN_LOG_ENABLED=false
ENTRYPOINT ["/var/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"]
ENTRYPOINT ["/var/lib/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"]
EXPOSE 8000/tcp

14
conf/nginx-bugis.conf Normal file
View File

@@ -0,0 +1,14 @@
# Reverse proxy in front of the Granian ASGI server (service name "granian",
# port 8000 — presumably resolved via Docker Compose networking; confirm).
server {
    # Plain-text listener; TLS termination, if any, happens upstream.
    listen 8080;
    # NOTE(review): "http2 on" without TLS enables h2c only for clients that
    # negotiate it; browsers require TLS for HTTP/2 — confirm this is intended.
    http2 on;
    server_name localhost;
    location / {
        proxy_pass http://granian:8000;
        # Keep-alive capable HTTP/1.1 towards the backend.
        proxy_http_version 1.1;
        # Preserve the original Host and client address for the application.
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_read_timeout 60s;
    }
}

View File

@@ -29,7 +29,7 @@ dependencies = [
"PyYAML",
"pygraphviz",
"aiofiles",
"aiohttp[speedups]"
"httpx[http2]"
]
[project.optional-dependencies]
@@ -60,3 +60,6 @@ strict = true
[tool.setuptools_scm]
version_file = "src/bugis/_version.py"
[project.scripts]
bugis = "bugis.cli:main"

View File

@@ -1,39 +1,27 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt --strip-extras pyproject.toml
# pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
#
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0
# via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
anyio==4.6.2.post1
# via httpx
asttokens==2.4.1
# via stack-data
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
backports-tarfile==1.2.0
# via jaraco-context
brotli==1.1.0
# via aiohttp
build==1.2.2.post1
# via bugis (pyproject.toml)
certifi==2024.8.30
# via requests
cffi==1.17.1
# via
# cryptography
# pycares
# httpcore
# httpx
# requests
cffi==1.17.1
# via cryptography
charset-normalizer==3.4.0
# via requests
click==8.1.7
@@ -46,24 +34,29 @@ decorator==5.1.1
# ipython
docutils==0.21.2
# via readme-renderer
exceptiongroup==1.2.2
# via ipython
executing==2.1.0
# via stack-data
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
granian==1.6.1
# via bugis (pyproject.toml)
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10
# via
# anyio
# httpx
# requests
# yarl
importlib-metadata==8.5.0
# via
# keyring
# twine
# via twine
ipdb==0.13.13
# via bugis (pyproject.toml)
ipython==8.28.0
@@ -94,11 +87,7 @@ more-itertools==10.5.0
# via
# jaraco-classes
# jaraco-functools
multidict==6.1.0
# via
# aiohttp
# yarl
mypy==1.12.1
mypy==1.13.0
# via bugis (pyproject.toml)
mypy-extensions==1.0.0
# via mypy
@@ -114,16 +103,12 @@ pkginfo==1.10.0
# via twine
prompt-toolkit==3.0.48
# via ipython
propcache==0.2.0
# via yarl
ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
pwo==0.0.3
pwo==0.0.4
# via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22
# via cffi
pygments==2.18.0
@@ -148,32 +133,28 @@ requests-toolbelt==1.0.0
# via twine
rfc3986==2.0.0
# via twine
rich==13.9.2
rich==13.9.3
# via twine
secretstorage==3.3.3
# via keyring
six==1.16.0
# via asttokens
sniffio==1.3.1
# via
# anyio
# httpx
stack-data==0.6.3
# via ipython
tomli==2.0.2
# via
# build
# ipdb
# mypy
traitlets==5.14.3
# via
# ipython
# matplotlib-inline
twine==5.1.1
# via bugis (pyproject.toml)
typing-extensions==4.7.1
typing-extensions==4.12.2
# via
# ipython
# multidict
# mypy
# pwo
# rich
urllib3==2.2.3
# via
# requests
@@ -184,7 +165,5 @@ watchdog==5.0.3
# via bugis (pyproject.toml)
wcwidth==0.2.13
# via prompt-toolkit
yarl==1.16.0
# via aiohttp
zipp==3.20.2
# via importlib-metadata

View File

@@ -1,66 +1,57 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt --strip-extras pyproject.toml
# pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml
#
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0
# via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
# via aiohttp
cffi==1.17.1
# via pycares
anyio==4.6.2.post1
# via httpx
certifi==2024.8.30
# via
# httpcore
# httpx
click==8.1.7
# via granian
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
granian==1.6.1
# via bugis (pyproject.toml)
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10
# via yarl
# via
# anyio
# httpx
markdown==3.7
# via bugis (pyproject.toml)
multidict==6.1.0
# via
# aiohttp
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
pwo==0.0.4
# via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22
# via cffi
pygments==2.18.0
# via bugis (pyproject.toml)
pygraphviz==1.14
# via bugis (pyproject.toml)
pyyaml==6.0.2
# via bugis (pyproject.toml)
typing-extensions==4.7.1
sniffio==1.3.1
# via
# multidict
# pwo
# anyio
# httpx
typing-extensions==4.12.2
# via pwo
uvloop==0.21.0
# via granian
watchdog==5.0.3
# via bugis (pyproject.toml)
yarl==1.16.0
# via aiohttp

View File

@@ -1,60 +1,51 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml
# pip-compile --output-file=requirements.txt pyproject.toml
#
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0
# via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
# via aiohttp
cffi==1.17.1
# via pycares
frozenlist==1.4.1
anyio==4.6.2.post1
# via httpx
certifi==2024.8.30
# via
# aiohttp
# aiosignal
# httpcore
# httpx
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10
# via yarl
# via
# anyio
# httpx
markdown==3.7
# via bugis (pyproject.toml)
multidict==6.1.0
# via
# aiohttp
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
pwo==0.0.4
# via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22
# via cffi
pygments==2.18.0
# via bugis (pyproject.toml)
pygraphviz==1.14
# via bugis (pyproject.toml)
pyyaml==6.0.2
# via bugis (pyproject.toml)
typing-extensions==4.7.1
sniffio==1.3.1
# via
# multidict
# pwo
# anyio
# httpx
typing-extensions==4.12.2
# via pwo
watchdog==5.0.3
# via bugis (pyproject.toml)
yarl==1.16.0
# via aiohttp

5
src/bugis/__main__.py Normal file
View File

@@ -0,0 +1,5 @@
"""Package entry point: lets the renderer run as ``python -m bugis``."""
import sys

# Must be a relative (or fully-qualified) import: this module lives inside the
# ``bugis`` package, so a bare ``from cli import main`` would raise
# ModuleNotFoundError at runtime. The unused ``argparse`` import was dropped.
from .cli import main

main(sys.argv[1:])

View File

@@ -14,14 +14,14 @@ from pwo import Maybe
from .server import Server
from asyncio import get_running_loop
from .asgi_utils import decode_headers
from typing import Optional
from typing import Optional, Awaitable, Callable, Any, Mapping
log = logging.getLogger('access')
log.propagate = False
_server : Optional[Server] = None
_server: Optional[Server] = None
async def application(scope, receive, send):
async def application(scope, receive, send : Callable[[Mapping[str, Any]], Awaitable[None]]):
global _server
if scope['type'] == 'lifespan':
while True:
@@ -49,6 +49,9 @@ async def application(scope, receive, send):
.filter(lambda it: it.get('type') == 'http.response.start')
.if_present(maybe_log))
return result
pathsend = (Maybe.of_nullable(scope.get('extensions'))
.map(lambda it: it.get("http.response.pathsend"))
.is_present)
await _server.handle_request(
scope['method'],
scope['path'],
@@ -58,6 +61,7 @@ async def application(scope, receive, send):
.map(lambda it: it.decode())
.or_else(None),
Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
wrapped_send
wrapped_send,
pathsend
)

View File

@@ -1,54 +1,15 @@
import asyncio
from asyncio import Queue, AbstractEventLoop, Future, Task, gather
from logging import getLogger, Logger
from pathlib import Path
from pwo import TopicManager, Subscriber
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from pathlib import Path
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather
from typing import Callable, Optional
from logging import getLogger, Logger
log: Logger = getLogger(__name__)
class Subscription:
_unsubscribe_callback: Callable[['Subscription'], None]
_event: Optional[Future]
_loop: AbstractEventLoop
def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
self._unsubscribe_callback = unsubscribe
self._event: Optional[Future] = None
self._loop = loop
def unsubscribe(self) -> None:
self._event.cancel()
self._unsubscribe_callback(self)
log.debug('Deleted subscription %s', id(self))
async def wait(self, tout: float) -> bool:
self._event = self._loop.create_future()
def callback():
if not self._event.done():
self._event.set_result(False)
handle = self._loop.call_later(tout, callback)
try:
log.debug('Subscription %s is waiting for an event', id(self))
return await self._event
except CancelledError:
return False
finally:
handle.cancel()
def notify(self) -> None:
log.debug('Subscription %s notified', id(self))
if not self._event.done():
self._event.set_result(True)
def reset(self) -> None:
self._event = self._loop.create_future()
class _EventHandler(FileSystemEventHandler):
_queue: Queue
@@ -67,22 +28,6 @@ class _EventHandler(FileSystemEventHandler):
self._loop.call_soon_threadsafe(self._queue.put_nowait, event)
class AsyncQueueIterator:
_queue: Queue
def __init__(self, queue: Queue):
self._queue = queue
def __aiter__(self):
return self
async def __anext__(self):
item = await self._queue.get()
if item is None:
raise StopAsyncIteration
return item
observer = Observer()
@@ -96,55 +41,11 @@ def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
observer.join()
loop.call_soon_threadsafe(queue.put_nowait, None)
class SubscriptionManager:
_loop: AbstractEventLoop
_queue: Queue
_subscriptions: dict[str, set[Subscription]]
def __init__(self, loop: AbstractEventLoop):
self._subscriptions: dict[str, set[Subscription]] = dict()
self._loop = loop
self._queue = Queue()
def subscribe(self, path: str) -> Subscription:
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.setdefault(path, set())
def unsubscribe_callback(subscription):
subscriptions_per_path.remove(subscription)
log.debug('Removed subscription %s to path %s', id(result), path)
result = Subscription(unsubscribe_callback, self._loop)
log.debug('Created subscription %s to path %s', id(result), path)
subscriptions_per_path.add(result)
return result
def _notify_subscriptions(self, path):
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.get(path, None)
if subscriptions_per_path:
log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}")
for s in subscriptions_per_path:
s.notify()
async def process_events(self):
async for evt in AsyncQueueIterator(self._queue):
log.debug(f"Processed event for path '{evt}'")
self._notify_subscriptions(evt)
log.debug(f"Event processor has completed")
def post_event(self, path):
def callback():
self._queue.put_nowait(path)
log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}")
self._loop.call_soon_threadsafe(callback)
class FileWatcher(PatternMatchingEventHandler):
_subscription_manager: SubscriptionManager
_topic_manager: TopicManager
_loop: AbstractEventLoop
_subscription_manager_loop: Task
_topic_manager_loop: Task
_running_tasks : Future
def __init__(self, path):
@@ -155,29 +56,29 @@ class FileWatcher(PatternMatchingEventHandler):
self._observer: Observer = Observer()
self._observer.schedule(self, path=path, recursive=True)
self._loop = asyncio.get_running_loop()
self._subscription_manager = SubscriptionManager(self._loop)
self._topic_manager = TopicManager(self._loop)
self._running_tasks = gather(
self._loop.run_in_executor(None, self._observer.start),
self._loop.create_task(self._subscription_manager.process_events())
self._loop.create_task(self._topic_manager.process_events())
)
async def stop(self) -> None:
def _observer_stop():
self._observer.stop()
self._observer.join()
self._subscription_manager.post_event(None)
self._topic_manager.post_event(None)
await self._loop.run_in_executor(None, _observer_stop)
await self._running_tasks
def subscribe(self, path: str) -> Subscription:
return self._subscription_manager.subscribe(path)
def subscribe(self, path: str) -> Subscriber:
return self._topic_manager.subscribe(path)
def on_any_event(self, event: FileSystemEvent) -> None:
what = "directory" if event.is_directory else "file"
def post_event(path):
self._subscription_manager.post_event(path)
self._topic_manager.post_event(path)
if isinstance(event, FileClosedEvent):
log.debug("Closed %s: %s", what, event.src_path)

41
src/bugis/cli.py Normal file
View File

@@ -0,0 +1,41 @@
import argparse
from dataclasses import asdict
from os import environ
from pathlib import Path
from typing import Optional, Sequence
import yaml
from granian import Granian
from pwo import Maybe
from .configuration import Configuration
def main(args: Optional[Sequence[str]] = None) -> None:
    """CLI entry point: parse arguments, load configuration, and serve the
    ``bugis.asgi:application`` ASGI app with Granian.

    :param args: command-line argument vector; ``None`` makes argparse fall
        back to ``sys.argv[1:]``.
    """
    parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files")
    # Default config path: $XDG_CONFIG_HOME/bugis/bugis.yaml, falling back to
    # $HOME/.config/bugis/bugis.yaml, and used only if the file exists.
    # NOTE(review): Path(environ.get('HOME')) raises TypeError when HOME is
    # unset — confirm that is acceptable for this CLI.
    # NOTE(review): `.filter(Path.exists)` is chained after `.or_else_get(...)`;
    # this assumes pwo's or_else_get returns a Maybe-like wrapper rather than
    # the unwrapped Path — verify against the pwo API.
    default_configuration_file = (Maybe.of(environ.get('XDG_CONFIG_HOME'))
                                  .map(lambda it: Path(it))
                                  .map(lambda it: it / 'bugis' / 'bugis.yaml')
                                  .or_else_get(lambda: Path(environ.get('HOME')) / '.config' / 'bugis' / 'bugis.yaml')
                                  .filter(Path.exists)
                                  .or_else(None)
                                  )
    parser.add_argument(
        '-c',
        '--configuration',
        help='Path to the configuration file',
        default=default_configuration_file,
        type=Path,
    )
    args = parser.parse_args(args)

    def parse(configuration: Path) -> Configuration:
        # Load the YAML config file and build a typed Configuration from it.
        with open(configuration, 'r') as f:
            return Configuration.from_dict(yaml.safe_load(f))

    # Fall back to the default Configuration singleton when no file was given.
    conf = Maybe.of_nullable(args.configuration).map(parse).or_else(Configuration.instance)
    # Granian's constructor options come straight from the dataclass fields of
    # conf.granian (see Configuration.GranianConfiguration).
    Granian(
        "bugis.asgi:application",
        **asdict(conf.granian)
    ).serve()

View File

@@ -1,45 +1,140 @@
import os
from os import environ
from pathlib import Path
from dataclasses import dataclass
from dataclasses import dataclass, field, asdict
from granian.constants import Loops, Interfaces, ThreadModes, HTTPModes, StrEnum
from granian.log import LogLevels
from granian.http import HTTP1Settings, HTTP2Settings
from typing import Optional, Sequence, Dict, Any
from pwo import classproperty, Maybe
from yaml import add_representer, SafeDumper, SafeLoader
class ClassPropertyDescriptor(object):
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@dataclass(frozen=True)
class Configuration:
logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml')
plant_uml_server_address : str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
logging_configuration_file: str = environ.get("LOGGING_CONFIGURATION_FILE",
Path(__file__).parent / 'default-conf' / 'logging.yaml')
plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
@classproperty
def instance(cls) -> 'Configuration':
return Configuration()
@dataclass(frozen=True)
class GranianConfiguration:
address: str = '127.0.0.1'
port: int = 8000
interface: str = Interfaces.ASGI
workers: int = 1
threads: int = 1
blocking_threads: Optional[int] = None
threading_mode: ThreadModes = ThreadModes.workers
loop: Loops = Loops.auto
loop_opt: bool = False
http: HTTPModes = HTTPModes.auto
websockets: bool = True
backlog: int = 1024
backpressure: Optional[int] = None
http1_settings: Optional[HTTP1Settings] = None
http2_settings: Optional[HTTP2Settings] = None
log_enabled: bool = True
log_level: LogLevels = LogLevels.info
log_dictconfig: Optional[Dict[str, Any]] = None
log_access: bool = False
log_access_format: Optional[str] = None
ssl_cert: Optional[Path] = None
ssl_key: Optional[Path] = None
ssl_key_password: Optional[str] = None
url_path_prefix: Optional[str] = None
respawn_failed_workers: bool = False
respawn_interval: float = 3.5
workers_lifetime: Optional[int] = None
factory: bool = False
reload: bool = False
reload_paths: Optional[Sequence[Path]] = None
reload_ignore_dirs: Optional[Sequence[str]] = None
reload_ignore_patterns: Optional[Sequence[str]] = None
reload_ignore_paths: Optional[Sequence[Path]] = None
process_name: Optional[str] = None
pid_file: Optional[Path] = None
@staticmethod
def from_dict(d) -> 'Configuration.GranianConfiguration':
return Configuration.GranianConfiguration(**{k: v for k, v in dict(
address=d.get('address', None),
port=d.get('port', None),
interface=Maybe.of_nullable(d.get('interface')).map(lambda it: Interfaces(it)).or_else(None),
workers=d.get('workers', None),
threads=d.get('threads', None),
blocking_threads=d.get('blocking_threads', None),
threading_mode=Maybe.of_nullable(d.get('threading_modes')).map(lambda it: ThreadModes(it)).or_else(None),
loop=Maybe.of_nullable(d.get('loop')).map(lambda it: Loops(it)).or_else(None),
loop_opt=d.get('loop_opt', None),
http=Maybe.of_nullable(d.get('http')).map(lambda it: HTTPModes(it)).or_else(None),
websockets=d.get('websockets', None),
backlog=d.get('backlog', None),
backpressure=d.get('backpressure', None),
http1_settings=Maybe.of_nullable(d.get('http1_settings')).map(lambda it: HTTP1Settings(**it)).or_else(None),
http2_settings=Maybe.of_nullable(d.get('http2_settings')).map(lambda it: HTTP2Settings(**it)).or_else(None),
log_enabled=d.get('log_enabled', None),
log_level=Maybe.of_nullable(d.get('log_level')).map(lambda it: LogLevels(it)).or_else(None),
# log_dictconfig: Optional[Dict[str, Any]] = None,
log_access=d.get('log_access', None),
log_access_format=d.get('log_access_format', None),
ssl_cert=d.get('ssl_cert', None),
ssl_key=d.get('ssl_key', None),
ssl_key_password=d.get('ssl_key_password', None),
url_path_prefix=d.get('url_path_prefix', None),
respawn_failed_workers=d.get('respawn_failed_workers', None),
respawn_interval=d.get('respawn_interval', None),
workers_lifetime=d.get('workers_lifetime', None),
factory=d.get('factory', None),
reload=d.get('reload', None),
reload_paths=d.get('reload_paths', None),
reload_ignore_dirs=d.get('reload_ignore_dirs', None),
reload_ignore_patterns=d.get('reload_ignore_patterns', None),
reload_ignore_paths=d.get('reload_ignore_paths', None),
process_name=d.get('process_name', None),
pid_file=d.get('pid_file', None),
).items() if v is not None})
granian: GranianConfiguration = GranianConfiguration()
@staticmethod
def from_dict(d) -> 'Configuration':
return Configuration(
**{k: v for k, v in dict(
logging_configuration_file=d.get('logging_configuration_file', None),
plant_uml_server_address=d.get('plant_uml_server_address', None),
granian=Maybe.of_nullable(d.get('granian'))
.map(Configuration.GranianConfiguration.from_dict)
.or_else(None)
).items() if v is not None
}
)
def to_yaml(self, stream):
dumper = SafeDumper(stream)
dumper.add_representer(Configuration, lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(Configuration.GranianConfiguration,
lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(LogLevels, lambda dumper, level: dumper.represent_str(level.lower()))
dumper.add_multi_representer(Path, lambda dumper, path: dumper.represent_str(str(path)))
dumper.add_multi_representer(StrEnum, lambda dumper, str_enum: dumper.represent_str(str(str_enum)))
dumper.add_representer(HTTP1Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
dumper.add_representer(HTTP2Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
try:
dumper.open()
dumper.represent(Configuration.instance)
dumper.close()
finally:
dumper.dispose()
@staticmethod
def from_yaml(stream) -> 'Configuration':
loader = SafeLoader(stream)
try:
conf = loader.get_single_data()
return Configuration.from_dict(conf)
finally:
loader.dispose()

View File

@@ -1,17 +1,19 @@
from typing import TYPE_CHECKING
from aiofiles import open as async_open
from aiohttp import ClientSession
from .configuration import Configuration
from yarl import URL
if TYPE_CHECKING:
from _typeshed import StrOrBytesPath
from httpx import AsyncClient, URL
from typing import Callable, Awaitable
from urllib.parse import urljoin
async def render_plant_uml(path: 'StrOrBytesPath') -> bytes:
async with ClientSession() as session:
url = URL(Configuration.instance.plant_uml_server_address) / 'svg'
chunk_size = 0x10000
async def render_plant_uml(client: AsyncClient, path: 'StrOrBytesPath', send : Callable[[bytes], Awaitable[None]]):
url = URL(urljoin(Configuration.instance.plant_uml_server_address, 'svg'))
async with async_open(path, 'rb') as file:
source = await file.read()
async with session.post(url, data=source) as response:
response = await client.post(url, content=source)
response.raise_for_status()
return await response.read()
async for chunk in response.aiter_bytes(chunk_size=chunk_size):
await send(chunk)

View File

@@ -6,7 +6,7 @@ from io import BytesIO
from mimetypes import init as mimeinit, guess_type
from os import getcwd
from os.path import join, normpath, splitext, relpath, basename
from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any
from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any, Mapping
import pygraphviz as pgv
from aiofiles import open as async_open
@@ -14,6 +14,7 @@ from aiofiles.base import AiofilesContextManager
from aiofiles.os import listdir
from aiofiles.ospath import exists, isdir, isfile, getmtime
from aiofiles.threadpool.binary import AsyncBufferedReader
from httpx import AsyncClient
from pwo import Maybe
from .asgi_utils import encode_headers
@@ -29,11 +30,12 @@ mimeinit()
cwd: 'StrOrBytesPath' = getcwd()
def completed_future[T](result : T) -> Future[T]:
def completed_future[T](result: T) -> Future[T]:
future = Future()
future.set_result(result)
return future
def has_extension(filepath, extension):
_, ext = splitext(filepath)
return ext == extension
@@ -46,13 +48,19 @@ def is_markdown(filepath):
def is_dotfile(filepath):
return has_extension(filepath, ".dot")
def is_plant_uml(filepath):
return has_extension(filepath, ".puml")
logger = logging.getLogger(__name__)
class Server:
_loop : AbstractEventLoop
root_dir: 'StrOrBytesPath'
prefix: Optional['StrOrBytesPath']
_loop: AbstractEventLoop
_client: AsyncClient
def __init__(self,
root_dir: 'StrOrBytesPath' = getcwd(),
@@ -63,8 +71,16 @@ class Server:
self.file_watcher = FileWatcher(cwd)
self.prefix = prefix and normpath(f'{prefix.decode()}')
self._loop = loop
self._client = AsyncClient()
async def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], send):
async def handle_request(self,
method: str,
url_path: str,
etag: Optional[str],
query_string: Optional[str],
send: Callable[[Mapping[str, Any]], Awaitable[None]],
pathsend: bool = False
):
if method != 'GET':
await send({
'type': 'http.response.start',
@@ -84,11 +100,12 @@ class Server:
etag, digest = await self.compute_etag_and_digest(
etag,
url_path,
lambda: AiofilesContextManager(completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
lambda: AiofilesContextManager(
completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
lambda: completed_future(mtime)
)
if etag and etag == digest:
await self.not_modified(send, digest, ('Cache-Control', 'must-revalidate, max-age=86400'))
await self.not_modified(send, digest, 'must-revalidate, max-age=86400')
return
elif content:
mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
@@ -147,6 +164,7 @@ class Server:
result = graph.draw(None, format="svg", prog="dot")
logger.debug("Completed Graphviz rendering for file '%s'", filepath)
return result
body = await self._loop.run_in_executor(None, render_graphviz, path)
await send({
'type': 'http.response.start',
@@ -163,7 +181,6 @@ class Server:
})
elif is_plant_uml(path):
logger.debug("Starting PlantUML rendering for file '%s'", path)
body = await render_plant_uml(path)
logger.debug("Completed PlantUML rendering for file '%s'", path)
await send({
'type': 'http.response.start',
@@ -174,13 +191,18 @@ class Server:
'Cache-Control': 'no-cache'
})
})
await render_plant_uml(self._client, path, lambda chunk: send({
'type': 'http.response.body',
'body': chunk,
'more_body': True
}))
await send({
'type': 'http.response.body',
'body': body
'body': '',
'more_body': False
})
else:
async def read_file(file_path):
buffer_size = 0x10000
async def read_file(file_path, buffer_size=0x10000):
async with async_open(file_path, 'rb') as f:
while True:
result = await f.read(buffer_size)
@@ -197,7 +219,12 @@ class Server:
'Cache-Control': 'no-cache'
})
})
if pathsend:
await send({
'type': 'http.response.pathsend',
'path': path
})
else:
async for chunk in read_file(path):
await send({
'type': 'http.response.body',
@@ -227,7 +254,7 @@ class Server:
await self.not_found(send)
@staticmethod
async def stream_hash(source: AsyncBufferedReader, bufsize=0x1000) -> bytes:
async def stream_hash(source: AsyncBufferedReader, bufsize=0x10000) -> bytes:
if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5()
@@ -239,7 +266,7 @@ class Server:
return md5.digest()
@staticmethod
async def file_hash(filepath, bufsize=0x1000) -> bytes:
async def file_hash(filepath, bufsize=0x10000) -> bytes:
if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5()
@@ -321,7 +348,7 @@ class Server:
})
@staticmethod
async def not_modified(send, digest: str, cache_control: str ='no-cache') -> []:
async def not_modified(send, digest: str, cache_control: str = 'no-cache') -> []:
await send({
'type': 'http.response.start',
'status': 304,
@@ -362,15 +389,19 @@ class Server:
for entry in sorted(await listdir(path)):
if await filter(join(path, entry)):
yield entry
return result()
async for entry in await ls(isdir):
result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
async def file_filter(entry: str) -> bool:
return await isfile(entry) and is_markdown(entry)
async for entry in await ls(file_filter):
result += '<li><a href="' + entry + '"/>' + entry + '</li>'
return result
async def stop(self):
await self.file_watcher.stop()
await self._client.aclose()