3 Commits
0.1.0 ... 0.2.0

Author SHA1 Message Date
16dbd3a82a added nginx configuration file
All checks were successful
CI / Build Pip package (push) Successful in 16s
CI / Build Docker image (push) Successful in 3m33s
2024-10-24 23:55:21 +08:00
33a3858b02 added cli 2024-10-24 23:26:06 +08:00
e2e4083321 switch from aiohttp to httpx 2024-10-24 23:18:23 +08:00
13 changed files with 382 additions and 325 deletions

View File

@@ -9,7 +9,7 @@ RUN adduser -D luser
USER luser USER luser
WORKDIR /home/luser WORKDIR /home/luser
COPY --chown=luser:users ./requirements-dev.txt ./requirements-dev.txt COPY --chown=luser:users ./requirements-dev.txt ./requirements-dev.txt
COPY --chown=luser:users ./requirements-dev.txt ./requirements-run.txt COPY --chown=luser:users ./requirements-run.txt ./requirements-run.txt
WORKDIR /home/luser/ WORKDIR /home/luser/
RUN python -m venv .venv RUN python -m venv .venv
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel -r requirements-dev.txt pygraphviz RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel -r requirements-dev.txt pygraphviz

14
conf/nginx-bugis.conf Normal file
View File

@@ -0,0 +1,14 @@
server {
listen 8080;
http2 on;
server_name localhost;
location / {
proxy_pass http://granian:8000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_read_timeout 60s;
}
}

View File

@@ -29,7 +29,7 @@ dependencies = [
"PyYAML", "PyYAML",
"pygraphviz", "pygraphviz",
"aiofiles", "aiofiles",
"aiohttp[speedups]" "httpx[http2]"
] ]
[project.optional-dependencies] [project.optional-dependencies]
@@ -60,3 +60,6 @@ strict = true
[tool.setuptools_scm] [tool.setuptools_scm]
version_file = "src/bugis/_version.py" version_file = "src/bugis/_version.py"
[project.scripts]
bugis = "bugis.cli:main"

View File

@@ -1,39 +1,27 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt --strip-extras pyproject.toml # pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3 anyio==4.6.2.post1
# via aiohttp # via httpx
aiohttp==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
asttokens==2.4.1 asttokens==2.4.1
# via stack-data # via stack-data
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
backports-tarfile==1.2.0
# via jaraco-context
brotli==1.1.0
# via aiohttp
build==1.2.2.post1 build==1.2.2.post1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
certifi==2024.8.30 certifi==2024.8.30
# via requests
cffi==1.17.1
# via # via
# cryptography # httpcore
# pycares # httpx
# requests
cffi==1.17.1
# via cryptography
charset-normalizer==3.4.0 charset-normalizer==3.4.0
# via requests # via requests
click==8.1.7 click==8.1.7
@@ -46,24 +34,29 @@ decorator==5.1.1
# ipython # ipython
docutils==0.21.2 docutils==0.21.2
# via readme-renderer # via readme-renderer
exceptiongroup==1.2.2
# via ipython
executing==2.1.0 executing==2.1.0
# via stack-data # via stack-data
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
granian==1.6.1 granian==1.6.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10 idna==3.10
# via # via
# anyio
# httpx
# requests # requests
# yarl
importlib-metadata==8.5.0 importlib-metadata==8.5.0
# via # via twine
# keyring
# twine
ipdb==0.13.13 ipdb==0.13.13
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
ipython==8.28.0 ipython==8.28.0
@@ -94,11 +87,7 @@ more-itertools==10.5.0
# via # via
# jaraco-classes # jaraco-classes
# jaraco-functools # jaraco-functools
multidict==6.1.0 mypy==1.13.0
# via
# aiohttp
# yarl
mypy==1.12.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
mypy-extensions==1.0.0 mypy-extensions==1.0.0
# via mypy # via mypy
@@ -114,16 +103,12 @@ pkginfo==1.10.0
# via twine # via twine
prompt-toolkit==3.0.48 prompt-toolkit==3.0.48
# via ipython # via ipython
propcache==0.2.0
# via yarl
ptyprocess==0.7.0 ptyprocess==0.7.0
# via pexpect # via pexpect
pure-eval==0.2.3 pure-eval==0.2.3
# via stack-data # via stack-data
pwo==0.0.3 pwo==0.0.4
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22 pycparser==2.22
# via cffi # via cffi
pygments==2.18.0 pygments==2.18.0
@@ -148,32 +133,28 @@ requests-toolbelt==1.0.0
# via twine # via twine
rfc3986==2.0.0 rfc3986==2.0.0
# via twine # via twine
rich==13.9.2 rich==13.9.3
# via twine # via twine
secretstorage==3.3.3 secretstorage==3.3.3
# via keyring # via keyring
six==1.16.0 six==1.16.0
# via asttokens # via asttokens
sniffio==1.3.1
# via
# anyio
# httpx
stack-data==0.6.3 stack-data==0.6.3
# via ipython # via ipython
tomli==2.0.2
# via
# build
# ipdb
# mypy
traitlets==5.14.3 traitlets==5.14.3
# via # via
# ipython # ipython
# matplotlib-inline # matplotlib-inline
twine==5.1.1 twine==5.1.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
typing-extensions==4.7.1 typing-extensions==4.12.2
# via # via
# ipython
# multidict
# mypy # mypy
# pwo # pwo
# rich
urllib3==2.2.3 urllib3==2.2.3
# via # via
# requests # requests
@@ -184,7 +165,5 @@ watchdog==5.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
wcwidth==0.2.13 wcwidth==0.2.13
# via prompt-toolkit # via prompt-toolkit
yarl==1.16.0
# via aiohttp
zipp==3.20.2 zipp==3.20.2
# via importlib-metadata # via importlib-metadata

View File

@@ -1,66 +1,57 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt --strip-extras pyproject.toml # pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3 anyio==4.6.2.post1
# via aiohttp # via httpx
aiohttp==3.10.10 certifi==2024.8.30
# via bugis (pyproject.toml) # via
aiosignal==1.3.1 # httpcore
# via aiohttp # httpx
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
# via aiohttp
cffi==1.17.1
# via pycares
click==8.1.7 click==8.1.7
# via granian # via granian
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
granian==1.6.1 granian==1.6.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10 idna==3.10
# via yarl # via
# anyio
# httpx
markdown==3.7 markdown==3.7
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
multidict==6.1.0 pwo==0.0.4
# via
# aiohttp
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22
# via cffi
pygments==2.18.0 pygments==2.18.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pygraphviz==1.14 pygraphviz==1.14
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pyyaml==6.0.2 pyyaml==6.0.2
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
typing-extensions==4.7.1 sniffio==1.3.1
# via # via
# multidict # anyio
# pwo # httpx
typing-extensions==4.12.2
# via pwo
uvloop==0.21.0 uvloop==0.21.0
# via granian # via granian
watchdog==5.0.3 watchdog==5.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
yarl==1.16.0
# via aiohttp

View File

@@ -1,60 +1,51 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml # pip-compile --output-file=requirements.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3 anyio==4.6.2.post1
# via aiohttp # via httpx
aiohttp==3.10.10 certifi==2024.8.30
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
# via aiohttp
cffi==1.17.1
# via pycares
frozenlist==1.4.1
# via # via
# aiohttp # httpcore
# aiosignal # httpx
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10 idna==3.10
# via yarl # via
# anyio
# httpx
markdown==3.7 markdown==3.7
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
multidict==6.1.0 pwo==0.0.4
# via
# aiohttp
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22
# via cffi
pygments==2.18.0 pygments==2.18.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pygraphviz==1.14 pygraphviz==1.14
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pyyaml==6.0.2 pyyaml==6.0.2
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
typing-extensions==4.7.1 sniffio==1.3.1
# via # via
# multidict # anyio
# pwo # httpx
typing-extensions==4.12.2
# via pwo
watchdog==5.0.3 watchdog==5.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
yarl==1.16.0
# via aiohttp

5
src/bugis/__main__.py Normal file
View File

@@ -0,0 +1,5 @@
import argparse
from cli import main
import sys
main(sys.argv[1:])

View File

@@ -14,14 +14,14 @@ from pwo import Maybe
from .server import Server from .server import Server
from asyncio import get_running_loop from asyncio import get_running_loop
from .asgi_utils import decode_headers from .asgi_utils import decode_headers
from typing import Optional from typing import Optional, Awaitable, Callable, Any, Mapping
log = logging.getLogger('access') log = logging.getLogger('access')
log.propagate = False log.propagate = False
_server: Optional[Server] = None _server: Optional[Server] = None
async def application(scope, receive, send): async def application(scope, receive, send : Callable[[Mapping[str, Any]], Awaitable[None]]):
global _server global _server
if scope['type'] == 'lifespan': if scope['type'] == 'lifespan':
while True: while True:
@@ -49,6 +49,9 @@ async def application(scope, receive, send):
.filter(lambda it: it.get('type') == 'http.response.start') .filter(lambda it: it.get('type') == 'http.response.start')
.if_present(maybe_log)) .if_present(maybe_log))
return result return result
pathsend = (Maybe.of_nullable(scope.get('extensions'))
.map(lambda it: it.get("http.response.pathsend"))
.is_present)
await _server.handle_request( await _server.handle_request(
scope['method'], scope['method'],
scope['path'], scope['path'],
@@ -58,6 +61,7 @@ async def application(scope, receive, send):
.map(lambda it: it.decode()) .map(lambda it: it.decode())
.or_else(None), .or_else(None),
Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None), Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
wrapped_send wrapped_send,
pathsend
) )

View File

@@ -1,54 +1,15 @@
import asyncio import asyncio
from asyncio import Queue, AbstractEventLoop, Future, Task, gather
from logging import getLogger, Logger
from pathlib import Path
from pwo import TopicManager, Subscriber
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer from watchdog.observers import Observer
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from pathlib import Path
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather
from typing import Callable, Optional
from logging import getLogger, Logger
log: Logger = getLogger(__name__) log: Logger = getLogger(__name__)
class Subscription:
_unsubscribe_callback: Callable[['Subscription'], None]
_event: Optional[Future]
_loop: AbstractEventLoop
def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
self._unsubscribe_callback = unsubscribe
self._event: Optional[Future] = None
self._loop = loop
def unsubscribe(self) -> None:
self._event.cancel()
self._unsubscribe_callback(self)
log.debug('Deleted subscription %s', id(self))
async def wait(self, tout: float) -> bool:
self._event = self._loop.create_future()
def callback():
if not self._event.done():
self._event.set_result(False)
handle = self._loop.call_later(tout, callback)
try:
log.debug('Subscription %s is waiting for an event', id(self))
return await self._event
except CancelledError:
return False
finally:
handle.cancel()
def notify(self) -> None:
log.debug('Subscription %s notified', id(self))
if not self._event.done():
self._event.set_result(True)
def reset(self) -> None:
self._event = self._loop.create_future()
class _EventHandler(FileSystemEventHandler): class _EventHandler(FileSystemEventHandler):
_queue: Queue _queue: Queue
@@ -67,22 +28,6 @@ class _EventHandler(FileSystemEventHandler):
self._loop.call_soon_threadsafe(self._queue.put_nowait, event) self._loop.call_soon_threadsafe(self._queue.put_nowait, event)
class AsyncQueueIterator:
_queue: Queue
def __init__(self, queue: Queue):
self._queue = queue
def __aiter__(self):
return self
async def __anext__(self):
item = await self._queue.get()
if item is None:
raise StopAsyncIteration
return item
observer = Observer() observer = Observer()
@@ -96,55 +41,11 @@ def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
observer.join() observer.join()
loop.call_soon_threadsafe(queue.put_nowait, None) loop.call_soon_threadsafe(queue.put_nowait, None)
class SubscriptionManager:
_loop: AbstractEventLoop
_queue: Queue
_subscriptions: dict[str, set[Subscription]]
def __init__(self, loop: AbstractEventLoop):
self._subscriptions: dict[str, set[Subscription]] = dict()
self._loop = loop
self._queue = Queue()
def subscribe(self, path: str) -> Subscription:
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.setdefault(path, set())
def unsubscribe_callback(subscription):
subscriptions_per_path.remove(subscription)
log.debug('Removed subscription %s to path %s', id(result), path)
result = Subscription(unsubscribe_callback, self._loop)
log.debug('Created subscription %s to path %s', id(result), path)
subscriptions_per_path.add(result)
return result
def _notify_subscriptions(self, path):
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.get(path, None)
if subscriptions_per_path:
log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}")
for s in subscriptions_per_path:
s.notify()
async def process_events(self):
async for evt in AsyncQueueIterator(self._queue):
log.debug(f"Processed event for path '{evt}'")
self._notify_subscriptions(evt)
log.debug(f"Event processor has completed")
def post_event(self, path):
def callback():
self._queue.put_nowait(path)
log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}")
self._loop.call_soon_threadsafe(callback)
class FileWatcher(PatternMatchingEventHandler): class FileWatcher(PatternMatchingEventHandler):
_subscription_manager: SubscriptionManager _topic_manager: TopicManager
_loop: AbstractEventLoop _loop: AbstractEventLoop
_subscription_manager_loop: Task _topic_manager_loop: Task
_running_tasks : Future _running_tasks : Future
def __init__(self, path): def __init__(self, path):
@@ -155,29 +56,29 @@ class FileWatcher(PatternMatchingEventHandler):
self._observer: Observer = Observer() self._observer: Observer = Observer()
self._observer.schedule(self, path=path, recursive=True) self._observer.schedule(self, path=path, recursive=True)
self._loop = asyncio.get_running_loop() self._loop = asyncio.get_running_loop()
self._subscription_manager = SubscriptionManager(self._loop) self._topic_manager = TopicManager(self._loop)
self._running_tasks = gather( self._running_tasks = gather(
self._loop.run_in_executor(None, self._observer.start), self._loop.run_in_executor(None, self._observer.start),
self._loop.create_task(self._subscription_manager.process_events()) self._loop.create_task(self._topic_manager.process_events())
) )
async def stop(self) -> None: async def stop(self) -> None:
def _observer_stop(): def _observer_stop():
self._observer.stop() self._observer.stop()
self._observer.join() self._observer.join()
self._subscription_manager.post_event(None) self._topic_manager.post_event(None)
await self._loop.run_in_executor(None, _observer_stop) await self._loop.run_in_executor(None, _observer_stop)
await self._running_tasks await self._running_tasks
def subscribe(self, path: str) -> Subscription: def subscribe(self, path: str) -> Subscriber:
return self._subscription_manager.subscribe(path) return self._topic_manager.subscribe(path)
def on_any_event(self, event: FileSystemEvent) -> None: def on_any_event(self, event: FileSystemEvent) -> None:
what = "directory" if event.is_directory else "file" what = "directory" if event.is_directory else "file"
def post_event(path): def post_event(path):
self._subscription_manager.post_event(path) self._topic_manager.post_event(path)
if isinstance(event, FileClosedEvent): if isinstance(event, FileClosedEvent):
log.debug("Closed %s: %s", what, event.src_path) log.debug("Closed %s: %s", what, event.src_path)

41
src/bugis/cli.py Normal file
View File

@@ -0,0 +1,41 @@
import argparse
from dataclasses import asdict
from os import environ
from pathlib import Path
from typing import Optional, Sequence
import yaml
from granian import Granian
from pwo import Maybe
from .configuration import Configuration
def main(args: Optional[Sequence[str]] = None):
parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files")
default_configuration_file = (Maybe.of(environ.get('XDG_CONFIG_HOME'))
.map(lambda it: Path(it))
.map(lambda it: it / 'bugis' / 'bugis.yaml')
.or_else_get(lambda: Path(environ.get('HOME')) / '.config' / 'bugis' / 'bugis.yaml')
.filter(Path.exists)
.or_else(None)
)
parser.add_argument(
'-c',
'--configuration',
help='Path to the configuration file',
default=default_configuration_file,
type=Path,
)
args = parser.parse_args(args)
def parse(configuration: Path):
with open(configuration, 'r') as f:
return Configuration.from_dict(yaml.safe_load(f))
conf = Maybe.of_nullable(args.configuration).map(parse).or_else(Configuration.instance)
Granian(
"bugis.asgi:application",
**asdict(conf.granian)
).serve()

View File

@@ -1,45 +1,140 @@
import os
from os import environ from os import environ
from pathlib import Path from pathlib import Path
from dataclasses import dataclass from dataclasses import dataclass, field, asdict
from granian.constants import Loops, Interfaces, ThreadModes, HTTPModes, StrEnum
from granian.log import LogLevels
from granian.http import HTTP1Settings, HTTP2Settings
from typing import Optional, Sequence, Dict, Any
from pwo import classproperty, Maybe
from yaml import add_representer, SafeDumper, SafeLoader
class ClassPropertyDescriptor(object):
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@dataclass(frozen=True) @dataclass(frozen=True)
class Configuration: class Configuration:
logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml') logging_configuration_file: str = environ.get("LOGGING_CONFIGURATION_FILE",
Path(__file__).parent / 'default-conf' / 'logging.yaml')
plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None) plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
@classproperty @classproperty
def instance(cls) -> 'Configuration': def instance(cls) -> 'Configuration':
return Configuration() return Configuration()
@dataclass(frozen=True)
class GranianConfiguration:
address: str = '127.0.0.1'
port: int = 8000
interface: str = Interfaces.ASGI
workers: int = 1
threads: int = 1
blocking_threads: Optional[int] = None
threading_mode: ThreadModes = ThreadModes.workers
loop: Loops = Loops.auto
loop_opt: bool = False
http: HTTPModes = HTTPModes.auto
websockets: bool = True
backlog: int = 1024
backpressure: Optional[int] = None
http1_settings: Optional[HTTP1Settings] = None
http2_settings: Optional[HTTP2Settings] = None
log_enabled: bool = True
log_level: LogLevels = LogLevels.info
log_dictconfig: Optional[Dict[str, Any]] = None
log_access: bool = False
log_access_format: Optional[str] = None
ssl_cert: Optional[Path] = None
ssl_key: Optional[Path] = None
ssl_key_password: Optional[str] = None
url_path_prefix: Optional[str] = None
respawn_failed_workers: bool = False
respawn_interval: float = 3.5
workers_lifetime: Optional[int] = None
factory: bool = False
reload: bool = False
reload_paths: Optional[Sequence[Path]] = None
reload_ignore_dirs: Optional[Sequence[str]] = None
reload_ignore_patterns: Optional[Sequence[str]] = None
reload_ignore_paths: Optional[Sequence[Path]] = None
process_name: Optional[str] = None
pid_file: Optional[Path] = None
@staticmethod
def from_dict(d) -> 'Configuration.GranianConfiguration':
return Configuration.GranianConfiguration(**{k: v for k, v in dict(
address=d.get('address', None),
port=d.get('port', None),
interface=Maybe.of_nullable(d.get('interface')).map(lambda it: Interfaces(it)).or_else(None),
workers=d.get('workers', None),
threads=d.get('threads', None),
blocking_threads=d.get('blocking_threads', None),
threading_mode=Maybe.of_nullable(d.get('threading_modes')).map(lambda it: ThreadModes(it)).or_else(None),
loop=Maybe.of_nullable(d.get('loop')).map(lambda it: Loops(it)).or_else(None),
loop_opt=d.get('loop_opt', None),
http=Maybe.of_nullable(d.get('http')).map(lambda it: HTTPModes(it)).or_else(None),
websockets=d.get('websockets', None),
backlog=d.get('backlog', None),
backpressure=d.get('backpressure', None),
http1_settings=Maybe.of_nullable(d.get('http1_settings')).map(lambda it: HTTP1Settings(**it)).or_else(None),
http2_settings=Maybe.of_nullable(d.get('http2_settings')).map(lambda it: HTTP2Settings(**it)).or_else(None),
log_enabled=d.get('log_enabled', None),
log_level=Maybe.of_nullable(d.get('log_level')).map(lambda it: LogLevels(it)).or_else(None),
# log_dictconfig: Optional[Dict[str, Any]] = None,
log_access=d.get('log_access', None),
log_access_format=d.get('log_access_format', None),
ssl_cert=d.get('ssl_cert', None),
ssl_key=d.get('ssl_key', None),
ssl_key_password=d.get('ssl_key_password', None),
url_path_prefix=d.get('url_path_prefix', None),
respawn_failed_workers=d.get('respawn_failed_workers', None),
respawn_interval=d.get('respawn_interval', None),
workers_lifetime=d.get('workers_lifetime', None),
factory=d.get('factory', None),
reload=d.get('reload', None),
reload_paths=d.get('reload_paths', None),
reload_ignore_dirs=d.get('reload_ignore_dirs', None),
reload_ignore_patterns=d.get('reload_ignore_patterns', None),
reload_ignore_paths=d.get('reload_ignore_paths', None),
process_name=d.get('process_name', None),
pid_file=d.get('pid_file', None),
).items() if v is not None})
granian: GranianConfiguration = GranianConfiguration()
@staticmethod
def from_dict(d) -> 'Configuration':
return Configuration(
**{k: v for k, v in dict(
logging_configuration_file=d.get('logging_configuration_file', None),
plant_uml_server_address=d.get('plant_uml_server_address', None),
granian=Maybe.of_nullable(d.get('granian'))
.map(Configuration.GranianConfiguration.from_dict)
.or_else(None)
).items() if v is not None
}
)
def to_yaml(self, stream):
dumper = SafeDumper(stream)
dumper.add_representer(Configuration, lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(Configuration.GranianConfiguration,
lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(LogLevels, lambda dumper, level: dumper.represent_str(level.lower()))
dumper.add_multi_representer(Path, lambda dumper, path: dumper.represent_str(str(path)))
dumper.add_multi_representer(StrEnum, lambda dumper, str_enum: dumper.represent_str(str(str_enum)))
dumper.add_representer(HTTP1Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
dumper.add_representer(HTTP2Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
try:
dumper.open()
dumper.represent(Configuration.instance)
dumper.close()
finally:
dumper.dispose()
@staticmethod
def from_yaml(stream) -> 'Configuration':
loader = SafeLoader(stream)
try:
conf = loader.get_single_data()
return Configuration.from_dict(conf)
finally:
loader.dispose()

View File

@@ -1,17 +1,19 @@
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from aiofiles import open as async_open from aiofiles import open as async_open
from aiohttp import ClientSession
from .configuration import Configuration from .configuration import Configuration
from yarl import URL
if TYPE_CHECKING: if TYPE_CHECKING:
from _typeshed import StrOrBytesPath from _typeshed import StrOrBytesPath
from httpx import AsyncClient, URL
from typing import Callable, Awaitable
from urllib.parse import urljoin
async def render_plant_uml(path: 'StrOrBytesPath') -> bytes: chunk_size = 0x10000
async with ClientSession() as session: async def render_plant_uml(client: AsyncClient, path: 'StrOrBytesPath', send : Callable[[bytes], Awaitable[None]]):
url = URL(Configuration.instance.plant_uml_server_address) / 'svg' url = URL(urljoin(Configuration.instance.plant_uml_server_address, 'svg'))
async with async_open(path, 'rb') as file: async with async_open(path, 'rb') as file:
source = await file.read() source = await file.read()
async with session.post(url, data=source) as response: response = await client.post(url, content=source)
response.raise_for_status() response.raise_for_status()
return await response.read() async for chunk in response.aiter_bytes(chunk_size=chunk_size):
await send(chunk)

View File

@@ -6,7 +6,7 @@ from io import BytesIO
from mimetypes import init as mimeinit, guess_type from mimetypes import init as mimeinit, guess_type
from os import getcwd from os import getcwd
from os.path import join, normpath, splitext, relpath, basename from os.path import join, normpath, splitext, relpath, basename
from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any, Mapping
import pygraphviz as pgv import pygraphviz as pgv
from aiofiles import open as async_open from aiofiles import open as async_open
@@ -14,6 +14,7 @@ from aiofiles.base import AiofilesContextManager
from aiofiles.os import listdir from aiofiles.os import listdir
from aiofiles.ospath import exists, isdir, isfile, getmtime from aiofiles.ospath import exists, isdir, isfile, getmtime
from aiofiles.threadpool.binary import AsyncBufferedReader from aiofiles.threadpool.binary import AsyncBufferedReader
from httpx import AsyncClient
from pwo import Maybe from pwo import Maybe
from .asgi_utils import encode_headers from .asgi_utils import encode_headers
@@ -34,6 +35,7 @@ def completed_future[T](result : T) -> Future[T]:
future.set_result(result) future.set_result(result)
return future return future
def has_extension(filepath, extension): def has_extension(filepath, extension):
_, ext = splitext(filepath) _, ext = splitext(filepath)
return ext == extension return ext == extension
@@ -46,13 +48,19 @@ def is_markdown(filepath):
def is_dotfile(filepath): def is_dotfile(filepath):
return has_extension(filepath, ".dot") return has_extension(filepath, ".dot")
def is_plant_uml(filepath): def is_plant_uml(filepath):
return has_extension(filepath, ".puml") return has_extension(filepath, ".puml")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class Server: class Server:
root_dir: 'StrOrBytesPath'
prefix: Optional['StrOrBytesPath']
_loop: AbstractEventLoop _loop: AbstractEventLoop
_client: AsyncClient
def __init__(self, def __init__(self,
root_dir: 'StrOrBytesPath' = getcwd(), root_dir: 'StrOrBytesPath' = getcwd(),
@@ -63,8 +71,16 @@ class Server:
self.file_watcher = FileWatcher(cwd) self.file_watcher = FileWatcher(cwd)
self.prefix = prefix and normpath(f'{prefix.decode()}') self.prefix = prefix and normpath(f'{prefix.decode()}')
self._loop = loop self._loop = loop
self._client = AsyncClient()
async def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], send): async def handle_request(self,
method: str,
url_path: str,
etag: Optional[str],
query_string: Optional[str],
send: Callable[[Mapping[str, Any]], Awaitable[None]],
pathsend: bool = False
):
if method != 'GET': if method != 'GET':
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
@@ -84,11 +100,12 @@ class Server:
etag, digest = await self.compute_etag_and_digest( etag, digest = await self.compute_etag_and_digest(
etag, etag,
url_path, url_path,
lambda: AiofilesContextManager(completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))), lambda: AiofilesContextManager(
completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
lambda: completed_future(mtime) lambda: completed_future(mtime)
) )
if etag and etag == digest: if etag and etag == digest:
await self.not_modified(send, digest, ('Cache-Control', 'must-revalidate, max-age=86400')) await self.not_modified(send, digest, 'must-revalidate, max-age=86400')
return return
elif content: elif content:
mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream' mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
@@ -147,6 +164,7 @@ class Server:
result = graph.draw(None, format="svg", prog="dot") result = graph.draw(None, format="svg", prog="dot")
logger.debug("Completed Graphviz rendering for file '%s'", filepath) logger.debug("Completed Graphviz rendering for file '%s'", filepath)
return result return result
body = await self._loop.run_in_executor(None, render_graphviz, path) body = await self._loop.run_in_executor(None, render_graphviz, path)
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
@@ -163,7 +181,6 @@ class Server:
}) })
elif is_plant_uml(path): elif is_plant_uml(path):
logger.debug("Starting PlantUML rendering for file '%s'", path) logger.debug("Starting PlantUML rendering for file '%s'", path)
body = await render_plant_uml(path)
logger.debug("Completed PlantUML rendering for file '%s'", path) logger.debug("Completed PlantUML rendering for file '%s'", path)
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
@@ -174,13 +191,18 @@ class Server:
'Cache-Control': 'no-cache' 'Cache-Control': 'no-cache'
}) })
}) })
await render_plant_uml(self._client, path, lambda chunk: send({
'type': 'http.response.body',
'body': chunk,
'more_body': True
}))
await send({ await send({
'type': 'http.response.body', 'type': 'http.response.body',
'body': body 'body': '',
'more_body': False
}) })
else: else:
async def read_file(file_path): async def read_file(file_path, buffer_size=0x10000):
buffer_size = 0x10000
async with async_open(file_path, 'rb') as f: async with async_open(file_path, 'rb') as f:
while True: while True:
result = await f.read(buffer_size) result = await f.read(buffer_size)
@@ -197,7 +219,12 @@ class Server:
'Cache-Control': 'no-cache' 'Cache-Control': 'no-cache'
}) })
}) })
if pathsend:
await send({
'type': 'http.response.pathsend',
'path': path
})
else:
async for chunk in read_file(path): async for chunk in read_file(path):
await send({ await send({
'type': 'http.response.body', 'type': 'http.response.body',
@@ -227,7 +254,7 @@ class Server:
await self.not_found(send) await self.not_found(send)
@staticmethod @staticmethod
async def stream_hash(source: AsyncBufferedReader, bufsize=0x1000) -> bytes: async def stream_hash(source: AsyncBufferedReader, bufsize=0x10000) -> bytes:
if bufsize <= 0: if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0") raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5() md5 = hashlib.md5()
@@ -239,7 +266,7 @@ class Server:
return md5.digest() return md5.digest()
@staticmethod @staticmethod
async def file_hash(filepath, bufsize=0x1000) -> bytes: async def file_hash(filepath, bufsize=0x10000) -> bytes:
if bufsize <= 0: if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0") raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5() md5 = hashlib.md5()
@@ -362,15 +389,19 @@ class Server:
for entry in sorted(await listdir(path)): for entry in sorted(await listdir(path)):
if await filter(join(path, entry)): if await filter(join(path, entry)):
yield entry yield entry
return result() return result()
async for entry in await ls(isdir): async for entry in await ls(isdir):
result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>' result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
async def file_filter(entry: str) -> bool: async def file_filter(entry: str) -> bool:
return await isfile(entry) and is_markdown(entry) return await isfile(entry) and is_markdown(entry)
async for entry in await ls(file_filter): async for entry in await ls(file_filter):
result += '<li><a href="' + entry + '"/>' + entry + '</li>' result += '<li><a href="' + entry + '"/>' + entry + '</li>'
return result return result
async def stop(self): async def stop(self):
await self.file_watcher.stop() await self.file_watcher.stop()
await self._client.aclose()