diff --git a/Dockerfile b/Dockerfile
index 0e54f3a..e8c50fb 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -32,7 +32,8 @@
 VOLUME /srv/http
 WORKDIR /srv/http
 
 ENV GRANIAN_HOST=0.0.0.0
-ENV GRANIAN_INTERFACE=asginl
+ENV GRANIAN_PORT=8000
+ENV GRANIAN_INTERFACE=asgi
 ENV GRANIAN_LOOP=asyncio
 ENV GRANIAN_LOG_ENABLED=false
diff --git a/build_docker_image.sh b/build_docker_image.sh
deleted file mode 100755
index 7f4a3be..0000000
--- a/build_docker_image.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/bash
-set -e
-
-venv/bin/python -m build
-mkdir -p docker/build
-cp dist/bugis-*.whl docker/build/
-cp docker/Dockerfile docker/build/Dockerfile
-
-docker build docker/build --tag bugis:latest
diff --git a/conf/log.yml b/conf/log.yml
deleted file mode 100644
index c910efd..0000000
--- a/conf/log.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-version: 1
-disable_existing_loggers: True
-handlers:
-  console:
-    class : logging.StreamHandler
-    formatter: default
-    level : INFO
-    stream : ext://sys.stdout
-formatters:
-  brief:
-    format: '%(message)s'
-  default:
-    format: '%(asctime)s %(levelname)-8s %(name)-15s %(threadName)s %(message)s'
-    datefmt: '%Y-%m-%d %H:%M:%S'
-loggers:
-  root:
-    handlers: [console]
-    level: INFO
\ No newline at end of file
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 0000000..afe8fc3
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,46 @@
+networks:
+  default:
+    external: false
+    ipam:
+      driver: default
+      config:
+        - subnet: 172.128.0.0/16
+          ip_range: 172.128.0.0/16
+          gateway: 172.128.0.254
+
+services:
+  granian:
+    build:
+      context: .
+    user: $UID:$GID
+    restart: unless-stopped
+#    container_name: granian
+    environment:
+      PLANT_UML_SERVER_ADDRESS: http://plant_uml:8080
+    deploy:
+      resources:
+        limits:
+          cpus: "0.5"
+          memory: 512M
+    volumes:
+      - ${STATIC_ROOT}:/srv/http
+  plant_uml:
+    image: plantuml/plantuml-server:jetty
+#    container_name: plantUML
+    restart: unless-stopped
+    tmpfs: /tmp/jetty
+    deploy:
+      resources:
+        limits:
+          cpus: "4"
+          memory: 1G
+  nginx:
+    image: gitea.woggioni.net/woggioni/nginx:v1.27.2
+#    container_name: nginx
+    restart: unless-stopped
+    depends_on:
+      - granian
+    volumes:
+      - ./conf/nginx-bugis.conf:/etc/nginx/conf.d/bugis.conf:ro
+    ports:
+      - 127.0.0.1:80:8080
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index 669fb56..8e6bb7e 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -28,7 +28,8 @@ dependencies = [
     "pwo",
     "PyYAML",
     "pygraphviz",
-    "aiofiles"
+    "aiofiles",
+    "aiohttp[speedups]"
 ]
 
 [project.optional-dependencies]
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 30ca754..6b1cbe2 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,22 +1,39 @@
 #
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.10
 # by the following command:
 #
-#    pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
+#    pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt --strip-extras pyproject.toml
 #
---index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
---extra-index-url https://pypi.org/simple
+--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
 
+aiodns==3.2.0
+    # via aiohttp
 aiofiles==24.1.0
     # via bugis (pyproject.toml)
+aiohappyeyeballs==2.4.3
+    # via aiohttp
+aiohttp==3.10.10
+    # via bugis (pyproject.toml)
+aiosignal==1.3.1
+    # via aiohttp
 asttokens==2.4.1
     # via stack-data
+async-timeout==4.0.3
+    # via aiohttp
+attrs==24.2.0
+    # via aiohttp
+backports-tarfile==1.2.0
+    # via jaraco-context
+brotli==1.1.0
+    # via aiohttp
 build==1.2.2.post1
     # via bugis (pyproject.toml)
 certifi==2024.8.30
     # via requests
 cffi==1.17.1
-    # via cryptography
+    # via
+    #   cryptography
+    #   pycares
 charset-normalizer==3.4.0
     # via requests
 click==8.1.7
@@ -29,14 +46,24 @@ decorator==5.1.1
     # via ipython
 docutils==0.21.2
     # via readme-renderer
+exceptiongroup==1.2.2
+    # via ipython
 executing==2.1.0
     # via stack-data
+frozenlist==1.4.1
+    # via
+    #   aiohttp
+    #   aiosignal
 granian==1.6.1
     # via bugis (pyproject.toml)
 idna==3.10
-    # via requests
+    # via
+    #   requests
+    #   yarl
 importlib-metadata==8.5.0
-    # via twine
+    # via
+    #   keyring
+    #   twine
 ipdb==0.13.13
     # via bugis (pyproject.toml)
 ipython==8.28.0
@@ -67,6 +94,10 @@ more-itertools==10.5.0
     # via
     #   jaraco-classes
     #   jaraco-functools
+multidict==6.1.0
+    # via
+    #   aiohttp
+    #   yarl
 mypy==1.12.1
     # via bugis (pyproject.toml)
 mypy-extensions==1.0.0
@@ -83,12 +114,16 @@ pkginfo==1.10.0
     # via twine
 prompt-toolkit==3.0.48
     # via ipython
+propcache==0.2.0
+    # via yarl
 ptyprocess==0.7.0
     # via pexpect
 pure-eval==0.2.3
     # via stack-data
 pwo==0.0.3
     # via bugis (pyproject.toml)
+pycares==4.4.0
+    # via aiodns
 pycparser==2.22
     # via cffi
 pygments==2.18.0
@@ -121,6 +156,11 @@ six==1.16.0
     # via asttokens
 stack-data==0.6.3
     # via ipython
+tomli==2.0.2
+    # via
+    #   build
+    #   ipdb
+    #   mypy
 traitlets==5.14.3
     # via
     #   ipython
@@ -129,8 +169,11 @@ twine==5.1.1
     # via bugis (pyproject.toml)
 typing-extensions==4.7.1
     # via
+    #   ipython
+    #   multidict
     #   mypy
     #   pwo
+    #   rich
 urllib3==2.2.3
     # via
     #   requests
@@ -141,5 +184,7 @@ watchdog==5.0.3
     # via bugis (pyproject.toml)
 wcwidth==0.2.13
     # via prompt-toolkit
+yarl==1.16.0
+    # via aiohttp
 zipp==3.20.2
     # via importlib-metadata
diff --git a/requirements-run.txt b/requirements-run.txt
index a20919e..e56da45 100644
--- a/requirements-run.txt
+++ b/requirements-run.txt
@@ -1,22 +1,53 @@
 #
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.10
 # by the following command:
 #
-#    pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml
+#    pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt --strip-extras pyproject.toml
 #
---index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
---extra-index-url https://pypi.org/simple
+--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
 
+aiodns==3.2.0
+    # via aiohttp
 aiofiles==24.1.0
     # via bugis (pyproject.toml)
+aiohappyeyeballs==2.4.3
+    # via aiohttp
+aiohttp==3.10.10
+    # via bugis (pyproject.toml)
+aiosignal==1.3.1
+    # via aiohttp
+async-timeout==4.0.3
+    # via aiohttp
+attrs==24.2.0
+    # via aiohttp
+brotli==1.1.0
+    # via aiohttp
+cffi==1.17.1
+    # via pycares
 click==8.1.7
     # via granian
+frozenlist==1.4.1
+    # via
+    #   aiohttp
+    #   aiosignal
 granian==1.6.1
     # via bugis (pyproject.toml)
+idna==3.10
+    # via yarl
 markdown==3.7
     # via bugis (pyproject.toml)
+multidict==6.1.0
+    # via
+    #   aiohttp
+    #   yarl
+propcache==0.2.0
+    # via yarl
 pwo==0.0.3
     # via bugis (pyproject.toml)
+pycares==4.4.0
+    # via aiodns
+pycparser==2.22
+    # via cffi
 pygments==2.18.0
     # via bugis (pyproject.toml)
 pygraphviz==1.14
@@ -24,8 +55,12 @@
 pyyaml==6.0.2
     # via bugis (pyproject.toml)
 typing-extensions==4.7.1
-    # via pwo
+    # via
+    #   multidict
+    #   pwo
 uvloop==0.21.0
     # via granian
 watchdog==5.0.3
     # via bugis (pyproject.toml)
+yarl==1.16.0
+    # via aiohttp
diff --git a/requirements.txt b/requirements.txt
index 0b3380b..bd0a0e2 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,18 +1,49 @@
 #
-# This file is autogenerated by pip-compile with Python 3.12
+# This file is autogenerated by pip-compile with Python 3.10
 # by the following command:
 #
-#    pip-compile --extra='' --output-file=requirements-.txt pyproject.toml
+#    pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml
 #
---index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
---extra-index-url https://pypi.org/simple
+--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
 
+aiodns==3.2.0
+    # via aiohttp
 aiofiles==24.1.0
     # via bugis (pyproject.toml)
+aiohappyeyeballs==2.4.3
+    # via aiohttp
+aiohttp==3.10.10
+    # via bugis (pyproject.toml)
+aiosignal==1.3.1
+    # via aiohttp
+async-timeout==4.0.3
+    # via aiohttp
+attrs==24.2.0
+    # via aiohttp
+brotli==1.1.0
+    # via aiohttp
+cffi==1.17.1
+    # via pycares
+frozenlist==1.4.1
+    # via
+    #   aiohttp
+    #   aiosignal
+idna==3.10
+    # via yarl
 markdown==3.7
     # via bugis (pyproject.toml)
+multidict==6.1.0
+    # via
+    #   aiohttp
+    #   yarl
+propcache==0.2.0
+    # via yarl
 pwo==0.0.3
     # via bugis (pyproject.toml)
+pycares==4.4.0
+    # via aiodns
+pycparser==2.22
+    # via cffi
 pygments==2.18.0
     # via bugis (pyproject.toml)
 pygraphviz==1.14
@@ -20,6 +51,10 @@
 pyyaml==6.0.2
     # via bugis (pyproject.toml)
 typing-extensions==4.7.1
-    # via pwo
+    # via
+    #   multidict
+    #   pwo
 watchdog==5.0.3
     # via bugis (pyproject.toml)
+yarl==1.16.0
+    # via aiohttp
diff --git a/src/bugis/asgi.py b/src/bugis/asgi.py
index afa506a..c40d468 100644
--- a/src/bugis/asgi.py
+++ b/src/bugis/asgi.py
@@ -1,11 +1,11 @@
 import logging
 from logging.config import dictConfig as configure_logging
 
-from os import environ
-from yaml import safe_load
-from pathlib import Path
-logging_configuration_file = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml')
-with open(logging_configuration_file, 'r') as input_file:
+from yaml import safe_load
+
+from .configuration import Configuration
+
+with open(Configuration.instance.logging_configuration_file, 'r') as input_file:
     conf = safe_load(input_file)
 
 configure_logging(conf)
@@ -13,55 +13,51 @@ with open(logging_configuration_file, 'r') as input_file:
 from pwo import Maybe
 from .server import Server
 from asyncio import get_running_loop
+from .asgi_utils import decode_headers
+from typing import Optional
 
 log = logging.getLogger('access')
 log.propagate = False
 
-_server = None
+_server : Optional[Server] = None
 
-
-def decode_headers(headers):
-    result = dict()
-    for key, value in headers:
-        if isinstance(key, bytes):
-            key = key.decode()
-        if isinstance(value, bytes):
-            value = value.decode()
-        l = result.setdefault(key, list())
-        l.append(value)
-    return {
-        k: tuple(v) for k, v in result.items()
-    }
-
-async def application(ctx, receive, send):
+async def application(scope, receive, send):
     global _server
-    if _server is None:
-        _server = Server(loop=get_running_loop(), prefix=None)
-
-    def maybe_log(evt):
-        d = {
-            'response_headers': (Maybe.of_nullable(evt.get('headers'))
-                                 .map(decode_headers)
-                                 .or_none()),
-            'status': evt['status']
-        }
-        log.info(None, extra=dict(**{k : v for k, v in d.items() if k is not None}, **ctx))
-    def wrapped_send(*args, **kwargs):
-        result = send(*args, **kwargs)
-        (Maybe.of(args)
-         .filter(lambda it: len(it) > 0)
-         .map(lambda it: it[0])
-         .filter(lambda it: it.get('type') == 'http.response.start')
-         .if_present(maybe_log))
-        return result
-    await _server.handle_request(
-        ctx['method'],
-        ctx['path'],
-        Maybe.of([header[1] for header in ctx['headers'] if header[0].decode().lower() == 'if-none-match'])
-        .filter(lambda it: len(it) > 0)
-        .map(lambda it: it[0])
-        .map(lambda it: it.decode())
-        .or_else(None),
-        Maybe.of_nullable(ctx.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
-        wrapped_send
-    )
+    if scope['type'] == 'lifespan':
+        while True:
+            message = await receive()
+            if message['type'] == 'lifespan.startup':
+                _server = Server(loop=get_running_loop(), prefix=None)
+                await send({'type': 'lifespan.startup.complete'})
+            elif message['type'] == 'lifespan.shutdown':
+                await _server.stop()
+                await send({'type': 'lifespan.shutdown.complete'})
+    else:
+        def maybe_log(evt):
+            d = {
+                'response_headers': (Maybe.of_nullable(evt.get('headers'))
+                                     .map(decode_headers)
+                                     .or_none()),
+                'status': evt['status']
+            }
+            log.info(None, extra=dict(**{k : v for k, v in d.items() if k is not None}, **scope))
+        def wrapped_send(*args, **kwargs):
+            result = send(*args, **kwargs)
+            (Maybe.of(args)
+             .filter(lambda it: len(it) > 0)
+             .map(lambda it: it[0])
+             .filter(lambda it: it.get('type') == 'http.response.start')
+             .if_present(maybe_log))
+            return result
+        await _server.handle_request(
+            scope['method'],
+            scope['path'],
+            Maybe.of([header[1] for header in scope['headers'] if header[0].decode().lower() == 'if-none-match'])
+            .filter(lambda it: len(it) > 0)
+            .map(lambda it: it[0])
+            .map(lambda it: it.decode())
+            .or_else(None),
+            Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
+            wrapped_send
+        )
diff --git a/src/bugis/asgi_utils.py b/src/bugis/asgi_utils.py
new file mode 100644
index 0000000..fa64d08
--- /dev/null
+++ b/src/bugis/asgi_utils.py
@@ -0,0 +1,26 @@
+from typing import Tuple, Dict, Sequence
+
+type StrOrStrings = (str, Sequence[str])
+
+def decode_headers(headers: Sequence[Tuple[bytes, bytes]]) -> Dict[str, Sequence[str]]:
+    result = dict()
+    for key, value in headers:
+        if isinstance(key, bytes):
+            key = key.decode()
+        if isinstance(value, bytes):
+            value = value.decode()
+        l = result.setdefault(key, list())
+        l.append(value)
+    return {
+        k: tuple(v) for k, v in result.items()
+    }
+
+def encode_headers(headers: Dict[str, StrOrStrings]) -> Tuple[Tuple[bytes, bytes], ...]:
+    result = []
+    for key, value in headers.items():
+        if isinstance(value, str):
+            result.append((key.encode(), value.encode()))
+        elif isinstance(value, Sequence):
+            for single_value in value:
+                result.append((key.encode(), single_value.encode()))
+    return tuple(result)
\ No newline at end of file
diff --git a/src/bugis/async_watchdog.py b/src/bugis/async_watchdog.py
index ddba655..6129675 100644
--- a/src/bugis/async_watchdog.py
+++ b/src/bugis/async_watchdog.py
@@ -4,33 +4,38 @@ from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatc
 from watchdog.observers import Observer
 from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
 from pathlib import Path
-from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task
-from typing import Callable
+from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather
+from typing import Callable, Optional
 from logging import getLogger, Logger
 
 log: Logger = getLogger(__name__)
 
 
 class Subscription:
     _unsubscribe_callback: Callable[['Subscription'], None]
-    _event: Future
+    _event: Optional[Future]
     _loop: AbstractEventLoop
 
     def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
        self._unsubscribe_callback = unsubscribe
-        self._event: Future = loop.create_future()
+        self._event: Optional[Future] = None
        self._loop = loop
 
     def unsubscribe(self) -> None:
+        self._event.cancel()
         self._unsubscribe_callback(self)
         log.debug('Deleted subscription %s', id(self))
 
     async def wait(self, tout: float) -> bool:
-        handle = self._loop.call_later(tout, lambda: self._event.cancel())
+        self._event = self._loop.create_future()
+
+        def callback():
+            if not self._event.done():
+                self._event.set_result(False)
+        handle = self._loop.call_later(tout, callback)
         try:
             log.debug('Subscription %s is waiting for an event', id(self))
-            await self._event
-            return True
+            return await self._event
         except CancelledError:
             return False
         finally:
@@ -38,7 +43,8 @@ class Subscription:
 
     def notify(self) -> None:
         log.debug('Subscription %s notified', id(self))
-        self._event.set_result(None)
+        if not self._event.done():
+            self._event.set_result(True)
 
     def reset(self) -> None:
         self._event = self._loop.create_future()
@@ -106,6 +112,7 @@ class SubscriptionManager:
 
         def unsubscribe_callback(subscription):
             subscriptions_per_path.remove(subscription)
+            log.debug('Removed subscription %s to path %s', id(result), path)
 
         result = Subscription(unsubscribe_callback, self._loop)
         log.debug('Created subscription %s to path %s', id(result), path)
@@ -116,21 +123,29 @@ class SubscriptionManager:
         subscriptions = self._subscriptions
         subscriptions_per_path = subscriptions.get(path, None)
         if subscriptions_per_path:
+            log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}")
             for s in subscriptions_per_path:
                 s.notify()
 
     async def process_events(self):
         async for evt in AsyncQueueIterator(self._queue):
+            log.debug(f"Processed event for path '{evt}'")
             self._notify_subscriptions(evt)
+        log.debug(f"Event processor has completed")
+
 
     def post_event(self, path):
-        self._loop.call_soon_threadsafe(self._queue.put_nowait, path)
+        def callback():
+            self._queue.put_nowait(path)
+            log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}")
+        self._loop.call_soon_threadsafe(callback)
 
 
 class FileWatcher(PatternMatchingEventHandler):
     _subscription_manager: SubscriptionManager
     _loop: AbstractEventLoop
     _subscription_manager_loop: Task
+    _running_tasks : Future
 
     def __init__(self, path):
         super().__init__(patterns=['*.md'],
@@ -141,8 +156,10 @@ class FileWatcher(PatternMatchingEventHandler):
         self._observer.schedule(self, path=path, recursive=True)
         self._loop = asyncio.get_running_loop()
         self._subscription_manager = SubscriptionManager(self._loop)
-        self._loop.run_in_executor(None, self._observer.start)
-        self._subscription_manager_loop = self._loop.create_task(self._subscription_manager.process_events())
+        self._running_tasks = gather(
+            self._loop.run_in_executor(None, self._observer.start),
+            self._loop.create_task(self._subscription_manager.process_events())
+        )
 
     async def stop(self) -> None:
         def _observer_stop():
             self._observer.stop()
             self._observer.join()
             self._subscription_manager.post_event(None)
 
-        self._loop.run_in_executor(None, _observer_stop)
-        await self._subscription_manager_loop
+        await self._loop.run_in_executor(None, _observer_stop)
+        await self._running_tasks
 
     def subscribe(self, path: str) -> Subscription:
         return self._subscription_manager.subscribe(path)
diff --git a/src/bugis/configuration.py b/src/bugis/configuration.py
new file mode 100644
index 0000000..dbb6cbe
--- /dev/null
+++ b/src/bugis/configuration.py
@@ -0,0 +1,45 @@
+import os
+from os import environ
+from pathlib import Path
+from dataclasses import dataclass
+
+class ClassPropertyDescriptor(object):
+
+    def __init__(self, fget, fset=None):
+        self.fget = fget
+        self.fset = fset
+
+    def __get__(self, obj, klass=None):
+        if klass is None:
+            klass = type(obj)
+        return self.fget.__get__(obj, klass)()
+
+    def __set__(self, obj, value):
+        if not self.fset:
+            raise AttributeError("can't set attribute")
+        type_ = type(obj)
+        return self.fset.__get__(obj, type_)(value)
+
+    def setter(self, func):
+        if not isinstance(func, (classmethod, staticmethod)):
+            func = classmethod(func)
+        self.fset = func
+        return self
+
+def classproperty(func):
+    if not isinstance(func, (classmethod, staticmethod)):
+        func = classmethod(func)
+    return ClassPropertyDescriptor(func)
+
+@dataclass(frozen=True)
+class Configuration:
+    logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml')
+    plant_uml_server_address : str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
+
+
+    @classproperty
+    def instance(cls) -> 'Configuration':
+        return Configuration()
+
+
+
diff --git a/src/bugis/plantuml.py b/src/bugis/plantuml.py
new file mode 100644
index 0000000..aa27cd1
--- /dev/null
+++ b/src/bugis/plantuml.py
@@ -0,0 +1,17 @@
+from typing import TYPE_CHECKING
+
+from aiofiles import open as async_open
+from aiohttp import ClientSession
+from .configuration import Configuration
+from yarl import URL
+if TYPE_CHECKING:
+    from _typeshed import StrOrBytesPath
+
+async def render_plant_uml(path: 'StrOrBytesPath') -> bytes:
+    async with ClientSession() as session:
+        url = URL(Configuration.instance.plant_uml_server_address) / 'svg'
+        async with async_open(path, 'rb') as file:
+            source = await file.read()
+        async with session.post(url, data=source) as response:
+            response.raise_for_status()
+            return await response.read()
diff --git a/src/bugis/server.py b/src/bugis/server.py
index bd57ae6..640b5b0 100644
--- a/src/bugis/server.py
+++ b/src/bugis/server.py
@@ -1,25 +1,26 @@
-import logging
-from os import getcwd
-from mimetypes import init as mimeinit, guess_type
 import hashlib
-
-from os.path import join, normpath, splitext, relpath, basename
+import logging
+from asyncio import AbstractEventLoop
 from asyncio import Future
-from aiofiles.os import listdir
-from aiofiles.ospath import exists, isdir, isfile, getmtime
+from io import BytesIO
+from mimetypes import init as mimeinit, guess_type
+from os import getcwd
+from os.path import join, normpath, splitext, relpath, basename
+from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any
+
+import pygraphviz as pgv
 from aiofiles import open as async_open
 from aiofiles.base import AiofilesContextManager
+from aiofiles.os import listdir
+from aiofiles.ospath import exists, isdir, isfile, getmtime
 from aiofiles.threadpool.binary import AsyncBufferedReader
-from asyncio import AbstractEventLoop
-
-from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS
-from shutil import which
-import pygraphviz as pgv
-from io import BytesIO
-from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any
-from .async_watchdog import FileWatcher
 from pwo import Maybe
+from .asgi_utils import encode_headers
+from .async_watchdog import FileWatcher
+from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS
+from .plantuml import render_plant_uml
+
 if TYPE_CHECKING:
     from _typeshed import StrOrBytesPath
@@ -45,6 +46,9 @@ def is_markdown(filepath):
 def is_dotfile(filepath):
     return has_extension(filepath, ".dot")
 
+def is_plant_uml(filepath):
+    return has_extension(filepath, ".puml")
+
 logger = logging.getLogger(__name__)
 
 class Server:
@@ -91,11 +95,11 @@ class Server:
             await send({
                 'type': 'http.response.start',
                 'status': 200,
-                'headers': [
-                    ('content-type', f'{mime_type}; charset=UTF-8'),
-                    ('etag', f'W/"{digest}"'.encode()),
-                    ('Cache-Control', 'must-revalidate, max-age=86400'),
-                ]
+                'headers': encode_headers({
+                    'content-type': f'{mime_type}; charset=UTF-8',
+                    'etag': f'W/{digest}',
+                    'Cache-Control': 'must-revalidate, max-age=86400',
+                })
             })
             await send({
                 'type': 'http.response.body',
@@ -124,7 +128,7 @@ class Server:
                 lambda: getmtime(path)
             )
            if etag != digest:
-                if exists(path) and await isfile(path):
+                if await exists(path) and await isfile(path):
                    await self.render_markdown(url_path, path, True, digest, send)
                    return
            else:
@@ -136,7 +140,7 @@ class Server:
         elif is_markdown(path):
             raw = query_string == 'reload'
             await self.render_markdown(url_path, path, raw, digest, send)
-        elif is_dotfile(path) and which("dot"):
+        elif is_dotfile(path):
             def render_graphviz(filepath: StrOrBytesPath) -> bytes:
                 logger.debug("Starting Graphviz rendering for file '%s'", filepath)
                 graph = pgv.AGraph(filepath)
@@ -147,11 +151,28 @@ class Server:
             await send({
                 'type': 'http.response.start',
                 'status': 200,
-                'headers': (
-                    ('Content-Type', 'image/svg+xml; charset=UTF-8'),
-                    ('Etag', f'W/"{digest}"'),
-                    ('Cache-Control', 'no-cache'),
-                )
+                'headers': encode_headers({
+                    'Content-Type': 'image/svg+xml; charset=UTF-8',
+                    'Etag': f'W/{digest}',
+                    'Cache-Control': 'no-cache',
+                })
+            })
+            await send({
+                'type': 'http.response.body',
+                'body': body
+            })
+        elif is_plant_uml(path):
+            logger.debug("Starting PlantUML rendering for file '%s'", path)
+            body = await render_plant_uml(path)
+            logger.debug("Completed PlantUML rendering for file '%s'", path)
+            await send({
+                'type': 'http.response.start',
+                'status': 200,
+                'headers': encode_headers({
+                    'Content-Type': 'image/svg+xml; charset=UTF-8',
+                    'Etag': f'W/{digest}',
+                    'Cache-Control': 'no-cache'
+                })
             })
             await send({
                 'type': 'http.response.body',
                 'body': body
             })
@@ -170,11 +191,11 @@ class Server:
             await send({
                 'type': 'http.response.start',
                 'status': 200,
-                'headers': (
-                    ('Content-Type', guess_type(basename(path))[0] or 'application/octet-stream'),
-                    ('Etag', f'W/{digest}'),
-                    ('Cache-Control', 'no-cache')
-                )
+                'headers': encode_headers({
+                    'Content-Type': guess_type(basename(path))[0] or 'application/octet-stream',
+                    'Etag': f'W/{digest}',
+                    'Cache-Control': 'no-cache'
+                })
             })
 
             async for chunk in read_file(path):
@@ -194,9 +215,9 @@ class Server:
         await send({
             'type': 'http.response.start',
             'status': 200,
-            'headers': (
-                ('Content-Type', 'text/html; charset=UTF-8'),
-            )
+            'headers': encode_headers({
+                'Content-Type': 'text/html; charset=UTF-8',
+            })
         })
         await send({
             'type': 'http.response.body',
@@ -288,11 +309,11 @@ class Server:
         await send({
             'type': 'http.response.start',
             'status': 200,
-            'headers': (
-                ('Content-Type', 'text/html; charset=UTF-8'),
-                ('Etag', f'W/{digest}'),
-                ('Cache-Control', 'no-cache'),
-            )
+            'headers': encode_headers({
+                'Content-Type': 'text/html; charset=UTF-8',
+                'Etag': f'W/{digest}',
+                'Cache-Control': 'no-cache',
+            })
         })
         await send({
             'type': 'http.response.body',
@@ -300,14 +321,14 @@ class Server:
         })
 
     @staticmethod
-    async def not_modified(send, digest: str, cache_control=('Cache-Control', 'no-cache')) -> []:
+    async def not_modified(send, digest: str, cache_control: str ='no-cache') -> []:
         await send({
             'type': 'http.response.start',
             'status': 304,
-            'headers': (
-                (b'Etag', f'W/{digest}'.encode()),
-                cache_control
-            )
+            'headers': encode_headers({
+                'Etag': f'W/{digest}',
+                'Cache-Control': cache_control
+            })
         })
         await send({
             'type': 'http.response.body',
@@ -350,3 +371,6 @@ class Server:
         async for entry in await ls(file_filter):
             result += '