diff --git a/pyproject.toml b/pyproject.toml index 8ee307c..6602a1e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,4 +59,7 @@ exclude = ["scripts", "docs", "test"] strict = true [tool.setuptools_scm] -version_file = "src/bugis/_version.py" \ No newline at end of file +version_file = "src/bugis/_version.py" + +[project.scripts] +bugis = "bugis.cli:main" \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 22ac57a..e2c0fd6 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,10 +1,11 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt pyproject.toml +# pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml # ---extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple +--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple +--extra-index-url https://pypi.org/simple aiofiles==24.1.0 # via bugis (pyproject.toml) @@ -12,8 +13,6 @@ anyio==4.6.2.post1 # via httpx asttokens==2.4.1 # via stack-data -backports-tarfile==1.2.0 - # via jaraco-context build==1.2.2.post1 # via bugis (pyproject.toml) certifi==2024.8.30 @@ -35,10 +34,6 @@ decorator==5.1.1 # ipython docutils==0.21.2 # via readme-renderer -exceptiongroup==1.2.2 - # via - # anyio - # ipython executing==2.1.0 # via stack-data granian==1.6.1 @@ -61,9 +56,7 @@ idna==3.10 # httpx # requests importlib-metadata==8.5.0 - # via - # keyring - # twine + # via twine ipdb==0.13.13 # via bugis (pyproject.toml) ipython==8.28.0 @@ -94,7 +87,7 @@ more-itertools==10.5.0 # via # jaraco-classes # jaraco-functools -mypy==1.12.1 +mypy==1.13.0 # via bugis (pyproject.toml) mypy-extensions==1.0.0 # via mypy @@ -114,7 +107,7 @@ ptyprocess==0.7.0 # via pexpect pure-eval==0.2.3 # via 
stack-data -pwo==0.0.3 +pwo==0.0.4 # via bugis (pyproject.toml) pycparser==2.22 # via cffi @@ -140,7 +133,7 @@ requests-toolbelt==1.0.0 # via twine rfc3986==2.0.0 # via twine -rich==13.9.2 +rich==13.9.3 # via twine secretstorage==3.3.3 # via keyring @@ -152,24 +145,16 @@ sniffio==1.3.1 # httpx stack-data==0.6.3 # via ipython -tomli==2.0.2 - # via - # build - # ipdb - # mypy traitlets==5.14.3 # via # ipython # matplotlib-inline twine==5.1.1 # via bugis (pyproject.toml) -typing-extensions==4.7.1 +typing-extensions==4.12.2 # via - # anyio - # ipython # mypy # pwo - # rich urllib3==2.2.3 # via # requests diff --git a/requirements-run.txt b/requirements-run.txt index 9dd9a6f..645527f 100644 --- a/requirements-run.txt +++ b/requirements-run.txt @@ -1,10 +1,11 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt pyproject.toml +# pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml # ---extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple +--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple +--extra-index-url https://pypi.org/simple aiofiles==24.1.0 # via bugis (pyproject.toml) @@ -16,8 +17,6 @@ certifi==2024.8.30 # httpx click==8.1.7 # via granian -exceptiongroup==1.2.2 - # via anyio granian==1.6.1 # via bugis (pyproject.toml) h11==0.14.0 @@ -38,7 +37,7 @@ idna==3.10 # httpx markdown==3.7 # via bugis (pyproject.toml) -pwo==0.0.3 +pwo==0.0.4 # via bugis (pyproject.toml) pygments==2.18.0 # via bugis (pyproject.toml) @@ -50,10 +49,8 @@ sniffio==1.3.1 # via # anyio # httpx -typing-extensions==4.7.1 - # via - # anyio - # pwo +typing-extensions==4.12.2 + # via pwo uvloop==0.21.0 # via granian watchdog==5.0.3 diff --git a/requirements.txt b/requirements.txt index 
8323611..27c3b38 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,11 @@ # -# This file is autogenerated by pip-compile with Python 3.10 +# This file is autogenerated by pip-compile with Python 3.12 # by the following command: # -# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml +# pip-compile --output-file=requirements.txt pyproject.toml # ---extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple +--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple +--extra-index-url https://pypi.org/simple aiofiles==24.1.0 # via bugis (pyproject.toml) @@ -14,8 +15,6 @@ certifi==2024.8.30 # via # httpcore # httpx -exceptiongroup==1.2.2 - # via anyio h11==0.14.0 # via httpcore h2==4.1.0 @@ -24,7 +23,7 @@ hpack==4.0.0 # via h2 httpcore==1.0.6 # via httpx -httpx==0.27.2 +httpx[http2]==0.27.2 # via bugis (pyproject.toml) hyperframe==6.0.1 # via h2 @@ -34,7 +33,7 @@ idna==3.10 # httpx markdown==3.7 # via bugis (pyproject.toml) -pwo==0.0.3 +pwo==0.0.4 # via bugis (pyproject.toml) pygments==2.18.0 # via bugis (pyproject.toml) @@ -46,9 +45,7 @@ sniffio==1.3.1 # via # anyio # httpx -typing-extensions==4.7.1 - # via - # anyio - # pwo +typing-extensions==4.12.2 + # via pwo watchdog==5.0.3 # via bugis (pyproject.toml) diff --git a/src/bugis/__main__.py b/src/bugis/__main__.py new file mode 100644 index 0000000..f7ad4a5 --- /dev/null +++ b/src/bugis/__main__.py @@ -0,0 +1,5 @@ +import argparse +from .cli import main +import sys + +main(sys.argv[1:]) diff --git a/src/bugis/asgi.py b/src/bugis/asgi.py index 232b2e2..8d59258 100644 --- a/src/bugis/asgi.py +++ b/src/bugis/asgi.py @@ -19,7 +19,7 @@ from typing import Optional, Awaitable, Callable, Any, Mapping log = logging.getLogger('access') log.propagate = False -_server : Optional[Server] = None +_server: Optional[Server] = None async def application(scope, receive, send : 
Callable[[Mapping[str, Any]], Awaitable[None]]): global _server @@ -49,6 +49,9 @@ async def application(scope, receive, send : Callable[[Mapping[str, Any]], Await .filter(lambda it: it.get('type') == 'http.response.start') .if_present(maybe_log)) return result + pathsend = (Maybe.of_nullable(scope.get('extensions')) + .map(lambda it: it.get("http.response.pathsend")) + .is_present) await _server.handle_request( scope['method'], scope['path'], @@ -58,6 +61,7 @@ async def application(scope, receive, send : Callable[[Mapping[str, Any]], Await .map(lambda it: it.decode()) .or_else(None), Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None), - wrapped_send + wrapped_send, + pathsend ) diff --git a/src/bugis/async_watchdog.py b/src/bugis/async_watchdog.py index 6129675..710c505 100644 --- a/src/bugis/async_watchdog.py +++ b/src/bugis/async_watchdog.py @@ -1,54 +1,15 @@ import asyncio +from asyncio import Queue, AbstractEventLoop, Future, Task, gather +from logging import getLogger, Logger +from pathlib import Path +from pwo import TopicManager, Subscriber +from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler from watchdog.observers import Observer -from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent -from pathlib import Path -from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather -from typing import Callable, Optional -from logging import getLogger, Logger log: Logger = getLogger(__name__) -class Subscription: - _unsubscribe_callback: Callable[['Subscription'], None] - _event: Optional[Future] - _loop: AbstractEventLoop - - def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop): - self._unsubscribe_callback = unsubscribe - self._event: Optional[Future] = None - self._loop = loop - - - 
def unsubscribe(self) -> None: - self._event.cancel() - self._unsubscribe_callback(self) - log.debug('Deleted subscription %s', id(self)) - - async def wait(self, tout: float) -> bool: - self._event = self._loop.create_future() - - def callback(): - if not self._event.done(): - self._event.set_result(False) - handle = self._loop.call_later(tout, callback) - try: - log.debug('Subscription %s is waiting for an event', id(self)) - return await self._event - except CancelledError: - return False - finally: - handle.cancel() - - def notify(self) -> None: - log.debug('Subscription %s notified', id(self)) - if not self._event.done(): - self._event.set_result(True) - - def reset(self) -> None: - self._event = self._loop.create_future() - class _EventHandler(FileSystemEventHandler): _queue: Queue @@ -67,22 +28,6 @@ class _EventHandler(FileSystemEventHandler): self._loop.call_soon_threadsafe(self._queue.put_nowait, event) -class AsyncQueueIterator: - _queue: Queue - - def __init__(self, queue: Queue): - self._queue = queue - - def __aiter__(self): - return self - - async def __anext__(self): - item = await self._queue.get() - if item is None: - raise StopAsyncIteration - return item - - observer = Observer() @@ -96,55 +41,11 @@ def watch(path: Path, queue: Queue, loop: AbstractEventLoop, observer.join() loop.call_soon_threadsafe(queue.put_nowait, None) -class SubscriptionManager: - _loop: AbstractEventLoop - _queue: Queue - _subscriptions: dict[str, set[Subscription]] - - def __init__(self, loop: AbstractEventLoop): - self._subscriptions: dict[str, set[Subscription]] = dict() - self._loop = loop - self._queue = Queue() - - def subscribe(self, path: str) -> Subscription: - subscriptions = self._subscriptions - subscriptions_per_path = subscriptions.setdefault(path, set()) - - def unsubscribe_callback(subscription): - subscriptions_per_path.remove(subscription) - log.debug('Removed subscription %s to path %s', id(result), path) - - result = Subscription(unsubscribe_callback, 
self._loop) - log.debug('Created subscription %s to path %s', id(result), path) - subscriptions_per_path.add(result) - return result - - def _notify_subscriptions(self, path): - subscriptions = self._subscriptions - subscriptions_per_path = subscriptions.get(path, None) - if subscriptions_per_path: - log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}") - for s in subscriptions_per_path: - s.notify() - - async def process_events(self): - async for evt in AsyncQueueIterator(self._queue): - log.debug(f"Processed event for path '{evt}'") - self._notify_subscriptions(evt) - log.debug(f"Event processor has completed") - - - def post_event(self, path): - def callback(): - self._queue.put_nowait(path) - log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}") - self._loop.call_soon_threadsafe(callback) - class FileWatcher(PatternMatchingEventHandler): - _subscription_manager: SubscriptionManager + _topic_manager: TopicManager _loop: AbstractEventLoop - _subscription_manager_loop: Task + _topic_manager_loop: Task _running_tasks : Future def __init__(self, path): @@ -155,29 +56,29 @@ class FileWatcher(PatternMatchingEventHandler): self._observer: Observer = Observer() self._observer.schedule(self, path=path, recursive=True) self._loop = asyncio.get_running_loop() - self._subscription_manager = SubscriptionManager(self._loop) + self._topic_manager = TopicManager(self._loop) self._running_tasks = gather( self._loop.run_in_executor(None, self._observer.start), - self._loop.create_task(self._subscription_manager.process_events()) + self._loop.create_task(self._topic_manager.process_events()) ) async def stop(self) -> None: def _observer_stop(): self._observer.stop() self._observer.join() - self._subscription_manager.post_event(None) + self._topic_manager.post_event(None) await self._loop.run_in_executor(None, _observer_stop) await self._running_tasks - def subscribe(self, path: str) -> Subscription: - return 
self._subscription_manager.subscribe(path) + def subscribe(self, path: str) -> Subscriber: + return self._topic_manager.subscribe(path) def on_any_event(self, event: FileSystemEvent) -> None: what = "directory" if event.is_directory else "file" def post_event(path): - self._subscription_manager.post_event(path) + self._topic_manager.post_event(path) if isinstance(event, FileClosedEvent): log.debug("Closed %s: %s", what, event.src_path) diff --git a/src/bugis/cli.py b/src/bugis/cli.py new file mode 100644 index 0000000..725d30f --- /dev/null +++ b/src/bugis/cli.py @@ -0,0 +1,41 @@ +import argparse +from dataclasses import asdict +from os import environ +from pathlib import Path +from typing import Optional, Sequence + +import yaml +from granian import Granian +from pwo import Maybe + +from .configuration import Configuration + + +def main(args: Optional[Sequence[str]] = None): + parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files") + default_configuration_file = (Maybe.of(environ.get('XDG_CONFIG_HOME')) + .map(lambda it: Path(it)) + .map(lambda it: it / 'bugis' / 'bugis.yaml') + .or_else_get(lambda: Path(environ.get('HOME')) / '.config' / 'bugis' / 'bugis.yaml') + .filter(Path.exists) + .or_else(None) + ) + parser.add_argument( + '-c', + '--configuration', + help='Path to the configuration file', + default=default_configuration_file, + type=Path, + ) + args = parser.parse_args(args) + + def parse(configuration: Path): + with open(configuration, 'r') as f: + return Configuration.from_dict(yaml.safe_load(f)) + + conf = Maybe.of_nullable(args.configuration).map(parse).or_else(Configuration.instance) + + Granian( + "bugis.asgi:application", + **asdict(conf.granian) + ).serve() \ No newline at end of file diff --git a/src/bugis/configuration.py b/src/bugis/configuration.py index dbb6cbe..072c89c 100644 --- a/src/bugis/configuration.py +++ b/src/bugis/configuration.py @@ -1,45 +1,140 @@ -import os from os import environ from pathlib 
import Path -from dataclasses import dataclass +from dataclasses import dataclass, field, asdict +from granian.constants import Loops, Interfaces, ThreadModes, HTTPModes, StrEnum +from granian.log import LogLevels +from granian.http import HTTP1Settings, HTTP2Settings +from typing import Optional, Sequence, Dict, Any +from pwo import classproperty, Maybe +from yaml import add_representer, SafeDumper, SafeLoader -class ClassPropertyDescriptor(object): - - def __init__(self, fget, fset=None): - self.fget = fget - self.fset = fset - - def __get__(self, obj, klass=None): - if klass is None: - klass = type(obj) - return self.fget.__get__(obj, klass)() - - def __set__(self, obj, value): - if not self.fset: - raise AttributeError("can't set attribute") - type_ = type(obj) - return self.fset.__get__(obj, type_)(value) - - def setter(self, func): - if not isinstance(func, (classmethod, staticmethod)): - func = classmethod(func) - self.fset = func - return self - -def classproperty(func): - if not isinstance(func, (classmethod, staticmethod)): - func = classmethod(func) - return ClassPropertyDescriptor(func) @dataclass(frozen=True) class Configuration: - logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml') - plant_uml_server_address : str = environ.get('PLANT_UML_SERVER_ADDRESS', None) - + logging_configuration_file: str = environ.get("LOGGING_CONFIGURATION_FILE", + Path(__file__).parent / 'default-conf' / 'logging.yaml') + plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None) @classproperty def instance(cls) -> 'Configuration': return Configuration() + @dataclass(frozen=True) + class GranianConfiguration: + address: str = '127.0.0.1' + port: int = 8000 + interface: str = Interfaces.ASGI + workers: int = 1 + threads: int = 1 + blocking_threads: Optional[int] = None + threading_mode: ThreadModes = ThreadModes.workers + loop: Loops = Loops.auto + loop_opt: bool = False + 
http: HTTPModes = HTTPModes.auto + websockets: bool = True + backlog: int = 1024 + backpressure: Optional[int] = None + http1_settings: Optional[HTTP1Settings] = None + http2_settings: Optional[HTTP2Settings] = None + log_enabled: bool = True + log_level: LogLevels = LogLevels.info + log_dictconfig: Optional[Dict[str, Any]] = None + log_access: bool = False + log_access_format: Optional[str] = None + ssl_cert: Optional[Path] = None + ssl_key: Optional[Path] = None + ssl_key_password: Optional[str] = None + url_path_prefix: Optional[str] = None + respawn_failed_workers: bool = False + respawn_interval: float = 3.5 + workers_lifetime: Optional[int] = None + factory: bool = False + reload: bool = False + reload_paths: Optional[Sequence[Path]] = None + reload_ignore_dirs: Optional[Sequence[str]] = None + reload_ignore_patterns: Optional[Sequence[str]] = None + reload_ignore_paths: Optional[Sequence[Path]] = None + process_name: Optional[str] = None + pid_file: Optional[Path] = None + @staticmethod + def from_dict(d) -> 'Configuration.GranianConfiguration': + return Configuration.GranianConfiguration(**{k: v for k, v in dict( + address=d.get('address', None), + port=d.get('port', None), + interface=Maybe.of_nullable(d.get('interface')).map(lambda it: Interfaces(it)).or_else(None), + workers=d.get('workers', None), + threads=d.get('threads', None), + blocking_threads=d.get('blocking_threads', None), + threading_mode=Maybe.of_nullable(d.get('threading_mode')).map(lambda it: ThreadModes(it)).or_else(None), + loop=Maybe.of_nullable(d.get('loop')).map(lambda it: Loops(it)).or_else(None), + loop_opt=d.get('loop_opt', None), + http=Maybe.of_nullable(d.get('http')).map(lambda it: HTTPModes(it)).or_else(None), + websockets=d.get('websockets', None), + backlog=d.get('backlog', None), + backpressure=d.get('backpressure', None), + http1_settings=Maybe.of_nullable(d.get('http1_settings')).map(lambda it: HTTP1Settings(**it)).or_else(None), + 
http2_settings=Maybe.of_nullable(d.get('http2_settings')).map(lambda it: HTTP2Settings(**it)).or_else(None), + log_enabled=d.get('log_enabled', None), + log_level=Maybe.of_nullable(d.get('log_level')).map(lambda it: LogLevels(it)).or_else(None), + # log_dictconfig: Optional[Dict[str, Any]] = None, + log_access=d.get('log_access', None), + log_access_format=d.get('log_access_format', None), + ssl_cert=d.get('ssl_cert', None), + ssl_key=d.get('ssl_key', None), + ssl_key_password=d.get('ssl_key_password', None), + url_path_prefix=d.get('url_path_prefix', None), + respawn_failed_workers=d.get('respawn_failed_workers', None), + respawn_interval=d.get('respawn_interval', None), + workers_lifetime=d.get('workers_lifetime', None), + factory=d.get('factory', None), + reload=d.get('reload', None), + reload_paths=d.get('reload_paths', None), + reload_ignore_dirs=d.get('reload_ignore_dirs', None), + reload_ignore_patterns=d.get('reload_ignore_patterns', None), + reload_ignore_paths=d.get('reload_ignore_paths', None), + process_name=d.get('process_name', None), + pid_file=d.get('pid_file', None), + ).items() if v is not None}) + granian: GranianConfiguration = GranianConfiguration() + + @staticmethod + def from_dict(d) -> 'Configuration': + return Configuration( + **{k: v for k, v in dict( + logging_configuration_file=d.get('logging_configuration_file', None), + plant_uml_server_address=d.get('plant_uml_server_address', None), + granian=Maybe.of_nullable(d.get('granian')) + .map(Configuration.GranianConfiguration.from_dict) + .or_else(None) + ).items() if v is not None + } + ) + + def to_yaml(self, stream): + dumper = SafeDumper(stream) + dumper.add_representer(Configuration, lambda dumper, conf: dumper.represent_dict(asdict(conf))) + dumper.add_representer(Configuration.GranianConfiguration, + lambda dumper, conf: dumper.represent_dict(asdict(conf))) + dumper.add_representer(LogLevels, lambda dumper, level: dumper.represent_str(level.lower())) + 
dumper.add_multi_representer(Path, lambda dumper, path: dumper.represent_str(str(path))) + dumper.add_multi_representer(StrEnum, lambda dumper, str_enum: dumper.represent_str(str(str_enum))) + dumper.add_representer(HTTP1Settings, lambda dumper, settings: dumper.represent_dict(vars(settings))) + dumper.add_representer(HTTP2Settings, lambda dumper, settings: dumper.represent_dict(vars(settings))) + + try: + dumper.open() + dumper.represent(self) + dumper.close() + finally: + dumper.dispose() + + @staticmethod + def from_yaml(stream) -> 'Configuration': + loader = SafeLoader(stream) + try: + conf = loader.get_single_data() + return Configuration.from_dict(conf) + finally: + loader.dispose() diff --git a/src/bugis/server.py b/src/bugis/server.py index 490dfc3..b58ed54 100644 --- a/src/bugis/server.py +++ b/src/bugis/server.py @@ -30,11 +30,12 @@ mimeinit() cwd: 'StrOrBytesPath' = getcwd() -def completed_future[T](result : T) -> Future[T]: +def completed_future[T](result: T) -> Future[T]: future = Future() future.set_result(result) return future + def has_extension(filepath, extension): _, ext = splitext(filepath) return ext == extension @@ -47,11 +48,14 @@ def is_markdown(filepath): def is_dotfile(filepath): return has_extension(filepath, ".dot") + def is_plant_uml(filepath): return has_extension(filepath, ".puml") + logger = logging.getLogger(__name__) + class Server: root_dir: 'StrOrBytesPath' prefix: Optional['StrOrBytesPath'] @@ -74,7 +78,8 @@ class Server: url_path: str, etag: Optional[str], query_string: Optional[str], - send: Callable[[Mapping[str, Any]], Awaitable[None]] + send: Callable[[Mapping[str, Any]], Awaitable[None]], + pathsend: bool = False ): if method != 'GET': await send({ @@ -95,7 +100,8 @@ etag, digest = await self.compute_etag_and_digest( etag, url_path, - lambda: AiofilesContextManager(completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))), + lambda: AiofilesContextManager( + 
completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))), lambda: completed_future(mtime) ) if etag and etag == digest: @@ -158,6 +164,7 @@ class Server: result = graph.draw(None, format="svg", prog="dot") logger.debug("Completed Graphviz rendering for file '%s'", filepath) return result + body = await self._loop.run_in_executor(None, render_graphviz, path) await send({ 'type': 'http.response.start', @@ -195,8 +202,7 @@ class Server: 'more_body': False }) else: - async def read_file(file_path): - buffer_size = 0x10000 + async def read_file(file_path, buffer_size=0x10000): async with async_open(file_path, 'rb') as f: while True: result = await f.read(buffer_size) @@ -213,18 +219,23 @@ class Server: 'Cache-Control': 'no-cache' }) }) - - async for chunk in read_file(path): + if pathsend: + await send({ + 'type': 'http.response.pathsend', + 'path': path + }) + else: + async for chunk in read_file(path): + await send({ + 'type': 'http.response.body', + 'body': chunk, + 'more_body': True + }) await send({ 'type': 'http.response.body', - 'body': chunk, - 'more_body': True + 'body': b'', + 'more_body': False }) - await send({ - 'type': 'http.response.body', - 'body': b'', - 'more_body': False - }) elif await isdir(path): body = (await self.directory_listing(url_path, path)).encode() @@ -243,7 +254,7 @@ class Server: await self.not_found(send) @staticmethod - async def stream_hash(source: AsyncBufferedReader, bufsize=0x1000) -> bytes: + async def stream_hash(source: AsyncBufferedReader, bufsize=0x10000) -> bytes: if bufsize <= 0: raise ValueError("Buffer size must be greater than 0") md5 = hashlib.md5() @@ -255,7 +266,7 @@ class Server: return md5.digest() @staticmethod - async def file_hash(filepath, bufsize=0x1000) -> bytes: + async def file_hash(filepath, bufsize=0x10000) -> bytes: if bufsize <= 0: raise ValueError("Buffer size must be greater than 0") md5 = hashlib.md5() @@ -277,8 +288,8 @@ class Server: return ( Maybe.of_nullable(etag) - 
.map(skip_weak_marker) - .or_else(None) + .map(skip_weak_marker) + .or_else(None) ) async def compute_etag_and_digest( @@ -308,11 +319,11 @@ class Server: return etag, digest async def render_markdown(self, - url_path: 'StrOrBytesPath', - path: str, - raw: bool, - digest: str, - send) -> None: + url_path: 'StrOrBytesPath', + path: str, + raw: bool, + digest: str, + send) -> None: body = await compile_html( url_path, @@ -337,7 +348,7 @@ class Server: }) @staticmethod - async def not_modified(send, digest: str, cache_control: str ='no-cache') -> []: + async def not_modified(send, digest: str, cache_control: str = 'no-cache') -> []: await send({ 'type': 'http.response.start', 'status': 304, @@ -378,12 +389,15 @@ class Server: for entry in sorted(await listdir(path)): if await filter(join(path, entry)): yield entry + return result() async for entry in await ls(isdir): result += '
  • ' + entry + '/' + '
  • ' + async def file_filter(entry: str) -> bool: return await isfile(entry) and is_markdown(entry) + async for entry in await ls(file_filter): result += '
  • ' + entry + '
  • ' return result