added cli

This commit is contained in:
2024-10-24 23:16:03 +08:00
parent e2e4083321
commit 33a3858b02
10 changed files with 258 additions and 216 deletions

View File

@@ -59,4 +59,7 @@ exclude = ["scripts", "docs", "test"]
strict = true strict = true
[tool.setuptools_scm] [tool.setuptools_scm]
version_file = "src/bugis/_version.py" version_file = "src/bugis/_version.py"
[project.scripts]
bugis = "bugis.cli:main"

View File

@@ -1,10 +1,11 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt pyproject.toml # pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
@@ -12,8 +13,6 @@ anyio==4.6.2.post1
# via httpx # via httpx
asttokens==2.4.1 asttokens==2.4.1
# via stack-data # via stack-data
backports-tarfile==1.2.0
# via jaraco-context
build==1.2.2.post1 build==1.2.2.post1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
certifi==2024.8.30 certifi==2024.8.30
@@ -35,10 +34,6 @@ decorator==5.1.1
# ipython # ipython
docutils==0.21.2 docutils==0.21.2
# via readme-renderer # via readme-renderer
exceptiongroup==1.2.2
# via
# anyio
# ipython
executing==2.1.0 executing==2.1.0
# via stack-data # via stack-data
granian==1.6.1 granian==1.6.1
@@ -61,9 +56,7 @@ idna==3.10
# httpx # httpx
# requests # requests
importlib-metadata==8.5.0 importlib-metadata==8.5.0
# via # via twine
# keyring
# twine
ipdb==0.13.13 ipdb==0.13.13
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
ipython==8.28.0 ipython==8.28.0
@@ -94,7 +87,7 @@ more-itertools==10.5.0
# via # via
# jaraco-classes # jaraco-classes
# jaraco-functools # jaraco-functools
mypy==1.12.1 mypy==1.13.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
mypy-extensions==1.0.0 mypy-extensions==1.0.0
# via mypy # via mypy
@@ -114,7 +107,7 @@ ptyprocess==0.7.0
# via pexpect # via pexpect
pure-eval==0.2.3 pure-eval==0.2.3
# via stack-data # via stack-data
pwo==0.0.3 pwo==0.0.4
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pycparser==2.22 pycparser==2.22
# via cffi # via cffi
@@ -140,7 +133,7 @@ requests-toolbelt==1.0.0
# via twine # via twine
rfc3986==2.0.0 rfc3986==2.0.0
# via twine # via twine
rich==13.9.2 rich==13.9.3
# via twine # via twine
secretstorage==3.3.3 secretstorage==3.3.3
# via keyring # via keyring
@@ -152,24 +145,16 @@ sniffio==1.3.1
# httpx # httpx
stack-data==0.6.3 stack-data==0.6.3
# via ipython # via ipython
tomli==2.0.2
# via
# build
# ipdb
# mypy
traitlets==5.14.3 traitlets==5.14.3
# via # via
# ipython # ipython
# matplotlib-inline # matplotlib-inline
twine==5.1.1 twine==5.1.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
typing-extensions==4.7.1 typing-extensions==4.12.2
# via # via
# anyio
# ipython
# mypy # mypy
# pwo # pwo
# rich
urllib3==2.2.3 urllib3==2.2.3
# via # via
# requests # requests

View File

@@ -1,10 +1,11 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt pyproject.toml # pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
@@ -16,8 +17,6 @@ certifi==2024.8.30
# httpx # httpx
click==8.1.7 click==8.1.7
# via granian # via granian
exceptiongroup==1.2.2
# via anyio
granian==1.6.1 granian==1.6.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
h11==0.14.0 h11==0.14.0
@@ -38,7 +37,7 @@ idna==3.10
# httpx # httpx
markdown==3.7 markdown==3.7
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pwo==0.0.3 pwo==0.0.4
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pygments==2.18.0 pygments==2.18.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
@@ -50,10 +49,8 @@ sniffio==1.3.1
# via # via
# anyio # anyio
# httpx # httpx
typing-extensions==4.7.1 typing-extensions==4.12.2
# via # via pwo
# anyio
# pwo
uvloop==0.21.0 uvloop==0.21.0
# via granian # via granian
watchdog==5.0.3 watchdog==5.0.3

View File

@@ -1,10 +1,11 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml # pip-compile --output-file=requirements.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
@@ -14,8 +15,6 @@ certifi==2024.8.30
# via # via
# httpcore # httpcore
# httpx # httpx
exceptiongroup==1.2.2
# via anyio
h11==0.14.0 h11==0.14.0
# via httpcore # via httpcore
h2==4.1.0 h2==4.1.0
@@ -24,7 +23,7 @@ hpack==4.0.0
# via h2 # via h2
httpcore==1.0.6 httpcore==1.0.6
# via httpx # via httpx
httpx==0.27.2 httpx[http2]==0.27.2
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
hyperframe==6.0.1 hyperframe==6.0.1
# via h2 # via h2
@@ -34,7 +33,7 @@ idna==3.10
# httpx # httpx
markdown==3.7 markdown==3.7
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pwo==0.0.3 pwo==0.0.4
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pygments==2.18.0 pygments==2.18.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
@@ -46,9 +45,7 @@ sniffio==1.3.1
# via # via
# anyio # anyio
# httpx # httpx
typing-extensions==4.7.1 typing-extensions==4.12.2
# via # via pwo
# anyio
# pwo
watchdog==5.0.3 watchdog==5.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)

5
src/bugis/__main__.py Normal file
View File

@@ -0,0 +1,5 @@
"""Package entry point: allows running the server with ``python -m bugis``."""
import sys

# Must be a relative import: ``cli`` lives inside the ``bugis`` package, so a
# bare ``from cli import main`` raises ModuleNotFoundError under ``python -m bugis``.
from .cli import main

if __name__ == '__main__':
    # Forward everything after the interpreter/module name to the CLI parser.
    main(sys.argv[1:])

View File

@@ -19,7 +19,7 @@ from typing import Optional, Awaitable, Callable, Any, Mapping
log = logging.getLogger('access') log = logging.getLogger('access')
log.propagate = False log.propagate = False
_server : Optional[Server] = None _server: Optional[Server] = None
async def application(scope, receive, send : Callable[[Mapping[str, Any]], Awaitable[None]]): async def application(scope, receive, send : Callable[[Mapping[str, Any]], Awaitable[None]]):
global _server global _server
@@ -49,6 +49,9 @@ async def application(scope, receive, send : Callable[[Mapping[str, Any]], Await
.filter(lambda it: it.get('type') == 'http.response.start') .filter(lambda it: it.get('type') == 'http.response.start')
.if_present(maybe_log)) .if_present(maybe_log))
return result return result
pathsend = (Maybe.of_nullable(scope.get('extensions'))
.map(lambda it: it.get("http.response.pathsend"))
.is_present)
await _server.handle_request( await _server.handle_request(
scope['method'], scope['method'],
scope['path'], scope['path'],
@@ -58,6 +61,7 @@ async def application(scope, receive, send : Callable[[Mapping[str, Any]], Await
.map(lambda it: it.decode()) .map(lambda it: it.decode())
.or_else(None), .or_else(None),
Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None), Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
wrapped_send wrapped_send,
pathsend
) )

View File

@@ -1,54 +1,15 @@
import asyncio import asyncio
from asyncio import Queue, AbstractEventLoop, Future, Task, gather
from logging import getLogger, Logger
from pathlib import Path
from pwo import TopicManager, Subscriber
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer from watchdog.observers import Observer
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from pathlib import Path
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather
from typing import Callable, Optional
from logging import getLogger, Logger
log: Logger = getLogger(__name__) log: Logger = getLogger(__name__)
class Subscription:
_unsubscribe_callback: Callable[['Subscription'], None]
_event: Optional[Future]
_loop: AbstractEventLoop
def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
self._unsubscribe_callback = unsubscribe
self._event: Optional[Future] = None
self._loop = loop
def unsubscribe(self) -> None:
self._event.cancel()
self._unsubscribe_callback(self)
log.debug('Deleted subscription %s', id(self))
async def wait(self, tout: float) -> bool:
self._event = self._loop.create_future()
def callback():
if not self._event.done():
self._event.set_result(False)
handle = self._loop.call_later(tout, callback)
try:
log.debug('Subscription %s is waiting for an event', id(self))
return await self._event
except CancelledError:
return False
finally:
handle.cancel()
def notify(self) -> None:
log.debug('Subscription %s notified', id(self))
if not self._event.done():
self._event.set_result(True)
def reset(self) -> None:
self._event = self._loop.create_future()
class _EventHandler(FileSystemEventHandler): class _EventHandler(FileSystemEventHandler):
_queue: Queue _queue: Queue
@@ -67,22 +28,6 @@ class _EventHandler(FileSystemEventHandler):
self._loop.call_soon_threadsafe(self._queue.put_nowait, event) self._loop.call_soon_threadsafe(self._queue.put_nowait, event)
class AsyncQueueIterator:
_queue: Queue
def __init__(self, queue: Queue):
self._queue = queue
def __aiter__(self):
return self
async def __anext__(self):
item = await self._queue.get()
if item is None:
raise StopAsyncIteration
return item
observer = Observer() observer = Observer()
@@ -96,55 +41,11 @@ def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
observer.join() observer.join()
loop.call_soon_threadsafe(queue.put_nowait, None) loop.call_soon_threadsafe(queue.put_nowait, None)
class SubscriptionManager:
_loop: AbstractEventLoop
_queue: Queue
_subscriptions: dict[str, set[Subscription]]
def __init__(self, loop: AbstractEventLoop):
self._subscriptions: dict[str, set[Subscription]] = dict()
self._loop = loop
self._queue = Queue()
def subscribe(self, path: str) -> Subscription:
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.setdefault(path, set())
def unsubscribe_callback(subscription):
subscriptions_per_path.remove(subscription)
log.debug('Removed subscription %s to path %s', id(result), path)
result = Subscription(unsubscribe_callback, self._loop)
log.debug('Created subscription %s to path %s', id(result), path)
subscriptions_per_path.add(result)
return result
def _notify_subscriptions(self, path):
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.get(path, None)
if subscriptions_per_path:
log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}")
for s in subscriptions_per_path:
s.notify()
async def process_events(self):
async for evt in AsyncQueueIterator(self._queue):
log.debug(f"Processed event for path '{evt}'")
self._notify_subscriptions(evt)
log.debug(f"Event processor has completed")
def post_event(self, path):
def callback():
self._queue.put_nowait(path)
log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}")
self._loop.call_soon_threadsafe(callback)
class FileWatcher(PatternMatchingEventHandler): class FileWatcher(PatternMatchingEventHandler):
_subscription_manager: SubscriptionManager _topic_manager: TopicManager
_loop: AbstractEventLoop _loop: AbstractEventLoop
_subscription_manager_loop: Task _topic_manager_loop: Task
_running_tasks : Future _running_tasks : Future
def __init__(self, path): def __init__(self, path):
@@ -155,29 +56,29 @@ class FileWatcher(PatternMatchingEventHandler):
self._observer: Observer = Observer() self._observer: Observer = Observer()
self._observer.schedule(self, path=path, recursive=True) self._observer.schedule(self, path=path, recursive=True)
self._loop = asyncio.get_running_loop() self._loop = asyncio.get_running_loop()
self._subscription_manager = SubscriptionManager(self._loop) self._topic_manager = TopicManager(self._loop)
self._running_tasks = gather( self._running_tasks = gather(
self._loop.run_in_executor(None, self._observer.start), self._loop.run_in_executor(None, self._observer.start),
self._loop.create_task(self._subscription_manager.process_events()) self._loop.create_task(self._topic_manager.process_events())
) )
async def stop(self) -> None: async def stop(self) -> None:
def _observer_stop(): def _observer_stop():
self._observer.stop() self._observer.stop()
self._observer.join() self._observer.join()
self._subscription_manager.post_event(None) self._topic_manager.post_event(None)
await self._loop.run_in_executor(None, _observer_stop) await self._loop.run_in_executor(None, _observer_stop)
await self._running_tasks await self._running_tasks
def subscribe(self, path: str) -> Subscription: def subscribe(self, path: str) -> Subscriber:
return self._subscription_manager.subscribe(path) return self._topic_manager.subscribe(path)
def on_any_event(self, event: FileSystemEvent) -> None: def on_any_event(self, event: FileSystemEvent) -> None:
what = "directory" if event.is_directory else "file" what = "directory" if event.is_directory else "file"
def post_event(path): def post_event(path):
self._subscription_manager.post_event(path) self._topic_manager.post_event(path)
if isinstance(event, FileClosedEvent): if isinstance(event, FileClosedEvent):
log.debug("Closed %s: %s", what, event.src_path) log.debug("Closed %s: %s", what, event.src_path)

41
src/bugis/cli.py Normal file
View File

@@ -0,0 +1,41 @@
import argparse
from dataclasses import asdict
from os import environ
from pathlib import Path
from typing import Optional, Sequence
import yaml
from granian import Granian
from pwo import Maybe
from .configuration import Configuration
def main(args: Optional[Sequence[str]] = None):
    """Entry point for the ``bugis`` CLI.

    Parses command-line arguments, loads the optional YAML configuration file
    and starts a Granian server hosting the ``bugis.asgi:application`` ASGI app.

    :param args: argument list to parse; ``None`` lets argparse fall back to
        ``sys.argv[1:]``.
    """
    parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files")

    # Resolve the default configuration file per the XDG base-directory layout:
    # $XDG_CONFIG_HOME/bugis/bugis.yaml, falling back to ~/.config/bugis/bugis.yaml.
    # The original Maybe chain called .filter(...)/.or_else(...) on the value
    # already unwrapped by .or_else_get(), which would raise AttributeError;
    # plain stdlib logic avoids that and also survives an unset $HOME.
    config_home = environ.get('XDG_CONFIG_HOME')
    if config_home:
        candidate = Path(config_home) / 'bugis' / 'bugis.yaml'
    else:
        # Path.home() handles a missing HOME variable more robustly than
        # Path(environ.get('HOME')), which would fail with TypeError on None.
        candidate = Path.home() / '.config' / 'bugis' / 'bugis.yaml'
    # Only offer a default when the file actually exists, as before.
    default_configuration_file = candidate if candidate.exists() else None

    parser.add_argument(
        '-c',
        '--configuration',
        help='Path to the configuration file',
        default=default_configuration_file,
        type=Path,
    )
    args = parser.parse_args(args)

    def parse(configuration: Path) -> Configuration:
        # safe_load: configuration files must never execute arbitrary YAML tags.
        with open(configuration, 'r') as f:
            return Configuration.from_dict(yaml.safe_load(f))

    # Fall back to the built-in default configuration when no file was given.
    conf = Maybe.of_nullable(args.configuration).map(parse).or_else(Configuration.instance)
    Granian(
        "bugis.asgi:application",
        **asdict(conf.granian)
    ).serve()

View File

@@ -1,45 +1,140 @@
import os
from os import environ from os import environ
from pathlib import Path from pathlib import Path
from dataclasses import dataclass from dataclasses import dataclass, field, asdict
from granian.constants import Loops, Interfaces, ThreadModes, HTTPModes, StrEnum
from granian.log import LogLevels
from granian.http import HTTP1Settings, HTTP2Settings
from typing import Optional, Sequence, Dict, Any
from pwo import classproperty, Maybe
from yaml import add_representer, SafeDumper, SafeLoader
class ClassPropertyDescriptor(object):
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@dataclass(frozen=True) @dataclass(frozen=True)
class Configuration: class Configuration:
logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml') logging_configuration_file: str = environ.get("LOGGING_CONFIGURATION_FILE",
plant_uml_server_address : str = environ.get('PLANT_UML_SERVER_ADDRESS', None) Path(__file__).parent / 'default-conf' / 'logging.yaml')
plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
@classproperty @classproperty
def instance(cls) -> 'Configuration': def instance(cls) -> 'Configuration':
return Configuration() return Configuration()
@dataclass(frozen=True)
class GranianConfiguration:
address: str = '127.0.0.1'
port: int = 8000
interface: str = Interfaces.ASGI
workers: int = 1
threads: int = 1
blocking_threads: Optional[int] = None
threading_mode: ThreadModes = ThreadModes.workers
loop: Loops = Loops.auto
loop_opt: bool = False
http: HTTPModes = HTTPModes.auto
websockets: bool = True
backlog: int = 1024
backpressure: Optional[int] = None
http1_settings: Optional[HTTP1Settings] = None
http2_settings: Optional[HTTP2Settings] = None
log_enabled: bool = True
log_level: LogLevels = LogLevels.info
log_dictconfig: Optional[Dict[str, Any]] = None
log_access: bool = False
log_access_format: Optional[str] = None
ssl_cert: Optional[Path] = None
ssl_key: Optional[Path] = None
ssl_key_password: Optional[str] = None
url_path_prefix: Optional[str] = None
respawn_failed_workers: bool = False
respawn_interval: float = 3.5
workers_lifetime: Optional[int] = None
factory: bool = False
reload: bool = False
reload_paths: Optional[Sequence[Path]] = None
reload_ignore_dirs: Optional[Sequence[str]] = None
reload_ignore_patterns: Optional[Sequence[str]] = None
reload_ignore_paths: Optional[Sequence[Path]] = None
process_name: Optional[str] = None
pid_file: Optional[Path] = None
@staticmethod
def from_dict(d) -> 'Configuration.GranianConfiguration':
return Configuration.GranianConfiguration(**{k: v for k, v in dict(
address=d.get('address', None),
port=d.get('port', None),
interface=Maybe.of_nullable(d.get('interface')).map(lambda it: Interfaces(it)).or_else(None),
workers=d.get('workers', None),
threads=d.get('threads', None),
blocking_threads=d.get('blocking_threads', None),
threading_mode=Maybe.of_nullable(d.get('threading_modes')).map(lambda it: ThreadModes(it)).or_else(None),
loop=Maybe.of_nullable(d.get('loop')).map(lambda it: Loops(it)).or_else(None),
loop_opt=d.get('loop_opt', None),
http=Maybe.of_nullable(d.get('http')).map(lambda it: HTTPModes(it)).or_else(None),
websockets=d.get('websockets', None),
backlog=d.get('backlog', None),
backpressure=d.get('backpressure', None),
http1_settings=Maybe.of_nullable(d.get('http1_settings')).map(lambda it: HTTP1Settings(**it)).or_else(None),
http2_settings=Maybe.of_nullable(d.get('http2_settings')).map(lambda it: HTTP2Settings(**it)).or_else(None),
log_enabled=d.get('log_enabled', None),
log_level=Maybe.of_nullable(d.get('log_level')).map(lambda it: LogLevels(it)).or_else(None),
# log_dictconfig: Optional[Dict[str, Any]] = None,
log_access=d.get('log_access', None),
log_access_format=d.get('log_access_format', None),
ssl_cert=d.get('ssl_cert', None),
ssl_key=d.get('ssl_key', None),
ssl_key_password=d.get('ssl_key_password', None),
url_path_prefix=d.get('url_path_prefix', None),
respawn_failed_workers=d.get('respawn_failed_workers', None),
respawn_interval=d.get('respawn_interval', None),
workers_lifetime=d.get('workers_lifetime', None),
factory=d.get('factory', None),
reload=d.get('reload', None),
reload_paths=d.get('reload_paths', None),
reload_ignore_dirs=d.get('reload_ignore_dirs', None),
reload_ignore_patterns=d.get('reload_ignore_patterns', None),
reload_ignore_paths=d.get('reload_ignore_paths', None),
process_name=d.get('process_name', None),
pid_file=d.get('pid_file', None),
).items() if v is not None})
granian: GranianConfiguration = GranianConfiguration()
@staticmethod
def from_dict(d) -> 'Configuration':
return Configuration(
**{k: v for k, v in dict(
logging_configuration_file=d.get('logging_configuration_file', None),
plant_uml_server_address=d.get('plant_uml_server_address', None),
granian=Maybe.of_nullable(d.get('granian'))
.map(Configuration.GranianConfiguration.from_dict)
.or_else(None)
).items() if v is not None
}
)
def to_yaml(self, stream):
dumper = SafeDumper(stream)
dumper.add_representer(Configuration, lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(Configuration.GranianConfiguration,
lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(LogLevels, lambda dumper, level: dumper.represent_str(level.lower()))
dumper.add_multi_representer(Path, lambda dumper, path: dumper.represent_str(str(path)))
dumper.add_multi_representer(StrEnum, lambda dumper, str_enum: dumper.represent_str(str(str_enum)))
dumper.add_representer(HTTP1Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
dumper.add_representer(HTTP2Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
try:
dumper.open()
dumper.represent(Configuration.instance)
dumper.close()
finally:
dumper.dispose()
@staticmethod
def from_yaml(stream) -> 'Configuration':
loader = SafeLoader(stream)
try:
conf = loader.get_single_data()
return Configuration.from_dict(conf)
finally:
loader.dispose()

View File

@@ -30,11 +30,12 @@ mimeinit()
cwd: 'StrOrBytesPath' = getcwd() cwd: 'StrOrBytesPath' = getcwd()
def completed_future[T](result : T) -> Future[T]: def completed_future[T](result: T) -> Future[T]:
future = Future() future = Future()
future.set_result(result) future.set_result(result)
return future return future
def has_extension(filepath, extension): def has_extension(filepath, extension):
_, ext = splitext(filepath) _, ext = splitext(filepath)
return ext == extension return ext == extension
@@ -47,11 +48,14 @@ def is_markdown(filepath):
def is_dotfile(filepath): def is_dotfile(filepath):
return has_extension(filepath, ".dot") return has_extension(filepath, ".dot")
def is_plant_uml(filepath): def is_plant_uml(filepath):
return has_extension(filepath, ".puml") return has_extension(filepath, ".puml")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class Server: class Server:
root_dir: 'StrOrBytesPath' root_dir: 'StrOrBytesPath'
prefix: Optional['StrOrBytesPath'] prefix: Optional['StrOrBytesPath']
@@ -74,7 +78,8 @@ class Server:
url_path: str, url_path: str,
etag: Optional[str], etag: Optional[str],
query_string: Optional[str], query_string: Optional[str],
send: Callable[[Mapping[str, Any]], Awaitable[None]] send: Callable[[Mapping[str, Any]], Awaitable[None]],
pathsend: bool = False
): ):
if method != 'GET': if method != 'GET':
await send({ await send({
@@ -95,7 +100,8 @@ class Server:
etag, digest = await self.compute_etag_and_digest( etag, digest = await self.compute_etag_and_digest(
etag, etag,
url_path, url_path,
lambda: AiofilesContextManager(completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))), lambda: AiofilesContextManager(
completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
lambda: completed_future(mtime) lambda: completed_future(mtime)
) )
if etag and etag == digest: if etag and etag == digest:
@@ -158,6 +164,7 @@ class Server:
result = graph.draw(None, format="svg", prog="dot") result = graph.draw(None, format="svg", prog="dot")
logger.debug("Completed Graphviz rendering for file '%s'", filepath) logger.debug("Completed Graphviz rendering for file '%s'", filepath)
return result return result
body = await self._loop.run_in_executor(None, render_graphviz, path) body = await self._loop.run_in_executor(None, render_graphviz, path)
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
@@ -195,8 +202,7 @@ class Server:
'more_body': False 'more_body': False
}) })
else: else:
async def read_file(file_path): async def read_file(file_path, buffer_size=0x10000):
buffer_size = 0x10000
async with async_open(file_path, 'rb') as f: async with async_open(file_path, 'rb') as f:
while True: while True:
result = await f.read(buffer_size) result = await f.read(buffer_size)
@@ -213,18 +219,23 @@ class Server:
'Cache-Control': 'no-cache' 'Cache-Control': 'no-cache'
}) })
}) })
if pathsend:
async for chunk in read_file(path): await send({
'type': 'http.response.pathsend',
'path': path
})
else:
async for chunk in read_file(path):
await send({
'type': 'http.response.body',
'body': chunk,
'more_body': True
})
await send({ await send({
'type': 'http.response.body', 'type': 'http.response.body',
'body': chunk, 'body': b'',
'more_body': True 'more_body': False
}) })
await send({
'type': 'http.response.body',
'body': b'',
'more_body': False
})
elif await isdir(path): elif await isdir(path):
body = (await self.directory_listing(url_path, path)).encode() body = (await self.directory_listing(url_path, path)).encode()
@@ -243,7 +254,7 @@ class Server:
await self.not_found(send) await self.not_found(send)
@staticmethod @staticmethod
async def stream_hash(source: AsyncBufferedReader, bufsize=0x1000) -> bytes: async def stream_hash(source: AsyncBufferedReader, bufsize=0x10000) -> bytes:
if bufsize <= 0: if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0") raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5() md5 = hashlib.md5()
@@ -255,7 +266,7 @@ class Server:
return md5.digest() return md5.digest()
@staticmethod @staticmethod
async def file_hash(filepath, bufsize=0x1000) -> bytes: async def file_hash(filepath, bufsize=0x10000) -> bytes:
if bufsize <= 0: if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0") raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5() md5 = hashlib.md5()
@@ -277,8 +288,8 @@ class Server:
return ( return (
Maybe.of_nullable(etag) Maybe.of_nullable(etag)
.map(skip_weak_marker) .map(skip_weak_marker)
.or_else(None) .or_else(None)
) )
async def compute_etag_and_digest( async def compute_etag_and_digest(
@@ -308,11 +319,11 @@ class Server:
return etag, digest return etag, digest
async def render_markdown(self, async def render_markdown(self,
url_path: 'StrOrBytesPath', url_path: 'StrOrBytesPath',
path: str, path: str,
raw: bool, raw: bool,
digest: str, digest: str,
send) -> None: send) -> None:
body = await compile_html( body = await compile_html(
url_path, url_path,
@@ -337,7 +348,7 @@ class Server:
}) })
@staticmethod @staticmethod
async def not_modified(send, digest: str, cache_control: str ='no-cache') -> []: async def not_modified(send, digest: str, cache_control: str = 'no-cache') -> []:
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
'status': 304, 'status': 304,
@@ -378,12 +389,15 @@ class Server:
for entry in sorted(await listdir(path)): for entry in sorted(await listdir(path)):
if await filter(join(path, entry)): if await filter(join(path, entry)):
yield entry yield entry
return result() return result()
async for entry in await ls(isdir): async for entry in await ls(isdir):
result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>' result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
async def file_filter(entry: str) -> bool: async def file_filter(entry: str) -> bool:
return await isfile(entry) and is_markdown(entry) return await isfile(entry) and is_markdown(entry)
async for entry in await ls(file_filter): async for entry in await ls(file_filter):
result += '<li><a href="' + entry + '"/>' + entry + '</li>' result += '<li><a href="' + entry + '"/>' + entry + '</li>'
return result return result