added cli

This commit is contained in:
2024-10-24 23:16:03 +08:00
parent 2ca7f8e908
commit b4e6cecdfe
10 changed files with 271 additions and 225 deletions

View File

@@ -29,7 +29,7 @@ dependencies = [
"PyYAML",
"pygraphviz",
"aiofiles",
"aiohttp[speedups]"
"aiohttp[speedups]",
]
[project.optional-dependencies]
@@ -59,4 +59,7 @@ exclude = ["scripts", "docs", "test"]
strict = true
[tool.setuptools_scm]
version_file = "src/bugis/_version.py"
version_file = "src/bugis/_version.py"
[project.scripts]
bugis = "bugis.cli:main"

View File

@@ -1,10 +1,11 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt --strip-extras pyproject.toml
# pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
#
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
@@ -12,18 +13,14 @@ aiofiles==24.1.0
# via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
aiohttp[speedups]==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
asttokens==2.4.1
# via stack-data
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
backports-tarfile==1.2.0
# via jaraco-context
brotli==1.1.0
# via aiohttp
build==1.2.2.post1
@@ -46,11 +43,9 @@ decorator==5.1.1
# ipython
docutils==0.21.2
# via readme-renderer
exceptiongroup==1.2.2
# via ipython
executing==2.1.0
# via stack-data
frozenlist==1.4.1
frozenlist==1.5.0
# via
# aiohttp
# aiosignal
@@ -61,9 +56,7 @@ idna==3.10
# requests
# yarl
importlib-metadata==8.5.0
# via
# keyring
# twine
# via twine
ipdb==0.13.13
# via bugis (pyproject.toml)
ipython==8.28.0
@@ -98,7 +91,7 @@ multidict==6.1.0
# via
# aiohttp
# yarl
mypy==1.12.1
mypy==1.13.0
# via bugis (pyproject.toml)
mypy-extensions==1.0.0
# via mypy
@@ -120,7 +113,7 @@ ptyprocess==0.7.0
# via pexpect
pure-eval==0.2.3
# via stack-data
pwo==0.0.3
pwo==0.0.4
# via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
@@ -148,7 +141,7 @@ requests-toolbelt==1.0.0
# via twine
rfc3986==2.0.0
# via twine
rich==13.9.2
rich==13.9.3
# via twine
secretstorage==3.3.3
# via keyring
@@ -156,24 +149,16 @@ six==1.16.0
# via asttokens
stack-data==0.6.3
# via ipython
tomli==2.0.2
# via
# build
# ipdb
# mypy
traitlets==5.14.3
# via
# ipython
# matplotlib-inline
twine==5.1.1
# via bugis (pyproject.toml)
typing-extensions==4.7.1
typing-extensions==4.12.2
# via
# ipython
# multidict
# mypy
# pwo
# rich
urllib3==2.2.3
# via
# requests

View File

@@ -1,10 +1,11 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt --strip-extras pyproject.toml
# pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml
#
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
@@ -12,12 +13,10 @@ aiofiles==24.1.0
# via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
aiohttp[speedups]==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
@@ -26,7 +25,7 @@ cffi==1.17.1
# via pycares
click==8.1.7
# via granian
frozenlist==1.4.1
frozenlist==1.5.0
# via
# aiohttp
# aiosignal
@@ -42,7 +41,7 @@ multidict==6.1.0
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
pwo==0.0.4
# via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
@@ -54,10 +53,8 @@ pygraphviz==1.14
# via bugis (pyproject.toml)
pyyaml==6.0.2
# via bugis (pyproject.toml)
typing-extensions==4.7.1
# via
# multidict
# pwo
typing-extensions==4.12.2
# via pwo
uvloop==0.21.0
# via granian
watchdog==5.0.3

View File

@@ -1,10 +1,11 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml
# pip-compile pyproject.toml
#
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
@@ -12,19 +13,17 @@ aiofiles==24.1.0
# via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3
# via aiohttp
aiohttp==3.10.10
aiohttp[speedups]==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
# via aiohttp
cffi==1.17.1
# via pycares
frozenlist==1.4.1
frozenlist==1.5.0
# via
# aiohttp
# aiosignal
@@ -38,7 +37,7 @@ multidict==6.1.0
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
pwo==0.0.4
# via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
@@ -50,10 +49,8 @@ pygraphviz==1.14
# via bugis (pyproject.toml)
pyyaml==6.0.2
# via bugis (pyproject.toml)
typing-extensions==4.7.1
# via
# multidict
# pwo
typing-extensions==4.12.2
# via pwo
watchdog==5.0.3
# via bugis (pyproject.toml)
yarl==1.16.0

5
src/bugis/__main__.py Normal file
View File

@@ -0,0 +1,5 @@
"""Package entry point: lets the server be started with ``python -m bugis``."""
import sys

# The original ``from cli import main`` only resolves when the package directory
# itself is on sys.path; the package-relative import works under ``python -m bugis``
# and matches the console-script target ``bugis = "bugis.cli:main"``.
# (``import argparse`` was unused here — argument parsing lives in bugis.cli.)
from .cli import main

if __name__ == '__main__':
    main(sys.argv[1:])

View File

@@ -19,7 +19,7 @@ from typing import Optional
log = logging.getLogger('access')
log.propagate = False
_server : Optional[Server] = None
_server: Optional[Server] = None
async def application(scope, receive, send):
global _server
@@ -49,6 +49,9 @@ async def application(scope, receive, send):
.filter(lambda it: it.get('type') == 'http.response.start')
.if_present(maybe_log))
return result
pathsend = (Maybe.of_nullable(scope.get('extensions'))
.map(lambda it: it.get("http.response.pathsend"))
.is_present)
await _server.handle_request(
scope['method'],
scope['path'],
@@ -58,6 +61,7 @@ async def application(scope, receive, send):
.map(lambda it: it.decode())
.or_else(None),
Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
wrapped_send
wrapped_send,
pathsend
)

View File

@@ -1,54 +1,15 @@
import asyncio
from asyncio import Queue, AbstractEventLoop, Future, Task, gather
from logging import getLogger, Logger
from pathlib import Path
from pwo import TopicManager, Subscriber
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from pathlib import Path
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather
from typing import Callable, Optional
from logging import getLogger, Logger
log: Logger = getLogger(__name__)
class Subscription:
_unsubscribe_callback: Callable[['Subscription'], None]
_event: Optional[Future]
_loop: AbstractEventLoop
def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
self._unsubscribe_callback = unsubscribe
self._event: Optional[Future] = None
self._loop = loop
def unsubscribe(self) -> None:
self._event.cancel()
self._unsubscribe_callback(self)
log.debug('Deleted subscription %s', id(self))
async def wait(self, tout: float) -> bool:
self._event = self._loop.create_future()
def callback():
if not self._event.done():
self._event.set_result(False)
handle = self._loop.call_later(tout, callback)
try:
log.debug('Subscription %s is waiting for an event', id(self))
return await self._event
except CancelledError:
return False
finally:
handle.cancel()
def notify(self) -> None:
log.debug('Subscription %s notified', id(self))
if not self._event.done():
self._event.set_result(True)
def reset(self) -> None:
self._event = self._loop.create_future()
class _EventHandler(FileSystemEventHandler):
_queue: Queue
@@ -67,22 +28,6 @@ class _EventHandler(FileSystemEventHandler):
self._loop.call_soon_threadsafe(self._queue.put_nowait, event)
class AsyncQueueIterator:
_queue: Queue
def __init__(self, queue: Queue):
self._queue = queue
def __aiter__(self):
return self
async def __anext__(self):
item = await self._queue.get()
if item is None:
raise StopAsyncIteration
return item
observer = Observer()
@@ -96,55 +41,11 @@ def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
observer.join()
loop.call_soon_threadsafe(queue.put_nowait, None)
class SubscriptionManager:
_loop: AbstractEventLoop
_queue: Queue
_subscriptions: dict[str, set[Subscription]]
def __init__(self, loop: AbstractEventLoop):
self._subscriptions: dict[str, set[Subscription]] = dict()
self._loop = loop
self._queue = Queue()
def subscribe(self, path: str) -> Subscription:
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.setdefault(path, set())
def unsubscribe_callback(subscription):
subscriptions_per_path.remove(subscription)
log.debug('Removed subscription %s to path %s', id(result), path)
result = Subscription(unsubscribe_callback, self._loop)
log.debug('Created subscription %s to path %s', id(result), path)
subscriptions_per_path.add(result)
return result
def _notify_subscriptions(self, path):
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.get(path, None)
if subscriptions_per_path:
log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}")
for s in subscriptions_per_path:
s.notify()
async def process_events(self):
async for evt in AsyncQueueIterator(self._queue):
log.debug(f"Processed event for path '{evt}'")
self._notify_subscriptions(evt)
log.debug(f"Event processor has completed")
def post_event(self, path):
def callback():
self._queue.put_nowait(path)
log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}")
self._loop.call_soon_threadsafe(callback)
class FileWatcher(PatternMatchingEventHandler):
_subscription_manager: SubscriptionManager
_topic_manager: TopicManager
_loop: AbstractEventLoop
_subscription_manager_loop: Task
_topic_manager_loop: Task
_running_tasks : Future
def __init__(self, path):
@@ -155,29 +56,29 @@ class FileWatcher(PatternMatchingEventHandler):
self._observer: Observer = Observer()
self._observer.schedule(self, path=path, recursive=True)
self._loop = asyncio.get_running_loop()
self._subscription_manager = SubscriptionManager(self._loop)
self._topic_manager = TopicManager(self._loop)
self._running_tasks = gather(
self._loop.run_in_executor(None, self._observer.start),
self._loop.create_task(self._subscription_manager.process_events())
self._loop.create_task(self._topic_manager.process_events())
)
async def stop(self) -> None:
def _observer_stop():
self._observer.stop()
self._observer.join()
self._subscription_manager.post_event(None)
self._topic_manager.post_event(None)
await self._loop.run_in_executor(None, _observer_stop)
await self._running_tasks
def subscribe(self, path: str) -> Subscription:
return self._subscription_manager.subscribe(path)
def subscribe(self, path: str) -> Subscriber:
return self._topic_manager.subscribe(path)
def on_any_event(self, event: FileSystemEvent) -> None:
what = "directory" if event.is_directory else "file"
def post_event(path):
self._subscription_manager.post_event(path)
self._topic_manager.post_event(path)
if isinstance(event, FileClosedEvent):
log.debug("Closed %s: %s", what, event.src_path)

41
src/bugis/cli.py Normal file
View File

@@ -0,0 +1,41 @@
import argparse
from dataclasses import asdict
from os import environ
from pathlib import Path
from typing import Optional, Sequence

import yaml
from granian import Granian
from pwo import Maybe

from .configuration import Configuration


def _default_configuration_file() -> Optional[Path]:
    """Return the default configuration file, or ``None`` when it does not exist.

    Follows the XDG base-directory convention:
    ``$XDG_CONFIG_HOME/bugis/bugis.yaml``, falling back to
    ``~/.config/bugis/bugis.yaml``.
    """
    # environ.get may return None, so use of_nullable (Maybe.of is for non-null
    # values — this mirrors the of_nullable usage elsewhere in this module).
    config_home = (Maybe.of_nullable(environ.get('XDG_CONFIG_HOME'))
                   .map(Path)
                   # Path.home() also works when $HOME is unset, unlike
                   # Path(environ.get('HOME')) which would raise on None.
                   .or_else_get(lambda: Path.home() / '.config'))
    candidate = config_home / 'bugis' / 'bugis.yaml'
    # The existence check is done on the plain Path: chaining .filter() after
    # .or_else_get() is invalid, since or_else_get unwraps the Maybe.
    return candidate if candidate.exists() else None


def main(args: Optional[Sequence[str]] = None) -> None:
    """CLI entry point: parse arguments, load configuration, start the server.

    :param args: command-line arguments; when ``None`` argparse falls back to
        ``sys.argv[1:]``.
    """
    parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files")
    parser.add_argument(
        '-c',
        '--configuration',
        help='Path to the configuration file',
        default=_default_configuration_file(),
        type=Path,
    )
    parsed = parser.parse_args(args)

    def parse(configuration: Path) -> Configuration:
        # Configuration files are user-supplied: safe_load avoids arbitrary
        # object construction from the YAML document.
        with open(configuration, 'r') as f:
            return Configuration.from_dict(yaml.safe_load(f))

    conf = Maybe.of_nullable(parsed.configuration).map(parse).or_else(Configuration.instance)
    Granian(
        "bugis.asgi:application",
        **asdict(conf.granian)
    ).serve()

View File

@@ -1,45 +1,140 @@
import os
from os import environ
from pathlib import Path
from dataclasses import dataclass
from dataclasses import dataclass, field, asdict
from granian.constants import Loops, Interfaces, ThreadModes, HTTPModes, StrEnum
from granian.log import LogLevels
from granian.http import HTTP1Settings, HTTP2Settings
from typing import Optional, Sequence, Dict, Any
from pwo import classproperty, Maybe
from yaml import add_representer, SafeDumper, SafeLoader
class ClassPropertyDescriptor(object):
def __init__(self, fget, fset=None):
self.fget = fget
self.fset = fset
def __get__(self, obj, klass=None):
if klass is None:
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@dataclass(frozen=True)
class Configuration:
logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml')
plant_uml_server_address : str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
logging_configuration_file: str = environ.get("LOGGING_CONFIGURATION_FILE",
Path(__file__).parent / 'default-conf' / 'logging.yaml')
plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
@classproperty
def instance(cls) -> 'Configuration':
return Configuration()
@dataclass(frozen=True)
class GranianConfiguration:
address: str = '127.0.0.1'
port: int = 8000
interface: str = Interfaces.ASGI
workers: int = 1
threads: int = 1
blocking_threads: Optional[int] = None
threading_mode: ThreadModes = ThreadModes.workers
loop: Loops = Loops.auto
loop_opt: bool = False
http: HTTPModes = HTTPModes.auto
websockets: bool = True
backlog: int = 1024
backpressure: Optional[int] = None
http1_settings: Optional[HTTP1Settings] = None
http2_settings: Optional[HTTP2Settings] = None
log_enabled: bool = True
log_level: LogLevels = LogLevels.info
log_dictconfig: Optional[Dict[str, Any]] = None
log_access: bool = False
log_access_format: Optional[str] = None
ssl_cert: Optional[Path] = None
ssl_key: Optional[Path] = None
ssl_key_password: Optional[str] = None
url_path_prefix: Optional[str] = None
respawn_failed_workers: bool = False
respawn_interval: float = 3.5
workers_lifetime: Optional[int] = None
factory: bool = False
reload: bool = False
reload_paths: Optional[Sequence[Path]] = None
reload_ignore_dirs: Optional[Sequence[str]] = None
reload_ignore_patterns: Optional[Sequence[str]] = None
reload_ignore_paths: Optional[Sequence[Path]] = None
process_name: Optional[str] = None
pid_file: Optional[Path] = None
@staticmethod
def from_dict(d) -> 'Configuration.GranianConfiguration':
return Configuration.GranianConfiguration(**{k: v for k, v in dict(
address=d.get('address', None),
port=d.get('port', None),
interface=Maybe.of_nullable(d.get('interface')).map(lambda it: Interfaces(it)).or_else(None),
workers=d.get('workers', None),
threads=d.get('threads', None),
blocking_threads=d.get('blocking_threads', None),
threading_mode=Maybe.of_nullable(d.get('threading_modes')).map(lambda it: ThreadModes(it)).or_else(None),
loop=Maybe.of_nullable(d.get('loop')).map(lambda it: Loops(it)).or_else(None),
loop_opt=d.get('loop_opt', None),
http=Maybe.of_nullable(d.get('http')).map(lambda it: HTTPModes(it)).or_else(None),
websockets=d.get('websockets', None),
backlog=d.get('backlog', None),
backpressure=d.get('backpressure', None),
http1_settings=Maybe.of_nullable(d.get('http1_settings')).map(lambda it: HTTP1Settings(**it)).or_else(None),
http2_settings=Maybe.of_nullable(d.get('http2_settings')).map(lambda it: HTTP2Settings(**it)).or_else(None),
log_enabled=d.get('log_enabled', None),
log_level=Maybe.of_nullable(d.get('log_level')).map(lambda it: LogLevels(it)).or_else(None),
# log_dictconfig: Optional[Dict[str, Any]] = None,
log_access=d.get('log_access', None),
log_access_format=d.get('log_access_format', None),
ssl_cert=d.get('ssl_cert', None),
ssl_key=d.get('ssl_key', None),
ssl_key_password=d.get('ssl_key_password', None),
url_path_prefix=d.get('url_path_prefix', None),
respawn_failed_workers=d.get('respawn_failed_workers', None),
respawn_interval=d.get('respawn_interval', None),
workers_lifetime=d.get('workers_lifetime', None),
factory=d.get('factory', None),
reload=d.get('reload', None),
reload_paths=d.get('reload_paths', None),
reload_ignore_dirs=d.get('reload_ignore_dirs', None),
reload_ignore_patterns=d.get('reload_ignore_patterns', None),
reload_ignore_paths=d.get('reload_ignore_paths', None),
process_name=d.get('process_name', None),
pid_file=d.get('pid_file', None),
).items() if v is not None})
granian: GranianConfiguration = GranianConfiguration()
@staticmethod
def from_dict(d) -> 'Configuration':
return Configuration(
**{k: v for k, v in dict(
logging_configuration_file=d.get('logging_configuration_file', None),
plant_uml_server_address=d.get('plant_uml_server_address', None),
granian=Maybe.of_nullable(d.get('granian'))
.map(Configuration.GranianConfiguration.from_dict)
.or_else(None)
).items() if v is not None
}
)
def to_yaml(self, stream):
dumper = SafeDumper(stream)
dumper.add_representer(Configuration, lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(Configuration.GranianConfiguration,
lambda dumper, conf: dumper.represent_dict(asdict(conf)))
dumper.add_representer(LogLevels, lambda dumper, level: dumper.represent_str(level.lower()))
dumper.add_multi_representer(Path, lambda dumper, path: dumper.represent_str(str(path)))
dumper.add_multi_representer(StrEnum, lambda dumper, str_enum: dumper.represent_str(str(str_enum)))
dumper.add_representer(HTTP1Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
dumper.add_representer(HTTP2Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
try:
dumper.open()
dumper.represent(Configuration.instance)
dumper.close()
finally:
dumper.dispose()
@staticmethod
def from_yaml(stream) -> 'Configuration':
loader = SafeLoader(stream)
try:
conf = loader.get_single_data()
return Configuration.from_dict(conf)
finally:
loader.dispose()

View File

@@ -29,11 +29,12 @@ mimeinit()
cwd: 'StrOrBytesPath' = getcwd()
def completed_future[T](result : T) -> Future[T]:
def completed_future[T](result: T) -> Future[T]:
future = Future()
future.set_result(result)
return future
def has_extension(filepath, extension):
_, ext = splitext(filepath)
return ext == extension
@@ -46,13 +47,16 @@ def is_markdown(filepath):
def is_dotfile(filepath):
return has_extension(filepath, ".dot")
def is_plant_uml(filepath):
return has_extension(filepath, ".puml")
logger = logging.getLogger(__name__)
class Server:
_loop : AbstractEventLoop
_loop: AbstractEventLoop
def __init__(self,
root_dir: 'StrOrBytesPath' = getcwd(),
@@ -64,7 +68,12 @@ class Server:
self.prefix = prefix and normpath(f'{prefix.decode()}')
self._loop = loop
async def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], send):
async def handle_request(self,
method: str,
url_path: str,
etag: Optional[str],
query_string: Optional[str], send,
pathsend: bool = False):
if method != 'GET':
await send({
'type': 'http.response.start',
@@ -84,11 +93,12 @@ class Server:
etag, digest = await self.compute_etag_and_digest(
etag,
url_path,
lambda: AiofilesContextManager(completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
lambda: AiofilesContextManager(
completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
lambda: completed_future(mtime)
)
if etag and etag == digest:
await self.not_modified(send, digest, ('Cache-Control', 'must-revalidate, max-age=86400'))
await self.not_modified(send, digest, 'must-revalidate, max-age=86400')
return
elif content:
mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
@@ -147,6 +157,7 @@ class Server:
result = graph.draw(None, format="svg", prog="dot")
logger.debug("Completed Graphviz rendering for file '%s'", filepath)
return result
body = await self._loop.run_in_executor(None, render_graphviz, path)
await send({
'type': 'http.response.start',
@@ -179,8 +190,7 @@ class Server:
'body': body
})
else:
async def read_file(file_path):
buffer_size = 0x10000
async def read_file(file_path, buffer_size=0x10000):
async with async_open(file_path, 'rb') as f:
while True:
result = await f.read(buffer_size)
@@ -197,18 +207,23 @@ class Server:
'Cache-Control': 'no-cache'
})
})
async for chunk in read_file(path):
if pathsend:
await send({
'type': 'http.response.pathsend',
'path': path
})
else:
async for chunk in read_file(path):
await send({
'type': 'http.response.body',
'body': chunk,
'more_body': True
})
await send({
'type': 'http.response.body',
'body': chunk,
'more_body': True
'body': b'',
'more_body': False
})
await send({
'type': 'http.response.body',
'body': b'',
'more_body': False
})
elif await isdir(path):
body = (await self.directory_listing(url_path, path)).encode()
@@ -227,7 +242,7 @@ class Server:
await self.not_found(send)
@staticmethod
async def stream_hash(source: AsyncBufferedReader, bufsize=0x1000) -> bytes:
async def stream_hash(source: AsyncBufferedReader, bufsize=0x10000) -> bytes:
if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5()
@@ -239,7 +254,7 @@ class Server:
return md5.digest()
@staticmethod
async def file_hash(filepath, bufsize=0x1000) -> bytes:
async def file_hash(filepath, bufsize=0x10000) -> bytes:
if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5()
@@ -261,8 +276,8 @@ class Server:
return (
Maybe.of_nullable(etag)
.map(skip_weak_marker)
.or_else(None)
.map(skip_weak_marker)
.or_else(None)
)
async def compute_etag_and_digest(
@@ -292,11 +307,11 @@ class Server:
return etag, digest
async def render_markdown(self,
url_path: 'StrOrBytesPath',
path: str,
raw: bool,
digest: str,
send) -> None:
url_path: 'StrOrBytesPath',
path: str,
raw: bool,
digest: str,
send) -> None:
body = await compile_html(
url_path,
@@ -321,7 +336,7 @@ class Server:
})
@staticmethod
async def not_modified(send, digest: str, cache_control: str ='no-cache') -> []:
async def not_modified(send, digest: str, cache_control: str = 'no-cache') -> []:
await send({
'type': 'http.response.start',
'status': 304,
@@ -362,15 +377,18 @@ class Server:
for entry in sorted(await listdir(path)):
if await filter(join(path, entry)):
yield entry
return result()
async for entry in await ls(isdir):
result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
async def file_filter(entry: str) -> bool:
return await isfile(entry) and is_markdown(entry)
async for entry in await ls(file_filter):
result += '<li><a href="' + entry + '"/>' + entry + '</li>'
return result
async def stop(self):
await self.file_watcher.stop()
await self.file_watcher.stop()