6 Commits

Author SHA1 Message Date
6acf6d1d6e updated README.md 2024-10-30 15:40:21 +08:00
02737bf9b4 added command line arguments
All checks were successful
CI / Build Pip package (push) Successful in 18s
CI / Build Docker image (push) Successful in 1m2s
2024-10-28 00:10:56 +08:00
29bdad09bf reduced Docker image size
All checks were successful
CI / Build Pip package (push) Successful in 16s
CI / Build Docker image (push) Successful in 3m6s
2024-10-25 07:25:13 +08:00
16dbd3a82a added nginx configuration file 2024-10-24 23:55:21 +08:00
All checks were successful
CI / Build Pip package (push) Successful in 16s
CI / Build Docker image (push) Successful in 3m33s
2024-10-24 23:55:21 +08:00
33a3858b02 added cli 2024-10-24 23:26:06 +08:00
e2e4083321 switch from aiohttp to httpx 2024-10-24 23:18:23 +08:00
15 changed files with 507 additions and 377 deletions

View File

@@ -9,10 +9,10 @@ RUN adduser -D luser
USER luser USER luser
WORKDIR /home/luser WORKDIR /home/luser
COPY --chown=luser:users ./requirements-dev.txt ./requirements-dev.txt COPY --chown=luser:users ./requirements-dev.txt ./requirements-dev.txt
COPY --chown=luser:users ./requirements-dev.txt ./requirements-run.txt COPY --chown=luser:users ./requirements-run.txt ./requirements-run.txt
WORKDIR /home/luser/ WORKDIR /home/luser/
RUN python -m venv .venv RUN python -m venv .venv
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel -r requirements-dev.txt pygraphviz RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel pygraphviz
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip install -r requirements-dev.txt /home/luser/wheel/*.whl RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip install -r requirements-dev.txt /home/luser/wheel/*.whl
COPY --chown=luser:users . /home/luser/bugis COPY --chown=luser:users . /home/luser/bugis
WORKDIR /home/luser/bugis WORKDIR /home/luser/bugis
@@ -21,12 +21,12 @@ RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 /home/lus
FROM base AS release FROM base AS release
RUN mkdir /srv/http RUN mkdir /srv/http
RUN adduser -D -h /var/bugis -u 1000 bugis RUN adduser -D -h /var/lib/bugis -u 1000 bugis
USER bugis USER bugis
WORKDIR /var/bugis WORKDIR /var/lib/bugis
COPY --chown=bugis:users conf/pip.conf ./.pip/pip.conf COPY --chown=bugis:users conf/pip.conf ./.pip/pip.conf
RUN python -m venv .venv RUN python -m venv .venv
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/dist,target=/dist .venv/bin/pip install /dist/*.whl RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/dist,target=/dist .venv/bin/pip install /dist/*.whl
VOLUME /srv/http VOLUME /srv/http
WORKDIR /srv/http WORKDIR /srv/http
@@ -36,7 +36,7 @@ ENV GRANIAN_PORT=8000
ENV GRANIAN_INTERFACE=asgi ENV GRANIAN_INTERFACE=asgi
ENV GRANIAN_LOOP=asyncio ENV GRANIAN_LOOP=asyncio
ENV GRANIAN_LOG_ENABLED=false ENV GRANIAN_LOG_ENABLED=false
ENV GRANIAN_LOG_ACCESS_ENABLED=true
ENTRYPOINT ["/var/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"] ENTRYPOINT ["/var/lib/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"]
EXPOSE 8000/tcp EXPOSE 8000/tcp

View File

@@ -18,6 +18,23 @@ docker run --rm -v /your/document/directory:/srv/http --user $(id -u):$(id -g)
### Run in docker with `nginx` and `plantUML` server ### Run in docker with `nginx` and `plantUML` server
```bash ```bash
docker compose up --build STATIC_ROOT=/your/document/directory UID=$(id -u) GID=$(id -g) docker compose up --build
``` ```
### Install with pipx
```bash
pipx install -r requirements-run.txt .
```
or
```bash
pipx install --extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple/ bugis[run]
```
### Run from cli
```bash
bugis -a 127.0.0.1 -p 8000
```

14
conf/nginx-bugis.conf Normal file
View File

@@ -0,0 +1,14 @@
server {
listen 8080;
http2 on;
server_name localhost;
location / {
proxy_pass http://granian:8000;
proxy_http_version 1.1;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_read_timeout 60s;
}
}

View File

@@ -29,7 +29,7 @@ dependencies = [
"PyYAML", "PyYAML",
"pygraphviz", "pygraphviz",
"aiofiles", "aiofiles",
"aiohttp[speedups]" "httpx[http2]"
] ]
[project.optional-dependencies] [project.optional-dependencies]
@@ -59,4 +59,7 @@ exclude = ["scripts", "docs", "test"]
strict = true strict = true
[tool.setuptools_scm] [tool.setuptools_scm]
version_file = "src/bugis/_version.py" version_file = "src/bugis/_version.py"
[project.scripts]
bugis = "bugis.cli:main"

View File

@@ -1,39 +1,27 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt --strip-extras pyproject.toml # pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3 anyio==4.6.2.post1
# via aiohttp # via httpx
aiohttp==3.10.10
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
asttokens==2.4.1 asttokens==2.4.1
# via stack-data # via stack-data
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
backports-tarfile==1.2.0
# via jaraco-context
brotli==1.1.0
# via aiohttp
build==1.2.2.post1 build==1.2.2.post1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
certifi==2024.8.30 certifi==2024.8.30
# via requests
cffi==1.17.1
# via # via
# cryptography # httpcore
# pycares # httpx
# requests
cffi==1.17.1
# via cryptography
charset-normalizer==3.4.0 charset-normalizer==3.4.0
# via requests # via requests
click==8.1.7 click==8.1.7
@@ -46,24 +34,29 @@ decorator==5.1.1
# ipython # ipython
docutils==0.21.2 docutils==0.21.2
# via readme-renderer # via readme-renderer
exceptiongroup==1.2.2
# via ipython
executing==2.1.0 executing==2.1.0
# via stack-data # via stack-data
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
granian==1.6.1 granian==1.6.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10 idna==3.10
# via # via
# anyio
# httpx
# requests # requests
# yarl
importlib-metadata==8.5.0 importlib-metadata==8.5.0
# via # via twine
# keyring
# twine
ipdb==0.13.13 ipdb==0.13.13
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
ipython==8.28.0 ipython==8.28.0
@@ -94,11 +87,7 @@ more-itertools==10.5.0
# via # via
# jaraco-classes # jaraco-classes
# jaraco-functools # jaraco-functools
multidict==6.1.0 mypy==1.13.0
# via
# aiohttp
# yarl
mypy==1.12.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
mypy-extensions==1.0.0 mypy-extensions==1.0.0
# via mypy # via mypy
@@ -114,16 +103,12 @@ pkginfo==1.10.0
# via twine # via twine
prompt-toolkit==3.0.48 prompt-toolkit==3.0.48
# via ipython # via ipython
propcache==0.2.0
# via yarl
ptyprocess==0.7.0 ptyprocess==0.7.0
# via pexpect # via pexpect
pure-eval==0.2.3 pure-eval==0.2.3
# via stack-data # via stack-data
pwo==0.0.3 pwo==0.0.4
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22 pycparser==2.22
# via cffi # via cffi
pygments==2.18.0 pygments==2.18.0
@@ -148,32 +133,28 @@ requests-toolbelt==1.0.0
# via twine # via twine
rfc3986==2.0.0 rfc3986==2.0.0
# via twine # via twine
rich==13.9.2 rich==13.9.3
# via twine # via twine
secretstorage==3.3.3 secretstorage==3.3.3
# via keyring # via keyring
six==1.16.0 six==1.16.0
# via asttokens # via asttokens
sniffio==1.3.1
# via
# anyio
# httpx
stack-data==0.6.3 stack-data==0.6.3
# via ipython # via ipython
tomli==2.0.2
# via
# build
# ipdb
# mypy
traitlets==5.14.3 traitlets==5.14.3
# via # via
# ipython # ipython
# matplotlib-inline # matplotlib-inline
twine==5.1.1 twine==5.1.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
typing-extensions==4.7.1 typing-extensions==4.12.2
# via # via
# ipython
# multidict
# mypy # mypy
# pwo # pwo
# rich
urllib3==2.2.3 urllib3==2.2.3
# via # via
# requests # requests
@@ -184,7 +165,5 @@ watchdog==5.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
wcwidth==0.2.13 wcwidth==0.2.13
# via prompt-toolkit # via prompt-toolkit
yarl==1.16.0
# via aiohttp
zipp==3.20.2 zipp==3.20.2
# via importlib-metadata # via importlib-metadata

View File

@@ -1,66 +1,57 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt --strip-extras pyproject.toml # pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3 anyio==4.6.2.post1
# via aiohttp # via httpx
aiohttp==3.10.10 certifi==2024.8.30
# via bugis (pyproject.toml) # via
aiosignal==1.3.1 # httpcore
# via aiohttp # httpx
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
# via aiohttp
cffi==1.17.1
# via pycares
click==8.1.7 click==8.1.7
# via granian # via granian
frozenlist==1.4.1
# via
# aiohttp
# aiosignal
granian==1.6.1 granian==1.6.1
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10 idna==3.10
# via yarl # via
# anyio
# httpx
markdown==3.7 markdown==3.7
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
multidict==6.1.0 pwo==0.0.4
# via
# aiohttp
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22
# via cffi
pygments==2.18.0 pygments==2.18.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pygraphviz==1.14 pygraphviz==1.14
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pyyaml==6.0.2 pyyaml==6.0.2
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
typing-extensions==4.7.1 sniffio==1.3.1
# via # via
# multidict # anyio
# pwo # httpx
typing-extensions==4.12.2
# via pwo
uvloop==0.21.0 uvloop==0.21.0
# via granian # via granian
watchdog==5.0.3 watchdog==5.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
yarl==1.16.0
# via aiohttp

View File

@@ -1,60 +1,51 @@
# #
# This file is autogenerated by pip-compile with Python 3.10 # This file is autogenerated by pip-compile with Python 3.12
# by the following command: # by the following command:
# #
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml # pip-compile --output-file=requirements.txt pyproject.toml
# #
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple
aiodns==3.2.0
# via aiohttp
aiofiles==24.1.0 aiofiles==24.1.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
aiohappyeyeballs==2.4.3 anyio==4.6.2.post1
# via aiohttp # via httpx
aiohttp==3.10.10 certifi==2024.8.30
# via bugis (pyproject.toml)
aiosignal==1.3.1
# via aiohttp
async-timeout==4.0.3
# via aiohttp
attrs==24.2.0
# via aiohttp
brotli==1.1.0
# via aiohttp
cffi==1.17.1
# via pycares
frozenlist==1.4.1
# via # via
# aiohttp # httpcore
# aiosignal # httpx
h11==0.14.0
# via httpcore
h2==4.1.0
# via httpx
hpack==4.0.0
# via h2
httpcore==1.0.6
# via httpx
httpx[http2]==0.27.2
# via bugis (pyproject.toml)
hyperframe==6.0.1
# via h2
idna==3.10 idna==3.10
# via yarl # via
# anyio
# httpx
markdown==3.7 markdown==3.7
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
multidict==6.1.0 pwo==0.0.4
# via
# aiohttp
# yarl
propcache==0.2.0
# via yarl
pwo==0.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pycares==4.4.0
# via aiodns
pycparser==2.22
# via cffi
pygments==2.18.0 pygments==2.18.0
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pygraphviz==1.14 pygraphviz==1.14
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
pyyaml==6.0.2 pyyaml==6.0.2
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
typing-extensions==4.7.1 sniffio==1.3.1
# via # via
# multidict # anyio
# pwo # httpx
typing-extensions==4.12.2
# via pwo
watchdog==5.0.3 watchdog==5.0.3
# via bugis (pyproject.toml) # via bugis (pyproject.toml)
yarl==1.16.0
# via aiohttp

4
src/bugis/__main__.py Normal file
View File

@@ -0,0 +1,4 @@
from .cli import main
import sys
main(sys.argv[1:])

View File

@@ -1,27 +1,18 @@
import logging import logging
from logging.config import dictConfig as configure_logging from asyncio import get_running_loop
from typing import Optional, Awaitable, Callable, Any, Mapping
from yaml import safe_load
from .configuration import Configuration
with open(Configuration.instance.logging_configuration_file, 'r') as input_file:
conf = safe_load(input_file)
configure_logging(conf)
from pwo import Maybe from pwo import Maybe
from .server import Server from .server import Server
from asyncio import get_running_loop
from .asgi_utils import decode_headers
from typing import Optional
log = logging.getLogger('access') log = logging.getLogger('access')
log.propagate = False log.propagate = False
_server : Optional[Server] = None _server: Optional[Server] = None
async def application(scope, receive, send): async def application(scope, receive, send : Callable[[Mapping[str, Any]], Awaitable[None]]):
global _server global _server
if scope['type'] == 'lifespan': if scope['type'] == 'lifespan':
while True: while True:
@@ -33,22 +24,9 @@ async def application(scope, receive, send):
await _server.stop() await _server.stop()
await send({'type': 'lifespan.shutdown.complete'}) await send({'type': 'lifespan.shutdown.complete'})
else: else:
def maybe_log(evt): pathsend = (Maybe.of_nullable(scope.get('extensions'))
d = { .map(lambda it: it.get("http.response.pathsend"))
'response_headers': (Maybe.of_nullable(evt.get('headers')) .is_present)
.map(decode_headers)
.or_none()),
'status': evt['status']
}
log.info(None, extra=dict(**{k : v for k, v in d.items() if k is not None}, **scope))
def wrapped_send(*args, **kwargs):
result = send(*args, **kwargs)
(Maybe.of(args)
.filter(lambda it: len(it) > 0)
.map(lambda it: it[0])
.filter(lambda it: it.get('type') == 'http.response.start')
.if_present(maybe_log))
return result
await _server.handle_request( await _server.handle_request(
scope['method'], scope['method'],
scope['path'], scope['path'],
@@ -58,6 +36,7 @@ async def application(scope, receive, send):
.map(lambda it: it.decode()) .map(lambda it: it.decode())
.or_else(None), .or_else(None),
Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None), Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
wrapped_send send,
pathsend
) )

View File

@@ -1,54 +1,15 @@
import asyncio import asyncio
from asyncio import Queue, AbstractEventLoop, Future, Task, gather
from logging import getLogger, Logger
from pathlib import Path
from pwo import TopicManager, Subscriber
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer from watchdog.observers import Observer
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from pathlib import Path
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather
from typing import Callable, Optional
from logging import getLogger, Logger
log: Logger = getLogger(__name__) log: Logger = getLogger(__name__)
class Subscription:
_unsubscribe_callback: Callable[['Subscription'], None]
_event: Optional[Future]
_loop: AbstractEventLoop
def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
self._unsubscribe_callback = unsubscribe
self._event: Optional[Future] = None
self._loop = loop
def unsubscribe(self) -> None:
self._event.cancel()
self._unsubscribe_callback(self)
log.debug('Deleted subscription %s', id(self))
async def wait(self, tout: float) -> bool:
self._event = self._loop.create_future()
def callback():
if not self._event.done():
self._event.set_result(False)
handle = self._loop.call_later(tout, callback)
try:
log.debug('Subscription %s is waiting for an event', id(self))
return await self._event
except CancelledError:
return False
finally:
handle.cancel()
def notify(self) -> None:
log.debug('Subscription %s notified', id(self))
if not self._event.done():
self._event.set_result(True)
def reset(self) -> None:
self._event = self._loop.create_future()
class _EventHandler(FileSystemEventHandler): class _EventHandler(FileSystemEventHandler):
_queue: Queue _queue: Queue
@@ -67,22 +28,6 @@ class _EventHandler(FileSystemEventHandler):
self._loop.call_soon_threadsafe(self._queue.put_nowait, event) self._loop.call_soon_threadsafe(self._queue.put_nowait, event)
class AsyncQueueIterator:
_queue: Queue
def __init__(self, queue: Queue):
self._queue = queue
def __aiter__(self):
return self
async def __anext__(self):
item = await self._queue.get()
if item is None:
raise StopAsyncIteration
return item
observer = Observer() observer = Observer()
@@ -96,55 +41,11 @@ def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
observer.join() observer.join()
loop.call_soon_threadsafe(queue.put_nowait, None) loop.call_soon_threadsafe(queue.put_nowait, None)
class SubscriptionManager:
_loop: AbstractEventLoop
_queue: Queue
_subscriptions: dict[str, set[Subscription]]
def __init__(self, loop: AbstractEventLoop):
self._subscriptions: dict[str, set[Subscription]] = dict()
self._loop = loop
self._queue = Queue()
def subscribe(self, path: str) -> Subscription:
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.setdefault(path, set())
def unsubscribe_callback(subscription):
subscriptions_per_path.remove(subscription)
log.debug('Removed subscription %s to path %s', id(result), path)
result = Subscription(unsubscribe_callback, self._loop)
log.debug('Created subscription %s to path %s', id(result), path)
subscriptions_per_path.add(result)
return result
def _notify_subscriptions(self, path):
subscriptions = self._subscriptions
subscriptions_per_path = subscriptions.get(path, None)
if subscriptions_per_path:
log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}")
for s in subscriptions_per_path:
s.notify()
async def process_events(self):
async for evt in AsyncQueueIterator(self._queue):
log.debug(f"Processed event for path '{evt}'")
self._notify_subscriptions(evt)
log.debug(f"Event processor has completed")
def post_event(self, path):
def callback():
self._queue.put_nowait(path)
log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}")
self._loop.call_soon_threadsafe(callback)
class FileWatcher(PatternMatchingEventHandler): class FileWatcher(PatternMatchingEventHandler):
_subscription_manager: SubscriptionManager _topic_manager: TopicManager
_loop: AbstractEventLoop _loop: AbstractEventLoop
_subscription_manager_loop: Task _topic_manager_loop: Task
_running_tasks : Future _running_tasks : Future
def __init__(self, path): def __init__(self, path):
@@ -155,29 +56,29 @@ class FileWatcher(PatternMatchingEventHandler):
self._observer: Observer = Observer() self._observer: Observer = Observer()
self._observer.schedule(self, path=path, recursive=True) self._observer.schedule(self, path=path, recursive=True)
self._loop = asyncio.get_running_loop() self._loop = asyncio.get_running_loop()
self._subscription_manager = SubscriptionManager(self._loop) self._topic_manager = TopicManager(self._loop)
self._running_tasks = gather( self._running_tasks = gather(
self._loop.run_in_executor(None, self._observer.start), self._loop.run_in_executor(None, self._observer.start),
self._loop.create_task(self._subscription_manager.process_events()) self._loop.create_task(self._topic_manager.process_events())
) )
async def stop(self) -> None: async def stop(self) -> None:
def _observer_stop(): def _observer_stop():
self._observer.stop() self._observer.stop()
self._observer.join() self._observer.join()
self._subscription_manager.post_event(None) self._topic_manager.post_event(None)
await self._loop.run_in_executor(None, _observer_stop) await self._loop.run_in_executor(None, _observer_stop)
await self._running_tasks await self._running_tasks
def subscribe(self, path: str) -> Subscription: def subscribe(self, path: str) -> Subscriber:
return self._subscription_manager.subscribe(path) return self._topic_manager.subscribe(path)
def on_any_event(self, event: FileSystemEvent) -> None: def on_any_event(self, event: FileSystemEvent) -> None:
what = "directory" if event.is_directory else "file" what = "directory" if event.is_directory else "file"
def post_event(path): def post_event(path):
self._subscription_manager.post_event(path) self._topic_manager.post_event(path)
if isinstance(event, FileClosedEvent): if isinstance(event, FileClosedEvent):
log.debug("Closed %s: %s", what, event.src_path) log.debug("Closed %s: %s", what, event.src_path)

114
src/bugis/cli.py Normal file
View File

@@ -0,0 +1,114 @@
import argparse
from dataclasses import asdict
from os import environ
from pathlib import Path
from typing import Optional, Sequence
import yaml
from granian import Granian
from pwo import Maybe
from .configuration import Configuration
from granian.constants import HTTPModes, Interfaces, ThreadModes, Loops
def main(args: Optional[Sequence[str]] = None):
parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files")
default_configuration_file = (Maybe.of(environ.get('XDG_CONFIG_HOME'))
.map(lambda it: Path(it))
.map(lambda it: it / 'bugis' / 'bugis.yaml')
.or_else_get(lambda: Path(environ.get('HOME')) / '.config' / 'bugis' / 'bugis.yaml')
.filter(Path.exists)
.or_else(None)
)
parser.add_argument(
'-c',
'--configuration',
help='Path to the configuration file',
default=default_configuration_file,
type=Path,
)
parser.add_argument(
'-a',
'--address',
help='Server bind address',
default='127.0.0.1',
)
parser.add_argument(
'-p',
'--port',
help='Server port',
default='8000',
type=int
)
parser.add_argument(
'--access-log',
help='Enable access log',
action='store_true',
dest='log_access'
)
parser.add_argument(
'--logging-configuration',
help='Logging configuration file',
dest='log_config_file'
)
parser.add_argument(
'-w', '--workers',
help='Number of worker processes',
default='1',
dest='workers',
type = int
)
parser.add_argument(
'-t', '--threads',
help='Number of threads per worker',
default='1',
dest='threads',
type=int
)
parser.add_argument(
'--http',
help='HTTP protocol version',
dest='http',
type=lambda it: HTTPModes(it),
choices=[str(mode) for mode in HTTPModes],
default = 'auto',
)
parser.add_argument(
'--threading-mode',
help='Threading mode',
dest='threading_mode',
type=lambda it: ThreadModes(it),
choices=[str(mode) for mode in ThreadModes],
default=ThreadModes.workers
)
parser.add_argument(
'--loop',
help='Loop',
dest='loop',
type=lambda it: Loops(it),
choices=[str(mode) for mode in Loops]
)
args = parser.parse_args(args)
def parse(configuration: Path):
with open(configuration, 'r') as f:
return yaml.safe_load(f)
def assign(it: Configuration):
Configuration.instance = it
Maybe.of_nullable(args.configuration).map(parse).if_present(assign)
conf = Configuration.instance
granian_conf = asdict(conf).setdefault('granian', dict())
for k, v in vars(args).items():
if v is not None:
granian_conf[k] = v
if args.log_config_file:
with open(args.log_config_file, 'r') as f:
granian_conf['log_dictconfig'] = yaml.safe_load(f)
granian_conf = Configuration.GranianConfiguration.from_dict(granian_conf)
Granian(
"bugis.asgi:application",
**asdict(granian_conf)
).serve()

View File

@@ -1,45 +1,153 @@
import os
from os import environ from os import environ
from pathlib import Path from pathlib import Path
from dataclasses import dataclass from dataclasses import dataclass, field, asdict
class ClassPropertyDescriptor(object): import yaml
from granian.constants import Loops, Interfaces, ThreadModes, HTTPModes, StrEnum
from granian.log import LogLevels
from granian.http import HTTP1Settings, HTTP2Settings
from typing import Optional, Sequence, Dict, Any
from pwo import classproperty, Maybe
from yaml import add_representer, SafeDumper, SafeLoader
def __init__(self, fget, fset=None): def parse_log_config(conf_file=None) -> Dict[str, Any]:
self.fget = fget if conf_file is None:
self.fset = fset conf_file = environ.get("LOGGING_CONFIGURATION_FILE",
Path(__file__).parent / 'default-conf' / 'logging.yaml')
def __get__(self, obj, klass=None): with open(conf_file, 'r') as file:
if klass is None: return yaml.safe_load(file)
klass = type(obj)
return self.fget.__get__(obj, klass)()
def __set__(self, obj, value):
if not self.fset:
raise AttributeError("can't set attribute")
type_ = type(obj)
return self.fset.__get__(obj, type_)(value)
def setter(self, func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
self.fset = func
return self
def classproperty(func):
if not isinstance(func, (classmethod, staticmethod)):
func = classmethod(func)
return ClassPropertyDescriptor(func)
@dataclass(frozen=True) @dataclass(frozen=True)
class Configuration: class Configuration:
logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml') plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
plant_uml_server_address : str = environ.get('PLANT_UML_SERVER_ADDRESS', None) _instance = None
@classproperty @classproperty
def instance(cls) -> 'Configuration': def instance(cls) -> 'Configuration':
return Configuration() if cls._instance is None:
cls._instance = Configuration()
return cls._instance
@instance.setter
def bar(cls, value):
cls._instance = value
@dataclass(frozen=True)
class GranianConfiguration:
address: str = '127.0.0.1'
port: int = 8000
interface: str = Interfaces.ASGI
workers: int = 1
threads: int = 1
blocking_threads: Optional[int] = None
threading_mode: ThreadModes = ThreadModes.workers
loop: Loops = Loops.auto
loop_opt: bool = False
http: HTTPModes = HTTPModes.auto
websockets: bool = True
backlog: int = 1024
backpressure: Optional[int] = None
http1_settings: Optional[HTTP1Settings] = None
http2_settings: Optional[HTTP2Settings] = None
log_enabled: bool = True
log_level: LogLevels = LogLevels.info
log_dictconfig: Optional[Dict[str, Any]] = None
log_access: bool = False
log_access_format: Optional[str] = None
ssl_cert: Optional[Path] = None
ssl_key: Optional[Path] = None
ssl_key_password: Optional[str] = None
url_path_prefix: Optional[str] = None
respawn_failed_workers: bool = False
respawn_interval: float = 3.5
workers_lifetime: Optional[int] = None
factory: bool = False
reload: bool = False
reload_paths: Optional[Sequence[Path]] = None
reload_ignore_dirs: Optional[Sequence[str]] = None
reload_ignore_patterns: Optional[Sequence[str]] = None
reload_ignore_paths: Optional[Sequence[Path]] = None
process_name: Optional[str] = None
pid_file: Optional[Path] = None
@staticmethod
def from_dict(d) -> 'Configuration.GranianConfiguration':
return Configuration.GranianConfiguration(**{k: v for k, v in dict(
address=d.get('address', None),
port=d.get('port', None),
interface=Maybe.of_nullable(d.get('interface')).map(lambda it: Interfaces(it)).or_else(None),
workers=d.get('workers', None),
threads=d.get('threads', None),
blocking_threads=d.get('blocking_threads', None),
threading_mode=Maybe.of_nullable(d.get('threading_modes')).map(lambda it: ThreadModes(it)).or_else(None),
loop=Maybe.of_nullable(d.get('loop')).map(lambda it: Loops(it)).or_else(None),
loop_opt=d.get('loop_opt', None),
http=Maybe.of_nullable(d.get('http')).map(lambda it: HTTPModes(it)).or_else(None),
websockets=d.get('websockets', None),
backlog=d.get('backlog', None),
backpressure=d.get('backpressure', None),
http1_settings=Maybe.of_nullable(d.get('http1_settings')).map(lambda it: HTTP1Settings(**it)).or_else(None),
http2_settings=Maybe.of_nullable(d.get('http2_settings')).map(lambda it: HTTP2Settings(**it)).or_else(None),
log_enabled=d.get('log_enabled', None),
log_level=Maybe.of_nullable(d.get('log_level')).map(lambda it: LogLevels(it)).or_else(None),
log_dictconfig=parse_log_config(d.get('log_config_file')),
log_access=d.get('log_access', None),
log_access_format=d.get('log_access_format', None),
ssl_cert=d.get('ssl_cert', None),
ssl_key=d.get('ssl_key', None),
ssl_key_password=d.get('ssl_key_password', None),
url_path_prefix=d.get('url_path_prefix', None),
respawn_failed_workers=d.get('respawn_failed_workers', None),
respawn_interval=d.get('respawn_interval', None),
workers_lifetime=d.get('workers_lifetime', None),
factory=d.get('factory', None),
reload=d.get('reload', None),
reload_paths=d.get('reload_paths', None),
reload_ignore_dirs=d.get('reload_ignore_dirs', None),
reload_ignore_patterns=d.get('reload_ignore_patterns', None),
reload_ignore_paths=d.get('reload_ignore_paths', None),
process_name=d.get('process_name', None),
pid_file=d.get('pid_file', None),
).items() if v is not None})
granian: GranianConfiguration = GranianConfiguration()
@staticmethod
def from_dict(d) -> 'Configuration':
return Configuration(
**{k: v for k, v in dict(
logging_configuration_file=d.get('logging_configuration_file', None),
plant_uml_server_address=d.get('plant_uml_server_address', None),
granian=Maybe.of_nullable(d.get('granian'))
.map(Configuration.GranianConfiguration.from_dict)
.or_else(None)
).items() if v is not None
}
)
def to_yaml(self, stream):
    """Serialize this Configuration to *stream* as a single YAML document.

    Registers representers so the dataclasses become YAML mappings and
    Path / StrEnum / LogLevels values become plain scalars, then emits
    this instance. The dumper is always disposed, even on error.
    """
    dumper = SafeDumper(stream)
    dumper.add_representer(Configuration, lambda dumper, conf: dumper.represent_dict(asdict(conf)))
    dumper.add_representer(Configuration.GranianConfiguration,
                           lambda dumper, conf: dumper.represent_dict(asdict(conf)))
    # LogLevels gets its own representer (lowercased) and therefore wins over
    # the generic StrEnum multi-representer below.
    dumper.add_representer(LogLevels, lambda dumper, level: dumper.represent_str(level.lower()))
    dumper.add_multi_representer(Path, lambda dumper, path: dumper.represent_str(str(path)))
    dumper.add_multi_representer(StrEnum, lambda dumper, str_enum: dumper.represent_str(str(str_enum)))
    dumper.add_representer(HTTP1Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
    dumper.add_representer(HTTP2Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
    try:
        dumper.open()
        # BUG FIX: previously dumped the global Configuration.instance,
        # silently ignoring the instance this method was invoked on.
        dumper.represent(self)
        dumper.close()
    finally:
        dumper.dispose()
@staticmethod
def from_yaml(stream) -> 'Configuration':
    """Read a single YAML document from *stream* and build a Configuration.

    The loader is disposed unconditionally once the document has been
    consumed (or loading fails).
    """
    loader = SafeLoader(stream)
    try:
        return Configuration.from_dict(loader.get_single_data())
    finally:
        loader.dispose()

View File

@@ -1,5 +1,5 @@
version: 1 version: 1
disable_existing_loggers: True disable_existing_loggers: False
handlers: handlers:
console: console:
class : logging.StreamHandler class : logging.StreamHandler
@@ -8,28 +8,23 @@ handlers:
stream : ext://sys.stderr stream : ext://sys.stderr
access: access:
class : logging.StreamHandler class : logging.StreamHandler
formatter: request formatter: access
level : INFO level : INFO
stream : ext://sys.stderr stream : ext://sys.stdout
formatters: formatters:
brief:
format: '%(message)s'
default: default:
format: '{asctime} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}' format: '{asctime}.{msecs:0<3.0f} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}'
style: '{'
datefmt: '%Y-%m-%d %H:%M:%S'
request:
format: '{asctime} {client[0]}:{client[1]} HTTP/{http_version} {method} {path} - {status}'
style: '{' style: '{'
datefmt: '%Y-%m-%d %H:%M:%S' datefmt: '%Y-%m-%d %H:%M:%S'
access:
format: '%(message)s'
loggers: loggers:
root: root:
handlers: [console] handlers: [console]
level: DEBUG _granian:
access:
handlers: [access]
level: INFO level: INFO
watchdog.observers.inotify_buffer: propagate: False
granian.access:
handlers: [ access ]
level: INFO level: INFO
MARKDOWN: propagate: False
level: INFO

View File

@@ -1,17 +1,19 @@
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from aiofiles import open as async_open from aiofiles import open as async_open
from aiohttp import ClientSession
from .configuration import Configuration from .configuration import Configuration
from yarl import URL
if TYPE_CHECKING: if TYPE_CHECKING:
from _typeshed import StrOrBytesPath from _typeshed import StrOrBytesPath
from httpx import AsyncClient, URL
from typing import Callable, Awaitable
from urllib.parse import urljoin
async def render_plant_uml(path: 'StrOrBytesPath') -> bytes: chunk_size = 0x10000
async with ClientSession() as session: async def render_plant_uml(client: AsyncClient, path: 'StrOrBytesPath', send : Callable[[bytes], Awaitable[None]]):
url = URL(Configuration.instance.plant_uml_server_address) / 'svg' url = URL(urljoin(Configuration.instance.plant_uml_server_address, 'svg'))
async with async_open(path, 'rb') as file: async with async_open(path, 'rb') as file:
source = await file.read() source = await file.read()
async with session.post(url, data=source) as response: response = await client.post(url, content=source)
response.raise_for_status() response.raise_for_status()
return await response.read() async for chunk in response.aiter_bytes(chunk_size=chunk_size):
await send(chunk)

View File

@@ -6,7 +6,7 @@ from io import BytesIO
from mimetypes import init as mimeinit, guess_type from mimetypes import init as mimeinit, guess_type
from os import getcwd from os import getcwd
from os.path import join, normpath, splitext, relpath, basename from os.path import join, normpath, splitext, relpath, basename
from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any, Mapping
import pygraphviz as pgv import pygraphviz as pgv
from aiofiles import open as async_open from aiofiles import open as async_open
@@ -14,10 +14,12 @@ from aiofiles.base import AiofilesContextManager
from aiofiles.os import listdir from aiofiles.os import listdir
from aiofiles.ospath import exists, isdir, isfile, getmtime from aiofiles.ospath import exists, isdir, isfile, getmtime
from aiofiles.threadpool.binary import AsyncBufferedReader from aiofiles.threadpool.binary import AsyncBufferedReader
from httpx import AsyncClient
from pwo import Maybe from pwo import Maybe
from .asgi_utils import encode_headers from .asgi_utils import encode_headers
from .async_watchdog import FileWatcher from .async_watchdog import FileWatcher
from .configuration import Configuration
from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS
from .plantuml import render_plant_uml from .plantuml import render_plant_uml
@@ -29,11 +31,12 @@ mimeinit()
cwd: 'StrOrBytesPath' = getcwd() cwd: 'StrOrBytesPath' = getcwd()
def completed_future[T](result : T) -> Future[T]: def completed_future[T](result: T) -> Future[T]:
future = Future() future = Future()
future.set_result(result) future.set_result(result)
return future return future
def has_extension(filepath, extension): def has_extension(filepath, extension):
_, ext = splitext(filepath) _, ext = splitext(filepath)
return ext == extension return ext == extension
@@ -46,13 +49,19 @@ def is_markdown(filepath):
def is_dotfile(filepath): def is_dotfile(filepath):
return has_extension(filepath, ".dot") return has_extension(filepath, ".dot")
def is_plant_uml(filepath): def is_plant_uml(filepath):
return has_extension(filepath, ".puml") return has_extension(filepath, ".puml")
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class Server: class Server:
_loop : AbstractEventLoop root_dir: 'StrOrBytesPath'
prefix: Optional['StrOrBytesPath']
_loop: AbstractEventLoop
_client: AsyncClient
def __init__(self, def __init__(self,
root_dir: 'StrOrBytesPath' = getcwd(), root_dir: 'StrOrBytesPath' = getcwd(),
@@ -63,8 +72,16 @@ class Server:
self.file_watcher = FileWatcher(cwd) self.file_watcher = FileWatcher(cwd)
self.prefix = prefix and normpath(f'{prefix.decode()}') self.prefix = prefix and normpath(f'{prefix.decode()}')
self._loop = loop self._loop = loop
self._client = AsyncClient()
async def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], send): async def handle_request(self,
method: str,
url_path: str,
etag: Optional[str],
query_string: Optional[str],
send: Callable[[Mapping[str, Any]], Awaitable[None]],
pathsend: bool = False
):
if method != 'GET': if method != 'GET':
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
@@ -84,11 +101,12 @@ class Server:
etag, digest = await self.compute_etag_and_digest( etag, digest = await self.compute_etag_and_digest(
etag, etag,
url_path, url_path,
lambda: AiofilesContextManager(completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))), lambda: AiofilesContextManager(
completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
lambda: completed_future(mtime) lambda: completed_future(mtime)
) )
if etag and etag == digest: if etag and etag == digest:
await self.not_modified(send, digest, ('Cache-Control', 'must-revalidate, max-age=86400')) await self.not_modified(send, digest, 'must-revalidate, max-age=86400')
return return
elif content: elif content:
mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream' mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
@@ -147,6 +165,7 @@ class Server:
result = graph.draw(None, format="svg", prog="dot") result = graph.draw(None, format="svg", prog="dot")
logger.debug("Completed Graphviz rendering for file '%s'", filepath) logger.debug("Completed Graphviz rendering for file '%s'", filepath)
return result return result
body = await self._loop.run_in_executor(None, render_graphviz, path) body = await self._loop.run_in_executor(None, render_graphviz, path)
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
@@ -161,9 +180,8 @@ class Server:
'type': 'http.response.body', 'type': 'http.response.body',
'body': body 'body': body
}) })
elif is_plant_uml(path): elif Configuration.instance.plant_uml_server_address and is_plant_uml(path):
logger.debug("Starting PlantUML rendering for file '%s'", path) logger.debug("Starting PlantUML rendering for file '%s'", path)
body = await render_plant_uml(path)
logger.debug("Completed PlantUML rendering for file '%s'", path) logger.debug("Completed PlantUML rendering for file '%s'", path)
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
@@ -174,13 +192,18 @@ class Server:
'Cache-Control': 'no-cache' 'Cache-Control': 'no-cache'
}) })
}) })
await render_plant_uml(self._client, path, lambda chunk: send({
'type': 'http.response.body',
'body': chunk,
'more_body': True
}))
await send({ await send({
'type': 'http.response.body', 'type': 'http.response.body',
'body': body 'body': '',
'more_body': False
}) })
else: else:
async def read_file(file_path): async def read_file(file_path, buffer_size=0x10000):
buffer_size = 0x10000
async with async_open(file_path, 'rb') as f: async with async_open(file_path, 'rb') as f:
while True: while True:
result = await f.read(buffer_size) result = await f.read(buffer_size)
@@ -197,18 +220,23 @@ class Server:
'Cache-Control': 'no-cache' 'Cache-Control': 'no-cache'
}) })
}) })
if pathsend:
async for chunk in read_file(path): await send({
'type': 'http.response.pathsend',
'path': path
})
else:
async for chunk in read_file(path):
await send({
'type': 'http.response.body',
'body': chunk,
'more_body': True
})
await send({ await send({
'type': 'http.response.body', 'type': 'http.response.body',
'body': chunk, 'body': b'',
'more_body': True 'more_body': False
}) })
await send({
'type': 'http.response.body',
'body': b'',
'more_body': False
})
elif await isdir(path): elif await isdir(path):
body = (await self.directory_listing(url_path, path)).encode() body = (await self.directory_listing(url_path, path)).encode()
@@ -227,7 +255,7 @@ class Server:
await self.not_found(send) await self.not_found(send)
@staticmethod @staticmethod
async def stream_hash(source: AsyncBufferedReader, bufsize=0x1000) -> bytes: async def stream_hash(source: AsyncBufferedReader, bufsize=0x10000) -> bytes:
if bufsize <= 0: if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0") raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5() md5 = hashlib.md5()
@@ -239,7 +267,7 @@ class Server:
return md5.digest() return md5.digest()
@staticmethod @staticmethod
async def file_hash(filepath, bufsize=0x1000) -> bytes: async def file_hash(filepath, bufsize=0x10000) -> bytes:
if bufsize <= 0: if bufsize <= 0:
raise ValueError("Buffer size must be greater than 0") raise ValueError("Buffer size must be greater than 0")
md5 = hashlib.md5() md5 = hashlib.md5()
@@ -261,8 +289,8 @@ class Server:
return ( return (
Maybe.of_nullable(etag) Maybe.of_nullable(etag)
.map(skip_weak_marker) .map(skip_weak_marker)
.or_else(None) .or_else(None)
) )
async def compute_etag_and_digest( async def compute_etag_and_digest(
@@ -292,11 +320,11 @@ class Server:
return etag, digest return etag, digest
async def render_markdown(self, async def render_markdown(self,
url_path: 'StrOrBytesPath', url_path: 'StrOrBytesPath',
path: str, path: str,
raw: bool, raw: bool,
digest: str, digest: str,
send) -> None: send) -> None:
body = await compile_html( body = await compile_html(
url_path, url_path,
@@ -321,7 +349,7 @@ class Server:
}) })
@staticmethod @staticmethod
async def not_modified(send, digest: str, cache_control: str ='no-cache') -> []: async def not_modified(send, digest: str, cache_control: str = 'no-cache') -> []:
await send({ await send({
'type': 'http.response.start', 'type': 'http.response.start',
'status': 304, 'status': 304,
@@ -362,15 +390,19 @@ class Server:
for entry in sorted(await listdir(path)): for entry in sorted(await listdir(path)):
if await filter(join(path, entry)): if await filter(join(path, entry)):
yield entry yield entry
return result() return result()
async for entry in await ls(isdir): async for entry in await ls(isdir):
result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>' result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
async def file_filter(entry: str) -> bool: async def file_filter(entry: str) -> bool:
return await isfile(entry) and is_markdown(entry) return await isfile(entry) and is_markdown(entry)
async for entry in await ls(file_filter): async for entry in await ls(file_filter):
result += '<li><a href="' + entry + '"/>' + entry + '</li>' result += '<li><a href="' + entry + '"/>' + entry + '</li>'
return result return result
async def stop(self): async def stop(self):
await self.file_watcher.stop() await self.file_watcher.stop()
await self._client.aclose()