Renamed project to Bugis and switched from WSGI (uwsgi) to ASGI (granian)
Some checks failed
CI / Build pip package (push) Failing after 12s
CI / Build Docker image (push) Failing after 8s

2024-10-20 16:21:30 +08:00
parent 49a9bad07f
commit 1a805039f1
25 changed files with 623 additions and 209 deletions

View File

@@ -0,0 +1,59 @@
name: CI
on:
  push:
    branches: [ master ]
jobs:
  "Build pip package":
    runs-on: woryzen
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          cache: 'pip'
      - name: Create virtualenv
        run: |
          python -m venv .venv
          .venv/bin/pip install -r requirements.txt
      - name: Execute build
        run: |
          .venv/bin/python -m build
      - name: Publish artifacts
        env:
          TWINE_REPOSITORY_URL: ${{ vars.PYPI_REGISTRY_URL }}
          TWINE_USERNAME: ${{ vars.PUBLISHER_USERNAME }}
          TWINE_PASSWORD: ${{ secrets.PUBLISHER_TOKEN }}
        run: |
          .venv/bin/python -m twine upload --repository gitea dist/*{.whl,tar.gz}
  "Build Docker image":
    runs-on: woryzen
    steps:
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.4.0
        with:
          driver: docker-container
          platforms: |
            linux/amd64
            linux/arm64
      -
        name: Login to Gitea container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.woggioni.net
          username: woggioni
          password: ${{ secrets.PUBLISHER_TOKEN }}
      -
        name: Build and push bugis images
        uses: docker/build-push-action@v6
        with:
          platforms: |
            linux/amd64
            linux/arm64
          push: true
          pull: true
          tags: |
            "gitea.woggioni.net/woggioni/bugis:latest"
            "gitea.woggioni.net/woggioni/bugis:1.0"
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/bugis:buildx
          cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/bugis:buildx

.gitignore
View File

@@ -1 +1,2 @@
 .venv
+__pycache__

View File

@@ -1,34 +1,40 @@
 FROM alpine:latest AS base
 LABEL org.opencontainers.image.authors=oggioni.walter@gmail.com
 RUN --mount=type=cache,target=/var/cache/apk apk update
-RUN --mount=type=cache,target=/var/cache/apk apk add python3 py3-pip uwsgi uwsgi-python3 graphviz uwsgi-gevent3
+RUN --mount=type=cache,target=/var/cache/apk apk add python3 py3-pip graphviz
 
 FROM base AS build
+RUN --mount=type=cache,target=/var/cache/apk apk add musl-dev gcc graphviz-dev
 RUN adduser -D luser
 USER luser
 WORKDIR /home/luser
-COPY --chown=luser:users ./requirements-dev.txt ./md2html/requirements-dev.txt
-COPY --chown=luser:users ./src ./md2html/src
-COPY --chown=luser:users ./pyproject.toml ./md2html/pyproject.toml
-WORKDIR /home/luser/md2html
-RUN python -m venv venv
-RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 venv/bin/pip install -r requirements-dev.txt
-RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 venv/bin/python -m build
+COPY --chown=luser:users ./requirements-dev.txt ./bugis/requirements-dev.txt
+COPY --chown=luser:users ./src ./bugis/src
+COPY --chown=luser:users ./pyproject.toml ./bugis/pyproject.toml
+WORKDIR /home/luser/bugis
+RUN python -m venv .venv
+RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel -r requirements-dev.txt pygraphviz
+RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip install -r requirements-dev.txt /home/luser/wheel/*.whl
+RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/python -m build
 
 FROM base AS release
 RUN mkdir /srv/http
-RUN adduser -D -h /var/md2html -u 1000 md2html
-USER md2html
-WORKDIR /var/md2html
-RUN python -m venv venv
-RUN --mount=type=cache,target=/var/md2html/.cache/pip,uid=1000,gid=1000 --mount=type=cache,ro,from=build,source=/home/luser/md2html/dist,target=/dist venv/bin/pip install /dist/*.whl
-COPY --chown=md2html:users conf/uwsgi.ini /var/md2html/
+RUN adduser -D -h /var/bugis -u 1000 bugis
+USER bugis
+WORKDIR /var/bugis
+COPY --chown=bugis:users conf/pip.conf ./.pip/pip.conf
+RUN python -m venv .venv
+RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,source=./requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl
+RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/dist,target=/dist .venv/bin/pip install /dist/*.whl
 VOLUME /srv/http
 WORKDIR /srv/http
-ENTRYPOINT ["uwsgi"]
-EXPOSE 1910/tcp
-EXPOSE 1910/udp
-CMD [ "--ini", "/var/md2html/uwsgi.ini" ]
+ENV GRANIAN_HOST=0.0.0.0
+ENV GRANIAN_INTERFACE=asginl
+ENV GRANIAN_LOOP=asyncio
+ENV GRANIAN_LOG_ENABLED=false
+ENTRYPOINT ["/var/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"]
+EXPOSE 8000/tcp

View File

@@ -9,3 +9,4 @@ uwsgi --need-plugin /usr/lib/uwsgi/python_plugin.so \
 --http-auto-chunked \
 --gevent 10
 ```

View File

@@ -3,8 +3,7 @@ set -e
 venv/bin/python -m build
 mkdir -p docker/build
-cp dist/md2html-*.whl docker/build/
+cp dist/bugis-*.whl docker/build/
 cp docker/Dockerfile docker/build/Dockerfile
-cp docker/uwsgi.ini docker/build/uwsgi.ini
-docker build docker/build --tag alpine:md2html
+docker build docker/build --tag bugis:latest

conf/log.yml (new file)
View File

@@ -0,0 +1,18 @@
version: 1
disable_existing_loggers: True
handlers:
  console:
    class : logging.StreamHandler
    formatter: default
    level : INFO
    stream : ext://sys.stdout
formatters:
  brief:
    format: '%(message)s'
  default:
    format: '%(asctime)s %(levelname)-8s %(name)-15s %(threadName)s %(message)s'
    datefmt: '%Y-%m-%d %H:%M:%S'
loggers:
  root:
    handlers: [console]
    level: INFO

View File

@@ -1,8 +0,0 @@
[uwsgi]
#logformat = "%(proto) - %(method) %(uri) %(status) %(addr)
need-plugin=/usr/lib/uwsgi/python_plugin.so
need-plugin=/usr/lib/uwsgi/gevent3_plugin.so
socket = 0.0.0.0:1910
module = md2html.uwsgi_handler
virtualenv = /var/md2html/venv
gevent = 1000

View File

@@ -3,8 +3,8 @@ requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta" build-backend = "setuptools.build_meta"
[project] [project]
name = "md2html" name = "bugis"
version = "0.3" version = "1.0"
authors = [ authors = [
{ name="Walter Oggioni", email="oggioni.walter@gmail.com" }, { name="Walter Oggioni", email="oggioni.walter@gmail.com" },
] ]
@@ -22,29 +22,32 @@ classifiers = [
'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3',
] ]
dependencies = [ dependencies = [
"gevent",
"greenlet",
"Markdown", "Markdown",
"Pygments", "Pygments",
"watchdog", "watchdog",
"zope.event", "pwo",
"zope.interface" "PyYAML",
"pygraphviz"
] ]
[project.optional-dependencies] [project.optional-dependencies]
dev = [ dev = [
"build", "pip-tools", "mypy", "ipdb" "build", "granian", "mypy", "ipdb", "twine"
]
run = [
"granian"
] ]
[tool.setuptools.package-data] [tool.setuptools.package-data]
md2html = ['static/*'] bugis = ['static/*', 'default-conf/*']
[project.urls] [project.urls]
"Homepage" = "https://github.com/woggioni/md2html" "Homepage" = "https://github.com/woggioni/bugis"
"Bug Tracker" = "https://github.com/woggioni/md2html/issues" "Bug Tracker" = "https://github.com/woggioni/bugis/issues"
[tool.mypy] [tool.mypy]
python_version = "3.10" python_version = "3.12"
disallow_untyped_defs = true disallow_untyped_defs = true
show_error_codes = true show_error_codes = true
no_implicit_optional = true no_implicit_optional = true

View File

@@ -1,103 +1,143 @@
 #
-# This file is autogenerated by pip-compile with Python 3.10
+# This file is autogenerated by pip-compile with Python 3.12
 # by the following command:
 #
 #    pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
 #
---index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple/
+--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
 --extra-index-url https://pypi.org/simple
 asttokens==2.4.1
     # via stack-data
-build==1.2.1
-    # via
-    #   md2html (pyproject.toml)
-    #   pip-tools
+build==1.2.2.post1
+    # via bugis (pyproject.toml)
+certifi==2024.8.30
+    # via requests
+cffi==1.17.1
+    # via cryptography
+charset-normalizer==3.4.0
+    # via requests
 click==8.1.7
-    # via pip-tools
+    # via granian
+cryptography==43.0.3
+    # via secretstorage
 decorator==5.1.1
     # via
     #   ipdb
     #   ipython
-exceptiongroup==1.2.2
-    # via ipython
-executing==2.0.1
+docutils==0.21.2
+    # via readme-renderer
+executing==2.1.0
     # via stack-data
-gevent==24.2.1
-    # via md2html (pyproject.toml)
-greenlet==3.0.3
-    # via
-    #   gevent
-    #   md2html (pyproject.toml)
+granian==1.6.1
+    # via bugis (pyproject.toml)
+idna==3.10
+    # via requests
+importlib-metadata==8.5.0
+    # via twine
 ipdb==0.13.13
-    # via md2html (pyproject.toml)
-ipython==8.26.0
+    # via bugis (pyproject.toml)
+ipython==8.28.0
     # via ipdb
+jaraco-classes==3.4.0
+    # via keyring
+jaraco-context==6.0.1
+    # via keyring
+jaraco-functools==4.1.0
+    # via keyring
 jedi==0.19.1
     # via ipython
-markdown==3.6
-    # via md2html (pyproject.toml)
+jeepney==0.8.0
+    # via
+    #   keyring
+    #   secretstorage
+keyring==25.4.1
+    # via twine
+markdown==3.7
+    # via bugis (pyproject.toml)
+markdown-it-py==3.0.0
+    # via rich
 matplotlib-inline==0.1.7
     # via ipython
-mypy==1.11.1
-    # via md2html (pyproject.toml)
+mdurl==0.1.2
+    # via markdown-it-py
+more-itertools==10.5.0
+    # via
+    #   jaraco-classes
+    #   jaraco-functools
+mypy==1.12.1
+    # via bugis (pyproject.toml)
 mypy-extensions==1.0.0
     # via mypy
+nh3==0.2.18
+    # via readme-renderer
 packaging==24.1
     # via build
 parso==0.8.4
     # via jedi
 pexpect==4.9.0
     # via ipython
-pip-tools==7.4.1
-    # via md2html (pyproject.toml)
-prompt-toolkit==3.0.47
+pkginfo==1.10.0
+    # via twine
+prompt-toolkit==3.0.48
     # via ipython
 ptyprocess==0.7.0
     # via pexpect
 pure-eval==0.2.3
     # via stack-data
+pwo==0.0.3
+    # via bugis (pyproject.toml)
+pycparser==2.22
+    # via cffi
 pygments==2.18.0
     # via
+    #   bugis (pyproject.toml)
     #   ipython
-    #   md2html (pyproject.toml)
-pyproject-hooks==1.1.0
-    # via
-    #   build
-    #   pip-tools
+    #   readme-renderer
+    #   rich
+pygraphviz==1.14
+    # via bugis (pyproject.toml)
+pyproject-hooks==1.2.0
+    # via build
+pyyaml==6.0.2
+    # via bugis (pyproject.toml)
+readme-renderer==44.0
+    # via twine
+requests==2.32.3
+    # via
+    #   requests-toolbelt
+    #   twine
+requests-toolbelt==1.0.0
+    # via twine
+rfc3986==2.0.0
+    # via twine
+rich==13.9.2
+    # via twine
+secretstorage==3.3.3
+    # via keyring
 six==1.16.0
     # via asttokens
 stack-data==0.6.3
     # via ipython
-tomli==2.0.1
-    # via
-    #   build
-    #   ipdb
-    #   mypy
-    #   pip-tools
 traitlets==5.14.3
     # via
     #   ipython
     #   matplotlib-inline
-typing-extensions==4.12.2
+twine==5.1.1
+    # via bugis (pyproject.toml)
+typing-extensions==4.7.1
     # via
-    #   ipython
     #   mypy
-watchdog==4.0.2
-    # via md2html (pyproject.toml)
+    #   pwo
+urllib3==2.2.3
+    # via
+    #   requests
+    #   twine
+uvloop==0.21.0
+    # via granian
+watchdog==5.0.3
+    # via bugis (pyproject.toml)
 wcwidth==0.2.13
     # via prompt-toolkit
-wheel==0.44.0
-    # via pip-tools
-zope-event==5.0
-    # via
-    #   gevent
-    #   md2html (pyproject.toml)
-zope-interface==7.0.1
-    # via
-    #   gevent
-    #   md2html (pyproject.toml)
-
-# The following packages are considered to be unsafe in a requirements file:
-# pip
-# setuptools
+zipp==3.20.2
+    # via importlib-metadata

View File

@@ -1,32 +1,23 @@
 #
-# This file is autogenerated by pip-compile with Python 3.10
+# This file is autogenerated by pip-compile with Python 3.12
 # by the following command:
 #
-#    pip-compile --output-file=requirements.txt --strip-extras pyproject.toml
+#    pip-compile --output-file=requirements.txt pyproject.toml
 #
---index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple/
+--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
 --extra-index-url https://pypi.org/simple
-gevent==23.9.1
-    # via md2html (pyproject.toml)
-greenlet==3.0.0
-    # via
-    #   gevent
-    #   md2html (pyproject.toml)
-markdown==3.5
-    # via md2html (pyproject.toml)
-pygments==2.16.1
-    # via md2html (pyproject.toml)
-watchdog==3.0.0
-    # via md2html (pyproject.toml)
-zope-event==5.0
-    # via
-    #   gevent
-    #   md2html (pyproject.toml)
-zope-interface==6.1
-    # via
-    #   gevent
-    #   md2html (pyproject.toml)
-
-# The following packages are considered to be unsafe in a requirements file:
-# setuptools
+markdown==3.7
+    # via bugis (pyproject.toml)
+pwo==0.0.3
+    # via bugis (pyproject.toml)
+pygments==2.18.0
+    # via bugis (pyproject.toml)
+pygraphviz==1.14
+    # via bugis (pyproject.toml)
+pyyaml==6.0.2
+    # via bugis (pyproject.toml)
+typing-extensions==4.7.1
+    # via pwo
+watchdog==5.0.3
+    # via bugis (pyproject.toml)

src/bugis/asgi.py (new file)
View File

@@ -0,0 +1,35 @@
import logging
from logging.config import dictConfig as configure_logging
from os import environ
from pathlib import Path

from pwo import Maybe
from yaml import safe_load

from .server import Server

logging_configuration_file = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml')

with open(logging_configuration_file, 'r') as input_file:
    conf = safe_load(input_file)
    configure_logging(conf)

log = logging.getLogger(__name__)

_server = None


async def application(ctx, receive, send):
    global _server
    if _server is None:
        _server = Server(prefix=None)
    log.info(None, extra=ctx)
    await _server.handle_request(
        ctx['method'],
        ctx['path'],
        Maybe.of([header[1] for header in ctx['headers'] if header[0].decode().lower() == 'if-none-match'])
        .filter(lambda it: len(it) > 0)
        .map(lambda it: it[0])
        .map(lambda it: it.decode())
        .or_else(None),
        Maybe.of_nullable(ctx.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
        send
    )
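For reference, the new ASGI entry point can be exercised without granian by driving the callable directly from asyncio. The sketch below is illustrative only (the scope values and file layout are hypothetical, not part of this commit) and assumes the package is installed and the current working directory is the content root to serve.

```python
# Hypothetical smoke test for the ASGI callable above: build an HTTP scope by hand,
# collect every message passed to send(), and print a short summary.
import asyncio

from bugis.asgi import application


async def main() -> None:
    scope = {
        'type': 'http',
        'method': 'GET',
        'path': '/',        # directory listing of the current working directory
        'query_string': b'',
        'headers': [],      # e.g. [(b'if-none-match', b'W/"<digest>"')] to exercise the 304 path
    }
    messages = []

    async def receive():
        return {'type': 'http.request', 'body': b'', 'more_body': False}

    async def send(message):
        messages.append(message)

    await application(scope, receive, send)
    for message in messages:
        print(message['type'], message.get('status', ''), len(message.get('body', b'')))


asyncio.run(main())
```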

src/bugis/async_watchdog.py (new file)
View File

@@ -0,0 +1,172 @@
import asyncio
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from pathlib import Path
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task
from typing import Optional, Callable
from logging import getLogger


class Subscription:
    _unsubscribe_callback: Callable[['Subscription'], None]
    _event: Future
    _loop: AbstractEventLoop

    def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
        self._unsubscribe_callback = unsubscribe
        self._event: Future = loop.create_future()
        self._loop = loop

    def unsubscribe(self) -> None:
        self._unsubscribe_callback(self)

    async def wait(self, tout: float) -> bool:
        handle = self._loop.call_later(tout, lambda: self._event.cancel())
        try:
            await self._event
            return True
        except CancelledError:
            return False
        finally:
            handle.cancel()

    def notify(self) -> None:
        self._event.set_result(None)

    def reset(self) -> None:
        self._event = self._loop.create_future()


class _EventHandler(FileSystemEventHandler):
    _queue: Queue
    _loop: AbstractEventLoop

    def __init__(self, queue: Queue, loop: AbstractEventLoop,
                 *args, **kwargs):
        self._loop = loop
        self._queue = queue
        super().__init__(*args, **kwargs)

    def on_created(self, event: FileSystemEvent) -> None:
        self._loop.call_soon_threadsafe(self._queue.put_nowait, event)

    def on_modified(self, event: FileSystemEvent) -> None:
        self._loop.call_soon_threadsafe(self._queue.put_nowait, event)


class AsyncQueueIterator:
    _queue: Queue

    def __init__(self, queue: Queue):
        self._queue = queue

    def __aiter__(self):
        return self

    async def __anext__(self):
        item = await self._queue.get()
        if item is None:
            raise StopAsyncIteration
        return item


observer = Observer()


def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
          recursive: bool = False) -> None:
    """Watch a directory for changes."""
    handler = _EventHandler(queue, loop)
    observer.schedule(handler, str(path), recursive=recursive)
    observer.start()
    observer.join()
    loop.call_soon_threadsafe(queue.put_nowait, None)


class SubscriptionManager:
    _loop: AbstractEventLoop
    _queue: Queue
    _subscriptions: dict[str, set[Subscription]]

    def __init__(self, loop: AbstractEventLoop):
        self._subscriptions: dict[str, set[Subscription]] = dict()
        self._loop = loop
        self._queue = Queue()

    def subscribe(self, path: str) -> Subscription:
        subscriptions = self._subscriptions
        subscriptions_per_path = subscriptions.setdefault(path, set())

        def unsubscribe_callback(subscription):
            subscriptions_per_path.remove(subscription)

        result = Subscription(unsubscribe_callback, self._loop)
        subscriptions_per_path.add(result)
        return result

    def _notify_subscriptions(self, path):
        subscriptions = self._subscriptions
        subscriptions_per_path = subscriptions.get(path, None)
        if subscriptions_per_path:
            for s in subscriptions_per_path:
                s.notify()

    async def process_events(self):
        async for evt in AsyncQueueIterator(self._queue):
            self._notify_subscriptions(evt)

    def post_event(self, path):
        self._loop.call_soon_threadsafe(self._queue.put_nowait, path)


class FileWatcher(PatternMatchingEventHandler):
    _subscription_manager: SubscriptionManager
    _loop: AbstractEventLoop
    _subscription_manager_loop: Task

    def __init__(self, path):
        super().__init__(patterns=['*.md'],
                         ignore_patterns=None,
                         ignore_directories=False,
                         case_sensitive=True)
        self._observer: Observer = Observer()
        self._observer.schedule(self, path=path, recursive=True)
        self.logger = getLogger(FileWatcher.__name__)
        self._loop = asyncio.get_running_loop()
        self._subscription_manager = SubscriptionManager(self._loop)
        self._loop.run_in_executor(None, self._observer.start)
        self._subscription_manager_loop = self._loop.create_task(self._subscription_manager.process_events())

    async def stop(self) -> None:
        def _observer_stop():
            self._observer.stop()
            self._observer.join()
            self._subscription_manager.post_event(None)

        self._loop.run_in_executor(None, _observer_stop)
        await self._subscription_manager_loop

    def subscribe(self, path: str) -> Subscription:
        return self._subscription_manager.subscribe(path)

    def on_any_event(self, event: FileSystemEvent) -> None:
        what = "directory" if event.is_directory else "file"

        def post_event(path):
            self._subscription_manager.post_event(path)

        if isinstance(event, FileClosedEvent):
            self.logger.debug("Closed %s: %s", what, event.src_path)
            # update_subscriptions()
        elif isinstance(event, FileMovedEvent):
            self.logger.debug("Moved %s: %s to %s", what, event.src_path, event.dest_path)
            post_event(event.dest_path)
        elif isinstance(event, FileCreatedEvent):
            self.logger.debug("Created %s: %s", what, event.src_path)
            post_event(event.src_path)
        elif isinstance(event, FileModifiedEvent):
            self.logger.debug("Modified %s: %s", what, event.src_path)
            post_event(event.src_path)
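A rough usage sketch for the new FileWatcher (not part of the commit): it has to be constructed while an event loop is running, and the subscription key must match the path that watchdog later reports for the file (the server subscribes with the joined file path it is about to serve).

```python
# Hypothetical example: watch the current directory and wait up to 30 seconds
# for ./README.md to change; wait() returns False when the timeout elapses.
import asyncio

from bugis.async_watchdog import FileWatcher


async def main() -> None:
    watcher = FileWatcher('.')                        # schedules a recursive watchdog observer
    subscription = watcher.subscribe('./README.md')   # key must match watchdog's reported src_path
    try:
        changed = await subscription.wait(30)
        print('file changed' if changed else 'timed out')
    finally:
        subscription.unsubscribe()
        await watcher.stop()


asyncio.run(main())
```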

View File

@@ -0,0 +1,19 @@
version: 1
disable_existing_loggers: True
handlers:
  console:
    class : logging.StreamHandler
    formatter: default
    level : INFO
    stream : ext://sys.stdout
formatters:
  brief:
    format: '%(message)s'
  default:
    format: '{asctime} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}'
    style: '{'
    datefmt: '%Y-%m-%d %H:%M:%S'
loggers:
  root:
    handlers: [console]
    level: INFO

View File

@@ -5,26 +5,32 @@ from watchdog.events import PatternMatchingEventHandler, FileSystemEvent, \
     FileCreatedEvent, FileModifiedEvent, FileClosedEvent, FileMovedEvent
 from watchdog.observers import Observer
 import logging
-from gevent.event import Event
+# from gevent.event import Event
+from asyncio import Future, BaseEventLoop
 
 
 class Subscription:
+    _unsubscribe_callback: Callable[['Subscription'], None]
+    _event: Future
+    _loop: BaseEventLoop
 
-    def __init__(self, unsubscribe: Callable[['Subscription'], None]):
+    def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: BaseEventLoop):
         self._unsubscribe_callback = unsubscribe
-        self._event: Event = Event()
+        self._event: Future = loop.create_future()
+        self._loop = loop
 
     def unsubscribe(self) -> None:
         self._unsubscribe_callback(self)
 
-    def wait(self, tout: float) -> bool:
+    async def wait(self, tout: float) -> bool:
+        handle = self._loop.call_later(tout, lambda: self._event.cancel())
+        await self._event
         return self._event.wait(tout)
 
     def notify(self) -> None:
-        self._event.set()
+        self._event.set_result(None)
 
     def reset(self) -> None:
-        self._event.clear()
+        self._event = self._loop.create_future()
 
 
 class FileWatcher(PatternMatchingEventHandler):

View File

@@ -5,10 +5,11 @@ from mimetypes import init as mimeinit, guess_type
 import hashlib
 from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS
 from shutil import which
-from subprocess import check_output
+import pygraphviz as pgv
 from io import BytesIO
 from typing import Callable, TYPE_CHECKING, BinaryIO, Optional
-from .file_watch import FileWatcher
+from .async_watchdog import FileWatcher
+from pwo import Maybe
 
 if TYPE_CHECKING:
     from _typeshed import StrOrBytesPath
@@ -40,10 +41,17 @@ class Server:
         self.logger = logging.getLogger(Server.__name__)
         self.prefix = prefix and normpath(f'{prefix.decode()}')
 
-    def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], start_response):
+    async def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], send):
         if method != 'GET':
-            start_response('405', [])
-            return []
+            await send({
+                'type': 'http.response.start',
+                'status': 405
+            })
+            await send({
+                'type': 'http.response.body',
+                'body': b'',
+            })
+            return
         relative_path = relpath(url_path, start=self.prefix or '/')
         url_path: 'StrOrBytesPath' = normpath(join('/', relative_path))
         path: 'StrOrBytesPath' = join(self.root_dir, relative_path)
@@ -57,15 +65,25 @@ class Server:
             lambda: mtime
         )
         if etag and etag == digest:
-            return self.not_modified(start_response, digest, ('Cache-Control', 'must-revalidate, max-age=86400'))
+            await self.not_modified(send, digest, ('Cache-Control', 'must-revalidate, max-age=86400'))
+            return
         elif content:
            mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
-            start_response('200 OK', [
-                ('Content-Type', f'{mime_type}; charset=UTF-8'),
-                ('Etag', 'W/"%s"' % digest),
-                ('Cache-Control', 'must-revalidate, max-age=86400'),
-            ])
-            return content
+            await send({
+                'type': 'http.response.start',
+                'status': 200,
+                'headers': [
+                    (b'content-type', f'{mime_type}; charset=UTF-8'.encode()),
+                    (b'etag', f'W/"{digest}"'.encode()),
+                    (b'content-type', f'{mime_type}; charset=UTF-8'.encode()),
+                    (b'Cache-Control', b'must-revalidate, max-age=86400'),
+                ]
+            })
+            await send({
+                'type': 'http.response.body',
+                'body': content
+            })
+            return
         elif exists(path):
             if isfile(path):
                 etag, digest = self.compute_etag_and_digest(
@@ -74,11 +92,12 @@ class Server:
                     lambda: open(path, 'rb'),
                     lambda: getmtime(path)
                 )
+                self.logger.debug('%s %s', etag, digest)
                 if etag and etag == digest:
                     if is_markdown(path) and query_string == 'reload':
                         subscription = self.file_watcher.subscribe(path)
                         try:
-                            has_changed = subscription.wait(30)
+                            has_changed = await subscription.wait(30)
                             if has_changed:
                                 _, digest = self.compute_etag_and_digest(
                                     etag,
@@ -88,22 +107,33 @@ class Server:
                                 )
                                 if etag != digest:
                                     if exists(path) and isfile(path):
-                                        return self.render_markdown(url_path, path, True, digest, start_response)
+                                        await self.render_markdown(url_path, path, True, digest, send)
+                                        return
                                     else:
-                                        return self.not_found(start_response)
+                                        await self.not_found(send)
+                                        return
                         finally:
                             subscription.unsubscribe()
-                    return self.not_modified(start_response, digest)
+                    await self.not_modified(send, digest)
                 elif is_markdown(path):
                     raw = query_string == 'reload'
-                    return self.render_markdown(url_path, path, raw, digest, start_response)
+                    await self.render_markdown(url_path, path, raw, digest, send)
                 elif is_dotfile(path) and which("dot"):
-                    body = check_output(['dot', '-Tsvg', basename(path)], cwd=dirname(path))
-                    start_response('200 OK', [('Content-Type', 'image/svg+xml; charset=UTF-8'),
-                                              ('Etag', 'W/"%s"' % digest),
-                                              ('Cache-Control', 'no-cache'),
-                                              ])
-                    return [body]
+                    graph = pgv.AGraph(path)
+                    body = graph.draw(None, format="svg", prog="dot")
+                    await send({
+                        'type': 'http.response.start',
+                        'status': 200,
+                        'headers': (
+                            (b'Content-Type', b'image/svg+xml; charset=UTF-8'),
+                            (b'Etag', f'W/"{digest}"'.encode()),
+                            (b'Cache-Control', b'no-cache'),
+                        )
+                    })
+                    await send({
+                        'type': 'http.response.body',
+                        'body': body
+                    })
                 else:
                     def read_file(file_path):
                         buffer_size = 1024
@@ -114,19 +144,34 @@ class Server:
                                     break
                                 yield result
-                    start_response('200 OK',
-                                   [('Content-Type', guess_type(basename(path))[0] or 'application/octet-stream'),
-                                    ('Etag', 'W/"%s"' % digest),
-                                    ('Cache-Control', 'no-cache'),
-                                    ])
-                    return read_file(path)
+                    await send({
+                        'type': 'http.response.start',
+                        'status': 200,
+                        'headers': (
+                            (b'Content-Type', guess_type(basename(path))[0].encode() or b'application/octet-stream'),
+                            (b'Etag', f'W/"{digest}"'),
+                            (b'Cache-Control', b'no-cache')
+                        )
+                    })
+                    await send({
+                        'type': 'http.response.body',
+                        'body': read_file(path)
+                    })
             elif isdir(path):
                 body = self.directory_listing(url_path, path).encode()
-                start_response('200 OK', [
-                    ('Content-Type', 'text/html; charset=UTF-8'),
-                ])
-                return [body]
-        return self.not_found(start_response)
+                await send({
+                    'type': 'http.response.start',
+                    'status': 200,
+                    'headers': (
+                        (b'Content-Type', b'text/html; charset=UTF-8'),
+                    )
+                })
+                await send({
+                    'type': 'http.response.body',
+                    'body': body
+                })
+        else:
+            await self.not_found(send)
 
     @staticmethod
     def stream_hash(source: BinaryIO, bufsize=0x1000) -> bytes:
@@ -155,13 +200,17 @@ class Server:
     @staticmethod
     def parse_etag(etag: str) -> Optional[str]:
-        if etag is None:
-            return
-        start = etag.find('"')
-        if start < 0:
-            return
-        end = etag.find('"', start + 1)
-        return etag[start + 1: end]
+        def skip_weak_marker(s):
+            if s.startswith('W/'):
+                return s[2:]
+            else:
+                return s
+
+        return (
+            Maybe.of_nullable(etag)
+            .map(skip_weak_marker)
+            .or_else(None)
+        )
 
     def compute_etag_and_digest(
         self,
@@ -189,34 +238,55 @@ class Server:
         etag = Server.parse_etag(etag_header)
         return etag, digest
 
-    def render_markdown(self,
+    async def render_markdown(self,
                         url_path: 'StrOrBytesPath',
                         path: str,
                         raw: bool,
                         digest: str,
-                        start_response) -> list[bytes]:
+                        send) -> list[bytes]:
         body = compile_html(url_path,
                             path,
                             self.prefix,
                             MARDOWN_EXTENSIONS,
                             raw=raw).encode()
-        start_response('200 OK', [('Content-Type', 'text/html; charset=UTF-8'),
-                                  ('Etag', 'W/"%s"' % digest),
-                                  ('Cache-Control', 'no-cache'),
-                                  ])
-        return [body]
+        await send({
+            'type': 'http.response.start',
+            'status': 200,
+            'headers': (
+                (b'Content-Type', b'text/html; charset=UTF-8'),
+                (b'Etag', f'W/{digest}'.encode()),
+                (b'Cache-Control', b'no-cache'),
+            )
+        })
+        await send({
+            'type': 'http.response.body',
+            'body': body
+        })
+        return
 
     @staticmethod
-    def not_modified(start_response, digest: str, cache_control=('Cache-Control', 'no-cache')) -> []:
-        start_response('304 Not Modified', [
-            ('Etag', f'W/"{digest}"'),
-            cache_control,
-        ])
-        return []
+    async def not_modified(send, digest: str, cache_control=('Cache-Control', 'no-cache')) -> []:
+        await send({
+            'type': 'http.response.start',
+            'status': 304,
+            'headers': (
+                (b'Etag', f'W/{digest}'.encode()),
+                cache_control
+            )
+        })
+        await send({
+            'type': 'http.response.body',
+        })
+        return
 
     @staticmethod
-    def not_found(start_response) -> list[bytes]:
-        start_response('404 NOT_FOUND', [])
-        return []
+    async def not_found(send) -> None:
+        await send({
+            'type': 'http.response.start',
+            'status': 404
+        })
+        await send({
+            'type': 'http.response.body',
+        })
 
     def directory_listing(self, path_info, path) -> str:
         icon_path = join(self.prefix or '', 'markdown.svg')
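For clarity, this is how the pwo Maybe chain in the new parse_etag behaves; only the methods that already appear in this diff are used, and standard Maybe semantics (an empty Maybe skips map and falls through to or_else) are assumed.

```python
# Illustrative only: mirrors the parse_etag refactor above.
from pwo import Maybe


def skip_weak_marker(s: str) -> str:
    return s[2:] if s.startswith('W/') else s


print(Maybe.of_nullable('W/"abc123"').map(skip_weak_marker).or_else(None))  # '"abc123"'
print(Maybe.of_nullable(None).map(skip_weak_marker).or_else(None))          # None
```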

View File

Image file (394 B before, 394 B after; content unchanged).

View File

@@ -1,25 +0,0 @@
import logging

from .server import Server
from uwsgi import log, opt


class UwsgiHandler(logging.Handler):
    def emit(self, record: logging.LogRecord) -> None:
        log(self.formatter.format(record))


logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(threadName)s] (%(name)s) %(levelname)s %(message)s',
    handlers=[UwsgiHandler()]
)

server = Server(prefix=opt.get('prefix', None))


def application(env, start_response):
    return server.handle_request(
        env['REQUEST_METHOD'],
        env['PATH_INFO'],
        env.get('HTTP_IF_NONE_MATCH', None),
        env.get('QUERY_STRING', None),
        start_response
    )

test/example.dot (new file)
View File

@@ -0,0 +1,27 @@
digraph D {
    subgraph cluster_p {
        label = "Parent";
        subgraph cluster_c1 {
            label = "Child one";
            a;
            subgraph cluster_gc_1 {
                label = "Grand-Child one";
                b;
            }
            subgraph cluster_gc_2 {
                label = "Grand-Child two";
                c;
                d;
            }
        }
        subgraph cluster_c2 {
            label = "Child two";
            e;
        }
    }
}
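The example graph above is what the server now feeds to pygraphviz; the sketch below mirrors the calls added in server.py for .dot files (the output file name is arbitrary).

```python
# Render test/example.dot to SVG the same way Server.handle_request does.
import pygraphviz as pgv

graph = pgv.AGraph('test/example.dot')             # parse the DOT source
svg = graph.draw(None, format='svg', prog='dot')   # path=None -> the rendered SVG is returned as bytes
with open('example.svg', 'wb') as output:
    output.write(svg)
```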