renamed project to Bugis and switched from WSGI@uwsgi to ASGI@granian
.gitea/workflows/build.yaml (new file, 59 lines)
@@ -0,0 +1,59 @@
name: CI
on:
  push:
    branches: [ master ]
jobs:
  "Build pip package":
    runs-on: woryzen
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          cache: 'pip'
      - name: Create virtualenv
        run: |
          python -m venv .venv
          .venv/bin/pip install -r requirements.txt
      - name: Execute build
        run: |
          .venv/bin/python -m build
      - name: Publish artifacts
        env:
          TWINE_REPOSITORY_URL: ${{ vars.PYPI_REGISTRY_URL }}
          TWINE_USERNAME: ${{ vars.PUBLISHER_USERNAME }}
          TWINE_PASSWORD: ${{ secrets.PUBLISHER_TOKEN }}
        run: |
          .venv/bin/python -m twine upload --repository gitea dist/*{.whl,tar.gz}
  "Build Docker image":
    runs-on: woryzen
    steps:
      -
        name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3.4.0
        with:
          driver: docker-container
          platforms: |
            linux/amd64
            linux/arm64
      -
        name: Login to Gitea container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.woggioni.net
          username: woggioni
          password: ${{ secrets.PUBLISHER_TOKEN }}
      -
        name: Build and push bugis images
        uses: docker/build-push-action@v6
        with:
          platforms: |
            linux/amd64
            linux/arm64
          push: true
          pull: true
          tags: |
            "gitea.woggioni.net/woggioni/bugis:latest"
            "gitea.woggioni.net/woggioni/bugis:1.0"
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/bugis:buildx
          cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/bugis:buildx
.gitignore (vendored, 1 line added)
@@ -1 +1,2 @@
.venv
__pycache__
Dockerfile (44 lines changed)
@@ -1,34 +1,40 @@
FROM alpine:latest AS base
LABEL org.opencontainers.image.authors=oggioni.walter@gmail.com
RUN --mount=type=cache,target=/var/cache/apk apk update
RUN --mount=type=cache,target=/var/cache/apk apk add python3 py3-pip uwsgi uwsgi-python3 graphviz uwsgi-gevent3
RUN --mount=type=cache,target=/var/cache/apk apk add python3 py3-pip graphviz

FROM base AS build
RUN --mount=type=cache,target=/var/cache/apk apk add musl-dev gcc graphviz-dev
RUN adduser -D luser
USER luser
WORKDIR /home/luser
COPY --chown=luser:users ./requirements-dev.txt ./md2html/requirements-dev.txt
COPY --chown=luser:users ./src ./md2html/src
COPY --chown=luser:users ./pyproject.toml ./md2html/pyproject.toml
WORKDIR /home/luser/md2html
RUN python -m venv venv
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 venv/bin/pip install -r requirements-dev.txt
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 venv/bin/python -m build
COPY --chown=luser:users ./requirements-dev.txt ./bugis/requirements-dev.txt
COPY --chown=luser:users ./src ./bugis/src
COPY --chown=luser:users ./pyproject.toml ./bugis/pyproject.toml
WORKDIR /home/luser/bugis
RUN python -m venv .venv
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel -r requirements-dev.txt pygraphviz
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip install -r requirements-dev.txt /home/luser/wheel/*.whl
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/python -m build

FROM base AS release
RUN mkdir /srv/http
RUN adduser -D -h /var/md2html -u 1000 md2html
USER md2html
WORKDIR /var/md2html
RUN python -m venv venv
RUN --mount=type=cache,target=/var/md2html/.cache/pip,uid=1000,gid=1000 --mount=type=cache,ro,from=build,source=/home/luser/md2html/dist,target=/dist venv/bin/pip install /dist/*.whl
COPY --chown=md2html:users conf/uwsgi.ini /var/md2html/

RUN adduser -D -h /var/bugis -u 1000 bugis
USER bugis
WORKDIR /var/bugis
COPY --chown=bugis:users conf/pip.conf ./.pip/pip.conf
RUN python -m venv .venv
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,source=./requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/dist,target=/dist .venv/bin/pip install /dist/*.whl
VOLUME /srv/http
WORKDIR /srv/http
ENTRYPOINT ["uwsgi"]
EXPOSE 1910/tcp
EXPOSE 1910/udp
CMD [ "--ini", "/var/md2html/uwsgi.ini" ]

ENV GRANIAN_HOST=0.0.0.0
ENV GRANIAN_INTERFACE=asginl
ENV GRANIAN_LOOP=asyncio
ENV GRANIAN_LOG_ENABLED=false

ENTRYPOINT ["/var/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"]
EXPOSE 8000/tcp
@@ -9,3 +9,4 @@ uwsgi --need-plugin /usr/lib/uwsgi/python_plugin.so \
    --http-auto-chunked \
    --gevent 10
```
@@ -3,8 +3,7 @@ set -e
venv/bin/python -m build
mkdir -p docker/build
cp dist/md2html-*.whl docker/build/
cp dist/bugis-*.whl docker/build/
cp docker/Dockerfile docker/build/Dockerfile
cp docker/uwsgi.ini docker/build/uwsgi.ini

docker build docker/build --tag alpine:md2html
docker build docker/build --tag bugis:latest
conf/log.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
version: 1
disable_existing_loggers: True
handlers:
  console:
    class : logging.StreamHandler
    formatter: default
    level : INFO
    stream : ext://sys.stdout
formatters:
  brief:
    format: '%(message)s'
  default:
    format: '%(asctime)s %(levelname)-8s %(name)-15s %(threadName)s %(message)s'
    datefmt: '%Y-%m-%d %H:%M:%S'
loggers:
  root:
    handlers: [console]
    level: INFO
@@ -1,8 +0,0 @@
[uwsgi]
#logformat = "%(proto) - %(method) %(uri) %(status) %(addr)
need-plugin=/usr/lib/uwsgi/python_plugin.so
need-plugin=/usr/lib/uwsgi/gevent3_plugin.so
socket = 0.0.0.0:1910
module = md2html.uwsgi_handler
virtualenv = /var/md2html/venv
gevent = 1000
@@ -3,8 +3,8 @@ requires = ["setuptools>=61.0"]
build-backend = "setuptools.build_meta"

[project]
name = "md2html"
version = "0.3"
name = "bugis"
version = "1.0"
authors = [
    { name="Walter Oggioni", email="oggioni.walter@gmail.com" },
]
@@ -22,29 +22,32 @@ classifiers = [
    'Programming Language :: Python :: 3',
]
dependencies = [
    "gevent",
    "greenlet",
    "Markdown",
    "Pygments",
    "watchdog",
    "zope.event",
    "zope.interface"
    "pwo",
    "PyYAML",
    "pygraphviz"
]

[project.optional-dependencies]
dev = [
    "build", "pip-tools", "mypy", "ipdb"
    "build", "granian", "mypy", "ipdb", "twine"
]

run = [
    "granian"
]

[tool.setuptools.package-data]
md2html = ['static/*']
bugis = ['static/*', 'default-conf/*']

[project.urls]
"Homepage" = "https://github.com/woggioni/md2html"
"Bug Tracker" = "https://github.com/woggioni/md2html/issues"
"Homepage" = "https://github.com/woggioni/bugis"
"Bug Tracker" = "https://github.com/woggioni/bugis/issues"

[tool.mypy]
python_version = "3.10"
python_version = "3.12"
disallow_untyped_defs = true
show_error_codes = true
no_implicit_optional = true
@@ -1,103 +1,143 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
#    pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
#
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple/
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple

asttokens==2.4.1
    # via stack-data
build==1.2.1
    # via
    #   md2html (pyproject.toml)
    #   pip-tools
build==1.2.2.post1
    # via bugis (pyproject.toml)
certifi==2024.8.30
    # via requests
cffi==1.17.1
    # via cryptography
charset-normalizer==3.4.0
    # via requests
click==8.1.7
    # via pip-tools
    # via granian
cryptography==43.0.3
    # via secretstorage
decorator==5.1.1
    # via
    #   ipdb
    #   ipython
exceptiongroup==1.2.2
    # via ipython
executing==2.0.1
docutils==0.21.2
    # via readme-renderer
executing==2.1.0
    # via stack-data
gevent==24.2.1
    # via md2html (pyproject.toml)
greenlet==3.0.3
    # via
    #   gevent
    #   md2html (pyproject.toml)
granian==1.6.1
    # via bugis (pyproject.toml)
idna==3.10
    # via requests
importlib-metadata==8.5.0
    # via twine
ipdb==0.13.13
    # via md2html (pyproject.toml)
ipython==8.26.0
    # via bugis (pyproject.toml)
ipython==8.28.0
    # via ipdb
jaraco-classes==3.4.0
    # via keyring
jaraco-context==6.0.1
    # via keyring
jaraco-functools==4.1.0
    # via keyring
jedi==0.19.1
    # via ipython
markdown==3.6
    # via md2html (pyproject.toml)
jeepney==0.8.0
    # via
    #   keyring
    #   secretstorage
keyring==25.4.1
    # via twine
markdown==3.7
    # via bugis (pyproject.toml)
markdown-it-py==3.0.0
    # via rich
matplotlib-inline==0.1.7
    # via ipython
mypy==1.11.1
    # via md2html (pyproject.toml)
mdurl==0.1.2
    # via markdown-it-py
more-itertools==10.5.0
    # via
    #   jaraco-classes
    #   jaraco-functools
mypy==1.12.1
    # via bugis (pyproject.toml)
mypy-extensions==1.0.0
    # via mypy
nh3==0.2.18
    # via readme-renderer
packaging==24.1
    # via build
parso==0.8.4
    # via jedi
pexpect==4.9.0
    # via ipython
pip-tools==7.4.1
    # via md2html (pyproject.toml)
prompt-toolkit==3.0.47
pkginfo==1.10.0
    # via twine
prompt-toolkit==3.0.48
    # via ipython
ptyprocess==0.7.0
    # via pexpect
pure-eval==0.2.3
    # via stack-data
pwo==0.0.3
    # via bugis (pyproject.toml)
pycparser==2.22
    # via cffi
pygments==2.18.0
    # via
    #   bugis (pyproject.toml)
    #   ipython
    #   md2html (pyproject.toml)
pyproject-hooks==1.1.0
    #   readme-renderer
    #   rich
pygraphviz==1.14
    # via bugis (pyproject.toml)
pyproject-hooks==1.2.0
    # via build
pyyaml==6.0.2
    # via bugis (pyproject.toml)
readme-renderer==44.0
    # via twine
requests==2.32.3
    # via
    #   build
    #   pip-tools
    #   requests-toolbelt
    #   twine
requests-toolbelt==1.0.0
    # via twine
rfc3986==2.0.0
    # via twine
rich==13.9.2
    # via twine
secretstorage==3.3.3
    # via keyring
six==1.16.0
    # via asttokens
stack-data==0.6.3
    # via ipython
tomli==2.0.1
    # via
    #   build
    #   ipdb
    #   mypy
    #   pip-tools
traitlets==5.14.3
    # via
    #   ipython
    #   matplotlib-inline
typing-extensions==4.12.2
twine==5.1.1
    # via bugis (pyproject.toml)
typing-extensions==4.7.1
    # via
    #   ipython
    #   mypy
watchdog==4.0.2
    # via md2html (pyproject.toml)
    #   pwo
urllib3==2.2.3
    # via
    #   requests
    #   twine
uvloop==0.21.0
    # via granian
watchdog==5.0.3
    # via bugis (pyproject.toml)
wcwidth==0.2.13
    # via prompt-toolkit
wheel==0.44.0
    # via pip-tools
zope-event==5.0
    # via
    #   gevent
    #   md2html (pyproject.toml)
zope-interface==7.0.1
    # via
    #   gevent
    #   md2html (pyproject.toml)

# The following packages are considered to be unsafe in a requirements file:
# pip
# setuptools
zipp==3.20.2
    # via importlib-metadata
@@ -1,32 +1,23 @@
#
# This file is autogenerated by pip-compile with Python 3.10
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
#    pip-compile --output-file=requirements.txt --strip-extras pyproject.toml
#    pip-compile --output-file=requirements.txt pyproject.toml
#
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple/
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
--extra-index-url https://pypi.org/simple

gevent==23.9.1
    # via md2html (pyproject.toml)
greenlet==3.0.0
    # via
    #   gevent
    #   md2html (pyproject.toml)
markdown==3.5
    # via md2html (pyproject.toml)
pygments==2.16.1
    # via md2html (pyproject.toml)
watchdog==3.0.0
    # via md2html (pyproject.toml)
zope-event==5.0
    # via
    #   gevent
    #   md2html (pyproject.toml)
zope-interface==6.1
    # via
    #   gevent
    #   md2html (pyproject.toml)

# The following packages are considered to be unsafe in a requirements file:
# setuptools
markdown==3.7
    # via bugis (pyproject.toml)
pwo==0.0.3
    # via bugis (pyproject.toml)
pygments==2.18.0
    # via bugis (pyproject.toml)
pygraphviz==1.14
    # via bugis (pyproject.toml)
pyyaml==6.0.2
    # via bugis (pyproject.toml)
typing-extensions==4.7.1
    # via pwo
watchdog==5.0.3
    # via bugis (pyproject.toml)
src/bugis/asgi.py (new file, 35 lines)
@@ -0,0 +1,35 @@
import logging
from logging.config import dictConfig as configure_logging
from os import environ
from pathlib import Path

from pwo import Maybe
from yaml import safe_load
from .server import Server

logging_configuration_file = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml')
with open(logging_configuration_file, 'r') as input_file:
    conf = safe_load(input_file)
    configure_logging(conf)


log = logging.getLogger(__name__)

_server = None


async def application(ctx, receive, send):
    global _server
    if _server is None:
        _server = Server(prefix=None)
    log.info(None, extra=ctx)
    await _server.handle_request(
        ctx['method'],
        ctx['path'],
        Maybe.of([header[1] for header in ctx['headers'] if header[0].decode().lower() == 'if-none-match'])
        .filter(lambda it: len(it) > 0)
        .map(lambda it: it[0])
        .map(lambda it: it.decode())
        .or_else(None),
        Maybe.of_nullable(ctx.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
        send
    )
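For reference, a minimal sketch of driving this ASGI callable by hand (for example from a test) instead of through granian; the scope dictionary, the collecting `send` coroutine, and the `/README.md` path are illustrative assumptions, not part of the commit:

```python
import asyncio

from bugis.asgi import application


async def main():
    responses = []

    async def send(message):
        # collect the 'http.response.start' and 'http.response.body' messages
        responses.append(message)

    async def receive():
        return {'type': 'http.request', 'body': b'', 'more_body': False}

    # hypothetical ASGI HTTP scope carrying only the keys the handler reads
    scope = {
        'type': 'http',
        'method': 'GET',
        'path': '/README.md',
        'query_string': b'',
        'headers': [],
    }
    await application(scope, receive, send)
    print([message['type'] for message in responses])


if __name__ == '__main__':
    asyncio.run(main())
```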
src/bugis/async_watchdog.py (new file, 172 lines)
@@ -0,0 +1,172 @@
import asyncio

from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
from watchdog.observers import Observer
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
from pathlib import Path
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task
from typing import Optional, Callable
from logging import getLogger


class Subscription:
    _unsubscribe_callback: Callable[['Subscription'], None]
    _event: Future
    _loop: AbstractEventLoop

    def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
        self._unsubscribe_callback = unsubscribe
        self._event: Future = loop.create_future()
        self._loop = loop

    def unsubscribe(self) -> None:
        self._unsubscribe_callback(self)

    async def wait(self, tout: float) -> bool:
        handle = self._loop.call_later(tout, lambda: self._event.cancel())
        try:
            await self._event
            return True
        except CancelledError:
            return False
        finally:
            handle.cancel()

    def notify(self) -> None:
        self._event.set_result(None)

    def reset(self) -> None:
        self._event = self._loop.create_future()


class _EventHandler(FileSystemEventHandler):
    _queue: Queue
    _loop: AbstractEventLoop

    def __init__(self, queue: Queue, loop: AbstractEventLoop,
                 *args, **kwargs):
        self._loop = loop
        self._queue = queue
        super(*args, **kwargs)

    def on_created(self, event: FileSystemEvent) -> None:
        self._loop.call_soon_threadsafe(self._queue.put_nowait, event)

    def on_modified(self, event: FileSystemEvent) -> None:
        self._loop.call_soon_threadsafe(self._queue.put_nowait, event)


class AsyncQueueIterator:
    _queue: Queue

    def __init__(self, queue: Queue):
        self._queue = queue

    def __aiter__(self):
        return self

    async def __anext__(self):
        item = await self._queue.get()
        if item is None:
            raise StopAsyncIteration
        return item


observer = Observer()


def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
          recursive: bool = False) -> None:
    """Watch a directory for changes."""
    handler = _EventHandler(queue, loop)

    observer.schedule(handler, str(path), recursive=recursive)
    observer.start()
    observer.join()
    loop.call_soon_threadsafe(queue.put_nowait, None)


class SubscriptionManager:
    _loop: AbstractEventLoop
    _queue: Queue
    _subscriptions: dict[str, set[Subscription]]

    def __init__(self, loop: AbstractEventLoop):
        self._subscriptions: dict[str, set[Subscription]] = dict()
        self._loop = loop
        self._queue = Queue()

    def subscribe(self, path: str) -> Subscription:
        subscriptions = self._subscriptions
        subscriptions_per_path = subscriptions.setdefault(path, set())

        def unsubscribe_callback(subscription):
            subscriptions_per_path.remove(subscription)

        result = Subscription(unsubscribe_callback, self._loop)
        subscriptions_per_path.add(result)
        return result

    def _notify_subscriptions(self, path):
        subscriptions = self._subscriptions
        subscriptions_per_path = subscriptions.get(path, None)
        if subscriptions_per_path:
            for s in subscriptions_per_path:
                s.notify()

    async def process_events(self):
        async for evt in AsyncQueueIterator(self._queue):
            self._notify_subscriptions(evt)

    def post_event(self, path):
        self._loop.call_soon_threadsafe(self._queue.put_nowait, path)


class FileWatcher(PatternMatchingEventHandler):
    _subscription_manager: SubscriptionManager
    _loop: AbstractEventLoop
    _subscription_manager_loop: Task

    def __init__(self, path):
        super().__init__(patterns=['*.md'],
                         ignore_patterns=None,
                         ignore_directories=False,
                         case_sensitive=True)
        self._observer: Observer = Observer()
        self._observer.schedule(self, path=path, recursive=True)
        self.logger = getLogger(FileWatcher.__name__)
        self._loop = asyncio.get_running_loop()
        self._subscription_manager = SubscriptionManager(self._loop)
        self._loop.run_in_executor(None, self._observer.start)
        self._subscription_manager_loop = self._loop.create_task(self._subscription_manager.process_events())

    async def stop(self) -> None:
        def _observer_stop():
            self._observer.stop()
            self._observer.join()
            self._subscription_manager.post_event(None)

        self._loop.run_in_executor(None, _observer_stop)
        await self._subscription_manager_loop

    def subscribe(self, path: str) -> Subscription:
        return self._subscription_manager.subscribe(path)

    def on_any_event(self, event: FileSystemEvent) -> None:
        what = "directory" if event.is_directory else "file"

        def post_event(path):
            self._subscription_manager.post_event(path)

        if isinstance(event, FileClosedEvent):
            self.logger.debug("Closed %s: %s", what, event.src_path)
            # update_subscriptions()
        elif isinstance(event, FileMovedEvent):
            self.logger.debug("Moved %s: %s to %s", what, event.src_path, event.dest_path)
            post_event(event.dest_path)
        elif isinstance(event, FileCreatedEvent):
            self.logger.debug("Created %s: %s", what, event.src_path)
            post_event(event.src_path)
        elif isinstance(event, FileModifiedEvent):
            self.logger.debug("Modified %s: %s", what, event.src_path)
            post_event(event.src_path)
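A minimal usage sketch of the subscription API introduced above; the `docs` directory and the `docs/index.md` path are hypothetical, and the watcher is created inside `asyncio.run` because `FileWatcher` calls `asyncio.get_running_loop()`:

```python
import asyncio

from bugis.async_watchdog import FileWatcher


async def main():
    # watch a directory of markdown files (hypothetical path)
    watcher = FileWatcher('docs')
    # subscribe to one file; the key must match the path watchdog reports
    subscription = watcher.subscribe('docs/index.md')
    try:
        # resolves True when the file changes, False after the 30 s timeout
        changed = await subscription.wait(30)
        print('changed' if changed else 'timed out')
    finally:
        subscription.unsubscribe()
        await watcher.stop()


asyncio.run(main())
```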
src/bugis/default-conf/logging.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
version: 1
disable_existing_loggers: True
handlers:
  console:
    class : logging.StreamHandler
    formatter: default
    level : INFO
    stream : ext://sys.stdout
formatters:
  brief:
    format: '%(message)s'
  default:
    format: '{asctime} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}'
    style: '{'
    datefmt: '%Y-%m-%d %H:%M:%S'
loggers:
  root:
    handlers: [console]
    level: INFO
@@ -5,26 +5,32 @@ from watchdog.events import PatternMatchingEventHandler, FileSystemEvent, \
    FileCreatedEvent, FileModifiedEvent, FileClosedEvent, FileMovedEvent
from watchdog.observers import Observer
import logging
from gevent.event import Event

# from gevent.event import Event
from asyncio import Future, BaseEventLoop

class Subscription:
    _unsubscribe_callback: Callable[['Subscription'], None]
    _event: Future
    _loop: BaseEventLoop

    def __init__(self, unsubscribe: Callable[['Subscription'], None]):
    def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: BaseEventLoop):
        self._unsubscribe_callback = unsubscribe
        self._event: Event = Event()
        self._event: Future = loop.create_future()
        self._loop = loop

    def unsubscribe(self) -> None:
        self._unsubscribe_callback(self)

    def wait(self, tout: float) -> bool:
    async def wait(self, tout: float) -> bool:
        handle = self._loop.call_later(tout, lambda: self._event.cancel())
        await self._event
        return self._event.wait(tout)

    def notify(self) -> None:
        self._event.set()
        self._event.set_result(None)

    def reset(self) -> None:
        self._event.clear()
        self._event = self._loop.create_future()


class FileWatcher(PatternMatchingEventHandler):
@@ -5,10 +5,11 @@ from mimetypes import init as mimeinit, guess_type
import hashlib
from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS
from shutil import which
from subprocess import check_output
import pygraphviz as pgv
from io import BytesIO
from typing import Callable, TYPE_CHECKING, BinaryIO, Optional
from .file_watch import FileWatcher
from .async_watchdog import FileWatcher
from pwo import Maybe

if TYPE_CHECKING:
    from _typeshed import StrOrBytesPath
@@ -40,10 +41,17 @@ class Server:
        self.logger = logging.getLogger(Server.__name__)
        self.prefix = prefix and normpath(f'{prefix.decode()}')

    def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], start_response):
    async def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], send):
        if method != 'GET':
            start_response('405', [])
            return []
            await send({
                'type': 'http.response.start',
                'status': 405
            })
            await send({
                'type': 'http.response.body',
                'body': b'',
            })
            return
        relative_path = relpath(url_path, start=self.prefix or '/')
        url_path: 'StrOrBytesPath' = normpath(join('/', relative_path))
        path: 'StrOrBytesPath' = join(self.root_dir, relative_path)
@@ -57,15 +65,25 @@ class Server:
                lambda: mtime
            )
            if etag and etag == digest:
                return self.not_modified(start_response, digest, ('Cache-Control', 'must-revalidate, max-age=86400'))
                await self.not_modified(send, digest, ('Cache-Control', 'must-revalidate, max-age=86400'))
                return
            elif content:
                mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
                start_response('200 OK', [
                    ('Content-Type', f'{mime_type}; charset=UTF-8'),
                    ('Etag', 'W/"%s"' % digest),
                    ('Cache-Control', 'must-revalidate, max-age=86400'),
                ])
                return content
                await send({
                    'type': 'http.response.start',
                    'status': 200,
                    'headers': [
                        (b'content-type', f'{mime_type}; charset=UTF-8'.encode()),
                        (b'etag', f'W/"{digest}"'.encode()),
                        (b'content-type', f'{mime_type}; charset=UTF-8'.encode()),
                        (b'Cache-Control', b'must-revalidate, max-age=86400'),
                    ]
                })
                await send({
                    'type': 'http.response.body',
                    'body': content
                })
                return
        elif exists(path):
            if isfile(path):
                etag, digest = self.compute_etag_and_digest(
@@ -74,11 +92,12 @@ class Server:
                    lambda: open(path, 'rb'),
                    lambda: getmtime(path)
                )
                self.logger.debug('%s %s', etag, digest)
                if etag and etag == digest:
                    if is_markdown(path) and query_string == 'reload':
                        subscription = self.file_watcher.subscribe(path)
                        try:
                            has_changed = subscription.wait(30)
                            has_changed = await subscription.wait(30)
                            if has_changed:
                                _, digest = self.compute_etag_and_digest(
                                    etag,
@@ -88,22 +107,33 @@ class Server:
                                )
                                if etag != digest:
                                    if exists(path) and isfile(path):
                                        return self.render_markdown(url_path, path, True, digest, start_response)
                                        await self.render_markdown(url_path, path, True, digest, send)
                                        return
                                    else:
                                        return self.not_found(start_response)
                                        await self.not_found(send)
                                        return
                        finally:
                            subscription.unsubscribe()
                    return self.not_modified(start_response, digest)
                    await self.not_modified(send, digest)
                elif is_markdown(path):
                    raw = query_string == 'reload'
                    return self.render_markdown(url_path, path, raw, digest, start_response)
                    await self.render_markdown(url_path, path, raw, digest, send)
                elif is_dotfile(path) and which("dot"):
                    body = check_output(['dot', '-Tsvg', basename(path)], cwd=dirname(path))
                    start_response('200 OK', [('Content-Type', 'image/svg+xml; charset=UTF-8'),
                                              ('Etag', 'W/"%s"' % digest),
                                              ('Cache-Control', 'no-cache'),
                                              ])
                    return [body]
                    graph = pgv.AGraph(path)
                    body = graph.draw(None, format="svg", prog="dot")
                    await send({
                        'type': 'http.response.start',
                        'status': 200,
                        'headers': (
                            (b'Content-Type', b'image/svg+xml; charset=UTF-8'),
                            (b'Etag', f'W/"{digest}"'.encode()),
                            (b'Cache-Control', b'no-cache'),
                        )
                    })
                    await send({
                        'type': 'http.response.body',
                        'body': body
                    })
                else:
                    def read_file(file_path):
                        buffer_size = 1024
@@ -114,19 +144,34 @@ class Server:
                            break
                        yield result

                    start_response('200 OK',
                                   [('Content-Type', guess_type(basename(path))[0] or 'application/octet-stream'),
                                    ('Etag', 'W/"%s"' % digest),
                                    ('Cache-Control', 'no-cache'),
                                    ])
                    return read_file(path)
                    await send({
                        'type': 'http.response.start',
                        'status': 200,
                        'headers': (
                            (b'Content-Type', guess_type(basename(path))[0].encode() or b'application/octet-stream'),
                            (b'Etag', f'W/"{digest}"'),
                            (b'Cache-Control', b'no-cache')
                        )
                    })
                    await send({
                        'type': 'http.response.body',
                        'body': read_file(path)
                    })
            elif isdir(path):
                body = self.directory_listing(url_path, path).encode()
                start_response('200 OK', [
                    ('Content-Type', 'text/html; charset=UTF-8'),
                ])
                return [body]
                return self.not_found(start_response)
                await send({
                    'type': 'http.response.start',
                    'status': 200,
                    'headers': (
                        (b'Content-Type', b'text/html; charset=UTF-8'),
                    )
                })
                await send({
                    'type': 'http.response.body',
                    'body': body
                })
            else:
                await self.not_found(send)

    @staticmethod
    def stream_hash(source: BinaryIO, bufsize=0x1000) -> bytes:
@@ -155,13 +200,17 @@ class Server:

    @staticmethod
    def parse_etag(etag: str) -> Optional[str]:
        if etag is None:
            return
        start = etag.find('"')
        if start < 0:
            return
        end = etag.find('"', start + 1)
        return etag[start + 1: end]
        def skip_weak_marker(s):
            if s.startswith('W/'):
                return s[2:]
            else:
                return s

        return (
            Maybe.of_nullable(etag)
            .map(skip_weak_marker)
            .or_else(None)
        )

    def compute_etag_and_digest(
            self,
@@ -189,34 +238,55 @@ class Server:
        etag = Server.parse_etag(etag_header)
        return etag, digest

    def render_markdown(self,
    async def render_markdown(self,
                              url_path: 'StrOrBytesPath',
                              path: str,
                              raw: bool,
                              digest: str,
                              start_response) -> list[bytes]:
                              send) -> list[bytes]:
        body = compile_html(url_path,
                            path,
                            self.prefix,
                            MARDOWN_EXTENSIONS,
                            raw=raw).encode()
        start_response('200 OK', [('Content-Type', 'text/html; charset=UTF-8'),
                                  ('Etag', 'W/"%s"' % digest),
                                  ('Cache-Control', 'no-cache'),
                                  ])
        return [body]
        await send({
            'type': 'http.response.start',
            'status': 200,
            'headers': (
                (b'Content-Type', b'text/html; charset=UTF-8'),
                (b'Etag', f'W/{digest}'.encode()),
                (b'Cache-Control', b'no-cache'),
            )
        })
        await send({
            'type': 'http.response.body',
            'body': body
        })
        return

    @staticmethod
    def not_modified(start_response, digest: str, cache_control=('Cache-Control', 'no-cache')) -> []:
        start_response('304 Not Modified', [
            ('Etag', f'W/"{digest}"'),
            cache_control,
        ])
        return []
    async def not_modified(send, digest: str, cache_control=('Cache-Control', 'no-cache')) -> []:
        await send({
            'type': 'http.response.start',
            'status': 304,
            'headers': (
                (b'Etag', f'W/{digest}'.encode()),
                cache_control
            )
        })
        await send({
            'type': 'http.response.body',
        })
        return

    @staticmethod
    def not_found(start_response) -> list[bytes]:
        start_response('404 NOT_FOUND', [])
        return []
    async def not_found(send) -> None:
        await send({
            'type': 'http.response.start',
            'status': 404
        })
        await send({
            'type': 'http.response.body',
        })

    def directory_listing(self, path_info, path) -> str:
        icon_path = join(self.prefix or '', 'markdown.svg')
(image file: 394 B before, 394 B after)
@@ -1,25 +0,0 @@
import logging
from .server import Server
from uwsgi import log, opt

class UwsgiHandler(logging.Handler):

    def emit(self, record: logging.LogRecord) -> None:
        log(self.formatter.format(record))


logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(threadName)s] (%(name)s) %(levelname)s %(message)s',
    handlers=[UwsgiHandler()]
)

server = Server(prefix=opt.get('prefix', None))

def application(env, start_response):
    return server.handle_request(
        env['REQUEST_METHOD'],
        env['PATH_INFO'],
        env.get('HTTP_IF_NONE_MATCH', None),
        env.get('QUERY_STRING', None),
        start_response
    )
test/example.dot (new file, 27 lines)
@@ -0,0 +1,27 @@
digraph D {

    subgraph cluster_p {
        label = "Parent";

        subgraph cluster_c1 {
            label = "Child one";
            a;

            subgraph cluster_gc_1 {
                label = "Grand-Child one";
                b;
            }
            subgraph cluster_gc_2 {
                label = "Grand-Child two";
                c;
                d;
            }

        }

        subgraph cluster_c2 {
            label = "Child two";
            e;
        }
    }
}
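The server renders .dot files to SVG through pygraphviz (the graph.draw call in the server changes above); a minimal sketch of the same rendering applied to this test file, assuming pygraphviz and Graphviz are installed and the file is available at test/example.dot:

```python
import pygraphviz as pgv

# load the test graph and render it the same way the server does for .dot requests
graph = pgv.AGraph('test/example.dot')
svg = graph.draw(None, format='svg', prog='dot')  # returns the SVG document as bytes
with open('example.svg', 'wb') as output:
    output.write(svg)
```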