Compare commits
9 Commits
0.1.0
...
8f0320f262
Author | SHA1 | Date | |
---|---|---|---|
8f0320f262
|
|||
ee6e645cc1
|
|||
544229b7a6
|
|||
6acf6d1d6e
|
|||
02737bf9b4
|
|||
29bdad09bf
|
|||
16dbd3a82a
|
|||
33a3858b02
|
|||
e2e4083321
|
@@ -16,19 +16,22 @@ jobs:
|
|||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v5
|
||||||
with:
|
with:
|
||||||
cache: 'pip'
|
cache: 'pip'
|
||||||
- name: Create virtualenv
|
- name: Bugis Core
|
||||||
run: |
|
run: |
|
||||||
|
cd core
|
||||||
python -m venv .venv
|
python -m venv .venv
|
||||||
.venv/bin/pip install -r requirements-dev.txt
|
.venv/bin/pip install -r requirements-dev.txt
|
||||||
- name: Execute build
|
|
||||||
run: |
|
|
||||||
.venv/bin/python -m build
|
.venv/bin/python -m build
|
||||||
|
.venv/bin/pip install .
|
||||||
|
.venv/bin/python -m mypy -p src
|
||||||
|
.venv/bin/python -m unittest discover -s tests
|
||||||
- name: Publish artifacts
|
- name: Publish artifacts
|
||||||
env:
|
env:
|
||||||
TWINE_REPOSITORY_URL: ${{ vars.PYPI_REGISTRY_URL }}
|
TWINE_REPOSITORY_URL: ${{ vars.PYPI_REGISTRY_URL }}
|
||||||
TWINE_USERNAME: ${{ vars.PUBLISHER_USERNAME }}
|
TWINE_USERNAME: ${{ vars.PUBLISHER_USERNAME }}
|
||||||
TWINE_PASSWORD: ${{ secrets.PUBLISHER_TOKEN }}
|
TWINE_PASSWORD: ${{ secrets.PUBLISHER_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
|
cd core
|
||||||
.venv/bin/python -m twine upload --repository gitea dist/*{.whl,tar.gz}
|
.venv/bin/python -m twine upload --repository gitea dist/*{.whl,tar.gz}
|
||||||
build_docker_image:
|
build_docker_image:
|
||||||
name: "Build Docker image"
|
name: "Build Docker image"
|
||||||
|
2
.gitignore
vendored
2
.gitignore
vendored
@@ -1,7 +1,7 @@
|
|||||||
.venv
|
.venv
|
||||||
__pycache__
|
__pycache__
|
||||||
*.pyc
|
*.pyc
|
||||||
src/bugis/_version.py
|
_version.py
|
||||||
*.egg-info
|
*.egg-info
|
||||||
/build
|
/build
|
||||||
/dist
|
/dist
|
||||||
|
14
Dockerfile
14
Dockerfile
@@ -9,10 +9,10 @@ RUN adduser -D luser
|
|||||||
USER luser
|
USER luser
|
||||||
WORKDIR /home/luser
|
WORKDIR /home/luser
|
||||||
COPY --chown=luser:users ./requirements-dev.txt ./requirements-dev.txt
|
COPY --chown=luser:users ./requirements-dev.txt ./requirements-dev.txt
|
||||||
COPY --chown=luser:users ./requirements-dev.txt ./requirements-run.txt
|
COPY --chown=luser:users ./requirements-run.txt ./requirements-run.txt
|
||||||
WORKDIR /home/luser/
|
WORKDIR /home/luser/
|
||||||
RUN python -m venv .venv
|
RUN python -m venv .venv
|
||||||
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel -r requirements-dev.txt pygraphviz
|
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip wheel -w /home/luser/wheel pygraphviz
|
||||||
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip install -r requirements-dev.txt /home/luser/wheel/*.whl
|
RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 .venv/bin/pip install -r requirements-dev.txt /home/luser/wheel/*.whl
|
||||||
COPY --chown=luser:users . /home/luser/bugis
|
COPY --chown=luser:users . /home/luser/bugis
|
||||||
WORKDIR /home/luser/bugis
|
WORKDIR /home/luser/bugis
|
||||||
@@ -21,12 +21,12 @@ RUN --mount=type=cache,target=/home/luser/.cache/pip,uid=1000,gid=1000 /home/lus
|
|||||||
|
|
||||||
FROM base AS release
|
FROM base AS release
|
||||||
RUN mkdir /srv/http
|
RUN mkdir /srv/http
|
||||||
RUN adduser -D -h /var/bugis -u 1000 bugis
|
RUN adduser -D -h /var/lib/bugis -u 1000 bugis
|
||||||
USER bugis
|
USER bugis
|
||||||
WORKDIR /var/bugis
|
WORKDIR /var/lib/bugis
|
||||||
COPY --chown=bugis:users conf/pip.conf ./.pip/pip.conf
|
COPY --chown=bugis:users conf/pip.conf ./.pip/pip.conf
|
||||||
RUN python -m venv .venv
|
RUN python -m venv .venv
|
||||||
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl
|
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/requirements-run.txt,target=/requirements-run.txt --mount=type=bind,ro,from=build,source=/home/luser/wheel,target=/wheel .venv/bin/pip install -r /requirements-run.txt /wheel/*.whl
|
||||||
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/dist,target=/dist .venv/bin/pip install /dist/*.whl
|
RUN --mount=type=cache,target=/var/bugis/.cache/pip,uid=1000,gid=1000 --mount=type=bind,ro,from=build,source=/home/luser/bugis/dist,target=/dist .venv/bin/pip install /dist/*.whl
|
||||||
VOLUME /srv/http
|
VOLUME /srv/http
|
||||||
WORKDIR /srv/http
|
WORKDIR /srv/http
|
||||||
@@ -36,7 +36,7 @@ ENV GRANIAN_PORT=8000
|
|||||||
ENV GRANIAN_INTERFACE=asgi
|
ENV GRANIAN_INTERFACE=asgi
|
||||||
ENV GRANIAN_LOOP=asyncio
|
ENV GRANIAN_LOOP=asyncio
|
||||||
ENV GRANIAN_LOG_ENABLED=false
|
ENV GRANIAN_LOG_ENABLED=false
|
||||||
|
ENV GRANIAN_LOG_ACCESS_ENABLED=true
|
||||||
ENTRYPOINT ["/var/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"]
|
ENTRYPOINT ["/var/lib/bugis/.venv/bin/python", "-m", "granian", "bugis.asgi:application"]
|
||||||
EXPOSE 8000/tcp
|
EXPOSE 8000/tcp
|
||||||
|
|
||||||
|
19
README.md
19
README.md
@@ -18,6 +18,23 @@ docker run --rm -v /your/document/directory:/srv/http --user $(id -u):$(id -g)
|
|||||||
### Run in docker with `nginx` and `plantUML` server
|
### Run in docker with `nginx` and `plantUML` server
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker compose up --build
|
STATIC_ROOT=/your/document/directory UID=$(id -u) GID=$(id -g) docker compose up --build
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Install with pipx
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pipx install -r requirements-run.txt .
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pipx install --extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple/ bugis[run]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Run from cli
|
||||||
|
|
||||||
|
```bash
|
||||||
|
bugis -a 127.0.0.1 -p 8000
|
||||||
|
```
|
54
cli/pyproject.toml
Normal file
54
cli/pyproject.toml
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=61.0", "setuptools-scm>=8"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "bugis_cli"
|
||||||
|
dynamic = ["version"]
|
||||||
|
authors = [
|
||||||
|
{ name="Walter Oggioni", email="oggioni.walter@gmail.com" },
|
||||||
|
]
|
||||||
|
description = "Markdown to HTML renderer"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
classifiers = [
|
||||||
|
'Development Status :: 3 - Alpha',
|
||||||
|
'Topic :: Utilities',
|
||||||
|
'License :: OSI Approved :: MIT License',
|
||||||
|
'Intended Audience :: System Administrators',
|
||||||
|
'Intended Audience :: Developers',
|
||||||
|
'Environment :: Console',
|
||||||
|
'License :: OSI Approved :: MIT License',
|
||||||
|
'Programming Language :: Python :: 3',
|
||||||
|
]
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
"granian",
|
||||||
|
"bugis"
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
|
"build", "mypy", "ipdb", "twine"
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
"Homepage" = "https://github.com/woggioni/bugis"
|
||||||
|
"Bug Tracker" = "https://github.com/woggioni/bugis/issues"
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
python_version = "3.12"
|
||||||
|
disallow_untyped_defs = true
|
||||||
|
show_error_codes = true
|
||||||
|
no_implicit_optional = true
|
||||||
|
warn_return_any = true
|
||||||
|
warn_unused_ignores = true
|
||||||
|
exclude = ["scripts", "docs", "test"]
|
||||||
|
strict = true
|
||||||
|
|
||||||
|
[tool.setuptools_scm]
|
||||||
|
root='..'
|
||||||
|
version_file = "src/bugis/cli/_version.py"
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
bugis = "bugis.cli:main"
|
171
cli/requirements-dev.txt
Normal file
171
cli/requirements-dev.txt
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
#
|
||||||
|
# This file is autogenerated by pip-compile with Python 3.12
|
||||||
|
# by the following command:
|
||||||
|
#
|
||||||
|
# pip-compile --extra=dev --output-file=requirements-dev.txt
|
||||||
|
#
|
||||||
|
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
||||||
|
--extra-index-url https://pypi.org/simple
|
||||||
|
|
||||||
|
aiofiles==24.1.0
|
||||||
|
# via bugis
|
||||||
|
anyio==4.6.2.post1
|
||||||
|
# via httpx
|
||||||
|
asttokens==2.4.1
|
||||||
|
# via stack-data
|
||||||
|
bugis==0.2.2
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
build==1.2.2.post1
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
certifi==2024.8.30
|
||||||
|
# via
|
||||||
|
# httpcore
|
||||||
|
# httpx
|
||||||
|
# requests
|
||||||
|
cffi==1.17.1
|
||||||
|
# via cryptography
|
||||||
|
charset-normalizer==3.4.0
|
||||||
|
# via requests
|
||||||
|
click==8.1.7
|
||||||
|
# via granian
|
||||||
|
cryptography==43.0.3
|
||||||
|
# via secretstorage
|
||||||
|
decorator==5.1.1
|
||||||
|
# via
|
||||||
|
# ipdb
|
||||||
|
# ipython
|
||||||
|
docutils==0.21.2
|
||||||
|
# via readme-renderer
|
||||||
|
executing==2.1.0
|
||||||
|
# via stack-data
|
||||||
|
granian==1.6.3
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
h11==0.14.0
|
||||||
|
# via httpcore
|
||||||
|
h2==4.1.0
|
||||||
|
# via httpx
|
||||||
|
hpack==4.0.0
|
||||||
|
# via h2
|
||||||
|
httpcore==1.0.6
|
||||||
|
# via httpx
|
||||||
|
httpx[http2]==0.27.2
|
||||||
|
# via bugis
|
||||||
|
hyperframe==6.0.1
|
||||||
|
# via h2
|
||||||
|
idna==3.10
|
||||||
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
|
# requests
|
||||||
|
importlib-metadata==8.5.0
|
||||||
|
# via twine
|
||||||
|
ipdb==0.13.13
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
ipython==8.29.0
|
||||||
|
# via ipdb
|
||||||
|
jaraco-classes==3.4.0
|
||||||
|
# via keyring
|
||||||
|
jaraco-context==6.0.1
|
||||||
|
# via keyring
|
||||||
|
jaraco-functools==4.1.0
|
||||||
|
# via keyring
|
||||||
|
jedi==0.19.1
|
||||||
|
# via ipython
|
||||||
|
jeepney==0.8.0
|
||||||
|
# via
|
||||||
|
# keyring
|
||||||
|
# secretstorage
|
||||||
|
keyring==25.5.0
|
||||||
|
# via twine
|
||||||
|
markdown==3.7
|
||||||
|
# via bugis
|
||||||
|
markdown-it-py==3.0.0
|
||||||
|
# via rich
|
||||||
|
matplotlib-inline==0.1.7
|
||||||
|
# via ipython
|
||||||
|
mdurl==0.1.2
|
||||||
|
# via markdown-it-py
|
||||||
|
more-itertools==10.5.0
|
||||||
|
# via
|
||||||
|
# jaraco-classes
|
||||||
|
# jaraco-functools
|
||||||
|
mypy==1.13.0
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
mypy-extensions==1.0.0
|
||||||
|
# via mypy
|
||||||
|
nh3==0.2.18
|
||||||
|
# via readme-renderer
|
||||||
|
packaging==24.1
|
||||||
|
# via build
|
||||||
|
parso==0.8.4
|
||||||
|
# via jedi
|
||||||
|
pexpect==4.9.0
|
||||||
|
# via ipython
|
||||||
|
pkginfo==1.10.0
|
||||||
|
# via twine
|
||||||
|
prompt-toolkit==3.0.48
|
||||||
|
# via ipython
|
||||||
|
ptyprocess==0.7.0
|
||||||
|
# via pexpect
|
||||||
|
pure-eval==0.2.3
|
||||||
|
# via stack-data
|
||||||
|
pwo==0.0.4
|
||||||
|
# via bugis
|
||||||
|
pycparser==2.22
|
||||||
|
# via cffi
|
||||||
|
pygments==2.18.0
|
||||||
|
# via
|
||||||
|
# bugis
|
||||||
|
# ipython
|
||||||
|
# readme-renderer
|
||||||
|
# rich
|
||||||
|
pygraphviz==1.14
|
||||||
|
# via bugis
|
||||||
|
pyproject-hooks==1.2.0
|
||||||
|
# via build
|
||||||
|
pyyaml==6.0.2
|
||||||
|
# via bugis
|
||||||
|
readme-renderer==44.0
|
||||||
|
# via twine
|
||||||
|
requests==2.32.3
|
||||||
|
# via
|
||||||
|
# requests-toolbelt
|
||||||
|
# twine
|
||||||
|
requests-toolbelt==1.0.0
|
||||||
|
# via twine
|
||||||
|
rfc3986==2.0.0
|
||||||
|
# via twine
|
||||||
|
rich==13.9.3
|
||||||
|
# via twine
|
||||||
|
secretstorage==3.3.3
|
||||||
|
# via keyring
|
||||||
|
six==1.16.0
|
||||||
|
# via asttokens
|
||||||
|
sniffio==1.3.1
|
||||||
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
|
stack-data==0.6.3
|
||||||
|
# via ipython
|
||||||
|
traitlets==5.14.3
|
||||||
|
# via
|
||||||
|
# ipython
|
||||||
|
# matplotlib-inline
|
||||||
|
twine==5.1.1
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via
|
||||||
|
# mypy
|
||||||
|
# pwo
|
||||||
|
urllib3==2.2.3
|
||||||
|
# via
|
||||||
|
# requests
|
||||||
|
# twine
|
||||||
|
uvloop==0.21.0
|
||||||
|
# via granian
|
||||||
|
watchdog==5.0.3
|
||||||
|
# via bugis
|
||||||
|
wcwidth==0.2.13
|
||||||
|
# via prompt-toolkit
|
||||||
|
zipp==3.20.2
|
||||||
|
# via importlib-metadata
|
59
cli/requirements.txt
Normal file
59
cli/requirements.txt
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
#
|
||||||
|
# This file is autogenerated by pip-compile with Python 3.12
|
||||||
|
# by the following command:
|
||||||
|
#
|
||||||
|
# pip-compile --output-file=requirements.txt
|
||||||
|
#
|
||||||
|
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
||||||
|
--extra-index-url https://pypi.org/simple
|
||||||
|
|
||||||
|
aiofiles==24.1.0
|
||||||
|
# via bugis
|
||||||
|
anyio==4.6.2.post1
|
||||||
|
# via httpx
|
||||||
|
bugis==0.2.2
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
certifi==2024.8.30
|
||||||
|
# via
|
||||||
|
# httpcore
|
||||||
|
# httpx
|
||||||
|
click==8.1.7
|
||||||
|
# via granian
|
||||||
|
granian==1.6.3
|
||||||
|
# via bugis_cli (pyproject.toml)
|
||||||
|
h11==0.14.0
|
||||||
|
# via httpcore
|
||||||
|
h2==4.1.0
|
||||||
|
# via httpx
|
||||||
|
hpack==4.0.0
|
||||||
|
# via h2
|
||||||
|
httpcore==1.0.6
|
||||||
|
# via httpx
|
||||||
|
httpx[http2]==0.27.2
|
||||||
|
# via bugis
|
||||||
|
hyperframe==6.0.1
|
||||||
|
# via h2
|
||||||
|
idna==3.10
|
||||||
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
|
markdown==3.7
|
||||||
|
# via bugis
|
||||||
|
pwo==0.0.4
|
||||||
|
# via bugis
|
||||||
|
pygments==2.18.0
|
||||||
|
# via bugis
|
||||||
|
pygraphviz==1.14
|
||||||
|
# via bugis
|
||||||
|
pyyaml==6.0.2
|
||||||
|
# via bugis
|
||||||
|
sniffio==1.3.1
|
||||||
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via pwo
|
||||||
|
uvloop==0.21.0
|
||||||
|
# via granian
|
||||||
|
watchdog==5.0.3
|
||||||
|
# via bugis
|
118
cli/src/bugis/cli/__init__.py
Normal file
118
cli/src/bugis/cli/__init__.py
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
from os import environ
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional, Sequence
|
||||||
|
import argparse
|
||||||
|
import yaml
|
||||||
|
from granian import Granian
|
||||||
|
from pwo import Maybe
|
||||||
|
|
||||||
|
from bugis.configuration import instance, Configuration
|
||||||
|
from granian.constants import HTTPModes, ThreadModes, Loops
|
||||||
|
from dataclasses import asdict
|
||||||
|
from typing import Any, Mapping
|
||||||
|
|
||||||
|
def main(args: Optional[Sequence[str]] = None) -> None:
|
||||||
|
parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files")
|
||||||
|
default_configuration_file = (Maybe.of_nullable(environ.get('XDG_CONFIG_HOME'))
|
||||||
|
.map(lambda it: Path(it))
|
||||||
|
.map(lambda it: it / 'bugis' / 'bugis.yaml')
|
||||||
|
.or_else_get(
|
||||||
|
lambda: Maybe.of_nullable(environ.get('HOME'))
|
||||||
|
.map(lambda it: Path(it) / '.config' / 'bugis' / 'bugis.yaml').or_none())
|
||||||
|
.filter(Path.exists)
|
||||||
|
.or_none()
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-c',
|
||||||
|
'--configuration',
|
||||||
|
help='Path to the configuration file',
|
||||||
|
default=default_configuration_file,
|
||||||
|
type=Path,
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-a',
|
||||||
|
'--address',
|
||||||
|
help='Server bind address',
|
||||||
|
default='127.0.0.1',
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-p',
|
||||||
|
'--port',
|
||||||
|
help='Server port',
|
||||||
|
default='8000',
|
||||||
|
type=int
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--access-log',
|
||||||
|
help='Enable access log',
|
||||||
|
action='store_true',
|
||||||
|
dest='log_access'
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--logging-configuration',
|
||||||
|
help='Logging configuration file',
|
||||||
|
dest='log_config_file'
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-w', '--workers',
|
||||||
|
help='Number of worker processes',
|
||||||
|
default='1',
|
||||||
|
dest='workers',
|
||||||
|
type=int
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-t', '--threads',
|
||||||
|
help='Number of threads per worker',
|
||||||
|
default='1',
|
||||||
|
dest='threads',
|
||||||
|
type=int
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--http',
|
||||||
|
help='HTTP protocol version',
|
||||||
|
dest='http',
|
||||||
|
type=lambda it: HTTPModes(it),
|
||||||
|
choices=[str(mode) for mode in HTTPModes],
|
||||||
|
default='auto',
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--threading-mode',
|
||||||
|
help='Threading mode',
|
||||||
|
dest='threading_mode',
|
||||||
|
type=lambda it: ThreadModes(it),
|
||||||
|
choices=[str(mode) for mode in ThreadModes],
|
||||||
|
default=ThreadModes.workers
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--loop',
|
||||||
|
help='Loop',
|
||||||
|
dest='loop',
|
||||||
|
type=lambda it: Loops(it),
|
||||||
|
choices=[str(mode) for mode in Loops]
|
||||||
|
)
|
||||||
|
arguments = parser.parse_args(args)
|
||||||
|
|
||||||
|
def parse(configuration: Path) -> Any:
|
||||||
|
with open(configuration, 'r') as f:
|
||||||
|
return yaml.safe_load(f)
|
||||||
|
|
||||||
|
def assign(it: Configuration) -> None:
|
||||||
|
global instance
|
||||||
|
instance = it
|
||||||
|
|
||||||
|
Maybe.of_nullable(arguments.configuration).map(parse).if_present(assign)
|
||||||
|
conf = instance
|
||||||
|
|
||||||
|
granian_conf = asdict(conf).setdefault('granian', dict())
|
||||||
|
for k, v in vars(arguments).items():
|
||||||
|
if v is not None:
|
||||||
|
granian_conf[k] = v
|
||||||
|
if arguments.log_config_file:
|
||||||
|
with open(arguments.log_config_file, 'r') as f:
|
||||||
|
granian_conf['log_dictconfig'] = yaml.safe_load(f)
|
||||||
|
granian_conf = Configuration.GranianConfiguration.from_dict(granian_conf)
|
||||||
|
|
||||||
|
Granian(
|
||||||
|
"bugis.asgi:application",
|
||||||
|
**asdict(granian_conf)
|
||||||
|
).serve()
|
4
cli/src/bugis/cli/__main__.py
Normal file
4
cli/src/bugis/cli/__main__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
from . import main
|
||||||
|
import sys
|
||||||
|
|
||||||
|
main(sys.argv[1:])
|
30
cli/src/bugis/cli/default-conf/logging.yaml
Normal file
30
cli/src/bugis/cli/default-conf/logging.yaml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
version: 1
|
||||||
|
disable_existing_loggers: False
|
||||||
|
handlers:
|
||||||
|
console:
|
||||||
|
class : logging.StreamHandler
|
||||||
|
formatter: default
|
||||||
|
level : INFO
|
||||||
|
stream : ext://sys.stderr
|
||||||
|
access:
|
||||||
|
class : logging.StreamHandler
|
||||||
|
formatter: access
|
||||||
|
level : INFO
|
||||||
|
stream : ext://sys.stdout
|
||||||
|
formatters:
|
||||||
|
default:
|
||||||
|
format: '{asctime}.{msecs:0<3.0f} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}'
|
||||||
|
style: '{'
|
||||||
|
datefmt: '%Y-%m-%d %H:%M:%S'
|
||||||
|
access:
|
||||||
|
format: '%(message)s'
|
||||||
|
loggers:
|
||||||
|
root:
|
||||||
|
handlers: [console]
|
||||||
|
_granian:
|
||||||
|
level: INFO
|
||||||
|
propagate: False
|
||||||
|
granian.access:
|
||||||
|
handlers: [ access ]
|
||||||
|
level: INFO
|
||||||
|
propagate: False
|
14
conf/nginx-bugis.conf
Normal file
14
conf/nginx-bugis.conf
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
server {
|
||||||
|
listen 8080;
|
||||||
|
http2 on;
|
||||||
|
|
||||||
|
server_name localhost;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
proxy_pass http://granian:8000;
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Host $host;
|
||||||
|
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||||
|
proxy_read_timeout 60s;
|
||||||
|
}
|
||||||
|
}
|
46
core/conf/logging.json
Normal file
46
core/conf/logging.json
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
{
|
||||||
|
"version": 1,
|
||||||
|
"disable_existing_loggers": false,
|
||||||
|
"handlers": {
|
||||||
|
"console": {
|
||||||
|
"class": "logging.StreamHandler",
|
||||||
|
"formatter": "default",
|
||||||
|
"level": "DEBUG",
|
||||||
|
"stream": "ext://sys.stderr"
|
||||||
|
},
|
||||||
|
"access": {
|
||||||
|
"class": "logging.StreamHandler",
|
||||||
|
"formatter": "access",
|
||||||
|
"level": "DEBUG",
|
||||||
|
"stream": "ext://sys.stdout"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"formatters": {
|
||||||
|
"default": {
|
||||||
|
"format": "{asctime}.{msecs:0<3.0f} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}",
|
||||||
|
"style": "{",
|
||||||
|
"datefmt": "%Y-%m-%d %H:%M:%S"
|
||||||
|
},
|
||||||
|
"access": {
|
||||||
|
"format": "%(message)s"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"loggers": {
|
||||||
|
"root": {
|
||||||
|
"handlers": [
|
||||||
|
"console"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"_granian": {
|
||||||
|
"level": "DEBUG",
|
||||||
|
"propagate": false
|
||||||
|
},
|
||||||
|
"granian.access": {
|
||||||
|
"handlers": [
|
||||||
|
"access"
|
||||||
|
],
|
||||||
|
"level": "DEBUG",
|
||||||
|
"propagate": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
20
core/example/hello.py
Normal file
20
core/example/hello.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
from bugis.core import BugisApp, HttpContext
|
||||||
|
|
||||||
|
|
||||||
|
class Hello(BugisApp):
|
||||||
|
|
||||||
|
async def handle_request(self, ctx: HttpContext) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, 'Hello World')
|
||||||
|
|
||||||
|
|
||||||
|
app = BugisApp()
|
||||||
|
|
||||||
|
|
||||||
|
@app.GET('/hello')
|
||||||
|
@app.GET('/hello2')
|
||||||
|
async def handle_request(ctx: HttpContext) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, 'Hello World')
|
55
core/pyproject.toml
Normal file
55
core/pyproject.toml
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=61.0", "setuptools-scm>=8"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
|
[project]
|
||||||
|
name = "bugis_core"
|
||||||
|
dynamic = ["version"]
|
||||||
|
authors = [
|
||||||
|
{ name="Walter Oggioni", email="oggioni.walter@gmail.com" },
|
||||||
|
]
|
||||||
|
description = "Markdown to HTML renderer"
|
||||||
|
readme = "README.md"
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
classifiers = [
|
||||||
|
'Development Status :: 3 - Alpha',
|
||||||
|
'Topic :: Utilities',
|
||||||
|
'License :: OSI Approved :: MIT License',
|
||||||
|
'Intended Audience :: System Administrators',
|
||||||
|
'Intended Audience :: Developers',
|
||||||
|
'Environment :: Console',
|
||||||
|
'License :: OSI Approved :: MIT License',
|
||||||
|
'Programming Language :: Python :: 3',
|
||||||
|
]
|
||||||
|
|
||||||
|
dependencies = [
|
||||||
|
"pwo",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
dev = [
|
||||||
|
"build", "mypy", "ipdb", "twine", "granian", "httpx"
|
||||||
|
]
|
||||||
|
|
||||||
|
rsgi = [
|
||||||
|
"granian"
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.urls]
|
||||||
|
"Homepage" = "https://github.com/woggioni/bugis"
|
||||||
|
"Bug Tracker" = "https://github.com/woggioni/bugis/issues"
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
python_version = "3.12"
|
||||||
|
disallow_untyped_defs = true
|
||||||
|
show_error_codes = true
|
||||||
|
no_implicit_optional = true
|
||||||
|
warn_return_any = true
|
||||||
|
warn_unused_ignores = true
|
||||||
|
exclude = ["scripts", "docs", "test"]
|
||||||
|
strict = true
|
||||||
|
|
||||||
|
[tool.setuptools_scm]
|
||||||
|
root='..'
|
||||||
|
version_file = "src/bugis/core/_version.py"
|
||||||
|
|
138
core/requirements-dev.txt
Normal file
138
core/requirements-dev.txt
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
#
|
||||||
|
# This file is autogenerated by pip-compile with Python 3.12
|
||||||
|
# by the following command:
|
||||||
|
#
|
||||||
|
# pip-compile --extra=dev --output-file=requirements-dev.txt
|
||||||
|
#
|
||||||
|
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
||||||
|
--extra-index-url https://pypi.org/simple
|
||||||
|
|
||||||
|
aiofiles==24.1.0
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
asttokens==2.4.1
|
||||||
|
# via stack-data
|
||||||
|
build==1.2.2.post1
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
certifi==2024.8.30
|
||||||
|
# via requests
|
||||||
|
cffi==1.17.1
|
||||||
|
# via cryptography
|
||||||
|
charset-normalizer==3.4.0
|
||||||
|
# via requests
|
||||||
|
cryptography==43.0.3
|
||||||
|
# via secretstorage
|
||||||
|
decorator==5.1.1
|
||||||
|
# via
|
||||||
|
# ipdb
|
||||||
|
# ipython
|
||||||
|
docutils==0.21.2
|
||||||
|
# via readme-renderer
|
||||||
|
executing==2.1.0
|
||||||
|
# via stack-data
|
||||||
|
idna==3.10
|
||||||
|
# via requests
|
||||||
|
importlib-metadata==8.5.0
|
||||||
|
# via twine
|
||||||
|
ipdb==0.13.13
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
ipython==8.29.0
|
||||||
|
# via ipdb
|
||||||
|
jaraco-classes==3.4.0
|
||||||
|
# via keyring
|
||||||
|
jaraco-context==6.0.1
|
||||||
|
# via keyring
|
||||||
|
jaraco-functools==4.1.0
|
||||||
|
# via keyring
|
||||||
|
jedi==0.19.1
|
||||||
|
# via ipython
|
||||||
|
jeepney==0.8.0
|
||||||
|
# via
|
||||||
|
# keyring
|
||||||
|
# secretstorage
|
||||||
|
keyring==25.5.0
|
||||||
|
# via twine
|
||||||
|
markdown-it-py==3.0.0
|
||||||
|
# via rich
|
||||||
|
matplotlib-inline==0.1.7
|
||||||
|
# via ipython
|
||||||
|
mdurl==0.1.2
|
||||||
|
# via markdown-it-py
|
||||||
|
more-itertools==10.5.0
|
||||||
|
# via
|
||||||
|
# jaraco-classes
|
||||||
|
# jaraco-functools
|
||||||
|
mypy==1.13.0
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
mypy-extensions==1.0.0
|
||||||
|
# via mypy
|
||||||
|
nh3==0.2.18
|
||||||
|
# via readme-renderer
|
||||||
|
packaging==24.1
|
||||||
|
# via build
|
||||||
|
parso==0.8.4
|
||||||
|
# via jedi
|
||||||
|
pexpect==4.9.0
|
||||||
|
# via ipython
|
||||||
|
pkginfo==1.10.0
|
||||||
|
# via twine
|
||||||
|
prompt-toolkit==3.0.48
|
||||||
|
# via ipython
|
||||||
|
ptyprocess==0.7.0
|
||||||
|
# via pexpect
|
||||||
|
pure-eval==0.2.3
|
||||||
|
# via stack-data
|
||||||
|
pwo==0.0.5
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
pycparser==2.22
|
||||||
|
# via cffi
|
||||||
|
pygments==2.18.0
|
||||||
|
# via
|
||||||
|
# ipython
|
||||||
|
# readme-renderer
|
||||||
|
# rich
|
||||||
|
pyproject-hooks==1.2.0
|
||||||
|
# via build
|
||||||
|
pyyaml==6.0.2
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
readme-renderer==44.0
|
||||||
|
# via twine
|
||||||
|
requests==2.32.3
|
||||||
|
# via
|
||||||
|
# requests-toolbelt
|
||||||
|
# twine
|
||||||
|
requests-toolbelt==1.0.0
|
||||||
|
# via twine
|
||||||
|
rfc3986==2.0.0
|
||||||
|
# via twine
|
||||||
|
rich==13.9.3
|
||||||
|
# via twine
|
||||||
|
secretstorage==3.3.3
|
||||||
|
# via keyring
|
||||||
|
six==1.16.0
|
||||||
|
# via asttokens
|
||||||
|
sortedcontainers==2.4.0
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
stack-data==0.6.3
|
||||||
|
# via ipython
|
||||||
|
traitlets==5.14.3
|
||||||
|
# via
|
||||||
|
# ipython
|
||||||
|
# matplotlib-inline
|
||||||
|
twine==5.1.1
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
types-pyyaml==6.0.12.20240917
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via
|
||||||
|
# mypy
|
||||||
|
# pwo
|
||||||
|
urllib3==2.2.3
|
||||||
|
# via
|
||||||
|
# requests
|
||||||
|
# twine
|
||||||
|
watchdog==5.0.3
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
wcwidth==0.2.13
|
||||||
|
# via prompt-toolkit
|
||||||
|
zipp==3.20.2
|
||||||
|
# via importlib-metadata
|
21
core/requirements.txt
Normal file
21
core/requirements.txt
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
#
|
||||||
|
# This file is autogenerated by pip-compile with Python 3.12
|
||||||
|
# by the following command:
|
||||||
|
#
|
||||||
|
# pip-compile --output-file=requirements.txt
|
||||||
|
#
|
||||||
|
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
||||||
|
--extra-index-url https://pypi.org/simple
|
||||||
|
|
||||||
|
aiofiles==24.1.0
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
pwo==0.0.5
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
pyyaml==6.0.2
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
sortedcontainers==2.4.0
|
||||||
|
# via bugis_core (pyproject.toml)
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via pwo
|
||||||
|
watchdog==5.0.3
|
||||||
|
# via bugis_core (pyproject.toml)
|
15
core/src/bugis/core/__init__.py
Normal file
15
core/src/bugis/core/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
from ._app import BugisApp
|
||||||
|
from ._http_method import HttpMethod
|
||||||
|
from ._http_context import HttpContext
|
||||||
|
from ._tree import Tree, PathIterator
|
||||||
|
from ._path_handler import PathHandler
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
'HttpMethod',
|
||||||
|
'BugisApp',
|
||||||
|
'HttpContext',
|
||||||
|
'Tree',
|
||||||
|
'PathHandler',
|
||||||
|
'PathIterator'
|
||||||
|
]
|
143
core/src/bugis/core/_app.py
Normal file
143
core/src/bugis/core/_app.py
Normal file
@@ -0,0 +1,143 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from asyncio import Queue, AbstractEventLoop
|
||||||
|
from asyncio import get_running_loop
|
||||||
|
from logging import getLogger
|
||||||
|
from typing import Callable, Awaitable, Any, Mapping, Sequence, Optional, Unpack, Tuple
|
||||||
|
|
||||||
|
from pwo import Maybe, AsyncQueueIterator
|
||||||
|
|
||||||
|
from ._http_context import HttpContext
|
||||||
|
from ._http_method import HttpMethod
|
||||||
|
from ._types import StrOrStrings
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ._rsgi import RsgiContext
|
||||||
|
from granian._granian import RSGIHTTPProtocol, RSGIHTTPScope # type: ignore
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
from ._asgi import AsgiContext
|
||||||
|
from ._tree import Tree
|
||||||
|
from ._types.asgi import LifespanScope, HTTPScope as ASGIHTTPScope, WebSocketScope
|
||||||
|
|
||||||
|
log = getLogger(__name__)
|
||||||
|
|
||||||
|
type HttpHandler = Callable[[HttpContext, Unpack[Any]], Awaitable[None]]
|
||||||
|
|
||||||
|
|
||||||
|
class AbstractBugisApp(ABC):
    """Base application adapter speaking both ASGI and RSGI.

    Subclasses implement handle_request(); this class translates server
    protocol events into context objects and lifecycle callbacks.
    """

    async def __call__(self,
                       scope: ASGIHTTPScope | WebSocketScope | LifespanScope,
                       receive: Callable[[], Awaitable[Any]],
                       send: Callable[[Mapping[str, Any]], Awaitable[None]]) -> None:
        """ASGI entry point: dispatch on scope['type'].

        Handles 'lifespan' (startup/shutdown callbacks) and 'http'
        (builds an AsgiContext and pumps request-body chunks through a
        queue); any other scope type raises NotImplementedError.
        """
        loop = get_running_loop()
        if scope['type'] == 'lifespan':
            # NOTE(review): this loop never breaks after sending
            # 'lifespan.shutdown.complete'; it relies on the server not
            # sending further lifespan events — confirm against servers.
            while True:
                message = await receive()
                if message['type'] == 'lifespan.startup':
                    self.setup(loop)
                    await send({'type': 'lifespan.startup.complete'})
                elif message['type'] == 'lifespan.shutdown':
                    self.shutdown(loop)
                    await send({'type': 'lifespan.shutdown.complete'})
        elif scope['type'] == 'http':
            # None acts as the end-of-body sentinel for the iterator.
            queue: Queue[Optional[bytes]] = Queue()
            ctx = AsgiContext(scope, receive, send, AsyncQueueIterator(queue))
            # Run the user handler concurrently while this loop keeps
            # feeding protocol events into the body queue.
            request_handling = loop.create_task(self.handle_request(ctx))
            while True:
                message = await receive()
                if message['type'] == 'http.request':
                    # Enqueue only non-empty chunks.
                    Maybe.of(message['body']).filter(lambda it: len(it) > 0).if_present(queue.put_nowait)
                    if not message.get('more_body', False):
                        queue.put_nowait(None)
                        await request_handling
                        break
                elif message['type'] == 'http.disconnect':
                    # Client went away: abandon the in-flight handler.
                    request_handling.cancel()
                    break
        else:
            raise NotImplementedError()

    def setup(self, loop: AbstractEventLoop) -> None:
        """Startup hook; default no-op. Invoked on ASGI lifespan startup
        and from __rsgi_init__."""
        pass

    def shutdown(self, loop: AbstractEventLoop) -> None:
        """Shutdown hook; default no-op. Invoked on ASGI lifespan
        shutdown and from __rsgi_del__."""
        pass

    @abstractmethod
    async def handle_request(self, ctx: HttpContext) -> None:
        """Handle one HTTP exchange; must be provided by subclasses."""
        raise NotImplementedError()

    def __rsgi_init__(self, loop: AbstractEventLoop) -> None:
        """RSGI lifecycle hook: delegates to setup()."""
        self.setup(loop)

    def __rsgi_del__(self, loop: AbstractEventLoop) -> None:
        """RSGI lifecycle hook: delegates to shutdown()."""
        self.shutdown(loop)

    async def __rsgi__(self, scope: RSGIHTTPScope, protocol: RSGIHTTPProtocol) -> None:
        """RSGI entry point: wrap scope/protocol and delegate."""
        ctx = RsgiContext(scope, protocol)
        await self.handle_request(ctx)
|
||||||
|
|
||||||
|
|
||||||
|
class BugisApp(AbstractBugisApp):
    """Concrete application: routes requests through a path Tree."""

    _tree: Tree

    def __init__(self) -> None:
        self._tree = Tree()

    async def handle_request(self, ctx: HttpContext) -> None:
        """Dispatch `ctx` to the first matching registered handler.

        Responds 404 with an empty body when no route matches.
        """
        result = self._tree.get_handler(ctx.path, ctx.method)
        if result is not None:
            handler, captured = result
            await handler.handle_request(ctx, captured)
        else:
            # FIX: removed a dead `pass` statement that followed this await.
            await ctx.send_empty(404)

    def route(self,
              paths: StrOrStrings,
              methods: Optional[HttpMethod | Sequence[HttpMethod]] = None,
              recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Decorator registering a handler for every paths x methods pair.

        `methods=None` registers a method-agnostic handler; `recursive`
        lets the handler also match descendant paths.
        """

        def wrapped(handler: HttpHandler) -> HttpHandler:
            # Normalise both arguments to tuples so single values and
            # sequences are handled uniformly.
            _methods: Tuple[Optional[HttpMethod], ...]
            if methods is None:
                _methods = (None,)
            elif isinstance(methods, HttpMethod):
                _methods = (methods,)
            else:
                _methods = tuple(methods)
            _paths: Tuple[str, ...] = (paths,) if isinstance(paths, str) else tuple(paths)
            for method in _methods:
                for path in _paths:
                    self._tree.register(path, method, handler, recursive)
            return handler

        return wrapped

    def GET(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Shorthand for route(path, (HttpMethod.GET,), recursive)."""
        return self.route(path, (HttpMethod.GET,), recursive)

    def POST(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Shorthand for route(path, (HttpMethod.POST,), recursive)."""
        return self.route(path, (HttpMethod.POST,), recursive)

    def PUT(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Shorthand for route(path, (HttpMethod.PUT,), recursive)."""
        return self.route(path, (HttpMethod.PUT,), recursive)

    def DELETE(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Shorthand for route(path, (HttpMethod.DELETE,), recursive)."""
        return self.route(path, (HttpMethod.DELETE,), recursive)

    def OPTIONS(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Shorthand for route(path, (HttpMethod.OPTIONS,), recursive)."""
        return self.route(path, (HttpMethod.OPTIONS,), recursive)

    def HEAD(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Shorthand for route(path, (HttpMethod.HEAD,), recursive)."""
        return self.route(path, (HttpMethod.HEAD,), recursive)

    def PATCH(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        """Shorthand for route(path, (HttpMethod.PATCH,), recursive)."""
        return self.route(path, (HttpMethod.PATCH,), recursive)
|
148
core/src/bugis/core/_asgi.py
Normal file
148
core/src/bugis/core/_asgi.py
Normal file
@@ -0,0 +1,148 @@
|
|||||||
|
from typing import (
|
||||||
|
Sequence,
|
||||||
|
Tuple,
|
||||||
|
Dict,
|
||||||
|
Mapping,
|
||||||
|
Callable,
|
||||||
|
Any,
|
||||||
|
AsyncIterator,
|
||||||
|
Awaitable,
|
||||||
|
AsyncGenerator,
|
||||||
|
Optional,
|
||||||
|
List,
|
||||||
|
Iterable
|
||||||
|
)
|
||||||
|
|
||||||
|
from pwo import Maybe
|
||||||
|
from pathlib import Path
|
||||||
|
from ._http_method import HttpMethod
|
||||||
|
from ._http_context import HttpContext
|
||||||
|
from ._types import StrOrStrings
|
||||||
|
from ._types.asgi import HTTPScope
|
||||||
|
|
||||||
|
|
||||||
|
def decode_headers(headers: Iterable[Tuple[bytes, bytes]]) -> Dict[str, Sequence[str]]:
    """Decode raw (name, value) header pairs into a str-keyed multi-map.

    Accepts bytes or str for either element and groups repeated header
    names into a tuple of values, preserving order of appearance.

    Raises:
        NotImplementedError: if a name or value is neither bytes nor str.
    """
    result: Dict[str, List[str]] = dict()
    for key, value in headers:
        key_str: str
        value_str: str
        if isinstance(key, bytes):
            key_str = key.decode()
        elif isinstance(key, str):
            key_str = key
        else:
            raise NotImplementedError('This should never happen')
        if isinstance(value, bytes):
            value_str = value.decode()
        elif isinstance(value, str):
            # FIX: the original tested isinstance(key, str) here, so a
            # str *value* paired with a bytes key fell into the error
            # branch instead of being accepted.
            value_str = value
        else:
            raise NotImplementedError('This should never happen')
        result.setdefault(key_str, list()).append(value_str)
    return {
        k: tuple(v) for k, v in result.items()
    }
|
||||||
|
|
||||||
|
|
||||||
|
def encode_headers(headers: Mapping[str, StrOrStrings]) -> Tuple[Tuple[bytes, bytes], ...]:
    """Encode a str-keyed header mapping into raw (name, value) byte pairs.

    A plain-str value yields one pair; a sequence of strs yields one
    pair per element, repeating the header name each time. Values of
    any other type are skipped.
    """
    pairs: List[Tuple[bytes, bytes]] = []
    for name, value in headers.items():
        encoded_name = name.encode()
        if isinstance(value, str):
            pairs.append((encoded_name, value.encode()))
        elif isinstance(value, Sequence):
            pairs.extend((encoded_name, item.encode()) for item in value)
    return tuple(pairs)
|
||||||
|
|
||||||
|
|
||||||
|
class AsgiContext(HttpContext):
|
||||||
|
pathsend: bool
|
||||||
|
receive: Callable[[], Awaitable[Any]]
|
||||||
|
send: Callable[[Mapping[str, Any]], Awaitable[None]]
|
||||||
|
scheme: str
|
||||||
|
method: HttpMethod
|
||||||
|
path: str
|
||||||
|
query_string: str
|
||||||
|
headers: Mapping[str, Sequence[str]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
request_body: AsyncIterator[bytes]
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
scope: HTTPScope,
|
||||||
|
receive: Callable[[], Awaitable[Any]],
|
||||||
|
send: Callable[[Mapping[str, Any]], Awaitable[None]],
|
||||||
|
request_body_iterator: AsyncIterator[bytes]):
|
||||||
|
self.receive = receive
|
||||||
|
self.send = send
|
||||||
|
self.pathsend = (Maybe.of_nullable(scope.get('extensions'))
|
||||||
|
.map(lambda it: it.get("http.response.pathsend"))
|
||||||
|
.is_present)
|
||||||
|
self.path = scope['path']
|
||||||
|
self.query_string = scope['query_string'].decode()
|
||||||
|
self.method = HttpMethod(scope['method'])
|
||||||
|
self.scheme = scope['scheme']
|
||||||
|
self.client = scope['client']
|
||||||
|
self.server = scope['server']
|
||||||
|
self.headers = decode_headers(scope['headers'])
|
||||||
|
self.request_body = request_body_iterator
|
||||||
|
|
||||||
|
async def stream_body(self,
                      status: int,
                      body_generator: AsyncGenerator[bytes, None],
                      headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
    """Stream a chunked response: head first, then one ASGI body message
    per generated chunk, then an empty terminating message."""
    await self._send_head(status, headers)
    async for chunk in body_generator:
        await self.send({
            'type': 'http.response.body',
            'body': chunk,
            'more_body': True
        })
    # FIX: the ASGI spec requires 'body' to be bytes; the original sent
    # the str '' in the terminating message.
    await self.send({
        'type': 'http.response.body',
        'body': b'',
        'more_body': False
    })
|
||||||
|
|
||||||
|
async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
await self._send_head(status, headers)
|
||||||
|
await self.send({
|
||||||
|
'type': 'http.response.body',
|
||||||
|
'body': body,
|
||||||
|
})
|
||||||
|
|
||||||
|
async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
await self._send_head(status, headers)
|
||||||
|
await self.send({
|
||||||
|
'type': 'http.response.body',
|
||||||
|
'body': body.encode(),
|
||||||
|
})
|
||||||
|
|
||||||
|
async def _send_head(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
await self.send({
|
||||||
|
'type': 'http.response.start',
|
||||||
|
'status': status,
|
||||||
|
'headers': Maybe.of_nullable(headers).map(encode_headers).or_else(tuple())
|
||||||
|
})
|
||||||
|
|
||||||
|
async def send_file(self,
|
||||||
|
status: int,
|
||||||
|
path: Path,
|
||||||
|
headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
if self.pathsend:
|
||||||
|
await self._send_head(status, headers)
|
||||||
|
await self.send({
|
||||||
|
'type': 'http.response.pathsend',
|
||||||
|
'path': path
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
async def send_empty(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
    """Send a response with `status` and `headers` and no body."""
    await self._send_head(status, headers)
    # FIX: the ASGI spec requires 'body' to be bytes; the original sent
    # the str ''.
    await self.send({
        'type': 'http.response.body',
        'body': b'',
        'more_body': False
    })
|
51
core/src/bugis/core/_http_context.py
Normal file
51
core/src/bugis/core/_http_context.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
from typing import (
|
||||||
|
Callable,
|
||||||
|
Awaitable,
|
||||||
|
Tuple,
|
||||||
|
AsyncIterator,
|
||||||
|
AsyncGenerator,
|
||||||
|
Mapping,
|
||||||
|
Sequence,
|
||||||
|
Any,
|
||||||
|
Optional
|
||||||
|
)
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from ._http_method import HttpMethod
|
||||||
|
|
||||||
|
|
||||||
|
class HttpContext(ABC):
    """Protocol-agnostic view of one in-flight HTTP exchange.

    Concrete subclasses (ASGI, RSGI) populate the request attributes and
    implement the response primitives; handlers program against this
    interface only.
    """

    # True when the underlying server can send a file by path.
    pathsend: bool
    # Awaitable callback yielding the next protocol event.
    # FIX: was annotated Callable[[None], Awaitable[Any]]; the callable
    # takes no arguments (see AsgiContext, which stores ASGI receive()).
    receive: Callable[[], Awaitable[Any]]
    # Awaitable callback delivering one protocol message to the server.
    send: Callable[[Mapping[str, Any]], Awaitable[None]]
    scheme: str
    method: HttpMethod
    path: str
    # Raw query string, already decoded to str.
    query_string: str
    # Header multi-map: name -> all values seen, in order.
    headers: Mapping[str, Sequence[str]]
    # (host, port) of the peer, when the server reports one.
    client: Optional[Tuple[str, int]]
    server: Optional[Tuple[str, Optional[int]]]
    # Lazily-consumed request payload chunks.
    request_body: AsyncIterator[bytes]

    @abstractmethod
    async def stream_body(self,
                          status: int,
                          body_generator: AsyncGenerator[bytes, None],
                          headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
        """Send `status` and `headers`, then stream the generated chunks."""
        pass

    @abstractmethod
    async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
        """Send a complete response with a bytes payload."""
        pass

    async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
        """Send a complete response with a text payload (UTF-8 encoded)."""
        await self.send_bytes(status, body.encode(), headers)

    @abstractmethod
    async def send_file(self, status: int, path: Path, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
        """Send the file at `path` as the response body."""
        pass

    @abstractmethod
    async def send_empty(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
        """Send a response with no body."""
        pass
|
11
core/src/bugis/core/_http_method.py
Normal file
11
core/src/bugis/core/_http_method.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
from enum import StrEnum
|
||||||
|
|
||||||
|
|
||||||
|
class HttpMethod(StrEnum):
    """HTTP request methods supported by the router.

    As a StrEnum, each member compares and hashes equal to its literal
    method string (e.g. HttpMethod.GET == 'GET'), so scope values can be
    mapped directly via HttpMethod(scope['method']).
    """
    OPTIONS = 'OPTIONS'
    HEAD = 'HEAD'
    GET = 'GET'
    POST = 'POST'
    PUT = 'PUT'
    DELETE = 'DELETE'
    PATCH = 'PATCH'
|
12
core/src/bugis/core/_node.py
Normal file
12
core/src/bugis/core/_node.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import (
|
||||||
|
Optional,
|
||||||
|
Dict,
|
||||||
|
List,
|
||||||
|
)
|
||||||
|
from ._types import NodeType
|
||||||
|
from ._path_handler import PathHandler
|
||||||
|
from ._path_matcher import PathMatcher
|
||||||
|
|
||||||
|
|
||||||
|
|
33
core/src/bugis/core/_path_handler.py
Normal file
33
core/src/bugis/core/_path_handler.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import (
|
||||||
|
Sequence,
|
||||||
|
Dict,
|
||||||
|
Optional
|
||||||
|
)
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from ._http_context import HttpContext
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class Matches:
    """Values captured while matching a request path against the tree."""

    # Named captures from ${name} / ${name:kind} segments.
    # NOTE(review): IntMatcher stores int values here, so Dict[str, str]
    # looks too narrow — confirm intended value type.
    kwargs: Dict[str, str] = field(default_factory=dict)

    # Positional capture: the path segments consumed by a glob matcher,
    # when one matched (None otherwise).
    path: Optional[Sequence[str]] = None

    # Trailing segments left after the deepest match; non-empty only
    # when dispatch falls through to a recursive handler.
    unmatched_paths: Sequence[str] = field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
|
class PathHandler(ABC):
    """A registered request handler attached to a node of the path tree."""

    @abstractmethod
    async def handle_request(self, ctx: HttpContext, captured: Matches) -> None:
        """Handle the request in `ctx` with path captures in `captured`."""
        pass

    @property
    @abstractmethod
    def recursive(self) -> bool:
        """True when this handler also matches descendant paths."""
        raise NotImplementedError()
|
||||||
|
|
||||||
|
|
||||||
|
type PathHandlers = (PathHandler | Sequence[PathHandler])
|
97
core/src/bugis/core/_path_matcher.py
Normal file
97
core/src/bugis/core/_path_matcher.py
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
from fnmatch import fnmatch
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Optional, Sequence, Dict, List, Union
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from ._path_handler import PathHandler
|
||||||
|
from ._types import NodeType, PathMatcherResult
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Node:
|
||||||
|
key: NodeType
|
||||||
|
parent: Optional[Union['Node', 'PathMatcher']]
|
||||||
|
children: Dict[NodeType, 'Node']
|
||||||
|
handlers: List[PathHandler]
|
||||||
|
path_matchers: List['PathMatcher']
|
||||||
|
|
||||||
|
|
||||||
|
class PathMatcher(ABC):
|
||||||
|
parent: Optional[Union['Node', 'PathMatcher']]
|
||||||
|
children: Dict[NodeType, Node]
|
||||||
|
handlers: List[PathHandler]
|
||||||
|
path_matchers: List['PathMatcher']
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
parent: Optional[Union['Node', 'PathMatcher']],
|
||||||
|
children: Dict[NodeType, Node],
|
||||||
|
handlers: List[PathHandler],
|
||||||
|
path_matchers: List['PathMatcher']
|
||||||
|
):
|
||||||
|
self.parent = parent
|
||||||
|
self.children = children
|
||||||
|
self.handlers = handlers
|
||||||
|
self.path_matchers = path_matchers
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def match(self, path: Sequence[str]) -> Optional[PathMatcherResult]:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class StrMatcher(PathMatcher):
|
||||||
|
name: str
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
name: str,
|
||||||
|
parent: Optional[Node | PathMatcher],
|
||||||
|
children: Dict[NodeType, Node],
|
||||||
|
handlers: List[PathHandler],
|
||||||
|
path_matchers: List[PathMatcher],
|
||||||
|
):
|
||||||
|
super().__init__(parent, children, handlers, path_matchers)
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def match(self, path: Sequence[str]) -> Optional[PathMatcherResult]:
|
||||||
|
if len(path):
|
||||||
|
return {self.name: path[0]}
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class IntMatcher(PathMatcher):
|
||||||
|
name: str
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
name: str,
|
||||||
|
parent: Optional[Node | PathMatcher],
|
||||||
|
children: Dict[NodeType, Node],
|
||||||
|
handlers: List[PathHandler],
|
||||||
|
path_matchers: List[PathMatcher],
|
||||||
|
):
|
||||||
|
super().__init__(parent, children, handlers, path_matchers)
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def match(self, path: Sequence[str]) -> Optional[PathMatcherResult]:
|
||||||
|
if len(path) > 0:
|
||||||
|
try:
|
||||||
|
return {self.name: int(path[0])}
|
||||||
|
except ValueError:
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class GlobMatcher(PathMatcher):
|
||||||
|
pattern: str
|
||||||
|
|
||||||
|
def __init__(self,
|
||||||
|
pattern: str,
|
||||||
|
parent: Optional[Node | PathMatcher],
|
||||||
|
children: Dict[NodeType, Node],
|
||||||
|
handlers: List[PathHandler],
|
||||||
|
path_matchers: List[PathMatcher],
|
||||||
|
):
|
||||||
|
super().__init__(parent, children, handlers, path_matchers)
|
||||||
|
self.pattern = pattern
|
||||||
|
|
||||||
|
def match(self, path: Sequence[str]) -> Optional[PathMatcherResult]:
|
||||||
|
return path if fnmatch('/'.join(path), self.pattern) else None
|
94
core/src/bugis/core/_rsgi.py
Normal file
94
core/src/bugis/core/_rsgi.py
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
from functools import reduce
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
Sequence,
|
||||||
|
Mapping,
|
||||||
|
AsyncIterator,
|
||||||
|
Tuple,
|
||||||
|
AsyncGenerator,
|
||||||
|
Optional,
|
||||||
|
List,
|
||||||
|
Dict,
|
||||||
|
Callable,
|
||||||
|
cast
|
||||||
|
)
|
||||||
|
|
||||||
|
from granian._granian import RSGIHTTPProtocol, RSGIHTTPScope
|
||||||
|
from pwo import Maybe
|
||||||
|
|
||||||
|
from ._http_context import HttpContext
|
||||||
|
from ._http_method import HttpMethod
|
||||||
|
|
||||||
|
|
||||||
|
class RsgiContext(HttpContext):
|
||||||
|
protocol: RSGIHTTPProtocol
|
||||||
|
scheme: str
|
||||||
|
method: HttpMethod
|
||||||
|
path: str
|
||||||
|
query_string: str
|
||||||
|
headers: Mapping[str, Sequence[str]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
request_body: AsyncIterator[bytes]
|
||||||
|
head = Optional[Tuple[int, Sequence[Tuple[str, str]]]]
|
||||||
|
|
||||||
|
def __init__(self, scope: RSGIHTTPScope, protocol: RSGIHTTPProtocol):
|
||||||
|
self.scheme = scope.scheme
|
||||||
|
self.path = scope.path
|
||||||
|
self.method = HttpMethod(scope.method)
|
||||||
|
self.query_string = scope.query_string
|
||||||
|
|
||||||
|
def acc(d: Dict[str, List[str]], t: Tuple[str, str]) -> Dict[str, List[str]]:
|
||||||
|
d.setdefault(t[0], list()).append(t[1])
|
||||||
|
return d
|
||||||
|
|
||||||
|
fun = cast(Callable[[Mapping[str, Sequence[str]], tuple[str, str]], Mapping[str, Sequence[str]]], acc)
|
||||||
|
self.headers = reduce(fun, scope.headers.items(), {})
|
||||||
|
self.client = (Maybe.of(scope.client.split(':'))
|
||||||
|
.map(lambda it: (it[0], int(it[1])))
|
||||||
|
.or_else_throw(RuntimeError))
|
||||||
|
self.server = (Maybe.of(scope.server.split(':'))
|
||||||
|
.map(lambda it: (it[0], int(it[1])))
|
||||||
|
.or_else_throw(RuntimeError))
|
||||||
|
self.request_body = aiter(protocol)
|
||||||
|
self.protocol = protocol
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _rearrange_headers(headers: Mapping[str, Sequence[str]]) -> List[Tuple[str, str]]:
|
||||||
|
return list(
|
||||||
|
((key, value) for key, values in headers.items() for value in values)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def stream_body(self,
|
||||||
|
status: int,
|
||||||
|
body_generator: AsyncGenerator[bytes, None],
|
||||||
|
headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
transport = self.protocol.response_stream(status,
|
||||||
|
Maybe.of_nullable(headers)
|
||||||
|
.map(self._rearrange_headers)
|
||||||
|
.or_else([]))
|
||||||
|
async for chunk in body_generator:
|
||||||
|
await transport.send_bytes(chunk)
|
||||||
|
|
||||||
|
async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
|
||||||
|
if len(body) > 0:
|
||||||
|
self.protocol.response_bytes(status, rearranged_headers, body)
|
||||||
|
else:
|
||||||
|
self.protocol.response_empty(status, rearranged_headers)
|
||||||
|
|
||||||
|
async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
|
||||||
|
if len(body) > 0:
|
||||||
|
self.protocol.response_str(status, rearranged_headers, body)
|
||||||
|
else:
|
||||||
|
self.protocol.response_empty(status, rearranged_headers)
|
||||||
|
|
||||||
|
async def send_file(self, status: int, path: Path, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
|
||||||
|
self.protocol.response_file(status, rearranged_headers, str(path))
|
||||||
|
|
||||||
|
async def send_empty(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
|
||||||
|
rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
|
||||||
|
self.protocol.response_empty(status, rearranged_headers)
|
223
core/src/bugis/core/_tree.py
Normal file
223
core/src/bugis/core/_tree.py
Normal file
@@ -0,0 +1,223 @@
|
|||||||
|
from itertools import chain
|
||||||
|
from typing import (
|
||||||
|
Sequence,
|
||||||
|
Awaitable,
|
||||||
|
Callable,
|
||||||
|
Optional,
|
||||||
|
Generator,
|
||||||
|
Self,
|
||||||
|
List,
|
||||||
|
Tuple,
|
||||||
|
Mapping,
|
||||||
|
Any,
|
||||||
|
)
|
||||||
|
from typing_extensions import Unpack
|
||||||
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
from pwo import Maybe, index_of_with_escape
|
||||||
|
|
||||||
|
from ._http_context import HttpContext
|
||||||
|
from ._http_method import HttpMethod
|
||||||
|
from ._path_handler import PathHandler
|
||||||
|
from ._path_matcher import PathMatcher, IntMatcher, GlobMatcher, StrMatcher, Node
|
||||||
|
from ._types import NodeType, Matches
|
||||||
|
|
||||||
|
|
||||||
|
class Tree:
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
self.root = Node('/', None, {}, [], [])
|
||||||
|
|
||||||
|
def search(self, path: Generator[str, None, None], method: HttpMethod) \
|
||||||
|
-> Optional[Tuple[Node | PathMatcher, Matches]]:
|
||||||
|
paths: List[str] = list(path)
|
||||||
|
result: Node | PathMatcher = self.root
|
||||||
|
|
||||||
|
matches = Matches()
|
||||||
|
it, i = iter((it for it in paths)), -1
|
||||||
|
while True:
|
||||||
|
node = result
|
||||||
|
leaf, i = next(it, None), i + 1
|
||||||
|
if leaf is None:
|
||||||
|
break
|
||||||
|
child = node.children.get(leaf)
|
||||||
|
if child is None and isinstance(leaf, str):
|
||||||
|
for matcher in node.path_matchers:
|
||||||
|
match = matcher.match(paths[i:])
|
||||||
|
if match is not None:
|
||||||
|
if isinstance(match, Mapping):
|
||||||
|
matches.kwargs.update(match)
|
||||||
|
elif isinstance(match, Sequence):
|
||||||
|
matches.path = match
|
||||||
|
result = matcher
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
result = child
|
||||||
|
child = result.children.get(method)
|
||||||
|
if child is not None:
|
||||||
|
result = child
|
||||||
|
matches.unmatched_paths = paths[i:]
|
||||||
|
return None if result == self.root else (result, matches)
|
||||||
|
|
||||||
|
def add(self, path: Generator[str, None, None], method: Optional[HttpMethod], *path_handlers: PathHandler) -> Node | PathMatcher:
|
||||||
|
lineage: Generator[NodeType, None, None] = (it for it in
|
||||||
|
chain(path,
|
||||||
|
Maybe.of_nullable(method)
|
||||||
|
.map(lambda it: [it])
|
||||||
|
.or_else([])))
|
||||||
|
result: Node | PathMatcher = self.root
|
||||||
|
it = iter(lineage)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
node = result
|
||||||
|
leaf = next(it, None)
|
||||||
|
if leaf is None:
|
||||||
|
break
|
||||||
|
child = node.children.get(leaf)
|
||||||
|
if child is None:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
result = child
|
||||||
|
key = leaf
|
||||||
|
while key is not None:
|
||||||
|
new_node = self.parse(key, result)
|
||||||
|
if isinstance(new_node, Node):
|
||||||
|
result.children[key] = new_node
|
||||||
|
else:
|
||||||
|
result.path_matchers.append(new_node)
|
||||||
|
result = new_node
|
||||||
|
key = next(it, None)
|
||||||
|
|
||||||
|
result.handlers = list(chain(result.handlers, path_handlers))
|
||||||
|
return result
|
||||||
|
|
||||||
|
def register(self,
|
||||||
|
path: str,
|
||||||
|
method: Optional[HttpMethod],
|
||||||
|
callback: Callable[[HttpContext, Unpack[Any]], Awaitable[None]],
|
||||||
|
recursive: bool) -> None:
|
||||||
|
class Handler(PathHandler):
|
||||||
|
|
||||||
|
async def handle_request(self, ctx: HttpContext, captured: Matches) -> None:
|
||||||
|
args = Maybe.of_nullable(captured.path).map(lambda it: [it]).or_else([])
|
||||||
|
await callback(ctx, *args, **captured.kwargs)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def recursive(self) -> bool:
|
||||||
|
return recursive
|
||||||
|
|
||||||
|
handler = Handler()
|
||||||
|
self.add((p for p in PathIterator(path)), method, handler)
|
||||||
|
|
||||||
|
def find_node(self, path: Generator[str, None, None], method: HttpMethod = HttpMethod.GET) \
|
||||||
|
-> Optional[Tuple[Node | PathMatcher, Matches]]:
|
||||||
|
return (Maybe.of_nullable(self.search(path, method))
|
||||||
|
.filter(lambda it: len(it[0].handlers) > 0)
|
||||||
|
.or_none())
|
||||||
|
|
||||||
|
def get_handler(self, url: str, method: HttpMethod = HttpMethod.GET) \
|
||||||
|
-> Optional[Tuple[PathHandler, Matches]]:
|
||||||
|
path = urlparse(url).path
|
||||||
|
result: Optional[Tuple[Node | PathMatcher, Matches]] = self.find_node((p for p in PathIterator(path)), method)
|
||||||
|
if result is None:
|
||||||
|
return None
|
||||||
|
node, captured = result
|
||||||
|
# requested = (p for p in PathIterator(path))
|
||||||
|
# found = reversed([n for n in NodeAncestryIterator(node) if n != self.root])
|
||||||
|
# unmatched: List[str] = []
|
||||||
|
# for r, f in zip(requested, found):
|
||||||
|
# if f is None:
|
||||||
|
# unmatched.append(r)
|
||||||
|
for handler in node.handlers:
|
||||||
|
if len(captured.unmatched_paths) == 0:
|
||||||
|
return handler, captured
|
||||||
|
elif handler.recursive:
|
||||||
|
return handler, captured
|
||||||
|
# if handler.match(unmatched, method):
|
||||||
|
# return (handler, unmatched)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def parse(self, leaf: str, parent: Optional[Node | PathMatcher]) -> Node | PathMatcher:
    """Parse one path segment into a tree node or a capturing matcher.

    Segments of the form ``${name}`` or ``${name:kind}`` (kind in
    {'str', 'int'}) become capturing matchers, segments containing an
    unescaped ``*`` become glob matchers, and anything else becomes a
    literal Node.

    Raises:
        ValueError: for an unknown capture kind.
    """
    result = index_of_with_escape(leaf, '${', '\\', 0)
    if result >= 0:
        start = result + 2
        # FIX: search for '}' from `start`, not `start + 2` — a
        # single-character name such as '${a}' has its '}' immediately
        # after the name, and the original skipped past it, raising
        # ValueError.
        end = leaf.index('}', start)
        definition = leaf[start:end]
        key, sep, kind = definition.partition(':')
        if not sep:
            # No ':' present: the whole definition is the name and the
            # kind defaults to 'str'.
            kind = 'str'
        if kind == 'str':
            return StrMatcher(name=key, parent=parent, children={}, handlers=[], path_matchers=[])
        elif kind == 'int':
            return IntMatcher(name=key, parent=parent, children={}, handlers=[], path_matchers=[])
        else:
            raise ValueError(f"Unknown kind: '{kind}'")
    result = index_of_with_escape(leaf, '*', '\\', 0)
    if result >= 0:
        return GlobMatcher(pattern=leaf, parent=parent, children={}, handlers=[], path_matchers=[])
    else:
        return Node(key=leaf, parent=parent, children={}, handlers=[], path_matchers=[])
|
||||||
|
|
||||||
|
|
||||||
|
class PathIterator:
|
||||||
|
path: str
|
||||||
|
cursor: int
|
||||||
|
|
||||||
|
def __init__(self, path: str):
|
||||||
|
self.path = path
|
||||||
|
self.cursor = 0
|
||||||
|
|
||||||
|
def __iter__(self) -> Self:
|
||||||
|
return self
|
||||||
|
|
||||||
|
def advance_cursor(self, next_value: int) -> None:
|
||||||
|
if next_value < len(self.path):
|
||||||
|
self.cursor = next_value
|
||||||
|
else:
|
||||||
|
self.cursor = -1
|
||||||
|
|
||||||
|
def __next__(self) -> str:
|
||||||
|
if self.cursor < 0:
|
||||||
|
raise StopIteration()
|
||||||
|
else:
|
||||||
|
while self.cursor >= 0:
|
||||||
|
next_separator = self.path.find('/', self.cursor)
|
||||||
|
if next_separator < 0:
|
||||||
|
result = self.path[self.cursor:]
|
||||||
|
self.cursor = next_separator
|
||||||
|
return result
|
||||||
|
elif next_separator == self.cursor:
|
||||||
|
self.advance_cursor(next_separator + 1)
|
||||||
|
else:
|
||||||
|
result = self.path[self.cursor:next_separator]
|
||||||
|
self.advance_cursor(next_separator + 1)
|
||||||
|
return result
|
||||||
|
raise StopIteration()
|
||||||
|
|
||||||
|
|
||||||
|
class NodeAncestryIterator:
    """Walks upward from a node, yielding each successive ancestor."""

    node: Node | PathMatcher

    def __init__(self, node: Node):
        self.node = node

    def __iter__(self) -> Self:
        return self

    def __next__(self) -> Node | PathMatcher:
        ancestor = self.node.parent
        if ancestor is None:
            # Reached the root: nothing further to yield.
            raise StopIteration()
        self.node = ancestor
        return ancestor
|
96
core/src/bugis/core/_types/__init__.py
Normal file
96
core/src/bugis/core/_types/__init__.py
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
from typing import (
|
||||||
|
TypedDict,
|
||||||
|
Literal,
|
||||||
|
Iterable,
|
||||||
|
Tuple,
|
||||||
|
Optional,
|
||||||
|
NotRequired,
|
||||||
|
Dict,
|
||||||
|
Any,
|
||||||
|
Union,
|
||||||
|
Mapping,
|
||||||
|
Sequence
|
||||||
|
)
|
||||||
|
|
||||||
|
from bugis.core._http_method import HttpMethod
|
||||||
|
|
||||||
|
from bugis.core._path_handler import PathHandler, Matches
|
||||||
|
|
||||||
|
type StrOrStrings = (str | Sequence[str])
|
||||||
|
|
||||||
|
type NodeType = (str | HttpMethod)
|
||||||
|
|
||||||
|
type PathMatcherResult = Mapping[str, Any] | Sequence[str]
|
||||||
|
|
||||||
|
|
||||||
|
class ASGIVersions(TypedDict):
|
||||||
|
spec_version: str
|
||||||
|
version: Union[Literal["2.0"], Literal["3.0"]]
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPScope(TypedDict):
|
||||||
|
type: Literal["http"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
http_version: str
|
||||||
|
method: str
|
||||||
|
scheme: str
|
||||||
|
path: str
|
||||||
|
raw_path: bytes
|
||||||
|
query_string: bytes
|
||||||
|
root_path: str
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
extensions: Optional[Dict[str, Dict[object, object]]]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketScope(TypedDict):
|
||||||
|
type: Literal["websocket"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
http_version: str
|
||||||
|
scheme: str
|
||||||
|
path: str
|
||||||
|
raw_path: bytes
|
||||||
|
query_string: bytes
|
||||||
|
root_path: str
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
subprotocols: Iterable[str]
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
extensions: Optional[Dict[str, Dict[object, object]]]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanScope(TypedDict):
|
||||||
|
type: Literal["lifespan"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
|
||||||
|
|
||||||
|
class RSGI:
|
||||||
|
class Scope(TypedDict):
|
||||||
|
proto: Literal['http'] # = 'http'
|
||||||
|
rsgi_version: str
|
||||||
|
http_version: str
|
||||||
|
server: str
|
||||||
|
client: str
|
||||||
|
scheme: str
|
||||||
|
method: str
|
||||||
|
path: str
|
||||||
|
query_string: str
|
||||||
|
headers: Mapping[str, str]
|
||||||
|
authority: Optional[str]
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
'HttpMethod',
|
||||||
|
'HTTPScope',
|
||||||
|
'LifespanScope',
|
||||||
|
'RSGI',
|
||||||
|
'ASGIVersions',
|
||||||
|
'WebSocketScope',
|
||||||
|
'PathHandler',
|
||||||
|
'NodeType',
|
||||||
|
'Matches'
|
||||||
|
]
|
57
core/src/bugis/core/_types/asgi.py
Normal file
57
core/src/bugis/core/_types/asgi.py
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
from typing import (
|
||||||
|
Sequence,
|
||||||
|
TypedDict,
|
||||||
|
Literal,
|
||||||
|
Iterable,
|
||||||
|
Tuple,
|
||||||
|
Optional,
|
||||||
|
NotRequired,
|
||||||
|
Dict,
|
||||||
|
Any,
|
||||||
|
Union
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ASGIVersions(TypedDict):
|
||||||
|
spec_version: str
|
||||||
|
version: Union[Literal["2.0"], Literal["3.0"]]
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPScope(TypedDict):
|
||||||
|
type: Literal["http"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
http_version: str
|
||||||
|
method: str
|
||||||
|
scheme: str
|
||||||
|
path: str
|
||||||
|
raw_path: bytes
|
||||||
|
query_string: bytes
|
||||||
|
root_path: str
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
extensions: Optional[Dict[str, Dict[object, object]]]
|
||||||
|
|
||||||
|
|
||||||
|
class WebSocketScope(TypedDict):
|
||||||
|
type: Literal["websocket"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
http_version: str
|
||||||
|
scheme: str
|
||||||
|
path: str
|
||||||
|
raw_path: bytes
|
||||||
|
query_string: bytes
|
||||||
|
root_path: str
|
||||||
|
headers: Iterable[Tuple[bytes, bytes]]
|
||||||
|
client: Optional[Tuple[str, int]]
|
||||||
|
server: Optional[Tuple[str, Optional[int]]]
|
||||||
|
subprotocols: Iterable[str]
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
||||||
|
extensions: Optional[Dict[str, Dict[object, object]]]
|
||||||
|
|
||||||
|
|
||||||
|
class LifespanScope(TypedDict):
|
||||||
|
type: Literal["lifespan"]
|
||||||
|
asgi: ASGIVersions
|
||||||
|
state: NotRequired[Dict[str, Any]]
|
26
core/src/bugis/core/_types/rsgi.py
Normal file
26
core/src/bugis/core/_types/rsgi.py
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
from typing import (
|
||||||
|
Sequence,
|
||||||
|
TypedDict,
|
||||||
|
Literal,
|
||||||
|
Iterable,
|
||||||
|
Tuple,
|
||||||
|
Optional,
|
||||||
|
NotRequired,
|
||||||
|
Dict,
|
||||||
|
Any,
|
||||||
|
Union,
|
||||||
|
Mapping,
|
||||||
|
)
|
||||||
|
|
||||||
|
class HTTPScope(TypedDict):
|
||||||
|
proto: Literal['http']
|
||||||
|
rsgi_version: str
|
||||||
|
http_version: str
|
||||||
|
server: str
|
||||||
|
client: str
|
||||||
|
scheme: str
|
||||||
|
method: str
|
||||||
|
path: str
|
||||||
|
query_string: str
|
||||||
|
headers: Mapping[str, str]
|
||||||
|
authority: Optional[str]
|
0
core/src/bugis/core/py.typed
Normal file
0
core/src/bugis/core/py.typed
Normal file
127
core/tests/test_asgi.py
Normal file
127
core/tests/test_asgi.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
import unittest
|
||||||
|
import json
|
||||||
|
import httpx
|
||||||
|
from pwo import async_test
|
||||||
|
from bugis.core import BugisApp, HttpContext, HttpMethod
|
||||||
|
from typing import Sequence
|
||||||
|
|
||||||
|
|
||||||
|
class AsgiTest(unittest.TestCase):
|
||||||
|
app: BugisApp
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.app = BugisApp()
|
||||||
|
|
||||||
|
@self.app.GET('/hello')
|
||||||
|
@self.app.GET('/hello2')
|
||||||
|
@self.app.route('/hello3')
|
||||||
|
async def handle_request(ctx: HttpContext) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, 'Hello World!')
|
||||||
|
|
||||||
|
@self.app.route(('/foo/bar',), HttpMethod.PUT, recursive=True)
|
||||||
|
async def handle_request(ctx: HttpContext) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, ctx.path)
|
||||||
|
|
||||||
|
@self.app.route(('/foo/*',), HttpMethod.PUT, recursive=True)
|
||||||
|
async def handle_request(ctx: HttpContext, path: Sequence[str]) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, json.dumps(path))
|
||||||
|
|
||||||
|
@self.app.GET('/employee/${employee_id}')
|
||||||
|
async def handle_request(ctx: HttpContext, employee_id: str) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, employee_id)
|
||||||
|
|
||||||
|
@self.app.GET('/square/${x:int}')
|
||||||
|
async def handle_request(ctx: HttpContext, x: int) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, str(x * x))
|
||||||
|
|
||||||
|
@self.app.GET('/department/${department_id:int}/employee/${employee_id:int}')
|
||||||
|
async def handle_request(ctx: HttpContext, department_id: int, employee_id: int) -> None:
|
||||||
|
async for chunk in ctx.request_body:
|
||||||
|
print(chunk)
|
||||||
|
await ctx.send_str(200, json.dumps({
|
||||||
|
'department_id': department_id,
|
||||||
|
'employee_id': employee_id
|
||||||
|
}))
|
||||||
|
|
||||||
|
@async_test
|
||||||
|
async def test_hello(self):
|
||||||
|
transport = httpx.ASGITransport(app=self.app)
|
||||||
|
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://127.0.0.1:80") as client:
|
||||||
|
r = await client.get("/hello")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.text, "Hello World!")
|
||||||
|
|
||||||
|
r = await client.get("/hello2")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.text, "Hello World!")
|
||||||
|
|
||||||
|
r = await client.post("/hello3")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.text, "Hello World!")
|
||||||
|
|
||||||
|
r = await client.get("/hello4")
|
||||||
|
self.assertEqual(r.status_code, 404)
|
||||||
|
self.assertTrue(len(r.text) == 0)
|
||||||
|
|
||||||
|
@async_test
|
||||||
|
async def test_foo(self):
|
||||||
|
transport = httpx.ASGITransport(app=self.app)
|
||||||
|
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://127.0.0.1:80") as client:
|
||||||
|
r = await client.put("/foo/fizz/baz")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
response = json.loads(r.text)
|
||||||
|
self.assertEqual(['fizz', 'baz'], response)
|
||||||
|
|
||||||
|
@async_test
|
||||||
|
async def test_foo_bar(self):
|
||||||
|
transport = httpx.ASGITransport(app=self.app)
|
||||||
|
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://127.0.0.1:80") as client:
|
||||||
|
r = await client.put("/foo/bar/baz")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual('/foo/bar/baz', r.text)
|
||||||
|
|
||||||
|
@async_test
|
||||||
|
async def test_employee(self):
|
||||||
|
transport = httpx.ASGITransport(app=self.app)
|
||||||
|
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://127.0.0.1:80") as client:
|
||||||
|
r = await client.get("/employee/101325")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.text, '101325')
|
||||||
|
|
||||||
|
@async_test
|
||||||
|
async def test_square(self):
|
||||||
|
transport = httpx.ASGITransport(app=self.app)
|
||||||
|
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://127.0.0.1:80") as client:
|
||||||
|
x = 30
|
||||||
|
r = await client.get(f"/square/{x}")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
self.assertEqual(r.text, str(x * x))
|
||||||
|
|
||||||
|
@async_test
|
||||||
|
async def test_department_employee(self):
|
||||||
|
transport = httpx.ASGITransport(app=self.app)
|
||||||
|
|
||||||
|
async with httpx.AsyncClient(transport=transport, base_url="http://127.0.0.1:80") as client:
|
||||||
|
r = await client.get("department/189350/employee/101325")
|
||||||
|
self.assertEqual(r.status_code, 200)
|
||||||
|
response = json.loads(r.text)
|
||||||
|
self.assertEqual({
|
||||||
|
'department_id': 189350,
|
||||||
|
'employee_id': 101325
|
||||||
|
}, response)
|
||||||
|
|
83
core/tests/test_tree.py
Normal file
83
core/tests/test_tree.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
from typing import Sequence, Tuple, Optional, List
|
||||||
|
|
||||||
|
from bugis.core import Tree, PathHandler, HttpContext, HttpMethod, PathIterator
|
||||||
|
from bugis.core import HttpMethod
|
||||||
|
from pwo import Maybe
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
|
||||||
|
class PathIteratorTest(unittest.TestCase):
|
||||||
|
cases: Tuple[Tuple[str, Tuple[str, ...]], ...] = (
|
||||||
|
('/', tuple()),
|
||||||
|
('root/foo', ('root', 'foo')),
|
||||||
|
('/root', ('root',)),
|
||||||
|
('/root', ('root',)),
|
||||||
|
('/root/', ('root',)),
|
||||||
|
('/root/bar/', ('root', 'bar')),
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_path_iterator(self):
|
||||||
|
for (case, expected) in self.cases:
|
||||||
|
with self.subTest(case) as _:
|
||||||
|
components = tuple((c for c in PathIterator(case)))
|
||||||
|
self.assertEqual(expected, components)
|
||||||
|
|
||||||
|
|
||||||
|
class TreeTest(unittest.TestCase):
|
||||||
|
tree: Tree
|
||||||
|
handlers: List[PathHandler]
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.tree = Tree()
|
||||||
|
|
||||||
|
class TestHandler(PathHandler):
|
||||||
|
|
||||||
|
def handle_request(self, ctx: HttpContext):
|
||||||
|
pass
|
||||||
|
|
||||||
|
@property
|
||||||
|
def recursive(self) -> bool:
|
||||||
|
return True
|
||||||
|
|
||||||
|
self.handlers = [TestHandler() for _ in range(20)]
|
||||||
|
|
||||||
|
routes: Tuple[Tuple[Tuple[str, ...], Optional[HttpMethod], PathHandler], ...] = (
|
||||||
|
(('home', 'something'), HttpMethod.GET, self.handlers[0]),
|
||||||
|
(('home', 'something_else'), HttpMethod.GET, self.handlers[1]),
|
||||||
|
(('home', 'something_else'), HttpMethod.POST, self.handlers[2]),
|
||||||
|
(('home', 'something', 'object'), HttpMethod.GET, self.handlers[3]),
|
||||||
|
(('home', 'something_else', 'foo'), HttpMethod.GET, self.handlers[4]),
|
||||||
|
(('home',), HttpMethod.GET, self.handlers[5]),
|
||||||
|
(('home',), HttpMethod.POST, self.handlers[6]),
|
||||||
|
(('home',), None, self.handlers[7]),
|
||||||
|
(('home', '*.md'), None, self.handlers[8]),
|
||||||
|
(('home', 'something', '*', 'blah', '*.md'), None, self.handlers[9]),
|
||||||
|
(('home', 'bar', '*'), None, self.handlers[10]),
|
||||||
|
|
||||||
|
)
|
||||||
|
|
||||||
|
for path, method, handler in routes:
|
||||||
|
self.tree.add((p for p in path), method, handler)
|
||||||
|
|
||||||
|
def test_tree(self):
|
||||||
|
|
||||||
|
cases: Tuple[Tuple[str, HttpMethod, Optional[int]], ...] = (
|
||||||
|
('http://localhost:127.0.0.1:5432/home/something', HttpMethod.GET, 0),
|
||||||
|
('http://localhost:127.0.0.1:5432/home/something_else', HttpMethod.GET, 1),
|
||||||
|
('http://localhost:127.0.0.1:5432/home/something_else', HttpMethod.POST, 2),
|
||||||
|
('http://localhost:127.0.0.1:5432/home/something/object', HttpMethod.GET, 3),
|
||||||
|
('http://localhost:127.0.0.1:5432/home/something_else/foo', HttpMethod.GET, 4),
|
||||||
|
('http://localhost:127.0.0.1:5432/', HttpMethod.GET, None),
|
||||||
|
('http://localhost:127.0.0.1:5432/home', HttpMethod.GET, 5),
|
||||||
|
('http://localhost:127.0.0.1:5432/home', HttpMethod.POST, 6),
|
||||||
|
('http://localhost:127.0.0.1:5432/home', HttpMethod.PUT, 7),
|
||||||
|
('http://localhost:127.0.0.1:5432/home/README.md', HttpMethod.GET, 8),
|
||||||
|
('http://localhost:127.0.0.1:5432/home/something/ciao/blah/README.md', HttpMethod.GET, 9),
|
||||||
|
('http://localhost:127.0.0.1:5432/home/bar/ciao/blah/README.md', HttpMethod.GET, 10),
|
||||||
|
)
|
||||||
|
for url, method, handler_num in cases:
|
||||||
|
with self.subTest(f"{str(method)} {url}"):
|
||||||
|
res = self.tree.get_handler(url, method)
|
||||||
|
self.assertIs(Maybe.of(handler_num).map(self.handlers.__getitem__).or_none(),
|
||||||
|
Maybe.of_nullable(res).map(lambda it: it[0]).or_none())
|
||||||
|
|
@@ -29,7 +29,7 @@ dependencies = [
|
|||||||
"PyYAML",
|
"PyYAML",
|
||||||
"pygraphviz",
|
"pygraphviz",
|
||||||
"aiofiles",
|
"aiofiles",
|
||||||
"aiohttp[speedups]"
|
"httpx[http2]"
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.optional-dependencies]
|
[project.optional-dependencies]
|
||||||
@@ -60,3 +60,6 @@ strict = true
|
|||||||
|
|
||||||
[tool.setuptools_scm]
|
[tool.setuptools_scm]
|
||||||
version_file = "src/bugis/_version.py"
|
version_file = "src/bugis/_version.py"
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
bugis = "bugis.cli:main"
|
@@ -1,39 +1,27 @@
|
|||||||
#
|
#
|
||||||
# This file is autogenerated by pip-compile with Python 3.10
|
# This file is autogenerated by pip-compile with Python 3.12
|
||||||
# by the following command:
|
# by the following command:
|
||||||
#
|
#
|
||||||
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=dev --output-file=requirements-dev.txt --strip-extras pyproject.toml
|
# pip-compile --extra=dev --output-file=requirements-dev.txt pyproject.toml
|
||||||
#
|
#
|
||||||
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
||||||
|
--extra-index-url https://pypi.org/simple
|
||||||
|
|
||||||
aiodns==3.2.0
|
|
||||||
# via aiohttp
|
|
||||||
aiofiles==24.1.0
|
aiofiles==24.1.0
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
aiohappyeyeballs==2.4.3
|
anyio==4.6.2.post1
|
||||||
# via aiohttp
|
# via httpx
|
||||||
aiohttp==3.10.10
|
|
||||||
# via bugis (pyproject.toml)
|
|
||||||
aiosignal==1.3.1
|
|
||||||
# via aiohttp
|
|
||||||
asttokens==2.4.1
|
asttokens==2.4.1
|
||||||
# via stack-data
|
# via stack-data
|
||||||
async-timeout==4.0.3
|
|
||||||
# via aiohttp
|
|
||||||
attrs==24.2.0
|
|
||||||
# via aiohttp
|
|
||||||
backports-tarfile==1.2.0
|
|
||||||
# via jaraco-context
|
|
||||||
brotli==1.1.0
|
|
||||||
# via aiohttp
|
|
||||||
build==1.2.2.post1
|
build==1.2.2.post1
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
certifi==2024.8.30
|
certifi==2024.8.30
|
||||||
# via requests
|
|
||||||
cffi==1.17.1
|
|
||||||
# via
|
# via
|
||||||
# cryptography
|
# httpcore
|
||||||
# pycares
|
# httpx
|
||||||
|
# requests
|
||||||
|
cffi==1.17.1
|
||||||
|
# via cryptography
|
||||||
charset-normalizer==3.4.0
|
charset-normalizer==3.4.0
|
||||||
# via requests
|
# via requests
|
||||||
click==8.1.7
|
click==8.1.7
|
||||||
@@ -46,24 +34,29 @@ decorator==5.1.1
|
|||||||
# ipython
|
# ipython
|
||||||
docutils==0.21.2
|
docutils==0.21.2
|
||||||
# via readme-renderer
|
# via readme-renderer
|
||||||
exceptiongroup==1.2.2
|
|
||||||
# via ipython
|
|
||||||
executing==2.1.0
|
executing==2.1.0
|
||||||
# via stack-data
|
# via stack-data
|
||||||
frozenlist==1.4.1
|
|
||||||
# via
|
|
||||||
# aiohttp
|
|
||||||
# aiosignal
|
|
||||||
granian==1.6.1
|
granian==1.6.1
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
|
h11==0.14.0
|
||||||
|
# via httpcore
|
||||||
|
h2==4.1.0
|
||||||
|
# via httpx
|
||||||
|
hpack==4.0.0
|
||||||
|
# via h2
|
||||||
|
httpcore==1.0.6
|
||||||
|
# via httpx
|
||||||
|
httpx[http2]==0.27.2
|
||||||
|
# via bugis (pyproject.toml)
|
||||||
|
hyperframe==6.0.1
|
||||||
|
# via h2
|
||||||
idna==3.10
|
idna==3.10
|
||||||
# via
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
# requests
|
# requests
|
||||||
# yarl
|
|
||||||
importlib-metadata==8.5.0
|
importlib-metadata==8.5.0
|
||||||
# via
|
# via twine
|
||||||
# keyring
|
|
||||||
# twine
|
|
||||||
ipdb==0.13.13
|
ipdb==0.13.13
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
ipython==8.28.0
|
ipython==8.28.0
|
||||||
@@ -94,11 +87,7 @@ more-itertools==10.5.0
|
|||||||
# via
|
# via
|
||||||
# jaraco-classes
|
# jaraco-classes
|
||||||
# jaraco-functools
|
# jaraco-functools
|
||||||
multidict==6.1.0
|
mypy==1.13.0
|
||||||
# via
|
|
||||||
# aiohttp
|
|
||||||
# yarl
|
|
||||||
mypy==1.12.1
|
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
mypy-extensions==1.0.0
|
mypy-extensions==1.0.0
|
||||||
# via mypy
|
# via mypy
|
||||||
@@ -114,16 +103,12 @@ pkginfo==1.10.0
|
|||||||
# via twine
|
# via twine
|
||||||
prompt-toolkit==3.0.48
|
prompt-toolkit==3.0.48
|
||||||
# via ipython
|
# via ipython
|
||||||
propcache==0.2.0
|
|
||||||
# via yarl
|
|
||||||
ptyprocess==0.7.0
|
ptyprocess==0.7.0
|
||||||
# via pexpect
|
# via pexpect
|
||||||
pure-eval==0.2.3
|
pure-eval==0.2.3
|
||||||
# via stack-data
|
# via stack-data
|
||||||
pwo==0.0.3
|
pwo==0.0.4
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
pycares==4.4.0
|
|
||||||
# via aiodns
|
|
||||||
pycparser==2.22
|
pycparser==2.22
|
||||||
# via cffi
|
# via cffi
|
||||||
pygments==2.18.0
|
pygments==2.18.0
|
||||||
@@ -148,32 +133,28 @@ requests-toolbelt==1.0.0
|
|||||||
# via twine
|
# via twine
|
||||||
rfc3986==2.0.0
|
rfc3986==2.0.0
|
||||||
# via twine
|
# via twine
|
||||||
rich==13.9.2
|
rich==13.9.3
|
||||||
# via twine
|
# via twine
|
||||||
secretstorage==3.3.3
|
secretstorage==3.3.3
|
||||||
# via keyring
|
# via keyring
|
||||||
six==1.16.0
|
six==1.16.0
|
||||||
# via asttokens
|
# via asttokens
|
||||||
|
sniffio==1.3.1
|
||||||
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
stack-data==0.6.3
|
stack-data==0.6.3
|
||||||
# via ipython
|
# via ipython
|
||||||
tomli==2.0.2
|
|
||||||
# via
|
|
||||||
# build
|
|
||||||
# ipdb
|
|
||||||
# mypy
|
|
||||||
traitlets==5.14.3
|
traitlets==5.14.3
|
||||||
# via
|
# via
|
||||||
# ipython
|
# ipython
|
||||||
# matplotlib-inline
|
# matplotlib-inline
|
||||||
twine==5.1.1
|
twine==5.1.1
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
typing-extensions==4.7.1
|
typing-extensions==4.12.2
|
||||||
# via
|
# via
|
||||||
# ipython
|
|
||||||
# multidict
|
|
||||||
# mypy
|
# mypy
|
||||||
# pwo
|
# pwo
|
||||||
# rich
|
|
||||||
urllib3==2.2.3
|
urllib3==2.2.3
|
||||||
# via
|
# via
|
||||||
# requests
|
# requests
|
||||||
@@ -184,7 +165,5 @@ watchdog==5.0.3
|
|||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
wcwidth==0.2.13
|
wcwidth==0.2.13
|
||||||
# via prompt-toolkit
|
# via prompt-toolkit
|
||||||
yarl==1.16.0
|
|
||||||
# via aiohttp
|
|
||||||
zipp==3.20.2
|
zipp==3.20.2
|
||||||
# via importlib-metadata
|
# via importlib-metadata
|
||||||
|
@@ -1,66 +1,57 @@
|
|||||||
#
|
#
|
||||||
# This file is autogenerated by pip-compile with Python 3.10
|
# This file is autogenerated by pip-compile with Python 3.12
|
||||||
# by the following command:
|
# by the following command:
|
||||||
#
|
#
|
||||||
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --extra=run --output-file=requirements-run.txt --strip-extras pyproject.toml
|
# pip-compile --extra=run --output-file=requirements-run.txt pyproject.toml
|
||||||
#
|
#
|
||||||
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
||||||
|
--extra-index-url https://pypi.org/simple
|
||||||
|
|
||||||
aiodns==3.2.0
|
|
||||||
# via aiohttp
|
|
||||||
aiofiles==24.1.0
|
aiofiles==24.1.0
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
aiohappyeyeballs==2.4.3
|
anyio==4.6.2.post1
|
||||||
# via aiohttp
|
# via httpx
|
||||||
aiohttp==3.10.10
|
certifi==2024.8.30
|
||||||
# via bugis (pyproject.toml)
|
# via
|
||||||
aiosignal==1.3.1
|
# httpcore
|
||||||
# via aiohttp
|
# httpx
|
||||||
async-timeout==4.0.3
|
|
||||||
# via aiohttp
|
|
||||||
attrs==24.2.0
|
|
||||||
# via aiohttp
|
|
||||||
brotli==1.1.0
|
|
||||||
# via aiohttp
|
|
||||||
cffi==1.17.1
|
|
||||||
# via pycares
|
|
||||||
click==8.1.7
|
click==8.1.7
|
||||||
# via granian
|
# via granian
|
||||||
frozenlist==1.4.1
|
|
||||||
# via
|
|
||||||
# aiohttp
|
|
||||||
# aiosignal
|
|
||||||
granian==1.6.1
|
granian==1.6.1
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
|
h11==0.14.0
|
||||||
|
# via httpcore
|
||||||
|
h2==4.1.0
|
||||||
|
# via httpx
|
||||||
|
hpack==4.0.0
|
||||||
|
# via h2
|
||||||
|
httpcore==1.0.6
|
||||||
|
# via httpx
|
||||||
|
httpx[http2]==0.27.2
|
||||||
|
# via bugis (pyproject.toml)
|
||||||
|
hyperframe==6.0.1
|
||||||
|
# via h2
|
||||||
idna==3.10
|
idna==3.10
|
||||||
# via yarl
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
markdown==3.7
|
markdown==3.7
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
multidict==6.1.0
|
pwo==0.0.4
|
||||||
# via
|
|
||||||
# aiohttp
|
|
||||||
# yarl
|
|
||||||
propcache==0.2.0
|
|
||||||
# via yarl
|
|
||||||
pwo==0.0.3
|
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
pycares==4.4.0
|
|
||||||
# via aiodns
|
|
||||||
pycparser==2.22
|
|
||||||
# via cffi
|
|
||||||
pygments==2.18.0
|
pygments==2.18.0
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
pygraphviz==1.14
|
pygraphviz==1.14
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
pyyaml==6.0.2
|
pyyaml==6.0.2
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
typing-extensions==4.7.1
|
sniffio==1.3.1
|
||||||
# via
|
# via
|
||||||
# multidict
|
# anyio
|
||||||
# pwo
|
# httpx
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via pwo
|
||||||
uvloop==0.21.0
|
uvloop==0.21.0
|
||||||
# via granian
|
# via granian
|
||||||
watchdog==5.0.3
|
watchdog==5.0.3
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
yarl==1.16.0
|
|
||||||
# via aiohttp
|
|
||||||
|
@@ -1,60 +1,51 @@
|
|||||||
#
|
#
|
||||||
# This file is autogenerated by pip-compile with Python 3.10
|
# This file is autogenerated by pip-compile with Python 3.12
|
||||||
# by the following command:
|
# by the following command:
|
||||||
#
|
#
|
||||||
# pip-compile --extra-index-url=https://gitea.woggioni.net/api/packages/woggioni/pypi/simple --output-file=requirements.txt --strip-extras pyproject.toml
|
# pip-compile --output-file=requirements.txt pyproject.toml
|
||||||
#
|
#
|
||||||
--extra-index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
--index-url https://gitea.woggioni.net/api/packages/woggioni/pypi/simple
|
||||||
|
--extra-index-url https://pypi.org/simple
|
||||||
|
|
||||||
aiodns==3.2.0
|
|
||||||
# via aiohttp
|
|
||||||
aiofiles==24.1.0
|
aiofiles==24.1.0
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
aiohappyeyeballs==2.4.3
|
anyio==4.6.2.post1
|
||||||
# via aiohttp
|
# via httpx
|
||||||
aiohttp==3.10.10
|
certifi==2024.8.30
|
||||||
# via bugis (pyproject.toml)
|
|
||||||
aiosignal==1.3.1
|
|
||||||
# via aiohttp
|
|
||||||
async-timeout==4.0.3
|
|
||||||
# via aiohttp
|
|
||||||
attrs==24.2.0
|
|
||||||
# via aiohttp
|
|
||||||
brotli==1.1.0
|
|
||||||
# via aiohttp
|
|
||||||
cffi==1.17.1
|
|
||||||
# via pycares
|
|
||||||
frozenlist==1.4.1
|
|
||||||
# via
|
# via
|
||||||
# aiohttp
|
# httpcore
|
||||||
# aiosignal
|
# httpx
|
||||||
|
h11==0.14.0
|
||||||
|
# via httpcore
|
||||||
|
h2==4.1.0
|
||||||
|
# via httpx
|
||||||
|
hpack==4.0.0
|
||||||
|
# via h2
|
||||||
|
httpcore==1.0.6
|
||||||
|
# via httpx
|
||||||
|
httpx[http2]==0.27.2
|
||||||
|
# via bugis (pyproject.toml)
|
||||||
|
hyperframe==6.0.1
|
||||||
|
# via h2
|
||||||
idna==3.10
|
idna==3.10
|
||||||
# via yarl
|
# via
|
||||||
|
# anyio
|
||||||
|
# httpx
|
||||||
markdown==3.7
|
markdown==3.7
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
multidict==6.1.0
|
pwo==0.0.4
|
||||||
# via
|
|
||||||
# aiohttp
|
|
||||||
# yarl
|
|
||||||
propcache==0.2.0
|
|
||||||
# via yarl
|
|
||||||
pwo==0.0.3
|
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
pycares==4.4.0
|
|
||||||
# via aiodns
|
|
||||||
pycparser==2.22
|
|
||||||
# via cffi
|
|
||||||
pygments==2.18.0
|
pygments==2.18.0
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
pygraphviz==1.14
|
pygraphviz==1.14
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
pyyaml==6.0.2
|
pyyaml==6.0.2
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
typing-extensions==4.7.1
|
sniffio==1.3.1
|
||||||
# via
|
# via
|
||||||
# multidict
|
# anyio
|
||||||
# pwo
|
# httpx
|
||||||
|
typing-extensions==4.12.2
|
||||||
|
# via pwo
|
||||||
watchdog==5.0.3
|
watchdog==5.0.3
|
||||||
# via bugis (pyproject.toml)
|
# via bugis (pyproject.toml)
|
||||||
yarl==1.16.0
|
|
||||||
# via aiohttp
|
|
||||||
|
4
src/bugis/__main__.py
Normal file
4
src/bugis/__main__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
from .cli import main
|
||||||
|
import sys
|
||||||
|
|
||||||
|
main(sys.argv[1:])
|
@@ -1,27 +1,18 @@
|
|||||||
import logging
|
import logging
|
||||||
from logging.config import dictConfig as configure_logging
|
from asyncio import get_running_loop
|
||||||
|
from typing import Optional, Awaitable, Callable, Any, Mapping
|
||||||
from yaml import safe_load
|
|
||||||
|
|
||||||
from .configuration import Configuration
|
|
||||||
|
|
||||||
with open(Configuration.instance.logging_configuration_file, 'r') as input_file:
|
|
||||||
conf = safe_load(input_file)
|
|
||||||
configure_logging(conf)
|
|
||||||
|
|
||||||
|
|
||||||
from pwo import Maybe
|
from pwo import Maybe
|
||||||
|
|
||||||
from .server import Server
|
from .server import Server
|
||||||
from asyncio import get_running_loop
|
|
||||||
from .asgi_utils import decode_headers
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
log = logging.getLogger('access')
|
log = logging.getLogger('access')
|
||||||
log.propagate = False
|
log.propagate = False
|
||||||
|
|
||||||
_server : Optional[Server] = None
|
_server: Optional[Server] = None
|
||||||
|
|
||||||
async def application(scope, receive, send):
|
async def application(scope, receive, send : Callable[[Mapping[str, Any]], Awaitable[None]]):
|
||||||
global _server
|
global _server
|
||||||
if scope['type'] == 'lifespan':
|
if scope['type'] == 'lifespan':
|
||||||
while True:
|
while True:
|
||||||
@@ -33,22 +24,9 @@ async def application(scope, receive, send):
|
|||||||
await _server.stop()
|
await _server.stop()
|
||||||
await send({'type': 'lifespan.shutdown.complete'})
|
await send({'type': 'lifespan.shutdown.complete'})
|
||||||
else:
|
else:
|
||||||
def maybe_log(evt):
|
pathsend = (Maybe.of_nullable(scope.get('extensions'))
|
||||||
d = {
|
.map(lambda it: it.get("http.response.pathsend"))
|
||||||
'response_headers': (Maybe.of_nullable(evt.get('headers'))
|
.is_present)
|
||||||
.map(decode_headers)
|
|
||||||
.or_none()),
|
|
||||||
'status': evt['status']
|
|
||||||
}
|
|
||||||
log.info(None, extra=dict(**{k : v for k, v in d.items() if k is not None}, **scope))
|
|
||||||
def wrapped_send(*args, **kwargs):
|
|
||||||
result = send(*args, **kwargs)
|
|
||||||
(Maybe.of(args)
|
|
||||||
.filter(lambda it: len(it) > 0)
|
|
||||||
.map(lambda it: it[0])
|
|
||||||
.filter(lambda it: it.get('type') == 'http.response.start')
|
|
||||||
.if_present(maybe_log))
|
|
||||||
return result
|
|
||||||
await _server.handle_request(
|
await _server.handle_request(
|
||||||
scope['method'],
|
scope['method'],
|
||||||
scope['path'],
|
scope['path'],
|
||||||
@@ -58,6 +36,7 @@ async def application(scope, receive, send):
|
|||||||
.map(lambda it: it.decode())
|
.map(lambda it: it.decode())
|
||||||
.or_else(None),
|
.or_else(None),
|
||||||
Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
|
Maybe.of_nullable(scope.get('query_string', None)).map(lambda it: it.decode()).or_else(None),
|
||||||
wrapped_send
|
send,
|
||||||
|
pathsend
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@@ -1,54 +1,15 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
|
from asyncio import Queue, AbstractEventLoop, Future, Task, gather
|
||||||
|
from logging import getLogger, Logger
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from pwo import TopicManager, Subscriber
|
||||||
|
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
|
||||||
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
|
from watchdog.events import FileSystemEventHandler, FileSystemEvent, PatternMatchingEventHandler
|
||||||
from watchdog.observers import Observer
|
from watchdog.observers import Observer
|
||||||
from watchdog.events import FileMovedEvent, FileClosedEvent, FileCreatedEvent, FileModifiedEvent
|
|
||||||
from pathlib import Path
|
|
||||||
from asyncio import Queue, AbstractEventLoop, Future, CancelledError, Task, gather
|
|
||||||
from typing import Callable, Optional
|
|
||||||
from logging import getLogger, Logger
|
|
||||||
|
|
||||||
log: Logger = getLogger(__name__)
|
log: Logger = getLogger(__name__)
|
||||||
|
|
||||||
class Subscription:
|
|
||||||
_unsubscribe_callback: Callable[['Subscription'], None]
|
|
||||||
_event: Optional[Future]
|
|
||||||
_loop: AbstractEventLoop
|
|
||||||
|
|
||||||
def __init__(self, unsubscribe: Callable[['Subscription'], None], loop: AbstractEventLoop):
|
|
||||||
self._unsubscribe_callback = unsubscribe
|
|
||||||
self._event: Optional[Future] = None
|
|
||||||
self._loop = loop
|
|
||||||
|
|
||||||
|
|
||||||
def unsubscribe(self) -> None:
|
|
||||||
self._event.cancel()
|
|
||||||
self._unsubscribe_callback(self)
|
|
||||||
log.debug('Deleted subscription %s', id(self))
|
|
||||||
|
|
||||||
async def wait(self, tout: float) -> bool:
|
|
||||||
self._event = self._loop.create_future()
|
|
||||||
|
|
||||||
def callback():
|
|
||||||
if not self._event.done():
|
|
||||||
self._event.set_result(False)
|
|
||||||
handle = self._loop.call_later(tout, callback)
|
|
||||||
try:
|
|
||||||
log.debug('Subscription %s is waiting for an event', id(self))
|
|
||||||
return await self._event
|
|
||||||
except CancelledError:
|
|
||||||
return False
|
|
||||||
finally:
|
|
||||||
handle.cancel()
|
|
||||||
|
|
||||||
def notify(self) -> None:
|
|
||||||
log.debug('Subscription %s notified', id(self))
|
|
||||||
if not self._event.done():
|
|
||||||
self._event.set_result(True)
|
|
||||||
|
|
||||||
def reset(self) -> None:
|
|
||||||
self._event = self._loop.create_future()
|
|
||||||
|
|
||||||
|
|
||||||
class _EventHandler(FileSystemEventHandler):
|
class _EventHandler(FileSystemEventHandler):
|
||||||
_queue: Queue
|
_queue: Queue
|
||||||
@@ -67,22 +28,6 @@ class _EventHandler(FileSystemEventHandler):
|
|||||||
self._loop.call_soon_threadsafe(self._queue.put_nowait, event)
|
self._loop.call_soon_threadsafe(self._queue.put_nowait, event)
|
||||||
|
|
||||||
|
|
||||||
class AsyncQueueIterator:
|
|
||||||
_queue: Queue
|
|
||||||
|
|
||||||
def __init__(self, queue: Queue):
|
|
||||||
self._queue = queue
|
|
||||||
|
|
||||||
def __aiter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def __anext__(self):
|
|
||||||
item = await self._queue.get()
|
|
||||||
if item is None:
|
|
||||||
raise StopAsyncIteration
|
|
||||||
return item
|
|
||||||
|
|
||||||
|
|
||||||
observer = Observer()
|
observer = Observer()
|
||||||
|
|
||||||
|
|
||||||
@@ -96,56 +41,12 @@ def watch(path: Path, queue: Queue, loop: AbstractEventLoop,
|
|||||||
observer.join()
|
observer.join()
|
||||||
loop.call_soon_threadsafe(queue.put_nowait, None)
|
loop.call_soon_threadsafe(queue.put_nowait, None)
|
||||||
|
|
||||||
class SubscriptionManager:
|
|
||||||
_loop: AbstractEventLoop
|
|
||||||
_queue: Queue
|
|
||||||
_subscriptions: dict[str, set[Subscription]]
|
|
||||||
|
|
||||||
def __init__(self, loop: AbstractEventLoop):
|
|
||||||
self._subscriptions: dict[str, set[Subscription]] = dict()
|
|
||||||
self._loop = loop
|
|
||||||
self._queue = Queue()
|
|
||||||
|
|
||||||
def subscribe(self, path: str) -> Subscription:
|
|
||||||
subscriptions = self._subscriptions
|
|
||||||
subscriptions_per_path = subscriptions.setdefault(path, set())
|
|
||||||
|
|
||||||
def unsubscribe_callback(subscription):
|
|
||||||
subscriptions_per_path.remove(subscription)
|
|
||||||
log.debug('Removed subscription %s to path %s', id(result), path)
|
|
||||||
|
|
||||||
result = Subscription(unsubscribe_callback, self._loop)
|
|
||||||
log.debug('Created subscription %s to path %s', id(result), path)
|
|
||||||
subscriptions_per_path.add(result)
|
|
||||||
return result
|
|
||||||
|
|
||||||
def _notify_subscriptions(self, path):
|
|
||||||
subscriptions = self._subscriptions
|
|
||||||
subscriptions_per_path = subscriptions.get(path, None)
|
|
||||||
if subscriptions_per_path:
|
|
||||||
log.debug(f"Subscriptions on '{path}': {len(subscriptions_per_path)}")
|
|
||||||
for s in subscriptions_per_path:
|
|
||||||
s.notify()
|
|
||||||
|
|
||||||
async def process_events(self):
|
|
||||||
async for evt in AsyncQueueIterator(self._queue):
|
|
||||||
log.debug(f"Processed event for path '{evt}'")
|
|
||||||
self._notify_subscriptions(evt)
|
|
||||||
log.debug(f"Event processor has completed")
|
|
||||||
|
|
||||||
|
|
||||||
def post_event(self, path):
|
|
||||||
def callback():
|
|
||||||
self._queue.put_nowait(path)
|
|
||||||
log.debug(f"Posted event for path '{path}', queue size: {self._queue.qsize()}")
|
|
||||||
self._loop.call_soon_threadsafe(callback)
|
|
||||||
|
|
||||||
|
|
||||||
class FileWatcher(PatternMatchingEventHandler):
|
class FileWatcher(PatternMatchingEventHandler):
|
||||||
_subscription_manager: SubscriptionManager
|
_topic_manager: TopicManager
|
||||||
_loop: AbstractEventLoop
|
_loop: AbstractEventLoop
|
||||||
_subscription_manager_loop: Task
|
_topic_manager_loop: Task
|
||||||
_running_tasks : Future
|
_running_tasks: Future
|
||||||
|
|
||||||
def __init__(self, path):
|
def __init__(self, path):
|
||||||
super().__init__(patterns=['*.md'],
|
super().__init__(patterns=['*.md'],
|
||||||
@@ -155,29 +56,29 @@ class FileWatcher(PatternMatchingEventHandler):
|
|||||||
self._observer: Observer = Observer()
|
self._observer: Observer = Observer()
|
||||||
self._observer.schedule(self, path=path, recursive=True)
|
self._observer.schedule(self, path=path, recursive=True)
|
||||||
self._loop = asyncio.get_running_loop()
|
self._loop = asyncio.get_running_loop()
|
||||||
self._subscription_manager = SubscriptionManager(self._loop)
|
self._topic_manager = TopicManager(self._loop)
|
||||||
self._running_tasks = gather(
|
self._running_tasks = gather(
|
||||||
self._loop.run_in_executor(None, self._observer.start),
|
self._loop.run_in_executor(None, self._observer.start),
|
||||||
self._loop.create_task(self._subscription_manager.process_events())
|
self._loop.create_task(self._topic_manager.process_events())
|
||||||
)
|
)
|
||||||
|
|
||||||
async def stop(self) -> None:
|
async def stop(self) -> None:
|
||||||
def _observer_stop():
|
def _observer_stop():
|
||||||
self._observer.stop()
|
self._observer.stop()
|
||||||
self._observer.join()
|
self._observer.join()
|
||||||
self._subscription_manager.post_event(None)
|
self._topic_manager.post_event(None)
|
||||||
|
|
||||||
await self._loop.run_in_executor(None, _observer_stop)
|
await self._loop.run_in_executor(None, _observer_stop)
|
||||||
await self._running_tasks
|
await self._running_tasks
|
||||||
|
|
||||||
def subscribe(self, path: str) -> Subscription:
|
def subscribe(self, path: str) -> Subscriber:
|
||||||
return self._subscription_manager.subscribe(path)
|
return self._topic_manager.subscribe(path)
|
||||||
|
|
||||||
def on_any_event(self, event: FileSystemEvent) -> None:
|
def on_any_event(self, event: FileSystemEvent) -> None:
|
||||||
what = "directory" if event.is_directory else "file"
|
what = "directory" if event.is_directory else "file"
|
||||||
|
|
||||||
def post_event(path):
|
def post_event(path):
|
||||||
self._subscription_manager.post_event(path)
|
self._topic_manager.post_event(path)
|
||||||
|
|
||||||
if isinstance(event, FileClosedEvent):
|
if isinstance(event, FileClosedEvent):
|
||||||
log.debug("Closed %s: %s", what, event.src_path)
|
log.debug("Closed %s: %s", what, event.src_path)
|
||||||
|
114
src/bugis/cli.py
Normal file
114
src/bugis/cli.py
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
import argparse
|
||||||
|
from dataclasses import asdict
|
||||||
|
from os import environ
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional, Sequence
|
||||||
|
|
||||||
|
import yaml
|
||||||
|
from granian import Granian
|
||||||
|
from pwo import Maybe
|
||||||
|
|
||||||
|
from .configuration import Configuration
|
||||||
|
from granian.constants import HTTPModes, Interfaces, ThreadModes, Loops
|
||||||
|
|
||||||
|
def main(args: Optional[Sequence[str]] = None):
|
||||||
|
parser = argparse.ArgumentParser(description="A simple CLI program to render Markdown files")
|
||||||
|
default_configuration_file = (Maybe.of(environ.get('XDG_CONFIG_HOME'))
|
||||||
|
.map(lambda it: Path(it))
|
||||||
|
.map(lambda it: it / 'bugis' / 'bugis.yaml')
|
||||||
|
.or_else_get(lambda: Path(environ.get('HOME')) / '.config' / 'bugis' / 'bugis.yaml')
|
||||||
|
.filter(Path.exists)
|
||||||
|
.or_else(None)
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-c',
|
||||||
|
'--configuration',
|
||||||
|
help='Path to the configuration file',
|
||||||
|
default=default_configuration_file,
|
||||||
|
type=Path,
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-a',
|
||||||
|
'--address',
|
||||||
|
help='Server bind address',
|
||||||
|
default='127.0.0.1',
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-p',
|
||||||
|
'--port',
|
||||||
|
help='Server port',
|
||||||
|
default='8000',
|
||||||
|
type=int
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--access-log',
|
||||||
|
help='Enable access log',
|
||||||
|
action='store_true',
|
||||||
|
dest='log_access'
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--logging-configuration',
|
||||||
|
help='Logging configuration file',
|
||||||
|
dest='log_config_file'
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-w', '--workers',
|
||||||
|
help='Number of worker processes',
|
||||||
|
default='1',
|
||||||
|
dest='workers',
|
||||||
|
type = int
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'-t', '--threads',
|
||||||
|
help='Number of threads per worker',
|
||||||
|
default='1',
|
||||||
|
dest='threads',
|
||||||
|
type=int
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--http',
|
||||||
|
help='HTTP protocol version',
|
||||||
|
dest='http',
|
||||||
|
type=lambda it: HTTPModes(it),
|
||||||
|
choices=[str(mode) for mode in HTTPModes],
|
||||||
|
default = 'auto',
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--threading-mode',
|
||||||
|
help='Threading mode',
|
||||||
|
dest='threading_mode',
|
||||||
|
type=lambda it: ThreadModes(it),
|
||||||
|
choices=[str(mode) for mode in ThreadModes],
|
||||||
|
default=ThreadModes.workers
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
'--loop',
|
||||||
|
help='Loop',
|
||||||
|
dest='loop',
|
||||||
|
type=lambda it: Loops(it),
|
||||||
|
choices=[str(mode) for mode in Loops]
|
||||||
|
)
|
||||||
|
args = parser.parse_args(args)
|
||||||
|
def parse(configuration: Path):
|
||||||
|
with open(configuration, 'r') as f:
|
||||||
|
return yaml.safe_load(f)
|
||||||
|
|
||||||
|
def assign(it: Configuration):
|
||||||
|
Configuration.instance = it
|
||||||
|
Maybe.of_nullable(args.configuration).map(parse).if_present(assign)
|
||||||
|
conf = Configuration.instance
|
||||||
|
|
||||||
|
|
||||||
|
granian_conf = asdict(conf).setdefault('granian', dict())
|
||||||
|
for k, v in vars(args).items():
|
||||||
|
if v is not None:
|
||||||
|
granian_conf[k] = v
|
||||||
|
if args.log_config_file:
|
||||||
|
with open(args.log_config_file, 'r') as f:
|
||||||
|
granian_conf['log_dictconfig'] = yaml.safe_load(f)
|
||||||
|
granian_conf = Configuration.GranianConfiguration.from_dict(granian_conf)
|
||||||
|
|
||||||
|
Granian(
|
||||||
|
"bugis.asgi:application",
|
||||||
|
**asdict(granian_conf)
|
||||||
|
).serve()
|
@@ -1,45 +1,144 @@
|
|||||||
import os
|
|
||||||
from os import environ
|
from os import environ
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass, field, asdict
|
||||||
|
|
||||||
class ClassPropertyDescriptor(object):
|
import yaml
|
||||||
|
from granian.constants import Loops, Interfaces, ThreadModes, HTTPModes, StrEnum
|
||||||
|
from granian.log import LogLevels
|
||||||
|
from granian.http import HTTP1Settings, HTTP2Settings
|
||||||
|
from typing import Optional, Sequence, Dict, Any
|
||||||
|
from pwo import classproperty, Maybe
|
||||||
|
from yaml import add_representer, SafeDumper, SafeLoader
|
||||||
|
|
||||||
def __init__(self, fget, fset=None):
|
def parse_log_config(conf_file=None) -> Dict[str, Any]:
|
||||||
self.fget = fget
|
if conf_file is None:
|
||||||
self.fset = fset
|
conf_file = environ.get("LOGGING_CONFIGURATION_FILE",
|
||||||
|
Path(__file__).parent / 'default-conf' / 'logging.yaml')
|
||||||
def __get__(self, obj, klass=None):
|
with open(conf_file, 'r') as file:
|
||||||
if klass is None:
|
return yaml.safe_load(file)
|
||||||
klass = type(obj)
|
|
||||||
return self.fget.__get__(obj, klass)()
|
|
||||||
|
|
||||||
def __set__(self, obj, value):
|
|
||||||
if not self.fset:
|
|
||||||
raise AttributeError("can't set attribute")
|
|
||||||
type_ = type(obj)
|
|
||||||
return self.fset.__get__(obj, type_)(value)
|
|
||||||
|
|
||||||
def setter(self, func):
|
|
||||||
if not isinstance(func, (classmethod, staticmethod)):
|
|
||||||
func = classmethod(func)
|
|
||||||
self.fset = func
|
|
||||||
return self
|
|
||||||
|
|
||||||
def classproperty(func):
|
|
||||||
if not isinstance(func, (classmethod, staticmethod)):
|
|
||||||
func = classmethod(func)
|
|
||||||
return ClassPropertyDescriptor(func)
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
@dataclass(frozen=True)
|
||||||
class Configuration:
|
class Configuration:
|
||||||
logging_configuration_file : str = environ.get("LOGGING_CONFIGURATION_FILE", Path(__file__).parent / 'default-conf' / 'logging.yaml')
|
plant_uml_server_address: str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
|
||||||
plant_uml_server_address : str = environ.get('PLANT_UML_SERVER_ADDRESS', None)
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
|
||||||
|
class GranianConfiguration:
|
||||||
|
address: str = '127.0.0.1'
|
||||||
|
port: int = 8000
|
||||||
|
interface: str = Interfaces.ASGI
|
||||||
|
workers: int = 1
|
||||||
|
threads: int = 1
|
||||||
|
blocking_threads: Optional[int] = None
|
||||||
|
threading_mode: ThreadModes = ThreadModes.workers
|
||||||
|
loop: Loops = Loops.auto
|
||||||
|
loop_opt: bool = False
|
||||||
|
http: HTTPModes = HTTPModes.auto
|
||||||
|
websockets: bool = True
|
||||||
|
backlog: int = 1024
|
||||||
|
backpressure: Optional[int] = None
|
||||||
|
http1_settings: Optional[HTTP1Settings] = None
|
||||||
|
http2_settings: Optional[HTTP2Settings] = None
|
||||||
|
log_enabled: bool = True
|
||||||
|
log_level: LogLevels = LogLevels.info
|
||||||
|
log_dictconfig: Optional[Dict[str, Any]] = None
|
||||||
|
log_access: bool = False
|
||||||
|
log_access_format: Optional[str] = None
|
||||||
|
ssl_cert: Optional[Path] = None
|
||||||
|
ssl_key: Optional[Path] = None
|
||||||
|
ssl_key_password: Optional[str] = None
|
||||||
|
url_path_prefix: Optional[str] = None
|
||||||
|
respawn_failed_workers: bool = False
|
||||||
|
respawn_interval: float = 3.5
|
||||||
|
workers_lifetime: Optional[int] = None
|
||||||
|
factory: bool = False
|
||||||
|
reload: bool = False
|
||||||
|
reload_paths: Optional[Sequence[Path]] = None
|
||||||
|
reload_ignore_dirs: Optional[Sequence[str]] = None
|
||||||
|
reload_ignore_patterns: Optional[Sequence[str]] = None
|
||||||
|
reload_ignore_paths: Optional[Sequence[Path]] = None
|
||||||
|
process_name: Optional[str] = None
|
||||||
|
pid_file: Optional[Path] = None
|
||||||
|
|
||||||
@classproperty
|
@staticmethod
|
||||||
def instance(cls) -> 'Configuration':
|
def from_dict(d) -> 'Configuration.GranianConfiguration':
|
||||||
return Configuration()
|
return Configuration.GranianConfiguration(**{k: v for k, v in dict(
|
||||||
|
address=d.get('address', None),
|
||||||
|
port=d.get('port', None),
|
||||||
|
interface=Maybe.of_nullable(d.get('interface')).map(lambda it: Interfaces(it)).or_else(None),
|
||||||
|
workers=d.get('workers', None),
|
||||||
|
threads=d.get('threads', None),
|
||||||
|
blocking_threads=d.get('blocking_threads', None),
|
||||||
|
threading_mode=Maybe.of_nullable(d.get('threading_modes')).map(lambda it: ThreadModes(it)).or_else(None),
|
||||||
|
loop=Maybe.of_nullable(d.get('loop')).map(lambda it: Loops(it)).or_else(None),
|
||||||
|
loop_opt=d.get('loop_opt', None),
|
||||||
|
http=Maybe.of_nullable(d.get('http')).map(lambda it: HTTPModes(it)).or_else(None),
|
||||||
|
websockets=d.get('websockets', None),
|
||||||
|
backlog=d.get('backlog', None),
|
||||||
|
backpressure=d.get('backpressure', None),
|
||||||
|
http1_settings=Maybe.of_nullable(d.get('http1_settings')).map(lambda it: HTTP1Settings(**it)).or_else(None),
|
||||||
|
http2_settings=Maybe.of_nullable(d.get('http2_settings')).map(lambda it: HTTP2Settings(**it)).or_else(None),
|
||||||
|
log_enabled=d.get('log_enabled', None),
|
||||||
|
log_level=Maybe.of_nullable(d.get('log_level')).map(lambda it: LogLevels(it)).or_else(None),
|
||||||
|
log_dictconfig=parse_log_config(d.get('log_config_file')),
|
||||||
|
log_access=d.get('log_access', None),
|
||||||
|
log_access_format=d.get('log_access_format', None),
|
||||||
|
ssl_cert=d.get('ssl_cert', None),
|
||||||
|
ssl_key=d.get('ssl_key', None),
|
||||||
|
ssl_key_password=d.get('ssl_key_password', None),
|
||||||
|
url_path_prefix=d.get('url_path_prefix', None),
|
||||||
|
respawn_failed_workers=d.get('respawn_failed_workers', None),
|
||||||
|
respawn_interval=d.get('respawn_interval', None),
|
||||||
|
workers_lifetime=d.get('workers_lifetime', None),
|
||||||
|
factory=d.get('factory', None),
|
||||||
|
reload=d.get('reload', None),
|
||||||
|
reload_paths=d.get('reload_paths', None),
|
||||||
|
reload_ignore_dirs=d.get('reload_ignore_dirs', None),
|
||||||
|
reload_ignore_patterns=d.get('reload_ignore_patterns', None),
|
||||||
|
reload_ignore_paths=d.get('reload_ignore_paths', None),
|
||||||
|
process_name=d.get('process_name', None),
|
||||||
|
pid_file=d.get('pid_file', None),
|
||||||
|
).items() if v is not None})
|
||||||
|
|
||||||
|
granian: GranianConfiguration = GranianConfiguration()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_dict(d) -> 'Configuration':
|
||||||
|
return Configuration(
|
||||||
|
**{k: v for k, v in dict(
|
||||||
|
logging_configuration_file=d.get('logging_configuration_file', None),
|
||||||
|
plant_uml_server_address=d.get('plant_uml_server_address', None),
|
||||||
|
granian=Maybe.of_nullable(d.get('granian'))
|
||||||
|
.map(Configuration.GranianConfiguration.from_dict)
|
||||||
|
.or_else(None)
|
||||||
|
).items() if v is not None
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
def to_yaml(self, stream):
|
||||||
|
dumper = SafeDumper(stream)
|
||||||
|
dumper.add_representer(Configuration, lambda dumper, conf: dumper.represent_dict(asdict(conf)))
|
||||||
|
dumper.add_representer(Configuration.GranianConfiguration,
|
||||||
|
lambda dumper, conf: dumper.represent_dict(asdict(conf)))
|
||||||
|
dumper.add_representer(LogLevels, lambda dumper, level: dumper.represent_str(level.lower()))
|
||||||
|
dumper.add_multi_representer(Path, lambda dumper, path: dumper.represent_str(str(path)))
|
||||||
|
dumper.add_multi_representer(StrEnum, lambda dumper, str_enum: dumper.represent_str(str(str_enum)))
|
||||||
|
dumper.add_representer(HTTP1Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
|
||||||
|
dumper.add_representer(HTTP2Settings, lambda dumper, settings: dumper.represent_dict(vars(settings)))
|
||||||
|
|
||||||
|
try:
|
||||||
|
dumper.open()
|
||||||
|
dumper.represent(Configuration.instance)
|
||||||
|
dumper.close()
|
||||||
|
finally:
|
||||||
|
dumper.dispose()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_yaml(stream) -> 'Configuration':
|
||||||
|
loader = SafeLoader(stream)
|
||||||
|
try:
|
||||||
|
conf = loader.get_single_data()
|
||||||
|
return Configuration.from_dict(conf)
|
||||||
|
finally:
|
||||||
|
loader.dispose()
|
||||||
|
|
||||||
|
instance = Configuration()
|
@@ -1,5 +1,5 @@
|
|||||||
version: 1
|
version: 1
|
||||||
disable_existing_loggers: True
|
disable_existing_loggers: False
|
||||||
handlers:
|
handlers:
|
||||||
console:
|
console:
|
||||||
class : logging.StreamHandler
|
class : logging.StreamHandler
|
||||||
@@ -8,28 +8,23 @@ handlers:
|
|||||||
stream : ext://sys.stderr
|
stream : ext://sys.stderr
|
||||||
access:
|
access:
|
||||||
class : logging.StreamHandler
|
class : logging.StreamHandler
|
||||||
formatter: request
|
formatter: access
|
||||||
level : INFO
|
level : INFO
|
||||||
stream : ext://sys.stderr
|
stream : ext://sys.stdout
|
||||||
formatters:
|
formatters:
|
||||||
brief:
|
|
||||||
format: '%(message)s'
|
|
||||||
default:
|
default:
|
||||||
format: '{asctime} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}'
|
format: '{asctime}.{msecs:0<3.0f} [{levelname}] ({processName:s}/{threadName:s}) - {name} - {message}'
|
||||||
style: '{'
|
|
||||||
datefmt: '%Y-%m-%d %H:%M:%S'
|
|
||||||
request:
|
|
||||||
format: '{asctime} {client[0]}:{client[1]} HTTP/{http_version} {method} {path} - {status}'
|
|
||||||
style: '{'
|
style: '{'
|
||||||
datefmt: '%Y-%m-%d %H:%M:%S'
|
datefmt: '%Y-%m-%d %H:%M:%S'
|
||||||
|
access:
|
||||||
|
format: '%(message)s'
|
||||||
loggers:
|
loggers:
|
||||||
root:
|
root:
|
||||||
handlers: [console]
|
handlers: [console]
|
||||||
level: DEBUG
|
_granian:
|
||||||
access:
|
|
||||||
handlers: [access]
|
|
||||||
level: INFO
|
level: INFO
|
||||||
watchdog.observers.inotify_buffer:
|
propagate: False
|
||||||
level: INFO
|
granian.access:
|
||||||
MARKDOWN:
|
handlers: [ access ]
|
||||||
level: INFO
|
level: INFO
|
||||||
|
propagate: False
|
||||||
|
@@ -1,17 +1,19 @@
|
|||||||
from typing import TYPE_CHECKING
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
from aiofiles import open as async_open
|
from aiofiles import open as async_open
|
||||||
from aiohttp import ClientSession
|
|
||||||
from .configuration import Configuration
|
from .configuration import Configuration
|
||||||
from yarl import URL
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from _typeshed import StrOrBytesPath
|
from _typeshed import StrOrBytesPath
|
||||||
|
from httpx import AsyncClient, URL
|
||||||
|
from typing import Callable, Awaitable
|
||||||
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
async def render_plant_uml(path: 'StrOrBytesPath') -> bytes:
|
chunk_size = 0x10000
|
||||||
async with ClientSession() as session:
|
async def render_plant_uml(client: AsyncClient, path: 'StrOrBytesPath', send : Callable[[bytes], Awaitable[None]]):
|
||||||
url = URL(Configuration.instance.plant_uml_server_address) / 'svg'
|
url = URL(urljoin(Configuration.instance.plant_uml_server_address, 'svg'))
|
||||||
async with async_open(path, 'rb') as file:
|
async with async_open(path, 'rb') as file:
|
||||||
source = await file.read()
|
source = await file.read()
|
||||||
async with session.post(url, data=source) as response:
|
response = await client.post(url, content=source)
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
return await response.read()
|
async for chunk in response.aiter_bytes(chunk_size=chunk_size):
|
||||||
|
await send(chunk)
|
||||||
|
0
src/bugis/py.typed
Normal file
0
src/bugis/py.typed
Normal file
@@ -6,7 +6,7 @@ from io import BytesIO
|
|||||||
from mimetypes import init as mimeinit, guess_type
|
from mimetypes import init as mimeinit, guess_type
|
||||||
from os import getcwd
|
from os import getcwd
|
||||||
from os.path import join, normpath, splitext, relpath, basename
|
from os.path import join, normpath, splitext, relpath, basename
|
||||||
from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any
|
from typing import Callable, TYPE_CHECKING, Optional, Awaitable, AsyncGenerator, Any, Mapping
|
||||||
|
|
||||||
import pygraphviz as pgv
|
import pygraphviz as pgv
|
||||||
from aiofiles import open as async_open
|
from aiofiles import open as async_open
|
||||||
@@ -14,10 +14,12 @@ from aiofiles.base import AiofilesContextManager
|
|||||||
from aiofiles.os import listdir
|
from aiofiles.os import listdir
|
||||||
from aiofiles.ospath import exists, isdir, isfile, getmtime
|
from aiofiles.ospath import exists, isdir, isfile, getmtime
|
||||||
from aiofiles.threadpool.binary import AsyncBufferedReader
|
from aiofiles.threadpool.binary import AsyncBufferedReader
|
||||||
|
from httpx import AsyncClient
|
||||||
from pwo import Maybe
|
from pwo import Maybe
|
||||||
|
|
||||||
from .asgi_utils import encode_headers
|
from .asgi_utils import encode_headers
|
||||||
from .async_watchdog import FileWatcher
|
from .async_watchdog import FileWatcher
|
||||||
|
from .configuration import Configuration
|
||||||
from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS
|
from .md2html import compile_html, load_from_cache, STATIC_RESOURCES, MARDOWN_EXTENSIONS
|
||||||
from .plantuml import render_plant_uml
|
from .plantuml import render_plant_uml
|
||||||
|
|
||||||
@@ -29,11 +31,12 @@ mimeinit()
|
|||||||
cwd: 'StrOrBytesPath' = getcwd()
|
cwd: 'StrOrBytesPath' = getcwd()
|
||||||
|
|
||||||
|
|
||||||
def completed_future[T](result : T) -> Future[T]:
|
def completed_future[T](result: T) -> Future[T]:
|
||||||
future = Future()
|
future = Future()
|
||||||
future.set_result(result)
|
future.set_result(result)
|
||||||
return future
|
return future
|
||||||
|
|
||||||
|
|
||||||
def has_extension(filepath, extension):
|
def has_extension(filepath, extension):
|
||||||
_, ext = splitext(filepath)
|
_, ext = splitext(filepath)
|
||||||
return ext == extension
|
return ext == extension
|
||||||
@@ -46,13 +49,19 @@ def is_markdown(filepath):
|
|||||||
def is_dotfile(filepath):
|
def is_dotfile(filepath):
|
||||||
return has_extension(filepath, ".dot")
|
return has_extension(filepath, ".dot")
|
||||||
|
|
||||||
|
|
||||||
def is_plant_uml(filepath):
|
def is_plant_uml(filepath):
|
||||||
return has_extension(filepath, ".puml")
|
return has_extension(filepath, ".puml")
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Server:
|
class Server:
|
||||||
_loop : AbstractEventLoop
|
root_dir: 'StrOrBytesPath'
|
||||||
|
prefix: Optional['StrOrBytesPath']
|
||||||
|
_loop: AbstractEventLoop
|
||||||
|
_client: AsyncClient
|
||||||
|
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
root_dir: 'StrOrBytesPath' = getcwd(),
|
root_dir: 'StrOrBytesPath' = getcwd(),
|
||||||
@@ -63,8 +72,16 @@ class Server:
|
|||||||
self.file_watcher = FileWatcher(cwd)
|
self.file_watcher = FileWatcher(cwd)
|
||||||
self.prefix = prefix and normpath(f'{prefix.decode()}')
|
self.prefix = prefix and normpath(f'{prefix.decode()}')
|
||||||
self._loop = loop
|
self._loop = loop
|
||||||
|
self._client = AsyncClient()
|
||||||
|
|
||||||
async def handle_request(self, method: str, url_path: str, etag: Optional[str], query_string: Optional[str], send):
|
async def handle_request(self,
|
||||||
|
method: str,
|
||||||
|
url_path: str,
|
||||||
|
etag: Optional[str],
|
||||||
|
query_string: Optional[str],
|
||||||
|
send: Callable[[Mapping[str, Any]], Awaitable[None]],
|
||||||
|
pathsend: bool = False
|
||||||
|
):
|
||||||
if method != 'GET':
|
if method != 'GET':
|
||||||
await send({
|
await send({
|
||||||
'type': 'http.response.start',
|
'type': 'http.response.start',
|
||||||
@@ -84,11 +101,12 @@ class Server:
|
|||||||
etag, digest = await self.compute_etag_and_digest(
|
etag, digest = await self.compute_etag_and_digest(
|
||||||
etag,
|
etag,
|
||||||
url_path,
|
url_path,
|
||||||
lambda: AiofilesContextManager(completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
|
lambda: AiofilesContextManager(
|
||||||
|
completed_future(AsyncBufferedReader(BytesIO(content), loop=self._loop, executor=None))),
|
||||||
lambda: completed_future(mtime)
|
lambda: completed_future(mtime)
|
||||||
)
|
)
|
||||||
if etag and etag == digest:
|
if etag and etag == digest:
|
||||||
await self.not_modified(send, digest, ('Cache-Control', 'must-revalidate, max-age=86400'))
|
await self.not_modified(send, digest, 'must-revalidate, max-age=86400')
|
||||||
return
|
return
|
||||||
elif content:
|
elif content:
|
||||||
mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
|
mime_type = guess_type(basename(url_path))[0] or 'application/octet-stream'
|
||||||
@@ -147,6 +165,7 @@ class Server:
|
|||||||
result = graph.draw(None, format="svg", prog="dot")
|
result = graph.draw(None, format="svg", prog="dot")
|
||||||
logger.debug("Completed Graphviz rendering for file '%s'", filepath)
|
logger.debug("Completed Graphviz rendering for file '%s'", filepath)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
body = await self._loop.run_in_executor(None, render_graphviz, path)
|
body = await self._loop.run_in_executor(None, render_graphviz, path)
|
||||||
await send({
|
await send({
|
||||||
'type': 'http.response.start',
|
'type': 'http.response.start',
|
||||||
@@ -161,9 +180,8 @@ class Server:
|
|||||||
'type': 'http.response.body',
|
'type': 'http.response.body',
|
||||||
'body': body
|
'body': body
|
||||||
})
|
})
|
||||||
elif is_plant_uml(path):
|
elif Configuration.instance.plant_uml_server_address and is_plant_uml(path):
|
||||||
logger.debug("Starting PlantUML rendering for file '%s'", path)
|
logger.debug("Starting PlantUML rendering for file '%s'", path)
|
||||||
body = await render_plant_uml(path)
|
|
||||||
logger.debug("Completed PlantUML rendering for file '%s'", path)
|
logger.debug("Completed PlantUML rendering for file '%s'", path)
|
||||||
await send({
|
await send({
|
||||||
'type': 'http.response.start',
|
'type': 'http.response.start',
|
||||||
@@ -174,13 +192,18 @@ class Server:
|
|||||||
'Cache-Control': 'no-cache'
|
'Cache-Control': 'no-cache'
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
await render_plant_uml(self._client, path, lambda chunk: send({
|
||||||
|
'type': 'http.response.body',
|
||||||
|
'body': chunk,
|
||||||
|
'more_body': True
|
||||||
|
}))
|
||||||
await send({
|
await send({
|
||||||
'type': 'http.response.body',
|
'type': 'http.response.body',
|
||||||
'body': body
|
'body': '',
|
||||||
|
'more_body': False
|
||||||
})
|
})
|
||||||
else:
|
else:
|
||||||
async def read_file(file_path):
|
async def read_file(file_path, buffer_size=0x10000):
|
||||||
buffer_size = 0x10000
|
|
||||||
async with async_open(file_path, 'rb') as f:
|
async with async_open(file_path, 'rb') as f:
|
||||||
while True:
|
while True:
|
||||||
result = await f.read(buffer_size)
|
result = await f.read(buffer_size)
|
||||||
@@ -197,7 +220,12 @@ class Server:
|
|||||||
'Cache-Control': 'no-cache'
|
'Cache-Control': 'no-cache'
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
if pathsend:
|
||||||
|
await send({
|
||||||
|
'type': 'http.response.pathsend',
|
||||||
|
'path': path
|
||||||
|
})
|
||||||
|
else:
|
||||||
async for chunk in read_file(path):
|
async for chunk in read_file(path):
|
||||||
await send({
|
await send({
|
||||||
'type': 'http.response.body',
|
'type': 'http.response.body',
|
||||||
@@ -227,7 +255,7 @@ class Server:
|
|||||||
await self.not_found(send)
|
await self.not_found(send)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def stream_hash(source: AsyncBufferedReader, bufsize=0x1000) -> bytes:
|
async def stream_hash(source: AsyncBufferedReader, bufsize=0x10000) -> bytes:
|
||||||
if bufsize <= 0:
|
if bufsize <= 0:
|
||||||
raise ValueError("Buffer size must be greater than 0")
|
raise ValueError("Buffer size must be greater than 0")
|
||||||
md5 = hashlib.md5()
|
md5 = hashlib.md5()
|
||||||
@@ -239,7 +267,7 @@ class Server:
|
|||||||
return md5.digest()
|
return md5.digest()
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def file_hash(filepath, bufsize=0x1000) -> bytes:
|
async def file_hash(filepath, bufsize=0x10000) -> bytes:
|
||||||
if bufsize <= 0:
|
if bufsize <= 0:
|
||||||
raise ValueError("Buffer size must be greater than 0")
|
raise ValueError("Buffer size must be greater than 0")
|
||||||
md5 = hashlib.md5()
|
md5 = hashlib.md5()
|
||||||
@@ -321,7 +349,7 @@ class Server:
|
|||||||
})
|
})
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
async def not_modified(send, digest: str, cache_control: str ='no-cache') -> []:
|
async def not_modified(send, digest: str, cache_control: str = 'no-cache') -> []:
|
||||||
await send({
|
await send({
|
||||||
'type': 'http.response.start',
|
'type': 'http.response.start',
|
||||||
'status': 304,
|
'status': 304,
|
||||||
@@ -362,15 +390,19 @@ class Server:
|
|||||||
for entry in sorted(await listdir(path)):
|
for entry in sorted(await listdir(path)):
|
||||||
if await filter(join(path, entry)):
|
if await filter(join(path, entry)):
|
||||||
yield entry
|
yield entry
|
||||||
|
|
||||||
return result()
|
return result()
|
||||||
|
|
||||||
async for entry in await ls(isdir):
|
async for entry in await ls(isdir):
|
||||||
result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
|
result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
|
||||||
|
|
||||||
async def file_filter(entry: str) -> bool:
|
async def file_filter(entry: str) -> bool:
|
||||||
return await isfile(entry) and is_markdown(entry)
|
return await isfile(entry) and is_markdown(entry)
|
||||||
|
|
||||||
async for entry in await ls(file_filter):
|
async for entry in await ls(file_filter):
|
||||||
result += '<li><a href="' + entry + '"/>' + entry + '</li>'
|
result += '<li><a href="' + entry + '"/>' + entry + '</li>'
|
||||||
return result
|
return result
|
||||||
|
|
||||||
async def stop(self):
|
async def stop(self):
|
||||||
await self.file_watcher.stop()
|
await self.file_watcher.stop()
|
||||||
|
await self._client.aclose()
|
||||||
|
Reference in New Issue
Block a user