Compare commits

8f0320f262 ... dev

2 Commits: f158698380, 56c8e796b7
@@ -2,14 +2,18 @@ from abc import ABC, abstractmethod
from asyncio import Queue, AbstractEventLoop
from asyncio import get_running_loop
from logging import getLogger
-from typing import Callable, Awaitable, Any, Mapping, Sequence, Optional, Unpack, Tuple
-
+from typing import Callable, Awaitable, Any, Mapping, Sequence, Optional, Unpack, Tuple, TYPE_CHECKING
+from pathlib import Path, PurePath
from pwo import Maybe, AsyncQueueIterator
-
+from hashlib import md5
from ._http_context import HttpContext
from ._http_method import HttpMethod
from ._types import StrOrStrings
-
+from base64 import b64encode, b64decode
+from mimetypes import guess_type
+
+if TYPE_CHECKING:
+    from _typeshed import StrOrBytesPath
try:
    from ._rsgi import RsgiContext
    from granian._granian import RSGIHTTPProtocol, RSGIHTTPScope  # type: ignore
@@ -141,3 +145,5 @@ class BugisApp(AbstractBugisApp):

    def PATCH(self, path: str, recursive: bool = False) -> Callable[[HttpHandler], HttpHandler]:
        return self.route(path, (HttpMethod.PATCH,), recursive)
+
+
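Note: BugisApp.PATCH (shown as context above) follows the same pattern as the other verb decorators: it returns a decorator that registers the handler via route(). A minimal usage sketch, with a hypothetical /ping route and the handler signature used later in server/src/bugis/server/server.py:

    from bugis.core import BugisApp, HttpContext

    app = BugisApp()

    @app.PATCH('/ping')  # hypothetical route path; recursive defaults to False
    async def ping(context: HttpContext, *_) -> None:
        await context.send_str(200, 'pong', {'content-type': 'text/plain'})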
@@ -34,11 +34,11 @@ def decode_headers(headers: Iterable[Tuple[bytes, bytes]]) -> Dict[str, Sequence
            raise NotImplementedError('This should never happen')
        if isinstance(value, bytes):
            value_str = value.decode()
-        elif isinstance(key, str):
+        elif isinstance(value, str):
            value_str = value
        else:
            raise NotImplementedError('This should never happen')
-        ls = result.setdefault(key_str, list())
+        ls = result.setdefault(key_str.lower(), list())
        ls.append(value_str)
    return {
        k: tuple(v) for k, v in result.items()
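Note: two fixes land in decode_headers above: the elif now checks value (not key) before treating it as a str, and header names are lowercased so later lookups such as headers.get('if-none-match') are case-insensitive. A minimal standalone sketch of the resulting grouping behaviour (not the library function itself):

    from typing import Dict, Iterable, List, Sequence, Tuple

    def group_headers(raw: Iterable[Tuple[bytes, bytes]]) -> Dict[str, Sequence[str]]:
        result: Dict[str, List[str]] = {}
        for key, value in raw:
            # lowercase the name, keep every occurrence of the value
            result.setdefault(key.decode().lower(), []).append(value.decode())
        return {k: tuple(v) for k, v in result.items()}

    assert group_headers([(b'If-None-Match', b'W/"abc"')]) == {'if-none-match': ('W/"abc"',)}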
@@ -91,7 +91,7 @@ class AsgiContext(HttpContext):
    async def stream_body(self,
                          status: int,
                          body_generator: AsyncGenerator[bytes, None],
-                          headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+                          headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        await self._send_head(status, headers)
        async for chunk in body_generator:
            await self.send({
@@ -105,21 +105,21 @@ class AsgiContext(HttpContext):
            'more_body': False
        })

-    async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        await self._send_head(status, headers)
        await self.send({
            'type': 'http.response.body',
            'body': body,
        })

-    async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        await self._send_head(status, headers)
        await self.send({
            'type': 'http.response.body',
            'body': body.encode(),
        })

-    async def _send_head(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def _send_head(self, status: int, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        await self.send({
            'type': 'http.response.start',
            'status': status,
@@ -129,7 +129,7 @@ class AsgiContext(HttpContext):
    async def send_file(self,
                        status: int,
                        path: Path,
-                        headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+                        headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        if self.pathsend:
            await self._send_head(status, headers)
            await self.send({
@@ -139,7 +139,7 @@ class AsgiContext(HttpContext):
        else:
            raise NotImplementedError()

-    async def send_empty(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_empty(self, status: int, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        await self._send_head(status, headers)
        await self.send({
            'type': 'http.response.body',
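Note: in the AsgiContext hunks above only the headers annotation changes, from Mapping[str, Sequence[str]] to Mapping[str, StrOrStrings], so callers can now pass a single string per header name instead of wrapping each value in a tuple. A hypothetical handler body as a sketch:

    async def handler(context, *_) -> None:
        # plain-string header values now satisfy the annotation
        await context.send_bytes(200, b'{"ok": true}', {
            'content-type': 'application/json',
            'cache-control': 'no-cache',
        })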
@@ -13,6 +13,7 @@ from abc import ABC, abstractmethod
from pathlib import Path

from ._http_method import HttpMethod
+from ._types.base import StrOrStrings


class HttpContext(ABC):
@@ -32,20 +33,20 @@ class HttpContext(ABC):
    async def stream_body(self,
                          status: int,
                          body_generator: AsyncGenerator[bytes, None],
-                          headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+                          headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        pass

    @abstractmethod
-    async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        pass

-    async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        await self.send_bytes(status, body.encode(), headers)

    @abstractmethod
-    async def send_file(self, status: int, path: Path, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_file(self, status: int, path: Path, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        pass

    @abstractmethod
-    async def send_empty(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_empty(self, status: int, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        pass
@@ -17,6 +17,7 @@ from typing import (
from granian._granian import RSGIHTTPProtocol, RSGIHTTPScope
from pwo import Maybe

+from ._types import StrOrStrings
from ._http_context import HttpContext
from ._http_method import HttpMethod

@@ -40,7 +41,7 @@ class RsgiContext(HttpContext):
        self.query_string = scope.query_string

        def acc(d: Dict[str, List[str]], t: Tuple[str, str]) -> Dict[str, List[str]]:
-            d.setdefault(t[0], list()).append(t[1])
+            d.setdefault(t[0].lower(), list()).append(t[1])
            return d

        fun = cast(Callable[[Mapping[str, Sequence[str]], tuple[str, str]], Mapping[str, Sequence[str]]], acc)
@@ -54,16 +55,27 @@ class RsgiContext(HttpContext):
        self.request_body = aiter(protocol)
        self.protocol = protocol

+    # @staticmethod
+    # def _rearrange_headers(headers: Mapping[str, Sequence[str]]) -> List[Tuple[str, str]]:
+    #     return list(
+    #         ((key, value) for key, values in headers.items() for value in values)
+    #     )
+
    @staticmethod
-    def _rearrange_headers(headers: Mapping[str, Sequence[str]]) -> List[Tuple[str, str]]:
-        return list(
-            ((key, value) for key, values in headers.items() for value in values)
-        )
+    def _rearrange_headers(headers: Mapping[str, StrOrStrings]) -> List[Tuple[str, str]]:
+        result = []
+        for key, value in headers.items():
+            if isinstance(value, str):
+                result.append((key, value))
+            elif isinstance(value, Sequence):
+                for single_value in value:
+                    result.append((key, single_value))
+        return result

    async def stream_body(self,
                          status: int,
                          body_generator: AsyncGenerator[bytes, None],
-                          headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+                          headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        transport = self.protocol.response_stream(status,
                                                  Maybe.of_nullable(headers)
                                                  .map(self._rearrange_headers)
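Note: the rewritten _rearrange_headers accepts either a single string or a sequence of strings per header name and flattens them into (name, value) pairs for the RSGI protocol. Checking isinstance(value, str) before the Sequence branch matters, because a str is itself a Sequence and would otherwise be split into characters. A standalone sketch of the same flattening (not the method itself):

    from typing import List, Mapping, Sequence, Tuple

    def flatten(headers: Mapping[str, str | Sequence[str]]) -> List[Tuple[str, str]]:
        result: List[Tuple[str, str]] = []
        for key, value in headers.items():
            if isinstance(value, str):  # must come first: str is also a Sequence
                result.append((key, value))
            else:
                result.extend((key, v) for v in value)
        return result

    assert flatten({'etag': 'W/"abc"', 'set-cookie': ('a=1', 'b=2')}) == [
        ('etag', 'W/"abc"'), ('set-cookie', 'a=1'), ('set-cookie', 'b=2')
    ]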
@@ -71,24 +83,25 @@ class RsgiContext(HttpContext):
        async for chunk in body_generator:
            await transport.send_bytes(chunk)

-    async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_bytes(self, status: int, body: bytes, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
        if len(body) > 0:
            self.protocol.response_bytes(status, rearranged_headers, body)
        else:
            self.protocol.response_empty(status, rearranged_headers)

-    async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_str(self, status: int, body: str, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
        if len(body) > 0:
            self.protocol.response_str(status, rearranged_headers, body)
        else:
            self.protocol.response_empty(status, rearranged_headers)

-    async def send_file(self, status: int, path: Path, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
-        rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
+    async def send_file(self, status: int, path: Path, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
+        rearranged_headers = (Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers)
+                              .or_else(list()))
        self.protocol.response_file(status, rearranged_headers, str(path))

-    async def send_empty(self, status: int, headers: Optional[Mapping[str, Sequence[str]]] = None) -> None:
+    async def send_empty(self, status: int, headers: Optional[Mapping[str, StrOrStrings]] = None) -> None:
        rearranged_headers = Maybe.of_nullable(headers).map(RsgiContext._rearrange_headers).or_else(list())
        self.protocol.response_empty(status, rearranged_headers)
@@ -20,7 +20,8 @@ from ._http_context import HttpContext
from ._http_method import HttpMethod
from ._path_handler import PathHandler
from ._path_matcher import PathMatcher, IntMatcher, GlobMatcher, StrMatcher, Node
-from ._types import NodeType, Matches
+from ._path_handler import Matches
+from ._types import NodeType


class Tree:
@@ -12,16 +12,12 @@ from typing import (
    Sequence
)

+from .base import StrOrStrings, PathMatcherResult
+
from bugis.core._http_method import HttpMethod
-
-from bugis.core._path_handler import PathHandler, Matches
-
-type StrOrStrings = (str | Sequence[str])

type NodeType = (str | HttpMethod)
-
-type PathMatcherResult = Mapping[str, Any] | Sequence[str]


class ASGIVersions(TypedDict):
    spec_version: str
@@ -90,7 +86,6 @@ __all__ = [
    'RSGI',
    'ASGIVersions',
    'WebSocketScope',
-    'PathHandler',
    'NodeType',
-    'Matches'
+    'StrOrStrings'
]
core/src/bugis/core/_types/base.py (new file, +4)
@@ -0,0 +1,4 @@
+from typing import Sequence, Mapping, Any
+
+type StrOrStrings = (str | Sequence[str])
+type PathMatcherResult = Mapping[str, Any] | Sequence[str]
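Note: StrOrStrings is the alias the reworked send_* signatures accept per header name, so both of these shapes now typecheck. A minimal sketch:

    from typing import Mapping, Sequence

    type StrOrStrings = str | Sequence[str]  # mirrors the alias defined above

    single_valued: Mapping[str, StrOrStrings] = {'content-type': 'text/plain'}
    multi_valued: Mapping[str, StrOrStrings] = {'set-cookie': ('a=1', 'b=2')}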
core/tests/test_ciao.py (new file, +38)
@@ -0,0 +1,38 @@
+import unittest
+
+
+# Define some test cases
+class TestAddition(unittest.TestCase):
+    def test_add_positive(self):
+        self.assertEqual(1 + 2, 3)
+
+    def test_add_negative(self):
+        self.assertEqual(-1 + (-1), -2)
+
+
+class TestSubtraction(unittest.TestCase):
+    def test_subtract_positive(self):
+        self.assertEqual(5 - 3, 2)
+
+    def test_subtract_negative(self):
+        self.assertEqual(-5 - (-2), -3)
+
+
+# Now let's create a TestSuite
+def suite():
+    suite = unittest.TestSuite()
+
+    # Add tests to the suite
+    suite.addTest(TestAddition('test_add_positive'))
+    suite.addTest(TestAddition('test_add_negative'))
+    suite.addTest(TestSubtraction('test_subtract_positive'))
+    # suite.addTest(TestSubtraction('test_subtract_negative'))
+    # suite.addTest(TestSubtraction('test_subtract_negative2'))
+
+    return suite
+
+
+# Running the suite
+if __name__ == "__main__":
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
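Note: the suite above registers tests one method name at a time; loading whole TestCase classes with the default loader avoids keeping that list in sync. A sketch, assuming it lives in the same module:

    import unittest

    def full_suite() -> unittest.TestSuite:
        loader = unittest.defaultTestLoader
        return unittest.TestSuite([
            loader.loadTestsFromTestCase(TestAddition),
            loader.loadTestsFromTestCase(TestSubtraction),
        ])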
server/example/test_server.py (new file, +12)
@@ -0,0 +1,12 @@
+
+from bugis.core import BugisApp
+from pathlib import PurePath
+from bugis.server.server import static_resources
+import os
+
+root = os.getenv('STATIC_ROOT') or '.'
+app = BugisApp()
+
+static_resources(app, '/view', root)
+
+
server/pyproject.toml (new file, +67)
@@ -0,0 +1,67 @@
+[build-system]
+requires = ["setuptools>=61.0", "setuptools-scm>=8"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "bugis_server"
+dynamic = ["version"]
+authors = [
+    { name="Walter Oggioni", email="oggioni.walter@gmail.com" },
+]
+description = "Static file renderer"
+readme = "README.md"
+requires-python = ">=3.10"
+classifiers = [
+    'Development Status :: 3 - Alpha',
+    'Topic :: Utilities',
+    'License :: OSI Approved :: MIT License',
+    'Intended Audience :: System Administrators',
+    'Intended Audience :: Developers',
+    'Environment :: Console',
+    'License :: OSI Approved :: MIT License',
+    'Programming Language :: Python :: 3',
+]
+dependencies = [
+    "bugis_core",
+    "Markdown",
+    "Pygments",
+    "watchdog",
+    "pwo",
+    "PyYAML",
+    "pygraphviz",
+    "aiofiles",
+    "httpx[http2]"
+]
+
+[project.optional-dependencies]
+dev = [
+    "build", "granian", "mypy", "ipdb", "twine"
+]
+
+run = [
+    "granian"
+]
+
+[tool.setuptools.package-data]
+bugis = ['static/*', 'default-conf/*']
+
+[project.urls]
+"Homepage" = "https://github.com/woggioni/bugis"
+"Bug Tracker" = "https://github.com/woggioni/bugis/issues"
+
+[tool.mypy]
+python_version = "3.12"
+disallow_untyped_defs = true
+show_error_codes = true
+no_implicit_optional = true
+warn_return_any = true
+warn_unused_ignores = true
+exclude = ["scripts", "docs", "test"]
+strict = true
+
+[tool.setuptools_scm]
+root='..'
+version_file = "src/bugis/server/_version.py"
+
+[project.scripts]
+bugis = "bugis.server.cli:main"
server/src/bugis/server/__init__.py (new empty file)
server/src/bugis/server/cache.py (new file, +44)
@@ -0,0 +1,44 @@
+from abc import ABC, abstractmethod
+from typing import Optional, Tuple, Callable
+from functools import lru_cache
+from pathlib import PurePath
+from .renderer import RenderingManager
+
+
+class Cache(ABC):
+    _rendering_manager: RenderingManager
+
+    def __init__(self, rendering_manager: RenderingManager):
+        self._rendering_manager = rendering_manager
+
+    def get(self, key: bytes, file: PurePath) -> Optional[Tuple[str, bytes]]:
+        if self.filter(file):
+            return self.load(key, file)
+        else:
+            return self._rendering_manager.render(file)
+
+    def filter(self, file: PurePath) -> bool:
+        return file.suffix.lower() in {'md', 'puml', 'dot', 'texi', 'texinfo', 'txi'}
+
+    def load(self, key: bytes, file: PurePath) -> Optional[Tuple[str, bytes]]:
+        return self._rendering_manager.render(file)
+
+
+class NoCache(Cache):
+    pass
+
+
+class InMemoryCache(Cache):
+    _cached_loader: Callable[[bytes, PurePath], Tuple[str, bytes]]
+
+    def __init__(self, rendering_manager: RenderingManager, max_size: int = 1024):
+        super().__init__(rendering_manager)
+
+        @lru_cache(maxsize=max_size)
+        def cached_loader(key: bytes, file: PurePath) -> Tuple[str, bytes]:
+            return super().load(key, file)
+
+        self._cached_loader = cached_loader
+
+    def load(self, key: bytes, file: PurePath) -> Optional[Tuple[str, bytes]]:
+        return self._cached_loader(key, file)
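Note: InMemoryCache builds the lru_cache wrapper inside __init__ so that every instance gets its own cache bounded by max_size. One observation, not a required change: the zero-argument super() inside the nested cached_loader may not bind the instance (the frame's first argument is key, not self), so an explicit base-class call is a safer way to express the same idea. A hypothetical variant as a sketch:

    from functools import lru_cache
    from pathlib import PurePath
    from typing import Optional, Tuple

    class ExplicitInMemoryCache(Cache):  # hypothetical variant of InMemoryCache above
        def __init__(self, rendering_manager: RenderingManager, max_size: int = 1024):
            super().__init__(rendering_manager)

            @lru_cache(maxsize=max_size)
            def cached_loader(key: bytes, file: PurePath) -> Optional[Tuple[str, bytes]]:
                return Cache.load(self, key, file)  # explicit base call instead of super()

            self._cached_loader = cached_loader

        def load(self, key: bytes, file: PurePath) -> Optional[Tuple[str, bytes]]:
            return self._cached_loader(key, file)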
server/src/bugis/server/cli.py (new file, +3)
@@ -0,0 +1,3 @@
+
+def main():
+    pass
server/src/bugis/server/renderer.py (new file, +16)
@@ -0,0 +1,16 @@
+from pathlib import PurePath
+from typing import Callable, Mapping, Optional, Tuple
+from pwo import Maybe
+
+
+class RenderingManager:
+    _renderers: Mapping[str, Callable[[PurePath], Tuple[str, bytes]]]
+
+    def __init__(self, renderers: Mapping[str, Callable[[PurePath], Tuple[str, bytes]]]):
+        self._renderers = renderers
+
+    def render(self, file: PurePath) -> Optional[Tuple[str, bytes]]:
+        return Maybe.of_nullable(self._renderers.get(file.suffix.lower())).map(lambda it: it(file)).or_none()
+
+    # def register(self, suffix: str, renderer: Callable[[PurePath], Tuple[str, bytes]]) -> None:
+    #     self._renderers[suffix.lower()] = renderer
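Note: RenderingManager looks renderers up by file.suffix.lower(), which includes the leading dot (e.g. '.md'), and each renderer returns a (content type, body) pair, matching how server.py later unpacks the cache result. A sketch of wiring one up, assuming the Markdown dependency declared in server/pyproject.toml:

    from pathlib import Path, PurePath
    from typing import Tuple
    import markdown

    def render_markdown(file: PurePath) -> Tuple[str, bytes]:
        html = markdown.markdown(Path(file).read_text())
        return 'text/html', html.encode()

    manager = RenderingManager({'.md': render_markdown})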
server/src/bugis/server/server.py (new file, +145)
@@ -0,0 +1,145 @@
+from base64 import b64encode, b64decode
+from hashlib import md5
+from mimetypes import guess_type
+from pathlib import Path, PurePath
+from typing import TYPE_CHECKING, Optional, Callable, Awaitable, AsyncGenerator, Any, Unpack, Mapping, Tuple
+
+from aiofiles.os import listdir
+from aiofiles.ospath import isdir, isfile
+from bugis.core import HttpContext, BugisApp
+from pwo import Maybe
+from .cache import Cache, InMemoryCache
+from .renderer import RenderingManager
+
+if TYPE_CHECKING:
+    from _typeshed import StrOrBytesPath
+
+
+def parse_etag(etag: str) -> Optional[str]:
+    def skip_weak_marker(s):
+        if s.startswith('W/'):
+            return s[2:]
+        else:
+            return s
+
+    return (
+        Maybe.of_nullable(etag)
+        .map(skip_weak_marker)
+        .or_else(None)
+    )
+
+
+def static_resources(app: BugisApp,
+                     path: str,
+                     root: 'StrOrBytesPath',
+                     favicon: PurePath = None,
+                     file_filter: Callable[[PurePath], Awaitable[bool]] = None,
+                     renderers: Mapping[str, Tuple[str, bytes]] = None,
+                     cache_ctor: Callable[[RenderingManager], Cache] = lambda rm: InMemoryCache(rm)
+                     ):
+    renderer = RenderingManager(renderers or {})
+    cache = cache_ctor(renderer)
+
+    async def no_filter(_: PurePath):
+        return True
+
+    if file_filter is None:
+        file_filter = no_filter
+
+    def compute_etag(resource: Path) -> str:
+        md = md5()
+        print(resource)
+        with resource.open('rb') as file:
+            while True:
+                chunk = file.read(0x10000)
+                if len(chunk):
+                    md.update(chunk)
+                else:
+                    break
+        return b64encode(md.digest()).decode()
+
+    if isinstance(root, str):
+        folder = Path(root)
+    else:
+        folder = root
+
+    prefix = PurePath(path)
+
+    async def handler(context: HttpContext, *_: Unpack[Any]) -> None:
+        requested = (PurePath(context.path)).relative_to(prefix)
+        resource = folder / requested
+        if not resource.is_relative_to(folder) or not resource.exists():
+            return await context.send_empty(404)
+
+        if await isfile(resource):
+
+            proposed_etag = ((Maybe.of_nullable(context.headers.get('if-none-match'))
+                              .filter(lambda it: len(it) > 0)
+                              .map(lambda it: it[-1])
+                              .map(parse_etag))
+                             .or_none())
+            current_etag = compute_etag(resource)
+
+            if proposed_etag == current_etag:
+                return await context.send_empty(304)
+            else:
+                cache_result = cache.get(b64decode(current_etag), resource)
+                if cache_result is None:
+                    mime_type = (Maybe.of(guess_type(resource.name))
+                                 .map(lambda it: it[0])
+                                 .or_else('application/octet-stream'))
+                    return await context.send_file(200, resource, {
+                        'content-type': mime_type or 'application/octet-stream',
+                        'etag': 'W/' + current_etag,
+                        'cache-control': 'no-cache',
+                    })
+                else:
+                    content_type, body = cache_result
+                    await context.send_bytes(200, body, {
+                        'content-type': content_type,
+                        'etag': 'W/' + current_etag,
+                        'cache-control': 'no-cache',
+                    })
+        elif isdir(resource):
+            headers = {
+                'content-type': 'text/html'
+            }
+            await context.send_str(200, await directory_listing(prefix, requested, resource, favicon, file_filter),
+                                   headers)
+
+    return app.GET(path, True)(handler)
+
+
+async def directory_listing(prefix: PurePath,
+                            path_info: PurePath,
+                            path: PurePath,
+                            favicon: PurePath,
+                            file_filter: Callable[[PurePath], Awaitable[bool]]) -> str:
+    title = "Directory listing for %s" % path_info
+    result = "<!DOCTYPE html><html><head>"
+    if favicon:
+        result += f'<link rel="icon" type="image/x-icon" href="{favicon}">'
+    result += "<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\">"
+    result += "<title>" + title + "</title></head>"
+    result += "<body><h1>" + title + "</h1><hr>"
+    result += "<ul>"
+    if path_info != '/':
+        result += "<li><a href=\"../\"/>../</li>"
+
+    async def ls(entry_filter: Callable[[PurePath], Awaitable[bool]]) -> AsyncGenerator[str, Any]:
+        async def result():
+            for entry in sorted(await listdir(path)):
+                if await entry_filter(path / entry):
+                    yield entry
+
+        return result()
+
+    async for entry in await ls(isdir):
+        result += '<li><a href="' + entry + '/' + '"/>' + entry + '/' + '</li>'
+
+    async def composite_file_filter(entry: PurePath) -> bool:
+        return await isfile(entry) and await file_filter(entry)
+
+    async for entry in await ls(composite_file_filter):
+        result += '<li><a href="' + entry + '"/>' + entry + '</li>'
+    return result
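Note: static_resources accepts an async file_filter predicate (defaulting to accept-everything); file entries in directory listings are only shown when it returns True. A sketch of a hypothetical filter that hides dotfiles, following the layout of server/example/test_server.py:

    from pathlib import PurePath
    from bugis.core import BugisApp
    from bugis.server.server import static_resources

    async def no_hidden_files(entry: PurePath) -> bool:
        return not entry.name.startswith('.')

    app = BugisApp()
    static_resources(app, '/view', '.', file_filter=no_hidden_files)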