-rw-r--r--  meson.build            7
-rw-r--r--  pypaste/__init__.py  236
-rw-r--r--  pypaste/__main__.py  236
3 files changed, 243 insertions(+), 236 deletions(-)
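
In short: this commit moves pygmentize, generate_key, AppConfig, and the App
class out of pypaste/__main__.py and into pypaste/__init__.py, so they are now
imported from the package root. A minimal sketch of the resulting imports,
taken directly from the new __main__.py below:

    from pypaste import App, AppConfig, s3, log_error, log_info
    from pypaste.database import Database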
diff --git a/meson.build b/meson.build
index 646a141..9bfcaca 100644
--- a/meson.build
+++ b/meson.build
@@ -4,6 +4,11 @@ python = import('python').find_installation(
modules: ['pygments', 'zstandard', 'aiohttp', 'bozo4'],
)
-sources = files('pypaste/__main__.py', 'pypaste/database.py', 'pypaste/s3.py')
+sources = files(
+ 'pypaste/__init__.py',
+ 'pypaste/__main__.py',
+ 'pypaste/database.py',
+ 'pypaste/s3.py',
+)
python.install_sources(sources, preserve_path: true)
diff --git a/pypaste/__init__.py b/pypaste/__init__.py
index ea50c40..9880337 100644
--- a/pypaste/__init__.py
+++ b/pypaste/__init__.py
@@ -1,5 +1,19 @@
import sys
-from datetime import datetime
+import asyncio
+import secrets
+import aiohttp
+import zstandard
+from pypaste import s3
+from pypaste.database import Database, PasteRow
+from hashlib import sha256
+from aiohttp import web
+from datetime import datetime, UTC
+from dataclasses import dataclass
+from typing import Optional, List
+from pygments import highlight
+from pygments.lexers import guess_lexer, get_lexer_by_name
+from pygments.formatters import HtmlFormatter
+from pygments.styles import get_style_by_name
RESET = "\x1b[0m"
RED = "\x1b[31m"
@@ -20,3 +34,223 @@ def log_warning(msg: str) -> None:
def log_error(msg: str) -> None:
now = datetime.now().isoformat()
print(f"{RED}[warning]{RESET} {now} {msg}", file=sys.stderr)
+
+
+def pygmentize(
+ content: str, syntax: Optional[str], style: str, line_numbers: str | bool
+) -> str:
+ if syntax is not None:
+ try:
+ lexer = get_lexer_by_name(syntax)
+        except Exception as e:
+            log_error(f"failed to find lexer for syntax {syntax}: {e}")
+ lexer = guess_lexer(content)
+ else:
+ lexer = guess_lexer(content)
+
+ try:
+ s = get_style_by_name(style)
+ except Exception as e:
+ log_error(f"failed to find style: {style}: {e}")
+ s = get_style_by_name("default")
+
+    formatter = HtmlFormatter(full=True, style=s, linenos=line_numbers)
+
+ return highlight(content, lexer, formatter)
+
+
+def generate_key(words: List[str], length: int) -> str:
+    choices = [secrets.choice(words) for _ in range(length)]
+    return "-".join(choices).lower()
+
+
+@dataclass
+class AppConfig:
+ site: str
+ content_length_max_bytes: int
+ s3_max_bytes: int
+ key_length: int
+ dictionary: List[str]
+ default_style: str
+ line_numbers: str | bool
+
+
+class App:
+
+ def __init__(
+ self,
+ config: AppConfig,
+ database: Database,
+ bucket: s3.Bucket,
+ ):
+ self.database = database
+ self.config = config
+ self.bucket = bucket
+
+ async def download(self, request: web.Request) -> web.Response:
+ try:
+ key = request.match_info["key"]
+ except KeyError:
+ return web.HTTPBadRequest(text="provide a key to fetch")
+
+ if not await self.database.exists(key):
+ log_info(f"paste {key} was not found, returning 404")
+ return web.HTTPNotFound()
+
+ req = self.bucket.get(key)
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.get(req.url, headers=req.headers) as get:
+                    if get.status == 200:
+                        data = await get.read()
+                    else:
+                        log_error(
+                            f"{self.bucket.endpoint} returned status ({get.status}) while fetching {key}"
+                        )
+                        return web.HTTPInternalServerError()
+        except Exception as e:
+            log_error(f"failed to get {key} from s3: {e}")
+            return web.HTTPInternalServerError()
+
+ def decompress():
+ return zstandard.decompress(data)
+
+ try:
+ decompressed = await asyncio.to_thread(decompress)
+ except Exception as e:
+ log_error(f"failed to decompress blob {key}: {e}")
+ return web.HTTPInternalServerError()
+
+ try:
+ text = decompressed.decode()
+ except Exception as e:
+ log_error(f"failed to decode blob: {key}: {e}")
+ return web.HTTPInternalServerError()
+
+ syntax = request.query.get("syntax")
+ raw = request.query.get("raw")
+
+ if (style := request.query.get("style")) is None:
+ style = self.config.default_style
+
+ if raw is not None:
+ log_info(f"sending raw paste {key}")
+
+ return web.HTTPOk(text=text, content_type="text/plain")
+ else:
+
+ def render():
+ return pygmentize(text, syntax, style, self.config.line_numbers)
+
+ highlighted = await asyncio.to_thread(render)
+
+ log_info(
+ f"sending rendered paste {key} with syntax {syntax} and style {style}"
+ )
+
+ return web.HTTPOk(text=highlighted, content_type="text/html")
+
+ async def upload(self, request: web.Request) -> web.Response:
+ syntax = request.query.get("syntax")
+
+ try:
+ await self.vacuum()
+ except Exception as e:
+ log_error(f"vacuum failed: {e}")
+ return web.HTTPInternalServerError()
+
+ match request.content_length:
+ case int(i) if i > self.config.content_length_max_bytes:
+ return web.HTTPBadRequest(
+ text=f"max content length is {self.config.content_length_max_bytes}"
+ )
+ case _:
+ pass
+
+ try:
+ data = await request.read()
+ except Exception as e:
+ log_error(f"failed to read data: {e}")
+ return web.HTTPInternalServerError(text="failed to read data")
+
+        try:
+            # Decode only to validate that the payload is text; the raw bytes
+            # are what get compressed and stored.
+            data.decode()
+ except UnicodeError:
+ return web.HTTPBadRequest(
+ text="content must be unicode only, no binary data is allowed"
+ )
+
+ def compress():
+ return zstandard.compress(data)
+
+ try:
+ compressed = await asyncio.to_thread(compress)
+ except Exception as e:
+ log_error(f"failed to compress data: {e}")
+ return web.HTTPInternalServerError()
+
+ key = generate_key(self.config.dictionary, self.config.key_length)
+
+ req = self.bucket.put(
+ key,
+ len(compressed),
+ "application/octet-stream",
+ sha256(compressed).hexdigest(),
+ )
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.put(
+                    req.url, headers=req.headers, data=compressed
+                ) as put:
+                    if put.status != 200:
+                        log_error(
+                            f"failed to put {key} to bucket with status: {put.status}"
+                        )
+                        return web.HTTPInternalServerError()
+        except Exception as e:
+            log_error(f"failed to put {key} to bucket: {e}")
+            return web.HTTPInternalServerError()
+
+ try:
+ await self.database.insert(
+ PasteRow(key, datetime.now(UTC), len(compressed), syntax)
+ )
+ except Exception as e:
+ log_error(f"failed to insert {key} into database: {e}")
+ return web.HTTPInternalServerError()
+
+ url = f"{self.config.site}/paste/{key}"
+
+ log_info(
+ f"uploaded paste {key} with syntax {syntax} of size {len(compressed)} bytes: {url}"
+ )
+
+ return web.HTTPOk(text=url)
+
+ async def vacuum(self):
+ log_info("starting vaccum")
+ while (
+ use := await self.database.storage_use()
+ ) is not None and use > self.config.s3_max_bytes:
+ oldest = await self.database.oldest()
+            # If use is not None there is at least one paste, so oldest cannot
+            # be None; the assert makes that explicit for the type checker.
+            assert oldest is not None
+
+ req = self.bucket.delete(oldest)
+
+            async with aiohttp.ClientSession() as session:
+                async with session.delete(req.url, headers=req.headers) as delete:
+                    if delete.status == 200:
+                        log_info(f"successfully deleted {oldest}")
+                    else:
+                        log_warning(
+                            f"failed to delete {oldest}: got status {delete.status}"
+                        )
+
+ await self.database.delete(oldest)
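
For orientation, a minimal sketch of how the relocated App class could be
mounted on an aiohttp application. Only the GET path is grounded in this diff
(it matches the /paste/{key} URL built in upload()); the POST path and the
make_web_app helper are assumptions, and constructing Database and s3.Bucket
is not shown here:

    from aiohttp import web
    from pypaste import App

    def make_web_app(app: App) -> web.Application:
        # Hypothetical wiring: GET serves rendered or raw pastes,
        # POST accepts uploads. The "/" upload path is an assumption.
        webapp = web.Application()
        webapp.add_routes(
            [
                web.get("/paste/{key}", app.download),
                web.post("/", app.upload),
            ]
        )
        return webapp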
diff --git a/pypaste/__main__.py b/pypaste/__main__.py
index bd098c6..374e32a 100644
--- a/pypaste/__main__.py
+++ b/pypaste/__main__.py
@@ -16,244 +16,12 @@
import sys
import os
import asyncio
-import secrets
-import aiohttp
-import zstandard
-from pypaste import s3
-from pypaste.database import Database, PasteRow
-from pypaste import log_error, log_warning, log_info
+from pypaste.database import Database
+from pypaste import App, AppConfig, s3, log_error, log_info
from socket import socket, AF_UNIX, SOCK_STREAM
-from hashlib import sha256
from argparse import ArgumentParser
from aiohttp import web
-from datetime import datetime, UTC
from pathlib import Path
-from dataclasses import dataclass
-from typing import Optional, List
-from pygments import highlight
-from pygments.lexers import guess_lexer, get_lexer_by_name
-from pygments.formatters import HtmlFormatter
-from pygments.styles import get_style_by_name
-
-
-def pygmentize(
- content: str, syntax: Optional[str], style: str, line_numbers: str | bool
-) -> str:
- if syntax is not None:
- try:
- lexer = get_lexer_by_name(syntax)
- except Exception as e:
- print(e, file=sys.stderr)
- lexer = guess_lexer(content)
- else:
- lexer = guess_lexer(content)
-
- try:
- s = get_style_by_name(style)
- except Exception as e:
- log_error(f"failed to find style: {style}: {e}")
- s = get_style_by_name("default")
-
- formatter = HtmlFormatter(full=True, style=s, linenos="table")
-
- return highlight(content, lexer, formatter)
-
-
-def generate_key(words: List[str], length: int) -> str:
- choices = []
- for _ in range(length):
- choices.append(secrets.choice(words))
-
- return "-".join(word for word in choices).lower()
-
-
-@dataclass
-class AppConfig:
- site: str
- content_length_max_bytes: int
- s3_max_bytes: int
- key_length: int
- dictionary: List[str]
- default_style: str
- line_numbers: str | bool
-
-
-class App:
-
- def __init__(
- self,
- config: AppConfig,
- database: Database,
- bucket: s3.Bucket,
- ):
- self.database = database
- self.config = config
- self.bucket = bucket
-
- async def download(self, request: web.Request) -> web.Response:
- try:
- key = request.match_info["key"]
- except KeyError:
- return web.HTTPBadRequest(text="provide a key to fetch")
-
- if not await self.database.exists(key):
- log_info(f"paste {key} was not found, returning 404")
- return web.HTTPNotFound()
-
- req = self.bucket.get(key)
-
- try:
- async with aiohttp.ClientSession().get(req.url, headers=req.headers) as get:
- if get.status == 200:
- data = await get.read()
- else:
- log_error(
- f"{self.bucket.endpoint} returned status ({get.status}) while fetching {key}"
- )
-
- return web.HTTPInternalServerError()
- except Exception as e:
- log_error(f"failed to get {key} from s3: {e}")
- return web.HTTPInternalServerError()
-
- def decompress():
- return zstandard.decompress(data)
-
- try:
- decompressed = await asyncio.to_thread(decompress)
- except Exception as e:
- log_error(f"failed to decompress blob {key}: {e}")
- return web.HTTPInternalServerError()
-
- try:
- text = decompressed.decode()
- except Exception as e:
- log_error(f"failed to decode blob: {key}: {e}")
- return web.HTTPInternalServerError()
-
- syntax = request.query.get("syntax")
- raw = request.query.get("raw")
-
- if (style := request.query.get("style")) is None:
- style = self.config.default_style
-
- if raw is not None:
- log_info(f"sending raw paste {key}")
-
- return web.HTTPOk(text=text, content_type="text/plain")
- else:
-
- def render():
- return pygmentize(text, syntax, style, self.config.line_numbers)
-
- highlighted = await asyncio.to_thread(render)
-
- log_info(
- f"sending rendered paste {key} with syntax {syntax} and style {style}"
- )
-
- return web.HTTPOk(text=highlighted, content_type="text/html")
-
- async def upload(self, request: web.Request) -> web.Response:
- syntax = request.query.get("syntax")
-
- try:
- await self.vacuum()
- except Exception as e:
- log_error(f"vacuum failed: {e}")
- return web.HTTPInternalServerError()
-
- match request.content_length:
- case int(i) if i > self.config.content_length_max_bytes:
- return web.HTTPBadRequest(
- text=f"max content length is {self.config.content_length_max_bytes}"
- )
- case _:
- pass
-
- try:
- data = await request.read()
- except Exception as e:
- log_error(f"failed to read data: {e}")
- return web.HTTPInternalServerError(text="failed to read data")
-
- try:
- data.decode()
- except UnicodeError:
- return web.HTTPBadRequest(
- text="content must be unicode only, no binary data is allowed"
- )
-
- def compress():
- return zstandard.compress(data)
-
- try:
- compressed = await asyncio.to_thread(compress)
- except Exception as e:
- log_error(f"failed to compress data: {e}")
- return web.HTTPInternalServerError()
-
- key = generate_key(self.config.dictionary, self.config.key_length)
-
- req = self.bucket.put(
- key,
- len(compressed),
- "application/octet-stream",
- sha256(compressed).hexdigest(),
- )
-
- try:
- async with aiohttp.ClientSession().put(
- req.url, headers=req.headers, data=compressed
- ) as put:
- if put.status != 200:
- log_error(
- f"failed to put {key} to bucket with status: {put.status}"
- )
- return web.HTTPInternalServerError()
- except Exception as e:
- log_error(f"failed to put {key} to bucket: {e}")
- return web.HTTPInternalServerError()
-
- try:
- await self.database.insert(
- PasteRow(key, datetime.now(UTC), len(compressed), syntax)
- )
- except Exception as e:
- log_error(f"failed to insert {key} into database: {e}")
- return web.HTTPInternalServerError()
-
- url = f"{self.config.site}/paste/{key}"
-
- log_info(
- f"uploaded paste {key} with syntax {syntax} of size {len(compressed)} bytes: {url}"
- )
-
- return web.HTTPOk(text=url)
-
- async def vacuum(self):
- log_info("starting vaccum")
- while (
- use := await self.database.storage_use()
- ) is not None and use > self.config.s3_max_bytes:
- oldest = await self.database.oldest()
- # If use is not None, there must be at least 1 paste, so we let the
- # type checker know that this is an unreachable case.
- assert oldest is not None
-
- req = self.bucket.delete(oldest)
-
- async with aiohttp.ClientSession().delete(
- req.url, headers=req.headers
- ) as delete:
- if delete.status == 200:
- log_info(f"successfully deleted {oldest}")
- else:
- log_warning(
- f"failed to delete {oldest}: got status {delete.status}",
- )
-
- await self.database.delete(oldest)
async def main() -> int: