Files
training-software/api/storage.py
Paperclip CTO 8054c1e1e4 feat(TRA-233): Django M1 foundation scaffold
- Environment-split settings: base/local/test/prod with django-environ
- Postgres + Redis + Celery wiring (broker, beat, result backend)
- All 9 domain app stubs: accounts, courses, cms, tracking, quizzes,
  training, certificates, reports, notifications
- api app: /healthz/ endpoint, custom DRF exception handler,
  SecurityAuditMiddleware, permissions/throttle/upload-validation stubs
- DRF global baseline: JWT+session auth, closed-by-default permissions,
  cursor/page pagination, drf-spectacular schema generation (a sketch
  follows this list)
- Dockerfile (multi-env build arg), docker-compose.yml (local),
  docker-compose.test.yml (CI-friendly tmpfs Postgres)
- pytest.ini with smoke + settings marker definitions
- tests/test_smoke.py: startup, URL resolution, healthcheck shape
- tests/test_settings_matrix.py: per-profile security assertions
- .github/workflows/ci.yml: test, lint, schema CI jobs
- .env.example with all required vars documented
- .gitignore
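
  A minimal sketch of what a DRF baseline of this shape typically looks
  like in settings; the concrete classes and page size are illustrative
  assumptions, not necessarily the values committed (JWT via
  djangorestframework-simplejwt is likewise an assumption):

    REST_FRAMEWORK = {
        "DEFAULT_AUTHENTICATION_CLASSES": [
            "rest_framework_simplejwt.authentication.JWTAuthentication",
            "rest_framework.authentication.SessionAuthentication",
        ],
        # Closed by default: each view must opt in to anything more open.
        "DEFAULT_PERMISSION_CLASSES": [
            "rest_framework.permissions.IsAuthenticated",
        ],
        "DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.CursorPagination",
        "PAGE_SIZE": 50,
        "DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
    }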

Co-Authored-By: Paperclip <noreply@paperclip.ing>
2026-05-07 09:11:23 +02:00

104 lines
3.3 KiB
Python

from __future__ import annotations

import logging
from typing import Any

import boto3
from botocore.exceptions import ClientError, NoCredentialsError
from django.conf import settings

logger = logging.getLogger(__name__)

# Default upload URL validity window (15 minutes is enough for client-side JS)
_UPLOAD_URL_EXPIRY_SECONDS = 900


def generate_presigned_upload_url(
    s3_key: str,
    content_type: str,
    max_size_bytes: int | None = None,
    expiration: int = _UPLOAD_URL_EXPIRY_SECONDS,
) -> dict[str, Any]:
    """Return a presigned S3 POST policy for a direct client-to-S3 upload.

    The policy enforces content-type and byte-length on the S3 side so the
    application server never touches the raw file bytes.

    Args:
        s3_key: Destination key inside the configured bucket.
        content_type: Exact MIME type the client must declare in the upload.
        max_size_bytes: Maximum payload size enforced by the S3 policy.
            Defaults to ``settings.MAX_UPLOAD_SIZE_BYTES``.
        expiration: Seconds until the presigned URL expires.

    Returns:
        Dict with ``url`` and ``fields`` suitable for a multipart POST upload.

    Raises:
        RuntimeError: If the presigned URL cannot be generated.
    """
    if max_size_bytes is None:
        max_size_bytes = getattr(settings, "MAX_UPLOAD_SIZE_BYTES", 100 * 1024 * 1024)
    bucket = getattr(settings, "AWS_STORAGE_BUCKET_NAME", None)
    region = getattr(settings, "AWS_S3_REGION_NAME", "eu-central-1")
    if not bucket:
        raise RuntimeError("AWS_STORAGE_BUCKET_NAME is not configured.")
    client = boto3.client("s3", region_name=region)
    conditions: list[Any] = [
        {"Content-Type": content_type},
        ["content-length-range", 1, max_size_bytes],
    ]
    fields = {"Content-Type": content_type}
    try:
        response = client.generate_presigned_post(
            Bucket=bucket,
            Key=s3_key,
            Fields=fields,
            Conditions=conditions,
            ExpiresIn=expiration,
        )
    except (ClientError, NoCredentialsError) as exc:
        logger.error("Failed to generate presigned upload URL for key=%s: %s", s3_key, exc)
        raise RuntimeError("Could not generate upload URL.") from exc
    return response
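
# Usage sketch (illustrative, not part of this commit): a server-side caller
# exercising the returned policy with ``requests``; in production the browser
# performs this POST. The key, filename, and MIME type are assumptions.
#
#     import requests
#
#     policy = generate_presigned_upload_url(
#         s3_key="uploads/2026/report.pdf",
#         content_type="application/pdf",
#     )
#     with open("report.pdf", "rb") as fh:
#         resp = requests.post(
#             policy["url"],
#             data=policy["fields"],  # policy fields must precede the file part
#             files={"file": ("report.pdf", fh, "application/pdf")},
#         )
#     resp.raise_for_status()  # S3 answers 204 No Content on success by default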


def generate_presigned_download_url(
    s3_key: str,
    expiration: int = 3600,
) -> str:
    """Return a short-lived presigned GET URL for a private S3 object.

    Args:
        s3_key: Key of the object inside the configured bucket.
        expiration: Seconds until the URL expires (default 1 hour).

    Returns:
        Presigned URL string.

    Raises:
        RuntimeError: If the bucket is not configured or the URL cannot be
            generated.
    """
    bucket = getattr(settings, "AWS_STORAGE_BUCKET_NAME", None)
    region = getattr(settings, "AWS_S3_REGION_NAME", "eu-central-1")
    if not bucket:
        raise RuntimeError("AWS_STORAGE_BUCKET_NAME is not configured.")
    client = boto3.client("s3", region_name=region)
    try:
        url = client.generate_presigned_url(
            "get_object",
            Params={"Bucket": bucket, "Key": s3_key},
            ExpiresIn=expiration,
        )
    except (ClientError, NoCredentialsError) as exc:
        logger.error("Failed to generate presigned download URL for key=%s: %s", s3_key, exc)
        raise RuntimeError("Could not generate download URL.") from exc
    return url
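
# Usage sketch (illustrative, not part of this commit): handing a private
# object to an authenticated client by redirecting to the presigned URL.
# ``certificate.pdf_key`` is a hypothetical attribute for illustration.
#
#     from django.shortcuts import redirect
#
#     def download_certificate(request, certificate):
#         url = generate_presigned_download_url(certificate.pdf_key, expiration=300)
#         return redirect(url)  # the client fetches the bytes straight from S3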