2b33bfd78f
Apply the require_permission decorator to all 28 routes that take a
{thread_id} path parameter. Combined with the strict middleware
(previous commit), this gives the double-layer protection that
AUTH_TEST_PLAN test 7.5.9 documents:
Layer 1 (AuthMiddleware): cookie + JWT validation, rejects junk
cookies and stamps request.state.user
Layer 2 (@require_permission with owner_check=True): per-resource
ownership verification via
ThreadMetaStore.check_access — returns
404 if a different user owns the thread
The decorator's owner_check branch is rewritten to use the SQL
thread_meta_repo (the 2.0-rc persistence layer) instead of the
LangGraph store path that PR #1728 used (_store_get / get_store
in routers/threads.py). The inject_record convenience is dropped
— no caller in 2.0 needs the LangGraph blob, and the SQL repo has
a different shape.
Routes decorated (28 total):
- threads.py: delete, patch, get, get-state, post-state, post-history
- thread_runs.py: post-runs, post-runs-stream, post-runs-wait,
list_runs, get_run, cancel_run, join_run, stream_existing_run,
list_thread_messages, list_run_messages, list_run_events,
thread_token_usage
- feedback.py: create, list, stats, delete
- uploads.py: upload (added Request param), list, delete
- artifacts.py: get_artifact
- suggestions.py: generate (renamed body parameter to avoid
conflict with FastAPI Request)
Test fixes:
- test_suggestions_router.py: bypass the decorator via __wrapped__
(the unit tests cover parsing logic, not auth — no point spinning
up a thread_meta_repo just to test JSON unwrapping)
- test_auth_middleware.py 4 fake-cookie tests: already updated in
the previous commit (745bf432)
Tests: 293 passed (auth + persistence + isolation + suggestions)
Lint: clean
174 lines
6.3 KiB
Python
"""Upload router for handling file uploads."""
|
|
|
|
import logging
|
|
import os
|
|
import stat
|
|
|
|
from fastapi import APIRouter, File, HTTPException, Request, UploadFile
|
|
from pydantic import BaseModel
|
|
|
|
from app.gateway.authz import require_permission
|
|
from deerflow.config.paths import get_paths
|
|
from deerflow.sandbox.sandbox_provider import get_sandbox_provider
|
|
from deerflow.uploads.manager import (
|
|
PathTraversalError,
|
|
delete_file_safe,
|
|
enrich_file_listing,
|
|
ensure_uploads_dir,
|
|
get_uploads_dir,
|
|
list_files_in_dir,
|
|
normalize_filename,
|
|
upload_artifact_url,
|
|
upload_virtual_path,
|
|
)
|
|
from deerflow.utils.file_conversion import CONVERTIBLE_EXTENSIONS, convert_file_to_markdown
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
router = APIRouter(prefix="/api/threads/{thread_id}/uploads", tags=["uploads"])
|
|
|
|
|
|
class UploadResponse(BaseModel):
    """Payload returned by the upload endpoint."""

    # True when every accepted file was persisted without error.
    success: bool
    # One metadata dict per stored file (filename, size, paths, artifact URLs).
    files: list[dict[str, str]]
    # Human-readable summary of the upload outcome.
    message: str
|
|
|
|
|
|
def _make_file_sandbox_writable(file_path: os.PathLike[str] | str) -> None:
|
|
"""Ensure uploaded files remain writable when mounted into non-local sandboxes.
|
|
|
|
In AIO sandbox mode, the gateway writes the authoritative host-side file
|
|
first, then the sandbox runtime may rewrite the same mounted path. Granting
|
|
world-writable access here prevents permission mismatches between the
|
|
gateway user and the sandbox runtime user.
|
|
"""
|
|
file_stat = os.lstat(file_path)
|
|
if stat.S_ISLNK(file_stat.st_mode):
|
|
logger.warning("Skipping sandbox chmod for symlinked upload path: %s", file_path)
|
|
return
|
|
|
|
writable_mode = stat.S_IMODE(file_stat.st_mode) | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
|
|
chmod_kwargs = {"follow_symlinks": False} if os.chmod in os.supports_follow_symlinks else {}
|
|
os.chmod(file_path, writable_mode, **chmod_kwargs)
|
|
|
|
|
|
@router.post("", response_model=UploadResponse)
@require_permission("threads", "write", owner_check=True)
async def upload_files(
    thread_id: str,
    request: Request,
    files: list[UploadFile] = File(...),
) -> UploadResponse:
    """Upload multiple files to a thread's uploads directory.

    The authoritative copy is written host-side first; when a non-local
    sandbox is attached, the same bytes are mirrored into the sandbox and the
    host file is made sandbox-writable. Files whose names cannot be
    normalized are skipped (best-effort) rather than failing the whole batch.

    Args:
        thread_id: Owning thread; selects the uploads directory.
        request: Consumed by @require_permission for the ownership check.
        files: Multipart file parts; parts without a filename are ignored.

    Returns:
        UploadResponse with metadata for every file actually stored.

    Raises:
        HTTPException: 400 when no files are provided or the thread id is
            invalid; 500 when persisting or mirroring a file fails.
    """
    if not files:
        raise HTTPException(status_code=400, detail="No files provided")

    try:
        uploads_dir = ensure_uploads_dir(thread_id)
    except ValueError as e:
        # Chain the cause so the original validation error stays in tracebacks.
        raise HTTPException(status_code=400, detail=str(e)) from e

    sandbox_uploads = get_paths().sandbox_uploads_dir(thread_id)
    uploaded_files: list[dict[str, str]] = []

    sandbox_provider = get_sandbox_provider()
    sandbox_id = sandbox_provider.acquire(thread_id)
    sandbox = sandbox_provider.get(sandbox_id)

    for file in files:
        if not file.filename:
            # Multipart part without a name — nothing to store.
            continue

        try:
            safe_filename = normalize_filename(file.filename)
        except ValueError:
            # Best-effort batch semantics: skip unsafe names, keep going.
            logger.warning("Skipping file with unsafe filename: %r", file.filename)
            continue

        try:
            content = await file.read()
            file_path = uploads_dir / safe_filename
            file_path.write_bytes(content)

            virtual_path = upload_virtual_path(safe_filename)

            if sandbox_id != "local":
                # Non-local sandbox: keep the mounted path writable for the
                # sandbox runtime user and mirror the bytes into the sandbox.
                _make_file_sandbox_writable(file_path)
                sandbox.update_file(virtual_path, content)

            file_info = {
                "filename": safe_filename,
                "size": str(len(content)),
                "path": str(sandbox_uploads / safe_filename),
                "virtual_path": virtual_path,
                "artifact_url": upload_artifact_url(thread_id, safe_filename),
            }

            logger.info(
                "Saved file: %s (%d bytes) to %s",
                safe_filename,
                len(content),
                file_info["path"],
            )

            # Produce a markdown companion for convertible file types so
            # downstream consumers can work with text.
            file_ext = file_path.suffix.lower()
            if file_ext in CONVERTIBLE_EXTENSIONS:
                md_path = await convert_file_to_markdown(file_path)
                if md_path:
                    md_virtual_path = upload_virtual_path(md_path.name)

                    if sandbox_id != "local":
                        _make_file_sandbox_writable(md_path)
                        sandbox.update_file(md_virtual_path, md_path.read_bytes())

                    file_info["markdown_file"] = md_path.name
                    file_info["markdown_path"] = str(sandbox_uploads / md_path.name)
                    file_info["markdown_virtual_path"] = md_virtual_path
                    file_info["markdown_artifact_url"] = upload_artifact_url(
                        thread_id, md_path.name
                    )

            uploaded_files.append(file_info)

        except Exception as e:
            logger.error("Failed to upload %s: %s", file.filename, e)
            raise HTTPException(
                status_code=500, detail=f"Failed to upload {file.filename}: {str(e)}"
            ) from e

    return UploadResponse(
        success=True,
        files=uploaded_files,
        message=f"Successfully uploaded {len(uploaded_files)} file(s)",
    )
|
|
|
|
|
|
@router.get("/list", response_model=dict)
@require_permission("threads", "read", owner_check=True)
async def list_uploaded_files(thread_id: str, request: Request) -> dict:
    """List all files in a thread's uploads directory.

    Args:
        thread_id: Thread whose uploads directory is listed.
        request: Consumed by @require_permission for the ownership check.

    Returns:
        The listing from list_files_in_dir, enriched via enrich_file_listing
        and extended with a sandbox-relative "path" per file.

    Raises:
        HTTPException: 400 when the thread id is invalid.
    """
    try:
        uploads_dir = get_uploads_dir(thread_id)
    except ValueError as e:
        # Chain the cause so the original validation error stays in tracebacks.
        raise HTTPException(status_code=400, detail=str(e)) from e

    result = list_files_in_dir(uploads_dir)
    enrich_file_listing(result, thread_id)

    # Gateway additionally includes the sandbox-relative path.
    sandbox_uploads = get_paths().sandbox_uploads_dir(thread_id)
    for f in result["files"]:
        f["path"] = str(sandbox_uploads / f["filename"])

    return result
|
|
|
|
|
|
@router.delete("/{filename}")
@require_permission("threads", "delete", owner_check=True)
async def delete_uploaded_file(thread_id: str, filename: str, request: Request) -> dict:
    """Delete a file from a thread's uploads directory.

    The route path must expose ``{filename}`` as a path parameter; otherwise
    FastAPI never binds the handler's ``filename`` argument.

    Args:
        thread_id: Thread owning the uploads directory.
        filename: Name of the file to delete (validated against traversal
            by delete_file_safe).
        request: Consumed by @require_permission for the ownership check.

    Returns:
        The result dict from delete_file_safe.

    Raises:
        HTTPException: 400 for an invalid thread id or a traversal attempt,
            404 when the file does not exist, 500 on any other failure.
    """
    try:
        uploads_dir = get_uploads_dir(thread_id)
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e)) from e

    try:
        # delete_file_safe also removes any markdown companion produced for
        # convertible file types.
        return delete_file_safe(uploads_dir, filename, convertible_extensions=CONVERTIBLE_EXTENSIONS)
    except FileNotFoundError as e:
        raise HTTPException(status_code=404, detail=f"File not found: {filename}") from e
    except PathTraversalError as e:
        raise HTTPException(status_code=400, detail="Invalid path") from e
    except Exception as e:
        logger.error("Failed to delete %s: %s", filename, e)
        raise HTTPException(status_code=500, detail=f"Failed to delete {filename}: {str(e)}") from e
|