feat(10-01): KB ingestion pipeline - migration, extractors, API router
- Migration 014: add status/error_message/chunk_count to kb_documents; make agent_id nullable
- Add GOOGLE_CALENDAR to ChannelTypeEnum in tenant.py
- Add brave_api_key, firecrawl_api_key, google_client_id/secret, minio_kb_bucket to config
- Add text extractors for PDF, DOCX, PPTX, XLSX/XLS, CSV, TXT, MD
- Add KB management API router with upload, list, delete, URL ingest, reindex endpoints
- Install pypdf, python-docx, python-pptx, openpyxl, pandas, firecrawl-py, youtube-transcript-api
- Update .env.example with new env vars
- Unit tests: test_extractors.py (10 tests) and test_kb_upload.py (7 tests) all pass
This commit is contained in:
201
tests/unit/test_extractors.py
Normal file
201
tests/unit/test_extractors.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""
|
||||
Unit tests for orchestrator.tools.extractors.
|
||||
|
||||
Tests that each document format produces expected text output, and that
|
||||
unsupported formats raise ValueError.
|
||||
|
||||
All test fixtures are constructed in-memory using the same libraries that
|
||||
the extractor uses — no external files needed.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import csv
|
||||
import io
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers to build minimal valid files in memory
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _make_pdf_bytes(text: str) -> bytes:
|
||||
"""Create a minimal valid PDF with one page containing the given text."""
|
||||
from pypdf import PdfWriter
|
||||
|
||||
writer = PdfWriter()
|
||||
page = writer.add_blank_page(width=200, height=200)
|
||||
writer.add_page(page)
|
||||
buf = io.BytesIO()
|
||||
writer.write(buf)
|
||||
|
||||
# Build a simple PDF manually since pypdf cannot add text without a font
|
||||
# Instead, use reportlab if available, fall back to a minimal hand-crafted PDF
|
||||
try:
|
||||
from reportlab.pdfgen import canvas as rl_canvas
|
||||
|
||||
buf2 = io.BytesIO()
|
||||
c = rl_canvas.Canvas(buf2)
|
||||
c.drawString(10, 100, text)
|
||||
c.save()
|
||||
return buf2.getvalue()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Hand-crafted minimal PDF with embedded text stream
|
||||
content_stream = f"BT /F1 12 Tf 50 700 Td ({text}) Tj ET"
|
||||
stream_bytes = content_stream.encode()
|
||||
pdf = (
|
||||
b"%PDF-1.4\n"
|
||||
b"1 0 obj\n<< /Type /Catalog /Pages 2 0 R >>\nendobj\n"
|
||||
b"2 0 obj\n<< /Type /Pages /Kids [3 0 R] /Count 1 >>\nendobj\n"
|
||||
b"3 0 obj\n<< /Type /Page /Parent 2 0 R /MediaBox [0 0 612 792]"
|
||||
b" /Contents 4 0 R /Resources << /Font << /F1 << /Type /Font"
|
||||
b" /Subtype /Type1 /BaseFont /Helvetica >> >> >> >>\nendobj\n"
|
||||
b"4 0 obj\n<< /Length " + str(len(stream_bytes)).encode() + b" >>\n"
|
||||
b"stream\n" + stream_bytes + b"\nendstream\nendobj\n"
|
||||
b"xref\n0 5\n0000000000 65535 f \n"
|
||||
b"trailer\n<< /Size 5 /Root 1 0 R >>\nstartxref\n0\n%%EOF"
|
||||
)
|
||||
return pdf
|
||||
|
||||
|
||||
def _make_docx_bytes(paragraphs: list[str]) -> bytes:
    """Build an in-memory DOCX whose body holds one paragraph per entry."""
    from docx import Document

    document = Document()
    for paragraph_text in paragraphs:
        document.add_paragraph(paragraph_text)

    stream = io.BytesIO()
    document.save(stream)
    return stream.getvalue()
|
||||
|
||||
|
||||
def _make_pptx_bytes(slide_texts: list[str]) -> bytes:
    """Build an in-memory PPTX containing one text-box slide per entry."""
    from pptx import Presentation
    from pptx.util import Inches

    deck = Presentation()
    layout = deck.slide_layouts[6]  # index 6 is the blank layout
    for slide_text in slide_texts:
        new_slide = deck.slides.add_slide(layout)
        box = new_slide.shapes.add_textbox(Inches(1), Inches(1), Inches(4), Inches(2))
        box.text_frame.text = slide_text

    stream = io.BytesIO()
    deck.save(stream)
    return stream.getvalue()
|
||||
|
||||
|
||||
def _make_xlsx_bytes(rows: list[list[str]]) -> bytes:
    """Build an in-memory XLSX workbook whose active sheet holds the rows."""
    import openpyxl

    workbook = openpyxl.Workbook()
    sheet = workbook.active
    for record in rows:
        sheet.append(record)

    stream = io.BytesIO()
    workbook.save(stream)
    return stream.getvalue()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestExtractTextDocx:
    def test_extracts_paragraph_text(self) -> None:
        """Every paragraph of a DOCX should appear in the extracted text."""
        from orchestrator.tools.extractors import extract_text

        payload = _make_docx_bytes(["Hello world", "Second paragraph"])
        extracted = extract_text("document.docx", payload)

        assert "Hello world" in extracted
        assert "Second paragraph" in extracted

    def test_empty_docx_returns_string(self) -> None:
        """A DOCX with no paragraphs should still yield a str result."""
        from orchestrator.tools.extractors import extract_text

        payload = _make_docx_bytes([])
        extracted = extract_text("empty.docx", payload)

        assert isinstance(extracted, str)
|
||||
|
||||
|
||||
class TestExtractTextPptx:
    def test_extracts_slide_text(self) -> None:
        """Text from every slide should appear in the extracted output."""
        from orchestrator.tools.extractors import extract_text

        payload = _make_pptx_bytes(["Slide one content", "Slide two content"])
        extracted = extract_text("slides.pptx", payload)

        assert "Slide one content" in extracted
        assert "Slide two content" in extracted
|
||||
|
||||
|
||||
class TestExtractTextXlsx:
    def test_extracts_cell_data_as_csv(self) -> None:
        """Header and data cells of an XLSX should survive extraction."""
        from orchestrator.tools.extractors import extract_text

        payload = _make_xlsx_bytes([["Name", "Age"], ["Alice", "30"], ["Bob", "25"]])
        extracted = extract_text("data.xlsx", payload)

        assert "Name" in extracted
        assert "Alice" in extracted
        assert "Bob" in extracted
|
||||
|
||||
|
||||
class TestExtractTextCsv:
    def test_extracts_csv_text(self) -> None:
        """Plain UTF-8 CSV content should pass through extraction intact."""
        from orchestrator.tools.extractors import extract_text

        payload = "col1,col2\nval1,val2\n".encode("utf-8")
        extracted = extract_text("data.csv", payload)

        assert "col1" in extracted
        assert "val1" in extracted

    def test_handles_non_utf8_gracefully(self) -> None:
        """Invalid UTF-8 bytes must not crash; decodable text survives."""
        from orchestrator.tools.extractors import extract_text

        extracted = extract_text("data.csv", b"hello\xff world")

        assert "hello" in extracted
|
||||
|
||||
|
||||
class TestExtractTextTxt:
    def test_extracts_plain_text(self) -> None:
        """A .txt file's decoded content should appear verbatim."""
        from orchestrator.tools.extractors import extract_text

        extracted = extract_text("notes.txt", b"Hello, this is a plain text file.")

        assert "Hello, this is a plain text file." in extracted
|
||||
|
||||
|
||||
class TestExtractTextMarkdown:
    def test_extracts_markdown_text(self) -> None:
        """Heading and body text of a .md file should both be extracted."""
        from orchestrator.tools.extractors import extract_text

        extracted = extract_text("notes.md", b"# Heading\n\nSome paragraph text here.")

        assert "Heading" in extracted
        assert "Some paragraph text here." in extracted
|
||||
|
||||
|
||||
class TestExtractTextUnsupported:
    def test_raises_value_error_for_unsupported_extension(self) -> None:
        """An unknown extension such as .exe must raise ValueError."""
        from orchestrator.tools.extractors import extract_text

        with pytest.raises(ValueError, match="Unsupported file extension"):
            extract_text("file.exe", b"some bytes")

    def test_raises_for_zip(self) -> None:
        """ZIP archives are not a supported document format."""
        from orchestrator.tools.extractors import extract_text

        with pytest.raises(ValueError, match="Unsupported file extension"):
            extract_text("archive.zip", b"PK\x03\x04")
|
||||
278
tests/unit/test_kb_upload.py
Normal file
278
tests/unit/test_kb_upload.py
Normal file
@@ -0,0 +1,278 @@
|
||||
"""
|
||||
Unit tests for the KB upload API router.
|
||||
|
||||
Tests:
|
||||
- POST /{tenant_id}/documents — file upload returns 201 with document_id
|
||||
- GET /{tenant_id}/documents — list returns documents with status field
|
||||
- DELETE /{tenant_id}/documents/{doc_id} — removes document
|
||||
- POST /{tenant_id}/documents/url — URL ingest dispatches Celery task
|
||||
- POST /{tenant_id}/documents/{doc_id}/reindex — re-dispatches Celery task
|
||||
|
||||
All external dependencies (MinIO, DB, Celery) are mocked.
|
||||
Auth dependencies are overridden via FastAPI app.dependency_overrides.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from fastapi import FastAPI
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
|
||||
from shared.api.rbac import require_tenant_admin, require_tenant_member
|
||||
from shared.db import get_session
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
TENANT_ID = str(uuid.uuid4())
|
||||
DOC_ID = uuid.uuid4()
|
||||
|
||||
|
||||
def _make_mock_caller() -> MagicMock:
|
||||
caller = MagicMock()
|
||||
caller.tenant_id = uuid.UUID(TENANT_ID)
|
||||
caller.role = "admin"
|
||||
return caller
|
||||
|
||||
|
||||
def _make_test_app(mock_session: AsyncMock) -> FastAPI:
    """Build a throwaway FastAPI app mounting kb_router with fake auth/DB deps."""
    from shared.api.kb import kb_router

    app = FastAPI()
    app.include_router(kb_router)

    # Swap the JWT-backed auth dependencies for a canned admin caller.
    caller = _make_mock_caller()
    app.dependency_overrides[require_tenant_admin] = lambda: caller
    app.dependency_overrides[require_tenant_member] = lambda: caller

    # Swap the real DB session dependency for the supplied mock.
    async def _yield_mock_session() -> AsyncMock:  # type: ignore[return]
        yield mock_session

    app.dependency_overrides[get_session] = _yield_mock_session

    return app
|
||||
|
||||
|
||||
@pytest.fixture
def mock_session() -> AsyncMock:
    """DB session stub: async flush/commit/delete, synchronous add."""
    stub = AsyncMock()
    stub.flush = AsyncMock()
    stub.commit = AsyncMock()
    stub.delete = AsyncMock()
    # `add` is stubbed with a plain MagicMock (it is not awaited by callers).
    stub.add = MagicMock()
    return stub
|
||||
|
||||
|
||||
@pytest.fixture
def mock_doc() -> MagicMock:
    """A stand-in kb_documents row, still in the 'processing' state."""
    document = MagicMock()
    document.id = DOC_ID
    document.tenant_id = uuid.UUID(TENANT_ID)
    document.status = "processing"
    document.filename = "test.txt"
    document.content_type = "text/plain"
    document.source_url = None
    document.chunk_count = None
    document.created_at = datetime(2026, 1, 1, 12, 0, 0)
    return document
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestKbUploadEndpoint:
    @pytest.mark.asyncio
    async def test_upload_file_returns_201(self, mock_session: AsyncMock) -> None:
        """A supported file upload yields 201 plus id/filename/status fields."""

        def _assign_db_fields(obj: Any) -> None:
            # Simulate the DB populating server-generated columns.
            obj.id = DOC_ID
            obj.created_at = datetime(2026, 1, 1, 12, 0, 0)

        mock_session.add.side_effect = _assign_db_fields
        test_app = _make_test_app(mock_session)

        with (
            patch("shared.api.kb._get_minio_client") as minio_patch,
            patch("shared.api.kb._get_ingest_task") as task_patch,
        ):
            storage = MagicMock()
            storage.head_bucket = MagicMock()
            storage.put_object = MagicMock()
            minio_patch.return_value = storage

            ingest_task = MagicMock()
            ingest_task.delay = MagicMock()
            task_patch.return_value = ingest_task

            transport = ASGITransport(app=test_app)
            async with AsyncClient(transport=transport, base_url="http://test") as client:
                resp = await client.post(
                    f"/api/portal/kb/{TENANT_ID}/documents",
                    files={"file": ("hello.txt", b"Hello world content", "text/plain")},
                )

            assert resp.status_code == 201
            body = resp.json()
            assert "id" in body
            assert body["filename"] == "hello.txt"
            assert body["status"] == "processing"
            ingest_task.delay.assert_called_once()

    @pytest.mark.asyncio
    async def test_upload_unsupported_extension_returns_400(self, mock_session: AsyncMock) -> None:
        """An unrecognized file extension is rejected with HTTP 400."""
        test_app = _make_test_app(mock_session)

        transport = ASGITransport(app=test_app)
        async with AsyncClient(transport=transport, base_url="http://test") as client:
            resp = await client.post(
                f"/api/portal/kb/{TENANT_ID}/documents",
                files={"file": ("malware.exe", b"bad bytes", "application/octet-stream")},
            )

        assert resp.status_code == 400
        assert "Unsupported" in resp.json()["detail"]
|
||||
|
||||
|
||||
class TestKbListEndpoint:
    @pytest.mark.asyncio
    async def test_list_returns_documents_with_status(
        self, mock_session: AsyncMock, mock_doc: MagicMock
    ) -> None:
        """Listing documents returns each row including its status and id."""
        exec_result = MagicMock()
        exec_result.scalars.return_value.all.return_value = [mock_doc]
        mock_session.execute = AsyncMock(return_value=exec_result)

        test_app = _make_test_app(mock_session)
        transport = ASGITransport(app=test_app)
        async with AsyncClient(transport=transport, base_url="http://test") as client:
            resp = await client.get(f"/api/portal/kb/{TENANT_ID}/documents")

        assert resp.status_code == 200
        body = resp.json()
        assert isinstance(body, list)
        assert len(body) == 1
        first = body[0]
        assert first["status"] == "processing"
        assert "id" in first
|
||||
|
||||
|
||||
class TestKbDeleteEndpoint:
    @pytest.mark.asyncio
    async def test_delete_document_returns_204(
        self, mock_session: AsyncMock, mock_doc: MagicMock
    ) -> None:
        """Deleting an existing document also removes its object and returns 204."""
        exec_result = MagicMock()
        exec_result.scalar_one_or_none.return_value = mock_doc
        mock_session.execute = AsyncMock(return_value=exec_result)

        test_app = _make_test_app(mock_session)

        with patch("shared.api.kb._get_minio_client") as minio_patch:
            storage = MagicMock()
            storage.remove_object = MagicMock()
            minio_patch.return_value = storage

            transport = ASGITransport(app=test_app)
            async with AsyncClient(transport=transport, base_url="http://test") as client:
                resp = await client.delete(
                    f"/api/portal/kb/{TENANT_ID}/documents/{DOC_ID}"
                )

        assert resp.status_code == 204

    @pytest.mark.asyncio
    async def test_delete_nonexistent_returns_404(self, mock_session: AsyncMock) -> None:
        """Deleting a missing document id yields HTTP 404."""
        exec_result = MagicMock()
        exec_result.scalar_one_or_none.return_value = None
        mock_session.execute = AsyncMock(return_value=exec_result)

        test_app = _make_test_app(mock_session)
        transport = ASGITransport(app=test_app)
        async with AsyncClient(transport=transport, base_url="http://test") as client:
            resp = await client.delete(
                f"/api/portal/kb/{TENANT_ID}/documents/{DOC_ID}"
            )

        assert resp.status_code == 404
|
||||
|
||||
|
||||
class TestKbUrlIngestEndpoint:
    @pytest.mark.asyncio
    async def test_url_ingest_dispatches_celery(self, mock_session: AsyncMock) -> None:
        """Registering a URL source creates a document and queues ingestion."""

        def _assign_db_fields(obj: Any) -> None:
            # Simulate the DB populating server-generated columns.
            obj.id = DOC_ID
            obj.created_at = datetime(2026, 1, 1, 12, 0, 0)

        mock_session.add.side_effect = _assign_db_fields
        test_app = _make_test_app(mock_session)

        with patch("shared.api.kb._get_ingest_task") as task_patch:
            ingest_task = MagicMock()
            ingest_task.delay = MagicMock()
            task_patch.return_value = ingest_task

            transport = ASGITransport(app=test_app)
            async with AsyncClient(transport=transport, base_url="http://test") as client:
                resp = await client.post(
                    f"/api/portal/kb/{TENANT_ID}/documents/url",
                    json={"url": "https://example.com/page", "source_type": "web"},
                )

            assert resp.status_code == 201
            ingest_task.delay.assert_called_once()
|
||||
|
||||
|
||||
class TestKbReindexEndpoint:
    @pytest.mark.asyncio
    async def test_reindex_dispatches_celery(
        self, mock_session: AsyncMock, mock_doc: MagicMock
    ) -> None:
        """Reindexing an existing document re-queues the ingest task (202)."""
        exec_result = MagicMock()
        exec_result.scalar_one_or_none.return_value = mock_doc
        mock_session.execute = AsyncMock(return_value=exec_result)

        test_app = _make_test_app(mock_session)

        with patch("shared.api.kb._get_ingest_task") as task_patch:
            ingest_task = MagicMock()
            ingest_task.delay = MagicMock()
            task_patch.return_value = ingest_task

            transport = ASGITransport(app=test_app)
            async with AsyncClient(transport=transport, base_url="http://test") as client:
                resp = await client.post(
                    f"/api/portal/kb/{TENANT_ID}/documents/{DOC_ID}/reindex",
                )

            assert resp.status_code == 202
            ingest_task.delay.assert_called_once()
|
||||
Reference in New Issue
Block a user