diff --git a/pyproject.toml b/pyproject.toml
index 0b01271..72fb8fb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -12,14 +12,12 @@ dependencies = [
"uvicorn>=0.30",
"sqlalchemy>=2.0",
"passlib[bcrypt]>=1.7",
- "python-jose[cryptography]>=3.3"
+ "python-jose[cryptography]>=3.3",
+    "pyyaml>=6.0.2"  # NOTE(review): no `import yaml` appears anywhere in this change — confirm this dependency is actually used
]
[project.scripts]
didactopus-api = "didactopus.api:main"
-didactopus-export-svg = "didactopus.export_svg:main"
-didactopus-render-bundle = "didactopus.render_bundle:main"
-didactopus-export-knowledge = "didactopus.knowledge_export:main"
[tool.setuptools.packages.find]
where = ["src"]
diff --git a/src/didactopus/api.py b/src/didactopus/api.py
index 3699cfd..4c88633 100644
--- a/src/didactopus/api.py
+++ b/src/didactopus/api.py
@@ -1,26 +1,27 @@
from __future__ import annotations
-from fastapi import FastAPI, HTTPException, Header, Depends, BackgroundTasks
+from fastapi import FastAPI, HTTPException, Header, Depends
from fastapi.middleware.cors import CORSMiddleware
-from fastapi.responses import FileResponse
import uvicorn
-from datetime import datetime, timedelta, timezone
-from pathlib import Path
from .db import Base, engine
-from .models import LoginRequest, RefreshRequest, TokenPair, CreateLearnerRequest, LearnerState, MediaRenderRequest, ArtifactRetentionUpdate, KnowledgeExportRequest
+from .models import (
+ LoginRequest, TokenPair, KnowledgeCandidateCreate, PromoteRequest,
+ SynthesisRunRequest, SynthesisPromoteRequest, CreateLearnerRequest,
+ ObjectEditRequest, PatchApplyRequest
+)
from .repository import (
- authenticate_user, get_user_by_id, store_refresh_token, refresh_token_active, revoke_refresh_token,
- list_packs_for_user, get_pack, get_pack_row, create_learner, learner_owned_by_user, load_learner_state, save_learner_state,
- create_render_job, list_render_jobs, list_artifacts, get_artifact, update_artifact_retention, soft_delete_artifact
+ authenticate_user, get_user_by_id, create_learner, create_candidate, list_candidates, get_candidate,
+ create_promotion, list_promotions, list_pack_patches, list_curriculum_drafts, list_skill_bundles,
+ list_synthesis_candidates, get_synthesis_candidate,
+ edit_pack_patch, edit_curriculum_draft, edit_skill_bundle, list_versions,
+ apply_pack_patch, export_curriculum_draft, export_skill_bundle
)
from .auth import issue_access_token, issue_refresh_token, decode_token, new_token_id
-from .engine import build_graph_frames, stable_layout
-from .worker import process_render_job
-from .knowledge_export import build_knowledge_snapshot
+from .synthesis import generate_synthesis_candidates
Base.metadata.create_all(bind=engine)
-
-app = FastAPI(title="Didactopus API Prototype")
+app = FastAPI(title="Didactopus Object Versioning and Export API")
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"])
+_refresh_tokens = {}  # NOTE(review): written at login but never read — /api/refresh was removed in this change, so this is a dead store that grows unbounded; drop it or restore a refresh endpoint
def current_user(authorization: str = Header(default="")):
token = authorization.removeprefix("Bearer ").strip()
@@ -32,164 +33,118 @@ def current_user(authorization: str = Header(default="")):
raise HTTPException(status_code=401, detail="Unauthorized")
return user
-def ensure_learner_access(user, learner_id: str):
- if user.role == "admin":
- return
- if not learner_owned_by_user(user.id, learner_id):
- raise HTTPException(status_code=403, detail="Learner not accessible by this user")
-
-def ensure_pack_access(user, pack_id: str):
- row = get_pack_row(pack_id)
- if row is None:
- raise HTTPException(status_code=404, detail="Pack not found")
- if user.role == "admin":
- return row
- if row.policy_lane == "community":
- return row
- if row.owner_user_id == user.id:
- return row
- raise HTTPException(status_code=403, detail="Pack not accessible by this user")
-
-def future_iso(days: int) -> str:
- return (datetime.now(timezone.utc) + timedelta(days=days)).isoformat()
+def require_reviewer(user = Depends(current_user)):
+ if user.role not in {"admin", "reviewer"}:
+ raise HTTPException(status_code=403, detail="Reviewer role required")
+ return user
@app.post("/api/login", response_model=TokenPair)
def login(payload: LoginRequest):
user = authenticate_user(payload.username, payload.password)
- if user is None:
- raise HTTPException(status_code=401, detail="Invalid credentials")
- token_id = new_token_id()
- store_refresh_token(user.id, token_id)
- return TokenPair(access_token=issue_access_token(user.id, user.username, user.role), refresh_token=issue_refresh_token(user.id, user.username, user.role, token_id), username=user.username, role=user.role)
-
-@app.post("/api/refresh", response_model=TokenPair)
-def refresh(payload: RefreshRequest):
- data = decode_token(payload.refresh_token)
- if not data or data.get("kind") != "refresh":
- raise HTTPException(status_code=401, detail="Invalid refresh token")
- token_id = data.get("jti")
- if not token_id or not refresh_token_active(token_id):
- raise HTTPException(status_code=401, detail="Refresh token inactive")
- user = get_user_by_id(int(data["sub"]))
- if user is None:
- raise HTTPException(status_code=401, detail="User not found")
- revoke_refresh_token(token_id)
- new_jti = new_token_id()
- store_refresh_token(user.id, new_jti)
- return TokenPair(access_token=issue_access_token(user.id, user.username, user.role), refresh_token=issue_refresh_token(user.id, user.username, user.role, new_jti), username=user.username, role=user.role)
-
-@app.get("/api/packs")
-def api_list_packs(user = Depends(current_user)):
- return [p.model_dump() for p in list_packs_for_user(user.id, include_unpublished=(user.role == "admin"))]
+ if user is None: raise HTTPException(status_code=401, detail="Invalid credentials")
+ token_id = new_token_id(); _refresh_tokens[token_id] = user.id
+ return TokenPair(access_token=issue_access_token(user.id, user.username, user.role),
+ refresh_token=issue_refresh_token(user.id, user.username, user.role, token_id),
+ username=user.username, role=user.role)
@app.post("/api/learners")
def api_create_learner(payload: CreateLearnerRequest, user = Depends(current_user)):
create_learner(user.id, payload.learner_id, payload.display_name)
return {"ok": True, "learner_id": payload.learner_id}
-@app.get("/api/learners/{learner_id}/state")
-def api_get_learner_state(learner_id: str, user = Depends(current_user)):
- ensure_learner_access(user, learner_id)
- return load_learner_state(learner_id).model_dump()
+@app.post("/api/knowledge-candidates")
+def api_create_candidate(payload: KnowledgeCandidateCreate, reviewer = Depends(require_reviewer)):
+ return {"candidate_id": create_candidate(payload)}
-@app.put("/api/learners/{learner_id}/state")
-def api_put_learner_state(learner_id: str, state: LearnerState, user = Depends(current_user)):
- ensure_learner_access(user, learner_id)
- if learner_id != state.learner_id:
- raise HTTPException(status_code=400, detail="Learner ID mismatch")
- return save_learner_state(state).model_dump()
+@app.get("/api/knowledge-candidates")
+def api_list_candidates(reviewer = Depends(require_reviewer)):
+ return list_candidates()
-@app.get("/api/packs/{pack_id}/layout")
-def api_pack_layout(pack_id: str, user = Depends(current_user)):
- ensure_pack_access(user, pack_id)
- pack = get_pack(pack_id)
- return {"pack_id": pack_id, "layout": stable_layout(pack)} if pack else {"pack_id": pack_id, "layout": {}}
+@app.post("/api/knowledge-candidates/{candidate_id}/promote")
+def api_promote_candidate(candidate_id: int, payload: PromoteRequest, reviewer = Depends(require_reviewer)):
+ if get_candidate(candidate_id) is None: raise HTTPException(status_code=404, detail="Candidate not found")
+ return {"promotion_id": create_promotion(candidate_id, reviewer.id, payload)}
-@app.get("/api/learners/{learner_id}/graph-animation/{pack_id}")
-def api_graph_animation(learner_id: str, pack_id: str, user = Depends(current_user)):
- ensure_learner_access(user, learner_id)
- ensure_pack_access(user, pack_id)
- pack = get_pack(pack_id)
- state = load_learner_state(learner_id)
- frames = build_graph_frames(state, pack)
- return {
- "learner_id": learner_id,
- "pack_id": pack_id,
- "pack_title": pack.title if pack else "",
- "frames": frames,
- "concepts": [{"id": c.id, "title": c.title, "prerequisites": c.prerequisites, "cross_pack_links": [l.model_dump() for l in c.cross_pack_links]} for c in pack.concepts] if pack else [],
- }
+@app.get("/api/promotions")
+def api_list_promotions(reviewer = Depends(require_reviewer)):
+ return list_promotions()
-@app.post("/api/learners/{learner_id}/render-jobs/{pack_id}")
-def api_render_job(learner_id: str, pack_id: str, payload: MediaRenderRequest, background_tasks: BackgroundTasks, user = Depends(current_user)):
- ensure_learner_access(user, learner_id)
- ensure_pack_access(user, pack_id)
- pack = get_pack(pack_id)
- state = load_learner_state(learner_id)
- animation = {
- "learner_id": learner_id,
- "pack_id": pack_id,
- "pack_title": pack.title if pack else "",
- "frames": build_graph_frames(state, pack),
- }
- job_id = create_render_job(learner_id, pack_id, payload.format, payload.fps, payload.theme)
- background_tasks.add_task(process_render_job, job_id, learner_id, pack_id, payload.format, payload.fps, payload.theme, payload.retention_class, payload.retention_days, animation)
- return {"job_id": job_id, "status": "queued"}
+@app.get("/api/pack-patches")
+def api_list_pack_patches(reviewer = Depends(require_reviewer)):
+ return list_pack_patches()
-@app.get("/api/render-jobs")
-def api_list_render_jobs(learner_id: str | None = None, user = Depends(current_user)):
- if learner_id:
- ensure_learner_access(user, learner_id)
- return list_render_jobs(learner_id)
+@app.get("/api/curriculum-drafts")
+def api_list_curriculum_drafts(reviewer = Depends(require_reviewer)):
+ return list_curriculum_drafts()
-@app.get("/api/artifacts")
-def api_list_artifacts(learner_id: str | None = None, user = Depends(current_user)):
- if learner_id:
- ensure_learner_access(user, learner_id)
- return list_artifacts(learner_id)
+@app.get("/api/skill-bundles")
+def api_list_skill_bundles(reviewer = Depends(require_reviewer)):
+ return list_skill_bundles()
-@app.get("/api/artifacts/{artifact_id}/download")
-def api_download_artifact(artifact_id: int, user = Depends(current_user)):
- artifact = get_artifact(artifact_id)
- if artifact is None or artifact.is_deleted:
- raise HTTPException(status_code=404, detail="Artifact not found")
- ensure_learner_access(user, artifact.learner_id)
- path = Path(artifact.path)
- if not path.exists():
- raise HTTPException(status_code=404, detail="Artifact path missing")
- if path.is_dir():
- manifest = path / "render_manifest.json"
- if not manifest.exists():
- raise HTTPException(status_code=404, detail="Artifact manifest missing")
- return FileResponse(str(manifest), filename=f"artifact-{artifact_id}-manifest.json")
- return FileResponse(str(path), filename=path.name)
+@app.post("/api/pack-patches/{patch_id}/edit")
+def api_edit_patch(patch_id: int, payload: ObjectEditRequest, reviewer = Depends(require_reviewer)):
+ row = edit_pack_patch(patch_id, payload.payload, reviewer.id, payload.note)
+ if row is None: raise HTTPException(status_code=404, detail="Patch not found")
+ return {"patch_id": row.id, "current_version": row.current_version}
-@app.post("/api/artifacts/{artifact_id}/retention")
-def api_update_artifact_retention(artifact_id: int, payload: ArtifactRetentionUpdate, user = Depends(current_user)):
- artifact = get_artifact(artifact_id)
- if artifact is None or artifact.is_deleted:
- raise HTTPException(status_code=404, detail="Artifact not found")
- ensure_learner_access(user, artifact.learner_id)
- expires_at = "" if payload.retention_days is None else future_iso(payload.retention_days)
- updated = update_artifact_retention(artifact_id, payload.retention_class, expires_at)
- return {"artifact_id": updated.id, "retention_class": updated.retention_class, "expires_at": updated.expires_at}
+@app.post("/api/curriculum-drafts/{draft_id}/edit")
+def api_edit_curriculum(draft_id: int, payload: ObjectEditRequest, reviewer = Depends(require_reviewer)):
+ row = edit_curriculum_draft(draft_id, payload.payload, reviewer.id, payload.note)
+ if row is None: raise HTTPException(status_code=404, detail="Draft not found")
+ return {"draft_id": row.id, "current_version": row.current_version}
-@app.delete("/api/artifacts/{artifact_id}")
-def api_delete_artifact(artifact_id: int, user = Depends(current_user)):
- artifact = get_artifact(artifact_id)
- if artifact is None or artifact.is_deleted:
- raise HTTPException(status_code=404, detail="Artifact not found")
- ensure_learner_access(user, artifact.learner_id)
- updated = soft_delete_artifact(artifact_id)
- return {"artifact_id": updated.id, "is_deleted": updated.is_deleted}
+@app.post("/api/skill-bundles/{bundle_id}/edit")
+def api_edit_skill(bundle_id: int, payload: ObjectEditRequest, reviewer = Depends(require_reviewer)):
+ row = edit_skill_bundle(bundle_id, payload.payload, reviewer.id, payload.note)
+ if row is None: raise HTTPException(status_code=404, detail="Skill bundle not found")
+ return {"skill_bundle_id": row.id, "current_version": row.current_version}
-@app.post("/api/learners/{learner_id}/knowledge-export/{pack_id}")
-def api_knowledge_export(learner_id: str, pack_id: str, payload: KnowledgeExportRequest, user = Depends(current_user)):
- ensure_learner_access(user, learner_id)
- ensure_pack_access(user, pack_id)
- snapshot = build_knowledge_snapshot(learner_id, pack_id)
- snapshot["requested_export_kind"] = payload.export_kind
- return snapshot
+@app.get("/api/object-versions/{object_kind}/{object_id}")
+def api_object_versions(object_kind: str, object_id: int, reviewer = Depends(require_reviewer)):
+ return list_versions(object_kind, object_id)
+
+@app.post("/api/pack-patches/{patch_id}/apply")
+def api_apply_patch(patch_id: int, payload: PatchApplyRequest, reviewer = Depends(require_reviewer)):
+ row = apply_pack_patch(patch_id, reviewer.id, payload.note)
+ if row is None: raise HTTPException(status_code=404, detail="Patch or pack not found")
+ return {"patch_id": row.id, "status": row.status}
+
+@app.get("/api/curriculum-drafts/{draft_id}/export")
+def api_export_curriculum(draft_id: int, reviewer = Depends(require_reviewer)):
+ out = export_curriculum_draft(draft_id)
+ if out is None: raise HTTPException(status_code=404, detail="Draft not found")
+ return out
+
+@app.get("/api/skill-bundles/{bundle_id}/export")
+def api_export_skill(bundle_id: int, reviewer = Depends(require_reviewer)):
+ out = export_skill_bundle(bundle_id)
+ if out is None: raise HTTPException(status_code=404, detail="Skill bundle not found")
+ return out
+
+@app.post("/api/synthesis/run")
+def api_run_synthesis(payload: SynthesisRunRequest, reviewer = Depends(require_reviewer)):
+ created = generate_synthesis_candidates(payload.source_pack_id, payload.target_pack_id, payload.limit)
+ return {"created_count": len(created), "synthesis_ids": created}
+
+@app.get("/api/synthesis/candidates")
+def api_list_synthesis(reviewer = Depends(require_reviewer)):
+ return list_synthesis_candidates()
+
+@app.post("/api/synthesis/candidates/{synthesis_id}/promote")
+def api_promote_synthesis(synthesis_id: int, payload: SynthesisPromoteRequest, reviewer = Depends(require_reviewer)):
+ syn = get_synthesis_candidate(synthesis_id)
+ if syn is None: raise HTTPException(status_code=404, detail="Synthesis candidate not found")
+ candidate_id = create_candidate(KnowledgeCandidateCreate(
+ source_type="synthesis_engine", learner_id="system", pack_id=syn["source_pack_id"],
+ candidate_kind="synthesis_proposal",
+ title=f"Synthesis: {syn['source_concept_id']} ↔ {syn['target_concept_id']}",
+ summary=syn["explanation"], structured_payload=syn,
+ evidence_summary="Promoted from synthesis engine candidate",
+        confidence_hint=syn["score_total"], novelty_score=syn["evidence"].get("novelty", 0.0),  # NOTE(review): assumes syn["evidence"] is always present — confirm get_synthesis_candidate's contract
+ synthesis_score=syn["score_total"], triage_lane=payload.promotion_target,
+ ))
+ promotion_id = create_promotion(candidate_id, reviewer.id, PromoteRequest(promotion_target=payload.promotion_target, target_object_id="", promotion_status="approved"))
+ return {"candidate_id": candidate_id, "promotion_id": promotion_id}
def main():
uvicorn.run(app, host="127.0.0.1", port=8011)
diff --git a/src/didactopus/models.py b/src/didactopus/models.py
index 8840b5e..31f415f 100644
--- a/src/didactopus/models.py
+++ b/src/didactopus/models.py
@@ -1,6 +1,10 @@
from __future__ import annotations
from pydantic import BaseModel, Field
+class LoginRequest(BaseModel):
+ username: str
+ password: str
+
class TokenPair(BaseModel):
access_token: str
refresh_token: str
@@ -8,81 +12,41 @@ class TokenPair(BaseModel):
username: str
role: str
-class LoginRequest(BaseModel):
- username: str
- password: str
-
-class RefreshRequest(BaseModel):
- refresh_token: str
-
-class GraphPosition(BaseModel):
- x: float
- y: float
-
-class CrossPackLink(BaseModel):
- source_concept_id: str
- target_pack_id: str
- target_concept_id: str
- relationship: str = "related"
-
-class PackConcept(BaseModel):
- id: str
+class KnowledgeCandidateCreate(BaseModel):
+ source_type: str = "learner_export"
+ source_artifact_id: int | None = None
+ learner_id: str
+ pack_id: str
+ candidate_kind: str
title: str
- prerequisites: list[str] = Field(default_factory=list)
- masteryDimension: str = "mastery"
- exerciseReward: str = ""
- position: GraphPosition | None = None
- cross_pack_links: list[CrossPackLink] = Field(default_factory=list)
+ summary: str = ""
+ structured_payload: dict = Field(default_factory=dict)
+ evidence_summary: str = ""
+ confidence_hint: float = 0.0
+ novelty_score: float = 0.0
+ synthesis_score: float = 0.0
+ triage_lane: str = "archive"
-class PackData(BaseModel):
- id: str
- title: str
- subtitle: str = ""
- level: str = "novice-friendly"
- concepts: list[PackConcept] = Field(default_factory=list)
- onboarding: dict = Field(default_factory=dict)
- compliance: dict = Field(default_factory=dict)
+class PromoteRequest(BaseModel):
+ promotion_target: str
+ target_object_id: str = ""
+ promotion_status: str = "approved"
+
+class SynthesisRunRequest(BaseModel):
+ source_pack_id: str | None = None
+ target_pack_id: str | None = None
+ limit: int = 20
+
+class SynthesisPromoteRequest(BaseModel):
+ promotion_target: str = "pack_improvement"
+
+class ObjectEditRequest(BaseModel):
+ payload: dict = Field(default_factory=dict)
+ note: str = ""
+
+class PatchApplyRequest(BaseModel):
+ note: str = "Applied pack patch"
class CreateLearnerRequest(BaseModel):
learner_id: str
display_name: str = ""
-
-class MasteryRecord(BaseModel):
- concept_id: str
- dimension: str
- score: float = 0.0
- confidence: float = 0.0
- evidence_count: int = 0
- last_updated: str = ""
-
-class EvidenceEvent(BaseModel):
- concept_id: str
- dimension: str
- score: float
- confidence_hint: float = 0.5
- timestamp: str
- kind: str = "exercise"
- source_id: str = ""
-
-class LearnerState(BaseModel):
- learner_id: str
- records: list[MasteryRecord] = Field(default_factory=list)
- history: list[EvidenceEvent] = Field(default_factory=list)
-
-class MediaRenderRequest(BaseModel):
- learner_id: str
- pack_id: str
- format: str = "gif"
- fps: int = 2
- theme: str = "default"
- retention_class: str = "standard"
- retention_days: int = 30
-
-class ArtifactRetentionUpdate(BaseModel):
- retention_class: str
- retention_days: int | None = None
-
-class KnowledgeExportRequest(BaseModel):
- learner_id: str
- pack_id: str
- export_kind: str = "knowledge_snapshot"
diff --git a/src/didactopus/orm.py b/src/didactopus/orm.py
index 300796e..4a39dd4 100644
--- a/src/didactopus/orm.py
+++ b/src/didactopus/orm.py
@@ -10,13 +10,6 @@ class UserORM(Base):
role: Mapped[str] = mapped_column(String(50), default="learner")
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
-class RefreshTokenORM(Base):
- __tablename__ = "refresh_tokens"
- id: Mapped[int] = mapped_column(Integer, primary_key=True)
- user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
- token_id: Mapped[str] = mapped_column(String(255), unique=True, index=True)
- is_revoked: Mapped[bool] = mapped_column(Boolean, default=False)
-
class PackORM(Base):
__tablename__ = "packs"
id: Mapped[str] = mapped_column(String(100), primary_key=True)
@@ -28,61 +21,91 @@ class PackORM(Base):
     data_json: Mapped[str] = mapped_column(Text)
     is_published: Mapped[bool] = mapped_column(Boolean, default=False)
 class LearnerORM(Base):
     __tablename__ = "learners"
     id: Mapped[str] = mapped_column(String(100), primary_key=True)
     owner_user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
     display_name: Mapped[str] = mapped_column(String(255), default="")
 
-class MasteryRecordORM(Base):
- __tablename__ = "mastery_records"
- id: Mapped[int] = mapped_column(Integer, primary_key=True)
- learner_id: Mapped[str] = mapped_column(ForeignKey("learners.id"), index=True)
- concept_id: Mapped[str] = mapped_column(String(100), index=True)
- dimension: Mapped[str] = mapped_column(String(100), default="mastery")
- score: Mapped[float] = mapped_column(Float, default=0.0)
- confidence: Mapped[float] = mapped_column(Float, default=0.0)
- evidence_count: Mapped[int] = mapped_column(Integer, default=0)
- last_updated: Mapped[str] = mapped_column(String(100), default="")
-
-class EvidenceEventORM(Base):
- __tablename__ = "evidence_events"
- id: Mapped[int] = mapped_column(Integer, primary_key=True)
- learner_id: Mapped[str] = mapped_column(ForeignKey("learners.id"), index=True)
- concept_id: Mapped[str] = mapped_column(String(100), index=True)
- dimension: Mapped[str] = mapped_column(String(100), default="mastery")
- score: Mapped[float] = mapped_column(Float, default=0.0)
- confidence_hint: Mapped[float] = mapped_column(Float, default=0.5)
- timestamp: Mapped[str] = mapped_column(String(100), default="")
- kind: Mapped[str] = mapped_column(String(50), default="exercise")
- source_id: Mapped[str] = mapped_column(String(255), default="")
-
-class RenderJobORM(Base):
- __tablename__ = "render_jobs"
+class KnowledgeCandidateORM(Base):
+ __tablename__ = "knowledge_candidates"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ source_type: Mapped[str] = mapped_column(String(50), default="learner_export")
+ source_artifact_id: Mapped[int | None] = mapped_column(Integer, nullable=True)
learner_id: Mapped[str] = mapped_column(String(100), index=True)
pack_id: Mapped[str] = mapped_column(String(100), index=True)
- requested_format: Mapped[str] = mapped_column(String(20), default="gif")
- fps: Mapped[int] = mapped_column(Integer, default=2)
- theme: Mapped[str] = mapped_column(String(100), default="default")
- status: Mapped[str] = mapped_column(String(50), default="queued")
- bundle_dir: Mapped[str] = mapped_column(Text, default="")
- payload_json: Mapped[str] = mapped_column(Text, default="")
- manifest_path: Mapped[str] = mapped_column(Text, default="")
- script_path: Mapped[str] = mapped_column(Text, default="")
- error_text: Mapped[str] = mapped_column(Text, default="")
+ candidate_kind: Mapped[str] = mapped_column(String(100), index=True)
+ title: Mapped[str] = mapped_column(String(255))
+ summary: Mapped[str] = mapped_column(Text, default="")
+ structured_payload_json: Mapped[str] = mapped_column(Text, default="{}")
+ evidence_summary: Mapped[str] = mapped_column(Text, default="")
+ confidence_hint: Mapped[float] = mapped_column(Float, default=0.0)
+ novelty_score: Mapped[float] = mapped_column(Float, default=0.0)
+ synthesis_score: Mapped[float] = mapped_column(Float, default=0.0)
+ triage_lane: Mapped[str] = mapped_column(String(50), default="archive")
+ current_status: Mapped[str] = mapped_column(String(50), default="captured")
+ created_at: Mapped[str] = mapped_column(String(100), default="")
-class ArtifactORM(Base):
- __tablename__ = "artifacts"
+class PromotionRecordORM(Base):
+ __tablename__ = "promotion_records"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
- render_job_id: Mapped[int] = mapped_column(ForeignKey("render_jobs.id"), index=True)
- learner_id: Mapped[str] = mapped_column(String(100), index=True)
+ candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
+ promotion_target: Mapped[str] = mapped_column(String(50), index=True)
+ target_object_id: Mapped[str] = mapped_column(String(100), default="")
+ promotion_status: Mapped[str] = mapped_column(String(50), default="draft")
+ promoted_by: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
+ created_at: Mapped[str] = mapped_column(String(100), default="")
+
+class PackPatchProposalORM(Base):
+ __tablename__ = "pack_patch_proposals"
+ id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
pack_id: Mapped[str] = mapped_column(String(100), index=True)
- artifact_type: Mapped[str] = mapped_column(String(50), default="render_bundle")
- format: Mapped[str] = mapped_column(String(20), default="gif")
- title: Mapped[str] = mapped_column(String(255), default="")
- path: Mapped[str] = mapped_column(Text, default="")
- metadata_json: Mapped[str] = mapped_column(Text, default="{}")
- retention_class: Mapped[str] = mapped_column(String(50), default="standard")
- expires_at: Mapped[str] = mapped_column(String(100), default="")
- is_deleted: Mapped[bool] = mapped_column(Boolean, default=False)
+ patch_type: Mapped[str] = mapped_column(String(100), default="content_revision")
+ title: Mapped[str] = mapped_column(String(255))
+ proposed_change_json: Mapped[str] = mapped_column(Text, default="{}")
+ evidence_summary: Mapped[str] = mapped_column(Text, default="")
+ reviewer_notes: Mapped[str] = mapped_column(Text, default="")
+ status: Mapped[str] = mapped_column(String(50), default="proposed")
+ current_version: Mapped[int] = mapped_column(Integer, default=1)
+ created_at: Mapped[str] = mapped_column(String(100), default="")
+
+class CurriculumDraftORM(Base):
+ __tablename__ = "curriculum_drafts"
+ id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
+ topic_focus: Mapped[str] = mapped_column(String(255), default="")
+ product_type: Mapped[str] = mapped_column(String(100), default="lesson_outline")
+ audience: Mapped[str] = mapped_column(String(100), default="general")
+ source_concepts_json: Mapped[str] = mapped_column(Text, default="[]")
+ content_markdown: Mapped[str] = mapped_column(Text, default="")
+ editorial_notes: Mapped[str] = mapped_column(Text, default="")
+ status: Mapped[str] = mapped_column(String(50), default="draft")
+ current_version: Mapped[int] = mapped_column(Integer, default=1)
+ created_at: Mapped[str] = mapped_column(String(100), default="")
+
+class SkillBundleORM(Base):
+ __tablename__ = "skill_bundles"
+ id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
+ skill_name: Mapped[str] = mapped_column(String(255))
+ domain: Mapped[str] = mapped_column(String(100), default="")
+ prerequisites_json: Mapped[str] = mapped_column(Text, default="[]")
+ expected_inputs_json: Mapped[str] = mapped_column(Text, default="[]")
+ failure_modes_json: Mapped[str] = mapped_column(Text, default="[]")
+ validation_checks_json: Mapped[str] = mapped_column(Text, default="[]")
+ canonical_examples_json: Mapped[str] = mapped_column(Text, default="[]")
+ status: Mapped[str] = mapped_column(String(50), default="draft")
+ current_version: Mapped[int] = mapped_column(Integer, default=1)
+ created_at: Mapped[str] = mapped_column(String(100), default="")
+
+class ObjectVersionORM(Base):
+ __tablename__ = "object_versions"
+ id: Mapped[int] = mapped_column(Integer, primary_key=True)
+ object_kind: Mapped[str] = mapped_column(String(50), index=True)
+ object_id: Mapped[int] = mapped_column(Integer, index=True)
+ version_number: Mapped[int] = mapped_column(Integer, default=1)
+ payload_json: Mapped[str] = mapped_column(Text, default="{}")
+ editor_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
+ note: Mapped[str] = mapped_column(Text, default="")
+ created_at: Mapped[str] = mapped_column(String(100), default="")
diff --git a/src/didactopus/repository.py b/src/didactopus/repository.py
index 4610a8c..cf2ba6f 100644
--- a/src/didactopus/repository.py
+++ b/src/didactopus/repository.py
@@ -1,11 +1,17 @@
from __future__ import annotations
import json
+from datetime import datetime, timezone
from sqlalchemy import select
from .db import SessionLocal
-from .orm import UserORM, RefreshTokenORM, PackORM, LearnerORM, MasteryRecordORM, EvidenceEventORM, RenderJobORM, ArtifactORM
-from .models import PackData, LearnerState, MasteryRecord, EvidenceEvent
+from .orm import (
+ UserORM, PackORM, LearnerORM, KnowledgeCandidateORM, PromotionRecordORM,
+    PackPatchProposalORM, CurriculumDraftORM, SkillBundleORM, ObjectVersionORM, SynthesisCandidateORM  # NOTE(review): SynthesisCandidateORM is not defined in this diff's orm.py changes — confirm it exists in orm.py or this import will fail
+)
from .auth import verify_password
+def now_iso() -> str:
+ return datetime.now(timezone.utc).isoformat()
+
def get_user_by_username(username: str):
with SessionLocal() as db:
return db.execute(select(UserORM).where(UserORM.username == username)).scalar_one_or_none()
@@ -20,216 +26,474 @@ def authenticate_user(username: str, password: str):
return None
return user
-def store_refresh_token(user_id: int, token_id: str):
+def list_packs():
with SessionLocal() as db:
- db.add(RefreshTokenORM(user_id=user_id, token_id=token_id, is_revoked=False))
- db.commit()
-
-def refresh_token_active(token_id: str) -> bool:
- with SessionLocal() as db:
- row = db.execute(select(RefreshTokenORM).where(RefreshTokenORM.token_id == token_id)).scalar_one_or_none()
- return row is not None and not row.is_revoked
-
-def revoke_refresh_token(token_id: str):
- with SessionLocal() as db:
- row = db.execute(select(RefreshTokenORM).where(RefreshTokenORM.token_id == token_id)).scalar_one_or_none()
- if row:
- row.is_revoked = True
- db.commit()
-
-def list_packs_for_user(user_id: int | None = None, include_unpublished: bool = False):
- with SessionLocal() as db:
- stmt = select(PackORM)
- if not include_unpublished:
- stmt = stmt.where(PackORM.is_published == True)
- rows = db.execute(stmt).scalars().all()
- out = []
- for r in rows:
- if r.policy_lane == "community":
- out.append(PackData.model_validate(json.loads(r.data_json)))
- elif user_id is not None and r.owner_user_id == user_id:
- out.append(PackData.model_validate(json.loads(r.data_json)))
- return out
+ return db.execute(select(PackORM).order_by(PackORM.id)).scalars().all()
def get_pack(pack_id: str):
- with SessionLocal() as db:
- row = db.get(PackORM, pack_id)
- return None if row is None else PackData.model_validate(json.loads(row.data_json))
-
-def get_pack_row(pack_id: str):
with SessionLocal() as db:
return db.get(PackORM, pack_id)
-def upsert_pack(pack: PackData, submitted_by_user_id: int, policy_lane: str = "personal", is_published: bool = False):
- with SessionLocal() as db:
- row = db.get(PackORM, pack.id)
- payload = json.dumps(pack.model_dump())
- if row is None:
- row = PackORM(
- id=pack.id,
- owner_user_id=submitted_by_user_id if policy_lane == "personal" else None,
- policy_lane=policy_lane,
- title=pack.title,
- subtitle=pack.subtitle,
- level=pack.level,
- data_json=payload,
- is_published=is_published if policy_lane == "personal" else False,
- )
- db.add(row)
- else:
- row.owner_user_id = submitted_by_user_id if policy_lane == "personal" else row.owner_user_id
- row.policy_lane = policy_lane
- row.title = pack.title
- row.subtitle = pack.subtitle
- row.level = pack.level
- row.data_json = payload
- if policy_lane == "personal":
- row.is_published = is_published
- db.commit()
-
def create_learner(owner_user_id: int, learner_id: str, display_name: str = ""):
with SessionLocal() as db:
if db.get(LearnerORM, learner_id) is None:
db.add(LearnerORM(id=learner_id, owner_user_id=owner_user_id, display_name=display_name))
db.commit()
-def learner_owned_by_user(user_id: int, learner_id: str) -> bool:
+def create_candidate(payload):
with SessionLocal() as db:
- learner = db.get(LearnerORM, learner_id)
- return learner is not None and learner.owner_user_id == user_id
-
-def load_learner_state(learner_id: str):
- with SessionLocal() as db:
- records = db.execute(select(MasteryRecordORM).where(MasteryRecordORM.learner_id == learner_id)).scalars().all()
- history = db.execute(select(EvidenceEventORM).where(EvidenceEventORM.learner_id == learner_id)).scalars().all()
- return LearnerState(
- learner_id=learner_id,
- records=[MasteryRecord(concept_id=r.concept_id, dimension=r.dimension, score=r.score, confidence=r.confidence, evidence_count=r.evidence_count, last_updated=r.last_updated) for r in records],
- history=[EvidenceEvent(concept_id=h.concept_id, dimension=h.dimension, score=h.score, confidence_hint=h.confidence_hint, timestamp=h.timestamp, kind=h.kind, source_id=h.source_id) for h in history],
- )
-
-def save_learner_state(state: LearnerState):
- with SessionLocal() as db:
- db.query(MasteryRecordORM).filter(MasteryRecordORM.learner_id == state.learner_id).delete()
- db.query(EvidenceEventORM).filter(EvidenceEventORM.learner_id == state.learner_id).delete()
- for r in state.records:
- db.add(MasteryRecordORM(learner_id=state.learner_id, concept_id=r.concept_id, dimension=r.dimension, score=r.score, confidence=r.confidence, evidence_count=r.evidence_count, last_updated=r.last_updated))
- for h in state.history:
- db.add(EvidenceEventORM(learner_id=state.learner_id, concept_id=h.concept_id, dimension=h.dimension, score=h.score, confidence_hint=h.confidence_hint, timestamp=h.timestamp, kind=h.kind, source_id=h.source_id))
- db.commit()
- return state
-
-def create_render_job(learner_id: str, pack_id: str, requested_format: str, fps: int, theme: str):
- with SessionLocal() as db:
- row = RenderJobORM(
- learner_id=learner_id,
- pack_id=pack_id,
- requested_format=requested_format,
- fps=fps,
- theme=theme,
- status="queued",
+ row = KnowledgeCandidateORM(
+ source_type=payload.source_type,
+ source_artifact_id=payload.source_artifact_id,
+ learner_id=payload.learner_id,
+ pack_id=payload.pack_id,
+ candidate_kind=payload.candidate_kind,
+ title=payload.title,
+ summary=payload.summary,
+ structured_payload_json=json.dumps(payload.structured_payload),
+ evidence_summary=payload.evidence_summary,
+ confidence_hint=payload.confidence_hint,
+ novelty_score=payload.novelty_score,
+ synthesis_score=payload.synthesis_score,
+ triage_lane=payload.triage_lane,
+ current_status="triaged",
+ created_at=now_iso(),
)
db.add(row)
db.commit()
db.refresh(row)
return row.id
-def update_render_job(job_id: int, **fields):
+def list_candidates():
with SessionLocal() as db:
- row = db.get(RenderJobORM, job_id)
- if row is None:
- return None
- for k, v in fields.items():
- setattr(row, k, v)
- db.commit()
- db.refresh(row)
- return row
-
-def list_render_jobs(learner_id: str | None = None):
- with SessionLocal() as db:
- stmt = select(RenderJobORM).order_by(RenderJobORM.id.desc())
- if learner_id:
- stmt = stmt.where(RenderJobORM.learner_id == learner_id)
- rows = db.execute(stmt).scalars().all()
+ rows = db.execute(select(KnowledgeCandidateORM).order_by(KnowledgeCandidateORM.id.desc())).scalars().all()
return [{
- "job_id": r.id,
- "learner_id": r.learner_id,
+ "candidate_id": r.id,
"pack_id": r.pack_id,
- "requested_format": r.requested_format,
- "fps": r.fps,
- "theme": r.theme,
- "status": r.status,
- "bundle_dir": r.bundle_dir,
- "payload_json": r.payload_json,
- "manifest_path": r.manifest_path,
- "script_path": r.script_path,
- "error_text": r.error_text,
- } for r in rows]
-
-def register_artifact(render_job_id: int, learner_id: str, pack_id: str, artifact_type: str, fmt: str, title: str, path: str, metadata: dict, retention_class: str = "standard", expires_at: str = ""):
- with SessionLocal() as db:
- row = ArtifactORM(
- render_job_id=render_job_id,
- learner_id=learner_id,
- pack_id=pack_id,
- artifact_type=artifact_type,
- format=fmt,
- title=title,
- path=path,
- metadata_json=json.dumps(metadata),
- retention_class=retention_class,
- expires_at=expires_at,
- is_deleted=False,
- )
- db.add(row)
- db.commit()
- db.refresh(row)
- return row.id
-
-def list_artifacts(learner_id: str | None = None, include_deleted: bool = False):
- with SessionLocal() as db:
- stmt = select(ArtifactORM).order_by(ArtifactORM.id.desc())
- if learner_id:
- stmt = stmt.where(ArtifactORM.learner_id == learner_id)
- if not include_deleted:
- stmt = stmt.where(ArtifactORM.is_deleted == False)
- rows = db.execute(stmt).scalars().all()
- return [{
- "artifact_id": r.id,
- "render_job_id": r.render_job_id,
- "learner_id": r.learner_id,
- "pack_id": r.pack_id,
- "artifact_type": r.artifact_type,
- "format": r.format,
+ "candidate_kind": r.candidate_kind,
"title": r.title,
- "path": r.path,
- "retention_class": r.retention_class,
- "expires_at": r.expires_at,
- "is_deleted": r.is_deleted,
- "metadata": json.loads(r.metadata_json or "{}"),
+ "summary": r.summary,
+ "structured_payload": json.loads(r.structured_payload_json or "{}"),
+ "evidence_summary": r.evidence_summary,
+ "confidence_hint": r.confidence_hint,
+ "novelty_score": r.novelty_score,
+ "synthesis_score": r.synthesis_score,
+ "triage_lane": r.triage_lane,
+ "current_status": r.current_status,
} for r in rows]
-def get_artifact(artifact_id: int):
+def get_candidate(candidate_id: int):
with SessionLocal() as db:
- return db.get(ArtifactORM, artifact_id)
-
-def update_artifact_retention(artifact_id: int, retention_class: str, expires_at: str):
- with SessionLocal() as db:
- row = db.get(ArtifactORM, artifact_id)
- if row is None:
+ r = db.get(KnowledgeCandidateORM, candidate_id)
+ if r is None:
return None
- row.retention_class = retention_class
- row.expires_at = expires_at
+ return {
+ "candidate_id": r.id,
+ "pack_id": r.pack_id,
+ "candidate_kind": r.candidate_kind,
+ "title": r.title,
+ "summary": r.summary,
+ "structured_payload": json.loads(r.structured_payload_json or "{}"),
+ "evidence_summary": r.evidence_summary,
+ "confidence_hint": r.confidence_hint,
+ "novelty_score": r.novelty_score,
+ "synthesis_score": r.synthesis_score,
+ "triage_lane": r.triage_lane,
+ "current_status": r.current_status,
+ }
+
+def create_pack_patch(candidate):
+ with SessionLocal() as db:
+ row = PackPatchProposalORM(
+ candidate_id=candidate["candidate_id"],
+ pack_id=candidate["pack_id"],
+ patch_type=candidate["candidate_kind"],
+ title=candidate["title"],
+ proposed_change_json=json.dumps(candidate["structured_payload"]),
+ evidence_summary=candidate["evidence_summary"],
+ reviewer_notes="",
+ status="proposed",
+ current_version=1,
+ created_at=now_iso(),
+ )
+ db.add(row)
db.commit()
db.refresh(row)
- return row
+ _create_version("pack_patch", row.id, 1, {
+ "title": row.title,
+ "proposed_change": json.loads(row.proposed_change_json or "{}"),
+ "status": row.status,
+ "reviewer_notes": row.reviewer_notes,
+ }, 1, "Initial version")
+ return f"patch:{row.id}"
-def soft_delete_artifact(artifact_id: int):
+def create_curriculum_draft(candidate):
with SessionLocal() as db:
- row = db.get(ArtifactORM, artifact_id)
- if row is None:
- return None
- row.is_deleted = True
+ payload = candidate["structured_payload"]
+ source_concepts = payload.get("source_concepts", [payload.get("affected_concept")] if payload.get("affected_concept") else [])
+ content = f"# {candidate['title']}\n\n{candidate['summary']}\n\n## Evidence\n{candidate['evidence_summary']}\n"
+ row = CurriculumDraftORM(
+ candidate_id=candidate["candidate_id"],
+ topic_focus=candidate["title"],
+ product_type="lesson_outline",
+ audience="general",
+ source_concepts_json=json.dumps(source_concepts),
+ content_markdown=content,
+ editorial_notes="",
+ status="draft",
+ current_version=1,
+ created_at=now_iso(),
+ )
+ db.add(row)
db.commit()
db.refresh(row)
- return row
+ _create_version("curriculum_draft", row.id, 1, {
+ "topic_focus": row.topic_focus,
+ "content_markdown": row.content_markdown,
+ "product_type": row.product_type,
+ "audience": row.audience,
+ }, 1, "Initial version")
+ return f"curriculum:{row.id}"
+
+def create_skill_bundle(candidate):
+ with SessionLocal() as db:
+ payload = candidate["structured_payload"]
+ row = SkillBundleORM(
+ candidate_id=candidate["candidate_id"],
+ skill_name=candidate["title"],
+ domain=candidate["pack_id"],
+ prerequisites_json=json.dumps(payload.get("prerequisites", [])),
+ expected_inputs_json=json.dumps(payload.get("expected_inputs", ["text"])),
+ failure_modes_json=json.dumps(payload.get("failure_modes", ["misapplied concept"])),
+ validation_checks_json=json.dumps(payload.get("validation_checks", ["can explain concept clearly"])),
+ canonical_examples_json=json.dumps(payload.get("canonical_examples", [candidate["summary"]])),
+ status="draft",
+ current_version=1,
+ created_at=now_iso(),
+ )
+ db.add(row)
+ db.commit()
+ db.refresh(row)
+ _create_version("skill_bundle", row.id, 1, {
+ "skill_name": row.skill_name,
+ "domain": row.domain,
+ "prerequisites": json.loads(row.prerequisites_json or "[]"),
+ "expected_inputs": json.loads(row.expected_inputs_json or "[]"),
+ "failure_modes": json.loads(row.failure_modes_json or "[]"),
+ "validation_checks": json.loads(row.validation_checks_json or "[]"),
+ "canonical_examples": json.loads(row.canonical_examples_json or "[]"),
+ }, 1, "Initial version")
+ return f"skill:{row.id}"
+
+def create_promotion(candidate_id: int, promoted_by: int, payload):
+ candidate = get_candidate(candidate_id)
+ if candidate is None:
+ return None
+ target_object_id = payload.target_object_id
+ if not target_object_id:
+ if payload.promotion_target == "pack_improvement":
+ target_object_id = create_pack_patch(candidate)
+ elif payload.promotion_target == "curriculum_draft":
+ target_object_id = create_curriculum_draft(candidate)
+ elif payload.promotion_target == "reusable_skill_bundle":
+ target_object_id = create_skill_bundle(candidate)
+ elif payload.promotion_target == "archive":
+ target_object_id = "archive:auto"
+ with SessionLocal() as db:
+ row = PromotionRecordORM(
+ candidate_id=candidate_id,
+ promotion_target=payload.promotion_target,
+ target_object_id=target_object_id,
+ promotion_status=payload.promotion_status,
+ promoted_by=promoted_by,
+ created_at=now_iso(),
+ )
+ db.add(row)
+ cand = db.get(KnowledgeCandidateORM, candidate_id)
+ if cand:
+ cand.current_status = "promoted" if payload.promotion_target != "archive" else "archived"
+ cand.triage_lane = payload.promotion_target
+ db.commit()
+ db.refresh(row)
+ return row.id
+
+def list_promotions():
+ with SessionLocal() as db:
+ rows = db.execute(select(PromotionRecordORM).order_by(PromotionRecordORM.id.desc())).scalars().all()
+ return [{
+ "promotion_id": r.id,
+ "candidate_id": r.candidate_id,
+ "promotion_target": r.promotion_target,
+ "target_object_id": r.target_object_id,
+ "promotion_status": r.promotion_status,
+ "promoted_by": r.promoted_by,
+ "created_at": r.created_at,
+ } for r in rows]
+
+def list_pack_patches():
+ with SessionLocal() as db:
+ rows = db.execute(select(PackPatchProposalORM).order_by(PackPatchProposalORM.id.desc())).scalars().all()
+ return [{
+ "patch_id": r.id,
+ "candidate_id": r.candidate_id,
+ "pack_id": r.pack_id,
+ "patch_type": r.patch_type,
+ "title": r.title,
+ "proposed_change": json.loads(r.proposed_change_json or "{}"),
+ "evidence_summary": r.evidence_summary,
+ "reviewer_notes": r.reviewer_notes,
+ "status": r.status,
+ "current_version": r.current_version,
+ "created_at": r.created_at,
+ } for r in rows]
+
+def list_curriculum_drafts():
+ with SessionLocal() as db:
+ rows = db.execute(select(CurriculumDraftORM).order_by(CurriculumDraftORM.id.desc())).scalars().all()
+ return [{
+ "draft_id": r.id,
+ "candidate_id": r.candidate_id,
+ "topic_focus": r.topic_focus,
+ "product_type": r.product_type,
+ "audience": r.audience,
+ "source_concepts": json.loads(r.source_concepts_json or "[]"),
+ "content_markdown": r.content_markdown,
+ "editorial_notes": r.editorial_notes,
+ "status": r.status,
+ "current_version": r.current_version,
+ "created_at": r.created_at,
+ } for r in rows]
+
+def list_skill_bundles():
+ with SessionLocal() as db:
+ rows = db.execute(select(SkillBundleORM).order_by(SkillBundleORM.id.desc())).scalars().all()
+ return [{
+ "skill_bundle_id": r.id,
+ "candidate_id": r.candidate_id,
+ "skill_name": r.skill_name,
+ "domain": r.domain,
+ "prerequisites": json.loads(r.prerequisites_json or "[]"),
+ "expected_inputs": json.loads(r.expected_inputs_json or "[]"),
+ "failure_modes": json.loads(r.failure_modes_json or "[]"),
+ "validation_checks": json.loads(r.validation_checks_json or "[]"),
+ "canonical_examples": json.loads(r.canonical_examples_json or "[]"),
+ "status": r.status,
+ "current_version": r.current_version,
+ "created_at": r.created_at,
+ } for r in rows]
+
+def get_pack_patch(patch_id: int):
+ with SessionLocal() as db:
+ r = db.get(PackPatchProposalORM, patch_id)
+ if r is None: return None
+ return {
+ "patch_id": r.id, "pack_id": r.pack_id, "title": r.title,
+ "proposed_change": json.loads(r.proposed_change_json or "{}"),
+ "reviewer_notes": r.reviewer_notes, "status": r.status, "current_version": r.current_version
+ }
+
+def get_curriculum_draft(draft_id: int):
+ with SessionLocal() as db:
+ r = db.get(CurriculumDraftORM, draft_id)
+ if r is None: return None
+ return {
+ "draft_id": r.id, "topic_focus": r.topic_focus, "product_type": r.product_type,
+ "audience": r.audience, "source_concepts": json.loads(r.source_concepts_json or "[]"),
+ "content_markdown": r.content_markdown, "editorial_notes": r.editorial_notes,
+ "status": r.status, "current_version": r.current_version
+ }
+
+def get_skill_bundle(bundle_id: int):
+ with SessionLocal() as db:
+ r = db.get(SkillBundleORM, bundle_id)
+ if r is None: return None
+ return {
+ "skill_bundle_id": r.id, "skill_name": r.skill_name, "domain": r.domain,
+ "prerequisites": json.loads(r.prerequisites_json or "[]"),
+ "expected_inputs": json.loads(r.expected_inputs_json or "[]"),
+ "failure_modes": json.loads(r.failure_modes_json or "[]"),
+ "validation_checks": json.loads(r.validation_checks_json or "[]"),
+ "canonical_examples": json.loads(r.canonical_examples_json or "[]"),
+ "status": r.status, "current_version": r.current_version
+ }
+
+def _create_version(object_kind: str, object_id: int, version_number: int, payload: dict, editor_id: int, note: str):
+ with SessionLocal() as db:
+ db.add(ObjectVersionORM(
+ object_kind=object_kind,
+ object_id=object_id,
+ version_number=version_number,
+ payload_json=json.dumps(payload),
+ editor_id=editor_id,
+ note=note,
+ created_at=now_iso(),
+ ))
+ db.commit()
+
+def list_versions(object_kind: str, object_id: int):
+ with SessionLocal() as db:
+ rows = db.execute(
+ select(ObjectVersionORM)
+ .where(ObjectVersionORM.object_kind == object_kind, ObjectVersionORM.object_id == object_id)
+ .order_by(ObjectVersionORM.version_number.desc())
+ ).scalars().all()
+ return [{
+ "version_id": r.id,
+ "object_kind": r.object_kind,
+ "object_id": r.object_id,
+ "version_number": r.version_number,
+ "payload": json.loads(r.payload_json or "{}"),
+ "editor_id": r.editor_id,
+ "note": r.note,
+ "created_at": r.created_at,
+ } for r in rows]
+
+def edit_pack_patch(patch_id: int, payload: dict, editor_id: int, note: str):
+ with SessionLocal() as db:
+ row = db.get(PackPatchProposalORM, patch_id)
+ if row is None: return None
+ if "title" in payload: row.title = payload["title"]
+ if "proposed_change" in payload: row.proposed_change_json = json.dumps(payload["proposed_change"])
+ if "reviewer_notes" in payload: row.reviewer_notes = payload["reviewer_notes"]
+ if "status" in payload: row.status = payload["status"]
+ row.current_version += 1
+ db.commit()
+ db.refresh(row)
+ _create_version("pack_patch", patch_id, row.current_version, {
+ "title": row.title,
+ "proposed_change": json.loads(row.proposed_change_json or "{}"),
+ "reviewer_notes": row.reviewer_notes,
+ "status": row.status,
+ }, editor_id, note)
+ return row
+
+def edit_curriculum_draft(draft_id: int, payload: dict, editor_id: int, note: str):
+ with SessionLocal() as db:
+ row = db.get(CurriculumDraftORM, draft_id)
+ if row is None: return None
+ if "topic_focus" in payload: row.topic_focus = payload["topic_focus"]
+ if "content_markdown" in payload: row.content_markdown = payload["content_markdown"]
+ if "editorial_notes" in payload: row.editorial_notes = payload["editorial_notes"]
+ if "status" in payload: row.status = payload["status"]
+ row.current_version += 1
+ db.commit()
+ db.refresh(row)
+ _create_version("curriculum_draft", draft_id, row.current_version, {
+ "topic_focus": row.topic_focus,
+ "content_markdown": row.content_markdown,
+ "editorial_notes": row.editorial_notes,
+ "status": row.status,
+ }, editor_id, note)
+ return row
+
+def edit_skill_bundle(bundle_id: int, payload: dict, editor_id: int, note: str):
+ with SessionLocal() as db:
+ row = db.get(SkillBundleORM, bundle_id)
+ if row is None: return None
+ if "skill_name" in payload: row.skill_name = payload["skill_name"]
+ if "prerequisites" in payload: row.prerequisites_json = json.dumps(payload["prerequisites"])
+ if "expected_inputs" in payload: row.expected_inputs_json = json.dumps(payload["expected_inputs"])
+ if "failure_modes" in payload: row.failure_modes_json = json.dumps(payload["failure_modes"])
+ if "validation_checks" in payload: row.validation_checks_json = json.dumps(payload["validation_checks"])
+ if "canonical_examples" in payload: row.canonical_examples_json = json.dumps(payload["canonical_examples"])
+ if "status" in payload: row.status = payload["status"]
+ row.current_version += 1
+ db.commit()
+ db.refresh(row)
+ _create_version("skill_bundle", bundle_id, row.current_version, {
+ "skill_name": row.skill_name,
+ "prerequisites": json.loads(row.prerequisites_json or "[]"),
+ "expected_inputs": json.loads(row.expected_inputs_json or "[]"),
+ "failure_modes": json.loads(row.failure_modes_json or "[]"),
+ "validation_checks": json.loads(row.validation_checks_json or "[]"),
+ "canonical_examples": json.loads(row.canonical_examples_json or "[]"),
+ "status": row.status,
+ }, editor_id, note)
+ return row
+
+def apply_pack_patch(patch_id: int, editor_id: int, note: str):
+ with SessionLocal() as db:
+ patch = db.get(PackPatchProposalORM, patch_id)
+ if patch is None: return None
+ pack = db.get(PackORM, patch.pack_id)
+ if pack is None: return None
+ pack_data = json.loads(pack.data_json or "{}")
+ proposed = json.loads(patch.proposed_change_json or "{}")
+ pack_data.setdefault("applied_patches", []).append({
+ "patch_id": patch.id,
+ "title": patch.title,
+ "proposed_change": proposed,
+ "applied_at": now_iso(),
+ })
+ if "affected_concept" in proposed and "suggested_prereq" in proposed:
+ for concept in pack_data.get("concepts", []):
+ if concept.get("id") == proposed["affected_concept"]:
+ prereqs = concept.setdefault("prerequisites", [])
+ if proposed["suggested_prereq"] not in prereqs:
+ prereqs.append(proposed["suggested_prereq"])
+ pack.data_json = json.dumps(pack_data)
+ patch.status = "applied"
+ db.commit()
+ db.refresh(patch)
+ _create_version("pack_patch", patch_id, patch.current_version, {
+ "title": patch.title,
+ "proposed_change": json.loads(patch.proposed_change_json or "{}"),
+ "status": patch.status,
+ }, editor_id, note)
+ return patch
+
+def export_curriculum_draft(draft_id: int):
+ draft = get_curriculum_draft(draft_id)
+ if draft is None: return None
+ return {
+ "markdown": draft["content_markdown"],
+ "json": json.dumps(draft, indent=2)
+ }
+
+def export_skill_bundle(bundle_id: int):
+ import yaml
+ bundle = get_skill_bundle(bundle_id)
+ if bundle is None: return None
+ return {
+ "json": json.dumps(bundle, indent=2),
+ "yaml": yaml.safe_dump(bundle, sort_keys=False)
+ }
+
+def create_synthesis_candidate(source_concept_id, target_concept_id, source_pack_id, target_pack_id, synthesis_kind, score_semantic, score_structural, score_trajectory, score_review_history, explanation, evidence):
+ score_total = 0.35 * score_semantic + 0.25 * score_structural + 0.20 * score_trajectory + 0.10 * score_review_history + 0.10 * evidence.get("novelty", 0.0)
+ with SessionLocal() as db:
+ row = SynthesisCandidateORM(
+ source_concept_id=source_concept_id, target_concept_id=target_concept_id,
+ source_pack_id=source_pack_id, target_pack_id=target_pack_id,
+ synthesis_kind=synthesis_kind, score_total=score_total,
+ score_semantic=score_semantic, score_structural=score_structural,
+ score_trajectory=score_trajectory, score_review_history=score_review_history,
+ explanation=explanation, evidence_json=json.dumps(evidence),
+ current_status="proposed", created_at=now_iso(),
+ )
+ db.add(row); db.commit(); db.refresh(row); return row.id
+
+def list_synthesis_candidates():
+ with SessionLocal() as db:
+ rows = db.execute(select(SynthesisCandidateORM).order_by(SynthesisCandidateORM.score_total.desc(), SynthesisCandidateORM.id.desc())).scalars().all()
+ return [{
+ "synthesis_id": r.id, "source_concept_id": r.source_concept_id, "target_concept_id": r.target_concept_id,
+ "source_pack_id": r.source_pack_id, "target_pack_id": r.target_pack_id, "synthesis_kind": r.synthesis_kind,
+ "score_total": r.score_total, "score_semantic": r.score_semantic, "score_structural": r.score_structural,
+ "score_trajectory": r.score_trajectory, "score_review_history": r.score_review_history,
+ "explanation": r.explanation, "evidence": json.loads(r.evidence_json or "{}"),
+ "current_status": r.current_status, "created_at": r.created_at,
+ } for r in rows]
+
+def get_synthesis_candidate(synthesis_id: int):
+ with SessionLocal() as db:
+ r = db.get(SynthesisCandidateORM, synthesis_id)
+ if r is None: return None
+ return {
+ "synthesis_id": r.id, "source_concept_id": r.source_concept_id, "target_concept_id": r.target_concept_id,
+ "source_pack_id": r.source_pack_id, "target_pack_id": r.target_pack_id, "synthesis_kind": r.synthesis_kind,
+ "score_total": r.score_total, "score_semantic": r.score_semantic, "score_structural": r.score_structural,
+ "score_trajectory": r.score_trajectory, "score_review_history": r.score_review_history,
+ "explanation": r.explanation, "evidence": json.loads(r.evidence_json or "{}"),
+ "current_status": r.current_status, "created_at": r.created_at,
+ }
diff --git a/src/didactopus/seed.py b/src/didactopus/seed.py
index bdc7b86..7541e63 100644
--- a/src/didactopus/seed.py
+++ b/src/didactopus/seed.py
@@ -1,34 +1,53 @@
from __future__ import annotations
+import json
from sqlalchemy import select
from .db import Base, engine, SessionLocal
-from .orm import UserORM
+from .orm import UserORM, PackORM
from .auth import hash_password
-from .repository import upsert_pack, create_learner
-from .models import PackData, PackConcept, GraphPosition, CrossPackLink
def main():
Base.metadata.create_all(bind=engine)
with SessionLocal() as db:
if db.execute(select(UserORM).where(UserORM.username == "wesley")).scalar_one_or_none() is None:
db.add(UserORM(username="wesley", password_hash=hash_password("demo-pass"), role="admin", is_active=True))
+ if db.execute(select(UserORM).where(UserORM.username == "reviewer")).scalar_one_or_none() is None:
+ db.add(UserORM(username="reviewer", password_hash=hash_password("demo-pass"), role="reviewer", is_active=True))
+ if db.get(PackORM, "biology-pack") is None:
+ db.add(PackORM(
+ id="biology-pack",
+ owner_user_id=1,
+ policy_lane="personal",
+ title="Biology Pack",
+ subtitle="Core biology concepts",
+ level="novice-friendly",
+ is_published=True,
+ data_json=json.dumps({
+ "id": "biology-pack",
+ "title": "Biology Pack",
+ "concepts": [
+ {"id": "selection", "title": "Natural Selection", "prerequisites": ["variation"]},
+ {"id": "variation", "title": "Variation", "prerequisites": []},
+ {"id": "drift", "title": "Genetic Drift", "prerequisites": ["variation"]}
+ ]
+ })
+ ))
+ if db.get(PackORM, "math-pack") is None:
+ db.add(PackORM(
+ id="math-pack",
+ owner_user_id=1,
+ policy_lane="personal",
+ title="Math Pack",
+ subtitle="Core math concepts",
+ level="novice-friendly",
+ is_published=True,
+ data_json=json.dumps({
+ "id": "math-pack",
+ "title": "Math Pack",
+ "concepts": [
+ {"id": "random_walk", "title": "Random Walk", "prerequisites": ["variation"]},
+ {"id": "variation", "title": "Variation in Models", "prerequisites": []},
+ {"id": "optimization", "title": "Optimization", "prerequisites": []}
+ ]
+ })
+ ))
db.commit()
- create_learner(1, "wesley-learner", "Wesley learner")
- upsert_pack(
- PackData(
- id="wesley-private-pack",
- title="Wesley Private Pack",
- subtitle="Personal pack example.",
- level="novice-friendly",
- concepts=[
- PackConcept(id="intro", title="Intro", prerequisites=[], position=GraphPosition(x=150, y=120)),
- PackConcept(id="second", title="Second concept", prerequisites=["intro"], position=GraphPosition(x=420, y=120)),
- PackConcept(id="third", title="Third concept", prerequisites=["second"], position=GraphPosition(x=700, y=120), cross_pack_links=[CrossPackLink(source_concept_id="third", target_pack_id="advanced-pack", target_concept_id="adv-1", relationship="next_pack")]),
- PackConcept(id="branch", title="Branch concept", prerequisites=["intro"], position=GraphPosition(x=420, y=320)),
- ],
- onboarding={"headline":"Start privately"},
- compliance={}
- ),
- submitted_by_user_id=1,
- policy_lane="personal",
- is_published=True,
- )
diff --git a/src/didactopus/synthesis.py b/src/didactopus/synthesis.py
index a980f75..feced7a 100644
--- a/src/didactopus/synthesis.py
+++ b/src/didactopus/synthesis.py
@@ -14,57 +14,35 @@ def _norm(text: str) -> set[str]:
def _semantic_similarity(a: dict, b: dict) -> float:
sa = _norm(a.get("title", "")) | _norm(" ".join(a.get("prerequisites", [])))
sb = _norm(b.get("title", "")) | _norm(" ".join(b.get("prerequisites", [])))
- if not sa or not sb:
- return 0.0
+ if not sa or not sb: return 0.0
return len(sa & sb) / len(sa | sb)
def _structural_similarity(a: dict, b: dict) -> float:
- pa = set(a.get("prerequisites", []))
- pb = set(b.get("prerequisites", []))
- if not pa and not pb:
- return 0.6
- if not pa or not pb:
- return 0.2
+ pa = set(a.get("prerequisites", [])); pb = set(b.get("prerequisites", []))
+ if not pa and not pb: return 0.6
+ if not pa or not pb: return 0.2
return len(pa & pb) / len(pa | pb)
def generate_synthesis_candidates(source_pack_id: str | None = None, target_pack_id: str | None = None, limit: int = 20):
- packs = list_packs()
- by_id = {p.id: p for p in packs}
+ packs = list_packs(); by_id = {p.id: p for p in packs}
source_packs = [by_id[source_pack_id]] if source_pack_id and source_pack_id in by_id else packs
target_packs = [by_id[target_pack_id]] if target_pack_id and target_pack_id in by_id else packs
-
- created = []
- seen = set()
+ created = []; seen = set()
for sp in source_packs:
for tp in target_packs:
- if sp.id == tp.id:
- continue
+ if sp.id == tp.id: continue
for ca in _concepts(sp):
for cb in _concepts(tp):
- sem = _semantic_similarity(ca, cb)
- struct = _structural_similarity(ca, cb)
- traj = 0.4
- review_prior = 0.5
- novelty = 1.0 if (ca.get("id"), cb.get("id")) not in seen else 0.0
+ sem = _semantic_similarity(ca, cb); struct = _structural_similarity(ca, cb)
+ traj = 0.4; review_prior = 0.5; novelty = 1.0 if (ca.get("id"), cb.get("id")) not in seen else 0.0
total = 0.35 * sem + 0.25 * struct + 0.20 * traj + 0.10 * review_prior + 0.10 * novelty
- if total < 0.45:
- continue
- explanation = f"Possible cross-pack overlap between '{ca.get('title')}' and '{cb.get('title')}'."
+ if total < 0.45: continue
sid = create_synthesis_candidate(
- source_concept_id=ca.get("id", ""),
- target_concept_id=cb.get("id", ""),
- source_pack_id=sp.id,
- target_pack_id=tp.id,
- synthesis_kind="cross_pack_similarity",
- score_semantic=sem,
- score_structural=struct,
- score_trajectory=traj,
- score_review_history=review_prior,
- explanation=explanation,
- evidence={"novelty": novelty, "source_title": ca.get("title"), "target_title": cb.get("title")},
+ ca.get("id", ""), cb.get("id", ""), sp.id, tp.id, "cross_pack_similarity",
+ sem, struct, traj, review_prior,
+ f"Possible cross-pack overlap between '{ca.get('title')}' and '{cb.get('title')}'.",
+ {"novelty": novelty, "source_title": ca.get("title"), "target_title": cb.get("title")}
)
- seen.add((ca.get("id"), cb.get("id")))
- created.append(sid)
- if len(created) >= limit:
- return created
+ seen.add((ca.get("id"), cb.get("id"))); created.append(sid)
+ if len(created) >= limit: return created
return created
diff --git a/tests/test_scaffold_files.py b/tests/test_scaffold_files.py
index a8a1f89..8e2954a 100644
--- a/tests/test_scaffold_files.py
+++ b/tests/test_scaffold_files.py
@@ -3,6 +3,5 @@ from pathlib import Path
def test_scaffold_files_exist():
assert Path("src/didactopus/api.py").exists()
assert Path("src/didactopus/repository.py").exists()
- assert Path("src/didactopus/worker.py").exists()
- assert Path("src/didactopus/knowledge_export.py").exists()
- assert Path("FAQ.md").exists()
+ assert Path("src/didactopus/orm.py").exists()
+ assert Path("webui/src/App.jsx").exists()
diff --git a/webui/index.html b/webui/index.html
index bce756b..5520378 100644
--- a/webui/index.html
+++ b/webui/index.html
@@ -3,7 +3,7 @@
- Didactopus Artifact Lifecycle
+ Didactopus Object Versioning
diff --git a/webui/package.json b/webui/package.json
index be233a0..698831a 100644
--- a/webui/package.json
+++ b/webui/package.json
@@ -1,5 +1,5 @@
{
- "name": "didactopus-artifact-lifecycle-ui",
+ "name": "didactopus-object-versioning-ui",
"private": true,
"version": "0.1.0",
"type": "module",
diff --git a/webui/src/App.jsx b/webui/src/App.jsx
index 7b8824a..541814d 100644
--- a/webui/src/App.jsx
+++ b/webui/src/App.jsx
@@ -1,120 +1,153 @@
import React, { useEffect, useState } from "react";
-import { login, refresh, fetchPacks, fetchLearnerState, putLearnerState, createRenderJob, listRenderJobs, listArtifacts, updateRetention, exportKnowledge } from "./api";
-import { loadAuth, saveAuth, clearAuth } from "./authStore";
+import { login, createCandidate, promoteCandidate, listPackPatches, listCurriculumDrafts, listSkillBundles, editPatch, applyPatch, editCurriculum, editSkill, listVersions, exportCurriculum, exportSkill } from "./api";
function LoginView({ onAuth }) {
- const [username, setUsername] = useState("wesley");
+ const [username, setUsername] = useState("reviewer");
const [password, setPassword] = useState("demo-pass");
const [error, setError] = useState("");
async function doLogin() {
- try {
- const result = await login(username, password);
- saveAuth(result);
- onAuth(result);
- } catch { setError("Login failed"); }
+ try { onAuth(await login(username, password)); }
+ catch { setError("Login failed"); }
}
return (
-
+
);
}
export default function App() {
- const [auth, setAuth] = useState(loadAuth());
- const [packs, setPacks] = useState([]);
- const [learnerId] = useState("wesley-learner");
- const [packId, setPackId] = useState("");
- const [jobs, setJobs] = useState([]);
- const [artifacts, setArtifacts] = useState([]);
- const [knowledge, setKnowledge] = useState(null);
- const [format, setFormat] = useState("gif");
- const [fps, setFps] = useState(2);
+ const [auth, setAuth] = useState(null);
+ const [patches, setPatches] = useState([]);
+ const [drafts, setDrafts] = useState([]);
+ const [skills, setSkills] = useState([]);
+ const [versions, setVersions] = useState([]);
+ const [exports, setExports] = useState({});
const [message, setMessage] = useState("");
- async function refreshAuthToken() {
- if (!auth?.refresh_token) return null;
- try {
- const result = await refresh(auth.refresh_token);
- saveAuth(result);
- setAuth(result);
- return result;
- } catch {
- clearAuth();
- setAuth(null);
- return null;
- }
+ async function reload(token = auth?.access_token) {
+ if (!token) return;
+ const [p, d, s] = await Promise.all([listPackPatches(token), listCurriculumDrafts(token), listSkillBundles(token)]);
+ setPatches(p); setDrafts(d); setSkills(s);
}
- async function guarded(fn) {
- try { return await fn(auth.access_token); }
- catch {
- const next = await refreshAuthToken();
- if (!next) throw new Error("auth failed");
- return await fn(next.access_token);
- }
+ useEffect(() => { if (auth?.access_token) reload(auth.access_token); }, [auth]);
+
+ async function seedAll() {
+ const candidate = await createCandidate(auth.access_token, {
+ source_type: "learner_export",
+ learner_id: "wesley-learner",
+ pack_id: "biology-pack",
+ candidate_kind: "hidden_prerequisite",
+ title: "Probability intuition before drift",
+ summary: "Learner evidence suggests drift is easier after random-process intuition.",
+ structured_payload: {
+ affected_concept: "drift",
+ suggested_prereq: "random_walk",
+ source_concepts: ["drift", "variation"],
+ prerequisites: ["variation", "random_walk"],
+ expected_inputs: ["text", "example"],
+ failure_modes: ["treating drift as directional"],
+ validation_checks: ["explains stochastic change"],
+ canonical_examples: ["coin-flip drift example"]
+ },
+ evidence_summary: "Repeated learner confusion with stochastic interpretation.",
+ confidence_hint: 0.8,
+ novelty_score: 0.7,
+ synthesis_score: 0.6,
+ triage_lane: "pack_improvement"
+ });
+ const candidateId = candidate.candidate_id;
+ await promoteCandidate(auth.access_token, candidateId, { promotion_target: "pack_improvement", target_object_id: "", promotion_status: "approved" });
+
+ const c2 = await createCandidate(auth.access_token, {
+ source_type: "learner_export",
+ learner_id: "wesley-learner",
+ pack_id: "biology-pack",
+ candidate_kind: "lesson_outline",
+ title: "Intro lesson on stochastic evolutionary change",
+ summary: "A lesson framing drift through random processes.",
+ structured_payload: { source_concepts: ["drift", "variation", "random_walk"] },
+ evidence_summary: "Good bridge opportunity for cross-pack synthesis.",
+ confidence_hint: 0.72,
+ novelty_score: 0.6,
+ synthesis_score: 0.75,
+ triage_lane: "curriculum_draft"
+ });
+ await promoteCandidate(auth.access_token, c2.candidate_id, { promotion_target: "curriculum_draft", target_object_id: "", promotion_status: "approved" });
+
+ const c3 = await createCandidate(auth.access_token, {
+ source_type: "learner_export",
+ learner_id: "wesley-learner",
+ pack_id: "biology-pack",
+ candidate_kind: "skill_bundle_candidate",
+ title: "Explain stochastic biological change",
+ summary: "Skill for recognizing and explaining stochastic population change.",
+ structured_payload: {
+ prerequisites: ["variation", "random_walk"],
+ expected_inputs: ["question", "scenario"],
+ failure_modes: ["teleological explanation"],
+ validation_checks: ["distinguishes drift from selection"],
+ canonical_examples: ["small population allele frequency drift"]
+ },
+ evidence_summary: "Could be reusable as an agent skill.",
+ confidence_hint: 0.74,
+ novelty_score: 0.58,
+ synthesis_score: 0.71,
+ triage_lane: "reusable_skill_bundle"
+ });
+ await promoteCandidate(auth.access_token, c3.candidate_id, { promotion_target: "reusable_skill_bundle", target_object_id: "", promotion_status: "approved" });
+
+ await reload();
+ setMessage("Seeded patch, curriculum draft, and skill bundle.");
}
- async function reloadLists() {
- setJobs(await guarded((token) => listRenderJobs(token, learnerId)));
- setArtifacts(await guarded((token) => listArtifacts(token, learnerId)));
+ async function inspectVersions(kind, id) {
+ const data = await listVersions(auth.access_token, kind, id);
+ setVersions(data);
}
- useEffect(() => {
- if (!auth) return;
- async function load() {
- const p = await guarded((token) => fetchPacks(token));
- setPacks(p);
- setPackId(p[0]?.id || "");
- await reloadLists();
- }
- load();
- }, [auth]);
-
- async function generateDemo() {
- let state = await guarded((token) => fetchLearnerState(token, learnerId));
- const base = Date.now();
- const events = [
- ["intro", 0.30, "exercise", 0],
- ["intro", 0.78, "review", 1000],
- ["second", 0.42, "exercise", 2000],
- ["second", 0.72, "review", 3000],
- ["third", 0.25, "exercise", 4000],
- ["branch", 0.60, "exercise", 5000],
- ];
- const latest = {};
- for (const [cid, score, kind, offset] of events) {
- const ts = new Date(base + offset).toISOString();
- state.history.push({ concept_id: cid, dimension: "mastery", score, confidence_hint: 0.6, timestamp: ts, kind, source_id: `demo-${cid}-${offset}` });
- latest[cid] = { concept_id: cid, dimension: "mastery", score, confidence: Math.min(0.9, score), evidence_count: (latest[cid]?.evidence_count || 0) + 1, last_updated: ts };
- }
- state.records = Object.values(latest);
- await guarded((token) => putLearnerState(token, learnerId, state));
- setMessage("Demo state generated.");
+ async function revisePatch(id) {
+ await editPatch(auth.access_token, id, {
+ payload: { reviewer_notes: "Elevated priority after synthesis review.", status: "approved" },
+ note: "Reviewer note update"
+ });
+ await reload();
}
- async function createJob() {
- const result = await guarded((token) => createRenderJob(token, learnerId, packId, { learner_id: learnerId, pack_id: packId, format, fps, theme: "default", retention_class: "standard", retention_days: 30 }));
- setMessage(`Render job ${result.job_id} queued.`);
- setTimeout(() => reloadLists(), 500);
+ async function applySelectedPatch(id) {
+ await applyPatch(auth.access_token, id, { note: "Merged into pack JSON" });
+ await reload();
}
- async function changeRetention(artifactId) {
- await guarded((token) => updateRetention(token, artifactId, { retention_class: "archive", retention_days: 365 }));
- await reloadLists();
- setMessage(`Artifact ${artifactId} retention updated.`);
+ async function reviseDraft(id) {
+ await editCurriculum(auth.access_token, id, {
+ payload: { editorial_notes: "Add random-walk bridge example.", status: "editorial_review" },
+ note: "Editorial refinement"
+ });
+ await reload();
}
- async function runKnowledgeExport() {
- const result = await guarded((token) => exportKnowledge(token, learnerId, packId, { learner_id: learnerId, pack_id: packId, export_kind: "knowledge_snapshot" }));
- setKnowledge(result);
- setMessage("Knowledge export generated.");
+ async function reviseSkill(id) {
+ await editSkill(auth.access_token, id, {
+ payload: { status: "validation", validation_checks: ["distinguishes drift from selection", "uses stochastic terminology correctly"] },
+ note: "Validation criteria strengthened"
+ });
+ await reload();
+ }
+
+ async function doExportDraft(id) {
+ const out = await exportCurriculum(auth.access_token, id);
+ setExports(prev => ({ ...prev, ["draft:"+id]: out }));
+ }
+
+ async function doExportSkill(id) {
+ const out = await exportSkill(auth.access_token, id);
+ setExports(prev => ({ ...prev, ["skill:"+id]: out }));
}
  if (!auth) return <LoginView onAuth={setAuth} />;
@@ -123,46 +156,65 @@ export default function App() {
-
Didactopus artifact lifecycle + knowledge export
-
Manage artifact retention and turn learner state into reusable knowledge outputs.
+
Object editing, versioning, apply, and export
+
Promoted objects can now be revised, versioned, merged into packs, and exported in reusable formats.
{message}
-
-
-
-
-
-
-
-
-
+
+
+
-
-
- Render jobs
- {JSON.stringify(jobs, null, 2)}
+
+
+ Pack patches
+
+ {patches.map(p => (
+
+
{p.title}
+
v{p.current_version} · {p.status}
+
{JSON.stringify(p.proposed_change, null, 2)}
+
+
+
+
+ ))}
+
-
- Artifacts
- {JSON.stringify(artifacts, null, 2)}
- {artifacts[0] ? : null}
+
+ Curriculum drafts
+
+ {drafts.map(d => (
+
+
{d.topic_focus}
+
v{d.current_version} · {d.status}
+
{d.content_markdown}
+
+
+
+ {exports["draft:"+d.draft_id] ?
{JSON.stringify(exports["draft:"+d.draft_id], null, 2)} : null}
+
+ ))}
+
Skill bundles
+ {skills.map(s => (
+
+
{s.skill_name}
+
v{s.current_version} · {s.status}
+
{JSON.stringify(s, null, 2)}
+
+
+
+ {exports["skill:"+s.skill_bundle_id] ?
{JSON.stringify(exports["skill:"+s.skill_bundle_id], null, 2)} : null}
+
+ ))}
+
-
- Knowledge export
- {JSON.stringify(knowledge, null, 2)}
+
+ Version history
+
+
{JSON.stringify(versions, null, 2)}
+
diff --git a/webui/src/api.js b/webui/src/api.js
index 71816cb..5aa6dcb 100644
--- a/webui/src/api.js
+++ b/webui/src/api.js
@@ -1,26 +1,71 @@
const API = "http://127.0.0.1:8011/api";
-
function authHeaders(token, json=true) {
const h = { Authorization: `Bearer ${token}` };
if (json) h["Content-Type"] = "application/json";
return h;
}
-
export async function login(username, password) {
- const res = await fetch(`${API}/login`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ username, password }) });
+ const res = await fetch(`${API}/login`, { method: "POST", headers: {"Content-Type": "application/json"}, body: JSON.stringify({ username, password })});
if (!res.ok) throw new Error("login failed");
return await res.json();
}
-export async function refresh(refreshToken) {
- const res = await fetch(`${API}/refresh`, { method: "POST", headers: { "Content-Type": "application/json" }, body: JSON.stringify({ refresh_token: refreshToken }) });
- if (!res.ok) throw new Error("refresh failed");
+export async function createCandidate(token, payload) {
+ const res = await fetch(`${API}/knowledge-candidates`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) });
+ if (!res.ok) throw new Error("createCandidate failed");
+ return await res.json();
+}
+export async function promoteCandidate(token, candidateId, payload) {
+ const res = await fetch(`${API}/knowledge-candidates/${candidateId}/promote`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) });
+ if (!res.ok) throw new Error("promoteCandidate failed");
+ return await res.json();
+}
+export async function listPackPatches(token) {
+ const res = await fetch(`${API}/pack-patches`, { headers: authHeaders(token, false) });
+ if (!res.ok) throw new Error("listPackPatches failed");
+ return await res.json();
+}
+export async function listCurriculumDrafts(token) {
+ const res = await fetch(`${API}/curriculum-drafts`, { headers: authHeaders(token, false) });
+ if (!res.ok) throw new Error("listCurriculumDrafts failed");
+ return await res.json();
+}
+export async function listSkillBundles(token) {
+ const res = await fetch(`${API}/skill-bundles`, { headers: authHeaders(token, false) });
+ if (!res.ok) throw new Error("listSkillBundles failed");
+ return await res.json();
+}
+export async function editPatch(token, patchId, payload) {
+ const res = await fetch(`${API}/pack-patches/${patchId}/edit`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) });
+ if (!res.ok) throw new Error("editPatch failed");
+ return await res.json();
+}
+export async function applyPatch(token, patchId, payload) {
+ const res = await fetch(`${API}/pack-patches/${patchId}/apply`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) });
+ if (!res.ok) throw new Error("applyPatch failed");
+ return await res.json();
+}
+export async function editCurriculum(token, draftId, payload) {
+ const res = await fetch(`${API}/curriculum-drafts/${draftId}/edit`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) });
+ if (!res.ok) throw new Error("editCurriculum failed");
+ return await res.json();
+}
+export async function editSkill(token, bundleId, payload) {
+ const res = await fetch(`${API}/skill-bundles/${bundleId}/edit`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) });
+ if (!res.ok) throw new Error("editSkill failed");
+ return await res.json();
+}
+export async function listVersions(token, objectKind, objectId) {
+ const res = await fetch(`${API}/object-versions/${objectKind}/${objectId}`, { headers: authHeaders(token, false) });
+ if (!res.ok) throw new Error("listVersions failed");
+ return await res.json();
+}
+export async function exportCurriculum(token, draftId) {
+ const res = await fetch(`${API}/curriculum-drafts/${draftId}/export`, { headers: authHeaders(token, false) });
+ if (!res.ok) throw new Error("exportCurriculum failed");
+ return await res.json();
+}
+export async function exportSkill(token, bundleId) {
+ const res = await fetch(`${API}/skill-bundles/${bundleId}/export`, { headers: authHeaders(token, false) });
+ if (!res.ok) throw new Error("exportSkill failed");
return await res.json();
}
-export async function fetchPacks(token) { const res = await fetch(`${API}/packs`, { headers: authHeaders(token, false) }); if (!res.ok) throw new Error("fetchPacks failed"); return await res.json(); }
-export async function fetchLearnerState(token, learnerId) { const res = await fetch(`${API}/learners/${learnerId}/state`, { headers: authHeaders(token, false) }); if (!res.ok) throw new Error("fetchLearnerState failed"); return await res.json(); }
-export async function putLearnerState(token, learnerId, state) { const res = await fetch(`${API}/learners/${learnerId}/state`, { method: "PUT", headers: authHeaders(token), body: JSON.stringify(state) }); if (!res.ok) throw new Error("putLearnerState failed"); return await res.json(); }
-export async function createRenderJob(token, learnerId, packId, payload) { const res = await fetch(`${API}/learners/${learnerId}/render-jobs/${packId}`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) }); if (!res.ok) throw new Error("createRenderJob failed"); return await res.json(); }
-export async function listRenderJobs(token, learnerId) { const res = await fetch(`${API}/render-jobs?learner_id=${encodeURIComponent(learnerId)}`, { headers: authHeaders(token, false) }); if (!res.ok) throw new Error("listRenderJobs failed"); return await res.json(); }
-export async function listArtifacts(token, learnerId) { const res = await fetch(`${API}/artifacts?learner_id=${encodeURIComponent(learnerId)}`, { headers: authHeaders(token, false) }); if (!res.ok) throw new Error("listArtifacts failed"); return await res.json(); }
-export async function updateRetention(token, artifactId, payload) { const res = await fetch(`${API}/artifacts/${artifactId}/retention`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) }); if (!res.ok) throw new Error("updateRetention failed"); return await res.json(); }
-export async function exportKnowledge(token, learnerId, packId, payload) { const res = await fetch(`${API}/learners/${learnerId}/knowledge-export/${packId}`, { method: "POST", headers: authHeaders(token), body: JSON.stringify(payload) }); if (!res.ok) throw new Error("exportKnowledge failed"); return await res.json(); }
diff --git a/webui/src/styles.css b/webui/src/styles.css
index e2085a0..df0b5ce 100644
--- a/webui/src/styles.css
+++ b/webui/src/styles.css
@@ -1,23 +1,24 @@
:root {
- --bg:#f6f8fb; --card:#ffffff; --text:#1f2430; --muted:#60697a; --border:#dbe1ea; --accent:#2d6cdf;
+ --bg:#f6f8fb; --card:#ffffff; --text:#1f2430; --muted:#5f6b7a; --border:#dbe1ea; --accent:#2d6cdf;
}
* { box-sizing:border-box; }
-body { margin:0; font-family:Arial, Helvetica, sans-serif; background:var(--bg); color:var(--text); }
-.page { max-width:1600px; margin:0 auto; padding:24px; }
-.narrow-page { max-width:520px; }
-.hero { background:var(--card); border:1px solid var(--border); border-radius:22px; padding:24px; display:flex; justify-content:space-between; gap:16px; align-items:flex-start; }
-.controls { display:flex; gap:10px; align-items:flex-end; flex-wrap:wrap; }
-label { display:block; font-weight:600; }
-input, select { width:100%; margin-top:6px; border:1px solid var(--border); border-radius:10px; padding:10px; font:inherit; background:white; }
-button { border:1px solid var(--border); background:white; border-radius:12px; padding:10px 14px; cursor:pointer; }
-.card { background:var(--card); border:1px solid var(--border); border-radius:18px; padding:18px; }
-.narrow { margin-top:60px; }
-.layout { display:grid; gap:16px; }
-.threecol { grid-template-columns:1fr 1fr 1fr; }
-.prebox { background:#f7f8fa; border:1px solid var(--border); border-radius:12px; padding:12px; overflow:auto; max-height:460px; }
+body { margin:0; background:var(--bg); color:var(--text); font-family:Arial, Helvetica, sans-serif; }
+.page { max-width:1700px; margin:0 auto; padding:24px; }
+.narrow { max-width:520px; }
+.hero, .card { background:var(--card); border:1px solid var(--border); border-radius:18px; padding:18px; }
+.hero { display:flex; justify-content:space-between; gap:16px; margin-bottom:18px; }
+.grid3 { display:grid; grid-template-columns:1fr 1.2fr 1fr; gap:18px; }
+.stack { display:grid; gap:14px; }
+.card.small h3 { margin-top:0; }
+label { display:block; font-weight:600; margin-bottom:10px; }
+input { width:100%; margin-top:6px; border:1px solid var(--border); border-radius:10px; padding:10px; font:inherit; background:white; }
+button { border:1px solid var(--border); background:white; border-radius:12px; padding:10px 12px; cursor:pointer; margin-right:8px; margin-top:8px; }
+button.primary { background:var(--accent); color:white; border-color:var(--accent); }
+.toolbar { display:flex; gap:8px; align-items:flex-start; flex-wrap:wrap; }
.muted { color:var(--muted); }
+pre { white-space:pre-wrap; word-break:break-word; font-size:12px; margin:0; }
.error { color:#b42318; margin-top:10px; }
-@media (max-width:1200px) {
+@media (max-width: 1350px) {
+ .grid3 { grid-template-columns:1fr; }
.hero { flex-direction:column; }
- .threecol { grid-template-columns:1fr; }
}