Apply ZIP update: 260-didactopus-object-versioning-and-export-layer.zip [2026-03-14T13:21:09]
This commit is contained in:
parent
8074d094fa
commit
cbeb474b17
|
|
@ -12,14 +12,12 @@ dependencies = [
|
||||||
"uvicorn>=0.30",
|
"uvicorn>=0.30",
|
||||||
"sqlalchemy>=2.0",
|
"sqlalchemy>=2.0",
|
||||||
"passlib[bcrypt]>=1.7",
|
"passlib[bcrypt]>=1.7",
|
||||||
"python-jose[cryptography]>=3.3"
|
"python-jose[cryptography]>=3.3",
|
||||||
|
"pyyaml>=6.0.2"
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.scripts]
|
[project.scripts]
|
||||||
didactopus-api = "didactopus.api:main"
|
didactopus-api = "didactopus.api:main"
|
||||||
didactopus-export-svg = "didactopus.export_svg:main"
|
|
||||||
didactopus-render-bundle = "didactopus.render_bundle:main"
|
|
||||||
didactopus-export-knowledge = "didactopus.knowledge_export:main"
|
|
||||||
|
|
||||||
[tool.setuptools.packages.find]
|
[tool.setuptools.packages.find]
|
||||||
where = ["src"]
|
where = ["src"]
|
||||||
|
|
|
||||||
|
|
@ -1,26 +1,27 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
from fastapi import FastAPI, HTTPException, Header, Depends, BackgroundTasks
|
from fastapi import FastAPI, HTTPException, Header, Depends
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
from fastapi.responses import FileResponse
|
|
||||||
import uvicorn
|
import uvicorn
|
||||||
from datetime import datetime, timedelta, timezone
|
|
||||||
from pathlib import Path
|
|
||||||
from .db import Base, engine
|
from .db import Base, engine
|
||||||
from .models import LoginRequest, RefreshRequest, TokenPair, CreateLearnerRequest, LearnerState, MediaRenderRequest, ArtifactRetentionUpdate, KnowledgeExportRequest
|
from .models import (
|
||||||
|
LoginRequest, TokenPair, KnowledgeCandidateCreate, PromoteRequest,
|
||||||
|
SynthesisRunRequest, SynthesisPromoteRequest, CreateLearnerRequest,
|
||||||
|
ObjectEditRequest, PatchApplyRequest
|
||||||
|
)
|
||||||
from .repository import (
|
from .repository import (
|
||||||
authenticate_user, get_user_by_id, store_refresh_token, refresh_token_active, revoke_refresh_token,
|
authenticate_user, get_user_by_id, create_learner, create_candidate, list_candidates, get_candidate,
|
||||||
list_packs_for_user, get_pack, get_pack_row, create_learner, learner_owned_by_user, load_learner_state, save_learner_state,
|
create_promotion, list_promotions, list_pack_patches, list_curriculum_drafts, list_skill_bundles,
|
||||||
create_render_job, list_render_jobs, list_artifacts, get_artifact, update_artifact_retention, soft_delete_artifact
|
list_synthesis_candidates, get_synthesis_candidate,
|
||||||
|
edit_pack_patch, edit_curriculum_draft, edit_skill_bundle, list_versions,
|
||||||
|
apply_pack_patch, export_curriculum_draft, export_skill_bundle
|
||||||
)
|
)
|
||||||
from .auth import issue_access_token, issue_refresh_token, decode_token, new_token_id
|
from .auth import issue_access_token, issue_refresh_token, decode_token, new_token_id
|
||||||
from .engine import build_graph_frames, stable_layout
|
from .synthesis import generate_synthesis_candidates
|
||||||
from .worker import process_render_job
|
|
||||||
from .knowledge_export import build_knowledge_snapshot
|
|
||||||
|
|
||||||
Base.metadata.create_all(bind=engine)
|
Base.metadata.create_all(bind=engine)
|
||||||
|
app = FastAPI(title="Didactopus Object Versioning and Export API")
|
||||||
app = FastAPI(title="Didactopus API Prototype")
|
|
||||||
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"])
|
app.add_middleware(CORSMiddleware, allow_origins=["*"], allow_credentials=True, allow_methods=["*"], allow_headers=["*"])
|
||||||
|
_refresh_tokens = {}
|
||||||
|
|
||||||
def current_user(authorization: str = Header(default="")):
|
def current_user(authorization: str = Header(default="")):
|
||||||
token = authorization.removeprefix("Bearer ").strip()
|
token = authorization.removeprefix("Bearer ").strip()
|
||||||
|
|
@ -32,164 +33,118 @@ def current_user(authorization: str = Header(default="")):
|
||||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||||
return user
|
return user
|
||||||
|
|
||||||
def ensure_learner_access(user, learner_id: str):
|
def require_reviewer(user = Depends(current_user)):
|
||||||
if user.role == "admin":
|
if user.role not in {"admin", "reviewer"}:
|
||||||
return
|
raise HTTPException(status_code=403, detail="Reviewer role required")
|
||||||
if not learner_owned_by_user(user.id, learner_id):
|
return user
|
||||||
raise HTTPException(status_code=403, detail="Learner not accessible by this user")
|
|
||||||
|
|
||||||
def ensure_pack_access(user, pack_id: str):
|
|
||||||
row = get_pack_row(pack_id)
|
|
||||||
if row is None:
|
|
||||||
raise HTTPException(status_code=404, detail="Pack not found")
|
|
||||||
if user.role == "admin":
|
|
||||||
return row
|
|
||||||
if row.policy_lane == "community":
|
|
||||||
return row
|
|
||||||
if row.owner_user_id == user.id:
|
|
||||||
return row
|
|
||||||
raise HTTPException(status_code=403, detail="Pack not accessible by this user")
|
|
||||||
|
|
||||||
def future_iso(days: int) -> str:
|
|
||||||
return (datetime.now(timezone.utc) + timedelta(days=days)).isoformat()
|
|
||||||
|
|
||||||
@app.post("/api/login", response_model=TokenPair)
|
@app.post("/api/login", response_model=TokenPair)
|
||||||
def login(payload: LoginRequest):
|
def login(payload: LoginRequest):
|
||||||
user = authenticate_user(payload.username, payload.password)
|
user = authenticate_user(payload.username, payload.password)
|
||||||
if user is None:
|
if user is None: raise HTTPException(status_code=401, detail="Invalid credentials")
|
||||||
raise HTTPException(status_code=401, detail="Invalid credentials")
|
token_id = new_token_id(); _refresh_tokens[token_id] = user.id
|
||||||
token_id = new_token_id()
|
return TokenPair(access_token=issue_access_token(user.id, user.username, user.role),
|
||||||
store_refresh_token(user.id, token_id)
|
refresh_token=issue_refresh_token(user.id, user.username, user.role, token_id),
|
||||||
return TokenPair(access_token=issue_access_token(user.id, user.username, user.role), refresh_token=issue_refresh_token(user.id, user.username, user.role, token_id), username=user.username, role=user.role)
|
username=user.username, role=user.role)
|
||||||
|
|
||||||
@app.post("/api/refresh", response_model=TokenPair)
|
|
||||||
def refresh(payload: RefreshRequest):
|
|
||||||
data = decode_token(payload.refresh_token)
|
|
||||||
if not data or data.get("kind") != "refresh":
|
|
||||||
raise HTTPException(status_code=401, detail="Invalid refresh token")
|
|
||||||
token_id = data.get("jti")
|
|
||||||
if not token_id or not refresh_token_active(token_id):
|
|
||||||
raise HTTPException(status_code=401, detail="Refresh token inactive")
|
|
||||||
user = get_user_by_id(int(data["sub"]))
|
|
||||||
if user is None:
|
|
||||||
raise HTTPException(status_code=401, detail="User not found")
|
|
||||||
revoke_refresh_token(token_id)
|
|
||||||
new_jti = new_token_id()
|
|
||||||
store_refresh_token(user.id, new_jti)
|
|
||||||
return TokenPair(access_token=issue_access_token(user.id, user.username, user.role), refresh_token=issue_refresh_token(user.id, user.username, user.role, new_jti), username=user.username, role=user.role)
|
|
||||||
|
|
||||||
@app.get("/api/packs")
|
|
||||||
def api_list_packs(user = Depends(current_user)):
|
|
||||||
return [p.model_dump() for p in list_packs_for_user(user.id, include_unpublished=(user.role == "admin"))]
|
|
||||||
|
|
||||||
@app.post("/api/learners")
|
@app.post("/api/learners")
|
||||||
def api_create_learner(payload: CreateLearnerRequest, user = Depends(current_user)):
|
def api_create_learner(payload: CreateLearnerRequest, user = Depends(current_user)):
|
||||||
create_learner(user.id, payload.learner_id, payload.display_name)
|
create_learner(user.id, payload.learner_id, payload.display_name)
|
||||||
return {"ok": True, "learner_id": payload.learner_id}
|
return {"ok": True, "learner_id": payload.learner_id}
|
||||||
|
|
||||||
@app.get("/api/learners/{learner_id}/state")
|
@app.post("/api/knowledge-candidates")
|
||||||
def api_get_learner_state(learner_id: str, user = Depends(current_user)):
|
def api_create_candidate(payload: KnowledgeCandidateCreate, reviewer = Depends(require_reviewer)):
|
||||||
ensure_learner_access(user, learner_id)
|
return {"candidate_id": create_candidate(payload)}
|
||||||
return load_learner_state(learner_id).model_dump()
|
|
||||||
|
|
||||||
@app.put("/api/learners/{learner_id}/state")
|
@app.get("/api/knowledge-candidates")
|
||||||
def api_put_learner_state(learner_id: str, state: LearnerState, user = Depends(current_user)):
|
def api_list_candidates(reviewer = Depends(require_reviewer)):
|
||||||
ensure_learner_access(user, learner_id)
|
return list_candidates()
|
||||||
if learner_id != state.learner_id:
|
|
||||||
raise HTTPException(status_code=400, detail="Learner ID mismatch")
|
|
||||||
return save_learner_state(state).model_dump()
|
|
||||||
|
|
||||||
@app.get("/api/packs/{pack_id}/layout")
|
@app.post("/api/knowledge-candidates/{candidate_id}/promote")
|
||||||
def api_pack_layout(pack_id: str, user = Depends(current_user)):
|
def api_promote_candidate(candidate_id: int, payload: PromoteRequest, reviewer = Depends(require_reviewer)):
|
||||||
ensure_pack_access(user, pack_id)
|
if get_candidate(candidate_id) is None: raise HTTPException(status_code=404, detail="Candidate not found")
|
||||||
pack = get_pack(pack_id)
|
return {"promotion_id": create_promotion(candidate_id, reviewer.id, payload)}
|
||||||
return {"pack_id": pack_id, "layout": stable_layout(pack)} if pack else {"pack_id": pack_id, "layout": {}}
|
|
||||||
|
|
||||||
@app.get("/api/learners/{learner_id}/graph-animation/{pack_id}")
|
@app.get("/api/promotions")
|
||||||
def api_graph_animation(learner_id: str, pack_id: str, user = Depends(current_user)):
|
def api_list_promotions(reviewer = Depends(require_reviewer)):
|
||||||
ensure_learner_access(user, learner_id)
|
return list_promotions()
|
||||||
ensure_pack_access(user, pack_id)
|
|
||||||
pack = get_pack(pack_id)
|
|
||||||
state = load_learner_state(learner_id)
|
|
||||||
frames = build_graph_frames(state, pack)
|
|
||||||
return {
|
|
||||||
"learner_id": learner_id,
|
|
||||||
"pack_id": pack_id,
|
|
||||||
"pack_title": pack.title if pack else "",
|
|
||||||
"frames": frames,
|
|
||||||
"concepts": [{"id": c.id, "title": c.title, "prerequisites": c.prerequisites, "cross_pack_links": [l.model_dump() for l in c.cross_pack_links]} for c in pack.concepts] if pack else [],
|
|
||||||
}
|
|
||||||
|
|
||||||
@app.post("/api/learners/{learner_id}/render-jobs/{pack_id}")
|
@app.get("/api/pack-patches")
|
||||||
def api_render_job(learner_id: str, pack_id: str, payload: MediaRenderRequest, background_tasks: BackgroundTasks, user = Depends(current_user)):
|
def api_list_pack_patches(reviewer = Depends(require_reviewer)):
|
||||||
ensure_learner_access(user, learner_id)
|
return list_pack_patches()
|
||||||
ensure_pack_access(user, pack_id)
|
|
||||||
pack = get_pack(pack_id)
|
|
||||||
state = load_learner_state(learner_id)
|
|
||||||
animation = {
|
|
||||||
"learner_id": learner_id,
|
|
||||||
"pack_id": pack_id,
|
|
||||||
"pack_title": pack.title if pack else "",
|
|
||||||
"frames": build_graph_frames(state, pack),
|
|
||||||
}
|
|
||||||
job_id = create_render_job(learner_id, pack_id, payload.format, payload.fps, payload.theme)
|
|
||||||
background_tasks.add_task(process_render_job, job_id, learner_id, pack_id, payload.format, payload.fps, payload.theme, payload.retention_class, payload.retention_days, animation)
|
|
||||||
return {"job_id": job_id, "status": "queued"}
|
|
||||||
|
|
||||||
@app.get("/api/render-jobs")
|
@app.get("/api/curriculum-drafts")
|
||||||
def api_list_render_jobs(learner_id: str | None = None, user = Depends(current_user)):
|
def api_list_curriculum_drafts(reviewer = Depends(require_reviewer)):
|
||||||
if learner_id:
|
return list_curriculum_drafts()
|
||||||
ensure_learner_access(user, learner_id)
|
|
||||||
return list_render_jobs(learner_id)
|
|
||||||
|
|
||||||
@app.get("/api/artifacts")
|
@app.get("/api/skill-bundles")
|
||||||
def api_list_artifacts(learner_id: str | None = None, user = Depends(current_user)):
|
def api_list_skill_bundles(reviewer = Depends(require_reviewer)):
|
||||||
if learner_id:
|
return list_skill_bundles()
|
||||||
ensure_learner_access(user, learner_id)
|
|
||||||
return list_artifacts(learner_id)
|
|
||||||
|
|
||||||
@app.get("/api/artifacts/{artifact_id}/download")
|
@app.post("/api/pack-patches/{patch_id}/edit")
|
||||||
def api_download_artifact(artifact_id: int, user = Depends(current_user)):
|
def api_edit_patch(patch_id: int, payload: ObjectEditRequest, reviewer = Depends(require_reviewer)):
|
||||||
artifact = get_artifact(artifact_id)
|
row = edit_pack_patch(patch_id, payload.payload, reviewer.id, payload.note)
|
||||||
if artifact is None or artifact.is_deleted:
|
if row is None: raise HTTPException(status_code=404, detail="Patch not found")
|
||||||
raise HTTPException(status_code=404, detail="Artifact not found")
|
return {"patch_id": row.id, "current_version": row.current_version}
|
||||||
ensure_learner_access(user, artifact.learner_id)
|
|
||||||
path = Path(artifact.path)
|
|
||||||
if not path.exists():
|
|
||||||
raise HTTPException(status_code=404, detail="Artifact path missing")
|
|
||||||
if path.is_dir():
|
|
||||||
manifest = path / "render_manifest.json"
|
|
||||||
if not manifest.exists():
|
|
||||||
raise HTTPException(status_code=404, detail="Artifact manifest missing")
|
|
||||||
return FileResponse(str(manifest), filename=f"artifact-{artifact_id}-manifest.json")
|
|
||||||
return FileResponse(str(path), filename=path.name)
|
|
||||||
|
|
||||||
@app.post("/api/artifacts/{artifact_id}/retention")
|
@app.post("/api/curriculum-drafts/{draft_id}/edit")
|
||||||
def api_update_artifact_retention(artifact_id: int, payload: ArtifactRetentionUpdate, user = Depends(current_user)):
|
def api_edit_curriculum(draft_id: int, payload: ObjectEditRequest, reviewer = Depends(require_reviewer)):
|
||||||
artifact = get_artifact(artifact_id)
|
row = edit_curriculum_draft(draft_id, payload.payload, reviewer.id, payload.note)
|
||||||
if artifact is None or artifact.is_deleted:
|
if row is None: raise HTTPException(status_code=404, detail="Draft not found")
|
||||||
raise HTTPException(status_code=404, detail="Artifact not found")
|
return {"draft_id": row.id, "current_version": row.current_version}
|
||||||
ensure_learner_access(user, artifact.learner_id)
|
|
||||||
expires_at = "" if payload.retention_days is None else future_iso(payload.retention_days)
|
|
||||||
updated = update_artifact_retention(artifact_id, payload.retention_class, expires_at)
|
|
||||||
return {"artifact_id": updated.id, "retention_class": updated.retention_class, "expires_at": updated.expires_at}
|
|
||||||
|
|
||||||
@app.delete("/api/artifacts/{artifact_id}")
|
@app.post("/api/skill-bundles/{bundle_id}/edit")
|
||||||
def api_delete_artifact(artifact_id: int, user = Depends(current_user)):
|
def api_edit_skill(bundle_id: int, payload: ObjectEditRequest, reviewer = Depends(require_reviewer)):
|
||||||
artifact = get_artifact(artifact_id)
|
row = edit_skill_bundle(bundle_id, payload.payload, reviewer.id, payload.note)
|
||||||
if artifact is None or artifact.is_deleted:
|
if row is None: raise HTTPException(status_code=404, detail="Skill bundle not found")
|
||||||
raise HTTPException(status_code=404, detail="Artifact not found")
|
return {"skill_bundle_id": row.id, "current_version": row.current_version}
|
||||||
ensure_learner_access(user, artifact.learner_id)
|
|
||||||
updated = soft_delete_artifact(artifact_id)
|
|
||||||
return {"artifact_id": updated.id, "is_deleted": updated.is_deleted}
|
|
||||||
|
|
||||||
@app.post("/api/learners/{learner_id}/knowledge-export/{pack_id}")
|
@app.get("/api/object-versions/{object_kind}/{object_id}")
|
||||||
def api_knowledge_export(learner_id: str, pack_id: str, payload: KnowledgeExportRequest, user = Depends(current_user)):
|
def api_object_versions(object_kind: str, object_id: int, reviewer = Depends(require_reviewer)):
|
||||||
ensure_learner_access(user, learner_id)
|
return list_versions(object_kind, object_id)
|
||||||
ensure_pack_access(user, pack_id)
|
|
||||||
snapshot = build_knowledge_snapshot(learner_id, pack_id)
|
@app.post("/api/pack-patches/{patch_id}/apply")
|
||||||
snapshot["requested_export_kind"] = payload.export_kind
|
def api_apply_patch(patch_id: int, payload: PatchApplyRequest, reviewer = Depends(require_reviewer)):
|
||||||
return snapshot
|
row = apply_pack_patch(patch_id, reviewer.id, payload.note)
|
||||||
|
if row is None: raise HTTPException(status_code=404, detail="Patch or pack not found")
|
||||||
|
return {"patch_id": row.id, "status": row.status}
|
||||||
|
|
||||||
|
@app.get("/api/curriculum-drafts/{draft_id}/export")
|
||||||
|
def api_export_curriculum(draft_id: int, reviewer = Depends(require_reviewer)):
|
||||||
|
out = export_curriculum_draft(draft_id)
|
||||||
|
if out is None: raise HTTPException(status_code=404, detail="Draft not found")
|
||||||
|
return out
|
||||||
|
|
||||||
|
@app.get("/api/skill-bundles/{bundle_id}/export")
|
||||||
|
def api_export_skill(bundle_id: int, reviewer = Depends(require_reviewer)):
|
||||||
|
out = export_skill_bundle(bundle_id)
|
||||||
|
if out is None: raise HTTPException(status_code=404, detail="Skill bundle not found")
|
||||||
|
return out
|
||||||
|
|
||||||
|
@app.post("/api/synthesis/run")
|
||||||
|
def api_run_synthesis(payload: SynthesisRunRequest, reviewer = Depends(require_reviewer)):
|
||||||
|
created = generate_synthesis_candidates(payload.source_pack_id, payload.target_pack_id, payload.limit)
|
||||||
|
return {"created_count": len(created), "synthesis_ids": created}
|
||||||
|
|
||||||
|
@app.get("/api/synthesis/candidates")
|
||||||
|
def api_list_synthesis(reviewer = Depends(require_reviewer)):
|
||||||
|
return list_synthesis_candidates()
|
||||||
|
|
||||||
|
@app.post("/api/synthesis/candidates/{synthesis_id}/promote")
|
||||||
|
def api_promote_synthesis(synthesis_id: int, payload: SynthesisPromoteRequest, reviewer = Depends(require_reviewer)):
|
||||||
|
syn = get_synthesis_candidate(synthesis_id)
|
||||||
|
if syn is None: raise HTTPException(status_code=404, detail="Synthesis candidate not found")
|
||||||
|
candidate_id = create_candidate(KnowledgeCandidateCreate(
|
||||||
|
source_type="synthesis_engine", learner_id="system", pack_id=syn["source_pack_id"],
|
||||||
|
candidate_kind="synthesis_proposal",
|
||||||
|
title=f"Synthesis: {syn['source_concept_id']} ↔ {syn['target_concept_id']}",
|
||||||
|
summary=syn["explanation"], structured_payload=syn,
|
||||||
|
evidence_summary="Promoted from synthesis engine candidate",
|
||||||
|
confidence_hint=syn["score_total"], novelty_score=syn["evidence"].get("novelty", 0.0),
|
||||||
|
synthesis_score=syn["score_total"], triage_lane=payload.promotion_target,
|
||||||
|
))
|
||||||
|
promotion_id = create_promotion(candidate_id, reviewer.id, PromoteRequest(promotion_target=payload.promotion_target, target_object_id="", promotion_status="approved"))
|
||||||
|
return {"candidate_id": candidate_id, "promotion_id": promotion_id}
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
uvicorn.run(app, host="127.0.0.1", port=8011)
|
uvicorn.run(app, host="127.0.0.1", port=8011)
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,10 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
class LoginRequest(BaseModel):
|
||||||
|
username: str
|
||||||
|
password: str
|
||||||
|
|
||||||
class TokenPair(BaseModel):
|
class TokenPair(BaseModel):
|
||||||
access_token: str
|
access_token: str
|
||||||
refresh_token: str
|
refresh_token: str
|
||||||
|
|
@ -8,81 +12,41 @@ class TokenPair(BaseModel):
|
||||||
username: str
|
username: str
|
||||||
role: str
|
role: str
|
||||||
|
|
||||||
class LoginRequest(BaseModel):
|
class KnowledgeCandidateCreate(BaseModel):
|
||||||
username: str
|
source_type: str = "learner_export"
|
||||||
password: str
|
source_artifact_id: int | None = None
|
||||||
|
learner_id: str
|
||||||
class RefreshRequest(BaseModel):
|
pack_id: str
|
||||||
refresh_token: str
|
candidate_kind: str
|
||||||
|
|
||||||
class GraphPosition(BaseModel):
|
|
||||||
x: float
|
|
||||||
y: float
|
|
||||||
|
|
||||||
class CrossPackLink(BaseModel):
|
|
||||||
source_concept_id: str
|
|
||||||
target_pack_id: str
|
|
||||||
target_concept_id: str
|
|
||||||
relationship: str = "related"
|
|
||||||
|
|
||||||
class PackConcept(BaseModel):
|
|
||||||
id: str
|
|
||||||
title: str
|
title: str
|
||||||
prerequisites: list[str] = Field(default_factory=list)
|
summary: str = ""
|
||||||
masteryDimension: str = "mastery"
|
structured_payload: dict = Field(default_factory=dict)
|
||||||
exerciseReward: str = ""
|
evidence_summary: str = ""
|
||||||
position: GraphPosition | None = None
|
confidence_hint: float = 0.0
|
||||||
cross_pack_links: list[CrossPackLink] = Field(default_factory=list)
|
novelty_score: float = 0.0
|
||||||
|
synthesis_score: float = 0.0
|
||||||
|
triage_lane: str = "archive"
|
||||||
|
|
||||||
class PackData(BaseModel):
|
class PromoteRequest(BaseModel):
|
||||||
id: str
|
promotion_target: str
|
||||||
title: str
|
target_object_id: str = ""
|
||||||
subtitle: str = ""
|
promotion_status: str = "approved"
|
||||||
level: str = "novice-friendly"
|
|
||||||
concepts: list[PackConcept] = Field(default_factory=list)
|
class SynthesisRunRequest(BaseModel):
|
||||||
onboarding: dict = Field(default_factory=dict)
|
source_pack_id: str | None = None
|
||||||
compliance: dict = Field(default_factory=dict)
|
target_pack_id: str | None = None
|
||||||
|
limit: int = 20
|
||||||
|
|
||||||
|
class SynthesisPromoteRequest(BaseModel):
|
||||||
|
promotion_target: str = "pack_improvement"
|
||||||
|
|
||||||
|
class ObjectEditRequest(BaseModel):
|
||||||
|
payload: dict = Field(default_factory=dict)
|
||||||
|
note: str = ""
|
||||||
|
|
||||||
|
class PatchApplyRequest(BaseModel):
|
||||||
|
note: str = "Applied pack patch"
|
||||||
|
|
||||||
class CreateLearnerRequest(BaseModel):
|
class CreateLearnerRequest(BaseModel):
|
||||||
learner_id: str
|
learner_id: str
|
||||||
display_name: str = ""
|
display_name: str = ""
|
||||||
|
|
||||||
class MasteryRecord(BaseModel):
|
|
||||||
concept_id: str
|
|
||||||
dimension: str
|
|
||||||
score: float = 0.0
|
|
||||||
confidence: float = 0.0
|
|
||||||
evidence_count: int = 0
|
|
||||||
last_updated: str = ""
|
|
||||||
|
|
||||||
class EvidenceEvent(BaseModel):
|
|
||||||
concept_id: str
|
|
||||||
dimension: str
|
|
||||||
score: float
|
|
||||||
confidence_hint: float = 0.5
|
|
||||||
timestamp: str
|
|
||||||
kind: str = "exercise"
|
|
||||||
source_id: str = ""
|
|
||||||
|
|
||||||
class LearnerState(BaseModel):
|
|
||||||
learner_id: str
|
|
||||||
records: list[MasteryRecord] = Field(default_factory=list)
|
|
||||||
history: list[EvidenceEvent] = Field(default_factory=list)
|
|
||||||
|
|
||||||
class MediaRenderRequest(BaseModel):
|
|
||||||
learner_id: str
|
|
||||||
pack_id: str
|
|
||||||
format: str = "gif"
|
|
||||||
fps: int = 2
|
|
||||||
theme: str = "default"
|
|
||||||
retention_class: str = "standard"
|
|
||||||
retention_days: int = 30
|
|
||||||
|
|
||||||
class ArtifactRetentionUpdate(BaseModel):
|
|
||||||
retention_class: str
|
|
||||||
retention_days: int | None = None
|
|
||||||
|
|
||||||
class KnowledgeExportRequest(BaseModel):
|
|
||||||
learner_id: str
|
|
||||||
pack_id: str
|
|
||||||
export_kind: str = "knowledge_snapshot"
|
|
||||||
|
|
|
||||||
|
|
@ -10,13 +10,6 @@ class UserORM(Base):
|
||||||
role: Mapped[str] = mapped_column(String(50), default="learner")
|
role: Mapped[str] = mapped_column(String(50), default="learner")
|
||||||
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
|
is_active: Mapped[bool] = mapped_column(Boolean, default=True)
|
||||||
|
|
||||||
class RefreshTokenORM(Base):
|
|
||||||
__tablename__ = "refresh_tokens"
|
|
||||||
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
|
||||||
user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
|
|
||||||
token_id: Mapped[str] = mapped_column(String(255), unique=True, index=True)
|
|
||||||
is_revoked: Mapped[bool] = mapped_column(Boolean, default=False)
|
|
||||||
|
|
||||||
class PackORM(Base):
|
class PackORM(Base):
|
||||||
__tablename__ = "packs"
|
__tablename__ = "packs"
|
||||||
id: Mapped[str] = mapped_column(String(100), primary_key=True)
|
id: Mapped[str] = mapped_column(String(100), primary_key=True)
|
||||||
|
|
@ -28,61 +21,85 @@ class PackORM(Base):
|
||||||
data_json: Mapped[str] = mapped_column(Text)
|
data_json: Mapped[str] = mapped_column(Text)
|
||||||
is_published: Mapped[bool] = mapped_column(Boolean, default=False)
|
is_published: Mapped[bool] = mapped_column(Boolean, default=False)
|
||||||
|
|
||||||
class LearnerORM(Base):
|
class KnowledgeCandidateORM(Base):
|
||||||
__tablename__ = "learners"
|
__tablename__ = "knowledge_candidates"
|
||||||
id: Mapped[str] = mapped_column(String(100), primary_key=True)
|
|
||||||
owner_user_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
|
|
||||||
display_name: Mapped[str] = mapped_column(String(255), default="")
|
|
||||||
|
|
||||||
class MasteryRecordORM(Base):
|
|
||||||
__tablename__ = "mastery_records"
|
|
||||||
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
|
||||||
learner_id: Mapped[str] = mapped_column(ForeignKey("learners.id"), index=True)
|
|
||||||
concept_id: Mapped[str] = mapped_column(String(100), index=True)
|
|
||||||
dimension: Mapped[str] = mapped_column(String(100), default="mastery")
|
|
||||||
score: Mapped[float] = mapped_column(Float, default=0.0)
|
|
||||||
confidence: Mapped[float] = mapped_column(Float, default=0.0)
|
|
||||||
evidence_count: Mapped[int] = mapped_column(Integer, default=0)
|
|
||||||
last_updated: Mapped[str] = mapped_column(String(100), default="")
|
|
||||||
|
|
||||||
class EvidenceEventORM(Base):
|
|
||||||
__tablename__ = "evidence_events"
|
|
||||||
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
|
||||||
learner_id: Mapped[str] = mapped_column(ForeignKey("learners.id"), index=True)
|
|
||||||
concept_id: Mapped[str] = mapped_column(String(100), index=True)
|
|
||||||
dimension: Mapped[str] = mapped_column(String(100), default="mastery")
|
|
||||||
score: Mapped[float] = mapped_column(Float, default=0.0)
|
|
||||||
confidence_hint: Mapped[float] = mapped_column(Float, default=0.5)
|
|
||||||
timestamp: Mapped[str] = mapped_column(String(100), default="")
|
|
||||||
kind: Mapped[str] = mapped_column(String(50), default="exercise")
|
|
||||||
source_id: Mapped[str] = mapped_column(String(255), default="")
|
|
||||||
|
|
||||||
class RenderJobORM(Base):
|
|
||||||
__tablename__ = "render_jobs"
|
|
||||||
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||||
|
source_type: Mapped[str] = mapped_column(String(50), default="learner_export")
|
||||||
|
source_artifact_id: Mapped[int | None] = mapped_column(Integer, nullable=True)
|
||||||
learner_id: Mapped[str] = mapped_column(String(100), index=True)
|
learner_id: Mapped[str] = mapped_column(String(100), index=True)
|
||||||
pack_id: Mapped[str] = mapped_column(String(100), index=True)
|
pack_id: Mapped[str] = mapped_column(String(100), index=True)
|
||||||
requested_format: Mapped[str] = mapped_column(String(20), default="gif")
|
candidate_kind: Mapped[str] = mapped_column(String(100), index=True)
|
||||||
fps: Mapped[int] = mapped_column(Integer, default=2)
|
title: Mapped[str] = mapped_column(String(255))
|
||||||
theme: Mapped[str] = mapped_column(String(100), default="default")
|
summary: Mapped[str] = mapped_column(Text, default="")
|
||||||
status: Mapped[str] = mapped_column(String(50), default="queued")
|
structured_payload_json: Mapped[str] = mapped_column(Text, default="{}")
|
||||||
bundle_dir: Mapped[str] = mapped_column(Text, default="")
|
evidence_summary: Mapped[str] = mapped_column(Text, default="")
|
||||||
payload_json: Mapped[str] = mapped_column(Text, default="")
|
confidence_hint: Mapped[float] = mapped_column(Float, default=0.0)
|
||||||
manifest_path: Mapped[str] = mapped_column(Text, default="")
|
novelty_score: Mapped[float] = mapped_column(Float, default=0.0)
|
||||||
script_path: Mapped[str] = mapped_column(Text, default="")
|
synthesis_score: Mapped[float] = mapped_column(Float, default=0.0)
|
||||||
error_text: Mapped[str] = mapped_column(Text, default="")
|
triage_lane: Mapped[str] = mapped_column(String(50), default="archive")
|
||||||
|
current_status: Mapped[str] = mapped_column(String(50), default="captured")
|
||||||
|
created_at: Mapped[str] = mapped_column(String(100), default="")
|
||||||
|
|
||||||
class ArtifactORM(Base):
|
class PromotionRecordORM(Base):
|
||||||
__tablename__ = "artifacts"
|
__tablename__ = "promotion_records"
|
||||||
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||||
render_job_id: Mapped[int] = mapped_column(ForeignKey("render_jobs.id"), index=True)
|
candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
|
||||||
learner_id: Mapped[str] = mapped_column(String(100), index=True)
|
promotion_target: Mapped[str] = mapped_column(String(50), index=True)
|
||||||
|
target_object_id: Mapped[str] = mapped_column(String(100), default="")
|
||||||
|
promotion_status: Mapped[str] = mapped_column(String(50), default="draft")
|
||||||
|
promoted_by: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
|
||||||
|
created_at: Mapped[str] = mapped_column(String(100), default="")
|
||||||
|
|
||||||
|
class PackPatchProposalORM(Base):
|
||||||
|
__tablename__ = "pack_patch_proposals"
|
||||||
|
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||||
|
candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
|
||||||
pack_id: Mapped[str] = mapped_column(String(100), index=True)
|
pack_id: Mapped[str] = mapped_column(String(100), index=True)
|
||||||
artifact_type: Mapped[str] = mapped_column(String(50), default="render_bundle")
|
patch_type: Mapped[str] = mapped_column(String(100), default="content_revision")
|
||||||
format: Mapped[str] = mapped_column(String(20), default="gif")
|
title: Mapped[str] = mapped_column(String(255))
|
||||||
title: Mapped[str] = mapped_column(String(255), default="")
|
proposed_change_json: Mapped[str] = mapped_column(Text, default="{}")
|
||||||
path: Mapped[str] = mapped_column(Text, default="")
|
evidence_summary: Mapped[str] = mapped_column(Text, default="")
|
||||||
metadata_json: Mapped[str] = mapped_column(Text, default="{}")
|
reviewer_notes: Mapped[str] = mapped_column(Text, default="")
|
||||||
retention_class: Mapped[str] = mapped_column(String(50), default="standard")
|
status: Mapped[str] = mapped_column(String(50), default="proposed")
|
||||||
expires_at: Mapped[str] = mapped_column(String(100), default="")
|
current_version: Mapped[int] = mapped_column(Integer, default=1)
|
||||||
is_deleted: Mapped[bool] = mapped_column(Boolean, default=False)
|
created_at: Mapped[str] = mapped_column(String(100), default="")
|
||||||
|
|
||||||
|
class CurriculumDraftORM(Base):
    """A curriculum product (e.g. lesson outline) drafted from a candidate."""

    __tablename__ = "curriculum_drafts"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
    topic_focus: Mapped[str] = mapped_column(String(255), default="")
    product_type: Mapped[str] = mapped_column(String(100), default="lesson_outline")
    audience: Mapped[str] = mapped_column(String(100), default="general")
    # JSON-encoded list of concept ids the draft draws on.
    source_concepts_json: Mapped[str] = mapped_column(Text, default="[]")
    content_markdown: Mapped[str] = mapped_column(Text, default="")
    editorial_notes: Mapped[str] = mapped_column(Text, default="")
    status: Mapped[str] = mapped_column(String(50), default="draft")
    # Bumped on each edit; snapshots live in object_versions.
    current_version: Mapped[int] = mapped_column(Integer, default=1)
    # ISO-8601 timestamp string (see now_iso()).
    created_at: Mapped[str] = mapped_column(String(100), default="")
|
class SkillBundleORM(Base):
    """A reusable skill description bundled from a promoted candidate."""

    __tablename__ = "skill_bundles"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    candidate_id: Mapped[int] = mapped_column(ForeignKey("knowledge_candidates.id"), index=True)
    skill_name: Mapped[str] = mapped_column(String(255))
    domain: Mapped[str] = mapped_column(String(100), default="")
    # The *_json columns hold JSON-encoded lists (see create_skill_bundle).
    prerequisites_json: Mapped[str] = mapped_column(Text, default="[]")
    expected_inputs_json: Mapped[str] = mapped_column(Text, default="[]")
    failure_modes_json: Mapped[str] = mapped_column(Text, default="[]")
    validation_checks_json: Mapped[str] = mapped_column(Text, default="[]")
    canonical_examples_json: Mapped[str] = mapped_column(Text, default="[]")
    status: Mapped[str] = mapped_column(String(50), default="draft")
    # Bumped on each edit; snapshots live in object_versions.
    current_version: Mapped[int] = mapped_column(Integer, default=1)
    # ISO-8601 timestamp string (see now_iso()).
    created_at: Mapped[str] = mapped_column(String(100), default="")
|
class ObjectVersionORM(Base):
    """Immutable version snapshot for any versioned object (generic audit log).

    (object_kind, object_id, version_number) identifies a snapshot; the
    snapshot body is a JSON-encoded dict in payload_json.
    """

    __tablename__ = "object_versions"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Discriminator: "pack_patch", "curriculum_draft", "skill_bundle", ...
    object_kind: Mapped[str] = mapped_column(String(50), index=True)
    # Primary key of the row in the kind-specific table (not a real FK).
    object_id: Mapped[int] = mapped_column(Integer, index=True)
    version_number: Mapped[int] = mapped_column(Integer, default=1)
    payload_json: Mapped[str] = mapped_column(Text, default="{}")
    editor_id: Mapped[int] = mapped_column(ForeignKey("users.id"), index=True)
    note: Mapped[str] = mapped_column(Text, default="")
    # ISO-8601 timestamp string (see now_iso()).
    created_at: Mapped[str] = mapped_column(String(100), default="")
@ -1,11 +1,17 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
import json
|
import json
|
||||||
|
from datetime import datetime, timezone
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
from .db import SessionLocal
|
from .db import SessionLocal
|
||||||
from .orm import UserORM, RefreshTokenORM, PackORM, LearnerORM, MasteryRecordORM, EvidenceEventORM, RenderJobORM, ArtifactORM
|
from .orm import (
|
||||||
from .models import PackData, LearnerState, MasteryRecord, EvidenceEvent
|
UserORM, PackORM, LearnerORM, KnowledgeCandidateORM, PromotionRecordORM,
|
||||||
|
PackPatchProposalORM, CurriculumDraftORM, SkillBundleORM, ObjectVersionORM, SynthesisCandidateORM
|
||||||
|
)
|
||||||
from .auth import verify_password
|
from .auth import verify_password
|
||||||
|
|
||||||
|
def now_iso() -> str:
    """Return the current UTC time as an ISO-8601 string."""
    moment = datetime.now(timezone.utc)
    return moment.isoformat()
||||||
def get_user_by_username(username: str):
    """Look up the user row with this exact username, or None if absent."""
    stmt = select(UserORM).where(UserORM.username == username)
    with SessionLocal() as db:
        return db.execute(stmt).scalar_one_or_none()
|
@ -20,216 +26,474 @@ def authenticate_user(username: str, password: str):
|
||||||
return None
|
return None
|
||||||
return user
|
return user
|
||||||
|
|
||||||
def store_refresh_token(user_id: int, token_id: str):
|
def list_packs():
    """Return every pack row, ordered by primary key."""
    stmt = select(PackORM).order_by(PackORM.id)
    with SessionLocal() as db:
        return db.execute(stmt).scalars().all()
||||||
def get_pack(pack_id: str):
    """Fetch the PackORM row for `pack_id`, or None when it does not exist."""
    with SessionLocal() as db:
        row = db.get(PackORM, pack_id)
        return row
||||||
def upsert_pack(pack: PackData, submitted_by_user_id: int, policy_lane: str = "personal", is_published: bool = False):
|
|
||||||
with SessionLocal() as db:
|
|
||||||
row = db.get(PackORM, pack.id)
|
|
||||||
payload = json.dumps(pack.model_dump())
|
|
||||||
if row is None:
|
|
||||||
row = PackORM(
|
|
||||||
id=pack.id,
|
|
||||||
owner_user_id=submitted_by_user_id if policy_lane == "personal" else None,
|
|
||||||
policy_lane=policy_lane,
|
|
||||||
title=pack.title,
|
|
||||||
subtitle=pack.subtitle,
|
|
||||||
level=pack.level,
|
|
||||||
data_json=payload,
|
|
||||||
is_published=is_published if policy_lane == "personal" else False,
|
|
||||||
)
|
|
||||||
db.add(row)
|
|
||||||
else:
|
|
||||||
row.owner_user_id = submitted_by_user_id if policy_lane == "personal" else row.owner_user_id
|
|
||||||
row.policy_lane = policy_lane
|
|
||||||
row.title = pack.title
|
|
||||||
row.subtitle = pack.subtitle
|
|
||||||
row.level = pack.level
|
|
||||||
row.data_json = payload
|
|
||||||
if policy_lane == "personal":
|
|
||||||
row.is_published = is_published
|
|
||||||
db.commit()
|
|
||||||
|
|
||||||
def create_learner(owner_user_id: int, learner_id: str, display_name: str = ""):
    """Insert a learner row if one with this id does not already exist."""
    with SessionLocal() as db:
        existing = db.get(LearnerORM, learner_id)
        if existing is None:
            new_row = LearnerORM(id=learner_id, owner_user_id=owner_user_id, display_name=display_name)
            db.add(new_row)
            db.commit()
||||||
def learner_owned_by_user(user_id: int, learner_id: str) -> bool:
|
def create_candidate(payload):
    """Persist a new knowledge candidate and return its database id.

    `payload` is accessed by attribute (a KnowledgeCandidateCreate-style
    model); `structured_payload` is a dict and is JSON-encoded here. New
    candidates always start in status "triaged".
    """
    with SessionLocal() as db:
        row = KnowledgeCandidateORM(
            source_type=payload.source_type,
            source_artifact_id=payload.source_artifact_id,
            learner_id=payload.learner_id,
            pack_id=payload.pack_id,
            candidate_kind=payload.candidate_kind,
            title=payload.title,
            summary=payload.summary,
            structured_payload_json=json.dumps(payload.structured_payload),
            evidence_summary=payload.evidence_summary,
            confidence_hint=payload.confidence_hint,
            novelty_score=payload.novelty_score,
            synthesis_score=payload.synthesis_score,
            triage_lane=payload.triage_lane,
            current_status="triaged",
            created_at=now_iso(),
        )
        db.add(row)
        db.commit()
        # Refresh so the autogenerated primary key is populated.
        db.refresh(row)
        return row.id
||||||
def update_render_job(job_id: int, **fields):
|
def list_candidates():
    """Return all knowledge candidates as plain dicts, newest first."""
    with SessionLocal() as db:
        rows = db.execute(select(KnowledgeCandidateORM).order_by(KnowledgeCandidateORM.id.desc())).scalars().all()
        # NOTE(review): learner_id / source_type are stored but not exposed
        # here — confirm that omission is intentional.
        return [{
            "candidate_id": r.id,
            "pack_id": r.pack_id,
            "candidate_kind": r.candidate_kind,
            "title": r.title,
            "summary": r.summary,
            "structured_payload": json.loads(r.structured_payload_json or "{}"),
            "evidence_summary": r.evidence_summary,
            "confidence_hint": r.confidence_hint,
            "novelty_score": r.novelty_score,
            "synthesis_score": r.synthesis_score,
            "triage_lane": r.triage_lane,
            "current_status": r.current_status,
        } for r in rows]
||||||
def get_artifact(artifact_id: int):
|
def get_candidate(candidate_id: int):
    """Fetch one knowledge candidate as a dict, or None if it does not exist.

    The dict shape matches list_candidates() so the two are interchangeable.
    """
    with SessionLocal() as db:
        r = db.get(KnowledgeCandidateORM, candidate_id)
        if r is None:
            return None
        return {
            "candidate_id": r.id,
            "pack_id": r.pack_id,
            "candidate_kind": r.candidate_kind,
            "title": r.title,
            "summary": r.summary,
            "structured_payload": json.loads(r.structured_payload_json or "{}"),
            "evidence_summary": r.evidence_summary,
            "confidence_hint": r.confidence_hint,
            "novelty_score": r.novelty_score,
            "synthesis_score": r.synthesis_score,
            "triage_lane": r.triage_lane,
            "current_status": r.current_status,
        }
||||||
|
def create_pack_patch(candidate):
    """Materialize a candidate (dict from get_candidate) as a pack patch proposal.

    Writes the proposal row, records version 1 in the object-version log, and
    returns an opaque reference string of the form "patch:<id>".
    """
    with SessionLocal() as db:
        row = PackPatchProposalORM(
            candidate_id=candidate["candidate_id"],
            pack_id=candidate["pack_id"],
            patch_type=candidate["candidate_kind"],
            title=candidate["title"],
            proposed_change_json=json.dumps(candidate["structured_payload"]),
            evidence_summary=candidate["evidence_summary"],
            reviewer_notes="",
            status="proposed",
            current_version=1,
            created_at=now_iso(),
        )
        db.add(row)
        db.commit()
        db.refresh(row)
        # Snapshot the initial state as version 1.
        # NOTE(review): editor_id is hardcoded to 1 here — confirm this is a
        # deliberate "system" user rather than a placeholder.
        _create_version("pack_patch", row.id, 1, {
            "title": row.title,
            "proposed_change": json.loads(row.proposed_change_json or "{}"),
            "status": row.status,
            "reviewer_notes": row.reviewer_notes,
        }, 1, "Initial version")
        return f"patch:{row.id}"
||||||
def soft_delete_artifact(artifact_id: int):
|
def create_curriculum_draft(candidate):
    """Materialize a candidate as a curriculum draft and return "curriculum:<id>".

    Seeds the draft's markdown from the candidate's title / summary / evidence
    and records version 1 in the object-version log.
    """
    with SessionLocal() as db:
        payload = candidate["structured_payload"]
        # Prefer an explicit source_concepts list; otherwise fall back to the
        # single affected_concept (wrapped in a list), or an empty list.
        source_concepts = payload.get("source_concepts", [payload.get("affected_concept")] if payload.get("affected_concept") else [])
        content = f"# {candidate['title']}\n\n{candidate['summary']}\n\n## Evidence\n{candidate['evidence_summary']}\n"
        row = CurriculumDraftORM(
            candidate_id=candidate["candidate_id"],
            topic_focus=candidate["title"],
            product_type="lesson_outline",
            audience="general",
            source_concepts_json=json.dumps(source_concepts),
            content_markdown=content,
            editorial_notes="",
            status="draft",
            current_version=1,
            created_at=now_iso(),
        )
        db.add(row)
        db.commit()
        db.refresh(row)
        # Snapshot the initial state as version 1 (editor_id 1 — see create_pack_patch note).
        _create_version("curriculum_draft", row.id, 1, {
            "topic_focus": row.topic_focus,
            "content_markdown": row.content_markdown,
            "product_type": row.product_type,
            "audience": row.audience,
        }, 1, "Initial version")
        return f"curriculum:{row.id}"
|
||||||
|
def create_skill_bundle(candidate):
    """Materialize a candidate as a reusable skill bundle and return "skill:<id>".

    Missing structured-payload fields get generic defaults; version 1 is
    recorded in the object-version log.
    """
    with SessionLocal() as db:
        payload = candidate["structured_payload"]
        row = SkillBundleORM(
            candidate_id=candidate["candidate_id"],
            skill_name=candidate["title"],
            # Domain defaults to the originating pack id.
            domain=candidate["pack_id"],
            prerequisites_json=json.dumps(payload.get("prerequisites", [])),
            expected_inputs_json=json.dumps(payload.get("expected_inputs", ["text"])),
            failure_modes_json=json.dumps(payload.get("failure_modes", ["misapplied concept"])),
            validation_checks_json=json.dumps(payload.get("validation_checks", ["can explain concept clearly"])),
            canonical_examples_json=json.dumps(payload.get("canonical_examples", [candidate["summary"]])),
            status="draft",
            current_version=1,
            created_at=now_iso(),
        )
        db.add(row)
        db.commit()
        db.refresh(row)
        # Snapshot the initial state as version 1 (editor_id 1 — see create_pack_patch note).
        _create_version("skill_bundle", row.id, 1, {
            "skill_name": row.skill_name,
            "domain": row.domain,
            "prerequisites": json.loads(row.prerequisites_json or "[]"),
            "expected_inputs": json.loads(row.expected_inputs_json or "[]"),
            "failure_modes": json.loads(row.failure_modes_json or "[]"),
            "validation_checks": json.loads(row.validation_checks_json or "[]"),
            "canonical_examples": json.loads(row.canonical_examples_json or "[]"),
        }, 1, "Initial version")
        return f"skill:{row.id}"
|
||||||
|
def create_promotion(candidate_id: int, promoted_by: int, payload):
    """Promote a candidate into a concrete target object and record the promotion.

    If the caller did not supply payload.target_object_id, a target object is
    created based on payload.promotion_target (pack patch, curriculum draft,
    skill bundle, or the sentinel "archive:auto"). Writes a PromotionRecordORM
    row, updates the candidate's status/lane, and returns the promotion id.
    Returns None when the candidate does not exist.
    """
    candidate = get_candidate(candidate_id)
    if candidate is None:
        return None
    target_object_id = payload.target_object_id
    if not target_object_id:
        if payload.promotion_target == "pack_improvement":
            target_object_id = create_pack_patch(candidate)
        elif payload.promotion_target == "curriculum_draft":
            target_object_id = create_curriculum_draft(candidate)
        elif payload.promotion_target == "reusable_skill_bundle":
            target_object_id = create_skill_bundle(candidate)
        elif payload.promotion_target == "archive":
            target_object_id = "archive:auto"
        # NOTE(review): any other promotion_target leaves target_object_id
        # empty — confirm whether that should be rejected upstream.
    with SessionLocal() as db:
        row = PromotionRecordORM(
            candidate_id=candidate_id,
            promotion_target=payload.promotion_target,
            target_object_id=target_object_id,
            promotion_status=payload.promotion_status,
            promoted_by=promoted_by,
            created_at=now_iso(),
        )
        db.add(row)
        cand = db.get(KnowledgeCandidateORM, candidate_id)
        if cand:
            # Archiving is terminal; any other target marks the candidate promoted.
            cand.current_status = "promoted" if payload.promotion_target != "archive" else "archived"
            cand.triage_lane = payload.promotion_target
        db.commit()
        db.refresh(row)
        return row.id
|
||||||
|
def list_promotions():
    """Return every promotion record as a plain dict, newest first."""
    stmt = select(PromotionRecordORM).order_by(PromotionRecordORM.id.desc())
    with SessionLocal() as db:
        records = db.execute(stmt).scalars().all()
        result = []
        for rec in records:
            result.append({
                "promotion_id": rec.id,
                "candidate_id": rec.candidate_id,
                "promotion_target": rec.promotion_target,
                "target_object_id": rec.target_object_id,
                "promotion_status": rec.promotion_status,
                "promoted_by": rec.promoted_by,
                "created_at": rec.created_at,
            })
        return result
||||||
|
def list_pack_patches():
    """Return all pack patch proposals as plain dicts, newest first."""
    with SessionLocal() as db:
        rows = db.execute(select(PackPatchProposalORM).order_by(PackPatchProposalORM.id.desc())).scalars().all()
        return [{
            "patch_id": r.id,
            "candidate_id": r.candidate_id,
            "pack_id": r.pack_id,
            "patch_type": r.patch_type,
            "title": r.title,
            "proposed_change": json.loads(r.proposed_change_json or "{}"),
            "evidence_summary": r.evidence_summary,
            "reviewer_notes": r.reviewer_notes,
            "status": r.status,
            "current_version": r.current_version,
            "created_at": r.created_at,
        } for r in rows]
||||||
|
def list_curriculum_drafts():
    """Return all curriculum drafts as plain dicts, newest first."""
    with SessionLocal() as db:
        rows = db.execute(select(CurriculumDraftORM).order_by(CurriculumDraftORM.id.desc())).scalars().all()
        return [{
            "draft_id": r.id,
            "candidate_id": r.candidate_id,
            "topic_focus": r.topic_focus,
            "product_type": r.product_type,
            "audience": r.audience,
            "source_concepts": json.loads(r.source_concepts_json or "[]"),
            "content_markdown": r.content_markdown,
            "editorial_notes": r.editorial_notes,
            "status": r.status,
            "current_version": r.current_version,
            "created_at": r.created_at,
        } for r in rows]
||||||
|
def list_skill_bundles():
    """Return all skill bundles as plain dicts, newest first."""
    with SessionLocal() as db:
        rows = db.execute(select(SkillBundleORM).order_by(SkillBundleORM.id.desc())).scalars().all()
        return [{
            "skill_bundle_id": r.id,
            "candidate_id": r.candidate_id,
            "skill_name": r.skill_name,
            "domain": r.domain,
            "prerequisites": json.loads(r.prerequisites_json or "[]"),
            "expected_inputs": json.loads(r.expected_inputs_json or "[]"),
            "failure_modes": json.loads(r.failure_modes_json or "[]"),
            "validation_checks": json.loads(r.validation_checks_json or "[]"),
            "canonical_examples": json.loads(r.canonical_examples_json or "[]"),
            "status": r.status,
            "current_version": r.current_version,
            "created_at": r.created_at,
        } for r in rows]
||||||
|
def get_pack_patch(patch_id: int):
    """Fetch one pack patch proposal as a dict, or None if it does not exist.

    Returns the same fields as list_pack_patches() so the two can be used
    interchangeably; previously candidate_id, patch_type, evidence_summary,
    and created_at were missing here (this is a backward-compatible superset —
    no pre-existing key changed).
    """
    with SessionLocal() as db:
        r = db.get(PackPatchProposalORM, patch_id)
        if r is None:
            return None
        return {
            "patch_id": r.id,
            "candidate_id": r.candidate_id,
            "pack_id": r.pack_id,
            "patch_type": r.patch_type,
            "title": r.title,
            "proposed_change": json.loads(r.proposed_change_json or "{}"),
            "evidence_summary": r.evidence_summary,
            "reviewer_notes": r.reviewer_notes,
            "status": r.status,
            "current_version": r.current_version,
            "created_at": r.created_at,
        }
||||||
|
def get_curriculum_draft(draft_id: int):
    """Load a single curriculum draft as a plain dict, or None if missing."""
    with SessionLocal() as db:
        row = db.get(CurriculumDraftORM, draft_id)
        if row is None:
            return None
        return {
            "draft_id": row.id,
            "topic_focus": row.topic_focus,
            "product_type": row.product_type,
            "audience": row.audience,
            "source_concepts": json.loads(row.source_concepts_json or "[]"),
            "content_markdown": row.content_markdown,
            "editorial_notes": row.editorial_notes,
            "status": row.status,
            "current_version": row.current_version,
        }
||||||
|
def get_skill_bundle(bundle_id: int):
    """Load a single skill bundle as a plain dict, or None if missing."""
    with SessionLocal() as db:
        r = db.get(SkillBundleORM, bundle_id)
        if r is None: return None
        return {
            "skill_bundle_id": r.id, "skill_name": r.skill_name, "domain": r.domain,
            "prerequisites": json.loads(r.prerequisites_json or "[]"),
            "expected_inputs": json.loads(r.expected_inputs_json or "[]"),
            "failure_modes": json.loads(r.failure_modes_json or "[]"),
            "validation_checks": json.loads(r.validation_checks_json or "[]"),
            "canonical_examples": json.loads(r.canonical_examples_json or "[]"),
            "status": r.status, "current_version": r.current_version
        }
||||||
|
def _create_version(object_kind: str, object_id: int, version_number: int, payload: dict, editor_id: int, note: str):
    """Append an immutable version snapshot for any versioned object.

    object_kind is the discriminator ("pack_patch", "curriculum_draft",
    "skill_bundle"); payload is JSON-encoded as the snapshot body. Opens its
    own session, so callers may invoke it after committing their own.
    """
    with SessionLocal() as db:
        db.add(ObjectVersionORM(
            object_kind=object_kind,
            object_id=object_id,
            version_number=version_number,
            payload_json=json.dumps(payload),
            editor_id=editor_id,
            note=note,
            created_at=now_iso(),
        ))
        db.commit()
||||||
|
def list_versions(object_kind: str, object_id: int):
    """Version history for one object, newest version first, as plain dicts."""
    stmt = (
        select(ObjectVersionORM)
        .where(ObjectVersionORM.object_kind == object_kind, ObjectVersionORM.object_id == object_id)
        .order_by(ObjectVersionORM.version_number.desc())
    )
    with SessionLocal() as db:
        snapshots = db.execute(stmt).scalars().all()
        return [
            {
                "version_id": snap.id,
                "object_kind": snap.object_kind,
                "object_id": snap.object_id,
                "version_number": snap.version_number,
                "payload": json.loads(snap.payload_json or "{}"),
                "editor_id": snap.editor_id,
                "note": snap.note,
                "created_at": snap.created_at,
            }
            for snap in snapshots
        ]
||||||
|
def edit_pack_patch(patch_id: int, payload: dict, editor_id: int, note: str):
    """Partially edit a patch proposal and record a new version snapshot.

    Only keys present in `payload` are applied ("title", "proposed_change",
    "reviewer_notes", "status"). Bumps current_version and snapshots the
    post-edit state. Returns the updated row, or None if the patch is missing.
    """
    with SessionLocal() as db:
        row = db.get(PackPatchProposalORM, patch_id)
        if row is None: return None
        if "title" in payload: row.title = payload["title"]
        if "proposed_change" in payload: row.proposed_change_json = json.dumps(payload["proposed_change"])
        if "reviewer_notes" in payload: row.reviewer_notes = payload["reviewer_notes"]
        if "status" in payload: row.status = payload["status"]
        row.current_version += 1
        db.commit()
        db.refresh(row)
        # Snapshot the post-edit state under the new version number.
        _create_version("pack_patch", patch_id, row.current_version, {
            "title": row.title,
            "proposed_change": json.loads(row.proposed_change_json or "{}"),
            "reviewer_notes": row.reviewer_notes,
            "status": row.status,
        }, editor_id, note)
        return row
|
||||||
|
def edit_curriculum_draft(draft_id: int, payload: dict, editor_id: int, note: str):
    """Partially edit a curriculum draft and record a new version snapshot.

    Only keys present in `payload` are applied ("topic_focus",
    "content_markdown", "editorial_notes", "status"). Bumps current_version
    and snapshots the post-edit state. Returns the updated row, or None if
    the draft is missing.
    """
    with SessionLocal() as db:
        row = db.get(CurriculumDraftORM, draft_id)
        if row is None: return None
        if "topic_focus" in payload: row.topic_focus = payload["topic_focus"]
        if "content_markdown" in payload: row.content_markdown = payload["content_markdown"]
        if "editorial_notes" in payload: row.editorial_notes = payload["editorial_notes"]
        if "status" in payload: row.status = payload["status"]
        row.current_version += 1
        db.commit()
        db.refresh(row)
        # Snapshot the post-edit state under the new version number.
        _create_version("curriculum_draft", draft_id, row.current_version, {
            "topic_focus": row.topic_focus,
            "content_markdown": row.content_markdown,
            "editorial_notes": row.editorial_notes,
            "status": row.status,
        }, editor_id, note)
        return row
|
||||||
|
def edit_skill_bundle(bundle_id: int, payload: dict, editor_id: int, note: str):
    """Partially edit a skill bundle and record a new version snapshot.

    Only keys present in `payload` are applied; list-valued fields are
    JSON-encoded into their *_json columns. Bumps current_version and
    snapshots the post-edit state. Returns the updated row, or None if the
    bundle is missing.
    """
    with SessionLocal() as db:
        row = db.get(SkillBundleORM, bundle_id)
        if row is None: return None
        if "skill_name" in payload: row.skill_name = payload["skill_name"]
        if "prerequisites" in payload: row.prerequisites_json = json.dumps(payload["prerequisites"])
        if "expected_inputs" in payload: row.expected_inputs_json = json.dumps(payload["expected_inputs"])
        if "failure_modes" in payload: row.failure_modes_json = json.dumps(payload["failure_modes"])
        if "validation_checks" in payload: row.validation_checks_json = json.dumps(payload["validation_checks"])
        if "canonical_examples" in payload: row.canonical_examples_json = json.dumps(payload["canonical_examples"])
        if "status" in payload: row.status = payload["status"]
        row.current_version += 1
        db.commit()
        db.refresh(row)
        # Snapshot the post-edit state under the new version number.
        _create_version("skill_bundle", bundle_id, row.current_version, {
            "skill_name": row.skill_name,
            "prerequisites": json.loads(row.prerequisites_json or "[]"),
            "expected_inputs": json.loads(row.expected_inputs_json or "[]"),
            "failure_modes": json.loads(row.failure_modes_json or "[]"),
            "validation_checks": json.loads(row.validation_checks_json or "[]"),
            "canonical_examples": json.loads(row.canonical_examples_json or "[]"),
            "status": row.status,
        }, editor_id, note)
        return row
|
||||||
|
def apply_pack_patch(patch_id: int, editor_id: int, note: str):
    """Apply an approved patch proposal to its pack's stored document.

    Appends an audit entry to the pack's "applied_patches" list and, for
    prerequisite-style changes (proposed dict carrying "affected_concept" and
    "suggested_prereq"), inserts the prerequisite into the matching concept
    if not already present. Marks the patch "applied", bumps its version,
    and snapshots it. Returns the updated patch row, or None if the patch or
    its pack is missing.
    """
    with SessionLocal() as db:
        patch = db.get(PackPatchProposalORM, patch_id)
        if patch is None:
            return None
        pack = db.get(PackORM, patch.pack_id)
        if pack is None:
            return None
        pack_data = json.loads(pack.data_json or "{}")
        proposed = json.loads(patch.proposed_change_json or "{}")
        # Audit trail kept inside the pack document itself.
        pack_data.setdefault("applied_patches", []).append({
            "patch_id": patch.id,
            "title": patch.title,
            "proposed_change": proposed,
            "applied_at": now_iso(),
        })
        if "affected_concept" in proposed and "suggested_prereq" in proposed:
            for concept in pack_data.get("concepts", []):
                if concept.get("id") == proposed["affected_concept"]:
                    prereqs = concept.setdefault("prerequisites", [])
                    if proposed["suggested_prereq"] not in prereqs:
                        prereqs.append(proposed["suggested_prereq"])
        pack.data_json = json.dumps(pack_data)
        patch.status = "applied"
        # Fix: bump the version before snapshotting. Previously the "applied"
        # snapshot reused the latest version number already written by
        # create/edit, producing duplicate (object, version_number) rows in
        # the object-version log.
        patch.current_version += 1
        db.commit()
        db.refresh(patch)
        _create_version("pack_patch", patch_id, patch.current_version, {
            "title": patch.title,
            "proposed_change": json.loads(patch.proposed_change_json or "{}"),
            "status": patch.status,
        }, editor_id, note)
        return patch
|
||||||
|
def export_curriculum_draft(draft_id: int):
    """Export a curriculum draft as raw markdown plus a pretty JSON dump.

    Returns None when no draft with ``draft_id`` exists.
    """
    draft = get_curriculum_draft(draft_id)
    if draft is None:
        return None
    rendered_json = json.dumps(draft, indent=2)
    return {
        "markdown": draft["content_markdown"],
        "json": rendered_json,
    }
|
||||||
|
|
||||||
|
def export_skill_bundle(bundle_id: int):
    """Export a skill bundle in both JSON and YAML form.

    Returns None when no bundle with ``bundle_id`` exists.
    """
    # Imported lazily so the rest of the module works without PyYAML loaded.
    import yaml

    bundle = get_skill_bundle(bundle_id)
    if bundle is None:
        return None
    as_json = json.dumps(bundle, indent=2)
    as_yaml = yaml.safe_dump(bundle, sort_keys=False)
    return {"json": as_json, "yaml": as_yaml}
|
||||||
|
|
||||||
|
def create_synthesis_candidate(source_concept_id, target_concept_id, source_pack_id, target_pack_id, synthesis_kind, score_semantic, score_structural, score_trajectory, score_review_history, explanation, evidence):
    """Persist a synthesis candidate row and return its database id.

    The total score is a fixed weighted blend of the component scores plus
    the ``novelty`` value carried inside ``evidence`` (defaulting to 0.0).
    """
    novelty = evidence.get("novelty", 0.0)
    score_total = (
        0.35 * score_semantic
        + 0.25 * score_structural
        + 0.20 * score_trajectory
        + 0.10 * score_review_history
        + 0.10 * novelty
    )
    with SessionLocal() as db:
        candidate = SynthesisCandidateORM(
            source_concept_id=source_concept_id,
            target_concept_id=target_concept_id,
            source_pack_id=source_pack_id,
            target_pack_id=target_pack_id,
            synthesis_kind=synthesis_kind,
            score_total=score_total,
            score_semantic=score_semantic,
            score_structural=score_structural,
            score_trajectory=score_trajectory,
            score_review_history=score_review_history,
            explanation=explanation,
            evidence_json=json.dumps(evidence),
            current_status="proposed",
            created_at=now_iso(),
        )
        db.add(candidate)
        db.commit()
        db.refresh(candidate)
        return candidate.id
|
||||||
|
|
||||||
|
def list_synthesis_candidates():
    """Return all synthesis candidates, highest score first (id desc on ties)."""
    def _serialize(row):
        # Flatten an ORM row into the API's dict shape.
        return {
            "synthesis_id": row.id,
            "source_concept_id": row.source_concept_id,
            "target_concept_id": row.target_concept_id,
            "source_pack_id": row.source_pack_id,
            "target_pack_id": row.target_pack_id,
            "synthesis_kind": row.synthesis_kind,
            "score_total": row.score_total,
            "score_semantic": row.score_semantic,
            "score_structural": row.score_structural,
            "score_trajectory": row.score_trajectory,
            "score_review_history": row.score_review_history,
            "explanation": row.explanation,
            "evidence": json.loads(row.evidence_json or "{}"),
            "current_status": row.current_status,
            "created_at": row.created_at,
        }

    with SessionLocal() as db:
        query = select(SynthesisCandidateORM).order_by(
            SynthesisCandidateORM.score_total.desc(),
            SynthesisCandidateORM.id.desc(),
        )
        return [_serialize(row) for row in db.execute(query).scalars().all()]
|
||||||
|
|
||||||
|
def get_synthesis_candidate(synthesis_id: int):
    """Fetch one synthesis candidate as a dict, or None when it is missing."""
    with SessionLocal() as db:
        row = db.get(SynthesisCandidateORM, synthesis_id)
        if row is None:
            return None
        return {
            "synthesis_id": row.id,
            "source_concept_id": row.source_concept_id,
            "target_concept_id": row.target_concept_id,
            "source_pack_id": row.source_pack_id,
            "target_pack_id": row.target_pack_id,
            "synthesis_kind": row.synthesis_kind,
            "score_total": row.score_total,
            "score_semantic": row.score_semantic,
            "score_structural": row.score_structural,
            "score_trajectory": row.score_trajectory,
            "score_review_history": row.score_review_history,
            "explanation": row.explanation,
            "evidence": json.loads(row.evidence_json or "{}"),
            "current_status": row.current_status,
            "created_at": row.created_at,
        }
|
||||||
|
|
|
||||||
|
|
@ -1,34 +1,53 @@
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
|
import json
|
||||||
from sqlalchemy import select
|
from sqlalchemy import select
|
||||||
from .db import Base, engine, SessionLocal
|
from .db import Base, engine, SessionLocal
|
||||||
from .orm import UserORM
|
from .orm import UserORM, PackORM
|
||||||
from .auth import hash_password
|
from .auth import hash_password
|
||||||
from .repository import upsert_pack, create_learner
|
|
||||||
from .models import PackData, PackConcept, GraphPosition, CrossPackLink
|
|
||||||
|
|
||||||
def _ensure_user(db, username: str, role: str) -> None:
    """Insert a demo user with the shared demo password unless it already exists."""
    existing = db.execute(
        select(UserORM).where(UserORM.username == username)
    ).scalar_one_or_none()
    if existing is None:
        db.add(UserORM(username=username, password_hash=hash_password("demo-pass"), role=role, is_active=True))


def _ensure_pack(db, pack_id: str, title: str, subtitle: str, concepts: list) -> None:
    """Insert a published personal demo pack owned by user 1 unless it exists."""
    if db.get(PackORM, pack_id) is None:
        db.add(PackORM(
            id=pack_id,
            owner_user_id=1,
            policy_lane="personal",
            title=title,
            subtitle=subtitle,
            level="novice-friendly",
            is_published=True,
            data_json=json.dumps({"id": pack_id, "title": title, "concepts": concepts}),
        ))


def main():
    """Create the database schema and idempotently seed demo users and packs.

    Seeds two users (admin "wesley", reviewer "reviewer") and two published
    demo packs; safe to re-run since every insert is guarded by an existence
    check.
    """
    Base.metadata.create_all(bind=engine)
    with SessionLocal() as db:
        _ensure_user(db, "wesley", "admin")
        _ensure_user(db, "reviewer", "reviewer")
        _ensure_pack(db, "biology-pack", "Biology Pack", "Core biology concepts", [
            {"id": "selection", "title": "Natural Selection", "prerequisites": ["variation"]},
            {"id": "variation", "title": "Variation", "prerequisites": []},
            {"id": "drift", "title": "Genetic Drift", "prerequisites": ["variation"]},
        ])
        _ensure_pack(db, "math-pack", "Math Pack", "Core math concepts", [
            {"id": "random_walk", "title": "Random Walk", "prerequisites": ["variation"]},
            {"id": "variation", "title": "Variation in Models", "prerequisites": []},
            {"id": "optimization", "title": "Optimization", "prerequisites": []},
        ])
        db.commit()
|
|
||||||
|
|
|
||||||
|
|
@ -14,57 +14,35 @@ def _norm(text: str) -> set[str]:
|
||||||
def _semantic_similarity(a: dict, b: dict) -> float:
    """Jaccard overlap of normalized title + prerequisite tokens of two concepts.

    Returns 0.0 when either concept yields no tokens at all.
    """
    tokens_a = _norm(a.get("title", "")) | _norm(" ".join(a.get("prerequisites", [])))
    tokens_b = _norm(b.get("title", "")) | _norm(" ".join(b.get("prerequisites", [])))
    if not tokens_a or not tokens_b:
        return 0.0
    shared = tokens_a & tokens_b
    return len(shared) / len(tokens_a | tokens_b)
|
||||||
|
|
||||||
def _structural_similarity(a: dict, b: dict) -> float:
|
def _structural_similarity(a: dict, b: dict) -> float:
|
||||||
pa = set(a.get("prerequisites", []))
|
pa = set(a.get("prerequisites", [])); pb = set(b.get("prerequisites", []))
|
||||||
pb = set(b.get("prerequisites", []))
|
if not pa and not pb: return 0.6
|
||||||
if not pa and not pb:
|
if not pa or not pb: return 0.2
|
||||||
return 0.6
|
|
||||||
if not pa or not pb:
|
|
||||||
return 0.2
|
|
||||||
return len(pa & pb) / len(pa | pb)
|
return len(pa & pb) / len(pa | pb)
|
||||||
|
|
||||||
def generate_synthesis_candidates(source_pack_id: str | None = None, target_pack_id: str | None = None, limit: int = 20):
    """Scan concept pairs across distinct packs and persist plausible links.

    When ``source_pack_id``/``target_pack_id`` name known packs, scanning is
    restricted to them; otherwise all packs are considered on that side.
    Returns the ids of the created candidates, stopping at ``limit``.
    """
    packs = list_packs()
    by_id = {p.id: p for p in packs}
    source_packs = [by_id[source_pack_id]] if source_pack_id and source_pack_id in by_id else packs
    target_packs = [by_id[target_pack_id]] if target_pack_id and target_pack_id in by_id else packs

    created = []
    seen = set()
    for src_pack in source_packs:
        for dst_pack in target_packs:
            if src_pack.id == dst_pack.id:
                # Only cross-pack pairs are interesting.
                continue
            for src_concept in _concepts(src_pack):
                for dst_concept in _concepts(dst_pack):
                    pair = (src_concept.get("id"), dst_concept.get("id"))
                    sem = _semantic_similarity(src_concept, dst_concept)
                    struct = _structural_similarity(src_concept, dst_concept)
                    traj = 0.4  # fixed trajectory prior
                    review_prior = 0.5  # fixed review-history prior
                    novelty = 0.0 if pair in seen else 1.0
                    total = 0.35 * sem + 0.25 * struct + 0.20 * traj + 0.10 * review_prior + 0.10 * novelty
                    if total < 0.45:
                        continue
                    sid = create_synthesis_candidate(
                        source_concept_id=src_concept.get("id", ""),
                        target_concept_id=dst_concept.get("id", ""),
                        source_pack_id=src_pack.id,
                        target_pack_id=dst_pack.id,
                        synthesis_kind="cross_pack_similarity",
                        score_semantic=sem,
                        score_structural=struct,
                        score_trajectory=traj,
                        score_review_history=review_prior,
                        explanation=f"Possible cross-pack overlap between '{src_concept.get('title')}' and '{dst_concept.get('title')}'.",
                        evidence={"novelty": novelty, "source_title": src_concept.get("title"), "target_title": dst_concept.get("title")},
                    )
                    seen.add(pair)
                    created.append(sid)
                    if len(created) >= limit:
                        return created
    return created
|
||||||
|
|
|
||||||
|
|
@ -3,6 +3,5 @@ from pathlib import Path
|
||||||
def test_scaffold_files_exist():
    """Smoke-check that the key scaffold files are present in the repository."""
    expected = (
        "src/didactopus/api.py",
        "src/didactopus/repository.py",
        "src/didactopus/orm.py",
        "webui/src/App.jsx",
    )
    for rel_path in expected:
        assert Path(rel_path).exists()
|
||||||
|
|
|
||||||
|
|
@ -3,7 +3,7 @@
|
||||||
<head>
|
<head>
|
||||||
<meta charset="UTF-8" />
|
<meta charset="UTF-8" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
<title>Didactopus Artifact Lifecycle</title>
|
<title>Didactopus Object Versioning</title>
|
||||||
<script type="module" src="/src/main.jsx"></script>
|
<script type="module" src="/src/main.jsx"></script>
|
||||||
</head>
|
</head>
|
||||||
<body><div id="root"></div></body>
|
<body><div id="root"></div></body>
|
||||||
|
|
|
||||||
|
|
@ -1,5 +1,5 @@
|
||||||
{
|
{
|
||||||
"name": "didactopus-artifact-lifecycle-ui",
|
"name": "didactopus-object-versioning-ui",
|
||||||
"private": true,
|
"private": true,
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
|
|
|
||||||
|
|
@ -1,120 +1,153 @@
|
||||||
import React, { useEffect, useState } from "react";
|
import React, { useEffect, useState } from "react";
|
||||||
import { login, refresh, fetchPacks, fetchLearnerState, putLearnerState, createRenderJob, listRenderJobs, listArtifacts, updateRetention, exportKnowledge } from "./api";
|
import { login, createCandidate, promoteCandidate, listPackPatches, listCurriculumDrafts, listSkillBundles, editPatch, applyPatch, editCurriculum, editSkill, listVersions, exportCurriculum, exportSkill } from "./api";
|
||||||
import { loadAuth, saveAuth, clearAuth } from "./authStore";
|
|
||||||
|
|
||||||
function LoginView({ onAuth }) {
|
function LoginView({ onAuth }) {
|
||||||
const [username, setUsername] = useState("wesley");
|
const [username, setUsername] = useState("reviewer");
|
||||||
const [password, setPassword] = useState("demo-pass");
|
const [password, setPassword] = useState("demo-pass");
|
||||||
const [error, setError] = useState("");
|
const [error, setError] = useState("");
|
||||||
async function doLogin() {
|
async function doLogin() {
|
||||||
try {
|
try { onAuth(await login(username, password)); }
|
||||||
const result = await login(username, password);
|
catch { setError("Login failed"); }
|
||||||
saveAuth(result);
|
|
||||||
onAuth(result);
|
|
||||||
} catch { setError("Login failed"); }
|
|
||||||
}
|
}
|
||||||
return (
|
return (
|
||||||
<div className="page narrow-page">
|
<div className="page narrow"><section className="card">
|
||||||
<section className="card narrow">
|
<h1>Didactopus object versioning</h1>
|
||||||
<h1>Didactopus login</h1>
|
<label>Username<input value={username} onChange={(e)=>setUsername(e.target.value)} /></label>
|
||||||
<label>Username<input value={username} onChange={(e) => setUsername(e.target.value)} /></label>
|
<label>Password<input type="password" value={password} onChange={(e)=>setPassword(e.target.value)} /></label>
|
||||||
<label>Password<input type="password" value={password} onChange={(e) => setPassword(e.target.value)} /></label>
|
<button className="primary" onClick={doLogin}>Login</button>
|
||||||
<button className="primary" onClick={doLogin}>Login</button>
|
{error ? <div className="error">{error}</div> : null}
|
||||||
{error ? <div className="error">{error}</div> : null}
|
</section></div>
|
||||||
</section>
|
|
||||||
</div>
|
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export default function App() {
|
export default function App() {
|
||||||
const [auth, setAuth] = useState(loadAuth());
|
const [auth, setAuth] = useState(null);
|
||||||
const [packs, setPacks] = useState([]);
|
const [patches, setPatches] = useState([]);
|
||||||
const [learnerId] = useState("wesley-learner");
|
const [drafts, setDrafts] = useState([]);
|
||||||
const [packId, setPackId] = useState("");
|
const [skills, setSkills] = useState([]);
|
||||||
const [jobs, setJobs] = useState([]);
|
const [versions, setVersions] = useState([]);
|
||||||
const [artifacts, setArtifacts] = useState([]);
|
const [exports, setExports] = useState({});
|
||||||
const [knowledge, setKnowledge] = useState(null);
|
|
||||||
const [format, setFormat] = useState("gif");
|
|
||||||
const [fps, setFps] = useState(2);
|
|
||||||
const [message, setMessage] = useState("");
|
const [message, setMessage] = useState("");
|
||||||
|
|
||||||
async function refreshAuthToken() {
|
async function reload(token = auth?.access_token) {
|
||||||
if (!auth?.refresh_token) return null;
|
if (!token) return;
|
||||||
try {
|
const [p, d, s] = await Promise.all([listPackPatches(token), listCurriculumDrafts(token), listSkillBundles(token)]);
|
||||||
const result = await refresh(auth.refresh_token);
|
setPatches(p); setDrafts(d); setSkills(s);
|
||||||
saveAuth(result);
|
|
||||||
setAuth(result);
|
|
||||||
return result;
|
|
||||||
} catch {
|
|
||||||
clearAuth();
|
|
||||||
setAuth(null);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function guarded(fn) {
|
useEffect(() => { if (auth?.access_token) reload(auth.access_token); }, [auth]);
|
||||||
try { return await fn(auth.access_token); }
|
|
||||||
catch {
|
async function seedAll() {
|
||||||
const next = await refreshAuthToken();
|
const candidate = await createCandidate(auth.access_token, {
|
||||||
if (!next) throw new Error("auth failed");
|
source_type: "learner_export",
|
||||||
return await fn(next.access_token);
|
learner_id: "wesley-learner",
|
||||||
}
|
pack_id: "biology-pack",
|
||||||
|
candidate_kind: "hidden_prerequisite",
|
||||||
|
title: "Probability intuition before drift",
|
||||||
|
summary: "Learner evidence suggests drift is easier after random-process intuition.",
|
||||||
|
structured_payload: {
|
||||||
|
affected_concept: "drift",
|
||||||
|
suggested_prereq: "random_walk",
|
||||||
|
source_concepts: ["drift", "variation"],
|
||||||
|
prerequisites: ["variation", "random_walk"],
|
||||||
|
expected_inputs: ["text", "example"],
|
||||||
|
failure_modes: ["treating drift as directional"],
|
||||||
|
validation_checks: ["explains stochastic change"],
|
||||||
|
canonical_examples: ["coin-flip drift example"]
|
||||||
|
},
|
||||||
|
evidence_summary: "Repeated learner confusion with stochastic interpretation.",
|
||||||
|
confidence_hint: 0.8,
|
||||||
|
novelty_score: 0.7,
|
||||||
|
synthesis_score: 0.6,
|
||||||
|
triage_lane: "pack_improvement"
|
||||||
|
});
|
||||||
|
const candidateId = candidate.candidate_id;
|
||||||
|
await promoteCandidate(auth.access_token, candidateId, { promotion_target: "pack_improvement", target_object_id: "", promotion_status: "approved" });
|
||||||
|
|
||||||
|
const c2 = await createCandidate(auth.access_token, {
|
||||||
|
source_type: "learner_export",
|
||||||
|
learner_id: "wesley-learner",
|
||||||
|
pack_id: "biology-pack",
|
||||||
|
candidate_kind: "lesson_outline",
|
||||||
|
title: "Intro lesson on stochastic evolutionary change",
|
||||||
|
summary: "A lesson framing drift through random processes.",
|
||||||
|
structured_payload: { source_concepts: ["drift", "variation", "random_walk"] },
|
||||||
|
evidence_summary: "Good bridge opportunity for cross-pack synthesis.",
|
||||||
|
confidence_hint: 0.72,
|
||||||
|
novelty_score: 0.6,
|
||||||
|
synthesis_score: 0.75,
|
||||||
|
triage_lane: "curriculum_draft"
|
||||||
|
});
|
||||||
|
await promoteCandidate(auth.access_token, c2.candidate_id, { promotion_target: "curriculum_draft", target_object_id: "", promotion_status: "approved" });
|
||||||
|
|
||||||
|
const c3 = await createCandidate(auth.access_token, {
|
||||||
|
source_type: "learner_export",
|
||||||
|
learner_id: "wesley-learner",
|
||||||
|
pack_id: "biology-pack",
|
||||||
|
candidate_kind: "skill_bundle_candidate",
|
||||||
|
title: "Explain stochastic biological change",
|
||||||
|
summary: "Skill for recognizing and explaining stochastic population change.",
|
||||||
|
structured_payload: {
|
||||||
|
prerequisites: ["variation", "random_walk"],
|
||||||
|
expected_inputs: ["question", "scenario"],
|
||||||
|
failure_modes: ["teleological explanation"],
|
||||||
|
validation_checks: ["distinguishes drift from selection"],
|
||||||
|
canonical_examples: ["small population allele frequency drift"]
|
||||||
|
},
|
||||||
|
evidence_summary: "Could be reusable as an agent skill.",
|
||||||
|
confidence_hint: 0.74,
|
||||||
|
novelty_score: 0.58,
|
||||||
|
synthesis_score: 0.71,
|
||||||
|
triage_lane: "reusable_skill_bundle"
|
||||||
|
});
|
||||||
|
await promoteCandidate(auth.access_token, c3.candidate_id, { promotion_target: "reusable_skill_bundle", target_object_id: "", promotion_status: "approved" });
|
||||||
|
|
||||||
|
await reload();
|
||||||
|
setMessage("Seeded patch, curriculum draft, and skill bundle.");
|
||||||
}
|
}
|
||||||
|
|
||||||
async function reloadLists() {
|
async function inspectVersions(kind, id) {
|
||||||
setJobs(await guarded((token) => listRenderJobs(token, learnerId)));
|
const data = await listVersions(auth.access_token, kind, id);
|
||||||
setArtifacts(await guarded((token) => listArtifacts(token, learnerId)));
|
setVersions(data);
|
||||||
}
|
}
|
||||||
|
|
||||||
useEffect(() => {
|
async function revisePatch(id) {
|
||||||
if (!auth) return;
|
await editPatch(auth.access_token, id, {
|
||||||
async function load() {
|
payload: { reviewer_notes: "Elevated priority after synthesis review.", status: "approved" },
|
||||||
const p = await guarded((token) => fetchPacks(token));
|
note: "Reviewer note update"
|
||||||
setPacks(p);
|
});
|
||||||
setPackId(p[0]?.id || "");
|
await reload();
|
||||||
await reloadLists();
|
|
||||||
}
|
|
||||||
load();
|
|
||||||
}, [auth]);
|
|
||||||
|
|
||||||
async function generateDemo() {
|
|
||||||
let state = await guarded((token) => fetchLearnerState(token, learnerId));
|
|
||||||
const base = Date.now();
|
|
||||||
const events = [
|
|
||||||
["intro", 0.30, "exercise", 0],
|
|
||||||
["intro", 0.78, "review", 1000],
|
|
||||||
["second", 0.42, "exercise", 2000],
|
|
||||||
["second", 0.72, "review", 3000],
|
|
||||||
["third", 0.25, "exercise", 4000],
|
|
||||||
["branch", 0.60, "exercise", 5000],
|
|
||||||
];
|
|
||||||
const latest = {};
|
|
||||||
for (const [cid, score, kind, offset] of events) {
|
|
||||||
const ts = new Date(base + offset).toISOString();
|
|
||||||
state.history.push({ concept_id: cid, dimension: "mastery", score, confidence_hint: 0.6, timestamp: ts, kind, source_id: `demo-${cid}-${offset}` });
|
|
||||||
latest[cid] = { concept_id: cid, dimension: "mastery", score, confidence: Math.min(0.9, score), evidence_count: (latest[cid]?.evidence_count || 0) + 1, last_updated: ts };
|
|
||||||
}
|
|
||||||
state.records = Object.values(latest);
|
|
||||||
await guarded((token) => putLearnerState(token, learnerId, state));
|
|
||||||
setMessage("Demo state generated.");
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function createJob() {
|
async function applySelectedPatch(id) {
|
||||||
const result = await guarded((token) => createRenderJob(token, learnerId, packId, { learner_id: learnerId, pack_id: packId, format, fps, theme: "default", retention_class: "standard", retention_days: 30 }));
|
await applyPatch(auth.access_token, id, { note: "Merged into pack JSON" });
|
||||||
setMessage(`Render job ${result.job_id} queued.`);
|
await reload();
|
||||||
setTimeout(() => reloadLists(), 500);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function changeRetention(artifactId) {
|
async function reviseDraft(id) {
|
||||||
await guarded((token) => updateRetention(token, artifactId, { retention_class: "archive", retention_days: 365 }));
|
await editCurriculum(auth.access_token, id, {
|
||||||
await reloadLists();
|
payload: { editorial_notes: "Add random-walk bridge example.", status: "editorial_review" },
|
||||||
setMessage(`Artifact ${artifactId} retention updated.`);
|
note: "Editorial refinement"
|
||||||
|
});
|
||||||
|
await reload();
|
||||||
}
|
}
|
||||||
|
|
||||||
async function runKnowledgeExport() {
|
async function reviseSkill(id) {
|
||||||
const result = await guarded((token) => exportKnowledge(token, learnerId, packId, { learner_id: learnerId, pack_id: packId, export_kind: "knowledge_snapshot" }));
|
await editSkill(auth.access_token, id, {
|
||||||
setKnowledge(result);
|
payload: { status: "validation", validation_checks: ["distinguishes drift from selection", "uses stochastic terminology correctly"] },
|
||||||
setMessage("Knowledge export generated.");
|
note: "Validation criteria strengthened"
|
||||||
|
});
|
||||||
|
await reload();
|
||||||
|
}
|
||||||
|
|
||||||
|
async function doExportDraft(id) {
|
||||||
|
const out = await exportCurriculum(auth.access_token, id);
|
||||||
|
setExports(prev => ({ ...prev, ["draft:"+id]: out }));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function doExportSkill(id) {
|
||||||
|
const out = await exportSkill(auth.access_token, id);
|
||||||
|
setExports(prev => ({ ...prev, ["skill:"+id]: out }));
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!auth) return <LoginView onAuth={setAuth} />;
|
if (!auth) return <LoginView onAuth={setAuth} />;
|
||||||
|
|
@ -123,46 +156,65 @@ export default function App() {
|
||||||
<div className="page">
|
<div className="page">
|
||||||
<header className="hero">
|
<header className="hero">
|
||||||
<div>
|
<div>
|
||||||
<h1>Didactopus artifact lifecycle + knowledge export</h1>
|
<h1>Object editing, versioning, apply, and export</h1>
|
||||||
<p>Manage artifact retention and turn learner state into reusable knowledge outputs.</p>
|
<p>Promoted objects can now be revised, versioned, merged into packs, and exported in reusable formats.</p>
|
||||||
<div className="muted">{message}</div>
|
<div className="muted">{message}</div>
|
||||||
</div>
|
</div>
|
||||||
<div className="controls">
|
<div className="toolbar">
|
||||||
<label>Pack
|
<button onClick={seedAll}>Seed all objects</button>
|
||||||
<select value={packId} onChange={(e) => setPackId(e.target.value)}>
|
<button onClick={() => reload()}>Refresh</button>
|
||||||
{packs.map((p) => <option key={p.id} value={p.id}>{p.title}</option>)}
|
|
||||||
</select>
|
|
||||||
</label>
|
|
||||||
<label>Format
|
|
||||||
<select value={format} onChange={(e) => setFormat(e.target.value)}>
|
|
||||||
<option value="gif">GIF</option>
|
|
||||||
<option value="mp4">MP4</option>
|
|
||||||
</select>
|
|
||||||
</label>
|
|
||||||
<label>FPS
|
|
||||||
<input type="number" value={fps} onChange={(e) => setFps(Number(e.target.value || 2))} />
|
|
||||||
</label>
|
|
||||||
<button onClick={generateDemo}>Generate demo state</button>
|
|
||||||
<button onClick={createJob}>Create render job</button>
|
|
||||||
<button onClick={runKnowledgeExport}>Export knowledge</button>
|
|
||||||
<button onClick={reloadLists}>Refresh lists</button>
|
|
||||||
<button onClick={() => { clearAuth(); setAuth(null); }}>Logout</button>
|
|
||||||
</div>
|
</div>
|
||||||
</header>
|
</header>
|
||||||
|
|
||||||
<main className="layout threecol">
|
<main className="grid3">
|
||||||
<section className="card">
|
<section>
|
||||||
<h2>Render jobs</h2>
|
<h2>Pack patches</h2>
|
||||||
<pre className="prebox">{JSON.stringify(jobs, null, 2)}</pre>
|
<div className="stack">
|
||||||
|
{patches.map(p => (
|
||||||
|
<div key={p.patch_id} className="card small">
|
||||||
|
<h3>{p.title}</h3>
|
||||||
|
<div className="muted">v{p.current_version} · {p.status}</div>
|
||||||
|
<pre>{JSON.stringify(p.proposed_change, null, 2)}</pre>
|
||||||
|
<button onClick={() => revisePatch(p.patch_id)}>Revise</button>
|
||||||
|
<button onClick={() => applySelectedPatch(p.patch_id)}>Apply to pack</button>
|
||||||
|
<button onClick={() => inspectVersions("pack_patch", p.patch_id)}>Versions</button>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
</section>
|
</section>
|
||||||
<section className="card">
|
<section>
|
||||||
<h2>Artifacts</h2>
|
<h2>Curriculum drafts</h2>
|
||||||
<pre className="prebox">{JSON.stringify(artifacts, null, 2)}</pre>
|
<div className="stack">
|
||||||
{artifacts[0] ? <button onClick={() => changeRetention(artifacts[0].artifact_id)}>Archive newest artifact</button> : null}
|
{drafts.map(d => (
|
||||||
|
<div key={d.draft_id} className="card small">
|
||||||
|
<h3>{d.topic_focus}</h3>
|
||||||
|
<div className="muted">v{d.current_version} · {d.status}</div>
|
||||||
|
<pre>{d.content_markdown}</pre>
|
||||||
|
<button onClick={() => reviseDraft(d.draft_id)}>Revise</button>
|
||||||
|
<button onClick={() => inspectVersions("curriculum_draft", d.draft_id)}>Versions</button>
|
||||||
|
<button onClick={() => doExportDraft(d.draft_id)}>Export</button>
|
||||||
|
{exports["draft:"+d.draft_id] ? <pre>{JSON.stringify(exports["draft:"+d.draft_id], null, 2)}</pre> : null}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
<h2>Skill bundles</h2>
|
||||||
|
{skills.map(s => (
|
||||||
|
<div key={s.skill_bundle_id} className="card small">
|
||||||
|
<h3>{s.skill_name}</h3>
|
||||||
|
<div className="muted">v{s.current_version} · {s.status}</div>
|
||||||
|
<pre>{JSON.stringify(s, null, 2)}</pre>
|
||||||
|
<button onClick={() => reviseSkill(s.skill_bundle_id)}>Revise</button>
|
||||||
|
<button onClick={() => inspectVersions("skill_bundle", s.skill_bundle_id)}>Versions</button>
|
||||||
|
<button onClick={() => doExportSkill(s.skill_bundle_id)}>Export</button>
|
||||||
|
{exports["skill:"+s.skill_bundle_id] ? <pre>{JSON.stringify(exports["skill:"+s.skill_bundle_id], null, 2)}</pre> : null}
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
</section>
|
</section>
|
||||||
<section className="card">
|
<section>
|
||||||
<h2>Knowledge export</h2>
|
<h2>Version history</h2>
|
||||||
<pre className="prebox">{JSON.stringify(knowledge, null, 2)}</pre>
|
<div className="card small">
|
||||||
|
<pre>{JSON.stringify(versions, null, 2)}</pre>
|
||||||
|
</div>
|
||||||
</section>
|
</section>
|
||||||
</main>
|
</main>
|
||||||
</div>
|
</div>
|
||||||
|
|
|
||||||
|
|
@ -1,26 +1,71 @@
|
||||||
const API = "http://127.0.0.1:8011/api";
|
const API = "http://127.0.0.1:8011/api";
|
||||||
|
|
||||||
function authHeaders(token, json=true) {
|
function authHeaders(token, json=true) {
|
||||||
const h = { Authorization: `Bearer ${token}` };
|
const h = { Authorization: `Bearer ${token}` };
|
||||||
if (json) h["Content-Type"] = "application/json";
|
if (json) h["Content-Type"] = "application/json";
|
||||||
return h;
|
return h;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function login(username, password) {
  // Unauthenticated endpoint: exchanges credentials for a token payload.
  const init = {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ username, password }),
  };
  const res = await fetch(`${API}/login`, init);
  if (!res.ok) throw new Error("login failed");
  return await res.json();
}
||||||
export async function createCandidate(token, payload) {
  // Submit a new knowledge candidate for review.
  const response = await fetch(`${API}/knowledge-candidates`, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("createCandidate failed");
  }
  return response.json();
}
|
||||||
|
export async function promoteCandidate(token, candidateId, payload) {
  // Promote an existing knowledge candidate (id in the path, options in the body).
  const url = `${API}/knowledge-candidates/${candidateId}/promote`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("promoteCandidate failed");
  }
  return response.json();
}
|
||||||
|
export async function listPackPatches(token) {
  // Fetch all pack patches visible to the caller (GET: no body, no Content-Type).
  const response = await fetch(`${API}/pack-patches`, {
    headers: authHeaders(token, false),
  });
  if (!response.ok) {
    throw new Error("listPackPatches failed");
  }
  return response.json();
}
|
||||||
|
export async function listCurriculumDrafts(token) {
  // Fetch all curriculum drafts.
  const response = await fetch(`${API}/curriculum-drafts`, {
    headers: authHeaders(token, false),
  });
  if (!response.ok) {
    throw new Error("listCurriculumDrafts failed");
  }
  return response.json();
}
|
||||||
|
export async function listSkillBundles(token) {
  // Fetch all skill bundles.
  const response = await fetch(`${API}/skill-bundles`, {
    headers: authHeaders(token, false),
  });
  if (!response.ok) {
    throw new Error("listSkillBundles failed");
  }
  return response.json();
}
|
||||||
|
export async function editPatch(token, patchId, payload) {
  // Apply an edit to a pack patch draft.
  const url = `${API}/pack-patches/${patchId}/edit`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("editPatch failed");
  }
  return response.json();
}
|
||||||
|
export async function applyPatch(token, patchId, payload) {
  // Apply a pack patch to its target pack.
  const url = `${API}/pack-patches/${patchId}/apply`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("applyPatch failed");
  }
  return response.json();
}
|
||||||
|
export async function editCurriculum(token, draftId, payload) {
  // Apply an edit to a curriculum draft.
  const url = `${API}/curriculum-drafts/${draftId}/edit`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("editCurriculum failed");
  }
  return response.json();
}
|
||||||
|
export async function editSkill(token, bundleId, payload) {
  // Apply an edit to a skill bundle.
  const url = `${API}/skill-bundles/${bundleId}/edit`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("editSkill failed");
  }
  return response.json();
}
|
||||||
|
export async function listVersions(token, objectKind, objectId) {
  // Fetch the version history for one object (kind + id identify it).
  const url = `${API}/object-versions/${objectKind}/${objectId}`;
  const response = await fetch(url, { headers: authHeaders(token, false) });
  if (!response.ok) {
    throw new Error("listVersions failed");
  }
  return response.json();
}
|
||||||
|
export async function exportCurriculum(token, draftId) {
  // Export a curriculum draft; the server returns the export as JSON.
  const url = `${API}/curriculum-drafts/${draftId}/export`;
  const response = await fetch(url, { headers: authHeaders(token, false) });
  if (!response.ok) {
    throw new Error("exportCurriculum failed");
  }
  return response.json();
}
|
||||||
|
export async function exportSkill(token, bundleId) {
  // Export a skill bundle; the server returns the export as JSON.
  const url = `${API}/skill-bundles/${bundleId}/export`;
  const response = await fetch(url, { headers: authHeaders(token, false) });
  if (!response.ok) {
    throw new Error("exportSkill failed");
  }
  return response.json();
}
|
||||||
export async function fetchPacks(token) {
  // List all content packs.
  const response = await fetch(`${API}/packs`, { headers: authHeaders(token, false) });
  if (!response.ok) {
    throw new Error("fetchPacks failed");
  }
  return response.json();
}
|
|
||||||
export async function fetchLearnerState(token, learnerId) {
  // Read one learner's persisted state.
  const url = `${API}/learners/${learnerId}/state`;
  const response = await fetch(url, { headers: authHeaders(token, false) });
  if (!response.ok) {
    throw new Error("fetchLearnerState failed");
  }
  return response.json();
}
|
|
||||||
export async function putLearnerState(token, learnerId, state) {
  // Replace one learner's persisted state (full PUT, not a patch).
  const url = `${API}/learners/${learnerId}/state`;
  const response = await fetch(url, {
    method: "PUT",
    headers: authHeaders(token),
    body: JSON.stringify(state),
  });
  if (!response.ok) {
    throw new Error("putLearnerState failed");
  }
  return response.json();
}
|
|
||||||
export async function createRenderJob(token, learnerId, packId, payload) {
  // Queue a media render job for a learner/pack pair.
  const url = `${API}/learners/${learnerId}/render-jobs/${packId}`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("createRenderJob failed");
  }
  return response.json();
}
|
|
||||||
export async function listRenderJobs(token, learnerId) {
  // List render jobs for one learner; the id travels in the query string,
  // so it must be URL-encoded.
  const query = `learner_id=${encodeURIComponent(learnerId)}`;
  const response = await fetch(`${API}/render-jobs?${query}`, {
    headers: authHeaders(token, false),
  });
  if (!response.ok) {
    throw new Error("listRenderJobs failed");
  }
  return response.json();
}
|
|
||||||
export async function listArtifacts(token, learnerId) {
  // List artifacts for one learner; the id travels in the query string,
  // so it must be URL-encoded.
  const query = `learner_id=${encodeURIComponent(learnerId)}`;
  const response = await fetch(`${API}/artifacts?${query}`, {
    headers: authHeaders(token, false),
  });
  if (!response.ok) {
    throw new Error("listArtifacts failed");
  }
  return response.json();
}
|
|
||||||
export async function updateRetention(token, artifactId, payload) {
  // Change the retention policy on a single artifact.
  const url = `${API}/artifacts/${artifactId}/retention`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("updateRetention failed");
  }
  return response.json();
}
|
|
||||||
export async function exportKnowledge(token, learnerId, packId, payload) {
  // Kick off a knowledge export for a learner/pack pair.
  const url = `${API}/learners/${learnerId}/knowledge-export/${packId}`;
  const response = await fetch(url, {
    method: "POST",
    headers: authHeaders(token),
    body: JSON.stringify(payload),
  });
  if (!response.ok) {
    throw new Error("exportKnowledge failed");
  }
  return response.json();
}
|
|
||||||
|
|
|
||||||
|
|
@ -1,23 +1,24 @@
|
||||||
/* Theme tokens shared across the app. */
:root {
  --bg: #f6f8fb;
  --card: #ffffff;
  --text: #1f2430;
  --muted: #5f6b7a;
  --border: #dbe1ea;
  --accent: #2d6cdf;
}

* { box-sizing: border-box; }

body {
  margin: 0;
  background: var(--bg);
  color: var(--text);
  font-family: Arial, Helvetica, sans-serif;
}

/* Page shells. */
.page { max-width: 1700px; margin: 0 auto; padding: 24px; }
.narrow { max-width: 520px; }

/* Hero banner and cards share the same panel chrome. */
.hero, .card {
  background: var(--card);
  border: 1px solid var(--border);
  border-radius: 18px;
  padding: 18px;
}
.hero {
  display: flex;
  justify-content: space-between;
  gap: 16px;
  margin-bottom: 18px;
}

/* Layout helpers. */
.grid3 { display: grid; grid-template-columns: 1fr 1.2fr 1fr; gap: 18px; }
.stack { display: grid; gap: 14px; }
.card.small h3 { margin-top: 0; }

/* Form controls. */
label { display: block; font-weight: 600; margin-bottom: 10px; }
input {
  width: 100%;
  margin-top: 6px;
  border: 1px solid var(--border);
  border-radius: 10px;
  padding: 10px;
  font: inherit;
  background: white;
}
button {
  border: 1px solid var(--border);
  background: white;
  border-radius: 12px;
  padding: 10px 12px;
  cursor: pointer;
  margin-right: 8px;
  margin-top: 8px;
}
button.primary { background: var(--accent); color: white; border-color: var(--accent); }

.toolbar { display: flex; gap: 8px; align-items: flex-start; flex-wrap: wrap; }
.muted { color: var(--muted); }
pre { white-space: pre-wrap; word-break: break-word; font-size: 12px; margin: 0; }
.error { color: #b42318; margin-top: 10px; }

/* Collapse the three-column grid and the hero on narrow viewports. */
@media (max-width: 1350px) {
  .grid3 { grid-template-columns: 1fr; }
  .hero { flex-direction: column; }
}
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue