diff --git a/.gitignore b/.gitignore index b91898d..e53afba 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ dist/ # Rust / Tauri src-tauri/target/ +src-tauri/gen/ # Local dev config (machine-specific paths) src-tauri/.cargo/config.toml diff --git a/docs/superpowers/plans/2026-04-13-orchai-phase3-agent-pipeline.md b/docs/superpowers/plans/2026-04-13-orchai-phase3-agent-pipeline.md new file mode 100644 index 0000000..8e74154 --- /dev/null +++ b/docs/superpowers/plans/2026-04-13-orchai-phase3-agent-pipeline.md @@ -0,0 +1,2335 @@ +# Orchai Phase 3: Agent Pipeline Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Build the agent orchestrator that consumes pending tickets via a sequential FIFO queue, runs a two-step CLI pipeline (analyst then developer), manages git worktrees for code fixes, and provides a frontend for viewing results (markdown reports + diff). + +**Architecture:** A background tokio task (orchestrator) polls the DB every 10 seconds for Pending tickets. For each ticket it runs an analyst CLI command (e.g. `claude --print`) with a structured prompt via stdin, stores the markdown report, then optionally creates a git worktree and runs a developer CLI command. Tauri events stream progress to the frontend. A Worktree Manager service handles git worktree lifecycle (create, diff, cherry-pick, delete). The frontend adds ticket list/detail pages with markdown rendering and a diff viewer. 
+ +**Tech Stack:** tokio (process spawning, async I/O), git CLI (worktree ops), react-markdown + remark-gfm (report rendering) + +--- + +## Phasing Context + +This is Plan 3 of 4: +- **Plan 1 (done):** Foundation -- Tauri scaffold, SQLite, Project Manager +- **Plan 2 (done):** Tuleap Integration -- credentials, API client, poller, filter engine, tracker config +- **Plan 3 (this):** Agent Pipeline -- orchestrator, worktree manager, ticket processing, results UI +- **Plan 4:** Notifications + Polish -- notifier, system notifications, dashboard + +--- + +## File Structure + +``` +src-tauri/ + Cargo.toml # modify: add tokio process+io-util features + src/ + lib.rs # modify: start orchestrator, register new commands + models/ + mod.rs # modify: add worktree module + ticket.rs # modify: add update methods, list_pending, status constants + worktree.rs # create: Worktree struct + CRUD + services/ + mod.rs # modify: add orchestrator, worktree_manager + worktree_manager.rs # create: git worktree operations + orchestrator.rs # create: queue consumer, CLI runner, prompt builder + commands/ + mod.rs # modify: add orchestrator, worktree + orchestrator.rs # create: retry_ticket, cancel_ticket, get_ticket_result + worktree.rs # create: list_worktrees, get_diff, apply_fix, delete, list_branches + +src/ + lib/ + types.ts # modify: add Worktree type, TicketResult type + api.ts # modify: add orchestrator + worktree API wrappers + components/ + tickets/ + TicketList.tsx # create: filterable ticket table + TicketDetail.tsx # create: info + markdown reports + diff + actions + projects/ + ProjectDashboard.tsx # modify: make ticket items clickable links + App.tsx # modify: add /projects/:projectId/tickets and /tickets/:ticketId routes +``` + +--- + +### Task 1: Add tokio features + extend ProcessedTicket model + +**Files:** +- Modify: `src-tauri/Cargo.toml` +- Modify: `src-tauri/src/models/ticket.rs` + +- [ ] **Step 1: Add tokio process and io-util features to Cargo.toml** + +In 
`src-tauri/Cargo.toml`, change the tokio line: + +```toml +tokio = { version = "1", features = ["time", "sync", "macros", "process", "io-util"] } +``` + +- [ ] **Step 2: Write failing tests for ProcessedTicket update methods** + +Append these tests to the existing `#[cfg(test)] mod tests` block in `src-tauri/src/models/ticket.rs`: + +```rust + #[test] + fn test_update_status() { + let (conn, tracker_id) = setup(); + let ticket = ProcessedTicket::insert_if_new(&conn, &tracker_id, 1, "T1", "{}") + .unwrap() + .unwrap(); + + ProcessedTicket::update_status(&conn, &ticket.id, "Analyzing").unwrap(); + let updated = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap(); + assert_eq!(updated.status, "Analyzing"); + } + + #[test] + fn test_set_analyst_report() { + let (conn, tracker_id) = setup(); + let ticket = ProcessedTicket::insert_if_new(&conn, &tracker_id, 1, "T1", "{}") + .unwrap() + .unwrap(); + + ProcessedTicket::set_analyst_report(&conn, &ticket.id, "## Report\nAll good.").unwrap(); + let updated = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap(); + assert_eq!(updated.analyst_report.unwrap(), "## Report\nAll good."); + } + + #[test] + fn test_set_developer_report() { + let (conn, tracker_id) = setup(); + let ticket = ProcessedTicket::insert_if_new(&conn, &tracker_id, 1, "T1", "{}") + .unwrap() + .unwrap(); + + ProcessedTicket::set_developer_report(&conn, &ticket.id, "Fixed in main.rs").unwrap(); + let updated = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap(); + assert_eq!(updated.developer_report.unwrap(), "Fixed in main.rs"); + assert!(updated.processed_at.is_some()); + } + + #[test] + fn test_set_worktree_info() { + let (conn, tracker_id) = setup(); + let ticket = ProcessedTicket::insert_if_new(&conn, &tracker_id, 1, "T1", "{}") + .unwrap() + .unwrap(); + + ProcessedTicket::set_worktree_info(&conn, &ticket.id, "/tmp/wt", "orchai/1").unwrap(); + let updated = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap(); + 
assert_eq!(updated.worktree_path.unwrap(), "/tmp/wt"); + assert_eq!(updated.branch_name.unwrap(), "orchai/1"); + } + + #[test] + fn test_list_pending() { + let (conn, tracker_id) = setup(); + ProcessedTicket::insert_if_new(&conn, &tracker_id, 1, "T1", "{}").unwrap(); + ProcessedTicket::insert_if_new(&conn, &tracker_id, 2, "T2", "{}").unwrap(); + + let pending = ProcessedTicket::list_pending(&conn).unwrap(); + assert_eq!(pending.len(), 2); + // FIFO order: T1 first (oldest detected_at) + assert_eq!(pending[0].artifact_id, 1); + assert_eq!(pending[1].artifact_id, 2); + + // Mark one as Analyzing, it should no longer be in pending + ProcessedTicket::update_status(&conn, &pending[0].id, "Analyzing").unwrap(); + let pending2 = ProcessedTicket::list_pending(&conn).unwrap(); + assert_eq!(pending2.len(), 1); + assert_eq!(pending2[0].artifact_id, 2); + } + + #[test] + fn test_set_error() { + let (conn, tracker_id) = setup(); + let ticket = ProcessedTicket::insert_if_new(&conn, &tracker_id, 1, "T1", "{}") + .unwrap() + .unwrap(); + + ProcessedTicket::set_error(&conn, &ticket.id, "CLI timeout after 600s").unwrap(); + let updated = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap(); + assert_eq!(updated.status, "Error"); + assert_eq!(updated.analyst_report.unwrap(), "CLI timeout after 600s"); + } +``` + +- [ ] **Step 3: Run tests to verify they fail** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo test --lib models::ticket::tests -- --nocapture 2>&1 | tail -20` +Expected: compilation errors (methods don't exist yet) + +- [ ] **Step 4: Implement the update methods** + +Add these methods to `impl ProcessedTicket` in `src-tauri/src/models/ticket.rs`, before the closing `}` of the impl block: + +```rust + pub fn update_status(conn: &Connection, id: &str, status: &str) -> Result<()> { + conn.execute( + "UPDATE processed_tickets SET status = ?1 WHERE id = ?2", + params![status, id], + )?; + Ok(()) + } + + pub fn set_analyst_report(conn: &Connection, id: &str, 
report: &str) -> Result<()> { + conn.execute( + "UPDATE processed_tickets SET analyst_report = ?1 WHERE id = ?2", + params![report, id], + )?; + Ok(()) + } + + pub fn set_developer_report(conn: &Connection, id: &str, report: &str) -> Result<()> { + conn.execute( + "UPDATE processed_tickets SET developer_report = ?1, processed_at = datetime('now') WHERE id = ?2", + params![report, id], + )?; + Ok(()) + } + + pub fn set_worktree_info( + conn: &Connection, + id: &str, + worktree_path: &str, + branch_name: &str, + ) -> Result<()> { + conn.execute( + "UPDATE processed_tickets SET worktree_path = ?1, branch_name = ?2 WHERE id = ?3", + params![worktree_path, branch_name, id], + )?; + Ok(()) + } + + pub fn list_pending(conn: &Connection) -> Result<Vec<Self>> { + let sql = format!("{} WHERE status = 'Pending' ORDER BY detected_at ASC", SELECT_ALL_COLS); + let mut stmt = conn.prepare(&sql)?; + let rows = stmt.query_map([], from_row)?; + rows.collect() + } + + pub fn set_error(conn: &Connection, id: &str, error_message: &str) -> Result<()> { + conn.execute( + "UPDATE processed_tickets SET status = 'Error', analyst_report = COALESCE(analyst_report, '') || ?1, processed_at = datetime('now') WHERE id = ?2", + params![error_message, id], + )?; + Ok(()) + } +``` + +Also remove `#[allow(dead_code)]` from `list_by_tracker` and `get_by_id` since they will be used now.
+ +- [ ] **Step 5: Run tests to verify they pass** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo test --lib models::ticket::tests -- --nocapture` +Expected: all ticket tests pass + +- [ ] **Step 6: Commit** + +```bash +cd /home/leclere/Projets/orchai +git add src-tauri/Cargo.toml src-tauri/src/models/ticket.rs +git commit -m "feat: extend ProcessedTicket with status updates, list_pending, and set_error" +``` + +--- + +### Task 2: Create Worktree model + +**Files:** +- Create: `src-tauri/src/models/worktree.rs` +- Modify: `src-tauri/src/models/mod.rs` + +- [ ] **Step 1: Add worktree module to mod.rs** + +In `src-tauri/src/models/mod.rs`, add: + +```rust +pub mod worktree; +``` + +- [ ] **Step 2: Create worktree.rs with struct, CRUD, and tests** + +Create `src-tauri/src/models/worktree.rs`: + +```rust +use rusqlite::{params, Connection, Result}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct Worktree { + pub id: String, + pub ticket_id: String, + pub path: String, + pub branch_name: String, + pub status: String, + pub created_at: String, + pub merged_at: Option<String>, + pub merged_into: Option<String>, +} + +fn from_row(row: &rusqlite::Row) -> rusqlite::Result<Worktree> { + Ok(Worktree { + id: row.get(0)?, + ticket_id: row.get(1)?, + path: row.get(2)?, + branch_name: row.get(3)?, + status: row.get(4)?, + created_at: row.get(5)?, + merged_at: row.get(6)?, + merged_into: row.get(7)?, + }) +} + +const SELECT_ALL_COLS: &str = "SELECT id, ticket_id, path, branch_name, status, \ + created_at, merged_at, merged_into FROM worktrees"; + +impl Worktree { + pub fn insert( + conn: &Connection, + ticket_id: &str, + path: &str, + branch_name: &str, + ) -> Result<Self> { + let id = Uuid::new_v4().to_string(); + let now = chrono::Utc::now().to_rfc3339(); + + conn.execute( + "INSERT INTO worktrees (id, ticket_id, path, branch_name, status, created_at) \ + VALUES (?1, ?2, ?3, ?4, 'Active', ?5)", + params![id, ticket_id, path,
branch_name, now], + )?; + + Ok(Worktree { + id, + ticket_id: ticket_id.to_string(), + path: path.to_string(), + branch_name: branch_name.to_string(), + status: "Active".to_string(), + created_at: now, + merged_at: None, + merged_into: None, + }) + } + + pub fn get_by_id(conn: &Connection, id: &str) -> Result<Self> { + let sql = format!("{} WHERE id = ?1", SELECT_ALL_COLS); + conn.query_row(&sql, params![id], from_row) + } + + pub fn get_by_ticket_id(conn: &Connection, ticket_id: &str) -> Result<Option<Self>> { + let sql = format!("{} WHERE ticket_id = ?1", SELECT_ALL_COLS); + let mut stmt = conn.prepare(&sql)?; + let mut rows = stmt.query_map(params![ticket_id], from_row)?; + match rows.next() { + Some(Ok(w)) => Ok(Some(w)), + Some(Err(e)) => Err(e), + None => Ok(None), + } + } + + pub fn list_by_project(conn: &Connection, project_id: &str) -> Result<Vec<Self>> { + let sql = format!( + "SELECT w.id, w.ticket_id, w.path, w.branch_name, w.status, \ + w.created_at, w.merged_at, w.merged_into \ + FROM worktrees w \ + JOIN processed_tickets pt ON w.ticket_id = pt.id \ + JOIN watched_trackers wt ON pt.tracker_id = wt.id \ + WHERE wt.project_id = ?1 \ + ORDER BY w.created_at DESC" + ); + let mut stmt = conn.prepare(&sql)?; + let rows = stmt.query_map(params![project_id], from_row)?; + rows.collect() + } + + pub fn set_merged( + conn: &Connection, + id: &str, + target_branch: &str, + ) -> Result<()> { + conn.execute( + "UPDATE worktrees SET status = 'Merged', merged_at = datetime('now'), merged_into = ?1 WHERE id = ?2", + params![target_branch, id], + )?; + Ok(()) + } + + pub fn delete(conn: &Connection, id: &str) -> Result<()> { + conn.execute("DELETE FROM worktrees WHERE id = ?1", params![id])?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db; + use crate::models::project::Project; + use crate::models::ticket::ProcessedTicket; + use crate::models::tracker::{AgentConfig, WatchedTracker}; + + fn setup() -> (Connection, String) { + let conn =
db::init_in_memory().expect("db init"); + let project = Project::insert(&conn, "Test", "/path", None, "main").unwrap(); + let agent_config = AgentConfig { + analyst_command: "echo".into(), + analyst_args: vec![], + developer_command: "echo".into(), + developer_args: vec![], + }; + let tracker = + WatchedTracker::insert(&conn, &project.id, 100, "Bugs", 10, agent_config, vec![]) + .unwrap(); + let ticket = ProcessedTicket::insert_if_new(&conn, &tracker.id, 42, "Bug 42", "{}") + .unwrap() + .unwrap(); + (conn, ticket.id) + } + + #[test] + fn test_insert_and_get_by_id() { + let (conn, ticket_id) = setup(); + + let wt = Worktree::insert(&conn, &ticket_id, "/tmp/orchai-42", "orchai/42").unwrap(); + assert_eq!(wt.status, "Active"); + assert_eq!(wt.branch_name, "orchai/42"); + + let found = Worktree::get_by_id(&conn, &wt.id).unwrap(); + assert_eq!(found.id, wt.id); + assert_eq!(found.ticket_id, ticket_id); + assert_eq!(found.path, "/tmp/orchai-42"); + } + + #[test] + fn test_get_by_ticket_id() { + let (conn, ticket_id) = setup(); + + let none = Worktree::get_by_ticket_id(&conn, &ticket_id).unwrap(); + assert!(none.is_none()); + + Worktree::insert(&conn, &ticket_id, "/tmp/wt", "orchai/42").unwrap(); + + let some = Worktree::get_by_ticket_id(&conn, &ticket_id).unwrap(); + assert!(some.is_some()); + assert_eq!(some.unwrap().ticket_id, ticket_id); + } + + #[test] + fn test_list_by_project() { + let conn = db::init_in_memory().expect("db init"); + let project = Project::insert(&conn, "P1", "/path", None, "main").unwrap(); + let agent_config = AgentConfig { + analyst_command: "echo".into(), + analyst_args: vec![], + developer_command: "echo".into(), + developer_args: vec![], + }; + let tracker = + WatchedTracker::insert(&conn, &project.id, 100, "Bugs", 10, agent_config, vec![]) + .unwrap(); + let t1 = ProcessedTicket::insert_if_new(&conn, &tracker.id, 1, "T1", "{}") + .unwrap() + .unwrap(); + let t2 = ProcessedTicket::insert_if_new(&conn, &tracker.id, 2, "T2", "{}") + .unwrap() 
+ .unwrap(); + + Worktree::insert(&conn, &t1.id, "/wt1", "orchai/1").unwrap(); + Worktree::insert(&conn, &t2.id, "/wt2", "orchai/2").unwrap(); + + let worktrees = Worktree::list_by_project(&conn, &project.id).unwrap(); + assert_eq!(worktrees.len(), 2); + } + + #[test] + fn test_set_merged() { + let (conn, ticket_id) = setup(); + let wt = Worktree::insert(&conn, &ticket_id, "/tmp/wt", "orchai/42").unwrap(); + + Worktree::set_merged(&conn, &wt.id, "feature/login").unwrap(); + let updated = Worktree::get_by_id(&conn, &wt.id).unwrap(); + assert_eq!(updated.status, "Merged"); + assert_eq!(updated.merged_into.unwrap(), "feature/login"); + assert!(updated.merged_at.is_some()); + } + + #[test] + fn test_delete() { + let (conn, ticket_id) = setup(); + let wt = Worktree::insert(&conn, &ticket_id, "/tmp/wt", "orchai/42").unwrap(); + + Worktree::delete(&conn, &wt.id).unwrap(); + let result = Worktree::get_by_id(&conn, &wt.id); + assert!(result.is_err()); // Not found + } +} +``` + +- [ ] **Step 3: Run tests to verify they pass** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo test --lib models::worktree::tests -- --nocapture` +Expected: all 5 worktree tests pass + +- [ ] **Step 4: Commit** + +```bash +cd /home/leclere/Projets/orchai +git add src-tauri/src/models/mod.rs src-tauri/src/models/worktree.rs +git commit -m "feat: add Worktree model with CRUD operations" +``` + +--- + +### Task 3: Worktree Manager service + +**Files:** +- Create: `src-tauri/src/services/worktree_manager.rs` +- Modify: `src-tauri/src/services/mod.rs` + +- [ ] **Step 1: Add worktree_manager to services/mod.rs** + +In `src-tauri/src/services/mod.rs`, add: + +```rust +pub mod worktree_manager; +``` + +- [ ] **Step 2: Create worktree_manager.rs with git operations** + +Create `src-tauri/src/services/worktree_manager.rs`: + +```rust +use std::path::Path; +use std::process::Command; + +fn run_git(project_path: &str, args: &[&str]) -> Result<String, String> { + let output = Command::new("git") + .args(args) +
.current_dir(project_path) + .output() + .map_err(|e| format!("Failed to run git: {}", e))?; + + if output.status.success() { + Ok(String::from_utf8_lossy(&output.stdout).to_string()) + } else { + let stderr = String::from_utf8_lossy(&output.stderr); + Err(format!("git {} failed: {}", args.join(" "), stderr)) + } +} + +/// Creates a git worktree at `.orchai/worktrees/orchai-{artifact_id}` +/// with a new branch `orchai/{artifact_id}` based on `base_branch`. +/// Returns (worktree_path, branch_name). +pub fn create_worktree( + project_path: &str, + base_branch: &str, + artifact_id: i32, +) -> Result<(String, String), String> { + let orchai_dir = Path::new(project_path).join(".orchai").join("worktrees"); + std::fs::create_dir_all(&orchai_dir) + .map_err(|e| format!("Failed to create .orchai/worktrees dir: {}", e))?; + + let worktree_name = format!("orchai-{}", artifact_id); + let worktree_path = orchai_dir.join(&worktree_name); + let branch_name = format!("orchai/{}", artifact_id); + + let wt_path_str = worktree_path + .to_str() + .ok_or("Invalid worktree path")?; + + run_git( + project_path, + &["worktree", "add", wt_path_str, "-b", &branch_name, base_branch], + )?; + + Ok((wt_path_str.to_string(), branch_name)) +} + +/// Returns the unified diff between the base branch and the worktree branch. +pub fn get_diff(project_path: &str, base_branch: &str, branch_name: &str) -> Result<String, String> { + let range = format!("{}...{}", base_branch, branch_name); + run_git(project_path, &["diff", &range]) +} + +/// Lists commit hashes on the worktree branch that are not on the base branch (oldest first).
+pub fn list_commits( + project_path: &str, + base_branch: &str, + branch_name: &str, +) -> Result<Vec<String>, String> { + let range = format!("{}..{}", base_branch, branch_name); + let output = run_git(project_path, &["log", &range, "--format=%H", "--reverse"])?; + Ok(output.lines().filter(|l| !l.is_empty()).map(String::from).collect()) +} + +/// Cherry-picks commits from the worktree branch into the target branch. +/// Saves and restores the current branch. +pub fn apply_fix( + project_path: &str, + base_branch: &str, + branch_name: &str, + target_branch: &str, +) -> Result<(), String> { + let commits = list_commits(project_path, base_branch, branch_name)?; + if commits.is_empty() { + return Err("No commits to cherry-pick".to_string()); + } + + // Save current branch + let current = run_git(project_path, &["rev-parse", "--abbrev-ref", "HEAD"])?; + let current = current.trim(); + + // Checkout target branch + run_git(project_path, &["checkout", target_branch])?; + + // Cherry-pick each commit + let mut cherry_args = vec!["cherry-pick"]; + let commit_refs: Vec<&str> = commits.iter().map(|s| s.as_str()).collect(); + cherry_args.extend(&commit_refs); + + let result = run_git(project_path, &cherry_args); + + if let Err(e) = &result { + // Abort cherry-pick on conflict + let _ = run_git(project_path, &["cherry-pick", "--abort"]); + // Restore original branch + let _ = run_git(project_path, &["checkout", current]); + return Err(format!("Cherry-pick failed (conflict?): {}", e)); + } + + // Restore original branch + run_git(project_path, &["checkout", current])?; + + Ok(()) +} + +/// Removes the git worktree and deletes the local branch.
+pub fn delete_worktree( + project_path: &str, + worktree_path: &str, + branch_name: &str, +) -> Result<(), String> { + // Remove worktree (force in case of unclean state) + run_git(project_path, &["worktree", "remove", worktree_path, "--force"])?; + // Delete branch + let _ = run_git(project_path, &["branch", "-D", branch_name]); + Ok(()) +} + +/// Lists local branch names. +pub fn list_local_branches(project_path: &str) -> Result<Vec<String>, String> { + let output = run_git(project_path, &["branch", "--format=%(refname:short)"])?; + Ok(output.lines().filter(|l| !l.is_empty()).map(String::from).collect()) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::process::Command; + + fn setup_test_repo() -> tempfile::TempDir { + let dir = tempfile::tempdir().expect("create temp dir"); + let path = dir.path().to_str().unwrap(); + + Command::new("git").args(["init"]).current_dir(path).output().unwrap(); + Command::new("git").args(["config", "user.email", "test@test.com"]).current_dir(path).output().unwrap(); + Command::new("git").args(["config", "user.name", "Test"]).current_dir(path).output().unwrap(); + + // Create initial commit on main + std::fs::write(dir.path().join("README.md"), "# Test").unwrap(); + Command::new("git").args(["add", "."]).current_dir(path).output().unwrap(); + Command::new("git").args(["commit", "-m", "init"]).current_dir(path).output().unwrap(); + + dir + } + + #[test] + fn test_create_worktree() { + let dir = setup_test_repo(); + let path = dir.path().to_str().unwrap(); + + let (wt_path, branch) = create_worktree(path, "main", 42).unwrap(); + assert!(wt_path.contains("orchai-42")); + assert_eq!(branch, "orchai/42"); + assert!(Path::new(&wt_path).exists()); + } + + #[test] + fn test_get_diff_empty() { + let dir = setup_test_repo(); + let path = dir.path().to_str().unwrap(); + + let (_, branch) = create_worktree(path, "main", 1).unwrap(); + let diff = get_diff(path, "main", &branch).unwrap(); + assert!(diff.is_empty(), "No changes yet, diff should be
empty"); + } + + #[test] + fn test_get_diff_with_changes() { + let dir = setup_test_repo(); + let path = dir.path().to_str().unwrap(); + + let (wt_path, branch) = create_worktree(path, "main", 2).unwrap(); + + // Make a change in the worktree + std::fs::write(Path::new(&wt_path).join("fix.txt"), "fixed").unwrap(); + Command::new("git").args(["add", "."]).current_dir(&wt_path).output().unwrap(); + Command::new("git").args(["commit", "-m", "fix"]).current_dir(&wt_path).output().unwrap(); + + let diff = get_diff(path, "main", &branch).unwrap(); + assert!(diff.contains("fix.txt")); + assert!(diff.contains("+fixed")); + } + + #[test] + fn test_list_commits() { + let dir = setup_test_repo(); + let path = dir.path().to_str().unwrap(); + + let (wt_path, branch) = create_worktree(path, "main", 3).unwrap(); + + // Make two commits in the worktree + std::fs::write(Path::new(&wt_path).join("a.txt"), "a").unwrap(); + Command::new("git").args(["add", "."]).current_dir(&wt_path).output().unwrap(); + Command::new("git").args(["commit", "-m", "first"]).current_dir(&wt_path).output().unwrap(); + + std::fs::write(Path::new(&wt_path).join("b.txt"), "b").unwrap(); + Command::new("git").args(["add", "."]).current_dir(&wt_path).output().unwrap(); + Command::new("git").args(["commit", "-m", "second"]).current_dir(&wt_path).output().unwrap(); + + let commits = list_commits(path, "main", &branch).unwrap(); + assert_eq!(commits.len(), 2); + } + + #[test] + fn test_list_local_branches() { + let dir = setup_test_repo(); + let path = dir.path().to_str().unwrap(); + + create_worktree(path, "main", 10).unwrap(); + let branches = list_local_branches(path).unwrap(); + assert!(branches.contains(&"main".to_string())); + assert!(branches.contains(&"orchai/10".to_string())); + } + + #[test] + fn test_delete_worktree() { + let dir = setup_test_repo(); + let path = dir.path().to_str().unwrap(); + + let (wt_path, branch) = create_worktree(path, "main", 99).unwrap(); + 
assert!(Path::new(&wt_path).exists()); + + delete_worktree(path, &wt_path, &branch).unwrap(); + assert!(!Path::new(&wt_path).exists()); + + let branches = list_local_branches(path).unwrap(); + assert!(!branches.contains(&"orchai/99".to_string())); + } + + #[test] + fn test_apply_fix() { + let dir = setup_test_repo(); + let path = dir.path().to_str().unwrap(); + + // Create a target branch + Command::new("git").args(["branch", "feature/test"]).current_dir(path).output().unwrap(); + + // Create worktree and make a change + let (wt_path, branch) = create_worktree(path, "main", 7).unwrap(); + std::fs::write(Path::new(&wt_path).join("fix.txt"), "the fix").unwrap(); + Command::new("git").args(["add", "."]).current_dir(&wt_path).output().unwrap(); + Command::new("git").args(["commit", "-m", "apply fix"]).current_dir(&wt_path).output().unwrap(); + + // Apply fix to feature branch + apply_fix(path, "main", &branch, "feature/test").unwrap(); + + // Verify: check out feature branch and verify the file exists + Command::new("git").args(["checkout", "feature/test"]).current_dir(path).output().unwrap(); + assert!(Path::new(path).join("fix.txt").exists()); + + // Go back to main + Command::new("git").args(["checkout", "main"]).current_dir(path).output().unwrap(); + } +} +``` + +- [ ] **Step 3: Add `tempfile` dev-dependency to Cargo.toml** + +In `src-tauri/Cargo.toml`, add under `[dev-dependencies]`: + +```toml +[dev-dependencies] +tempfile = "3" +``` + +- [ ] **Step 4: Run tests to verify they pass** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo test --lib services::worktree_manager::tests -- --nocapture` +Expected: all 7 worktree manager tests pass + +- [ ] **Step 5: Commit** + +```bash +cd /home/leclere/Projets/orchai +git add src-tauri/Cargo.toml src-tauri/src/services/mod.rs src-tauri/src/services/worktree_manager.rs +git commit -m "feat: add Worktree Manager service with git worktree operations" +``` + +--- + +### Task 4: Agent Orchestrator - prompt building 
and verdict parsing + +**Files:** +- Create: `src-tauri/src/services/orchestrator.rs` +- Modify: `src-tauri/src/services/mod.rs` + +- [ ] **Step 1: Add orchestrator to services/mod.rs** + +In `src-tauri/src/services/mod.rs`, add: + +```rust +pub mod orchestrator; +``` + +- [ ] **Step 2: Create orchestrator.rs with prompt building, verdict parsing, and tests** + +Create `src-tauri/src/services/orchestrator.rs`: + +```rust +use crate::models::project::Project; +use crate::models::ticket::ProcessedTicket; +use crate::models::tracker::WatchedTracker; +use crate::models::worktree::Worktree; +use crate::services::worktree_manager; +use rusqlite::Connection; +use std::sync::{Arc, Mutex}; +use tauri::{AppHandle, Emitter}; +use tokio::io::{AsyncBufReadExt, BufReader}; +use tokio::process::Command; +use tokio::time::{interval, timeout, Duration}; + +#[derive(Debug, Clone, PartialEq)] +pub enum Verdict { + FixNeeded, + NoFix, +} + +pub fn build_analyst_prompt(ticket: &ProcessedTicket, project: &Project) -> String { + format!( + r#"Tu es un analyste technique. Voici un ticket Tuleap a analyser. + +## Ticket +- ID: {artifact_id} +- Titre: {title} +- Donnees: {data} + +## Contexte +- Projet: {project_name} +- Repo: {project_path} +- Branche de base: {base_branch} + +## Ta mission +1. Analyse le ticket et identifie les fichiers/fonctions concernes +2. Explique techniquement le probleme +3. Evalue si une correction de code est necessaire +4. 
Produis un rapport structure en markdown + +Termine ton rapport par un de ces verdicts sur une ligne separee: +[VERDICT: FIX_NEEDED] si une correction de code est necessaire +[VERDICT: NO_FIX] si aucune correction n'est necessaire"#, + artifact_id = ticket.artifact_id, + title = ticket.artifact_title, + data = ticket.artifact_data, + project_name = project.name, + project_path = project.path, + base_branch = project.base_branch, + ) +} + +pub fn build_developer_prompt( + ticket: &ProcessedTicket, + project: &Project, + analyst_report: &str, + worktree_path: &str, +) -> String { + format!( + r#"Tu es un developpeur. Tu dois corriger un bug ou implementer une fonctionnalite d'apres l'analyse suivante. + +## Rapport d'analyse +{analyst_report} + +## Ticket +- ID: {artifact_id} +- Titre: {title} + +## Contexte +- Projet: {project_name} +- Repo (worktree): {worktree_path} +- Branche de base: {base_branch} + +## Ta mission +1. Implemente la correction dans le code +2. Fais des commits atomiques avec des messages clairs +3. Produis un rapport en markdown decrivant les changements effectues"#, + analyst_report = analyst_report, + artifact_id = ticket.artifact_id, + title = ticket.artifact_title, + project_name = project.name, + worktree_path = worktree_path, + base_branch = project.base_branch, + ) +} + +pub fn parse_verdict(report: &str) -> Verdict { + // Search from the end of the report for the verdict line + for line in report.lines().rev() { + let trimmed = line.trim(); + if trimmed.contains("[VERDICT: NO_FIX]") { + return Verdict::NoFix; + } + if trimmed.contains("[VERDICT: FIX_NEEDED]") { + return Verdict::FixNeeded; + } + } + // Default: assume fix is needed if no verdict found + Verdict::FixNeeded +} + +/// Runs a CLI command with the prompt piped to stdin. +/// Streams stdout lines as Tauri events. Returns the full stdout output. 
+pub async fn run_cli_command( + command: &str, + args: &[String], + prompt: &str, + working_dir: &str, + timeout_secs: u64, + app_handle: &AppHandle, + ticket_id: &str, +) -> Result<String, String> { + let mut child = Command::new(command) + .args(args) + .stdin(std::process::Stdio::piped()) + .stdout(std::process::Stdio::piped()) + .stderr(std::process::Stdio::piped()) + .current_dir(working_dir) + .spawn() + .map_err(|e| format!("Failed to spawn '{}': {}", command, e))?; + + // Write prompt to stdin + if let Some(mut stdin) = child.stdin.take() { + use tokio::io::AsyncWriteExt; + stdin + .write_all(prompt.as_bytes()) + .await + .map_err(|e| format!("Failed to write to stdin: {}", e))?; + // stdin is dropped here, closing it + } + + // Read stdout line by line, streaming events + let stdout = child.stdout.take().ok_or("Failed to capture stdout")?; + let mut reader = BufReader::new(stdout).lines(); + let mut output = String::new(); + + let read_future = async { + while let Ok(Some(line)) = reader.next_line().await { + let _ = app_handle.emit( + "ticket-processing-progress", + serde_json::json!({ + "ticket_id": ticket_id, + "output_chunk": line, + }), + ); + output.push_str(&line); + output.push('\n'); + } + output + }; + + let result = timeout(Duration::from_secs(timeout_secs), read_future) + .await + .map_err(|_| format!("CLI command timed out after {}s", timeout_secs))?; + + // Wait for process to finish + let status = child + .wait() + .await + .map_err(|e| format!("Failed to wait for process: {}", e))?; + + if !status.success() { + let code = status.code().unwrap_or(-1); + return Err(format!("CLI command exited with code {}", code)); + } + + Ok(result) +} + +/// Processes a single ticket through the analyst -> developer pipeline. +async fn process_ticket( + db: &Arc<Mutex<Connection>>, + app_handle: &AppHandle, +) -> Result<bool, String> { + // 1.
Get next pending ticket + its tracker + project (under lock) + let (ticket, tracker, project) = { + let conn = db.lock().map_err(|e| format!("DB lock failed: {}", e))?; + + let pending = ProcessedTicket::list_pending(&conn) + .map_err(|e| format!("list_pending failed: {}", e))?; + + let ticket = match pending.into_iter().next() { + Some(t) => t, + None => return Ok(false), // No pending tickets + }; + + let tracker = WatchedTracker::get_by_id(&conn, &ticket.tracker_id) + .map_err(|e| format!("get tracker failed: {}", e))?; + + let project = Project::get_by_id(&conn, &tracker.project_id) + .map_err(|e| format!("get project failed: {}", e))?; + + // Mark as Analyzing before releasing lock + ProcessedTicket::update_status(&conn, &ticket.id, "Analyzing") + .map_err(|e| format!("update_status failed: {}", e))?; + + (ticket, tracker, project) + }; // lock released + + // Emit start event + let _ = app_handle.emit( + "ticket-processing-started", + serde_json::json!({ + "ticket_id": ticket.id, + "step": "analyst", + }), + ); + + // 2. Run analyst + let analyst_prompt = build_analyst_prompt(&ticket, &project); + let analyst_result = run_cli_command( + &tracker.agent_config.analyst_command, + &tracker.agent_config.analyst_args, + &analyst_prompt, + &project.path, + 600, // 10 minute timeout + app_handle, + &ticket.id, + ) + .await; + + let analyst_report = match analyst_result { + Ok(report) => report, + Err(e) => { + let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?; + let _ = ProcessedTicket::set_error(&conn, &ticket.id, &e); + let _ = app_handle.emit( + "ticket-processing-error", + serde_json::json!({ "ticket_id": ticket.id, "error": e }), + ); + return Ok(true); // Processed (with error), continue to next + } + }; + + // 3. Store analyst report + { + let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?; + ProcessedTicket::set_analyst_report(&conn, &ticket.id, &analyst_report) + .map_err(|e| format!("set_analyst_report: {}", e))?; + } + + // 4. 
Check verdict + let verdict = parse_verdict(&analyst_report); + if verdict == Verdict::NoFix { + let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?; + ProcessedTicket::update_status(&conn, &ticket.id, "Done") + .map_err(|e| format!("update_status: {}", e))?; + let _ = app_handle.emit( + "ticket-processing-done", + serde_json::json!({ "ticket_id": ticket.id }), + ); + return Ok(true); + } + + // 5. Check if ticket was cancelled while analyst was running + { + let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?; + let current = ProcessedTicket::get_by_id(&conn, &ticket.id) + .map_err(|e| format!("get_by_id: {}", e))?; + if current.status == "Cancelled" { + return Ok(true); + } + } + + // 6. Create worktree + let (wt_path, branch_name) = + worktree_manager::create_worktree(&project.path, &project.base_branch, ticket.artifact_id) + .map_err(|e| { + let conn = db.lock().ok(); + if let Some(conn) = conn { + let _ = ProcessedTicket::set_error(&conn, &ticket.id, &e); + } + e + })?; + + // Store worktree info in DB + { + let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?; + ProcessedTicket::set_worktree_info(&conn, &ticket.id, &wt_path, &branch_name) + .map_err(|e| format!("set_worktree_info: {}", e))?; + Worktree::insert(&conn, &ticket.id, &wt_path, &branch_name) + .map_err(|e| format!("insert worktree: {}", e))?; + ProcessedTicket::update_status(&conn, &ticket.id, "Developing") + .map_err(|e| format!("update_status: {}", e))?; + } + + // Emit developer start + let _ = app_handle.emit( + "ticket-processing-started", + serde_json::json!({ + "ticket_id": ticket.id, + "step": "developer", + }), + ); + + // 7. 
Run developer + let developer_prompt = + build_developer_prompt(&ticket, &project, &analyst_report, &wt_path); + let developer_result = run_cli_command( + &tracker.agent_config.developer_command, + &tracker.agent_config.developer_args, + &developer_prompt, + &wt_path, + 600, + app_handle, + &ticket.id, + ) + .await; + + let developer_report = match developer_result { + Ok(report) => report, + Err(e) => { + let conn = db.lock().map_err(|e2| format!("DB lock: {}", e2))?; + let _ = ProcessedTicket::set_error(&conn, &ticket.id, &e); + let _ = app_handle.emit( + "ticket-processing-error", + serde_json::json!({ "ticket_id": ticket.id, "error": e }), + ); + return Ok(true); + } + }; + + // 8. Store developer report and mark done + { + let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?; + ProcessedTicket::set_developer_report(&conn, &ticket.id, &developer_report) + .map_err(|e| format!("set_developer_report: {}", e))?; + ProcessedTicket::update_status(&conn, &ticket.id, "Done") + .map_err(|e| format!("update_status: {}", e))?; + } + + let _ = app_handle.emit( + "ticket-processing-done", + serde_json::json!({ "ticket_id": ticket.id }), + ); + + Ok(true) // Processed a ticket +} + +/// Starts the orchestrator background task that consumes the ticket queue. 
+pub fn start(db: Arc<Mutex<Connection>>, app_handle: AppHandle) {
+    tokio::spawn(async move {
+        let mut tick = interval(Duration::from_secs(10));
+        loop {
+            tick.tick().await;
+            match process_ticket(&db, &app_handle).await {
+                Ok(true) => {
+                    // Processed a ticket, immediately check for more
+                    continue;
+                }
+                Ok(false) => {
+                    // No pending tickets, wait for next tick
+                }
+                Err(e) => {
+                    eprintln!("orchestrator: {}", e);
+                }
+            }
+        }
+    });
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_build_analyst_prompt_contains_ticket_info() {
+        let ticket = ProcessedTicket {
+            id: "t1".into(),
+            tracker_id: "tr1".into(),
+            artifact_id: 42,
+            artifact_title: "Login crash on empty password".into(),
+            artifact_data: r#"{"id":42,"title":"Login crash"}"#.into(),
+            status: "Pending".into(),
+            analyst_report: None,
+            developer_report: None,
+            worktree_path: None,
+            branch_name: None,
+            detected_at: "2026-01-01T00:00:00Z".into(),
+            processed_at: None,
+        };
+        let project = Project {
+            id: "p1".into(),
+            name: "MyApp".into(),
+            path: "/home/user/myapp".into(),
+            cloned_from: None,
+            base_branch: "stable".into(),
+            created_at: "2026-01-01T00:00:00Z".into(),
+        };
+
+        let prompt = build_analyst_prompt(&ticket, &project);
+        assert!(prompt.contains("42"));
+        assert!(prompt.contains("Login crash on empty password"));
+        assert!(prompt.contains("MyApp"));
+        assert!(prompt.contains("/home/user/myapp"));
+        assert!(prompt.contains("stable"));
+        assert!(prompt.contains("[VERDICT: FIX_NEEDED]"));
+        assert!(prompt.contains("[VERDICT: NO_FIX]"));
+    }
+
+    #[test]
+    fn test_build_developer_prompt_contains_report() {
+        let ticket = ProcessedTicket {
+            id: "t1".into(),
+            tracker_id: "tr1".into(),
+            artifact_id: 42,
+            artifact_title: "Login crash".into(),
+            artifact_data: "{}".into(),
+            status: "Developing".into(),
+            analyst_report: None,
+            developer_report: None,
+            worktree_path: None,
+            branch_name: None,
+            detected_at: "2026-01-01T00:00:00Z".into(),
+            processed_at: None,
+        };
+        let project = Project 
{ + id: "p1".into(), + name: "MyApp".into(), + path: "/home/user/myapp".into(), + cloned_from: None, + base_branch: "main".into(), + created_at: "2026-01-01T00:00:00Z".into(), + }; + + let prompt = build_developer_prompt(&ticket, &project, "## Bug found in auth.rs", "/tmp/wt"); + assert!(prompt.contains("## Bug found in auth.rs")); + assert!(prompt.contains("42")); + assert!(prompt.contains("/tmp/wt")); + } + + #[test] + fn test_parse_verdict_fix_needed() { + let report = "## Analysis\nBug found.\n[VERDICT: FIX_NEEDED]\n"; + assert_eq!(parse_verdict(report), Verdict::FixNeeded); + } + + #[test] + fn test_parse_verdict_no_fix() { + let report = "## Analysis\nThis is a feature request, not a bug.\n[VERDICT: NO_FIX]\n"; + assert_eq!(parse_verdict(report), Verdict::NoFix); + } + + #[test] + fn test_parse_verdict_missing_defaults_to_fix() { + let report = "## Analysis\nSomething is wrong but I forgot the verdict."; + assert_eq!(parse_verdict(report), Verdict::FixNeeded); + } + + #[test] + fn test_parse_verdict_embedded_in_line() { + let report = "Verdict: [VERDICT: NO_FIX] - no code change needed."; + assert_eq!(parse_verdict(report), Verdict::NoFix); + } +} +``` + +- [ ] **Step 3: Run tests to verify they pass** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo test --lib services::orchestrator::tests -- --nocapture` +Expected: all 6 orchestrator tests pass + +- [ ] **Step 4: Commit** + +```bash +cd /home/leclere/Projets/orchai +git add src-tauri/src/services/mod.rs src-tauri/src/services/orchestrator.rs +git commit -m "feat: add Agent Orchestrator with prompt building, verdict parsing, and CLI pipeline" +``` + +--- + +### Task 5: Tauri commands for orchestrator and worktree + +**Files:** +- Create: `src-tauri/src/commands/orchestrator.rs` +- Create: `src-tauri/src/commands/worktree.rs` +- Modify: `src-tauri/src/commands/mod.rs` + +- [ ] **Step 1: Add new command modules to commands/mod.rs** + +In `src-tauri/src/commands/mod.rs`, add: + +```rust +pub mod 
orchestrator;
+pub mod worktree;
+```
+
+- [ ] **Step 2: Create commands/orchestrator.rs**
+
+Create `src-tauri/src/commands/orchestrator.rs`:
+
+```rust
+use crate::error::AppError;
+use crate::models::ticket::ProcessedTicket;
+use crate::models::worktree::Worktree;
+use crate::AppState;
+use serde::Serialize;
+use tauri::State;
+
+#[derive(Debug, Clone, Serialize)]
+pub struct TicketResult {
+    pub ticket: ProcessedTicket,
+    pub worktree: Option<Worktree>,
+}
+
+#[tauri::command]
+pub fn get_ticket_result(
+    state: State<'_, AppState>,
+    ticket_id: String,
+) -> Result<TicketResult, AppError> {
+    let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
+    let ticket = ProcessedTicket::get_by_id(&conn, &ticket_id)?;
+    let worktree = Worktree::get_by_ticket_id(&conn, &ticket_id)?;
+    Ok(TicketResult { ticket, worktree })
+}
+
+#[tauri::command]
+pub fn retry_ticket(
+    state: State<'_, AppState>,
+    ticket_id: String,
+) -> Result<(), AppError> {
+    let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
+    let ticket = ProcessedTicket::get_by_id(&conn, &ticket_id)?;
+
+    // Only allow retry for Error or Done tickets
+    if ticket.status != "Error" && ticket.status != "Done" && ticket.status != "Cancelled" {
+        return Err(AppError::from(format!(
+            "Cannot retry ticket with status '{}'",
+            ticket.status
+        )));
+    }
+
+    // Reset to Pending
+    ProcessedTicket::update_status(&conn, &ticket_id, "Pending")?;
+    // Clear reports
+    conn.execute(
+        "UPDATE processed_tickets SET analyst_report = NULL, developer_report = NULL, \
+         worktree_path = NULL, branch_name = NULL, processed_at = NULL WHERE id = ?1",
+        rusqlite::params![ticket_id],
+    )?;
+
+    // Clean up worktree if exists
+    if let Some(wt) = Worktree::get_by_ticket_id(&conn, &ticket_id)? 
{
+        if wt.status == "Active" {
+            // Best effort: delete the worktree on disk
+            let project_id = {
+                let tracker = crate::models::tracker::WatchedTracker::get_by_id(&conn, &ticket.tracker_id)?;
+                tracker.project_id
+            };
+            let project = crate::models::project::Project::get_by_id(&conn, &project_id)?;
+            let _ = crate::services::worktree_manager::delete_worktree(
+                &project.path,
+                &wt.path,
+                &wt.branch_name,
+            );
+        }
+        Worktree::delete(&conn, &wt.id)?;
+    }
+
+    Ok(())
+}
+
+#[tauri::command]
+pub fn cancel_ticket(
+    state: State<'_, AppState>,
+    ticket_id: String,
+) -> Result<(), AppError> {
+    let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
+    let ticket = ProcessedTicket::get_by_id(&conn, &ticket_id)?;
+
+    if ticket.status == "Done" || ticket.status == "Cancelled" {
+        return Err(AppError::from(format!(
+            "Cannot cancel ticket with status '{}'",
+            ticket.status
+        )));
+    }
+
+    ProcessedTicket::update_status(&conn, &ticket_id, "Cancelled")?;
+    Ok(())
+}
+```
+
+- [ ] **Step 3: Create commands/worktree.rs**
+
+Create `src-tauri/src/commands/worktree.rs`:
+
+```rust
+use crate::error::AppError;
+use crate::models::project::Project;
+use crate::models::ticket::ProcessedTicket;
+use crate::models::tracker::WatchedTracker;
+use crate::models::worktree::Worktree;
+use crate::services::worktree_manager;
+use crate::AppState;
+use tauri::State;
+
+#[tauri::command]
+pub fn list_worktrees(
+    state: State<'_, AppState>,
+    project_id: String,
+) -> Result<Vec<Worktree>, AppError> {
+    let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
+    let worktrees = Worktree::list_by_project(&conn, &project_id)?;
+    Ok(worktrees)
+}
+
+#[tauri::command]
+pub fn get_worktree_diff(
+    state: State<'_, AppState>,
+    worktree_id: String,
+) -> Result<String, AppError> {
+    let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
+
+    let wt = Worktree::get_by_id(&conn, &worktree_id)?;
+    let ticket = ProcessedTicket::get_by_id(&conn, &wt.ticket_id)?;
+    let tracker = 
WatchedTracker::get_by_id(&conn, &ticket.tracker_id)?; + let project = Project::get_by_id(&conn, &tracker.project_id)?; + + drop(conn); // Release lock before git operations + + let diff = worktree_manager::get_diff(&project.path, &project.base_branch, &wt.branch_name) + .map_err(AppError::from)?; + + Ok(diff) +} + +#[tauri::command] +pub fn apply_fix_to_branch( + state: State<'_, AppState>, + worktree_id: String, + target_branch: String, +) -> Result<(), AppError> { + let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?; + + let wt = Worktree::get_by_id(&conn, &worktree_id)?; + let ticket = ProcessedTicket::get_by_id(&conn, &wt.ticket_id)?; + let tracker = WatchedTracker::get_by_id(&conn, &ticket.tracker_id)?; + let project = Project::get_by_id(&conn, &tracker.project_id)?; + + drop(conn); // Release lock before git operations + + worktree_manager::apply_fix( + &project.path, + &project.base_branch, + &wt.branch_name, + &target_branch, + ) + .map_err(AppError::from)?; + + // Mark worktree as merged + let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?; + Worktree::set_merged(&conn, &worktree_id, &target_branch)?; + + Ok(()) +} + +#[tauri::command] +pub fn delete_worktree_cmd( + state: State<'_, AppState>, + worktree_id: String, +) -> Result<(), AppError> { + let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?; + + let wt = Worktree::get_by_id(&conn, &worktree_id)?; + let ticket = ProcessedTicket::get_by_id(&conn, &wt.ticket_id)?; + let tracker = WatchedTracker::get_by_id(&conn, &ticket.tracker_id)?; + let project = Project::get_by_id(&conn, &tracker.project_id)?; + + drop(conn); // Release lock before git operations + + worktree_manager::delete_worktree(&project.path, &wt.path, &wt.branch_name) + .map_err(AppError::from)?; + + let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?; + Worktree::delete(&conn, &worktree_id)?; + + Ok(()) +} + +#[tauri::command] +pub fn list_local_branches( + 
state: State<'_, AppState>,
+    project_id: String,
+) -> Result<Vec<String>, AppError> {
+    let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
+    let project = Project::get_by_id(&conn, &project_id)?;
+
+    drop(conn);
+
+    let branches =
+        worktree_manager::list_local_branches(&project.path).map_err(AppError::from)?;
+    Ok(branches)
+}
+```
+
+- [ ] **Step 4: Verify compilation**
+
+Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo check 2>&1 | tail -10`
+Expected: compiles without errors
+
+- [ ] **Step 5: Commit**
+
+```bash
+cd /home/leclere/Projets/orchai
+git add src-tauri/src/commands/mod.rs src-tauri/src/commands/orchestrator.rs src-tauri/src/commands/worktree.rs
+git commit -m "feat: add Tauri commands for orchestrator (retry, cancel, result) and worktree (diff, apply, delete, branches)"
+```
+
+---
+
+### Task 6: Wire orchestrator into app startup and register commands
+
+**Files:**
+- Modify: `src-tauri/src/lib.rs`
+
+- [ ] **Step 1: Start orchestrator in setup and register new commands**
+
+In `src-tauri/src/lib.rs`, modify the `setup` closure to start the orchestrator after the poller, and add all new commands to the invoke_handler. 
+ +The setup block should clone db_arc a second time for the orchestrator: + +```rust + // Start background poller + services::poller::start( + db_arc.clone(), + encryption_key, + http_client, + app.handle().clone(), + ); + + // Start agent orchestrator + services::orchestrator::start( + db_arc, + app.handle().clone(), + ); +``` + +Add the new commands to `invoke_handler`: + +```rust + .invoke_handler(tauri::generate_handler![ + commands::project::create_project, + commands::project::list_projects, + commands::project::get_project, + commands::project::update_project, + commands::project::delete_project, + commands::credential::set_tuleap_credentials, + commands::credential::get_tuleap_credentials, + commands::credential::delete_tuleap_credentials, + commands::credential::test_tuleap_connection, + commands::tracker::add_tracker, + commands::tracker::list_trackers, + commands::tracker::update_tracker, + commands::tracker::remove_tracker, + commands::tracker::get_tracker_fields, + commands::tracker::list_processed_tickets, + commands::poller::manual_poll, + commands::poller::get_queue_status, + commands::orchestrator::get_ticket_result, + commands::orchestrator::retry_ticket, + commands::orchestrator::cancel_ticket, + commands::worktree::list_worktrees, + commands::worktree::get_worktree_diff, + commands::worktree::apply_fix_to_branch, + commands::worktree::delete_worktree_cmd, + commands::worktree::list_local_branches, + ]) +``` + +- [ ] **Step 2: Verify full build compiles** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo check 2>&1 | tail -10` +Expected: compiles without errors + +- [ ] **Step 3: Run all tests** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo test 2>&1 | tail -20` +Expected: all tests pass (existing + new) + +- [ ] **Step 4: Commit** + +```bash +cd /home/leclere/Projets/orchai +git add src-tauri/src/lib.rs +git commit -m "feat: wire orchestrator startup and register Phase 3 Tauri commands" +``` + +--- + +### Task 7: 
Frontend dependencies, types, and API wrappers
+
+**Files:**
+- Modify: `src/lib/types.ts`
+- Modify: `src/lib/api.ts`
+- Modify: `package.json` (via npm install)
+
+- [ ] **Step 1: Install frontend dependencies**
+
+```bash
+cd /home/leclere/Projets/orchai && npm install react-markdown remark-gfm
+```
+
+- [ ] **Step 2: Add Worktree and TicketResult types to types.ts**
+
+Append to `src/lib/types.ts`:
+
+```typescript
+export interface Worktree {
+  id: string;
+  ticket_id: string;
+  path: string;
+  branch_name: string;
+  status: string;
+  created_at: string;
+  merged_at: string | null;
+  merged_into: string | null;
+}
+
+export interface TicketResult {
+  ticket: ProcessedTicket;
+  worktree: Worktree | null;
+}
+```
+
+- [ ] **Step 3: Add API wrappers to api.ts**
+
+Add the import for the new types in `src/lib/api.ts`:
+
+```typescript
+import type {
+  Project,
+  TuleapCredentialsSafe,
+  AgentConfig,
+  FilterGroup,
+  WatchedTracker,
+  TrackerField,
+  ProcessedTicket,
+  Worktree,
+  TicketResult,
+} from "./types";
+```
+
+Append these functions to `src/lib/api.ts`:
+
+```typescript
+// Orchestrator
+export async function getTicketResult(ticketId: string): Promise<TicketResult> {
+  return invoke("get_ticket_result", { ticketId });
+}
+export async function retryTicket(ticketId: string): Promise<void> {
+  return invoke("retry_ticket", { ticketId });
+}
+export async function cancelTicket(ticketId: string): Promise<void> {
+  return invoke("cancel_ticket", { ticketId });
+}
+
+// Worktrees
+export async function listWorktrees(projectId: string): Promise<Worktree[]> {
+  return invoke("list_worktrees", { projectId });
+}
+export async function getWorktreeDiff(worktreeId: string): Promise<string> {
+  return invoke("get_worktree_diff", { worktreeId });
+}
+export async function applyFixToBranch(worktreeId: string, targetBranch: string): Promise<void> {
+  return invoke("apply_fix_to_branch", { worktreeId, targetBranch });
+}
+export async function deleteWorktreeCmd(worktreeId: string): Promise<void> {
+  return 
invoke("delete_worktree_cmd", { worktreeId });
+}
+export async function listLocalBranches(projectId: string): Promise<string[]> {
+  return invoke("list_local_branches", { projectId });
+}
+```
+
+- [ ] **Step 4: Verify TypeScript compiles**
+
+Run: `cd /home/leclere/Projets/orchai && npx tsc --noEmit 2>&1 | tail -10`
+Expected: no errors
+
+- [ ] **Step 5: Commit**
+
+```bash
+cd /home/leclere/Projets/orchai
+git add package.json package-lock.json src/lib/types.ts src/lib/api.ts
+git commit -m "feat: add frontend types, API wrappers, and markdown deps for Phase 3"
+```
+
+---
+
+### Task 8: Ticket list page
+
+**Files:**
+- Create: `src/components/tickets/TicketList.tsx`
+- Modify: `src/components/projects/ProjectDashboard.tsx`
+- Modify: `src/App.tsx`
+
+- [ ] **Step 1: Create TicketList component**
+
+Create `src/components/tickets/TicketList.tsx`:
+
+```tsx
+import { useEffect, useState } from "react";
+import { useParams, Link } from "react-router-dom";
+import { listProcessedTickets, getProject } from "../../lib/api";
+import type { ProcessedTicket, Project } from "../../lib/types";
+
+function statusBadgeClass(status: string): string {
+  switch (status) {
+    case "Pending":
+      return "bg-yellow-100 text-yellow-700";
+    case "Analyzing":
+      return "bg-blue-100 text-blue-700";
+    case "Developing":
+      return "bg-purple-100 text-purple-700";
+    case "Done":
+      return "bg-green-100 text-green-700";
+    case "Error":
+      return "bg-red-100 text-red-700";
+    case "Cancelled":
+      return "bg-gray-100 text-gray-500";
+    default:
+      return "bg-gray-100 text-gray-700";
+  }
+}
+
+export default function TicketList() {
+  const { projectId } = useParams();
+  const [project, setProject] = useState<Project | null>(null);
+  const [tickets, setTickets] = useState<ProcessedTicket[]>([]);
+  const [filter, setFilter] = useState("all");
+
+  useEffect(() => {
+    if (!projectId) return;
+    Promise.all([getProject(projectId), listProcessedTickets(projectId)]).then(
+      ([proj, tkts]) => {
+        setProject(proj);
+        setTickets(tkts);
+      }
+    );
+  }, 
[projectId]); + + const filtered = + filter === "all" ? tickets : tickets.filter((t) => t.status === filter); + + return ( +
+
+
+ + {project?.name} + +

Processed Tickets

+
+
+ +
+ {["all", "Pending", "Analyzing", "Developing", "Done", "Error"].map( + (s) => ( + + ) + )} +
+ + {filtered.length === 0 ? ( +
+ No tickets found. +
+ ) : ( +
+ {filtered.map((ticket) => ( + +
+
+
+ + #{ticket.artifact_id} + + + {ticket.artifact_title} + +
+
+ {new Date(ticket.detected_at).toLocaleString()} + {ticket.processed_at && ( + + Processed: {new Date(ticket.processed_at).toLocaleString()} + + )} +
+
+ + {ticket.status} + +
+ + ))} +
+ )} +
+ ); +} +``` + +- [ ] **Step 2: Make dashboard ticket items link to detail view** + +In `src/components/projects/ProjectDashboard.tsx`, wrap each ticket in the "Recent Tickets" section with a `Link`. Replace the ticket `
` with: + +Change the ticket rendering from a plain `
` to a ``: + +```tsx + +``` + +Close with `` instead of `
`. + +Also add a "View all tickets" link after the recent tickets list, and a `Link` to the ticket list page in the section header: + +Replace the "Recent Tickets" header with: + +```tsx +
+

Recent Tickets

+ {tickets.length > 0 && ( + + View all ({tickets.length}) + + )} +
+``` + +- [ ] **Step 3: Add routes to App.tsx** + +In `src/App.tsx`, add imports: + +```tsx +import TicketList from "./components/tickets/TicketList"; +``` + +Add routes inside the `}>` block: + +```tsx + } /> +``` + +- [ ] **Step 4: Verify TypeScript compiles** + +Run: `cd /home/leclere/Projets/orchai && npx tsc --noEmit 2>&1 | tail -10` +Expected: no errors + +- [ ] **Step 5: Commit** + +```bash +cd /home/leclere/Projets/orchai +git add src/components/tickets/TicketList.tsx src/components/projects/ProjectDashboard.tsx src/App.tsx +git commit -m "feat: add ticket list page with status filtering and dashboard links" +``` + +--- + +### Task 9: Ticket detail page with markdown reports, diff viewer, and actions + +**Files:** +- Create: `src/components/tickets/TicketDetail.tsx` +- Modify: `src/App.tsx` + +- [ ] **Step 1: Create TicketDetail component** + +Create `src/components/tickets/TicketDetail.tsx`: + +```tsx +import { useEffect, useState } from "react"; +import { useParams, useNavigate } from "react-router-dom"; +import Markdown from "react-markdown"; +import remarkGfm from "remark-gfm"; +import { + getTicketResult, + retryTicket, + cancelTicket, + getWorktreeDiff, + applyFixToBranch, + deleteWorktreeCmd, + listLocalBranches, +} from "../../lib/api"; +import type { ProcessedTicket, Worktree } from "../../lib/types"; + +function statusBadgeClass(status: string): string { + switch (status) { + case "Pending": + return "bg-yellow-100 text-yellow-700"; + case "Analyzing": + return "bg-blue-100 text-blue-700"; + case "Developing": + return "bg-purple-100 text-purple-700"; + case "Done": + return "bg-green-100 text-green-700"; + case "Error": + return "bg-red-100 text-red-700"; + case "Cancelled": + return "bg-gray-100 text-gray-500"; + default: + return "bg-gray-100 text-gray-700"; + } +} + +function DiffViewer({ diff }: { diff: string }) { + if (!diff) { + return ( +
+ No changes detected. +
+ ); + } + + const lines = diff.split("\n"); + return ( +
+      {lines.map((line, i) => {
+        let cls = "";
+        if (line.startsWith("+++") || line.startsWith("---"))
+          cls = "text-gray-400";
+        else if (line.startsWith("+")) cls = "text-green-400 bg-green-900/20";
+        else if (line.startsWith("-")) cls = "text-red-400 bg-red-900/20";
+        else if (line.startsWith("@@")) cls = "text-blue-400";
+        else if (line.startsWith("diff ")) cls = "text-yellow-400 font-bold";
+        return (
+          
+ {line} +
+ ); + })} +
+ ); +} + +export default function TicketDetail() { + const { ticketId } = useParams(); + const navigate = useNavigate(); + const [ticket, setTicket] = useState(null); + const [worktree, setWorktree] = useState(null); + const [diff, setDiff] = useState(null); + const [branches, setBranches] = useState([]); + const [targetBranch, setTargetBranch] = useState(""); + const [tab, setTab] = useState<"info" | "analyst" | "developer" | "diff">( + "info" + ); + const [loading, setLoading] = useState(false); + const [error, setError] = useState(""); + + async function loadData() { + if (!ticketId) return; + try { + const result = await getTicketResult(ticketId); + setTicket(result.ticket); + setWorktree(result.worktree); + + // Auto-select the most relevant tab + if (result.ticket.developer_report) setTab("developer"); + else if (result.ticket.analyst_report) setTab("analyst"); + + // Load diff if worktree exists + if (result.worktree && result.worktree.status === "Active") { + try { + const d = await getWorktreeDiff(result.worktree.id); + setDiff(d); + } catch { + setDiff(null); + } + } + } catch (err) { + setError(err instanceof Error ? err.message : String(err)); + } + } + + useEffect(() => { + loadData(); + }, [ticketId]); + + async function handleRetry() { + if (!ticketId) return; + setLoading(true); + try { + await retryTicket(ticketId); + await loadData(); + } catch (err) { + setError(err instanceof Error ? err.message : String(err)); + } + setLoading(false); + } + + async function handleCancel() { + if (!ticketId) return; + setLoading(true); + try { + await cancelTicket(ticketId); + await loadData(); + } catch (err) { + setError(err instanceof Error ? err.message : String(err)); + } + setLoading(false); + } + + async function handleApplyFix() { + if (!worktree || !targetBranch) return; + setLoading(true); + setError(""); + try { + await applyFixToBranch(worktree.id, targetBranch); + await loadData(); + } catch (err) { + setError(err instanceof Error ? 
err.message : String(err)); + } + setLoading(false); + } + + async function handleDeleteWorktree() { + if (!worktree) return; + if (!window.confirm("Delete this worktree and its branch?")) return; + setLoading(true); + try { + await deleteWorktreeCmd(worktree.id); + setWorktree(null); + setDiff(null); + } catch (err) { + setError(err instanceof Error ? err.message : String(err)); + } + setLoading(false); + } + + async function loadBranches() { + if (!ticket) return; + try { + // We need the project ID -- navigate through the ticket data + // The tracker_id on the ticket links to the watched tracker + // For simplicity, parse project_id from the URL if available, or use a dedicated call + // Since we already have the worktree, we can get branches through the ticket result + const result = await getTicketResult(ticket.id); + // Get project_id through the ticket -> tracker chain is done server-side + // For branches, we need the project_id. Let's add it to the load flow. + // Actually, we'll use a simple approach: the branches list is loaded on demand + // when the user opens the "Apply fix" section + } catch { + // ignore + } + } + + if (!ticket) { + return
Loading...
; + } + + const tabs = [ + { key: "info" as const, label: "Info" }, + { + key: "analyst" as const, + label: "Analyst Report", + disabled: !ticket.analyst_report, + }, + { + key: "developer" as const, + label: "Developer Report", + disabled: !ticket.developer_report, + }, + { key: "diff" as const, label: "Diff", disabled: !diff && !worktree }, + ]; + + return ( +
+ {/* Header */} +
+
+ +

+ + #{ticket.artifact_id} + + {ticket.artifact_title} + + {ticket.status} + +

+
+
+ {(ticket.status === "Error" || + ticket.status === "Done" || + ticket.status === "Cancelled") && ( + + )} + {(ticket.status === "Pending" || + ticket.status === "Analyzing" || + ticket.status === "Developing") && ( + + )} +
+
+ + {error && ( +
+ {error} +
+ )} + + {/* Tabs */} +
+ {tabs.map((t) => ( + + ))} +
+ + {/* Tab content */} + {tab === "info" && ( +
+
+
+ Status: + {ticket.status} +
+
+ Detected: + + {new Date(ticket.detected_at).toLocaleString()} + +
+ {ticket.processed_at && ( +
+ Processed: + + {new Date(ticket.processed_at).toLocaleString()} + +
+ )} + {worktree && ( +
+ Worktree: + + {worktree.branch_name} + + + {worktree.status} + +
+ )} +
+ + {/* Worktree actions */} + {worktree && worktree.status === "Active" && ( +
+

Worktree Actions

+
+ setTargetBranch(e.target.value)} + className="flex-1 px-3 py-1.5 border border-gray-300 rounded text-sm focus:ring-2 focus:ring-blue-500 focus:border-transparent" + /> + +
+ +
+ )} + + {worktree && worktree.status === "Merged" && ( +
+ Fix applied to branch: {worktree.merged_into} +
+ )} +
+ )} + + {tab === "analyst" && ticket.analyst_report && ( +
+ + {ticket.analyst_report} + +
+ )} + + {tab === "developer" && ticket.developer_report && ( +
+ + {ticket.developer_report} + +
+ )} + + {tab === "diff" && } +
+ ); +} +``` + +- [ ] **Step 2: Add ticket detail route to App.tsx** + +In `src/App.tsx`, add the import: + +```tsx +import TicketDetail from "./components/tickets/TicketDetail"; +``` + +Add the route inside `}>`: + +```tsx + } /> +``` + +- [ ] **Step 3: Verify TypeScript compiles** + +Run: `cd /home/leclere/Projets/orchai && npx tsc --noEmit 2>&1 | tail -10` +Expected: no errors + +- [ ] **Step 4: Commit** + +```bash +cd /home/leclere/Projets/orchai +git add src/components/tickets/TicketDetail.tsx src/App.tsx +git commit -m "feat: add ticket detail page with markdown reports, diff viewer, and worktree actions" +``` + +--- + +### Task 10: Final verification + +- [ ] **Step 1: Run all backend tests** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo test 2>&1 | tail -30` +Expected: all tests pass, including new tests from Tasks 1-4 + +- [ ] **Step 2: Run clippy** + +Run: `cd /home/leclere/Projets/orchai/src-tauri && cargo clippy -- -D warnings 2>&1 | tail -20` +Expected: no warnings + +- [ ] **Step 3: Verify frontend builds** + +Run: `cd /home/leclere/Projets/orchai && npx tsc --noEmit && echo "OK"` +Expected: "OK" + +- [ ] **Step 4: Commit any fixes needed from verification** + +If clippy or tsc produced warnings/errors, fix them and commit: + +```bash +cd /home/leclere/Projets/orchai +git add -u +git commit -m "fix: resolve clippy warnings and TypeScript errors from Phase 3" +```