From c871493476aef5313775139924c14def396e2923 Mon Sep 17 00:00:00 2001 From: thibaud-leclere Date: Mon, 13 Apr 2026 14:12:13 +0200 Subject: [PATCH] plan: Phase 2 Tuleap integration implementation plan 12 tasks covering credentials, Tuleap API client, filter engine, background poller, and tracker config UI with visual filter builder. Co-Authored-By: Claude Opus 4.6 (1M context) --- ...-04-13-orchai-phase2-tuleap-integration.md | 3378 +++++++++++++++++ 1 file changed, 3378 insertions(+) create mode 100644 docs/superpowers/plans/2026-04-13-orchai-phase2-tuleap-integration.md diff --git a/docs/superpowers/plans/2026-04-13-orchai-phase2-tuleap-integration.md b/docs/superpowers/plans/2026-04-13-orchai-phase2-tuleap-integration.md new file mode 100644 index 0000000..55499b0 --- /dev/null +++ b/docs/superpowers/plans/2026-04-13-orchai-phase2-tuleap-integration.md @@ -0,0 +1,3378 @@ +# Orchai Phase 2: Tuleap Integration Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Connect Orchai to Tuleap -- store credentials securely, configure watched trackers with AND/OR filters, poll for new tickets on a timer, and detect new artifacts. + +**Architecture:** Adds a `services/` layer (crypto, Tuleap HTTP client, filter engine, background poller) to the existing Tauri backend. AppState gains `Arc>` for shared DB access, an encryption key for credentials, and a shared reqwest client. The poller runs as a tokio background task spawned at app startup. Frontend gains a Settings page for credentials, a tracker config form with a visual filter builder, and an updated project dashboard showing tracked trackers and recent tickets. 
+ +**Tech Stack:** reqwest (HTTP), tokio (async runtime / timers), aes-gcm + rand + base64 (credential encryption), serde_json (artifact parsing) + +--- + +## Phasing Context + +This is Plan 2 of 4: +- **Plan 1 (done):** Foundation -- Tauri scaffold, SQLite, Project Manager +- **Plan 2 (this):** Tuleap Integration -- credentials, API client, poller, filter engine, tracker config +- **Plan 3:** Agent Pipeline -- orchestrator, worktree manager, ticket processing, results UI +- **Plan 4:** Notifications + Polish -- notifier, system notifications, dashboard + +--- + +## File Structure + +``` +src-tauri/ + migrations/ + 001_init.sql # existing (unchanged) + 002_add_last_polled.sql # create: add last_polled_at + enabled to watched_trackers + src/ + lib.rs # modify: Arc>, add services mod, poller startup + db.rs # modify: add migration 002 + error.rs # modify: add From + models/ + mod.rs # modify: add credential, tracker, ticket + project.rs # existing (unchanged) + credential.rs # create: TuleapCredentials CRUD + tracker.rs # create: WatchedTracker CRUD + ticket.rs # create: ProcessedTicket insert/query + commands/ + mod.rs # modify: add credential, tracker, poller + project.rs # existing (unchanged) + credential.rs # create: set/get/delete/test credentials + tracker.rs # create: tracker CRUD + get_tracker_fields + poller.rs # create: manual_poll, get_queue_status, toggle + services/ + mod.rs # create: re-exports + crypto.rs # create: key file management, AES-GCM encrypt/decrypt + tuleap_client.rs # create: HTTP client (get artifacts, tracker info, test) + filter_engine.rs # create: AND/OR filter evaluation on artifact JSON + poller.rs # create: background polling loop + +src/ + lib/ + types.ts # modify: add Credential, Tracker, Filter, Ticket types + api.ts # modify: add all new Tauri command wrappers + components/ + settings/ + SettingsPage.tsx # create: credentials management + trackers/ + TrackerConfig.tsx # create: add/edit tracker form + FilterBuilder.tsx # create: 
visual AND/OR filter builder + TrackerList.tsx # create: list of trackers for a project + projects/ + ProjectDashboard.tsx # modify: add tracker section + recent tickets + App.tsx # modify: add /settings and /projects/:id/trackers routes +``` + +--- + +### Task 1: Add Phase 2 dependencies + migration 002 + update AppState + +**Files:** +- Modify: `src-tauri/Cargo.toml` +- Create: `src-tauri/migrations/002_add_last_polled.sql` +- Modify: `src-tauri/src/db.rs` +- Modify: `src-tauri/src/error.rs` +- Modify: `src-tauri/src/lib.rs` + +- [ ] **Step 1: Add dependencies to Cargo.toml** + +Add under `[dependencies]` in `src-tauri/Cargo.toml`: + +```toml +reqwest = { version = "0.12", features = ["json"] } +tokio = { version = "1", features = ["time", "sync", "macros"] } +aes-gcm = "0.10" +rand = "0.8" +base64 = "0.22" +``` + +- [ ] **Step 2: Create migration 002** + +Create `src-tauri/migrations/002_add_last_polled.sql`: + +```sql +ALTER TABLE watched_trackers ADD COLUMN last_polled_at TEXT; +ALTER TABLE watched_trackers ADD COLUMN enabled INTEGER NOT NULL DEFAULT 1; +``` + +- [ ] **Step 3: Update db.rs to run migration 002** + +In `src-tauri/src/db.rs`, add the new migration constant and update the `migrate` function: + +```rust +use rusqlite::{Connection, Result}; +use std::path::Path; + +const MIGRATION_001: &str = include_str!("../migrations/001_init.sql"); +const MIGRATION_002: &str = include_str!("../migrations/002_add_last_polled.sql"); + +pub fn init(db_path: &Path) -> Result { + let conn = Connection::open(db_path)?; + configure(&conn)?; + migrate(&conn)?; + Ok(conn) +} + +#[cfg(test)] +pub fn init_in_memory() -> Result { + let conn = Connection::open_in_memory()?; + configure(&conn)?; + migrate(&conn)?; + Ok(conn) +} + +fn configure(conn: &Connection) -> Result<()> { + conn.pragma_update(None, "journal_mode", "wal")?; + conn.pragma_update(None, "foreign_keys", "ON")?; + Ok(()) +} + +fn migrate(conn: &Connection) -> Result<()> { + let version: i32 = 
conn.pragma_query_value(None, "user_version", |row| row.get(0))?; + + if version < 1 { + conn.execute_batch(MIGRATION_001)?; + conn.pragma_update(None, "user_version", 1)?; + } + if version < 2 { + conn.execute_batch(MIGRATION_002)?; + conn.pragma_update(None, "user_version", 2)?; + } + + Ok(()) + } + ``` + + Update the test `test_migration_is_idempotent` to check for version 2: + + ```rust + #[test] + fn test_migration_is_idempotent() { + let conn = init_in_memory().expect("should initialize"); + let version: i32 = conn + .pragma_query_value(None, "user_version", |row| row.get(0)) + .unwrap(); + assert_eq!(version, 2); + } + ``` + + - [ ] **Step 4: Add From<reqwest::Error> to error.rs** + + Add to `src-tauri/src/error.rs`: + + ```rust + impl From<reqwest::Error> for AppError { + fn from(e: reqwest::Error) -> Self { + AppError { + message: e.to_string(), + } + } + } + ``` + + - [ ] **Step 5: Update AppState to use Arc<Mutex<Connection>> and add new fields** + + Replace `src-tauri/src/lib.rs`: + + ```rust + mod commands; + mod db; + mod error; + mod models; + + use std::sync::{Arc, Mutex}; + use tauri::Manager; + + pub struct AppState { + pub db: Arc<Mutex<rusqlite::Connection>>, + pub encryption_key: [u8; 32], + pub http_client: reqwest::Client, + } + + #[cfg_attr(mobile, tauri::mobile_entry_point)] + pub fn run() { + tauri::Builder::default() + .plugin(tauri_plugin_dialog::init()) + .setup(|app| { + let db_dir = app.path().app_data_dir()?; + std::fs::create_dir_all(&db_dir)?; + + // Init database + let db_path = db_dir.join("orchai.db"); + let conn = db::init(&db_path).expect("Failed to initialize database"); + + // Load or generate encryption key + let key_path = db_dir.join("orchai.key"); + let encryption_key = load_or_generate_key(&key_path)?; + + // Shared HTTP client + let http_client = reqwest::Client::new(); + + app.manage(AppState { + db: Arc::new(Mutex::new(conn)), + encryption_key, + http_client, + }); + Ok(()) + }) + .invoke_handler(tauri::generate_handler![ + commands::project::create_project, + commands::project::list_projects, + 
commands::project::get_project, + commands::project::update_project, + commands::project::delete_project, + ]) + .run(tauri::generate_context!()) + .expect("error while running tauri application"); +} + +fn load_or_generate_key(path: &std::path::Path) -> Result<[u8; 32], Box> { + use rand::RngCore; + + if path.exists() { + let bytes = std::fs::read(path)?; + if bytes.len() != 32 { + return Err("Invalid key file size".into()); + } + let mut key = [0u8; 32]; + key.copy_from_slice(&bytes); + Ok(key) + } else { + let mut key = [0u8; 32]; + rand::rngs::OsRng.fill_bytes(&mut key); + std::fs::write(path, &key)?; + Ok(key) + } +} +``` + +- [ ] **Step 6: Verify compilation and existing tests pass** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test +``` + +Expected: 11 tests pass (3 db + 8 project). Migration test now checks version 2. + +- [ ] **Step 7: Commit** + +```bash +git add -A +git commit -m "feat: Phase 2 dependencies, migration 002, Arc AppState" +``` + +--- + +### Task 2: Crypto service + tests + +**Files:** +- Create: `src-tauri/src/services/mod.rs` +- Create: `src-tauri/src/services/crypto.rs` +- Modify: `src-tauri/src/lib.rs` (add `mod services`) + +- [ ] **Step 1: Write failing tests for crypto** + +Create `src-tauri/src/services/mod.rs`: + +```rust +pub mod crypto; +``` + +Create `src-tauri/src/services/crypto.rs`: + +```rust +use aes_gcm::{ + aead::{Aead, KeyInit, OsRng}, + Aes256Gcm, Key, Nonce, +}; +use base64::{engine::general_purpose::STANDARD, Engine}; +use rand::RngCore; + +pub fn encrypt(key: &[u8; 32], plaintext: &str) -> Result { + todo!() +} + +pub fn decrypt(key: &[u8; 32], encrypted: &str) -> Result { + todo!() +} + +#[cfg(test)] +mod tests { + use super::*; + + fn test_key() -> [u8; 32] { + let mut key = [0u8; 32]; + OsRng.fill_bytes(&mut key); + key + } + + #[test] + fn test_encrypt_decrypt_roundtrip() { + let key = test_key(); + let plaintext = "my_secret_password"; + let encrypted 
= encrypt(&key, plaintext).expect("encrypt should succeed"); + let decrypted = decrypt(&key, &encrypted).expect("decrypt should succeed"); + assert_eq!(decrypted, plaintext); + } + + #[test] + fn test_encrypt_produces_different_ciphertext() { + let key = test_key(); + let plaintext = "same_password"; + let enc1 = encrypt(&key, plaintext).unwrap(); + let enc2 = encrypt(&key, plaintext).unwrap(); + assert_ne!(enc1, enc2, "random nonce should produce different ciphertext"); + } + + #[test] + fn test_decrypt_with_wrong_key_fails() { + let key1 = test_key(); + let key2 = test_key(); + let encrypted = encrypt(&key1, "secret").unwrap(); + let result = decrypt(&key2, &encrypted); + assert!(result.is_err()); + } + + #[test] + fn test_decrypt_invalid_base64_fails() { + let key = test_key(); + let result = decrypt(&key, "not-valid-base64!!!"); + assert!(result.is_err()); + } + + #[test] + fn test_decrypt_too_short_fails() { + let key = test_key(); + let short = STANDARD.encode(&[0u8; 5]); + let result = decrypt(&key, &short); + assert!(result.is_err()); + } + + #[test] + fn test_encrypt_empty_string() { + let key = test_key(); + let encrypted = encrypt(&key, "").expect("encrypt empty should succeed"); + let decrypted = decrypt(&key, &encrypted).expect("decrypt should succeed"); + assert_eq!(decrypted, ""); + } + + #[test] + fn test_encrypt_unicode() { + let key = test_key(); + let plaintext = "mot de passe avec accents: eaui"; + let encrypted = encrypt(&key, plaintext).unwrap(); + let decrypted = decrypt(&key, &encrypted).unwrap(); + assert_eq!(decrypted, plaintext); + } +} +``` + +Add `mod services;` to `src-tauri/src/lib.rs` (after `mod models;`). + +- [ ] **Step 2: Run tests to verify they fail** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test services::crypto::tests +``` + +Expected: 7 failures with `not yet implemented`. 
+ + - [ ] **Step 3: Implement encrypt and decrypt** + + Replace the `todo!()` stubs in `src-tauri/src/services/crypto.rs`: + + ```rust + pub fn encrypt(key: &[u8; 32], plaintext: &str) -> Result<String, String> { + let cipher = Aes256Gcm::new(Key::<Aes256Gcm>::from_slice(key)); + let mut nonce_bytes = [0u8; 12]; + OsRng.fill_bytes(&mut nonce_bytes); + let nonce = Nonce::from_slice(&nonce_bytes); + + let ciphertext = cipher + .encrypt(nonce, plaintext.as_bytes()) + .map_err(|e| format!("encryption failed: {}", e))?; + + let mut combined = nonce_bytes.to_vec(); + combined.extend(ciphertext); + Ok(STANDARD.encode(&combined)) + } + + pub fn decrypt(key: &[u8; 32], encrypted: &str) -> Result<String, String> { + let combined = STANDARD + .decode(encrypted) + .map_err(|e| format!("base64 decode failed: {}", e))?; + + if combined.len() < 13 { + return Err("encrypted data too short".to_string()); + } + + let (nonce_bytes, ciphertext) = combined.split_at(12); + let cipher = Aes256Gcm::new(Key::<Aes256Gcm>::from_slice(key)); + let nonce = Nonce::from_slice(nonce_bytes); + + let plaintext = cipher + .decrypt(nonce, ciphertext) + .map_err(|_| "decryption failed (wrong key or corrupted data)".to_string())?; + + String::from_utf8(plaintext).map_err(|e| format!("invalid UTF-8: {}", e)) + } + ``` + + - [ ] **Step 4: Run tests to verify they pass** + + ```bash + cd /home/leclere/Projets/orchai/src-tauri + PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test services::crypto::tests + ``` + + Expected: 7 tests pass. 
+ +- [ ] **Step 5: Commit** + +```bash +git add -A +git commit -m "feat: AES-256-GCM crypto service for credential encryption" +``` + +--- + +### Task 3: TuleapCredentials model + Tauri commands + +**Files:** +- Create: `src-tauri/src/models/credential.rs` +- Modify: `src-tauri/src/models/mod.rs` +- Create: `src-tauri/src/commands/credential.rs` +- Modify: `src-tauri/src/commands/mod.rs` +- Modify: `src-tauri/src/lib.rs` (register commands) + +- [ ] **Step 1: Write failing tests for TuleapCredentials model** + +Create `src-tauri/src/models/credential.rs`: + +```rust +use rusqlite::{params, Connection, Result}; +use serde::{Deserialize, Serialize}; +use uuid::Uuid; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TuleapCredentials { + pub id: String, + pub tuleap_url: String, + pub username: String, + pub password_encrypted: String, +} + +/// Credential returned to frontend (no encrypted password) +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TuleapCredentialsSafe { + pub id: String, + pub tuleap_url: String, + pub username: String, +} + +impl TuleapCredentials { + pub fn upsert( + conn: &Connection, + tuleap_url: &str, + username: &str, + password_encrypted: &str, + ) -> Result { + todo!() + } + + pub fn get(conn: &Connection) -> Result> { + todo!() + } + + pub fn delete(conn: &Connection) -> Result<()> { + todo!() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::db; + + fn setup() -> Connection { + db::init_in_memory().expect("db init should succeed") + } + + #[test] + fn test_upsert_creates_credentials() { + let conn = setup(); + let cred = TuleapCredentials::upsert(&conn, "https://tuleap.example.com", "user1", "encrypted_pw") + .expect("upsert should succeed"); + + assert_eq!(cred.tuleap_url, "https://tuleap.example.com"); + assert_eq!(cred.username, "user1"); + assert_eq!(cred.password_encrypted, "encrypted_pw"); + } + + #[test] + fn test_upsert_replaces_existing() { + let conn = setup(); + 
TuleapCredentials::upsert(&conn, "https://old.com", "old_user", "old_pw").unwrap(); + let cred = TuleapCredentials::upsert(&conn, "https://new.com", "new_user", "new_pw").unwrap(); + + assert_eq!(cred.tuleap_url, "https://new.com"); + let fetched = TuleapCredentials::get(&conn).unwrap().unwrap(); + assert_eq!(fetched.tuleap_url, "https://new.com"); + } + + #[test] + fn test_get_returns_none_when_empty() { + let conn = setup(); + let result = TuleapCredentials::get(&conn).unwrap(); + assert!(result.is_none()); + } + + #[test] + fn test_get_returns_credentials() { + let conn = setup(); + TuleapCredentials::upsert(&conn, "https://tuleap.example.com", "user1", "enc_pw").unwrap(); + let cred = TuleapCredentials::get(&conn).unwrap().unwrap(); + assert_eq!(cred.username, "user1"); + } + + #[test] + fn test_delete_removes_credentials() { + let conn = setup(); + TuleapCredentials::upsert(&conn, "https://tuleap.example.com", "user1", "enc_pw").unwrap(); + TuleapCredentials::delete(&conn).unwrap(); + let result = TuleapCredentials::get(&conn).unwrap(); + assert!(result.is_none()); + } +} +``` + +Add `pub mod credential;` to `src-tauri/src/models/mod.rs`. + +- [ ] **Step 2: Run tests to verify they fail** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test models::credential::tests +``` + +Expected: 5 failures. + +- [ ] **Step 3: Implement TuleapCredentials CRUD** + +Replace stubs in `src-tauri/src/models/credential.rs`: + +```rust + /// We only store one set of credentials. Upsert deletes existing and inserts new. 
+ pub fn upsert( + conn: &Connection, + tuleap_url: &str, + username: &str, + password_encrypted: &str, + ) -> Result<TuleapCredentials> { + conn.execute("DELETE FROM tuleap_credentials", [])?; + let id = Uuid::new_v4().to_string(); + conn.execute( + "INSERT INTO tuleap_credentials (id, tuleap_url, username, password_encrypted) VALUES (?1, ?2, ?3, ?4)", + params![id, tuleap_url, username, password_encrypted], + )?; + Ok(TuleapCredentials { + id, + tuleap_url: tuleap_url.to_string(), + username: username.to_string(), + password_encrypted: password_encrypted.to_string(), + }) + } + + pub fn get(conn: &Connection) -> Result<Option<TuleapCredentials>> { + let mut stmt = conn.prepare( + "SELECT id, tuleap_url, username, password_encrypted FROM tuleap_credentials LIMIT 1", + )?; + let mut rows = stmt.query_map([], |row| { + Ok(TuleapCredentials { + id: row.get(0)?, + tuleap_url: row.get(1)?, + username: row.get(2)?, + password_encrypted: row.get(3)?, + }) + })?; + match rows.next() { + Some(row) => Ok(Some(row?)), + None => Ok(None), + } + } + + pub fn delete(conn: &Connection) -> Result<()> { + conn.execute("DELETE FROM tuleap_credentials", [])?; + Ok(()) + } + ``` + + - [ ] **Step 4: Run tests to verify they pass** + + ```bash + cd /home/leclere/Projets/orchai/src-tauri + PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test models::credential::tests + ``` + + Expected: 5 pass. 
+ +- [ ] **Step 5: Create credential Tauri commands** + +Create `src-tauri/src/commands/credential.rs`: + +```rust +use crate::error::AppError; +use crate::models::credential::{TuleapCredentials, TuleapCredentialsSafe}; +use crate::services::crypto; +use crate::AppState; +use tauri::State; + +#[tauri::command] +pub fn set_tuleap_credentials( + state: State<'_, AppState>, + tuleap_url: String, + username: String, + password: String, +) -> Result { + let encrypted = crypto::encrypt(&state.encryption_key, &password) + .map_err(|e| AppError::from(e))?; + + let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?; + let cred = TuleapCredentials::upsert(&db, &tuleap_url, &username, &encrypted)?; + + Ok(TuleapCredentialsSafe { + id: cred.id, + tuleap_url: cred.tuleap_url, + username: cred.username, + }) +} + +#[tauri::command] +pub fn get_tuleap_credentials( + state: State<'_, AppState>, +) -> Result, AppError> { + let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?; + let cred = TuleapCredentials::get(&db)?; + Ok(cred.map(|c| TuleapCredentialsSafe { + id: c.id, + tuleap_url: c.tuleap_url, + username: c.username, + })) +} + +#[tauri::command] +pub fn delete_tuleap_credentials(state: State<'_, AppState>) -> Result<(), AppError> { + let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?; + TuleapCredentials::delete(&db)?; + Ok(()) +} + +#[tauri::command] +pub async fn test_tuleap_connection(state: State<'_, AppState>) -> Result { + let (base_url, username, password) = { + let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?; + let cred = TuleapCredentials::get(&db)? 
+ .ok_or_else(|| AppError::from("No credentials configured".to_string()))?; + let password = crypto::decrypt(&state.encryption_key, &cred.password_encrypted) + .map_err(|e| AppError::from(e))?; + (cred.tuleap_url, cred.username, password) + }; + + let url = format!("{}/api/projects?limit=1", base_url.trim_end_matches('/')); + let response = state.http_client + .get(&url) + .basic_auth(&username, Some(&password)) + .send() + .await?; + + if response.status().is_success() { + Ok("Connection successful".to_string()) + } else { + Err(AppError::from(format!( + "Tuleap API returned status {}", + response.status() + ))) + } +} +``` + +Add `pub mod credential;` to `src-tauri/src/commands/mod.rs`. + +- [ ] **Step 6: Register commands in lib.rs** + +Add to the `invoke_handler` in `src-tauri/src/lib.rs`: + +```rust + .invoke_handler(tauri::generate_handler![ + commands::project::create_project, + commands::project::list_projects, + commands::project::get_project, + commands::project::update_project, + commands::project::delete_project, + commands::credential::set_tuleap_credentials, + commands::credential::get_tuleap_credentials, + commands::credential::delete_tuleap_credentials, + commands::credential::test_tuleap_connection, + ]) +``` + +- [ ] **Step 7: Verify compilation and all tests pass** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test +``` + +Expected: all tests pass (11 existing + 5 credential + 7 crypto = 23 tests). + +- [ ] **Step 8: Commit** + +```bash +git add -A +git commit -m "feat: TuleapCredentials model + encrypted storage + Tauri commands" +``` + +--- + +### Task 4: Tuleap HTTP client service + tests + +**Files:** +- Create: `src-tauri/src/services/tuleap_client.rs` +- Modify: `src-tauri/src/services/mod.rs` + +- [ ] **Step 1: Write the Tuleap client with tests for parsing logic** + +Add `pub mod tuleap_client;` to `src-tauri/src/services/mod.rs`. 
+ +Create `src-tauri/src/services/tuleap_client.rs`: + +```rust +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct TrackerField { + pub field_id: i64, + pub label: String, + pub field_type: String, + pub values: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct FieldValue { + pub id: i64, + pub label: String, +} + +pub struct TuleapClient { + http: reqwest::Client, + base_url: String, + username: String, + password: String, +} + +impl TuleapClient { + pub fn new(http: &reqwest::Client, base_url: &str, username: &str, password: &str) -> Self { + TuleapClient { + http: http.clone(), + base_url: base_url.trim_end_matches('/').to_string(), + username: username.to_string(), + password: password.to_string(), + } + } + + pub async fn test_connection(&self) -> Result<(), String> { + let url = format!("{}/api/projects?limit=1", self.base_url); + let response = self.http + .get(&url) + .basic_auth(&self.username, Some(&self.password)) + .send() + .await + .map_err(|e| format!("HTTP request failed: {}", e))?; + + if response.status().is_success() { + Ok(()) + } else { + Err(format!("Tuleap API returned status {}", response.status())) + } + } + + pub async fn get_tracker_fields(&self, tracker_id: i32) -> Result, String> { + let url = format!("{}/api/trackers/{}", self.base_url, tracker_id); + let response = self.http + .get(&url) + .basic_auth(&self.username, Some(&self.password)) + .send() + .await + .map_err(|e| format!("HTTP request failed: {}", e))?; + + if !response.status().is_success() { + return Err(format!("Tuleap API returned status {}", response.status())); + } + + let body: serde_json::Value = response.json().await + .map_err(|e| format!("Failed to parse response: {}", e))?; + + Ok(parse_tracker_fields(&body)) + } + + pub async fn get_artifacts(&self, tracker_id: i32) -> Result, String> { + let mut all_artifacts = Vec::new(); + let mut offset = 0; + let limit = 100; + + loop { + let url = 
format!( + "{}/api/trackers/{}/artifacts?limit={}&offset={}&values=all", + self.base_url, tracker_id, limit, offset + ); + let response = self.http + .get(&url) + .basic_auth(&self.username, Some(&self.password)) + .send() + .await + .map_err(|e| format!("HTTP request failed: {}", e))?; + + if !response.status().is_success() { + return Err(format!("Tuleap API returned status {}", response.status())); + } + + let total: i64 = response.headers() + .get("x-pagination-size") + .and_then(|v| v.to_str().ok()) + .and_then(|v| v.parse().ok()) + .unwrap_or(0); + + let artifacts: Vec = response.json().await + .map_err(|e| format!("Failed to parse response: {}", e))?; + + let count = artifacts.len(); + all_artifacts.extend(artifacts); + + offset += count as i64; + if offset >= total || count == 0 { + break; + } + } + + Ok(all_artifacts) + } +} + +/// Parse tracker fields from the Tuleap API tracker response +pub fn parse_tracker_fields(tracker_json: &serde_json::Value) -> Vec { + let fields = match tracker_json.get("fields") { + Some(serde_json::Value::Array(arr)) => arr, + _ => return Vec::new(), + }; + + fields.iter().filter_map(|field| { + let field_id = field.get("field_id")?.as_i64()?; + let label = field.get("label")?.as_str()?.to_string(); + let field_type = field.get("type")?.as_str()?.to_string(); + + // Only include fields with selectable values + let values = match field_type.as_str() { + "sb" | "msb" | "rb" | "cb" => { + extract_field_values(field) + } + _ => Vec::new(), + }; + + Some(TrackerField { + field_id, + label, + field_type, + values, + }) + }).collect() +} + +fn extract_field_values(field: &serde_json::Value) -> Vec { + // Try "values" array first (used by sb, rb) + if let Some(serde_json::Value::Array(vals)) = field.get("values") { + return vals.iter().filter_map(|v| { + let id = v.get("id")?.as_i64()?; + let label = v.get("label")?.as_str()?.to_string(); + if label == "None" { return None; } + Some(FieldValue { id, label }) + }).collect(); + } + // Try 
"bind_value_objects" (used by msb) + if let Some(serde_json::Value::Array(vals)) = field.get("bind_value_objects") { + return vals.iter().filter_map(|v| { + let id = v.get("id")?.as_i64()?; + let label = v.get("display_name") + .or_else(|| v.get("label")) + .and_then(|l| l.as_str()) + .map(String::from)?; + Some(FieldValue { id, label }) + }).collect(); + } + Vec::new() +} + +/// Extract the displayable values of a field from an artifact's "values" array. +/// Returns the list of labels/names for the field. Used by the filter engine. +pub fn extract_artifact_field_values(artifact: &serde_json::Value, field_label: &str) -> Vec { + let values = match artifact.get("values") { + Some(serde_json::Value::Array(arr)) => arr, + _ => return Vec::new(), + }; + + for field in values { + let label = match field.get("label").and_then(|l| l.as_str()) { + Some(l) => l, + None => continue, + }; + if label != field_label { + continue; + } + + let field_type = field.get("type").and_then(|t| t.as_str()).unwrap_or(""); + + return match field_type { + "sb" | "rb" => { + // Select box / radio button: values[*].label + field.get("values") + .and_then(|v| v.as_array()) + .map(|arr| arr.iter() + .filter_map(|v| v.get("label").and_then(|l| l.as_str()).map(String::from)) + .collect()) + .unwrap_or_default() + } + "msb" | "cb" => { + // Multi-select / checkbox: bind_value_objects[*].display_name or label + field.get("bind_value_objects") + .and_then(|v| v.as_array()) + .map(|arr| arr.iter() + .filter_map(|v| { + v.get("display_name") + .or_else(|| v.get("label")) + .and_then(|l| l.as_str()) + .map(String::from) + }) + .collect()) + .unwrap_or_default() + } + "string" | "text" | "int" | "float" => { + field.get("value") + .and_then(|v| match v { + serde_json::Value::String(s) => Some(s.clone()), + serde_json::Value::Number(n) => Some(n.to_string()), + _ => None, + }) + .into_iter() + .collect() + } + _ => Vec::new(), + }; + } + Vec::new() +} + +#[cfg(test)] +mod tests { + use super::*; + use 
serde_json::json; + + #[test] + fn test_parse_tracker_fields_extracts_sb() { + let tracker = json!({ + "fields": [ + { + "field_id": 1, + "label": "Status", + "type": "sb", + "values": [ + { "id": 100, "label": "None" }, + { "id": 101, "label": "Nouveau" }, + { "id": 102, "label": "En cours" } + ] + } + ] + }); + let fields = parse_tracker_fields(&tracker); + assert_eq!(fields.len(), 1); + assert_eq!(fields[0].label, "Status"); + assert_eq!(fields[0].values.len(), 2); // "None" filtered out + assert_eq!(fields[0].values[0].label, "Nouveau"); + } + + #[test] + fn test_parse_tracker_fields_extracts_msb() { + let tracker = json!({ + "fields": [ + { + "field_id": 5, + "label": "Assigned to", + "type": "msb", + "bind_value_objects": [ + { "id": 200, "display_name": "Alice" }, + { "id": 201, "display_name": "Bob" } + ] + } + ] + }); + let fields = parse_tracker_fields(&tracker); + assert_eq!(fields.len(), 1); + assert_eq!(fields[0].values.len(), 2); + assert_eq!(fields[0].values[0].label, "Alice"); + } + + #[test] + fn test_parse_tracker_fields_skips_text_fields() { + let tracker = json!({ + "fields": [ + { "field_id": 10, "label": "Description", "type": "text" } + ] + }); + let fields = parse_tracker_fields(&tracker); + assert_eq!(fields.len(), 1); + assert!(fields[0].values.is_empty()); // text fields have no selectable values + } + + #[test] + fn test_extract_artifact_field_values_sb() { + let artifact = json!({ + "values": [ + { + "field_id": 1, + "label": "Status", + "type": "sb", + "values": [{ "id": 101, "label": "Nouveau" }] + } + ] + }); + let vals = extract_artifact_field_values(&artifact, "Status"); + assert_eq!(vals, vec!["Nouveau"]); + } + + #[test] + fn test_extract_artifact_field_values_msb() { + let artifact = json!({ + "values": [ + { + "field_id": 5, + "label": "Assigned to", + "type": "msb", + "bind_value_objects": [ + { "id": 200, "display_name": "Team Maintenance" } + ] + } + ] + }); + let vals = extract_artifact_field_values(&artifact, "Assigned 
to"); + assert_eq!(vals, vec!["Team Maintenance"]); + } + + #[test] + fn test_extract_artifact_field_values_missing_field() { + let artifact = json!({ "values": [] }); + let vals = extract_artifact_field_values(&artifact, "Nonexistent"); + assert!(vals.is_empty()); + } + + #[test] + fn test_extract_artifact_field_values_string_field() { + let artifact = json!({ + "values": [ + { "field_id": 20, "label": "Summary", "type": "string", "value": "Login broken" } + ] + }); + let vals = extract_artifact_field_values(&artifact, "Summary"); + assert_eq!(vals, vec!["Login broken"]); + } +} +``` + +- [ ] **Step 2: Run tests** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test services::tuleap_client::tests +``` + +Expected: 7 tests pass. + +- [ ] **Step 3: Commit** + +```bash +git add -A +git commit -m "feat: Tuleap HTTP client with artifact parsing and field extraction" +``` + +--- + +### Task 5: WatchedTracker model + CRUD + tests + +**Files:** +- Create: `src-tauri/src/models/tracker.rs` +- Modify: `src-tauri/src/models/mod.rs` + +- [ ] **Step 1: Write failing tests for WatchedTracker** + +Add `pub mod tracker;` to `src-tauri/src/models/mod.rs`. 
+
+Create `src-tauri/src/models/tracker.rs`:
+
+```rust
+use rusqlite::{params, Connection, Result};
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct AgentConfig {
+    pub analyst_command: String,
+    pub analyst_args: Vec<String>,
+    pub developer_command: String,
+    pub developer_args: Vec<String>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct FilterGroup {
+    pub conditions: Vec<Filter>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct Filter {
+    pub field: String,
+    pub operator: String, // "In", "NotIn", "Equals", "NotEquals"
+    pub value: Vec<String>,
+}
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct WatchedTracker {
+    pub id: String,
+    pub project_id: String,
+    pub tracker_id: i32,
+    pub tracker_label: String,
+    pub polling_interval: i32,
+    pub agent_config: AgentConfig,
+    pub filters: Vec<FilterGroup>,
+    pub enabled: bool,
+    pub last_polled_at: Option<String>,
+    pub created_at: String,
+}
+
+impl WatchedTracker {
+    pub fn insert(
+        conn: &Connection,
+        project_id: &str,
+        tracker_id: i32,
+        tracker_label: &str,
+        polling_interval: i32,
+        agent_config: &AgentConfig,
+        filters: &[FilterGroup],
+    ) -> Result<WatchedTracker> {
+        todo!()
+    }
+
+    pub fn list_by_project(conn: &Connection, project_id: &str) -> Result<Vec<WatchedTracker>> {
+        todo!()
+    }
+
+    pub fn list_all_enabled(conn: &Connection) -> Result<Vec<WatchedTracker>> {
+        todo!()
+    }
+
+    pub fn get_by_id(conn: &Connection, id: &str) -> Result<WatchedTracker> {
+        todo!()
+    }
+
+    pub fn update(
+        conn: &Connection,
+        id: &str,
+        polling_interval: i32,
+        agent_config: &AgentConfig,
+        filters: &[FilterGroup],
+        enabled: bool,
+    ) -> Result<()> {
+        todo!()
+    }
+
+    pub fn update_last_polled(conn: &Connection, id: &str) -> Result<()> {
+        todo!()
+    }
+
+    pub fn delete(conn: &Connection, id: &str) -> Result<()> {
+        todo!()
+    }
+}
+
+fn from_row(row: &rusqlite::Row) -> rusqlite::Result<WatchedTracker> {
+    let agent_config_json: String = row.get(5)?;
+    let filters_json: String = row.get(6)?;
+    let enabled: i32 = row.get(7)?;
+
+    Ok(WatchedTracker {
+        id: row.get(0)?,
+        project_id: row.get(1)?,
+        tracker_id: row.get(2)?,
+        tracker_label: row.get(3)?,
+        polling_interval: row.get(4)?,
+        agent_config: serde_json::from_str(&agent_config_json).unwrap_or(AgentConfig {
+            analyst_command: String::new(),
+            analyst_args: Vec::new(),
+            developer_command: String::new(),
+            developer_args: Vec::new(),
+        }),
+        filters: serde_json::from_str(&filters_json).unwrap_or_default(),
+        enabled: enabled == 1,
+        last_polled_at: row.get(8)?,
+        created_at: row.get(9)?,
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::db;
+    use crate::models::project::Project;
+
+    fn setup() -> Connection {
+        let conn = db::init_in_memory().expect("db init should succeed");
+        Project::insert(&conn, "Test Project", "/path/test", None, "main").unwrap();
+        conn
+    }
+
+    fn get_project_id(conn: &Connection) -> String {
+        Project::list(conn).unwrap()[0].id.clone()
+    }
+
+    fn default_agent_config() -> AgentConfig {
+        AgentConfig {
+            analyst_command: "claude".to_string(),
+            analyst_args: vec!["--print".to_string()],
+            developer_command: "claude".to_string(),
+            developer_args: vec!["--print".to_string()],
+        }
+    }
+
+    #[test]
+    fn test_insert_tracker() {
+        let conn = setup();
+        let project_id = get_project_id(&conn);
+        let tracker = WatchedTracker::insert(
+            &conn, &project_id, 456, "Bugs", 10, &default_agent_config(), &[],
+        ).expect("insert should succeed");
+
+        assert_eq!(tracker.tracker_id, 456);
+        assert_eq!(tracker.tracker_label, "Bugs");
+        assert_eq!(tracker.polling_interval, 10);
+        assert!(tracker.enabled);
+    }
+
+    #[test]
+    fn test_list_by_project() {
+        let conn = setup();
+        let project_id = get_project_id(&conn);
+        WatchedTracker::insert(&conn, &project_id, 1, "Bugs", 10, &default_agent_config(), &[]).unwrap();
+        WatchedTracker::insert(&conn, &project_id, 2, "Tasks", 15, &default_agent_config(), &[]).unwrap();
+
+        let trackers = WatchedTracker::list_by_project(&conn, &project_id).unwrap();
+        assert_eq!(trackers.len(), 2);
+    }
+
+    #[test]
+    fn test_list_all_enabled() {
+        let conn = setup();
+        let project_id = get_project_id(&conn);
+        let t = WatchedTracker::insert(&conn, &project_id, 1, "Bugs", 10, &default_agent_config(), &[]).unwrap();
+        WatchedTracker::insert(&conn, &project_id, 2, "Tasks", 15, &default_agent_config(), &[]).unwrap();
+        WatchedTracker::update(&conn, &t.id, 10, &default_agent_config(), &[], false).unwrap();
+
+        let enabled = WatchedTracker::list_all_enabled(&conn).unwrap();
+        assert_eq!(enabled.len(), 1);
+        assert_eq!(enabled[0].tracker_label, "Tasks");
+    }
+
+    #[test]
+    fn test_get_by_id() {
+        let conn = setup();
+        let project_id = get_project_id(&conn);
+        let created = WatchedTracker::insert(&conn, &project_id, 456, "Bugs", 10, &default_agent_config(), &[]).unwrap();
+        let found = WatchedTracker::get_by_id(&conn, &created.id).unwrap();
+        assert_eq!(found.tracker_id, 456);
+    }
+
+    #[test]
+    fn test_update_tracker() {
+        let conn = setup();
+        let project_id = get_project_id(&conn);
+        let created = WatchedTracker::insert(&conn, &project_id, 456, "Bugs", 10, &default_agent_config(), &[]).unwrap();
+
+        let filters = vec![FilterGroup {
+            conditions: vec![Filter {
+                field: "Status".to_string(),
+                operator: "In".to_string(),
+                value: vec!["Nouveau".to_string()],
+            }],
+        }];
+        WatchedTracker::update(&conn, &created.id, 20, &default_agent_config(), &filters, true).unwrap();
+
+        let updated = WatchedTracker::get_by_id(&conn, &created.id).unwrap();
+        assert_eq!(updated.polling_interval, 20);
+        assert_eq!(updated.filters.len(), 1);
+        assert_eq!(updated.filters[0].conditions[0].field, "Status");
+    }
+
+    #[test]
+    fn test_update_last_polled() {
+        let conn = setup();
+        let project_id = get_project_id(&conn);
+        let created = WatchedTracker::insert(&conn, &project_id, 456, "Bugs", 10, &default_agent_config(), &[]).unwrap();
+        assert!(created.last_polled_at.is_none());
+
+        WatchedTracker::update_last_polled(&conn, &created.id).unwrap();
+        let updated = WatchedTracker::get_by_id(&conn, &created.id).unwrap();
+        assert!(updated.last_polled_at.is_some());
+    }
+
+    #[test]
+    fn test_delete_tracker() {
+        let conn = setup();
+        let project_id = get_project_id(&conn);
+        let created = WatchedTracker::insert(&conn, &project_id, 456, "Bugs", 10, &default_agent_config(), &[]).unwrap();
+        WatchedTracker::delete(&conn, &created.id).unwrap();
+        assert!(WatchedTracker::get_by_id(&conn, &created.id).is_err());
+    }
+}
+```
+
+- [ ] **Step 2: Run tests to verify they fail**
+
+```bash
+cd /home/leclere/Projets/orchai/src-tauri
+PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test models::tracker::tests
+```
+
+Expected: 7 failures.
+
+- [ ] **Step 3: Implement WatchedTracker CRUD**
+
+Replace the `todo!()` stubs in `src-tauri/src/models/tracker.rs`:
+
+```rust
+    pub fn insert(
+        conn: &Connection,
+        project_id: &str,
+        tracker_id: i32,
+        tracker_label: &str,
+        polling_interval: i32,
+        agent_config: &AgentConfig,
+        filters: &[FilterGroup],
+    ) -> Result<WatchedTracker> {
+        let id = Uuid::new_v4().to_string();
+        let now = chrono::Utc::now().to_rfc3339();
+        let agent_config_json = serde_json::to_string(agent_config).unwrap();
+        let filters_json = serde_json::to_string(filters).unwrap();
+
+        conn.execute(
+            "INSERT INTO watched_trackers (id, project_id, tracker_id, tracker_label, polling_interval, agent_config_json, filters_json, created_at) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8)",
+            params![id, project_id, tracker_id, tracker_label, polling_interval, agent_config_json, filters_json, now],
+        )?;
+
+        Ok(WatchedTracker {
+            id,
+            project_id: project_id.to_string(),
+            tracker_id,
+            tracker_label: tracker_label.to_string(),
+            polling_interval,
+            agent_config: agent_config.clone(),
+            filters: filters.to_vec(),
+            enabled: true,
+            last_polled_at: None,
+            created_at: now,
+        })
+    }
+
+    pub fn list_by_project(conn: &Connection, project_id: &str) -> Result<Vec<WatchedTracker>> {
+        let mut stmt = conn.prepare(
+            "SELECT id, project_id, tracker_id, tracker_label, polling_interval, agent_config_json, filters_json, enabled, last_polled_at, created_at FROM watched_trackers WHERE project_id = ?1 ORDER BY created_at DESC",
+        )?;
+        let rows = stmt.query_map(params![project_id], |row| from_row(row))?;
+        rows.collect()
+    }
+
+    pub fn list_all_enabled(conn: &Connection) -> Result<Vec<WatchedTracker>> {
+        let mut stmt = conn.prepare(
+            "SELECT id, project_id, tracker_id, tracker_label, polling_interval, agent_config_json, filters_json, enabled, last_polled_at, created_at FROM watched_trackers WHERE enabled = 1",
+        )?;
+        let rows = stmt.query_map([], |row| from_row(row))?;
+        rows.collect()
+    }
+
+    pub fn get_by_id(conn: &Connection, id: &str) -> Result<WatchedTracker> {
+        conn.query_row(
+            "SELECT id, project_id, tracker_id, tracker_label, polling_interval, agent_config_json, filters_json, enabled, last_polled_at, created_at FROM watched_trackers WHERE id = ?1",
+            params![id],
+            |row| from_row(row),
+        )
+    }
+
+    pub fn update(
+        conn: &Connection,
+        id: &str,
+        polling_interval: i32,
+        agent_config: &AgentConfig,
+        filters: &[FilterGroup],
+        enabled: bool,
+    ) -> Result<()> {
+        let agent_config_json = serde_json::to_string(agent_config).unwrap();
+        let filters_json = serde_json::to_string(filters).unwrap();
+        let enabled_int: i32 = if enabled { 1 } else { 0 };
+
+        let affected = conn.execute(
+            "UPDATE watched_trackers SET polling_interval = ?1, agent_config_json = ?2, filters_json = ?3, enabled = ?4 WHERE id = ?5",
+            params![polling_interval, agent_config_json, filters_json, enabled_int, id],
+        )?;
+        if affected == 0 {
+            return Err(rusqlite::Error::QueryReturnedNoRows);
+        }
+        Ok(())
+    }
+
+    pub fn update_last_polled(conn: &Connection, id: &str) -> Result<()> {
+        let now = chrono::Utc::now().to_rfc3339();
+        conn.execute(
+            "UPDATE watched_trackers SET last_polled_at = ?1 WHERE id = ?2",
+            params![now, id],
+        )?;
+        Ok(())
+    }
+
+    pub fn delete(conn: &Connection, id: &str) -> Result<()> {
+        let affected = conn.execute("DELETE FROM watched_trackers WHERE id = ?1", params![id])?;
+        if affected == 0 {
+            return Err(rusqlite::Error::QueryReturnedNoRows);
+        }
+        Ok(())
+    }
+```
+
+- [ ] **Step 4: Run tests to verify they pass**
+
+```bash
+cd /home/leclere/Projets/orchai/src-tauri
+PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test models::tracker::tests
+```
+
+Expected: 7 pass.
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add -A
+git commit -m "feat: WatchedTracker model with CRUD, filters, and agent config"
+```
+
+---
+
+### Task 6: Filter engine + tests
+
+**Files:**
+- Create: `src-tauri/src/services/filter_engine.rs`
+- Modify: `src-tauri/src/services/mod.rs`
+
+- [ ] **Step 1: Write the filter engine with tests**
+
+Add `pub mod filter_engine;` to `src-tauri/src/services/mod.rs`.
+
+Create `src-tauri/src/services/filter_engine.rs`:
+
+```rust
+use crate::models::tracker::{Filter, FilterGroup};
+use crate::services::tuleap_client::extract_artifact_field_values;
+
+/// Filter artifacts using AND/OR logic.
+/// FilterGroups are combined with AND.
+/// Conditions within a FilterGroup are combined with OR.
+/// Returns only artifacts matching ALL groups.
+pub fn apply_filters(
+    artifacts: &[serde_json::Value],
+    filter_groups: &[FilterGroup],
+) -> Vec<serde_json::Value> {
+    if filter_groups.is_empty() {
+        return artifacts.to_vec();
+    }
+
+    artifacts.iter()
+        .filter(|artifact| matches_all_groups(artifact, filter_groups))
+        .cloned()
+        .collect()
+}
+
+fn matches_all_groups(artifact: &serde_json::Value, groups: &[FilterGroup]) -> bool {
+    groups.iter().all(|group| matches_any_condition(artifact, &group.conditions))
+}
+
+fn matches_any_condition(artifact: &serde_json::Value, conditions: &[Filter]) -> bool {
+    if conditions.is_empty() {
+        return true;
+    }
+    conditions.iter().any(|condition| matches_condition(artifact, condition))
+}
+
+fn matches_condition(artifact: &serde_json::Value, condition: &Filter) -> bool {
+    let field_values = extract_artifact_field_values(artifact, &condition.field);
+
+    match condition.operator.as_str() {
+        "Equals" => {
+            condition.value.len() == 1
+                && field_values.iter().any(|v| v == &condition.value[0])
+        }
+        "NotEquals" => {
+            condition.value.len() == 1
+                && !field_values.iter().any(|v| v == &condition.value[0])
+        }
+        "In" => {
+            field_values.iter().any(|v| condition.value.contains(v))
+        }
+        "NotIn" => {
+            !field_values.iter().any(|v| condition.value.contains(v))
+        }
+        _ => false,
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use serde_json::json;
+
+    fn make_artifact(status: &str, assigned: &str, priority: &str) -> serde_json::Value {
+        json!({
+            "id": 123,
+            "title": "Test ticket",
+            "values": [
+                {
+                    "field_id": 1, "label": "Status", "type": "sb",
+                    "values": [{ "id": 1, "label": status }]
+                },
+                {
+                    "field_id": 2, "label": "Assigned to", "type": "msb",
+                    "bind_value_objects": [{ "id": 2, "display_name": assigned }]
+                },
+                {
+                    "field_id": 3, "label": "Priority", "type": "sb",
+                    "values": [{ "id": 3, "label": priority }]
+                }
+            ]
+        })
+    }
+
+    #[test]
+    fn test_empty_filters_returns_all() {
+        let artifacts = vec![make_artifact("Nouveau", "Alice", "Haute")];
+        let result = apply_filters(&artifacts, &[]);
+        assert_eq!(result.len(), 1);
+    }
+
+    #[test]
+    fn test_single_in_filter() {
+        let artifacts = vec![
+            make_artifact("Nouveau", "Alice", "Haute"),
+            make_artifact("Ferme", "Bob", "Basse"),
+        ];
+        let groups = vec![FilterGroup {
+            conditions: vec![Filter {
+                field: "Status".to_string(),
+                operator: "In".to_string(),
+                value: vec!["Nouveau".to_string()],
+            }],
+        }];
+        let result = apply_filters(&artifacts, &groups);
+        assert_eq!(result.len(), 1);
+    }
+
+    #[test]
+    fn test_or_within_group() {
+        let artifacts = vec![
+            make_artifact("Nouveau", "Alice", "Haute"),
+            make_artifact("A traiter", "Bob", "Basse"),
+            make_artifact("Ferme", "Charlie", "Moyenne"),
+        ];
+        let groups = vec![FilterGroup {
+            conditions: vec![
+                Filter { field: "Status".to_string(), operator: "In".to_string(), value: vec!["Nouveau".to_string()] },
+                Filter { field: "Status".to_string(), operator: "In".to_string(), value: vec!["A traiter".to_string()] },
+            ],
+        }];
+        let result = apply_filters(&artifacts, &groups);
+        assert_eq!(result.len(), 2);
+    }
+
+    #[test]
+    fn test_and_across_groups() {
+        // (Status Nouveau OR A traiter) AND (Assigned to Team Maintenance)
+        let artifacts = vec![
+            make_artifact("Nouveau", "Team Maintenance", "Haute"),
+            make_artifact("Nouveau", "Alice", "Haute"),
+            make_artifact("Ferme", "Team Maintenance", "Basse"),
+        ];
+        let groups = vec![
+            FilterGroup {
+                conditions: vec![Filter {
+                    field: "Status".to_string(),
+                    operator: "In".to_string(),
+                    value: vec!["Nouveau".to_string(), "A traiter".to_string()],
+                }],
+            },
+            FilterGroup {
+                conditions: vec![Filter {
+                    field: "Assigned to".to_string(),
+                    operator: "In".to_string(),
+                    value: vec!["Team Maintenance".to_string()],
+                }],
+            },
+        ];
+        let result = apply_filters(&artifacts, &groups);
+        assert_eq!(result.len(), 1);
+        assert_eq!(result[0]["values"][0]["values"][0]["label"], "Nouveau");
+    }
+
+    #[test]
+    fn test_not_in_filter() {
+        let artifacts = vec![
+            make_artifact("Nouveau", "Alice", "Haute"),
+            make_artifact("Ferme", "Bob", "Basse"),
+        ];
+        let groups = vec![FilterGroup {
+            conditions: vec![Filter {
+                field: "Status".to_string(),
+                operator: "NotIn".to_string(),
+                value: vec!["Ferme".to_string()],
+            }],
+        }];
+        let result = apply_filters(&artifacts, &groups);
+        assert_eq!(result.len(), 1);
+    }
+
+    #[test]
+    fn test_equals_filter() {
+        let artifacts = vec![
+            make_artifact("Nouveau", "Alice", "Haute"),
+            make_artifact("Nouveau", "Bob", "Basse"),
+        ];
+        let groups = vec![FilterGroup {
+            conditions: vec![Filter {
+                field: "Priority".to_string(),
+                operator: "Equals".to_string(),
+                value: vec!["Haute".to_string()],
+            }],
+        }];
+        let result = apply_filters(&artifacts, &groups);
+        assert_eq!(result.len(), 1);
+    }
+
+    #[test]
+    fn test_no_match_returns_empty() {
+        let artifacts = vec![make_artifact("Ferme", "Alice", "Basse")];
+        let groups = vec![FilterGroup {
+            conditions: vec![Filter {
+                field: "Status".to_string(),
+                operator: "Equals".to_string(),
+                value: vec!["Nouveau".to_string()],
+            }],
+        }];
+        let result = apply_filters(&artifacts, &groups);
+        assert!(result.is_empty());
+    }
+}
+```
+
+- [ ] **Step 2: Run tests**
+
+```bash
+cd /home/leclere/Projets/orchai/src-tauri
+PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test services::filter_engine::tests
+```
+
+Expected: 7 pass.
+
+- [ ] **Step 3: Commit**
+
+```bash
+git add -A
+git commit -m "feat: AND/OR filter engine for Tuleap artifact filtering"
+```
+
+---
+
+### Task 7: ProcessedTicket model + tests
+
+**Files:**
+- Create: `src-tauri/src/models/ticket.rs`
+- Modify: `src-tauri/src/models/mod.rs`
+
+- [ ] **Step 1: Write failing tests**
+
+Add `pub mod ticket;` to `src-tauri/src/models/mod.rs`.
+
+Create `src-tauri/src/models/ticket.rs`:
+
+```rust
+use rusqlite::{params, Connection, Result};
+use serde::{Deserialize, Serialize};
+use uuid::Uuid;
+
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct ProcessedTicket {
+    pub id: String,
+    pub tracker_id: String,
+    pub artifact_id: i32,
+    pub artifact_title: String,
+    pub artifact_data: String,
+    pub status: String,
+    pub analyst_report: Option<String>,
+    pub developer_report: Option<String>,
+    pub worktree_path: Option<String>,
+    pub branch_name: Option<String>,
+    pub detected_at: String,
+    pub processed_at: Option<String>,
+}
+
+impl ProcessedTicket {
+    /// Insert a new ticket if it hasn't been processed before.
+    /// Returns Some(ticket) if inserted, None if already exists.
+    pub fn insert_if_new(
+        conn: &Connection,
+        tracker_id: &str,
+        artifact_id: i32,
+        artifact_title: &str,
+        artifact_data: &str,
+    ) -> Result<Option<ProcessedTicket>> {
+        todo!()
+    }
+
+    /// Check if an artifact has already been processed for this tracker.
+    pub fn exists(conn: &Connection, tracker_id: &str, artifact_id: i32) -> Result<bool> {
+        todo!()
+    }
+
+    pub fn list_by_tracker(conn: &Connection, tracker_id: &str) -> Result<Vec<ProcessedTicket>> {
+        todo!()
+    }
+
+    pub fn list_by_project(conn: &Connection, project_id: &str) -> Result<Vec<ProcessedTicket>> {
+        todo!()
+    }
+
+    pub fn get_by_id(conn: &Connection, id: &str) -> Result<ProcessedTicket> {
+        todo!()
+    }
+}
+
+fn from_row(row: &rusqlite::Row) -> rusqlite::Result<ProcessedTicket> {
+    Ok(ProcessedTicket {
+        id: row.get(0)?,
+        tracker_id: row.get(1)?,
+        artifact_id: row.get(2)?,
+        artifact_title: row.get(3)?,
+        artifact_data: row.get(4)?,
+        status: row.get(5)?,
+        analyst_report: row.get(6)?,
+        developer_report: row.get(7)?,
+        worktree_path: row.get(8)?,
+        branch_name: row.get(9)?,
+        detected_at: row.get(10)?,
+        processed_at: row.get(11)?,
+    })
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::db;
+    use crate::models::project::Project;
+    use crate::models::tracker::{AgentConfig, WatchedTracker};
+
+    fn setup() -> (Connection, String) {
+        let conn = db::init_in_memory().expect("db init should succeed");
+        let project = Project::insert(&conn, "Test", "/path", None, "main").unwrap();
+        let agent_config = AgentConfig {
+            analyst_command: "claude".to_string(),
+            analyst_args: vec![],
+            developer_command: "claude".to_string(),
+            developer_args: vec![],
+        };
+        let tracker = WatchedTracker::insert(&conn, &project.id, 456, "Bugs", 10, &agent_config, &[]).unwrap();
+        (conn, tracker.id)
+    }
+
+    #[test]
+    fn test_insert_if_new_creates_ticket() {
+        let (conn, tracker_id) = setup();
+        let ticket = ProcessedTicket::insert_if_new(&conn, &tracker_id, 123, "Bug title", "{}")
+            .expect("should succeed");
+        assert!(ticket.is_some());
+        let t = ticket.unwrap();
+        assert_eq!(t.artifact_id, 123);
+        assert_eq!(t.status, "Pending");
+    }
+
+    #[test]
+    fn test_insert_if_new_returns_none_for_duplicate() {
+        let (conn, tracker_id) = setup();
+        ProcessedTicket::insert_if_new(&conn, &tracker_id, 123, "Bug", "{}").unwrap();
+        let result = ProcessedTicket::insert_if_new(&conn, &tracker_id, 123, "Bug", "{}").unwrap();
+        assert!(result.is_none());
+    }
+
+    #[test]
+    fn test_exists() {
+        let (conn, tracker_id) = setup();
+        assert!(!ProcessedTicket::exists(&conn, &tracker_id, 123).unwrap());
+        ProcessedTicket::insert_if_new(&conn, &tracker_id, 123, "Bug", "{}").unwrap();
+        assert!(ProcessedTicket::exists(&conn, &tracker_id, 123).unwrap());
+    }
+
+    #[test]
+    fn test_list_by_tracker() {
+        let (conn, tracker_id) = setup();
+        ProcessedTicket::insert_if_new(&conn, &tracker_id, 1, "Bug 1", "{}").unwrap();
+        ProcessedTicket::insert_if_new(&conn, &tracker_id, 2, "Bug 2", "{}").unwrap();
+        let tickets = ProcessedTicket::list_by_tracker(&conn, &tracker_id).unwrap();
+        assert_eq!(tickets.len(), 2);
+    }
+
+    #[test]
+    fn test_get_by_id() {
+        let (conn, tracker_id) = setup();
+        let created = ProcessedTicket::insert_if_new(&conn, &tracker_id, 123, "Bug", "{}").unwrap().unwrap();
+        let found = ProcessedTicket::get_by_id(&conn, &created.id).unwrap();
+        assert_eq!(found.artifact_id, 123);
+    }
+}
+```
+
+- [ ] **Step 2: Run tests to verify they fail**
+
+```bash
+cd /home/leclere/Projets/orchai/src-tauri
+PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test models::ticket::tests
+```
+
+Expected: 5 failures.
+
+- [ ] **Step 3: Implement ProcessedTicket methods**
+
+Replace stubs in `src-tauri/src/models/ticket.rs`:
+
+```rust
+    pub fn insert_if_new(
+        conn: &Connection,
+        tracker_id: &str,
+        artifact_id: i32,
+        artifact_title: &str,
+        artifact_data: &str,
+    ) -> Result<Option<ProcessedTicket>> {
+        if Self::exists(conn, tracker_id, artifact_id)? {
+            return Ok(None);
+        }
+        let id = Uuid::new_v4().to_string();
+        let now = chrono::Utc::now().to_rfc3339();
+        conn.execute(
+            "INSERT INTO processed_tickets (id, tracker_id, artifact_id, artifact_title, artifact_data, status, detected_at) VALUES (?1, ?2, ?3, ?4, ?5, 'Pending', ?6)",
+            params![id, tracker_id, artifact_id, artifact_title, artifact_data, now],
+        )?;
+        Ok(Some(ProcessedTicket {
+            id,
+            tracker_id: tracker_id.to_string(),
+            artifact_id,
+            artifact_title: artifact_title.to_string(),
+            artifact_data: artifact_data.to_string(),
+            status: "Pending".to_string(),
+            analyst_report: None,
+            developer_report: None,
+            worktree_path: None,
+            branch_name: None,
+            detected_at: now,
+            processed_at: None,
+        }))
+    }
+
+    pub fn exists(conn: &Connection, tracker_id: &str, artifact_id: i32) -> Result<bool> {
+        let count: i32 = conn.query_row(
+            "SELECT COUNT(*) FROM processed_tickets WHERE tracker_id = ?1 AND artifact_id = ?2",
+            params![tracker_id, artifact_id],
+            |row| row.get(0),
+        )?;
+        Ok(count > 0)
+    }
+
+    pub fn list_by_tracker(conn: &Connection, tracker_id: &str) -> Result<Vec<ProcessedTicket>> {
+        let mut stmt = conn.prepare(
+            "SELECT id, tracker_id, artifact_id, artifact_title, artifact_data, status, analyst_report, developer_report, worktree_path, branch_name, detected_at, processed_at FROM processed_tickets WHERE tracker_id = ?1 ORDER BY detected_at DESC",
+        )?;
+        let rows = stmt.query_map(params![tracker_id], |row| from_row(row))?;
+        rows.collect()
+    }
+
+    pub fn list_by_project(conn: &Connection, project_id: &str) -> Result<Vec<ProcessedTicket>> {
+        let mut stmt = conn.prepare(
+            "SELECT pt.id, pt.tracker_id, pt.artifact_id, pt.artifact_title, pt.artifact_data, pt.status, pt.analyst_report, pt.developer_report, pt.worktree_path, pt.branch_name, pt.detected_at, pt.processed_at FROM processed_tickets pt INNER JOIN watched_trackers wt ON pt.tracker_id = wt.id WHERE wt.project_id = ?1 ORDER BY pt.detected_at DESC",
+        )?;
+        let rows = stmt.query_map(params![project_id], |row| from_row(row))?;
+        rows.collect()
+    }
+
+    pub fn get_by_id(conn: &Connection, id: &str) -> Result<ProcessedTicket> {
+        conn.query_row(
+            "SELECT id, tracker_id, artifact_id, artifact_title, artifact_data, status, analyst_report, developer_report, worktree_path, branch_name, detected_at, processed_at FROM processed_tickets WHERE id = ?1",
+            params![id],
+            |row| from_row(row),
+        )
+    }
+```
+
+- [ ] **Step 4: Run tests**
+
+```bash
+cd /home/leclere/Projets/orchai/src-tauri
+PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test models::ticket::tests
+```
+
+Expected: 5 pass.
+
+- [ ] **Step 5: Commit**
+
+```bash
+git add -A
+git commit -m "feat: ProcessedTicket model with deduplication for new ticket detection"
+```
+
+---
+
+### Task 8: Tracker + poller Tauri commands
+
+**Files:**
+- Create: `src-tauri/src/commands/tracker.rs`
+- Create: `src-tauri/src/commands/poller.rs`
+- Modify: `src-tauri/src/commands/mod.rs`
+- Modify: `src-tauri/src/lib.rs` (register commands)
+
+- [ ] **Step 1: Create tracker commands**
+
+Create `src-tauri/src/commands/tracker.rs`:
+
+```rust
+use crate::error::AppError;
+use crate::models::credential::TuleapCredentials;
+use crate::models::tracker::{AgentConfig, FilterGroup, WatchedTracker};
+use crate::models::ticket::ProcessedTicket;
+use crate::services::crypto;
+use crate::services::tuleap_client::TuleapClient;
+use crate::AppState;
+use tauri::State;
+
+#[tauri::command]
+pub fn add_tracker(
+    state: State<'_, AppState>,
+    project_id: String,
+    tracker_id: i32,
+    tracker_label: String,
+    polling_interval: i32,
+    agent_config: AgentConfig,
+    filters: Vec<FilterGroup>,
+) -> Result<WatchedTracker, AppError> {
+    let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+    let tracker = WatchedTracker::insert(&db, &project_id, tracker_id, &tracker_label, polling_interval, &agent_config, &filters)?;
+    Ok(tracker)
+}
+
+#[tauri::command]
+pub fn list_trackers(
+    state: State<'_, AppState>,
+    project_id: String,
+) -> Result<Vec<WatchedTracker>, AppError> {
+    let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+    let trackers = WatchedTracker::list_by_project(&db, &project_id)?;
+    Ok(trackers)
+}
+
+#[tauri::command]
+pub fn update_tracker(
+    state: State<'_, AppState>,
+    id: String,
+    polling_interval: i32,
+    agent_config: AgentConfig,
+    filters: Vec<FilterGroup>,
+    enabled: bool,
+) -> Result<(), AppError> {
+    let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+    WatchedTracker::update(&db, &id, polling_interval, &agent_config, &filters, enabled)?;
+    Ok(())
+}
+
+#[tauri::command]
+pub fn remove_tracker(state: State<'_, AppState>, id: String) -> Result<(), AppError> {
+    let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+    WatchedTracker::delete(&db, &id)?;
+    Ok(())
+}
+
+#[tauri::command]
+pub async fn get_tracker_fields(
+    state: State<'_, AppState>,
+    tracker_id: i32,
+) -> Result<Vec<serde_json::Value>, AppError> {
+    let client = build_tuleap_client(&state)?;
+    let fields = client.get_tracker_fields(tracker_id).await
+        .map_err(|e| AppError::from(e))?;
+    Ok(fields)
+}
+
+#[tauri::command]
+pub fn list_processed_tickets(
+    state: State<'_, AppState>,
+    project_id: String,
+) -> Result<Vec<ProcessedTicket>, AppError> {
+    let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+    let tickets = ProcessedTicket::list_by_project(&db, &project_id)?;
+    Ok(tickets)
+}
+
+fn build_tuleap_client(state: &State<'_, AppState>) -> Result<TuleapClient, AppError> {
+    let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+    let cred = TuleapCredentials::get(&db)?
+        .ok_or_else(|| AppError::from("No Tuleap credentials configured".to_string()))?;
+    let password = crypto::decrypt(&state.encryption_key, &cred.password_encrypted)
+        .map_err(|e| AppError::from(e))?;
+    Ok(TuleapClient::new(&state.http_client, &cred.tuleap_url, &cred.username, &password))
+}
+```
+
+- [ ] **Step 2: Create poller commands**
+
+Create `src-tauri/src/commands/poller.rs`:
+
+```rust
+use crate::error::AppError;
+use crate::models::credential::TuleapCredentials;
+use crate::models::ticket::ProcessedTicket;
+use crate::models::tracker::WatchedTracker;
+use crate::services::{crypto, filter_engine};
+use crate::services::tuleap_client::TuleapClient;
+use crate::AppState;
+use tauri::State;
+
+#[tauri::command]
+pub async fn manual_poll(
+    state: State<'_, AppState>,
+    tracker_id: String,
+) -> Result<Vec<ProcessedTicket>, AppError> {
+    let (tracker, client) = {
+        let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+        let tracker = WatchedTracker::get_by_id(&db, &tracker_id)?;
+        let cred = TuleapCredentials::get(&db)?
+            .ok_or_else(|| AppError::from("No Tuleap credentials configured".to_string()))?;
+        let password = crypto::decrypt(&state.encryption_key, &cred.password_encrypted)
+            .map_err(|e| AppError::from(e))?;
+        let client = TuleapClient::new(&state.http_client, &cred.tuleap_url, &cred.username, &password);
+        (tracker, client)
+    };
+
+    let artifacts = client.get_artifacts(tracker.tracker_id).await
+        .map_err(|e| AppError::from(e))?;
+
+    let filtered = filter_engine::apply_filters(&artifacts, &tracker.filters);
+
+    let mut new_tickets = Vec::new();
+    {
+        let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+        for artifact in &filtered {
+            let artifact_id = artifact.get("id").and_then(|v| v.as_i64()).unwrap_or(0) as i32;
+            let artifact_title = artifact.get("title").and_then(|v| v.as_str()).unwrap_or("Untitled");
+            let artifact_data = serde_json::to_string(artifact).unwrap_or_default();
+
+            if let Some(ticket) = ProcessedTicket::insert_if_new(
+                &db, &tracker_id, artifact_id, artifact_title, &artifact_data,
+            )? {
+                new_tickets.push(ticket);
+            }
+        }
+        WatchedTracker::update_last_polled(&db, &tracker_id)?;
+    }
+
+    Ok(new_tickets)
+}
+
+#[tauri::command]
+pub fn get_queue_status(
+    state: State<'_, AppState>,
+    project_id: String,
+) -> Result<Vec<ProcessedTicket>, AppError> {
+    let db = state.db.lock().map_err(|e| AppError::from(format!("Database lock failed: {}", e)))?;
+    let tickets = ProcessedTicket::list_by_project(&db, &project_id)?;
+    Ok(tickets)
+}
+```
+
+- [ ] **Step 3: Update commands/mod.rs**
+
+Replace `src-tauri/src/commands/mod.rs`:
+
+```rust
+pub mod credential;
+pub mod poller;
+pub mod project;
+pub mod tracker;
+```
+
+- [ ] **Step 4: Register all new commands in lib.rs**
+
+Update the `invoke_handler` in `src-tauri/src/lib.rs`:
+
+```rust
+        .invoke_handler(tauri::generate_handler![
+            commands::project::create_project,
+            commands::project::list_projects,
+            commands::project::get_project,
+            commands::project::update_project,
+            commands::project::delete_project,
+            commands::credential::set_tuleap_credentials,
+            commands::credential::get_tuleap_credentials,
+            commands::credential::delete_tuleap_credentials,
+            commands::credential::test_tuleap_connection,
+            commands::tracker::add_tracker,
+            commands::tracker::list_trackers,
+            commands::tracker::update_tracker,
+            commands::tracker::remove_tracker,
+            commands::tracker::get_tracker_fields,
+            commands::tracker::list_processed_tickets,
+            commands::poller::manual_poll,
+            commands::poller::get_queue_status,
+        ])
+```
+
+- [ ] **Step 5: Verify compilation and all tests pass**
+
+```bash
+cd /home/leclere/Projets/orchai/src-tauri
+PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test
+```
+
+Expected: all tests pass.
+
+- [ ] **Step 6: Commit**
+
+```bash
+git add -A
+git commit -m "feat: Tauri commands for tracker CRUD, Tuleap fields, and manual polling"
+```
+
+---
+
+### Task 9: Background poller service
+
+**Files:**
+- Create: `src-tauri/src/services/poller.rs`
+- Modify: `src-tauri/src/services/mod.rs`
+- Modify: `src-tauri/src/lib.rs` (spawn poller on startup)
+
+- [ ] **Step 1: Create poller service**
+
+Add `pub mod poller;` to `src-tauri/src/services/mod.rs`.
+
+Create `src-tauri/src/services/poller.rs`:
+
+```rust
+use crate::models::credential::TuleapCredentials;
+use crate::models::ticket::ProcessedTicket;
+use crate::models::tracker::WatchedTracker;
+use crate::services::{crypto, filter_engine};
+use crate::services::tuleap_client::TuleapClient;
+use rusqlite::Connection;
+use std::sync::{Arc, Mutex};
+use tauri::{AppHandle, Emitter};
+use tokio::time::{interval, Duration};
+
+/// Start the background polling loop. Checks every 60 seconds if any tracker needs polling.
+pub fn start(db: Arc<Mutex<Connection>>, encryption_key: [u8; 32], http_client: reqwest::Client, app_handle: AppHandle) {
+    tokio::spawn(async move {
+        let mut tick = interval(Duration::from_secs(60));
+        loop {
+            tick.tick().await;
+            poll_all_trackers(&db, &encryption_key, &http_client, &app_handle).await;
+        }
+    });
+}
+
+async fn poll_all_trackers(
+    db: &Arc<Mutex<Connection>>,
+    encryption_key: &[u8; 32],
+    http_client: &reqwest::Client,
+    app_handle: &AppHandle,
+) {
+    let trackers = {
+        let conn = match db.lock() {
+            Ok(c) => c,
+            Err(_) => return,
+        };
+        WatchedTracker::list_all_enabled(&conn).unwrap_or_default()
+    };
+
+    if trackers.is_empty() {
+        return;
+    }
+
+    // Build Tuleap client from credentials
+    let client = {
+        let conn = match db.lock() {
+            Ok(c) => c,
+            Err(_) => return,
+        };
+        let cred = match TuleapCredentials::get(&conn) {
+            Ok(Some(c)) => c,
+            _ => return,
+        };
+        let password = match crypto::decrypt(encryption_key, &cred.password_encrypted) {
+            Ok(p) => p,
+            Err(_) => return,
+        };
+        TuleapClient::new(http_client, &cred.tuleap_url, &cred.username, &password)
+    };
+
+    for tracker in &trackers {
+        if should_poll(tracker) {
+            poll_single_tracker(db, &client, tracker, app_handle).await;
+        }
+    }
+}
+
+fn should_poll(tracker: &WatchedTracker) -> bool {
+    let Some(last_polled) = &tracker.last_polled_at else {
+        return true; // Never polled
+    };
+    let Ok(last) = chrono::DateTime::parse_from_rfc3339(last_polled) else {
+        return true;
+    };
+    let elapsed = chrono::Utc::now().signed_duration_since(last);
+    elapsed.num_minutes() >= tracker.polling_interval as i64
+}
+
+async fn poll_single_tracker(
+    db: &Arc<Mutex<Connection>>,
+    client: &TuleapClient,
+    tracker: &WatchedTracker,
+    app_handle: &AppHandle,
+) {
+    let artifacts = match client.get_artifacts(tracker.tracker_id).await {
+        Ok(a) => a,
+        Err(e) => {
+            eprintln!("Poller error for tracker {}: {}", tracker.tracker_label, e);
+            return;
+        }
+    };
+
+    let filtered = filter_engine::apply_filters(&artifacts, &tracker.filters);
+
+    let mut new_tickets = Vec::new();
+    {
+        let conn = match db.lock() {
+            Ok(c) => c,
+            Err(_) => return,
+        };
+        for artifact in &filtered {
+            let artifact_id = artifact.get("id").and_then(|v| v.as_i64()).unwrap_or(0) as i32;
+            let artifact_title = artifact.get("title").and_then(|v| v.as_str()).unwrap_or("Untitled");
+            let artifact_data = serde_json::to_string(artifact).unwrap_or_default();
+
+            match ProcessedTicket::insert_if_new(&conn, &tracker.id, artifact_id, artifact_title, &artifact_data) {
+                Ok(Some(ticket)) => new_tickets.push(ticket),
+                Ok(None) => {} // already processed
+                Err(e) => eprintln!("Failed to insert ticket: {}", e),
+            }
+        }
+        let _ = WatchedTracker::update_last_polled(&conn, &tracker.id);
+    }
+
+    if !new_tickets.is_empty() {
+        let _ = app_handle.emit("new-tickets-detected", serde_json::json!({
+            "tracker_id": tracker.id,
+            "tracker_label": tracker.tracker_label,
+            "count": new_tickets.len(),
+        }));
+    }
+}
+```
+
+- [ ] **Step 2: Spawn poller on app startup**
+
+In
`src-tauri/src/lib.rs`, add the poller startup at the end of the `setup` closure, after `app.manage(...)`: + +```rust + .setup(|app| { + let db_dir = app.path().app_data_dir()?; + std::fs::create_dir_all(&db_dir)?; + + let db_path = db_dir.join("orchai.db"); + let conn = db::init(&db_path).expect("Failed to initialize database"); + + let key_path = db_dir.join("orchai.key"); + let encryption_key = load_or_generate_key(&key_path)?; + + let http_client = reqwest::Client::new(); + + let db_arc = Arc::new(Mutex::new(conn)); + + app.manage(AppState { + db: db_arc.clone(), + encryption_key, + http_client: http_client.clone(), + }); + + // Start background poller + services::poller::start( + db_arc, + encryption_key, + http_client, + app.handle().clone(), + ); + + Ok(()) + }) +``` + +- [ ] **Step 3: Verify compilation** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo build +``` + +Expected: compiles. All existing tests still pass. + +- [ ] **Step 4: Commit** + +```bash +git add -A +git commit -m "feat: background poller with 60s tick, per-tracker interval, event emission" +``` + +--- + +### Task 10: Frontend types + API wrappers + Settings page + +**Files:** +- Modify: `src/lib/types.ts` +- Modify: `src/lib/api.ts` +- Create: `src/components/settings/SettingsPage.tsx` +- Modify: `src/App.tsx` (add /settings route) +- Modify: `src/components/layout/Sidebar.tsx` (add settings link) + +- [ ] **Step 1: Add new TypeScript types** + +Add to `src/lib/types.ts`: + +```typescript +export interface TuleapCredentialsSafe { + id: string; + tuleap_url: string; + username: string; +} + +export interface AgentConfig { + analyst_command: string; + analyst_args: string[]; + developer_command: string; + developer_args: string[]; +} + +export interface Filter { + field: string; + operator: string; + value: string[]; +} + +export interface FilterGroup { + conditions: Filter[]; +} + +export interface TrackerField { + field_id: number; 
+  label: string;
+  field_type: string;
+  values: FieldValue[];
+}
+
+export interface FieldValue {
+  id: number;
+  label: string;
+}
+
+export interface WatchedTracker {
+  id: string;
+  project_id: string;
+  tracker_id: number;
+  tracker_label: string;
+  polling_interval: number;
+  agent_config: AgentConfig;
+  filters: FilterGroup[];
+  enabled: boolean;
+  last_polled_at: string | null;
+  created_at: string;
+}
+
+export interface ProcessedTicket {
+  id: string;
+  tracker_id: string;
+  artifact_id: number;
+  artifact_title: string;
+  artifact_data: string;
+  status: string;
+  analyst_report: string | null;
+  developer_report: string | null;
+  worktree_path: string | null;
+  branch_name: string | null;
+  detected_at: string;
+  processed_at: string | null;
+}
+```
+
+- [ ] **Step 2: Add new API wrappers**
+
+Add to `src/lib/api.ts`:
+
+```typescript
+import type {
+  Project,
+  TuleapCredentialsSafe,
+  WatchedTracker,
+  AgentConfig,
+  FilterGroup,
+  TrackerField,
+  ProcessedTicket,
+} from "./types";
+
+// Credentials
+export async function setTuleapCredentials(
+  tuleapUrl: string,
+  username: string,
+  password: string
+): Promise<TuleapCredentialsSafe> {
+  return invoke("set_tuleap_credentials", { tuleapUrl, username, password });
+}
+
+export async function getTuleapCredentials(): Promise<TuleapCredentialsSafe | null> {
+  return invoke("get_tuleap_credentials");
+}
+
+export async function deleteTuleapCredentials(): Promise<void> {
+  return invoke("delete_tuleap_credentials");
+}
+
+export async function testTuleapConnection(): Promise<string> {
+  return invoke("test_tuleap_connection");
+}
+
+// Trackers
+export async function addTracker(
+  projectId: string,
+  trackerId: number,
+  trackerLabel: string,
+  pollingInterval: number,
+  agentConfig: AgentConfig,
+  filters: FilterGroup[]
+): Promise<WatchedTracker> {
+  return invoke("add_tracker", {
+    projectId, trackerId, trackerLabel, pollingInterval, agentConfig, filters,
+  });
+}
+
+export async function listTrackers(projectId: string): Promise<WatchedTracker[]> {
+  return invoke("list_trackers", { projectId });
+}
+ +export async function updateTracker( + id: string, + pollingInterval: number, + agentConfig: AgentConfig, + filters: FilterGroup[], + enabled: boolean +): Promise { + return invoke("update_tracker", { id, pollingInterval, agentConfig, filters, enabled }); +} + +export async function removeTracker(id: string): Promise { + return invoke("remove_tracker", { id }); +} + +export async function getTrackerFields(trackerId: number): Promise { + return invoke("get_tracker_fields", { trackerId }); +} + +// Tickets +export async function listProcessedTickets(projectId: string): Promise { + return invoke("list_processed_tickets", { projectId }); +} + +export async function manualPoll(trackerId: string): Promise { + return invoke("manual_poll", { trackerId }); +} + +export async function getQueueStatus(projectId: string): Promise { + return invoke("get_queue_status", { projectId }); +} +``` + +- [ ] **Step 3: Create SettingsPage component** + +Create `src/components/settings/SettingsPage.tsx`: + +```tsx +import { useEffect, useState } from "react"; +import { + getTuleapCredentials, + setTuleapCredentials, + deleteTuleapCredentials, + testTuleapConnection, +} from "../../lib/api"; +import type { TuleapCredentialsSafe } from "../../lib/types"; + +export default function SettingsPage() { + const [credentials, setCredentials] = useState(null); + const [url, setUrl] = useState(""); + const [username, setUsername] = useState(""); + const [password, setPassword] = useState(""); + const [error, setError] = useState(null); + const [success, setSuccess] = useState(null); + const [loading, setLoading] = useState(false); + const [testing, setTesting] = useState(false); + + useEffect(() => { + getTuleapCredentials().then((cred) => { + setCredentials(cred); + if (cred) { + setUrl(cred.tuleap_url); + setUsername(cred.username); + } + }); + }, []); + + async function handleSave(e: React.FormEvent) { + e.preventDefault(); + setError(null); + setSuccess(null); + setLoading(true); + try { + 
const saved = await setTuleapCredentials(url, username, password); + setCredentials(saved); + setPassword(""); + setSuccess("Credentials saved"); + } catch (err: unknown) { + setError(err instanceof Error ? err.message : String(err)); + } finally { + setLoading(false); + } + } + + async function handleTest() { + setError(null); + setSuccess(null); + setTesting(true); + try { + const msg = await testTuleapConnection(); + setSuccess(msg); + } catch (err: unknown) { + setError(err instanceof Error ? err.message : String(err)); + } finally { + setTesting(false); + } + } + + async function handleDelete() { + if (!window.confirm("Delete Tuleap credentials?")) return; + await deleteTuleapCredentials(); + setCredentials(null); + setUrl(""); + setUsername(""); + setPassword(""); + } + + return ( +
+

Settings

+ +
+

Tuleap Credentials

+ +
+
+ + setUrl(e.target.value)} + required + placeholder="https://tuleap.example.com" + className="w-full border border-gray-300 rounded px-3 py-2 text-sm focus:outline-none focus:ring-2 focus:ring-blue-500" + /> +
+
+ + setUsername(e.target.value)} + required + className="w-full border border-gray-300 rounded px-3 py-2 text-sm focus:outline-none focus:ring-2 focus:ring-blue-500" + /> +
+
+ + setPassword(e.target.value)} + required={!credentials} + placeholder={credentials ? "••••••••" : ""} + className="w-full border border-gray-300 rounded px-3 py-2 text-sm focus:outline-none focus:ring-2 focus:ring-blue-500" + /> +
+ + {error && ( +
{error}
+ )} + {success && ( +
{success}
+ )} + +
+ + {credentials && ( + <> + + + + )} +
+
+
+
+ ); +} +``` + +- [ ] **Step 4: Add settings route and sidebar link** + +In `src/App.tsx`, add the settings route: + +```tsx +import SettingsPage from "./components/settings/SettingsPage"; +``` + +Add inside the `}>` block: + +```tsx +} /> +``` + +In `src/components/layout/Sidebar.tsx`, add a settings link at the bottom of the sidebar (before closing ``): + +```tsx +
+ + Settings + +
+``` + +- [ ] **Step 5: Verify TypeScript compiles and frontend builds** + +```bash +cd /home/leclere/Projets/orchai +npx tsc --noEmit +npm run build +``` + +Expected: both succeed. + +- [ ] **Step 6: Commit** + +```bash +git add -A +git commit -m "feat: frontend types, API wrappers, and Settings page for Tuleap credentials" +``` + +--- + +### Task 11: Tracker config + filter builder UI + +**Files:** +- Create: `src/components/trackers/TrackerList.tsx` +- Create: `src/components/trackers/TrackerConfig.tsx` +- Create: `src/components/trackers/FilterBuilder.tsx` +- Modify: `src/App.tsx` (add tracker routes) + +- [ ] **Step 1: Create FilterBuilder component** + +Create `src/components/trackers/FilterBuilder.tsx`: + +```tsx +import type { FilterGroup, Filter, TrackerField } from "../../lib/types"; + +interface Props { + groups: FilterGroup[]; + onChange: (groups: FilterGroup[]) => void; + availableFields: TrackerField[]; +} + +export default function FilterBuilder({ groups, onChange, availableFields }: Props) { + function addGroup() { + onChange([...groups, { conditions: [{ field: "", operator: "In", value: [] }] }]); + } + + function removeGroup(groupIdx: number) { + onChange(groups.filter((_, i) => i !== groupIdx)); + } + + function addCondition(groupIdx: number) { + const updated = [...groups]; + updated[groupIdx] = { + conditions: [...updated[groupIdx].conditions, { field: "", operator: "In", value: [] }], + }; + onChange(updated); + } + + function removeCondition(groupIdx: number, condIdx: number) { + const updated = [...groups]; + updated[groupIdx] = { + conditions: updated[groupIdx].conditions.filter((_, i) => i !== condIdx), + }; + if (updated[groupIdx].conditions.length === 0) { + onChange(updated.filter((_, i) => i !== groupIdx)); + } else { + onChange(updated); + } + } + + function updateCondition(groupIdx: number, condIdx: number, field: keyof Filter, val: string | string[]) { + const updated = [...groups]; + const condition = { 
...updated[groupIdx].conditions[condIdx] }; + if (field === "value") { + condition.value = val as string[]; + } else { + condition[field] = val as string; + // Reset value when field changes + if (field === "field") { + condition.value = []; + } + } + updated[groupIdx] = { + conditions: updated[groupIdx].conditions.map((c, i) => (i === condIdx ? condition : c)), + }; + onChange(updated); + } + + function getFieldValues(fieldLabel: string): string[] { + const field = availableFields.find((f) => f.label === fieldLabel); + return field?.values.map((v) => v.label) ?? []; + } + + function toggleValue(groupIdx: number, condIdx: number, val: string) { + const condition = groups[groupIdx].conditions[condIdx]; + const newValues = condition.value.includes(val) + ? condition.value.filter((v) => v !== val) + : [...condition.value, val]; + updateCondition(groupIdx, condIdx, "value", newValues); + } + + return ( +
+ {groups.map((group, groupIdx) => ( +
+
+ + {groupIdx > 0 && "AND "}Group {groupIdx + 1} + + +
+ + {group.conditions.map((condition, condIdx) => ( +
+ {condIdx > 0 && ( + OR + )} + + + + +
+ {condition.field && getFieldValues(condition.field).map((val) => ( + + ))} +
+ + +
+ ))} + + +
+ ))} + + +
+ ); +} +``` + +- [ ] **Step 2: Create TrackerConfig component** + +Create `src/components/trackers/TrackerConfig.tsx`: + +```tsx +import { useState, useEffect } from "react"; +import { useNavigate, useParams } from "react-router-dom"; +import { addTracker, getTrackerFields } from "../../lib/api"; +import type { AgentConfig, FilterGroup, TrackerField } from "../../lib/types"; +import FilterBuilder from "./FilterBuilder"; + +export default function TrackerConfig() { + const navigate = useNavigate(); + const { projectId } = useParams(); + + const [trackerId, setTrackerId] = useState(""); + const [trackerLabel, setTrackerLabel] = useState(""); + const [pollingInterval, setPollingInterval] = useState(10); + const [filters, setFilters] = useState([]); + const [agentConfig, setAgentConfig] = useState({ + analyst_command: "claude", + analyst_args: ["--print"], + developer_command: "claude", + developer_args: ["--print"], + }); + const [availableFields, setAvailableFields] = useState([]); + const [error, setError] = useState(null); + const [loading, setLoading] = useState(false); + const [loadingFields, setLoadingFields] = useState(false); + + async function handleLoadFields() { + const id = parseInt(trackerId, 10); + if (isNaN(id)) return; + setLoadingFields(true); + setError(null); + try { + const fields = await getTrackerFields(id); + setAvailableFields(fields); + } catch (err: unknown) { + setError(err instanceof Error ? err.message : String(err)); + } finally { + setLoadingFields(false); + } + } + + async function handleSubmit(e: React.FormEvent) { + e.preventDefault(); + if (!projectId) return; + setError(null); + setLoading(true); + try { + await addTracker( + projectId, + parseInt(trackerId, 10), + trackerLabel, + pollingInterval, + agentConfig, + filters + ); + navigate(`/projects/${projectId}`); + } catch (err: unknown) { + setError(err instanceof Error ? err.message : String(err)); + } finally { + setLoading(false); + } + } + + return ( +
+

Add Tracker

+ +
+
+

Tracker

+
+
+ + setTrackerId(e.target.value)} + required + className="w-full border border-gray-300 rounded px-3 py-2 text-sm" + /> +
+
+ + setTrackerLabel(e.target.value)} + required + placeholder="e.g. Bugs" + className="w-full border border-gray-300 rounded px-3 py-2 text-sm" + /> +
+
+
+ + setPollingInterval(parseInt(e.target.value, 10) || 10)} + min={1} + className="w-32 border border-gray-300 rounded px-3 py-2 text-sm" + /> +
+ +
+ + {availableFields.length > 0 && ( +
+

Filters

+ +
+ )} + +
+

Agent Configuration

+
+
+ + setAgentConfig({ ...agentConfig, analyst_command: e.target.value })} + className="w-full border border-gray-300 rounded px-3 py-2 text-sm" + /> +
+
+ + setAgentConfig({ ...agentConfig, developer_command: e.target.value })} + className="w-full border border-gray-300 rounded px-3 py-2 text-sm" + /> +
+
+
+ + {error && ( +
{error}
+ )} + +
+ + +
+
+
+ ); +} +``` + +- [ ] **Step 3: Create TrackerList component** + +Create `src/components/trackers/TrackerList.tsx`: + +```tsx +import { useState } from "react"; +import { Link } from "react-router-dom"; +import { removeTracker, manualPoll, updateTracker } from "../../lib/api"; +import type { WatchedTracker } from "../../lib/types"; + +interface Props { + trackers: WatchedTracker[]; + projectId: string; + onRefresh: () => void; +} + +export default function TrackerList({ trackers, projectId, onRefresh }: Props) { + const [pollingId, setPollingId] = useState(null); + + async function handlePoll(tracker: WatchedTracker) { + setPollingId(tracker.id); + try { + const newTickets = await manualPoll(tracker.id); + if (newTickets.length > 0) { + alert(`${newTickets.length} new ticket(s) detected`); + } else { + alert("No new tickets"); + } + onRefresh(); + } catch (err: unknown) { + alert(err instanceof Error ? err.message : String(err)); + } finally { + setPollingId(null); + } + } + + async function handleToggle(tracker: WatchedTracker) { + await updateTracker( + tracker.id, + tracker.polling_interval, + tracker.agent_config, + tracker.filters, + !tracker.enabled + ); + onRefresh(); + } + + async function handleRemove(tracker: WatchedTracker) { + if (!window.confirm(`Remove tracker "${tracker.tracker_label}"?`)) return; + await removeTracker(tracker.id); + onRefresh(); + } + + if (trackers.length === 0) { + return ( +
+ No trackers configured.{" "} + + Add one + +
+ ); + } + + return ( +
+ {trackers.map((tracker) => ( +
+
+ {tracker.tracker_label} + #{tracker.tracker_id} + + {tracker.enabled ? "Active" : "Paused"} + + {tracker.last_polled_at && ( + + Last poll: {new Date(tracker.last_polled_at).toLocaleTimeString()} + + )} +
+
+ + + +
+
+ ))} + + + Add tracker + +
+ ); +} +``` + +- [ ] **Step 4: Add tracker routes in App.tsx** + +Add import: + +```tsx +import TrackerConfig from "./components/trackers/TrackerConfig"; +``` + +Add route inside `}>`: + +```tsx +} /> +``` + +- [ ] **Step 5: Verify frontend builds** + +```bash +cd /home/leclere/Projets/orchai +npx tsc --noEmit +npm run build +``` + +- [ ] **Step 6: Commit** + +```bash +git add -A +git commit -m "feat: tracker config UI with visual AND/OR filter builder" +``` + +--- + +### Task 12: Update project dashboard + final verification + +**Files:** +- Modify: `src/components/projects/ProjectDashboard.tsx` + +- [ ] **Step 1: Update ProjectDashboard to show trackers and recent tickets** + +Replace `src/components/projects/ProjectDashboard.tsx`: + +```tsx +import { useEffect, useState } from "react"; +import { useParams, Link, useNavigate } from "react-router-dom"; +import { getProject, deleteProject, listTrackers, listProcessedTickets } from "../../lib/api"; +import type { Project, WatchedTracker, ProcessedTicket } from "../../lib/types"; +import TrackerList from "../trackers/TrackerList"; + +export default function ProjectDashboard() { + const { projectId } = useParams(); + const navigate = useNavigate(); + const [project, setProject] = useState(null); + const [trackers, setTrackers] = useState([]); + const [tickets, setTickets] = useState([]); + + function loadData() { + if (!projectId) return; + getProject(projectId).then(setProject); + listTrackers(projectId).then(setTrackers); + listProcessedTickets(projectId).then(setTickets); + } + + useEffect(() => { + loadData(); + }, [projectId]); + + async function handleDelete() { + if (!projectId || !project) return; + if (!window.confirm(`Delete project "${project.name}"?`)) return; + await deleteProject(projectId); + window.dispatchEvent(new Event("orchai:refresh-projects")); + navigate("/"); + } + + if (!project) { + return
Loading...
; + } + + return ( +
+
+

{project.name}

+
+ + Edit + + +
+
+ +
+
+ Path: + {project.path} +
+ {project.cloned_from && ( +
+ Cloned from: + {project.cloned_from} +
+ )} +
+ Base branch: + {project.base_branch} +
+
+ +
+

Watched Trackers

+ +
+ + {tickets.length > 0 && ( +
+

Recent Tickets

+
+ {tickets.slice(0, 10).map((ticket) => ( +
+
+ #{ticket.artifact_id} + {ticket.artifact_title} +
+ + {ticket.status} + +
+ ))} +
+
+ )} +
+ ); +} +``` + +- [ ] **Step 2: Run all Rust tests** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo test +``` + +Expected: all tests pass (11 Phase 1 + 7 crypto + 5 credential + 7 tuleap_client + 7 tracker + 7 filter + 5 ticket = 49 tests). + +- [ ] **Step 3: Run clippy** + +```bash +cd /home/leclere/Projets/orchai/src-tauri +PKG_CONFIG_PATH="/tmp/mypc:$PKG_CONFIG_PATH" cargo clippy -- -D warnings +``` + +Fix any warnings. + +- [ ] **Step 4: Verify frontend builds** + +```bash +cd /home/leclere/Projets/orchai +npx tsc --noEmit +npm run build +``` + +- [ ] **Step 5: Commit** + +```bash +git add -A +git commit -m "feat: updated project dashboard with tracker list and recent tickets" +```