Compare commits

...

8 commits

36 changed files with 1601 additions and 385 deletions

View file

@ -0,0 +1,33 @@
-- Migration 011: introduce the "review" pipeline step.
-- Seeds a default reviewer agent, backfills existing trackers and Graylog
-- credentials with it, and adds a column for the review report.

-- Seed the default reviewer agent, but only when no default reviewer
-- exists yet (keeps the migration safe to run on databases that already
-- have one).
INSERT INTO agents (id, name, role, tool, custom_prompt, is_default, created_at, updated_at)
SELECT
'default-reviewer-agent',
'Default Reviewer',
'reviewer',
'codex',
'',
1,
strftime('%Y-%m-%dT%H:%M:%fZ', 'now'),
strftime('%Y-%m-%dT%H:%M:%fZ', 'now')
WHERE NOT EXISTS (
SELECT 1 FROM agents WHERE role = 'reviewer' AND is_default = 1
);
-- New reviewer column on trackers; backfill so no tracker is left without
-- a reviewer after the column appears.
ALTER TABLE watched_trackers ADD COLUMN reviewer_agent_id TEXT REFERENCES agents(id);
UPDATE watched_trackers
SET reviewer_agent_id = 'default-reviewer-agent'
WHERE reviewer_agent_id IS NULL OR TRIM(reviewer_agent_id) = '';
-- Recompute tracker validity: a tracker is valid only when all three
-- agent roles (analyst, developer, reviewer) are assigned.
UPDATE watched_trackers
SET status = CASE
WHEN analyst_agent_id IS NULL OR developer_agent_id IS NULL OR reviewer_agent_id IS NULL THEN 'invalid'
ELSE 'valid'
END;
-- Same reviewer column + backfill for per-project Graylog credentials.
ALTER TABLE graylog_credentials ADD COLUMN reviewer_agent_id TEXT REFERENCES agents(id);
UPDATE graylog_credentials
SET reviewer_agent_id = 'default-reviewer-agent'
WHERE reviewer_agent_id IS NULL OR TRIM(reviewer_agent_id) = '';
-- Stores the review agent's output alongside analyst/developer reports.
ALTER TABLE processed_tickets ADD COLUMN review_report TEXT;

View file

@ -11,6 +11,7 @@ fn agent_role_label(role: &AgentRole) -> &'static str {
match role {
AgentRole::Analyst => "analyste",
AgentRole::Developer => "developpeur",
AgentRole::Reviewer => "reviewer",
}
}
@ -141,26 +142,21 @@ pub fn update_agent(
Agent::update(&db, &id, &name, role.clone(), tool, &custom_prompt)?;
if previous.role != role {
match role {
AgentRole::Analyst => {
db.execute(
"UPDATE watched_trackers
SET developer_agent_id = NULL,
status = 'invalid'
WHERE developer_agent_id = ?1",
params![id],
)?;
}
AgentRole::Developer => {
db.execute(
"UPDATE watched_trackers
SET analyst_agent_id = NULL,
status = 'invalid'
WHERE analyst_agent_id = ?1",
params![id],
)?;
}
}
db.execute(
"UPDATE watched_trackers
SET analyst_agent_id = CASE WHEN analyst_agent_id = ?1 THEN NULL ELSE analyst_agent_id END,
developer_agent_id = CASE WHEN developer_agent_id = ?1 THEN NULL ELSE developer_agent_id END,
reviewer_agent_id = CASE WHEN reviewer_agent_id = ?1 THEN NULL ELSE reviewer_agent_id END,
status = CASE
WHEN (CASE WHEN analyst_agent_id = ?1 THEN NULL ELSE analyst_agent_id END) IS NULL
OR (CASE WHEN developer_agent_id = ?1 THEN NULL ELSE developer_agent_id END) IS NULL
OR (CASE WHEN reviewer_agent_id = ?1 THEN NULL ELSE reviewer_agent_id END) IS NULL
THEN 'invalid'
ELSE 'valid'
END
WHERE analyst_agent_id = ?1 OR developer_agent_id = ?1 OR reviewer_agent_id = ?1",
params![id],
)?;
}
Ok(())

View file

@ -12,13 +12,15 @@ fn validate_input(
base_url: &str,
analyst_agent_id: &str,
developer_agent_id: &str,
reviewer_agent_id: &str,
polling_interval_minutes: i32,
lookback_minutes: i32,
score_threshold: i32,
) -> Result<(String, String, String), AppError> {
) -> Result<(String, String, String, String), AppError> {
let base_url = base_url.trim().to_string();
let analyst_agent_id = analyst_agent_id.trim().to_string();
let developer_agent_id = developer_agent_id.trim().to_string();
let reviewer_agent_id = reviewer_agent_id.trim().to_string();
if base_url.is_empty() {
return Err(AppError::from("Graylog URL is required".to_string()));
@ -29,6 +31,9 @@ fn validate_input(
if developer_agent_id.is_empty() {
return Err(AppError::from("Developer agent is required".to_string()));
}
if reviewer_agent_id.is_empty() {
return Err(AppError::from("Reviewer agent is required".to_string()));
}
if polling_interval_minutes <= 0 {
return Err(AppError::from(
"Polling interval must be strictly positive".to_string(),
@ -45,7 +50,12 @@ fn validate_input(
));
}
Ok((base_url, analyst_agent_id, developer_agent_id))
Ok((
base_url,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
))
}
#[tauri::command]
@ -57,16 +67,18 @@ pub fn set_graylog_credentials(
api_token: String,
analyst_agent_id: String,
developer_agent_id: String,
reviewer_agent_id: String,
stream_id: Option<String>,
query_filter: String,
polling_interval_minutes: i32,
lookback_minutes: i32,
score_threshold: i32,
) -> Result<GraylogCredentialsSafe, AppError> {
let (base_url, analyst_agent_id, developer_agent_id) = validate_input(
let (base_url, analyst_agent_id, developer_agent_id, reviewer_agent_id) = validate_input(
&base_url,
&analyst_agent_id,
&developer_agent_id,
&reviewer_agent_id,
polling_interval_minutes,
lookback_minutes,
score_threshold,
@ -97,7 +109,11 @@ pub fn set_graylog_credentials(
&token_encrypted,
&analyst_agent_id,
&developer_agent_id,
stream_id.as_deref().map(str::trim).filter(|v| !v.is_empty()),
&reviewer_agent_id,
stream_id
.as_deref()
.map(str::trim)
.filter(|v| !v.is_empty()),
query_filter.trim(),
polling_interval_minutes,
lookback_minutes,

View file

@ -1,8 +1,9 @@
use crate::error::AppError;
use crate::models::ticket::ProcessedTicket;
use crate::models::project::Project;
use crate::models::ticket::{ProcessedTicket, RetryFromStep};
use crate::models::worktree::Worktree;
use crate::AppState;
use serde::Serialize;
use serde::{Deserialize, Serialize};
use tauri::State;
#[derive(Debug, Clone, Serialize)]
@ -11,6 +12,128 @@ pub struct TicketResult {
pub worktree: Option<Worktree>,
}
/// Pipeline step a ticket retry should restart from, as requested by the
/// frontend.
///
/// Deserialized from the command payload; serde maps the lowercase strings
/// "analyst", "developer" and "review" onto the variants.
#[derive(Debug, Clone, Copy, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum RetryStep {
    Analyst,
    Developer,
    Review,
}
impl RetryStep {
    /// Map this command-layer step onto the model-layer enum consumed by
    /// `ProcessedTicket::reset_for_retry_from_step`.
    fn to_model_step(self) -> RetryFromStep {
        match self {
            Self::Analyst => RetryFromStep::Analyst,
            Self::Developer => RetryFromStep::Developer,
            Self::Review => RetryFromStep::Review,
        }
    }
}
/// A ticket may be retried only once it has reached a terminal state.
fn can_retry_ticket_status(status: &str) -> bool {
    ["Error", "Done", "NoFix", "Cancelled"]
        .iter()
        .any(|terminal| *terminal == status)
}
/// True when the optional string holds something other than whitespace.
fn has_non_empty_text(value: &Option<String>) -> bool {
    match value.as_deref() {
        Some(text) => !text.trim().is_empty(),
        None => false,
    }
}
/// Check that `ticket` carries the artifacts required to restart the
/// pipeline from `step`.
///
/// - `Analyst`: no prerequisites — the pipeline restarts from scratch.
/// - `Developer`: needs a non-empty analyst report to work from.
/// - `Review`: needs non-empty analyst and developer reports plus the
///   worktree metadata (path and branch) the review step operates on.
///
/// Returns `Ok(())` when the retry may proceed, otherwise an `AppError`
/// naming the missing prerequisite.
fn validate_retry_step_preconditions(ticket: &ProcessedTicket, step: RetryStep) -> Result<(), AppError> {
    match step {
        RetryStep::Analyst => Ok(()),
        RetryStep::Developer => {
            if !has_non_empty_text(&ticket.analyst_report) {
                return Err(AppError::from(
                    "Cannot retry developer step without an analyst report".to_string(),
                ));
            }
            Ok(())
        }
        RetryStep::Review => {
            if !has_non_empty_text(&ticket.analyst_report) {
                return Err(AppError::from(
                    "Cannot retry review step without an analyst report".to_string(),
                ));
            }
            if !has_non_empty_text(&ticket.developer_report) {
                return Err(AppError::from(
                    "Cannot retry review step without a developer report".to_string(),
                ));
            }
            // Reuse has_non_empty_text instead of duplicating the
            // trim-and-check logic inline for the worktree fields.
            if !has_non_empty_text(&ticket.worktree_path)
                || !has_non_empty_text(&ticket.branch_name)
            {
                return Err(AppError::from(
                    "Cannot retry review step without worktree metadata".to_string(),
                ));
            }
            Ok(())
        }
    }
}
/// Reset a ticket so processing restarts from `step`, tearing down its git
/// worktree when the rerun will recreate it.
///
/// The DB mutex is held only inside the first block; the filesystem-level
/// worktree removal happens after the lock is released, and the worktree
/// row is deleted under a fresh lock afterwards.
fn retry_ticket_internal(
    state: &State<'_, AppState>,
    ticket_id: &str,
    step: RetryStep,
) -> Result<(), AppError> {
    // (worktree, Some(project_path)) => remove from disk then delete row;
    // (worktree, None)               => delete the row only.
    let cleanup_target: Option<(Worktree, Option<String>)> = {
        let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
        let ticket = ProcessedTicket::get_by_id(&conn, ticket_id)?;
        // Only terminal tickets (Error/Done/NoFix/Cancelled) may be retried.
        if !can_retry_ticket_status(&ticket.status) {
            return Err(AppError::from(format!(
                "Cannot retry ticket with status '{}'",
                ticket.status
            )));
        }
        validate_retry_step_preconditions(&ticket, step)?;
        ProcessedTicket::reset_for_retry_from_step(&conn, ticket_id, step.to_model_step())?;
        // Review retries keep the existing worktree (the review step needs
        // it); only analyst/developer retries trigger cleanup.
        if !matches!(step, RetryStep::Analyst | RetryStep::Developer) {
            None
        } else if let Some(wt) = Worktree::get_by_ticket_id(&conn, ticket_id)? {
            if wt.status == "Active" {
                // Active worktrees need the project path for on-disk removal.
                let project = Project::get_by_id(&conn, &ticket.project_id)?;
                Some((wt, Some(project.path)))
            } else {
                Some((wt, None))
            }
        } else {
            None
        }
    };
    // Best-effort disk cleanup: a failure here must not abort the retry.
    if let Some((worktree, Some(path))) = &cleanup_target {
        let _ = crate::services::worktree_manager::delete_worktree(
            path,
            &worktree.path,
            &worktree.branch_name,
        );
    }
    // Remove the stale worktree row regardless of disk cleanup outcome.
    if let Some((worktree, _)) = cleanup_target {
        let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
        Worktree::delete(&conn, &worktree.id)?;
    }
    Ok(())
}
#[tauri::command]
pub fn get_ticket_result(
state: State<'_, AppState>,
@ -24,66 +147,25 @@ pub fn get_ticket_result(
#[tauri::command]
pub fn retry_ticket(state: State<'_, AppState>, ticket_id: String) -> Result<(), AppError> {
let active_worktree_cleanup: Option<(crate::models::worktree::Worktree, String)> = {
let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
let ticket = ProcessedTicket::get_by_id(&conn, &ticket_id)?;
if ticket.status != "Error" && ticket.status != "Done" && ticket.status != "Cancelled" {
return Err(AppError::from(format!(
"Cannot retry ticket with status '{}'",
ticket.status
)));
}
ProcessedTicket::update_status(&conn, &ticket_id, "Pending")?;
conn.execute(
"UPDATE processed_tickets SET analyst_report = NULL, developer_report = NULL, \
worktree_path = NULL, branch_name = NULL, processed_at = NULL WHERE id = ?1",
rusqlite::params![ticket_id],
)?;
let cleanup_target = if let Some(wt) = Worktree::get_by_ticket_id(&conn, &ticket_id)? {
if wt.status == "Active" {
let project = crate::models::project::Project::get_by_id(&conn, &ticket.project_id)?;
Some((wt, project.path))
} else {
Some((wt, String::new()))
}
} else {
None
};
cleanup_target
};
if let Some((wt, project_path)) = &active_worktree_cleanup {
if wt.status == "Active" {
let _ = crate::services::worktree_manager::delete_worktree(
project_path,
&wt.path,
&wt.branch_name,
);
}
}
if let Some((wt, _)) = active_worktree_cleanup {
let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
Worktree::delete(&conn, &wt.id)?;
}
Ok(())
retry_ticket_internal(&state, &ticket_id, RetryStep::Analyst)
}
#[tauri::command]
pub async fn cancel_ticket(
pub fn retry_ticket_step(
state: State<'_, AppState>,
ticket_id: String,
step: RetryStep,
) -> Result<(), AppError> {
retry_ticket_internal(&state, &ticket_id, step)
}
#[tauri::command]
pub async fn cancel_ticket(state: State<'_, AppState>, ticket_id: String) -> Result<(), AppError> {
{
let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
let ticket = ProcessedTicket::get_by_id(&conn, &ticket_id)?;
if ticket.status == "Done" || ticket.status == "Cancelled" {
if ticket.status == "Done" || ticket.status == "NoFix" || ticket.status == "Cancelled" {
return Err(AppError::from(format!(
"Cannot cancel ticket with status '{}'",
ticket.status
@ -95,7 +177,7 @@ pub async fn cancel_ticket(
let conn = state.db.lock().map_err(|e| AppError::from(e.to_string()))?;
let ticket = ProcessedTicket::get_by_id(&conn, &ticket_id)?;
if ticket.status == "Done" || ticket.status == "Cancelled" {
if ticket.status == "Done" || ticket.status == "NoFix" || ticket.status == "Cancelled" {
return Err(AppError::from(format!(
"Cannot cancel ticket with status '{}'",
ticket.status

View file

@ -225,6 +225,7 @@ fn step_from_status(status: &str) -> &'static str {
match status {
"Developing" => "developer",
"Analyzing" => "analyst",
"Reviewing" => "review",
_ => "processing",
}
}
@ -250,7 +251,10 @@ pub fn get_runtime_activity(
let mut agents_by_ticket_id: HashMap<String, RuntimeActiveAgent> = HashMap::new();
for ticket in &project_tickets {
if ticket.status == "Analyzing" || ticket.status == "Developing" {
if ticket.status == "Analyzing"
|| ticket.status == "Developing"
|| ticket.status == "Reviewing"
{
agents_by_ticket_id.insert(
ticket.id.clone(),
RuntimeActiveAgent {

View file

@ -12,9 +12,7 @@ const TASK_LIST_TRUNCATION_NOTICE: &str =
"\n\n[... contenu tronque pour preserver la fluidite de l'interface ...]";
fn truncate_for_task_list(value: Option<String>, max_bytes: usize) -> Option<String> {
let Some(content) = value else {
return None;
};
let content = value?;
if content.len() <= max_bytes {
return Some(content);

View file

@ -58,6 +58,7 @@ pub struct AddTrackerPayload {
pub polling_interval: i32,
pub analyst_agent_id: String,
pub developer_agent_id: String,
pub reviewer_agent_id: String,
pub filters: Vec<FilterGroup>,
}
@ -73,6 +74,7 @@ pub fn add_tracker(
ensure_agent_role(&db, &payload.analyst_agent_id, AgentRole::Analyst)?;
ensure_agent_role(&db, &payload.developer_agent_id, AgentRole::Developer)?;
ensure_agent_role(&db, &payload.reviewer_agent_id, AgentRole::Reviewer)?;
let tracker = WatchedTracker::insert(
&db,
@ -83,6 +85,7 @@ pub fn add_tracker(
polling_interval: payload.polling_interval,
analyst_agent_id: payload.analyst_agent_id,
developer_agent_id: payload.developer_agent_id,
reviewer_agent_id: payload.reviewer_agent_id,
filters: payload.filters,
},
)?;
@ -117,6 +120,7 @@ pub fn update_tracker(
ensure_agent_role(&db, &update.analyst_agent_id, AgentRole::Analyst)?;
ensure_agent_role(&db, &update.developer_agent_id, AgentRole::Developer)?;
ensure_agent_role(&db, &update.reviewer_agent_id, AgentRole::Reviewer)?;
WatchedTracker::update(&db, &id, update)?;
Ok(())

View file

@ -11,6 +11,7 @@ const MIGRATION_007: &str = include_str!("../migrations/007_normalize_timestamps
const MIGRATION_008: &str = include_str!("../migrations/008_project_scoped_tuleap_credentials.sql");
const MIGRATION_009: &str = include_str!("../migrations/009_graylog_auto_resolve.sql");
const MIGRATION_010: &str = include_str!("../migrations/010_agent_runtime_status.sql");
const MIGRATION_011: &str = include_str!("../migrations/011_add_review_step.sql");
pub fn init(db_path: &Path) -> Result<Connection> {
let conn = Connection::open(db_path)?;
@ -76,6 +77,10 @@ fn migrate(conn: &Connection) -> Result<()> {
conn.execute_batch(MIGRATION_010)?;
conn.pragma_update(None, "user_version", 10)?;
}
if version < 11 {
conn.execute_batch(MIGRATION_011)?;
conn.pragma_update(None, "user_version", 11)?;
}
Ok(())
}
@ -134,7 +139,7 @@ mod tests {
let version: i32 = conn
.pragma_query_value(None, "user_version", |row| row.get(0))
.unwrap();
assert_eq!(version, 10);
assert_eq!(version, 11);
}
#[test]
@ -155,9 +160,17 @@ mod tests {
|row| row.get(0),
)
.unwrap();
let reviewer_defaults: i32 = conn
.query_row(
"SELECT COUNT(*) FROM agents WHERE role = 'reviewer' AND is_default = 1",
[],
|row| row.get(0),
)
.unwrap();
assert_eq!(analyst_defaults, 1);
assert_eq!(developer_defaults, 1);
assert_eq!(reviewer_defaults, 1);
}
#[test]

View file

@ -17,9 +17,12 @@ pub struct AppState {
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
tauri::Builder::default()
.plugin(tauri_plugin_dialog::init())
.plugin(tauri_plugin_notification::init())
let builder = tauri::Builder::default().plugin(tauri_plugin_dialog::init());
#[cfg(not(target_os = "linux"))]
let builder = builder.plugin(tauri_plugin_notification::init());
builder
.setup(|app| {
let db_dir = app.path().app_data_dir()?;
std::fs::create_dir_all(&db_dir)?;
@ -112,6 +115,7 @@ pub fn run() {
commands::module::set_project_module_enabled,
commands::orchestrator::get_ticket_result,
commands::orchestrator::retry_ticket,
commands::orchestrator::retry_ticket_step,
commands::orchestrator::cancel_ticket,
commands::live_agent::create_live_session,
commands::live_agent::list_live_sessions,

View file

@ -4,12 +4,14 @@ use uuid::Uuid;
pub const DEFAULT_ANALYST_AGENT_ID: &str = "default-analyst-agent";
pub const DEFAULT_DEVELOPER_AGENT_ID: &str = "default-developer-agent";
pub const DEFAULT_REVIEWER_AGENT_ID: &str = "default-reviewer-agent";
/// Role an agent plays in the ticket pipeline.
///
/// Serialized as snake_case strings ("analyst", "developer", "reviewer"),
/// matching the values stored in the `agents.role` column.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum AgentRole {
    Analyst,
    Developer,
    Reviewer,
}
impl AgentRole {
@ -17,6 +19,7 @@ impl AgentRole {
match self {
AgentRole::Analyst => "analyst",
AgentRole::Developer => "developer",
AgentRole::Reviewer => "reviewer",
}
}
@ -24,6 +27,7 @@ impl AgentRole {
match value {
"analyst" => Ok(AgentRole::Analyst),
"developer" => Ok(AgentRole::Developer),
"reviewer" => Ok(AgentRole::Reviewer),
_ => Err(rusqlite::Error::InvalidParameterName(format!(
"Invalid agent role: {}",
value
@ -74,7 +78,11 @@ impl AgentTool {
"mcp_servers.tuleap.enabled=false".to_string(),
"-".to_string(),
],
AgentTool::ClaudeCode => vec!["-p".to_string()],
AgentTool::ClaudeCode => vec![
"-p".to_string(),
"--permission-mode".to_string(),
"acceptEdits".to_string(),
],
}
}
}
@ -212,6 +220,7 @@ impl Agent {
let default_id = match role {
AgentRole::Analyst => DEFAULT_ANALYST_AGENT_ID,
AgentRole::Developer => DEFAULT_DEVELOPER_AGENT_ID,
AgentRole::Reviewer => DEFAULT_REVIEWER_AGENT_ID,
};
conn.query_row(
@ -326,19 +335,37 @@ impl Agent {
"UPDATE watched_trackers SET analyst_agent_id = ?1 WHERE analyst_agent_id = ?2",
params![default_agent.id, id],
)?;
conn.execute(
"UPDATE graylog_credentials SET analyst_agent_id = ?1 WHERE analyst_agent_id = ?2",
params![default_agent.id, id],
)?;
}
AgentRole::Developer => {
conn.execute(
"UPDATE watched_trackers SET developer_agent_id = ?1 WHERE developer_agent_id = ?2",
params![default_agent.id, id],
)?;
conn.execute(
"UPDATE graylog_credentials SET developer_agent_id = ?1 WHERE developer_agent_id = ?2",
params![default_agent.id, id],
)?;
}
AgentRole::Reviewer => {
conn.execute(
"UPDATE watched_trackers SET reviewer_agent_id = ?1 WHERE reviewer_agent_id = ?2",
params![default_agent.id, id],
)?;
conn.execute(
"UPDATE graylog_credentials SET reviewer_agent_id = ?1 WHERE reviewer_agent_id = ?2",
params![default_agent.id, id],
)?;
}
}
conn.execute(
"UPDATE watched_trackers
SET status = CASE
WHEN analyst_agent_id IS NULL OR developer_agent_id IS NULL THEN 'invalid'
WHEN analyst_agent_id IS NULL OR developer_agent_id IS NULL OR reviewer_agent_id IS NULL THEN 'invalid'
ELSE 'valid'
END",
[],
@ -369,11 +396,14 @@ mod tests {
let analyst = Agent::get_default_by_role(&conn, AgentRole::Analyst).unwrap();
let developer = Agent::get_default_by_role(&conn, AgentRole::Developer).unwrap();
let reviewer = Agent::get_default_by_role(&conn, AgentRole::Reviewer).unwrap();
assert_eq!(analyst.id, DEFAULT_ANALYST_AGENT_ID);
assert_eq!(developer.id, DEFAULT_DEVELOPER_AGENT_ID);
assert_eq!(reviewer.id, DEFAULT_REVIEWER_AGENT_ID);
assert!(analyst.is_default);
assert!(developer.is_default);
assert!(reviewer.is_default);
}
#[test]
@ -415,7 +445,11 @@ mod tests {
);
assert_eq!(
AgentTool::ClaudeCode.to_non_interactive_args(),
vec!["-p".to_string()]
vec![
"-p".to_string(),
"--permission-mode".to_string(),
"acceptEdits".to_string()
]
);
}
@ -515,6 +549,7 @@ mod tests {
let analyst_default = Agent::get_default_by_role(&conn, AgentRole::Analyst).unwrap();
let developer_default = Agent::get_default_by_role(&conn, AgentRole::Developer).unwrap();
let reviewer_default = Agent::get_default_by_role(&conn, AgentRole::Reviewer).unwrap();
let analyst =
Agent::insert(&conn, "Analyst", AgentRole::Analyst, AgentTool::Codex, "").unwrap();
@ -528,6 +563,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst.id.clone(),
developer_agent_id: developer_default.id.clone(),
reviewer_agent_id: reviewer_default.id.clone(),
filters: vec![],
},
)
@ -545,5 +581,9 @@ mod tests {
reloaded.developer_agent_id.as_deref(),
Some(developer_default.id.as_str())
);
assert_eq!(
reloaded.reviewer_agent_id.as_deref(),
Some(reviewer_default.id.as_str())
);
}
}

View file

@ -10,6 +10,7 @@ pub struct GraylogCredentials {
pub api_token_encrypted: String,
pub analyst_agent_id: String,
pub developer_agent_id: String,
pub reviewer_agent_id: String,
pub stream_id: Option<String>,
pub query_filter: String,
pub polling_interval_minutes: i32,
@ -26,6 +27,7 @@ pub struct GraylogCredentialsSafe {
pub base_url: String,
pub analyst_agent_id: String,
pub developer_agent_id: String,
pub reviewer_agent_id: String,
pub stream_id: Option<String>,
pub query_filter: String,
pub polling_interval_minutes: i32,
@ -80,13 +82,14 @@ fn credentials_from_row(row: &rusqlite::Row) -> rusqlite::Result<GraylogCredenti
api_token_encrypted: row.get(3)?,
analyst_agent_id: row.get(4)?,
developer_agent_id: row.get(5)?,
stream_id: row.get(6)?,
query_filter: row.get(7)?,
polling_interval_minutes: row.get(8)?,
lookback_minutes: row.get(9)?,
score_threshold: row.get(10)?,
created_at: row.get(11)?,
updated_at: row.get(12)?,
reviewer_agent_id: row.get(6)?,
stream_id: row.get(7)?,
query_filter: row.get(8)?,
polling_interval_minutes: row.get(9)?,
lookback_minutes: row.get(10)?,
score_threshold: row.get(11)?,
created_at: row.get(12)?,
updated_at: row.get(13)?,
})
}
@ -136,6 +139,7 @@ impl GraylogCredentials {
api_token_encrypted: &str,
analyst_agent_id: &str,
developer_agent_id: &str,
reviewer_agent_id: &str,
stream_id: Option<&str>,
query_filter: &str,
polling_interval_minutes: i32,
@ -153,6 +157,7 @@ impl GraylogCredentials {
api_token_encrypted,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
stream_id,
query_filter,
polling_interval_minutes,
@ -160,12 +165,13 @@ impl GraylogCredentials {
score_threshold,
created_at,
updated_at
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13)
) VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14)
ON CONFLICT(project_id) DO UPDATE SET
base_url = excluded.base_url,
api_token_encrypted = excluded.api_token_encrypted,
analyst_agent_id = excluded.analyst_agent_id,
developer_agent_id = excluded.developer_agent_id,
reviewer_agent_id = excluded.reviewer_agent_id,
stream_id = excluded.stream_id,
query_filter = excluded.query_filter,
polling_interval_minutes = excluded.polling_interval_minutes,
@ -179,6 +185,7 @@ impl GraylogCredentials {
api_token_encrypted,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
stream_id,
query_filter,
polling_interval_minutes,
@ -204,6 +211,7 @@ impl GraylogCredentials {
api_token_encrypted,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
stream_id,
query_filter,
polling_interval_minutes,
@ -229,6 +237,7 @@ impl GraylogCredentials {
api_token_encrypted,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
stream_id,
query_filter,
polling_interval_minutes,
@ -259,6 +268,7 @@ impl GraylogCredentials {
base_url: self.base_url.clone(),
analyst_agent_id: self.analyst_agent_id.clone(),
developer_agent_id: self.developer_agent_id.clone(),
reviewer_agent_id: self.reviewer_agent_id.clone(),
stream_id: self.stream_id.clone(),
query_filter: self.query_filter.clone(),
polling_interval_minutes: self.polling_interval_minutes,
@ -567,7 +577,7 @@ mod tests {
use crate::models::ticket::ProcessedTicket;
use rusqlite::Connection;
fn setup() -> (Connection, Project, Agent, Agent) {
fn setup() -> (Connection, Project, Agent, Agent, Agent) {
let conn = db::init_in_memory().expect("db init should succeed");
let project = Project::insert(&conn, "Graylog", "/tmp/graylog", None, "main")
.expect("project insert should succeed");
@ -581,13 +591,15 @@ mod tests {
"",
)
.expect("developer insert should succeed");
let reviewer = Agent::insert(&conn, "Reviewer", AgentRole::Reviewer, AgentTool::Codex, "")
.expect("reviewer insert should succeed");
(conn, project, analyst, developer)
(conn, project, analyst, developer, reviewer)
}
#[test]
fn test_upsert_graylog_credentials_for_project() {
let (conn, project, analyst, developer) = setup();
let (conn, project, analyst, developer, reviewer) = setup();
let creds = GraylogCredentials::upsert_for_project(
&conn,
@ -596,6 +608,7 @@ mod tests {
"enc-token",
&analyst.id,
&developer.id,
&reviewer.id,
Some("stream-1"),
"level:(critical OR error)",
10,
@ -614,6 +627,8 @@ mod tests {
.expect("credentials should exist");
assert_eq!(stored.id, creds.id);
assert_eq!(stored.analyst_agent_id, analyst.id);
assert_eq!(stored.developer_agent_id, developer.id);
assert_eq!(stored.reviewer_agent_id, reviewer.id);
let safe = stored.to_safe();
assert_eq!(safe.project_id, project.id);
@ -627,7 +642,7 @@ mod tests {
#[test]
fn test_upsert_subject_and_get_by_project_and_key() {
let (conn, project, _, _) = setup();
let (conn, project, _, _, _) = setup();
let first_seen = "2026-04-17T08:00:00Z";
let second_seen = "2026-04-17T09:00:00Z";
@ -693,7 +708,7 @@ mod tests {
#[test]
fn test_insert_detection_and_list_by_project_orders_latest_first() {
let (conn, project, _, _) = setup();
let (conn, project, _, _, _) = setup();
let subject = GraylogSubject::upsert_subject(
&conn,
&project.id,
@ -752,7 +767,7 @@ mod tests {
#[test]
fn test_list_by_project_and_message_returns_all_matching_subjects() {
let (conn, project, _, _) = setup();
let (conn, project, _, _, _) = setup();
let message = "timeout while calling payment gateway";
GraylogSubject::upsert_subject(

View file

@ -15,6 +15,7 @@ pub struct ProcessedTicket {
pub status: String,
pub analyst_report: Option<String>,
pub developer_report: Option<String>,
pub review_report: Option<String>,
pub worktree_path: Option<String>,
pub branch_name: Option<String>,
pub detected_at: String,
@ -29,6 +30,13 @@ pub struct ProjectThroughputStats {
pub avg_lead_time_seconds: Option<f64>,
}
/// Pipeline step a ticket retry restarts from; selects which reports and
/// metadata `reset_for_retry_from_step` clears.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum RetryFromStep {
    Analyst,
    Developer,
    Review,
}
fn from_row(row: &rusqlite::Row) -> rusqlite::Result<ProcessedTicket> {
Ok(ProcessedTicket {
id: row.get(0)?,
@ -42,18 +50,19 @@ fn from_row(row: &rusqlite::Row) -> rusqlite::Result<ProcessedTicket> {
status: row.get(8)?,
analyst_report: row.get(9)?,
developer_report: row.get(10)?,
worktree_path: row.get(11)?,
branch_name: row.get(12)?,
detected_at: row.get(13)?,
processed_at: row.get(14)?,
review_report: row.get(11)?,
worktree_path: row.get(12)?,
branch_name: row.get(13)?,
detected_at: row.get(14)?,
processed_at: row.get(15)?,
})
}
const SELECT_ALL_COLS: &str = "SELECT id, tracker_id, project_id, source, source_ref, \
artifact_id, artifact_title, artifact_data, status, analyst_report, developer_report, \
artifact_id, artifact_title, artifact_data, status, analyst_report, developer_report, review_report, \
worktree_path, branch_name, detected_at, processed_at FROM processed_tickets";
const SELECT_SUMMARY_COLS: &str = "SELECT id, tracker_id, project_id, source, source_ref, \
artifact_id, artifact_title, '' AS artifact_data, status, NULL AS analyst_report, NULL AS developer_report, \
artifact_id, artifact_title, '' AS artifact_data, status, NULL AS analyst_report, NULL AS developer_report, NULL AS review_report, \
worktree_path, branch_name, detected_at, processed_at FROM processed_tickets";
impl ProcessedTicket {
@ -93,6 +102,7 @@ impl ProcessedTicket {
status: "Pending".to_string(),
analyst_report: None,
developer_report: None,
review_report: None,
worktree_path: None,
branch_name: None,
detected_at: now,
@ -142,6 +152,7 @@ impl ProcessedTicket {
status: "Pending".to_string(),
analyst_report: None,
developer_report: None,
review_report: None,
worktree_path: None,
branch_name: None,
detected_at: now,
@ -205,13 +216,13 @@ impl ProcessedTicket {
"SELECT
COALESCE(SUM(
CASE
WHEN pt.status NOT IN ('Done', 'Error', 'Cancelled') THEN 1
WHEN pt.status NOT IN ('Done', 'NoFix', 'Error', 'Cancelled') THEN 1
ELSE 0
END
), 0) AS backlog_count,
COALESCE(SUM(
CASE
WHEN pt.status = 'Done'
WHEN pt.status IN ('Done', 'NoFix')
AND pt.processed_at IS NOT NULL
AND julianday(pt.processed_at) >= julianday(?2)
THEN 1
@ -229,7 +240,7 @@ impl ProcessedTicket {
), 0) AS error_last_24h,
AVG(
CASE
WHEN pt.status IN ('Done', 'Error')
WHEN pt.status IN ('Done', 'NoFix', 'Error')
AND pt.processed_at IS NOT NULL
AND julianday(pt.processed_at) >= julianday(?2)
THEN (julianday(pt.processed_at) - julianday(pt.detected_at)) * 86400.0
@ -272,9 +283,17 @@ impl ProcessedTicket {
}
    /// Store the developer agent's report for the given ticket.
    ///
    /// Deliberately leaves `processed_at` untouched: the completion
    /// timestamp is only written when the review step finishes (see
    /// `set_review_report`).
    pub fn set_developer_report(conn: &Connection, id: &str, report: &str) -> Result<()> {
        conn.execute(
            "UPDATE processed_tickets SET developer_report = ?1 WHERE id = ?2",
            params![report, id],
        )?;
        Ok(())
    }
pub fn set_review_report(conn: &Connection, id: &str, report: &str) -> Result<()> {
let now = chrono::Utc::now().to_rfc3339();
conn.execute(
"UPDATE processed_tickets SET developer_report = ?1, processed_at = ?2 WHERE id = ?3",
"UPDATE processed_tickets SET review_report = ?1, processed_at = ?2 WHERE id = ?3",
params![report, now, id],
)?;
Ok(())
@ -305,7 +324,7 @@ impl ProcessedTicket {
pub fn list_inflight(conn: &Connection) -> Result<Vec<ProcessedTicket>> {
let sql = format!(
"{} WHERE status IN ('Analyzing', 'Developing') ORDER BY detected_at ASC",
"{} WHERE status IN ('Analyzing', 'Developing', 'Reviewing') ORDER BY detected_at ASC",
SELECT_ALL_COLS
);
let mut stmt = conn.prepare(&sql)?;
@ -316,7 +335,7 @@ impl ProcessedTicket {
pub fn reset_for_retry(conn: &Connection, id: &str) -> Result<()> {
conn.execute(
"UPDATE processed_tickets \
SET status = 'Pending', analyst_report = NULL, developer_report = NULL, \
SET status = 'Pending', analyst_report = NULL, developer_report = NULL, review_report = NULL, \
worktree_path = NULL, branch_name = NULL, processed_at = NULL \
WHERE id = ?1",
params![id],
@ -324,6 +343,35 @@ impl ProcessedTicket {
Ok(())
}
    /// Reset a ticket to `Pending` so processing restarts from `step`,
    /// clearing only the artifacts the rerun will regenerate.
    ///
    /// - `Analyst`: full reset — all three reports plus worktree/branch.
    /// - `Developer`: keeps the analyst report; clears everything downstream.
    /// - `Review`: keeps analyst/developer reports and the worktree metadata
    ///   the review step needs; clears only the review report.
    ///
    /// `processed_at` is cleared in every case: the ticket is no longer done.
    pub fn reset_for_retry_from_step(
        conn: &Connection,
        id: &str,
        step: RetryFromStep,
    ) -> Result<()> {
        let sql = match step {
            RetryFromStep::Analyst => {
                "UPDATE processed_tickets \
                 SET status = 'Pending', analyst_report = NULL, developer_report = NULL, review_report = NULL, \
                 worktree_path = NULL, branch_name = NULL, processed_at = NULL \
                 WHERE id = ?1"
            }
            RetryFromStep::Developer => {
                "UPDATE processed_tickets \
                 SET status = 'Pending', developer_report = NULL, review_report = NULL, \
                 worktree_path = NULL, branch_name = NULL, processed_at = NULL \
                 WHERE id = ?1"
            }
            RetryFromStep::Review => {
                "UPDATE processed_tickets \
                 SET status = 'Pending', review_report = NULL, processed_at = NULL \
                 WHERE id = ?1"
            }
        };
        conn.execute(sql, params![id])?;
        Ok(())
    }
pub fn set_error(conn: &Connection, id: &str, error_message: &str) -> Result<()> {
let now = chrono::Utc::now().to_rfc3339();
conn.execute(
@ -348,6 +396,8 @@ mod tests {
let analyst = Agent::insert(&conn, "A", AgentRole::Analyst, AgentTool::Codex, "").unwrap();
let developer =
Agent::insert(&conn, "D", AgentRole::Developer, AgentTool::ClaudeCode, "").unwrap();
let reviewer =
Agent::insert(&conn, "R", AgentRole::Reviewer, AgentTool::Codex, "").unwrap();
let tracker = WatchedTracker::insert(
&conn,
NewWatchedTracker {
@ -357,6 +407,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst.id.clone(),
developer_agent_id: developer.id.clone(),
reviewer_agent_id: reviewer.id.clone(),
filters: vec![],
},
)
@ -623,6 +674,19 @@ mod tests {
updated.developer_report.as_deref(),
Some("Fixed in main.rs")
);
assert!(updated.processed_at.is_none());
}
#[test]
fn test_set_review_report_sets_processed_at() {
let (conn, project_id, tracker_id) = setup();
let ticket = ProcessedTicket::insert_if_new(&conn, &project_id, &tracker_id, 2, "T2", "{}")
.unwrap()
.unwrap();
ProcessedTicket::set_review_report(&conn, &ticket.id, "Review ok").unwrap();
let updated = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap();
assert_eq!(updated.review_report.as_deref(), Some("Review ok"));
let processed_at = updated
.processed_at
.as_deref()
@ -678,11 +742,64 @@ mod tests {
assert_eq!(updated.status, "Pending");
assert!(updated.analyst_report.is_none());
assert!(updated.developer_report.is_none());
assert!(updated.review_report.is_none());
assert!(updated.worktree_path.is_none());
assert!(updated.branch_name.is_none());
assert!(updated.processed_at.is_none());
}
#[test]
fn test_reset_for_retry_from_developer_keeps_analysis_and_invalidates_following_steps() {
let (conn, project_id, tracker_id) = setup();
let ticket = ProcessedTicket::insert_if_new(&conn, &project_id, &tracker_id, 1, "T1", "{}")
.unwrap()
.unwrap();
ProcessedTicket::update_status(&conn, &ticket.id, "Done").unwrap();
ProcessedTicket::set_analyst_report(&conn, &ticket.id, "analysis").unwrap();
ProcessedTicket::set_developer_report(&conn, &ticket.id, "dev report").unwrap();
ProcessedTicket::set_review_report(&conn, &ticket.id, "review report").unwrap();
ProcessedTicket::set_worktree_info(&conn, &ticket.id, "/tmp/wt", "orchai/1").unwrap();
ProcessedTicket::reset_for_retry_from_step(&conn, &ticket.id, RetryFromStep::Developer)
.unwrap();
let updated = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap();
assert_eq!(updated.status, "Pending");
assert_eq!(updated.analyst_report.as_deref(), Some("analysis"));
assert!(updated.developer_report.is_none());
assert!(updated.review_report.is_none());
assert!(updated.worktree_path.is_none());
assert!(updated.branch_name.is_none());
assert!(updated.processed_at.is_none());
}
#[test]
fn test_reset_for_retry_from_review_keeps_analysis_and_developer_reports() {
    // Arrange: a fully processed ticket carrying output from every pipeline step.
    let (conn, project_id, tracker_id) = setup();
    let ticket = ProcessedTicket::insert_if_new(&conn, &project_id, &tracker_id, 1, "T1", "{}")
        .unwrap()
        .unwrap();
    ProcessedTicket::update_status(&conn, &ticket.id, "Done").unwrap();
    ProcessedTicket::set_analyst_report(&conn, &ticket.id, "analysis").unwrap();
    ProcessedTicket::set_developer_report(&conn, &ticket.id, "dev report").unwrap();
    ProcessedTicket::set_review_report(&conn, &ticket.id, "review report").unwrap();
    ProcessedTicket::set_worktree_info(&conn, &ticket.id, "/tmp/wt", "orchai/1").unwrap();

    // Act: retry the ticket starting at the review step.
    ProcessedTicket::reset_for_retry_from_step(&conn, &ticket.id, RetryFromStep::Review)
        .unwrap();

    // Assert: analyst and developer reports plus the worktree metadata are kept
    // (review reuses the existing branch); only the review report is invalidated.
    let refreshed = ProcessedTicket::get_by_id(&conn, &ticket.id).unwrap();
    assert_eq!(refreshed.status, "Pending");
    assert_eq!(refreshed.analyst_report.as_deref(), Some("analysis"));
    assert_eq!(refreshed.developer_report.as_deref(), Some("dev report"));
    assert!(refreshed.review_report.is_none());
    assert_eq!(refreshed.worktree_path.as_deref(), Some("/tmp/wt"));
    assert_eq!(refreshed.branch_name.as_deref(), Some("orchai/1"));
    assert!(refreshed.processed_at.is_none());
}
#[test]
fn test_set_error() {
let (conn, project_id, tracker_id) = setup();
@ -772,17 +889,26 @@ mod tests {
&cancelled_detected,
None,
);
insert_ticket_with_timestamps(
&conn,
&project_id,
&tracker_id,
1007,
"NoFix",
&done_detected,
Some(&done_processed),
);
let stats = ProcessedTicket::get_project_throughput_stats(&conn, &project_id).unwrap();
assert_eq!(stats.backlog_count, 2);
assert_eq!(stats.done_last_24h, 1);
assert_eq!(stats.done_last_24h, 2);
assert_eq!(stats.error_last_24h, 1);
let avg = stats
.avg_lead_time_seconds
.expect("avg lead time should be available");
assert!((avg - 10800.0).abs() < 1.0);
assert!((avg - 9600.0).abs() < 1.0);
}
#[test]
@ -844,6 +970,8 @@ mod tests {
assert!(!tickets.is_empty());
assert!(tickets.iter().all(|ticket| ticket.artifact_data.is_empty()));
assert!(tickets.iter().all(|ticket| ticket.analyst_report.is_none()));
assert!(tickets.iter().all(|ticket| ticket.developer_report.is_none()));
assert!(tickets
.iter()
.all(|ticket| ticket.developer_report.is_none()));
}
}

View file

@ -23,6 +23,7 @@ pub struct WatchedTracker {
pub polling_interval: i32,
pub analyst_agent_id: Option<String>,
pub developer_agent_id: Option<String>,
pub reviewer_agent_id: Option<String>,
pub filters: Vec<FilterGroup>,
pub enabled: bool,
pub status: String,
@ -37,6 +38,7 @@ pub struct TrackerUpdate {
pub polling_interval: i32,
pub analyst_agent_id: String,
pub developer_agent_id: String,
pub reviewer_agent_id: String,
pub filters: Vec<FilterGroup>,
pub enabled: bool,
}
@ -49,6 +51,7 @@ pub struct NewWatchedTracker {
pub polling_interval: i32,
pub analyst_agent_id: String,
pub developer_agent_id: String,
pub reviewer_agent_id: String,
pub filters: Vec<FilterGroup>,
}
@ -64,8 +67,9 @@ fn normalize_agent_id(agent_id: &str) -> Option<String> {
fn compute_status(
analyst_agent_id: &Option<String>,
developer_agent_id: &Option<String>,
reviewer_agent_id: &Option<String>,
) -> String {
if analyst_agent_id.is_some() && developer_agent_id.is_some() {
if analyst_agent_id.is_some() && developer_agent_id.is_some() && reviewer_agent_id.is_some() {
"valid".to_string()
} else {
"invalid".to_string()
@ -73,11 +77,11 @@ fn compute_status(
}
fn from_row(row: &rusqlite::Row) -> rusqlite::Result<WatchedTracker> {
let filters_json: String = row.get(7)?;
let enabled_int: i32 = row.get(8)?;
let filters_json: String = row.get(8)?;
let enabled_int: i32 = row.get(9)?;
let filters: Vec<FilterGroup> = serde_json::from_str(&filters_json).map_err(|e| {
rusqlite::Error::FromSqlConversionFailure(7, rusqlite::types::Type::Text, Box::new(e))
rusqlite::Error::FromSqlConversionFailure(8, rusqlite::types::Type::Text, Box::new(e))
})?;
Ok(WatchedTracker {
@ -88,11 +92,12 @@ fn from_row(row: &rusqlite::Row) -> rusqlite::Result<WatchedTracker> {
polling_interval: row.get(4)?,
analyst_agent_id: row.get(5)?,
developer_agent_id: row.get(6)?,
reviewer_agent_id: row.get(7)?,
filters,
enabled: enabled_int != 0,
status: row.get(9)?,
last_polled_at: row.get(10)?,
created_at: row.get(11)?,
status: row.get(10)?,
last_polled_at: row.get(11)?,
created_at: row.get(12)?,
})
}
@ -105,6 +110,7 @@ impl WatchedTracker {
polling_interval,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
filters,
} = new_tracker;
@ -116,11 +122,12 @@ impl WatchedTracker {
let analyst_agent_id = normalize_agent_id(&analyst_agent_id);
let developer_agent_id = normalize_agent_id(&developer_agent_id);
let status = compute_status(&analyst_agent_id, &developer_agent_id);
let reviewer_agent_id = normalize_agent_id(&reviewer_agent_id);
let status = compute_status(&analyst_agent_id, &developer_agent_id, &reviewer_agent_id);
conn.execute(
"INSERT INTO watched_trackers (id, project_id, tracker_id, tracker_label, polling_interval, agent_config_json, filters_json, analyst_agent_id, developer_agent_id, status, created_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11)",
"INSERT INTO watched_trackers (id, project_id, tracker_id, tracker_label, polling_interval, agent_config_json, filters_json, analyst_agent_id, developer_agent_id, reviewer_agent_id, status, created_at) \
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12)",
params![
&id,
&project_id,
@ -131,6 +138,7 @@ impl WatchedTracker {
filters_json,
analyst_agent_id.as_deref(),
developer_agent_id.as_deref(),
reviewer_agent_id.as_deref(),
&status,
&now,
],
@ -144,6 +152,7 @@ impl WatchedTracker {
polling_interval,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
filters,
enabled: true,
status,
@ -154,7 +163,7 @@ impl WatchedTracker {
pub fn list_by_project(conn: &Connection, project_id: &str) -> Result<Vec<WatchedTracker>> {
let mut stmt = conn.prepare(
"SELECT id, project_id, tracker_id, tracker_label, polling_interval, analyst_agent_id, developer_agent_id, filters_json, enabled, status, last_polled_at, created_at \
"SELECT id, project_id, tracker_id, tracker_label, polling_interval, analyst_agent_id, developer_agent_id, reviewer_agent_id, filters_json, enabled, status, last_polled_at, created_at \
FROM watched_trackers WHERE project_id = ?1 ORDER BY created_at DESC",
)?;
let rows = stmt.query_map(params![project_id], from_row)?;
@ -163,12 +172,13 @@ impl WatchedTracker {
pub fn list_all_enabled(conn: &Connection) -> Result<Vec<WatchedTracker>> {
let mut stmt = conn.prepare(
"SELECT id, project_id, tracker_id, tracker_label, polling_interval, analyst_agent_id, developer_agent_id, filters_json, enabled, status, last_polled_at, created_at \
"SELECT id, project_id, tracker_id, tracker_label, polling_interval, analyst_agent_id, developer_agent_id, reviewer_agent_id, filters_json, enabled, status, last_polled_at, created_at \
FROM watched_trackers \
WHERE enabled = 1 \
AND status = 'valid' \
AND analyst_agent_id IS NOT NULL \
AND developer_agent_id IS NOT NULL \
AND reviewer_agent_id IS NOT NULL \
AND (\n\
EXISTS (\n\
SELECT 1\n\
@ -192,7 +202,7 @@ impl WatchedTracker {
pub fn get_by_id(conn: &Connection, id: &str) -> Result<WatchedTracker> {
conn.query_row(
"SELECT id, project_id, tracker_id, tracker_label, polling_interval, analyst_agent_id, developer_agent_id, filters_json, enabled, status, last_polled_at, created_at \
"SELECT id, project_id, tracker_id, tracker_label, polling_interval, analyst_agent_id, developer_agent_id, reviewer_agent_id, filters_json, enabled, status, last_polled_at, created_at \
FROM watched_trackers WHERE id = ?1",
params![id],
from_row,
@ -206,10 +216,11 @@ impl WatchedTracker {
let analyst_agent_id = normalize_agent_id(&update.analyst_agent_id);
let developer_agent_id = normalize_agent_id(&update.developer_agent_id);
let status = compute_status(&analyst_agent_id, &developer_agent_id);
let reviewer_agent_id = normalize_agent_id(&update.reviewer_agent_id);
let status = compute_status(&analyst_agent_id, &developer_agent_id, &reviewer_agent_id);
let affected = conn.execute(
"UPDATE watched_trackers SET tracker_id = ?1, tracker_label = ?2, polling_interval = ?3, filters_json = ?4, analyst_agent_id = ?5, developer_agent_id = ?6, status = ?7, enabled = ?8 WHERE id = ?9",
"UPDATE watched_trackers SET tracker_id = ?1, tracker_label = ?2, polling_interval = ?3, filters_json = ?4, analyst_agent_id = ?5, developer_agent_id = ?6, reviewer_agent_id = ?7, status = ?8, enabled = ?9 WHERE id = ?10",
params![
update.tracker_id,
update.tracker_label,
@ -217,6 +228,7 @@ impl WatchedTracker {
filters_json,
analyst_agent_id,
developer_agent_id,
reviewer_agent_id,
status,
enabled_int,
id
@ -267,7 +279,7 @@ mod tests {
Project::list(conn).unwrap().into_iter().next().unwrap().id
}
fn create_agents(conn: &Connection) -> (String, String) {
fn create_agents(conn: &Connection) -> (String, String, String) {
let analyst =
Agent::insert(conn, "Analyst", AgentRole::Analyst, AgentTool::Codex, "").unwrap();
@ -280,7 +292,10 @@ mod tests {
)
.unwrap();
(analyst.id, developer.id)
let reviewer =
Agent::insert(conn, "Reviewer", AgentRole::Reviewer, AgentTool::Codex, "").unwrap();
(analyst.id, developer.id, reviewer.id)
}
fn sample_filters() -> Vec<FilterGroup> {
@ -297,7 +312,7 @@ mod tests {
fn test_insert_tracker() {
let conn = setup();
let pid = project_id(&conn);
let (analyst_id, developer_id) = create_agents(&conn);
let (analyst_id, developer_id, reviewer_id) = create_agents(&conn);
let tracker = WatchedTracker::insert(
&conn,
@ -308,6 +323,7 @@ mod tests {
polling_interval: 15,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: sample_filters(),
},
)
@ -328,6 +344,10 @@ mod tests {
tracker.developer_agent_id.as_deref(),
Some(developer_id.as_str())
);
assert_eq!(
tracker.reviewer_agent_id.as_deref(),
Some(reviewer_id.as_str())
);
assert!(tracker.last_polled_at.is_none());
assert!(!tracker.created_at.is_empty());
assert_eq!(tracker.filters.len(), 1);
@ -337,7 +357,7 @@ mod tests {
fn test_list_by_project() {
let conn = setup();
let pid = project_id(&conn);
let (analyst_id, developer_id) = create_agents(&conn);
let (analyst_id, developer_id, reviewer_id) = create_agents(&conn);
WatchedTracker::insert(
&conn,
@ -348,6 +368,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: vec![],
},
)
@ -361,6 +382,7 @@ mod tests {
polling_interval: 20,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: vec![],
},
)
@ -374,7 +396,7 @@ mod tests {
fn test_list_all_enabled_ignores_invalid() {
let conn = setup();
let pid = project_id(&conn);
let (analyst_id, developer_id) = create_agents(&conn);
let (analyst_id, developer_id, reviewer_id) = create_agents(&conn);
let valid = WatchedTracker::insert(
&conn,
@ -385,6 +407,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: vec![],
},
)
@ -398,6 +421,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: "".to_string(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: vec![],
},
)
@ -413,7 +437,7 @@ mod tests {
fn test_get_by_id() {
let conn = setup();
let pid = project_id(&conn);
let (analyst_id, developer_id) = create_agents(&conn);
let (analyst_id, developer_id, reviewer_id) = create_agents(&conn);
let created = WatchedTracker::insert(
&conn,
@ -424,6 +448,7 @@ mod tests {
polling_interval: 30,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: sample_filters(),
},
)
@ -442,7 +467,7 @@ mod tests {
fn test_update_tracker() {
let conn = setup();
let pid = project_id(&conn);
let (analyst_id, developer_id) = create_agents(&conn);
let (analyst_id, developer_id, reviewer_id) = create_agents(&conn);
let created = WatchedTracker::insert(
&conn,
@ -453,6 +478,7 @@ mod tests {
polling_interval: 5,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: sample_filters(),
},
)
@ -475,6 +501,7 @@ mod tests {
polling_interval: 60,
analyst_agent_id: analyst_id,
developer_agent_id: developer_id,
reviewer_agent_id: reviewer_id,
filters: new_filters,
enabled: false,
},
@ -494,7 +521,7 @@ mod tests {
fn test_update_last_polled() {
let conn = setup();
let pid = project_id(&conn);
let (analyst_id, developer_id) = create_agents(&conn);
let (analyst_id, developer_id, reviewer_id) = create_agents(&conn);
let created = WatchedTracker::insert(
&conn,
@ -505,6 +532,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: vec![],
},
)
@ -527,7 +555,7 @@ mod tests {
fn test_delete_tracker() {
let conn = setup();
let pid = project_id(&conn);
let (analyst_id, developer_id) = create_agents(&conn);
let (analyst_id, developer_id, reviewer_id) = create_agents(&conn);
let created = WatchedTracker::insert(
&conn,
@ -538,6 +566,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst_id.clone(),
developer_agent_id: developer_id.clone(),
reviewer_agent_id: reviewer_id.clone(),
filters: vec![],
},
)

View file

@ -116,6 +116,8 @@ mod tests {
let analyst = Agent::insert(&conn, "A", AgentRole::Analyst, AgentTool::Codex, "").unwrap();
let developer =
Agent::insert(&conn, "D", AgentRole::Developer, AgentTool::ClaudeCode, "").unwrap();
let reviewer =
Agent::insert(&conn, "R", AgentRole::Reviewer, AgentTool::Codex, "").unwrap();
let tracker = WatchedTracker::insert(
&conn,
NewWatchedTracker {
@ -125,6 +127,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst.id.clone(),
developer_agent_id: developer.id.clone(),
reviewer_agent_id: reviewer.id.clone(),
filters: vec![],
},
)
@ -171,6 +174,8 @@ mod tests {
let analyst = Agent::insert(&conn, "A", AgentRole::Analyst, AgentTool::Codex, "").unwrap();
let developer =
Agent::insert(&conn, "D", AgentRole::Developer, AgentTool::ClaudeCode, "").unwrap();
let reviewer =
Agent::insert(&conn, "R", AgentRole::Reviewer, AgentTool::Codex, "").unwrap();
let tracker = WatchedTracker::insert(
&conn,
NewWatchedTracker {
@ -180,6 +185,7 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst.id.clone(),
developer_agent_id: developer.id.clone(),
reviewer_agent_id: reviewer.id.clone(),
filters: vec![],
},
)

View file

@ -19,11 +19,13 @@ impl ActivityState {
.active_polls
.lock()
.unwrap_or_else(|poison| poison.into_inner());
let entry = polls.entry(key.to_string()).or_insert_with(|| ActivePollEntry {
project_id: project_id.to_string(),
label: label.to_string(),
count: 0,
});
let entry = polls
.entry(key.to_string())
.or_insert_with(|| ActivePollEntry {
project_id: project_id.to_string(),
label: label.to_string(),
count: 0,
});
entry.project_id = project_id.to_string();
entry.label = label.to_string();
entry.count += 1;

View file

@ -353,9 +353,7 @@ mod tests {
assert!(url.contains("/api/search/universal/relative?"));
assert!(url.contains("query=level%3A%3C3"));
assert!(url.contains("range=1800"));
assert!(url.contains(
"filter=streams%3A000000000000000000000001"
));
assert!(url.contains("filter=streams%3A000000000000000000000001"));
assert!(!url.contains("&streams="));
}

View file

@ -12,7 +12,7 @@ use tauri::{AppHandle, Emitter};
use tokio::time::{interval, Duration};
fn is_ticket_active(status: &str) -> bool {
matches!(status, "Pending" | "Analyzing" | "Developing")
matches!(status, "Pending" | "Analyzing" | "Developing" | "Reviewing")
}
fn should_trigger_subject(score: i32, threshold: i32, has_active_ticket: bool) -> bool {

View file

@ -156,6 +156,8 @@ mod tests {
let analyst = Agent::insert(&conn, "A", AgentRole::Analyst, AgentTool::Codex, "").unwrap();
let developer =
Agent::insert(&conn, "D", AgentRole::Developer, AgentTool::ClaudeCode, "").unwrap();
let reviewer =
Agent::insert(&conn, "R", AgentRole::Reviewer, AgentTool::Codex, "").unwrap();
let tracker = WatchedTracker::insert(
&conn,
@ -166,22 +168,22 @@ mod tests {
polling_interval: 10,
analyst_agent_id: analyst.id.clone(),
developer_agent_id: developer.id.clone(),
reviewer_agent_id: reviewer.id.clone(),
filters: vec![],
},
)
.expect("tracker insert should succeed");
let ticket =
ProcessedTicket::insert_if_new(
&conn,
project_id,
&tracker.id,
1,
"Ticket 1",
"{\"id\":1}",
)
.expect("ticket insert should succeed")
.expect("ticket should be inserted");
let ticket = ProcessedTicket::insert_if_new(
&conn,
project_id,
&tracker.id,
1,
"Ticket 1",
"{\"id\":1}",
)
.expect("ticket insert should succeed")
.expect("ticket should be inserted");
ticket.id
}

View file

@ -28,6 +28,13 @@ pub enum Verdict {
NoFix,
}
/// Pipeline step at which ticket processing should (re)start.
///
/// Selected from the reports already persisted on the ticket: no analyst
/// report means starting at `Analyst`; an analyst report without developer
/// output resumes at `Developer`; analyst + developer reports together with
/// worktree metadata resume at `Review`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ProcessStartStep {
    // Run the full pipeline from the analyst step.
    Analyst,
    // Analyst report already exists; resume with the developer step.
    Developer,
    // Developer report and worktree metadata exist; only the review step remains.
    Review,
}
pub fn build_analyst_prompt(ticket: &ProcessedTicket, project: &Project) -> String {
let source_ref = ticket.source_ref.as_deref().unwrap_or("-");
format!(
@ -51,6 +58,41 @@ pub fn build_analyst_prompt(ticket: &ProcessedTicket, project: &Project) -> Stri
3. Evalue si une correction de code est necessaire
4. Produis un rapport structure en markdown
## Format de sortie obligatoire
- Ecris un rapport en markdown avec des titres, des sous-titres et des listes.
- Laisse une ligne vide entre chaque section.
- Mets les labels importants en gras (ex: **Impact**, **Cause racine**).
- Evite les gros paragraphes: maximum 4 lignes par paragraphe.
- Respecte cette structure:
# Analyse ticket #{artifact_id} - {title}
## Resume executif
- **Constat:**
- **Impact:**
- **Urgence:**
## Diagnostic technique
### Cause racine
...
### Indices observables
- ...
## Zone de code probable
- `chemin/fichier.ext` - justification
## Plan de correction
1. ...
2. ...
## Risques et validations
- **Risques:**
- **Tests a executer:**
## Conclusion
- **Decision:** FIX_NEEDED ou NO_FIX
- **Rationale courte:** ...
Termine ton rapport par un de ces verdicts sur une ligne separee:
[VERDICT: FIX_NEEDED] si une correction de code est necessaire
[VERDICT: NO_FIX] si aucune correction n'est necessaire"#,
@ -104,6 +146,57 @@ pub fn build_developer_prompt(
)
}
/// Builds the prompt handed to the reviewer agent for `ticket`.
///
/// Embeds the ticket metadata, the project context (worktree path, base and
/// working branches) and the full analyst/developer reports so the reviewer
/// can validate the fix end to end.
///
/// Fix: "structuré" was the only accented word in this module's prompt
/// strings; normalized to "structure" to match the accent-free French used
/// everywhere else (e.g. "proposee", "coherence", "necessaire").
pub fn build_review_prompt(
    ticket: &ProcessedTicket,
    project: &Project,
    analyst_report: &str,
    developer_report: &str,
    worktree_path: &str,
    branch_name: &str,
) -> String {
    // "-" marks a missing source reference, mirroring build_analyst_prompt.
    let source_ref = ticket.source_ref.as_deref().unwrap_or("-");
    format!(
        r#"Tu es un reviewer technique. Tu dois valider la correction proposee par le developpeur.
## Ticket
- ID: {artifact_id}
- Titre: {title}
- Source: {source}
- Source ref: {source_ref}
## Contexte
- Projet: {project_name}
- Repo (worktree): {worktree_path}
- Branche de base: {base_branch}
- Branche de travail: {branch_name}
## Rapport analyste
{analyst_report}
## Rapport developpeur
{developer_report}
## Ta mission
1. Verifie la coherence entre l'analyse, le correctif et le ticket.
2. Verifie la qualite des changements (risques, dette, regressions possibles, tests manquants).
3. Produis un rapport markdown structure avec:
- Synthese
- Points conformes
- Risques / points a corriger
- Verdict final"#,
        artifact_id = ticket.artifact_id,
        title = ticket.artifact_title,
        source = ticket.source,
        source_ref = source_ref,
        project_name = project.name,
        worktree_path = worktree_path,
        base_branch = project.base_branch,
        branch_name = branch_name,
        analyst_report = analyst_report,
        developer_report = developer_report,
    )
}
fn append_custom_prompt(base_prompt: String, custom_prompt: &str) -> String {
let extra = custom_prompt.trim();
if extra.is_empty() {
@ -154,6 +247,12 @@ pub fn parse_verdict(report: &str) -> Verdict {
Verdict::FixNeeded
}
/// Returns true when `value` holds a string containing at least one
/// non-whitespace character.
///
/// Used when picking the pipeline start step to decide whether persisted
/// reports and worktree metadata are actually present (empty/blank strings
/// count as missing).
fn has_non_empty_text(value: Option<&str>) -> bool {
    // `is_some_and` is the idiomatic form of `map(..).unwrap_or(false)`
    // (clippy: map_unwrap_or).
    value.is_some_and(|text| !text.trim().is_empty())
}
fn resolve_path_from_working_dir(working_dir: &Path, path: &str) -> Option<PathBuf> {
let trimmed = path.trim();
if trimmed.is_empty() {
@ -169,6 +268,12 @@ fn resolve_path_from_working_dir(working_dir: &Path, path: &str) -> Option<PathB
}
fn codex_additional_writable_dirs(working_dir: &str) -> Vec<String> {
let working_dir_path = Path::new(working_dir);
let normalized_working_dir =
std::fs::canonicalize(working_dir_path).unwrap_or_else(|_| working_dir_path.to_path_buf());
let mut dirs = vec![normalized_working_dir.to_string_lossy().to_string()];
let output = std::process::Command::new("git")
.args(["rev-parse", "--git-dir", "--git-common-dir"])
.current_dir(working_dir)
@ -176,14 +281,8 @@ fn codex_additional_writable_dirs(working_dir: &str) -> Vec<String> {
let output = match output {
Ok(value) if value.status.success() => value,
_ => return Vec::new(),
_ => return dirs,
};
let working_dir_path = Path::new(working_dir);
let normalized_working_dir =
std::fs::canonicalize(working_dir_path).unwrap_or_else(|_| working_dir_path.to_path_buf());
let mut dirs = Vec::new();
for line in String::from_utf8_lossy(&output.stdout).lines() {
let Some(path) = resolve_path_from_working_dir(working_dir_path, line) else {
continue;
@ -516,8 +615,8 @@ async fn process_ticket(
}
};
let (analyst_agent, developer_agent) = {
let (analyst_id, developer_id) = if ticket.source == "graylog" {
let (analyst_agent, developer_agent, reviewer_agent) = {
let (analyst_id, developer_id, reviewer_id) = if ticket.source == "graylog" {
let conn = db.lock().map_err(|e| format!("DB lock failed: {}", e))?;
let config = match GraylogCredentials::get_by_project(&conn, &project.id)
.map_err(|e| format!("graylog credentials lookup failed: {}", e))?
@ -540,6 +639,7 @@ async fn process_ticket(
(
config.analyst_agent_id.to_string(),
config.developer_agent_id.to_string(),
config.reviewer_agent_id.to_string(),
)
} else if ticket.source == "tuleap" {
let tracker = match &tracker {
@ -564,7 +664,7 @@ async fn process_ticket(
&project.id,
&ticket.id,
ticket.artifact_id,
"Tracker is invalid. Configure analyst and developer agents.",
"Tracker is invalid. Configure analyst, developer and reviewer agents.",
);
return Ok(true);
}
@ -599,7 +699,22 @@ async fn process_ticket(
}
};
(analyst_id, developer_id)
let reviewer_id = match tracker.reviewer_agent_id.as_deref() {
Some(id) => id.to_string(),
None => {
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
"Tracker has no reviewer agent configured.",
);
return Ok(true);
}
};
(analyst_id, developer_id, reviewer_id)
} else {
record_ticket_error(
db,
@ -645,6 +760,22 @@ async fn process_ticket(
}
};
let reviewer_agent = match Agent::get_by_id(&conn, &reviewer_id) {
Ok(agent) => agent,
Err(_) => {
drop(conn);
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
"Configured reviewer agent was not found.",
);
return Ok(true);
}
};
if analyst_agent.role != AgentRole::Analyst {
drop(conn);
record_ticket_error(
@ -671,94 +802,126 @@ async fn process_ticket(
return Ok(true);
}
(analyst_agent, developer_agent)
};
{
let conn = db.lock().map_err(|e| format!("DB lock failed: {}", e))?;
ProcessedTicket::update_status(&conn, &ticket.id, "Analyzing")
.map_err(|e| format!("update_status failed: {}", e))?;
}
let _ = app_handle.emit(
"ticket-processing-started",
serde_json::json!({
"project_id": &project.id,
"ticket_id": &ticket.id,
"artifact_id": ticket.artifact_id,
"step": "analyst",
}),
);
let analyst_prompt = append_custom_prompt(
build_analyst_prompt(&ticket, &project),
&analyst_agent.custom_prompt,
);
let analyst_args = build_agent_cli_args(&analyst_agent, &project.path);
let analyst_result = run_cli_command(
analyst_agent.tool.to_command(),
&analyst_args,
&analyst_prompt,
&project.path,
600,
TicketCliContext {
app_handle,
ticket_id: &ticket.id,
process_registry,
},
)
.await;
let analyst_report = match analyst_result {
Ok(report) => report,
Err(e) => {
if is_ticket_cancelled(db, &ticket.id)? {
return Ok(true);
}
if reviewer_agent.role != AgentRole::Reviewer {
drop(conn);
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
&e,
"Configured reviewer agent has an invalid role.",
);
return Ok(true);
}
(analyst_agent, developer_agent, reviewer_agent)
};
if is_ticket_cancelled(db, &ticket.id)? {
return Ok(true);
}
let start_step = if has_non_empty_text(ticket.analyst_report.as_deref()) {
if has_non_empty_text(ticket.developer_report.as_deref())
&& has_non_empty_text(ticket.worktree_path.as_deref())
&& has_non_empty_text(ticket.branch_name.as_deref())
{
ProcessStartStep::Review
} else {
ProcessStartStep::Developer
}
} else {
ProcessStartStep::Analyst
};
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::set_analyst_report(&conn, &ticket.id, &analyst_report)
.map_err(|e| format!("set_analyst_report: {}", e))?;
}
let analyst_report = if start_step == ProcessStartStep::Analyst {
{
let conn = db.lock().map_err(|e| format!("DB lock failed: {}", e))?;
ProcessedTicket::update_status(&conn, &ticket.id, "Analyzing")
.map_err(|e| format!("update_status failed: {}", e))?;
}
let _ = app_handle.emit(
"ticket-processing-started",
serde_json::json!({
"project_id": &project.id,
"ticket_id": &ticket.id,
"artifact_id": ticket.artifact_id,
"step": "analyst",
}),
);
let analyst_prompt = append_custom_prompt(
build_analyst_prompt(&ticket, &project),
&analyst_agent.custom_prompt,
);
let analyst_args = build_agent_cli_args(&analyst_agent, &project.path);
let analyst_result = run_cli_command(
analyst_agent.tool.to_command(),
&analyst_args,
&analyst_prompt,
&project.path,
600,
TicketCliContext {
app_handle,
ticket_id: &ticket.id,
process_registry,
},
)
.await;
let analyst_report = match analyst_result {
Ok(report) => report,
Err(e) => {
if is_ticket_cancelled(db, &ticket.id)? {
return Ok(true);
}
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
&e,
);
return Ok(true);
}
};
let verdict = parse_verdict(&analyst_report);
if verdict == Verdict::NoFix {
if is_ticket_cancelled(db, &ticket.id)? {
return Ok(true);
}
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::update_status(&conn, &ticket.id, "Done")
.map_err(|e| format!("update_status: {}", e))?;
ProcessedTicket::set_analyst_report(&conn, &ticket.id, &analyst_report)
.map_err(|e| format!("set_analyst_report: {}", e))?;
}
let _ = app_handle.emit(
"ticket-processing-done",
serde_json::json!({
"project_id": &project.id,
"ticket_id": &ticket.id,
"artifact_id": ticket.artifact_id,
}),
);
notifier::notify_analysis_done(db, app_handle, &project.id, &ticket.id, ticket.artifact_id);
return Ok(true);
}
let verdict = parse_verdict(&analyst_report);
if verdict == Verdict::NoFix {
if is_ticket_cancelled(db, &ticket.id)? {
return Ok(true);
}
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::update_status(&conn, &ticket.id, "NoFix")
.map_err(|e| format!("update_status: {}", e))?;
}
let _ = app_handle.emit(
"ticket-processing-done",
serde_json::json!({
"project_id": &project.id,
"ticket_id": &ticket.id,
"artifact_id": ticket.artifact_id,
}),
);
notifier::notify_analysis_done(db, app_handle, &project.id, &ticket.id, ticket.artifact_id);
return Ok(true);
}
analyst_report
} else {
ticket.analyst_report.clone().unwrap_or_default()
};
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
@ -769,29 +932,151 @@ async fn process_ticket(
}
}
let worktree_result =
worktree_manager::create_worktree(&project.path, &project.base_branch, ticket.artifact_id);
let (wt_path, branch_name, developer_report) = if start_step == ProcessStartStep::Review {
if !has_non_empty_text(ticket.worktree_path.as_deref())
|| !has_non_empty_text(ticket.branch_name.as_deref())
{
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
"Cannot resume from review step without worktree metadata.",
);
return Ok(true);
}
if let Err(e) = &worktree_result {
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
if !has_non_empty_text(ticket.developer_report.as_deref()) {
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
"Cannot resume from review step without developer report.",
);
return Ok(true);
}
(
ticket.worktree_path.clone().unwrap_or_default(),
ticket.branch_name.clone().unwrap_or_default(),
ticket.developer_report.clone().unwrap_or_default(),
)
} else {
let worktree_result = worktree_manager::create_worktree(
&project.path,
&project.base_branch,
ticket.artifact_id,
e,
);
}
let (wt_path, branch_name) = worktree_result?;
if let Err(e) = &worktree_result {
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
e,
);
}
let (wt_path, branch_name) = worktree_result?;
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::set_worktree_info(&conn, &ticket.id, &wt_path, &branch_name)
.map_err(|e| format!("set_worktree_info: {}", e))?;
Worktree::insert(&conn, &ticket.id, &wt_path, &branch_name)
.map_err(|e| format!("insert worktree: {}", e))?;
ProcessedTicket::update_status(&conn, &ticket.id, "Developing")
.map_err(|e| format!("update_status: {}", e))?;
}
let _ = app_handle.emit(
"ticket-processing-started",
serde_json::json!({
"project_id": &project.id,
"ticket_id": &ticket.id,
"artifact_id": ticket.artifact_id,
"step": "developer",
}),
);
let developer_prompt = append_custom_prompt(
build_developer_prompt(&ticket, &project, &analyst_report, &wt_path),
&developer_agent.custom_prompt,
);
let developer_args = build_agent_cli_args(&developer_agent, &wt_path);
let developer_result = run_cli_command(
developer_agent.tool.to_command(),
&developer_args,
&developer_prompt,
&wt_path,
600,
TicketCliContext {
app_handle,
ticket_id: &ticket.id,
process_registry,
},
)
.await;
let developer_report = match developer_result {
Ok(report) => report,
Err(e) => {
if is_ticket_cancelled(db, &ticket.id)? {
return Ok(true);
}
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
&e,
);
return Ok(true);
}
};
if is_ticket_cancelled(db, &ticket.id)? {
return Ok(true);
}
if let Err(validation_error) =
validate_developer_completion(&project, &branch_name, &developer_report)
{
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::set_developer_report(&conn, &ticket.id, &developer_report)
.map_err(|e| format!("set_developer_report: {}", e))?;
}
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
&validation_error,
);
return Ok(true);
}
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::set_developer_report(&conn, &ticket.id, &developer_report)
.map_err(|e| format!("set_developer_report: {}", e))?;
}
(wt_path, branch_name, developer_report)
};
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::set_worktree_info(&conn, &ticket.id, &wt_path, &branch_name)
.map_err(|e| format!("set_worktree_info: {}", e))?;
Worktree::insert(&conn, &ticket.id, &wt_path, &branch_name)
.map_err(|e| format!("insert worktree: {}", e))?;
ProcessedTicket::update_status(&conn, &ticket.id, "Developing")
ProcessedTicket::update_status(&conn, &ticket.id, "Reviewing")
.map_err(|e| format!("update_status: {}", e))?;
}
@ -801,19 +1086,26 @@ async fn process_ticket(
"project_id": &project.id,
"ticket_id": &ticket.id,
"artifact_id": ticket.artifact_id,
"step": "developer",
"step": "review",
}),
);
let developer_prompt = append_custom_prompt(
build_developer_prompt(&ticket, &project, &analyst_report, &wt_path),
&developer_agent.custom_prompt,
let review_prompt = append_custom_prompt(
build_review_prompt(
&ticket,
&project,
&analyst_report,
&developer_report,
&wt_path,
&branch_name,
),
&reviewer_agent.custom_prompt,
);
let developer_args = build_agent_cli_args(&developer_agent, &wt_path);
let developer_result = run_cli_command(
developer_agent.tool.to_command(),
&developer_args,
&developer_prompt,
let review_args = build_agent_cli_args(&reviewer_agent, &wt_path);
let review_result = run_cli_command(
reviewer_agent.tool.to_command(),
&review_args,
&review_prompt,
&wt_path,
600,
TicketCliContext {
@ -824,7 +1116,7 @@ async fn process_ticket(
)
.await;
let developer_report = match developer_result {
let review_report = match review_result {
Ok(report) => report,
Err(e) => {
if is_ticket_cancelled(db, &ticket.id)? {
@ -846,30 +1138,10 @@ async fn process_ticket(
return Ok(true);
}
if let Err(validation_error) =
validate_developer_completion(&project, &branch_name, &developer_report)
{
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::set_developer_report(&conn, &ticket.id, &developer_report)
.map_err(|e| format!("set_developer_report: {}", e))?;
}
record_ticket_error(
db,
app_handle,
&project.id,
&ticket.id,
ticket.artifact_id,
&validation_error,
);
return Ok(true);
}
{
let conn = db.lock().map_err(|e| format!("DB lock: {}", e))?;
ProcessedTicket::set_developer_report(&conn, &ticket.id, &developer_report)
.map_err(|e| format!("set_developer_report: {}", e))?;
ProcessedTicket::set_review_report(&conn, &ticket.id, &review_report)
.map_err(|e| format!("set_review_report: {}", e))?;
ProcessedTicket::update_status(&conn, &ticket.id, "Done")
.map_err(|e| format!("update_status: {}", e))?;
}
@ -939,6 +1211,7 @@ mod tests {
status: "Pending".into(),
analyst_report: None,
developer_report: None,
review_report: None,
worktree_path: None,
branch_name: None,
detected_at: "2026-01-01T00:00:00Z".into(),
@ -977,6 +1250,7 @@ mod tests {
status: "Developing".into(),
analyst_report: None,
developer_report: None,
review_report: None,
worktree_path: None,
branch_name: None,
detected_at: "2026-01-01T00:00:00Z".into(),
@ -1211,6 +1485,16 @@ mod tests {
let agent = build_test_agent(crate::models::agent::AgentTool::Codex);
let args = build_agent_cli_args(&agent, &worktree_path);
let add_dirs = collect_add_dirs(&args);
let normalized_worktree = std::fs::canonicalize(&worktree_path)
.unwrap_or_else(|_| Path::new(&worktree_path).to_path_buf())
.to_string_lossy()
.to_string();
assert!(
add_dirs.contains(&normalized_worktree),
"Expected --add-dir to contain worktree '{}', got {:?}",
normalized_worktree,
add_dirs
);
let rev_parse = Command::new("git")
.args(["rev-parse", "--git-dir", "--git-common-dir"])
@ -1257,6 +1541,16 @@ mod tests {
let agent = build_test_agent(crate::models::agent::AgentTool::ClaudeCode);
let args = build_agent_cli_args(&agent, &worktree_path);
let add_dirs = collect_add_dirs(&args);
let normalized_worktree = std::fs::canonicalize(&worktree_path)
.unwrap_or_else(|_| Path::new(&worktree_path).to_path_buf())
.to_string_lossy()
.to_string();
assert!(
add_dirs.contains(&normalized_worktree),
"Expected --add-dir to contain worktree '{}', got {:?}",
normalized_worktree,
add_dirs
);
let rev_parse = Command::new("git")
.args(["rev-parse", "--git-dir", "--git-common-dir"])

View file

@ -20,8 +20,14 @@ pub fn start(
let mut tick = interval(Duration::from_secs(60));
loop {
tick.tick().await;
poll_all_trackers(&db, &encryption_key, &http_client, &app_handle, &activity_state)
.await;
poll_all_trackers(
&db,
&encryption_key,
&http_client,
&app_handle,
&activity_state,
)
.await;
}
});
}

View file

@ -117,6 +117,7 @@ export default function AgentForm() {
>
<option value="analyst">Analyst</option>
<option value="developer">Developer</option>
<option value="reviewer">Reviewer</option>
</select>
</div>

View file

@ -51,7 +51,16 @@ export default function AgentList() {
}
/**
 * Human-readable label for an agent role.
 * Falls back to the raw role string so an unknown/new role still renders
 * something meaningful instead of a wrong hard-coded label.
 */
function roleLabel(role: Agent["role"]): string {
  switch (role) {
    case "analyst":
      return "Analyst";
    case "developer":
      return "Developer";
    case "reviewer":
      return "Reviewer";
    default:
      return role;
  }
}
function toolLabel(tool: Agent["tool"]): string {

View file

@ -4,21 +4,37 @@ import {
requestPermission,
sendNotification,
} from "@tauri-apps/plugin-notification";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useNavigate, useParams } from "react-router-dom";
import {
getTicketResult,
listNotifications,
markAllNotificationsRead,
markNotificationRead,
} from "../../lib/api";
import type { OrchaiNotification } from "../../lib/types";
import { useLiveRefresh } from "../../lib/useLiveRefresh";
import { buttonClass, cardClass, pillClass } from "../ui/primitives";
type NewNotificationEvent = {
notification: OrchaiNotification;
};
/**
 * Whether OS-level notifications must be suppressed.
 * Workaround: tauri-plugin-notification on Linux can panic inside the Tokio
 * runtime (notify-rust/zbus `block_on` inside an async runtime), so on Linux
 * we keep notifications in-app only. Also skips when `navigator` is missing.
 */
function shouldSkipSystemNotification(): boolean {
  if (typeof navigator === "undefined") {
    return true;
  }
  const userAgent = navigator.userAgent.toLowerCase();
  return userAgent.includes("linux");
}
async function showSystemNotification(notification: OrchaiNotification) {
if (shouldSkipSystemNotification()) {
return;
}
try {
let permissionGranted = await isPermissionGranted();
if (!permissionGranted) {
@ -39,31 +55,71 @@ async function showSystemNotification(notification: OrchaiNotification) {
export default function NotificationCenter() {
const navigate = useNavigate();
const { projectId } = useParams();
const { projectId, ticketId } = useParams();
const containerRef = useRef<HTMLDivElement | null>(null);
const [open, setOpen] = useState(false);
const [notifications, setNotifications] = useState<OrchaiNotification[]>([]);
const [ticketProjectId, setTicketProjectId] = useState<string | null>(null);
const [filter, setFilter] = useState<"all" | "unread" | "errors" | "fixes">(
"all"
);
const scopedProjectId = projectId ?? ticketProjectId;
async function loadNotifications() {
if (!projectId) {
useEffect(() => {
if (projectId) {
setTicketProjectId(null);
return;
}
if (!ticketId) {
setTicketProjectId(null);
return;
}
setTicketProjectId(null);
let cancelled = false;
void getTicketResult(ticketId)
.then((result) => {
if (!cancelled) {
setTicketProjectId(result.ticket.project_id);
}
})
.catch(() => {
if (!cancelled) {
setTicketProjectId(null);
}
});
return () => {
cancelled = true;
};
}, [projectId, ticketId]);
const loadNotifications = useCallback(async () => {
if (!scopedProjectId) {
setNotifications([]);
return;
}
try {
const items = await listNotifications(projectId, false);
const items = await listNotifications(scopedProjectId, false);
setNotifications(items);
} catch {
// Ignore load errors in layout chrome
}
}
}, [scopedProjectId]);
useEffect(() => {
loadNotifications();
}, [projectId]);
void loadNotifications();
}, [loadNotifications]);
useLiveRefresh({
enabled: Boolean(scopedProjectId),
projectId: scopedProjectId,
refresh: loadNotifications,
fallbackIntervalMs: 15_000,
});
useEffect(() => {
let cancelled = false;
@ -74,7 +130,7 @@ export default function NotificationCenter() {
const cleanup = await listen<NewNotificationEvent>("new-notification", (event) => {
const incoming = event.payload.notification;
if (projectId && incoming.project_id !== projectId) {
if (scopedProjectId && incoming.project_id !== scopedProjectId) {
return;
}
@ -105,7 +161,7 @@ export default function NotificationCenter() {
unlisten();
}
};
}, [projectId]);
}, [scopedProjectId]);
useEffect(() => {
if (!open) {
@ -177,12 +233,12 @@ export default function NotificationCenter() {
}
async function handleMarkAllRead() {
if (!projectId) {
if (!scopedProjectId) {
return;
}
try {
await markAllNotificationsRead(projectId);
await markAllNotificationsRead(scopedProjectId);
setNotifications((prev) => prev.map((n) => ({ ...n, read: true })));
} catch {
// ignore
@ -212,7 +268,7 @@ export default function NotificationCenter() {
type="button"
onClick={handleMarkAllRead}
className={buttonClass({ variant: "ghost", size: "xs" })}
disabled={!projectId || unreadCount === 0}
disabled={!scopedProjectId || unreadCount === 0}
>
Mark all read
</button>

View file

@ -44,7 +44,7 @@ interface TicketProcessingPayload {
project_id: string;
ticket_id: string;
artifact_id: number;
step?: "analyst" | "developer";
step?: "analyst" | "developer" | "review";
error?: string;
}

View file

@ -46,6 +46,7 @@ export default function ProjectGraylog() {
const [apiToken, setApiToken] = useState("");
const [analystAgentId, setAnalystAgentId] = useState("");
const [developerAgentId, setDeveloperAgentId] = useState("");
const [reviewerAgentId, setReviewerAgentId] = useState("");
const [streamId, setStreamId] = useState("");
const [queryFilter, setQueryFilter] = useState("level:(critical OR error OR warning)");
const [pollingIntervalMinutes, setPollingIntervalMinutes] = useState(10);
@ -61,6 +62,7 @@ export default function ProjectGraylog() {
const analysts = agents.filter((agent) => agent.role === "analyst");
const developers = agents.filter((agent) => agent.role === "developer");
const reviewers = agents.filter((agent) => agent.role === "reviewer");
const hasProjectScopedTuleapCredentials =
Boolean(projectId) && tuleapCredentials?.project_id === projectId;
const usingGlobalTuleapFallback =
@ -100,6 +102,7 @@ export default function ProjectGraylog() {
setBaseUrl(creds.base_url);
setAnalystAgentId(creds.analyst_agent_id);
setDeveloperAgentId(creds.developer_agent_id);
setReviewerAgentId(creds.reviewer_agent_id);
setStreamId(creds.stream_id ?? "");
setQueryFilter(creds.query_filter);
setPollingIntervalMinutes(creds.polling_interval_minutes);
@ -169,6 +172,7 @@ export default function ProjectGraylog() {
apiToken,
analystAgentId,
developerAgentId,
reviewerAgentId,
streamId.trim() || null,
queryFilter,
pollingIntervalMinutes,
@ -322,7 +326,7 @@ export default function ProjectGraylog() {
required={!credentials}
/>
<div className="grid gap-3 md:grid-cols-2">
<div className="grid gap-3 md:grid-cols-3">
<select
className={inputClass}
value={analystAgentId}
@ -350,6 +354,19 @@ export default function ProjectGraylog() {
</option>
))}
</select>
<select
className={inputClass}
value={reviewerAgentId}
onChange={(event) => setReviewerAgentId(event.target.value)}
required
>
<option value="">Reviewer agent</option>
{reviewers.map((agent) => (
<option key={agent.id} value={agent.id}>
{agent.name}
</option>
))}
</select>
</div>
<div className="grid gap-3 md:grid-cols-4">

View file

@ -87,7 +87,11 @@ export default function ProjectLiveAgent() {
const [error, setError] = useState<string | null>(null);
const usableAgents = useMemo(
() => agents.filter((agent) => agent.role === "analyst" || agent.role === "developer"),
() =>
agents.filter(
(agent) =>
agent.role === "analyst" || agent.role === "developer" || agent.role === "reviewer"
),
[agents]
);
const selectedSession = useMemo(

View file

@ -43,7 +43,11 @@ export default function ProjectTasks() {
const [error, setError] = useState<string | null>(null);
const usableAgents = useMemo(
() => agents.filter((agent) => agent.role === "analyst" || agent.role === "developer"),
() =>
agents.filter(
(agent) =>
agent.role === "analyst" || agent.role === "developer" || agent.role === "reviewer"
),
[agents]
);
const selectedAgent = useMemo(

View file

@ -1,4 +1,4 @@
import { useCallback, useEffect, useRef, useState } from "react";
import { useNavigate, useParams } from "react-router-dom";
import Markdown from "react-markdown";
import remarkGfm from "remark-gfm";
@ -10,6 +10,7 @@ import {
getWorktreeDiff,
listLocalBranchesForWorktree,
retryTicket,
retryTicketStep,
} from "../../lib/api";
import { getErrorMessage } from "../../lib/errors";
import {
@ -19,6 +20,7 @@ import {
type TicketResourceConfig,
} from "../../lib/ticketResource";
import type { ProcessedTicket, Worktree } from "../../lib/types";
import { useLiveRefresh } from "../../lib/useLiveRefresh";
import ConfirmModal from "../ui/ConfirmModal";
import TicketStatusBadge from "../ui/TicketStatusBadge";
import {
@ -72,10 +74,28 @@ export default function TicketDetail() {
const [branchesError, setBranchesError] = useState("");
const [diffLoading, setDiffLoading] = useState(false);
const [diffError, setDiffError] = useState("");
const [tab, setTab] = useState<"info" | "analyst" | "developer" | "diff">("info");
const [tab, setTab] = useState<"info" | "analyst" | "developer" | "review" | "diff">("info");
const [loading, setLoading] = useState(false);
const [error, setError] = useState("");
const [isDeleteWorktreeModalOpen, setIsDeleteWorktreeModalOpen] = useState(false);
const latestTicketIdRef = useRef<string | undefined>(ticketId);
const worktreeSignatureRef = useRef<string>("");
useEffect(() => {
latestTicketIdRef.current = ticketId;
}, [ticketId]);
const resetWorktreeUi = useCallback(() => {
setDiff(null);
setDiffError("");
setDiffLoading(false);
setAvailableBranches([]);
setBranchInputMode("select");
setTargetBranch("");
setBranchesError("");
setBranchesLoading(false);
setBranchesLoadedForWorktreeId(null);
}, []);
async function loadBranchOptions(worktreeId: string) {
setBranchesLoading(true);
@ -103,41 +123,63 @@ export default function TicketDetail() {
}
}
async function loadData() {
if (!ticketId) return;
const loadData = useCallback(async () => {
if (!ticketId) {
setTicket(null);
setWorktree(null);
setResourceConfig(DEFAULT_TICKET_RESOURCE_CONFIG);
resetWorktreeUi();
return;
}
setError("");
try {
const result = await getTicketResult(ticketId);
setTicket(result.ticket);
try {
const config = await fetchTicketResourceConfig(result.ticket.project_id);
setResourceConfig(config);
} catch {
setResourceConfig(DEFAULT_TICKET_RESOURCE_CONFIG);
}
setWorktree(result.worktree);
setTab("info");
setDiff(null);
setDiffError("");
setDiffLoading(false);
setAvailableBranches([]);
setBranchInputMode("select");
setTargetBranch("");
setBranchesError("");
setBranchesLoading(false);
setBranchesLoadedForWorktreeId(null);
if (result.worktree && result.worktree.status === "Active") {
if (latestTicketIdRef.current !== ticketId) {
return;
}
setTicket(result.ticket);
const config = await fetchTicketResourceConfig(result.ticket.project_id).catch(
() => DEFAULT_TICKET_RESOURCE_CONFIG
);
if (latestTicketIdRef.current !== ticketId) {
return;
}
setResourceConfig(config);
const worktreeSignature = result.worktree
? `${result.worktree.id}:${result.worktree.status}`
: "none";
if (worktreeSignatureRef.current !== worktreeSignature) {
worktreeSignatureRef.current = worktreeSignature;
resetWorktreeUi();
}
setWorktree(result.worktree);
} catch (err) {
setError(getErrorMessage(err));
if (latestTicketIdRef.current === ticketId) {
setError(getErrorMessage(err));
}
}
}
}, [resetWorktreeUi, ticketId]);
useEffect(() => {
worktreeSignatureRef.current = "";
setTab("info");
resetWorktreeUi();
void loadData();
}, [ticketId]);
}, [loadData, resetWorktreeUi]);
useLiveRefresh({
enabled: Boolean(ticketId),
projectId: ticket?.project_id,
ticketId,
refresh: loadData,
fallbackIntervalMs: 7_000,
});
useEffect(() => {
if (tab !== "info") return;
@ -199,8 +241,9 @@ export default function TicketDetail() {
await loadData();
} catch (err) {
setError(getErrorMessage(err));
} finally {
setLoading(false);
}
setLoading(false);
}
async function handleCancel() {
@ -211,8 +254,22 @@ export default function TicketDetail() {
await loadData();
} catch (err) {
setError(getErrorMessage(err));
} finally {
setLoading(false);
}
}
async function handleRetryDeveloper() {
if (!ticketId) return;
setLoading(true);
try {
await retryTicketStep(ticketId, "developer");
await loadData();
} catch (err) {
setError(getErrorMessage(err));
} finally {
setLoading(false);
}
setLoading(false);
}
async function handleApplyFix() {
@ -240,8 +297,9 @@ export default function TicketDetail() {
await loadData();
} catch (err) {
setError(getErrorMessage(err));
} finally {
setLoading(false);
}
setLoading(false);
}
async function handleDeleteWorktree() {
@ -250,17 +308,12 @@ export default function TicketDetail() {
setLoading(true);
try {
await deleteWorktreeCmd(worktree.id);
setWorktree(null);
setDiff(null);
setAvailableBranches([]);
setTargetBranch("");
setBranchInputMode("select");
setBranchesError("");
setBranchesLoading(false);
await loadData();
} catch (err) {
setError(getErrorMessage(err));
} finally {
setLoading(false);
}
setLoading(false);
}
if (!ticket) {
@ -279,6 +332,11 @@ export default function TicketDetail() {
label: "Developer Report",
disabled: !ticket.developer_report,
},
{
key: "review" as const,
label: "Review Report",
disabled: !ticket.review_report,
},
{
key: "diff" as const,
label: "Diff",
@ -286,6 +344,12 @@ export default function TicketDetail() {
},
];
const sourceLink = buildTicketResourceLink(ticket, resourceConfig);
const canRetryPipeline =
ticket.status === "Error" ||
ticket.status === "Done" ||
ticket.status === "NoFix" ||
ticket.status === "Cancelled";
const canRetryDeveloperStep = canRetryPipeline && Boolean(ticket.analyst_report?.trim());
return (
<div className={pageClass}>
@ -304,18 +368,28 @@ export default function TicketDetail() {
</h2>
</div>
<div className="flex gap-2">
{(ticket.status === "Error" || ticket.status === "Done" || ticket.status === "Cancelled") && (
{canRetryDeveloperStep && (
<button
onClick={handleRetryDeveloper}
disabled={loading}
className={buttonClass({ variant: "secondary", size: "sm" })}
>
Retry dev
</button>
)}
{canRetryPipeline && (
<button
onClick={handleRetry}
disabled={loading}
className={buttonClass({ variant: "primary", size: "sm" })}
>
Retry
Retry all
</button>
)}
{(ticket.status === "Pending" ||
ticket.status === "Analyzing" ||
ticket.status === "Developing") && (
ticket.status === "Developing" ||
ticket.status === "Reviewing") && (
<button
onClick={handleCancel}
disabled={loading}
@ -492,17 +566,23 @@ export default function TicketDetail() {
)}
{tab === "analyst" && ticket.analyst_report && (
<div className="prose prose-sm max-w-none rounded-lg border border-gray-200 bg-white p-6">
<div className="markdown-report rounded-lg border border-gray-200 bg-white p-6">
<Markdown remarkPlugins={[remarkGfm]}>{ticket.analyst_report}</Markdown>
</div>
)}
{tab === "developer" && ticket.developer_report && (
<div className="prose prose-sm max-w-none rounded-lg border border-gray-200 bg-white p-6">
<div className="markdown-report rounded-lg border border-gray-200 bg-white p-6">
<Markdown remarkPlugins={[remarkGfm]}>{ticket.developer_report}</Markdown>
</div>
)}
{tab === "review" && ticket.review_report && (
<div className="markdown-report rounded-lg border border-gray-200 bg-white p-6">
<Markdown remarkPlugins={[remarkGfm]}>{ticket.review_report}</Markdown>
</div>
)}
{tab === "diff" && (
<>
{diffLoading ? (

View file

@ -1,4 +1,4 @@
import { useCallback, useEffect, useRef, useState } from "react";
import { Link, useParams } from "react-router-dom";
import { getProject, listProcessedTickets } from "../../lib/api";
import {
@ -8,6 +8,7 @@ import {
type TicketResourceConfig,
} from "../../lib/ticketResource";
import type { ProcessedTicket, Project } from "../../lib/types";
import { useLiveRefresh } from "../../lib/useLiveRefresh";
import TicketStatusBadge from "../ui/TicketStatusBadge";
import {
cardContentClass,
@ -24,37 +25,50 @@ export default function TicketList() {
DEFAULT_TICKET_RESOURCE_CONFIG
);
const [filter, setFilter] = useState<string>("all");
const latestProjectIdRef = useRef<string | undefined>(projectId);
useEffect(() => {
if (!projectId) return;
let cancelled = false;
Promise.all([getProject(projectId), listProcessedTickets(projectId)])
.then(([proj, tkts]) => {
if (cancelled) return;
setProject(proj);
setTickets(tkts);
})
.catch((error: unknown) => {
console.error("Failed to load ticket list", error);
});
void fetchTicketResourceConfig(projectId)
.then((config) => {
if (!cancelled) {
setResourceConfig(config);
}
})
.catch(() => {
if (!cancelled) {
setResourceConfig(DEFAULT_TICKET_RESOURCE_CONFIG);
}
});
return () => {
cancelled = true;
};
latestProjectIdRef.current = projectId;
}, [projectId]);
const loadData = useCallback(async () => {
if (!projectId) {
setProject(null);
setTickets([]);
setResourceConfig(DEFAULT_TICKET_RESOURCE_CONFIG);
return;
}
try {
const [proj, tkts, config] = await Promise.all([
getProject(projectId),
listProcessedTickets(projectId),
fetchTicketResourceConfig(projectId).catch(() => DEFAULT_TICKET_RESOURCE_CONFIG),
]);
if (latestProjectIdRef.current !== projectId) {
return;
}
setProject(proj);
setTickets(tkts);
setResourceConfig(config);
} catch (error: unknown) {
console.error("Failed to load ticket list", error);
}
}, [projectId]);
useEffect(() => {
void loadData();
}, [loadData]);
useLiveRefresh({
enabled: Boolean(projectId),
projectId,
refresh: loadData,
fallbackIntervalMs: 8_000,
});
const filtered = filter === "all" ? tickets : tickets.filter((t) => t.status === filter);
return (
@ -69,7 +83,7 @@ export default function TicketList() {
</div>
<div className="mb-4 flex gap-2">
{["all", "Pending", "Analyzing", "Developing", "Done", "Error"].map((s) => (
{["all", "Pending", "Analyzing", "Developing", "Reviewing", "Done", "NoFix", "Error"].map((s) => (
<button
key={s}
onClick={() => setFilter(s)}

View file

@ -35,6 +35,7 @@ export default function TrackerConfig() {
const [agents, setAgents] = useState<Agent[]>([]);
const [analystAgentId, setAnalystAgentId] = useState("");
const [developerAgentId, setDeveloperAgentId] = useState("");
const [reviewerAgentId, setReviewerAgentId] = useState("");
const [enabled, setEnabled] = useState(true);
const [trackerStatus, setTrackerStatus] = useState<"valid" | "invalid">("valid");
const [error, setError] = useState<string | null>(null);
@ -56,6 +57,7 @@ export default function TrackerConfig() {
const analystAgents = agents.filter((agent) => agent.role === "analyst");
const developerAgents = agents.filter((agent) => agent.role === "developer");
const reviewerAgents = agents.filter((agent) => agent.role === "reviewer");
useEffect(() => {
async function loadAgents() {
@ -78,7 +80,18 @@ export default function TrackerConfig() {
if (!developerAgentId && developerAgents.length > 0) {
setDeveloperAgentId(developerAgents[0].id);
}
}, [isEditing, analystAgentId, analystAgents, developerAgentId, developerAgents]);
if (!reviewerAgentId && reviewerAgents.length > 0) {
setReviewerAgentId(reviewerAgents[0].id);
}
}, [
isEditing,
analystAgentId,
analystAgents,
developerAgentId,
developerAgents,
reviewerAgentId,
reviewerAgents,
]);
useEffect(() => {
async function loadTrackerForEdit() {
@ -98,6 +111,7 @@ export default function TrackerConfig() {
setFilters(tracker.filters);
setAnalystAgentId(tracker.analyst_agent_id ?? "");
setDeveloperAgentId(tracker.developer_agent_id ?? "");
setReviewerAgentId(tracker.reviewer_agent_id ?? "");
setEnabled(tracker.enabled);
setTrackerStatus(tracker.status === "invalid" ? "invalid" : "valid");
@ -132,8 +146,8 @@ export default function TrackerConfig() {
async function handleSubmit(e: React.FormEvent) {
e.preventDefault();
if (!projectId || trackerId === "") return;
if (!analystAgentId || !developerAgentId) {
setError("Please select one analyst agent and one developer agent.");
if (!analystAgentId || !developerAgentId || !reviewerAgentId) {
setError("Please select one analyst agent, one developer agent and one reviewer agent.");
return;
}
@ -149,6 +163,7 @@ export default function TrackerConfig() {
pollingInterval,
analystAgentId,
developerAgentId,
reviewerAgentId,
filters,
enabled
);
@ -160,6 +175,7 @@ export default function TrackerConfig() {
pollingInterval,
analystAgentId,
developerAgentId,
reviewerAgentId,
filters
);
}
@ -182,9 +198,9 @@ export default function TrackerConfig() {
<div className="text-sm text-gray-500">Loading tracker...</div>
)}
{analystAgents.length === 0 || developerAgents.length === 0 ? (
{analystAgents.length === 0 || developerAgents.length === 0 || reviewerAgents.length === 0 ? (
<div className={noticeClass("warning")}>
You need at least one analyst agent and one developer agent before creating a tracker.
You need at least one analyst agent, one developer agent and one reviewer agent before creating a tracker.
</div>
) : null}
@ -316,6 +332,24 @@ export default function TrackerConfig() {
))}
</select>
</div>
<div>
<label className={labelClass}>
Reviewer agent
</label>
<select
value={reviewerAgentId}
onChange={(e) => setReviewerAgentId(e.target.value)}
required
className={inputClass}
>
<option value="">Select a reviewer agent</option>
{reviewerAgents.map((agent) => (
<option key={agent.id} value={agent.id}>
{agent.name} ({agent.tool === "codex" ? "Codex" : "Claude Code"})
</option>
))}
</select>
</div>
</div>
{error && (

View file

@ -39,6 +39,7 @@ export default function TrackerList({ trackers, projectId, onRefresh }: Props) {
tracker.polling_interval,
tracker.analyst_agent_id ?? "",
tracker.developer_agent_id ?? "",
tracker.reviewer_agent_id ?? "",
tracker.filters,
!tracker.enabled
);

View file

@ -4,7 +4,9 @@ const statusClasses: Record<string, string> = {
Pending: "bg-yellow-100 text-yellow-700",
Analyzing: "bg-blue-100 text-blue-700",
Developing: "bg-purple-100 text-purple-700",
Reviewing: "bg-indigo-100 text-indigo-700",
Done: "bg-green-100 text-green-700",
NoFix: "bg-emerald-100 text-emerald-700",
Error: "bg-red-100 text-red-700",
Cancelled: "bg-gray-100 text-gray-500",
};

View file

@ -24,3 +24,92 @@
cursor: not-allowed;
}
}
/* Shared typography for Markdown-rendered agent reports
   (analyst / developer / review tabs). */
.markdown-report {
  color: #111827;
  line-height: 1.6;
  font-size: 0.95rem;
}
/* Trim outer margins so the report sits flush inside its card padding. */
.markdown-report > :first-child {
  margin-top: 0;
}
.markdown-report > :last-child {
  margin-bottom: 0;
}
/* Headings: darker slate, tighter leading, consistent vertical rhythm. */
.markdown-report h1,
.markdown-report h2,
.markdown-report h3,
.markdown-report h4 {
  color: #0f172a;
  font-weight: 700;
  line-height: 1.3;
  margin-top: 1.25rem;
  margin-bottom: 0.65rem;
}
.markdown-report h1 {
  font-size: 1.35rem;
}
/* h2 doubles as a section divider via its bottom border. */
.markdown-report h2 {
  border-bottom: 1px solid #e5e7eb;
  font-size: 1.15rem;
  padding-bottom: 0.2rem;
}
.markdown-report h3 {
  font-size: 1rem;
}
/* pre-wrap keeps single newlines from the agent output visible. */
.markdown-report p {
  margin: 0.6rem 0;
  white-space: pre-wrap;
}
.markdown-report ul,
.markdown-report ol {
  margin: 0.55rem 0 0.8rem;
  padding-left: 1.3rem;
}
.markdown-report ul {
  list-style: disc;
}
.markdown-report ol {
  list-style: decimal;
}
.markdown-report li {
  margin: 0.25rem 0;
}
.markdown-report strong {
  color: #0f172a;
  font-weight: 700;
}
/* Inline code chips. */
.markdown-report code {
  background: #f3f4f6;
  border-radius: 0.3rem;
  font-size: 0.85em;
  padding: 0.1rem 0.3rem;
}
/* Fenced code blocks: dark panel, horizontal scroll for long lines. */
.markdown-report pre {
  background: #111827;
  border-radius: 0.5rem;
  color: #f9fafb;
  margin: 0.7rem 0;
  overflow-x: auto;
  padding: 0.75rem;
}
/* Undo the inline-code chip styling when code sits inside a pre block. */
.markdown-report pre code {
  background: transparent;
  color: inherit;
  padding: 0;
}

View file

@ -118,6 +118,7 @@ export async function setGraylogCredentials(
apiToken: string,
analystAgentId: string,
developerAgentId: string,
reviewerAgentId: string,
streamId: string | null,
queryFilter: string,
pollingIntervalMinutes: number,
@ -130,6 +131,7 @@ export async function setGraylogCredentials(
apiToken,
analystAgentId,
developerAgentId,
reviewerAgentId,
streamId,
queryFilter,
pollingIntervalMinutes,
@ -173,6 +175,7 @@ export async function addTracker(
pollingInterval: number,
analystAgentId: string,
developerAgentId: string,
reviewerAgentId: string,
filters: FilterGroup[]
): Promise<WatchedTracker> {
return invoke("add_tracker", {
@ -183,6 +186,7 @@ export async function addTracker(
pollingInterval,
analystAgentId,
developerAgentId,
reviewerAgentId,
filters,
},
});
@ -197,6 +201,7 @@ export async function updateTracker(
pollingInterval: number,
analystAgentId: string,
developerAgentId: string,
reviewerAgentId: string,
filters: FilterGroup[],
enabled: boolean
): Promise<void> {
@ -208,6 +213,7 @@ export async function updateTracker(
polling_interval: pollingInterval,
analyst_agent_id: analystAgentId,
developer_agent_id: developerAgentId,
reviewer_agent_id: reviewerAgentId,
filters,
enabled,
},
@ -241,9 +247,13 @@ export async function getRuntimeActivity(projectId: string): Promise<RuntimeActi
/** Fetch a ticket plus its associated worktree from the backend. */
export async function getTicketResult(ticketId: string): Promise<TicketResult> {
  return invoke("get_ticket_result", { ticketId });
}
/** Pipeline step that can be re-run individually for a ticket. */
export type TicketRetryStep = "analyst" | "developer" | "review";
/** Re-run the full processing pipeline for a ticket. */
export async function retryTicket(ticketId: string): Promise<void> {
  return invoke("retry_ticket", { ticketId });
}
/** Re-run the pipeline for a ticket starting at the given step. */
export async function retryTicketStep(ticketId: string, step: TicketRetryStep): Promise<void> {
  return invoke("retry_ticket_step", { ticketId, step });
}
/** Ask the backend to cancel a ticket's in-flight processing. */
export async function cancelTicket(ticketId: string): Promise<void> {
  return invoke("cancel_ticket", { ticketId });
}

View file

@ -14,7 +14,7 @@ export interface TuleapCredentialsSafe {
username: string;
}
/** Roles an agent can hold in the ticket pipeline. */
export type AgentRole = "analyst" | "developer" | "reviewer";
export type AgentTool = "codex" | "claude_code";
export type AgentRuntimeStatus = "available" | "exhausted";
@ -62,6 +62,7 @@ export interface WatchedTracker {
polling_interval: number;
analyst_agent_id: string | null;
developer_agent_id: string | null;
reviewer_agent_id: string | null;
filters: FilterGroup[];
enabled: boolean;
status: string;
@ -81,6 +82,7 @@ export interface ProcessedTicket {
status: string;
analyst_report: string | null;
developer_report: string | null;
review_report: string | null;
worktree_path: string | null;
branch_name: string | null;
detected_at: string;
@ -93,6 +95,7 @@ export interface GraylogCredentialsSafe {
base_url: string;
analyst_agent_id: string;
developer_agent_id: string;
reviewer_agent_id: string;
stream_id: string | null;
query_filter: string;
polling_interval_minutes: number;

222
src/lib/useLiveRefresh.ts Normal file
View file

@ -0,0 +1,222 @@
import { listen } from "@tauri-apps/api/event";
import { useCallback, useEffect, useRef } from "react";
/** Scopes a live-refresh subscription to a project and/or a ticket. */
interface Scope {
  projectId?: string | null;
  ticketId?: string | null;
}
/** Options accepted by the useLiveRefresh hook. */
interface UseLiveRefreshOptions extends Scope {
  // When false, no event listeners or fallback polling are installed.
  enabled?: boolean;
  // Callback invoked (debounced) whenever a relevant backend event arrives.
  refresh: () => Promise<void> | void;
  // Safety-net polling period; values <= 0 disable the fallback timer.
  fallbackIntervalMs?: number;
  // Delay used to coalesce bursts of events into a single refresh.
  debounceMs?: number;
}
/** Scope extracted from an incoming event payload. */
interface EventScope {
  projectId?: string;
  ticketId?: string;
}
/** A backend event name plus how to read its scope from the payload. */
interface EventDescriptor {
  name: string;
  extractScope: (payload: unknown) => EventScope;
}
// Backend events that should trigger a UI refresh. Each entry pairs the event
// name with a scope extractor, because payload shapes differ between polling
// events, ticket-processing events and notification envelopes.
const LIVE_EVENTS: EventDescriptor[] = [
  { name: "polling-started", extractScope: extractProjectScope },
  { name: "polling-finished", extractScope: extractProjectScope },
  { name: "polling-error", extractScope: extractProjectScope },
  { name: "new-tickets-detected", extractScope: extractProjectScope },
  { name: "ticket-processing-started", extractScope: extractTicketScope },
  { name: "ticket-processing-done", extractScope: extractTicketScope },
  { name: "ticket-processing-error", extractScope: extractTicketScope },
  { name: "graylog-polling-started", extractScope: extractProjectScope },
  { name: "graylog-polling-finished", extractScope: extractProjectScope },
  { name: "graylog-polling-error", extractScope: extractProjectScope },
  { name: "graylog-subject-triggered", extractScope: extractProjectScope },
  { name: "new-notification", extractScope: extractNotificationScope },
];
// Safety-net polling period used when the caller does not override it.
const DEFAULT_FALLBACK_INTERVAL_MS = 12_000;
// Default delay for coalescing event bursts into one refresh.
const DEFAULT_DEBOUNCE_MS = 300;
/** Narrow an unknown value to a plain object record, or null when it is not an object. */
function asRecord(value: unknown): Record<string, unknown> | null {
  const isObject = value !== null && typeof value === "object";
  return isObject ? (value as Record<string, unknown>) : null;
}
/** Return the value when it is a string; otherwise undefined. */
function asString(value: unknown): string | undefined {
  if (typeof value !== "string") {
    return undefined;
  }
  return value;
}
function extractProjectScope(payload: unknown): EventScope {
const raw = asRecord(payload);
return {
projectId: asString(raw?.project_id),
};
}
function extractTicketScope(payload: unknown): EventScope {
const raw = asRecord(payload);
return {
projectId: asString(raw?.project_id),
ticketId: asString(raw?.ticket_id),
};
}
/** Read scope from a notification event, whose payload nests a `notification` object. */
function extractNotificationScope(payload: unknown): EventScope {
  const envelope = asRecord(payload);
  const notification = asRecord(envelope?.notification);
  const projectId = asString(notification?.project_id);
  const ticketId = asString(notification?.ticket_id);
  return { projectId, ticketId };
}
/**
 * Decide whether an event's scope is relevant to the subscription's scope.
 * Ticket-scoped subscriptions match on ticket id, or fall back to a matching
 * project id; project-scoped subscriptions match on project id only; an
 * unscoped subscription matches every event.
 */
function matchesScope(scope: Scope, eventScope: EventScope): boolean {
  if (scope.ticketId) {
    const ticketMatches = eventScope.ticketId === scope.ticketId;
    const projectMatches =
      Boolean(scope.projectId) && eventScope.projectId === scope.projectId;
    return ticketMatches || projectMatches;
  }
  return scope.projectId ? eventScope.projectId === scope.projectId : true;
}
/**
 * Keeps a view's data fresh by re-running `refresh` whenever a relevant
 * backend event arrives, with debouncing, project/ticket scope filtering,
 * and a periodic fallback poll in case an event is missed.
 *
 * Behavior guarantees visible in this body:
 * - refreshes never overlap: a request arriving mid-flight is queued and
 *   replayed exactly once when the in-flight refresh settles;
 * - event bursts within `debounceMs` collapse into a single refresh;
 * - when `enabled` is false, no listeners are attached, no timers run,
 *   and scheduled/queued refreshes are no-ops.
 */
export function useLiveRefresh({
  enabled = true,
  refresh,
  projectId,
  ticketId,
  fallbackIntervalMs = DEFAULT_FALLBACK_INTERVAL_MS,
  debounceMs = DEFAULT_DEBOUNCE_MS,
}: UseLiveRefreshOptions): void {
  // Latest refresh callback, so long-lived listeners never call a stale closure.
  const refreshRef = useRef(refresh);
  // True while a refresh promise is outstanding.
  const inFlightRef = useRef(false);
  // Set when a refresh was requested mid-flight; triggers one follow-up run.
  const pendingRefreshRef = useRef(false);
  // Handle of the pending debounce timer, or null when none is scheduled.
  const timeoutRef = useRef<number | null>(null);
  useEffect(() => {
    refreshRef.current = refresh;
  }, [refresh]);
  // Runs the refresh immediately, collapsing concurrent requests into at most
  // one queued follow-up so refreshes never run in parallel.
  const runRefresh = useCallback(() => {
    if (!enabled) {
      return;
    }
    if (inFlightRef.current) {
      pendingRefreshRef.current = true;
      return;
    }
    inFlightRef.current = true;
    // Promise.resolve tolerates both sync and async `refresh` implementations.
    void Promise.resolve(refreshRef.current())
      .catch((error: unknown) => {
        console.error("Live refresh failed", error);
      })
      .finally(() => {
        inFlightRef.current = false;
        if (pendingRefreshRef.current) {
          pendingRefreshRef.current = false;
          // Replay exactly one refresh for everything queued while in flight.
          runRefresh();
        }
      });
  }, [enabled]);
  // Debounced entry point: restarting the timer on every call coalesces a
  // burst of events into a single refresh after `delayMs` of quiet.
  const scheduleRefresh = useCallback(
    (delayMs = debounceMs) => {
      if (!enabled) {
        return;
      }
      if (timeoutRef.current !== null) {
        window.clearTimeout(timeoutRef.current);
      }
      timeoutRef.current = window.setTimeout(() => {
        timeoutRef.current = null;
        runRefresh();
      }, delayMs);
    },
    [debounceMs, enabled, runRefresh]
  );
  // Subscribe to every live event and refresh when one matches our scope.
  useEffect(() => {
    if (!enabled) {
      return;
    }
    // `cancelled` guards against the async subscription resolving after this
    // effect has already been cleaned up (scope change or unmount).
    let cancelled = false;
    let unlistenFns: Array<() => void> = [];
    const setup = async () => {
      try {
        const listeners = await Promise.all(
          LIVE_EVENTS.map((descriptor) =>
            listen<unknown>(descriptor.name, (event) => {
              if (
                !matchesScope({ projectId, ticketId }, descriptor.extractScope(event.payload))
              ) {
                return;
              }
              scheduleRefresh();
            })
          )
        );
        if (cancelled) {
          // Cleanup already ran; detach the late-arriving listeners now.
          listeners.forEach((unlisten) => unlisten());
          return;
        }
        unlistenFns = listeners;
      } catch (error: unknown) {
        console.error("Failed to subscribe to live refresh events", error);
      }
    };
    void setup();
    return () => {
      cancelled = true;
      unlistenFns.forEach((unlisten) => unlisten());
    };
  }, [enabled, projectId, scheduleRefresh, ticketId]);
  // Fallback polling: refresh on a fixed interval so missed events (e.g. a
  // dropped subscription) only delay freshness, never lose it permanently.
  // A non-positive interval disables the fallback entirely.
  useEffect(() => {
    if (!enabled || fallbackIntervalMs <= 0) {
      return;
    }
    const intervalId = window.setInterval(() => {
      runRefresh();
    }, fallbackIntervalMs);
    return () => {
      window.clearInterval(intervalId);
    };
  }, [enabled, fallbackIntervalMs, runRefresh]);
  // Unmount-only cleanup for any debounce timer still pending.
  useEffect(
    () => () => {
      if (timeoutRef.current !== null) {
        window.clearTimeout(timeoutRef.current);
      }
    },
    []
  );
}