feat: add ecc2 harness metadata detection

This commit is contained in:
Affaan Mustafa 2026-04-10 07:46:46 -07:00
parent 9c525009d7
commit 29ff44e23e
5 changed files with 451 additions and 7 deletions

View File

@ -1150,8 +1150,13 @@ async fn main() -> Result<()> {
Some(Commands::Sessions) => { Some(Commands::Sessions) => {
sync_runtime_session_metrics(&db, &cfg)?; sync_runtime_session_metrics(&db, &cfg)?;
let sessions = session::manager::list_sessions(&db)?; let sessions = session::manager::list_sessions(&db)?;
let harnesses = db.list_session_harnesses().unwrap_or_default();
for s in sessions { for s in sessions {
println!("{} [{}] {}", s.id, s.state, s.task); let harness = harnesses
.get(&s.id)
.map(|info| info.primary.to_string())
.unwrap_or_else(|| "unknown".to_string());
println!("{} [{}] [{}] {}", s.id, s.state, harness, s.task);
} }
} }
Some(Commands::Status { session_id }) => { Some(Commands::Status { session_id }) => {

View File

@ -11,7 +11,7 @@ use super::runtime::capture_command_output;
use super::store::StateStore; use super::store::StateStore;
use super::{ use super::{
default_project_label, default_task_group_label, normalize_group_label, Session, default_project_label, default_task_group_label, normalize_group_label, Session,
SessionAgentProfile, SessionGrouping, SessionMetrics, SessionState, SessionAgentProfile, SessionGrouping, SessionHarnessInfo, SessionMetrics, SessionState,
}; };
use crate::comms::{self, MessageType}; use crate::comms::{self, MessageType};
use crate::config::Config; use crate::config::Config;
@ -116,6 +116,11 @@ pub fn get_status(db: &StateStore, id: &str) -> Result<SessionStatus> {
let session = resolve_session(db, id)?; let session = resolve_session(db, id)?;
let session_id = session.id.clone(); let session_id = session.id.clone();
Ok(SessionStatus { Ok(SessionStatus {
harness: db
.get_session_harness_info(&session_id)?
.unwrap_or_else(|| {
SessionHarnessInfo::detect(&session.agent_type, &session.working_dir)
}),
profile: db.get_session_profile(&session_id)?, profile: db.get_session_profile(&session_id)?,
session, session,
parent_session: db.latest_task_handoff_source(&session_id)?, parent_session: db.latest_task_handoff_source(&session_id)?,
@ -2670,6 +2675,7 @@ async fn kill_process(pid: u32) -> Result<()> {
} }
pub struct SessionStatus { pub struct SessionStatus {
harness: SessionHarnessInfo,
profile: Option<SessionAgentProfile>, profile: Option<SessionAgentProfile>,
session: Session, session: Session,
parent_session: Option<String>, parent_session: Option<String>,
@ -2962,6 +2968,8 @@ impl fmt::Display for SessionStatus {
writeln!(f, "Session: {}", s.id)?; writeln!(f, "Session: {}", s.id)?;
writeln!(f, "Task: {}", s.task)?; writeln!(f, "Task: {}", s.task)?;
writeln!(f, "Agent: {}", s.agent_type)?; writeln!(f, "Agent: {}", s.agent_type)?;
writeln!(f, "Harness: {}", self.harness.primary)?;
writeln!(f, "Detected: {}", self.harness.detected_summary())?;
writeln!(f, "State: {}", s.state)?; writeln!(f, "State: {}", s.state)?;
if let Some(profile) = self.profile.as_ref() { if let Some(profile) = self.profile.as_ref() {
writeln!(f, "Profile: {}", profile.profile_name)?; writeln!(f, "Profile: {}", profile.profile_name)?;

View File

@ -13,6 +13,139 @@ use std::path::PathBuf;
pub type SessionAgentProfile = crate::config::ResolvedAgentProfile; pub type SessionAgentProfile = crate::config::ResolvedAgentProfile;
/// The agent harness family a session runs under.
///
/// Serialized in snake_case both via serde and via [`Self::as_str`] /
/// [`Self::from_db_value`]; the two representations must stay in sync.
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum HarnessKind {
    /// Agent type did not match any known harness.
    #[default]
    Unknown,
    Claude,
    Codex,
    OpenCode,
    Gemini,
    Cursor,
    Kiro,
    Trae,
    Zed,
    FactoryDroid,
    Windsurf,
}

impl HarnessKind {
    /// Maps a session's agent type string to a harness kind.
    ///
    /// Matching is case-insensitive, ignores surrounding whitespace, and
    /// accepts common aliases (e.g. "claude-code", "gemini-cli").
    pub fn from_agent_type(agent_type: &str) -> Self {
        match agent_type.trim().to_ascii_lowercase().as_str() {
            "claude" | "claude-code" => Self::Claude,
            "codex" => Self::Codex,
            "opencode" => Self::OpenCode,
            "gemini" | "gemini-cli" => Self::Gemini,
            "cursor" => Self::Cursor,
            "kiro" => Self::Kiro,
            "trae" => Self::Trae,
            "zed" => Self::Zed,
            "factory-droid" | "factory_droid" | "factorydroid" => Self::FactoryDroid,
            "windsurf" => Self::Windsurf,
            _ => Self::Unknown,
        }
    }

    /// Parses the canonical value persisted in the database.
    ///
    /// Delegates to [`Self::from_agent_type`], whose match arms are a strict
    /// superset of every canonical [`Self::as_str`] value. Delegating keeps
    /// the two parsers from drifting apart and remains backward compatible
    /// with all previously stored values (it is only more permissive).
    pub fn from_db_value(value: &str) -> Self {
        Self::from_agent_type(value)
    }

    /// Canonical snake_case name, used for persistence and display.
    pub fn as_str(self) -> &'static str {
        match self {
            Self::Unknown => "unknown",
            Self::Claude => "claude",
            Self::Codex => "codex",
            Self::OpenCode => "opencode",
            Self::Gemini => "gemini",
            Self::Cursor => "cursor",
            Self::Kiro => "kiro",
            Self::Trae => "trae",
            Self::Zed => "zed",
            Self::FactoryDroid => "factory_droid",
            Self::Windsurf => "windsurf",
        }
    }

    /// Directory names that, when present in a project's working directory,
    /// indicate this harness is configured for the project. Harnesses with no
    /// known per-project marker directory return an empty slice.
    fn project_markers(self) -> &'static [&'static str] {
        match self {
            Self::Claude => &[".claude"],
            Self::Codex => &[".codex", ".codex-plugin"],
            Self::OpenCode => &[".opencode"],
            Self::Gemini => &[".gemini"],
            Self::Cursor => &[".cursor"],
            Self::Kiro => &[".kiro"],
            Self::Trae => &[".trae"],
            Self::Unknown | Self::Zed | Self::FactoryDroid | Self::Windsurf => &[],
        }
    }
}

impl fmt::Display for HarnessKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
/// Harness metadata for a session: the primary harness plus every harness
/// whose per-project marker directory was found in the working directory.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SessionHarnessInfo {
    pub primary: HarnessKind,
    pub detected: Vec<HarnessKind>,
}

impl SessionHarnessInfo {
    /// Detects harness metadata for a session.
    ///
    /// `detected` lists, in a fixed candidate order, every harness with at
    /// least one marker directory under `working_dir`. `primary` is taken
    /// from the agent type when it maps to a known harness; otherwise the
    /// first detected harness wins, falling back to `Unknown`.
    pub fn detect(agent_type: &str, working_dir: &Path) -> Self {
        // Only harnesses with project markers are worth probing for.
        const CANDIDATES: [HarnessKind; 7] = [
            HarnessKind::Claude,
            HarnessKind::Codex,
            HarnessKind::OpenCode,
            HarnessKind::Gemini,
            HarnessKind::Cursor,
            HarnessKind::Kiro,
            HarnessKind::Trae,
        ];

        let mut detected = Vec::new();
        for candidate in CANDIDATES {
            let marker_present = candidate
                .project_markers()
                .iter()
                .any(|marker| working_dir.join(marker).exists());
            if marker_present {
                detected.push(candidate);
            }
        }

        let mut primary = HarnessKind::from_agent_type(agent_type);
        if primary == HarnessKind::Unknown {
            if let Some(&first) = detected.first() {
                primary = first;
            }
        }

        Self { primary, detected }
    }

    /// Human-readable, comma-separated list of detected harnesses, or
    /// "none detected" when the list is empty.
    pub fn detected_summary(&self) -> String {
        match self.detected.as_slice() {
            [] => "none detected".to_string(),
            kinds => {
                let names: Vec<String> = kinds.iter().map(HarnessKind::to_string).collect();
                names.join(", ")
            }
        }
    }
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Session { pub struct Session {
pub id: String, pub id: String,
@ -315,3 +448,61 @@ pub struct SessionGrouping {
pub project: Option<String>, pub project: Option<String>,
pub task_group: Option<String>, pub task_group: Option<String>,
} }
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;

    // Self-cleaning temporary directory: unique path per instance, removed
    // on drop so tests leave no residue.
    struct TestDir {
        // Absolute path of the created directory.
        path: PathBuf,
    }
    impl TestDir {
        // Creates a fresh directory under the OS temp dir; the uuid suffix
        // keeps concurrent or repeated test runs from colliding.
        fn new(label: &str) -> Result<Self, Box<dyn std::error::Error>> {
            let path =
                std::env::temp_dir().join(format!("ecc2-{}-{}", label, uuid::Uuid::new_v4()));
            fs::create_dir_all(&path)?;
            Ok(Self { path })
        }
        fn path(&self) -> &Path {
            &self.path
        }
    }
    impl Drop for TestDir {
        fn drop(&mut self) {
            // Best-effort cleanup; failures are ignored since the OS temp dir
            // is eventually purged anyway.
            let _ = fs::remove_dir_all(&self.path);
        }
    }

    // An agent type that maps to a known harness wins as `primary`, while
    // `detected` still lists every marker directory found, in the fixed
    // candidate order (Claude before Codex).
    #[test]
    fn detect_session_harness_prefers_agent_type_and_collects_project_markers(
    ) -> Result<(), Box<dyn std::error::Error>> {
        let repo = TestDir::new("session-harness-detect")?;
        fs::create_dir_all(repo.path().join(".codex"))?;
        fs::create_dir_all(repo.path().join(".claude"))?;
        let harness = SessionHarnessInfo::detect("claude", repo.path());
        assert_eq!(harness.primary, HarnessKind::Claude);
        assert_eq!(
            harness.detected,
            vec![HarnessKind::Claude, HarnessKind::Codex]
        );
        assert_eq!(harness.detected_summary(), "claude, codex");
        Ok(())
    }

    // An unrecognized agent type falls back to the first detected marker.
    #[test]
    fn detect_session_harness_falls_back_to_project_markers_for_unknown_agent(
    ) -> Result<(), Box<dyn std::error::Error>> {
        let repo = TestDir::new("session-harness-markers")?;
        fs::create_dir_all(repo.path().join(".gemini"))?;
        let harness = SessionHarnessInfo::detect("custom-runner", repo.path());
        assert_eq!(harness.primary, HarnessKind::Gemini);
        assert_eq!(harness.detected, vec![HarnessKind::Gemini]);
        Ok(())
    }
}

View File

@ -16,8 +16,9 @@ use super::{
default_project_label, default_task_group_label, normalize_group_label, default_project_label, default_task_group_label, normalize_group_label,
ContextGraphCompactionStats, ContextGraphEntity, ContextGraphEntityDetail, ContextGraphCompactionStats, ContextGraphEntity, ContextGraphEntityDetail,
ContextGraphObservation, ContextGraphRecallEntry, ContextGraphRelation, ContextGraphSyncStats, ContextGraphObservation, ContextGraphRecallEntry, ContextGraphRelation, ContextGraphSyncStats,
ContextObservationPriority, DecisionLogEntry, FileActivityAction, FileActivityEntry, Session, ContextObservationPriority, DecisionLogEntry, FileActivityAction, FileActivityEntry,
SessionAgentProfile, SessionMessage, SessionMetrics, SessionState, WorktreeInfo, HarnessKind, Session, SessionAgentProfile, SessionHarnessInfo, SessionMessage, SessionMetrics,
SessionState, WorktreeInfo,
}; };
pub struct StateStore { pub struct StateStore {
@ -171,6 +172,8 @@ impl StateStore {
project TEXT NOT NULL DEFAULT '', project TEXT NOT NULL DEFAULT '',
task_group TEXT NOT NULL DEFAULT '', task_group TEXT NOT NULL DEFAULT '',
agent_type TEXT NOT NULL, agent_type TEXT NOT NULL,
harness TEXT NOT NULL DEFAULT 'unknown',
detected_harnesses_json TEXT NOT NULL DEFAULT '[]',
working_dir TEXT NOT NULL DEFAULT '.', working_dir TEXT NOT NULL DEFAULT '.',
state TEXT NOT NULL DEFAULT 'pending', state TEXT NOT NULL DEFAULT 'pending',
pid INTEGER, pid INTEGER,
@ -399,6 +402,24 @@ impl StateStore {
.context("Failed to add task_group column to sessions table")?; .context("Failed to add task_group column to sessions table")?;
} }
if !self.has_column("sessions", "harness")? {
self.conn
.execute(
"ALTER TABLE sessions ADD COLUMN harness TEXT NOT NULL DEFAULT 'unknown'",
[],
)
.context("Failed to add harness column to sessions table")?;
}
if !self.has_column("sessions", "detected_harnesses_json")? {
self.conn
.execute(
"ALTER TABLE sessions ADD COLUMN detected_harnesses_json TEXT NOT NULL DEFAULT '[]'",
[],
)
.context("Failed to add detected_harnesses_json column to sessions table")?;
}
if !self.has_column("sessions", "input_tokens")? { if !self.has_column("sessions", "input_tokens")? {
self.conn self.conn
.execute( .execute(
@ -624,6 +645,8 @@ impl StateStore {
WHERE hook_event_id IS NOT NULL;", WHERE hook_event_id IS NOT NULL;",
)?; )?;
self.backfill_session_harnesses()?;
Ok(()) Ok(())
} }
@ -637,16 +660,51 @@ impl StateStore {
Ok(columns.iter().any(|existing| existing == column)) Ok(columns.iter().any(|existing| existing == column))
} }
fn backfill_session_harnesses(&self) -> Result<()> {
let mut stmt = self
.conn
.prepare("SELECT id, agent_type, working_dir FROM sessions")?;
let updates = stmt
.query_map([], |row| {
Ok((
row.get::<_, String>(0)?,
row.get::<_, String>(1)?,
row.get::<_, String>(2)?,
))
})?
.collect::<std::result::Result<Vec<_>, _>>()?;
for (session_id, agent_type, working_dir) in updates {
let harness = SessionHarnessInfo::detect(&agent_type, Path::new(&working_dir));
let detected_json =
serde_json::to_string(&harness.detected).context("serialize detected harnesses")?;
self.conn.execute(
"UPDATE sessions
SET harness = ?2,
detected_harnesses_json = ?3
WHERE id = ?1",
rusqlite::params![session_id, harness.primary.to_string(), detected_json],
)?;
}
Ok(())
}
pub fn insert_session(&self, session: &Session) -> Result<()> { pub fn insert_session(&self, session: &Session) -> Result<()> {
let harness = SessionHarnessInfo::detect(&session.agent_type, &session.working_dir);
let detected_json =
serde_json::to_string(&harness.detected).context("serialize detected harnesses")?;
self.conn.execute( self.conn.execute(
"INSERT INTO sessions (id, task, project, task_group, agent_type, working_dir, state, pid, worktree_path, worktree_branch, worktree_base, created_at, updated_at, last_heartbeat_at) "INSERT INTO sessions (id, task, project, task_group, agent_type, harness, detected_harnesses_json, working_dir, state, pid, worktree_path, worktree_branch, worktree_base, created_at, updated_at, last_heartbeat_at)
VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14)", VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, ?12, ?13, ?14, ?15, ?16)",
rusqlite::params![ rusqlite::params![
session.id, session.id,
session.task, session.task,
session.project, session.project,
session.task_group, session.task_group,
session.agent_type, session.agent_type,
harness.primary.to_string(),
detected_json,
session.working_dir.to_string_lossy().to_string(), session.working_dir.to_string_lossy().to_string(),
session.state.to_string(), session.state.to_string(),
session.pid.map(i64::from), session.pid.map(i64::from),
@ -1553,6 +1611,55 @@ impl StateStore {
Ok(sessions) Ok(sessions)
} }
/// Returns harness metadata for every session, keyed by session id.
///
/// Rows whose stored metadata is still at the schema defaults (`unknown`
/// primary and empty detected list) fall back to live detection against the
/// stored working directory, so pre-migration rows still yield useful data.
pub fn list_session_harnesses(&self) -> Result<HashMap<String, SessionHarnessInfo>> {
    let mut stmt = self.conn.prepare(
        "SELECT id, harness, detected_harnesses_json, agent_type, working_dir FROM sessions",
    )?;
    let harnesses = stmt
        .query_map([], |row| {
            let session_id: String = row.get(0)?;
            let primary = HarnessKind::from_db_value(&row.get::<_, String>(1)?);
            // Malformed JSON degrades to "nothing detected" rather than
            // failing the whole listing.
            let detected = serde_json::from_str::<Vec<HarnessKind>>(&row.get::<_, String>(2)?)
                .unwrap_or_default();
            let agent_type: String = row.get(3)?;
            let working_dir = PathBuf::from(row.get::<_, String>(4)?);
            let info = if primary == HarnessKind::Unknown && detected.is_empty() {
                SessionHarnessInfo::detect(&agent_type, &working_dir)
            } else {
                SessionHarnessInfo { primary, detected }
            };
            Ok((session_id, info))
        })?
        .collect::<std::result::Result<HashMap<_, _>, _>>()?;
    Ok(harnesses)
}
/// Returns harness metadata for a single session, or `None` when no row
/// with `session_id` exists.
///
/// Rows still holding the schema defaults (`unknown` primary, empty detected
/// list) fall back to live detection against the stored working directory.
pub fn get_session_harness_info(&self, session_id: &str) -> Result<Option<SessionHarnessInfo>> {
    let mut stmt = self.conn.prepare(
        "SELECT harness, detected_harnesses_json, agent_type, working_dir
         FROM sessions
         WHERE id = ?1",
    )?;
    stmt.query_row([session_id], |row| {
        let primary = HarnessKind::from_db_value(&row.get::<_, String>(0)?);
        // Malformed JSON degrades to "nothing detected" instead of an error.
        let detected = serde_json::from_str::<Vec<HarnessKind>>(&row.get::<_, String>(1)?)
            .unwrap_or_default();
        let agent_type: String = row.get(2)?;
        let working_dir = PathBuf::from(row.get::<_, String>(3)?);
        let info = if primary == HarnessKind::Unknown && detected.is_empty() {
            SessionHarnessInfo::detect(&agent_type, &working_dir)
        } else {
            SessionHarnessInfo { primary, detected }
        };
        Ok(info)
    })
    // `.optional()` converts the no-rows case into `Ok(None)`.
    .optional()
    .map_err(Into::into)
}
pub fn get_latest_session(&self) -> Result<Option<Session>> { pub fn get_latest_session(&self) -> Result<Option<Session>> {
Ok(self.list_sessions()?.into_iter().next()) Ok(self.list_sessions()?.into_iter().next())
} }
@ -3800,12 +3907,83 @@ mod tests {
assert!(column_names.iter().any(|column| column == "pid")); assert!(column_names.iter().any(|column| column == "pid"));
assert!(column_names.iter().any(|column| column == "input_tokens")); assert!(column_names.iter().any(|column| column == "input_tokens"));
assert!(column_names.iter().any(|column| column == "output_tokens")); assert!(column_names.iter().any(|column| column == "output_tokens"));
assert!(column_names.iter().any(|column| column == "harness"));
assert!(column_names
.iter()
.any(|column| column == "detected_harnesses_json"));
assert!(column_names assert!(column_names
.iter() .iter()
.any(|column| column == "last_heartbeat_at")); .any(|column| column == "last_heartbeat_at"));
Ok(()) Ok(())
} }
// Simulates a database created before the harness columns existed: the
// sessions table is built by hand without them, one row is inserted, and then
// `StateStore::open` must both add the columns and backfill harness metadata
// for that legacy row from its agent type and working directory.
#[test]
fn open_backfills_session_harness_metadata_for_legacy_rows() -> Result<()> {
    let tempdir = TestDir::new("store-harness-backfill")?;
    let repo_root = tempdir.path().join("repo");
    // A `.codex` marker in the repo is what the backfill should detect.
    fs::create_dir_all(repo_root.join(".codex"))?;
    let db_path = tempdir.path().join("state.db");
    let conn = Connection::open(&db_path)?;
    // Legacy schema: no `harness` / `detected_harnesses_json` columns.
    conn.execute_batch(
        "
        CREATE TABLE sessions (
            id TEXT PRIMARY KEY,
            task TEXT NOT NULL,
            project TEXT NOT NULL DEFAULT '',
            task_group TEXT NOT NULL DEFAULT '',
            agent_type TEXT NOT NULL,
            working_dir TEXT NOT NULL DEFAULT '.',
            state TEXT NOT NULL DEFAULT 'pending',
            pid INTEGER,
            worktree_path TEXT,
            worktree_branch TEXT,
            worktree_base TEXT,
            input_tokens INTEGER DEFAULT 0,
            output_tokens INTEGER DEFAULT 0,
            tokens_used INTEGER DEFAULT 0,
            tool_calls INTEGER DEFAULT 0,
            files_changed INTEGER DEFAULT 0,
            duration_secs INTEGER DEFAULT 0,
            cost_usd REAL DEFAULT 0.0,
            created_at TEXT NOT NULL,
            updated_at TEXT NOT NULL,
            last_heartbeat_at TEXT NOT NULL
        );
        ",
    )?;
    let now = Utc::now().to_rfc3339();
    conn.execute(
        "INSERT INTO sessions (
            id, task, project, task_group, agent_type, working_dir, state, pid,
            worktree_path, worktree_branch, worktree_base, input_tokens, output_tokens,
            tokens_used, tool_calls, files_changed, duration_secs, cost_usd, created_at,
            updated_at, last_heartbeat_at
        ) VALUES (
            ?1, ?2, ?3, ?4, ?5, ?6, 'pending', NULL,
            NULL, NULL, NULL, 0, 0, 0, 0, 0, 0, 0.0, ?7, ?7, ?7
        )",
        rusqlite::params![
            "sess-legacy",
            "Backfill harness metadata",
            "ecc",
            "legacy",
            "claude",
            repo_root.display().to_string(),
            now,
        ],
    )?;
    // Release the raw connection so `open` can take over the database file.
    drop(conn);
    let db = StateStore::open(&db_path)?;
    let harness = db
        .get_session_harness_info("sess-legacy")?
        .expect("legacy row should be backfilled");
    // Agent type "claude" sets the primary; the `.codex` marker is detected.
    assert_eq!(harness.primary, HarnessKind::Claude);
    assert_eq!(harness.detected, vec![HarnessKind::Codex]);
    Ok(())
}
#[test] #[test]
fn session_profile_round_trips_with_launch_settings() -> Result<()> { fn session_profile_round_trips_with_launch_settings() -> Result<()> {
let tempdir = TestDir::new("store-session-profile")?; let tempdir = TestDir::new("store-session-profile")?;

View File

@ -24,7 +24,7 @@ use crate::session::output::{
use crate::session::store::{DaemonActivity, FileActivityOverlap, StateStore}; use crate::session::store::{DaemonActivity, FileActivityOverlap, StateStore};
use crate::session::{ use crate::session::{
ContextObservationPriority, DecisionLogEntry, FileActivityEntry, Session, SessionGrouping, ContextObservationPriority, DecisionLogEntry, FileActivityEntry, Session, SessionGrouping,
SessionMessage, SessionState, SessionHarnessInfo, SessionMessage, SessionState,
}; };
use crate::worktree; use crate::worktree;
@ -87,6 +87,7 @@ pub struct Dashboard {
notifier: DesktopNotifier, notifier: DesktopNotifier,
webhook_notifier: WebhookNotifier, webhook_notifier: WebhookNotifier,
sessions: Vec<Session>, sessions: Vec<Session>,
session_harnesses: HashMap<String, SessionHarnessInfo>,
session_output_cache: HashMap<String, Vec<OutputLine>>, session_output_cache: HashMap<String, Vec<OutputLine>>,
unread_message_counts: HashMap<String, usize>, unread_message_counts: HashMap<String, usize>,
approval_queue_counts: HashMap<String, usize>, approval_queue_counts: HashMap<String, usize>,
@ -497,6 +498,7 @@ impl Dashboard {
let _ = db.sync_tool_activity_metrics(&cfg.tool_activity_metrics_path()); let _ = db.sync_tool_activity_metrics(&cfg.tool_activity_metrics_path());
} }
let sessions = db.list_sessions().unwrap_or_default(); let sessions = db.list_sessions().unwrap_or_default();
let session_harnesses = db.list_session_harnesses().unwrap_or_default();
let initial_session_states = sessions let initial_session_states = sessions
.iter() .iter()
.map(|session| (session.id.clone(), session.state.clone())) .map(|session| (session.id.clone(), session.state.clone()))
@ -522,6 +524,7 @@ impl Dashboard {
notifier, notifier,
webhook_notifier, webhook_notifier,
sessions, sessions,
session_harnesses,
session_output_cache: HashMap::new(), session_output_cache: HashMap::new(),
unread_message_counts: HashMap::new(), unread_message_counts: HashMap::new(),
approval_queue_counts: HashMap::new(), approval_queue_counts: HashMap::new(),
@ -4035,6 +4038,13 @@ impl Dashboard {
Vec::new() Vec::new()
} }
}; };
self.session_harnesses = match self.db.list_session_harnesses() {
Ok(harnesses) => harnesses,
Err(error) => {
tracing::warn!("Failed to refresh session harnesses: {error}");
HashMap::new()
}
};
self.unread_message_counts = match self.db.unread_message_counts() { self.unread_message_counts = match self.db.unread_message_counts() {
Ok(counts) => counts, Ok(counts) => counts,
Err(error) => { Err(error) => {
@ -6332,6 +6342,14 @@ impl Dashboard {
} }
} }
if let Some(harness) = self.session_harnesses.get(&session.id) {
lines.push(format!(
"Harness {} | Detected {}",
harness.primary,
harness.detected_summary()
));
}
lines.push(format!( lines.push(format!(
"Tokens {} total | In {} | Out {}", "Tokens {} total | In {} | Out {}",
format_token_count(metrics.tokens_used), format_token_count(metrics.tokens_used),
@ -12281,6 +12299,40 @@ diff --git a/src/lib.rs b/src/lib.rs
Ok(()) Ok(())
} }
// The per-session metrics panel should surface harness metadata (primary plus
// detected summary) for the selected session when the dashboard has it.
// NOTE(review): the temp directory is leaked if an assertion fails before the
// final remove_dir_all — an RAII guard (like TestDir elsewhere) would be safer.
#[test]
fn selected_session_metrics_text_includes_harness_summary() -> Result<()> {
    let tempdir = std::env::temp_dir().join(format!(
        "ecc2-dashboard-harness-metrics-{}",
        uuid::Uuid::new_v4()
    ));
    // Two markers so the detected summary exercises the comma-joined form.
    fs::create_dir_all(tempdir.join(".claude"))?;
    fs::create_dir_all(tempdir.join(".codex"))?;
    let now = Utc::now();
    let session = Session {
        id: "sess-harness".to_string(),
        task: "Map harness metadata".to_string(),
        project: "ecc".to_string(),
        task_group: "compat".to_string(),
        agent_type: "claude".to_string(),
        working_dir: tempdir.clone(),
        state: SessionState::Running,
        pid: Some(4242),
        worktree: None,
        created_at: now - Duration::minutes(3),
        updated_at: now - Duration::minutes(1),
        last_heartbeat_at: now - Duration::minutes(1),
        metrics: SessionMetrics::default(),
    };
    let dashboard = test_dashboard(vec![session], 0);
    let metrics_text = dashboard.selected_session_metrics_text();
    // Primary from the agent type; detected list in candidate order.
    assert!(metrics_text.contains("Harness claude | Detected claude, codex"));
    let _ = fs::remove_dir_all(tempdir);
    Ok(())
}
#[test] #[test]
fn new_session_task_uses_selected_session_context() { fn new_session_task_uses_selected_session_context() {
let dashboard = test_dashboard( let dashboard = test_dashboard(
@ -14429,6 +14481,15 @@ diff --git a/src/lib.rs b/src/lib.rs
.iter() .iter()
.map(|session| (session.id.clone(), session.state.clone())) .map(|session| (session.id.clone(), session.state.clone()))
.collect(); .collect();
let session_harnesses = sessions
.iter()
.map(|session| {
(
session.id.clone(),
SessionHarnessInfo::detect(&session.agent_type, &session.working_dir),
)
})
.collect();
let output_store = SessionOutputStore::default(); let output_store = SessionOutputStore::default();
let output_rx = output_store.subscribe(); let output_rx = output_store.subscribe();
let mut session_table_state = TableState::default(); let mut session_table_state = TableState::default();
@ -14445,6 +14506,7 @@ diff --git a/src/lib.rs b/src/lib.rs
notifier, notifier,
webhook_notifier, webhook_notifier,
sessions, sessions,
session_harnesses,
session_output_cache: HashMap::new(), session_output_cache: HashMap::new(),
unread_message_counts: HashMap::new(), unread_message_counts: HashMap::new(),
approval_queue_counts: HashMap::new(), approval_queue_counts: HashMap::new(),