From ed823f34a3ef75922db21a073767cf5fe1d346ee Mon Sep 17 00:00:00 2001
From: James Peter
Date: Fri, 31 Oct 2025 05:54:47 +1000
Subject: [PATCH 1/3] feat(core): include current date in environment context (#5965)

- add optional `current_date` field to `EnvironmentContext` so turn context carries ISO dates
- serialize `<current_date>` tags and extend comparisons/tests to tolerate deterministic values
- cover default date formatting with new unit tests
---
 COMMIT_MESSAGE_ISSUE_5965.txt           |  5 +++
 PR_BODY_ISSUE_5965.md                   | 12 ++++++
 code-rs/core/src/environment_context.rs | 54 +++++++++++++++++++++++++
 3 files changed, 71 insertions(+)
 create mode 100644 COMMIT_MESSAGE_ISSUE_5965.txt
 create mode 100644 PR_BODY_ISSUE_5965.md

diff --git a/COMMIT_MESSAGE_ISSUE_5965.txt b/COMMIT_MESSAGE_ISSUE_5965.txt
new file mode 100644
index 00000000000..0807759bbe4
--- /dev/null
+++ b/COMMIT_MESSAGE_ISSUE_5965.txt
@@ -0,0 +1,5 @@
+feat(core): include current date in environment context (#5965)
+
+- add optional `current_date` field to `EnvironmentContext` so turn context carries ISO dates
+- serialize `<current_date>` tags and extend comparisons/tests to tolerate deterministic values
+- cover default date formatting with new unit tests
diff --git a/PR_BODY_ISSUE_5965.md b/PR_BODY_ISSUE_5965.md
new file mode 100644
index 00000000000..606a77e1517
--- /dev/null
+++ b/PR_BODY_ISSUE_5965.md
@@ -0,0 +1,12 @@
+## Summary
+- inject the current local date into `EnvironmentContext` so every turn shares an ISO8601 `<current_date>` tag with the model
+- extend the serializer/equality helpers to account for the new field while keeping comparisons deterministic in tests
+- add lightweight unit coverage to lock the XML output and default date format
+
+## Testing
+- ./build-fast.sh
+
+## Acceptance Criteria
+- environment context payloads now surface a `<current_date>` element in YYYY-MM-DD form
+- existing comparisons that ignore shell differences remain stable once the date is normalized
+- unit tests document the new field and its formatting so regressions are caught automatically
diff --git a/code-rs/core/src/environment_context.rs b/code-rs/core/src/environment_context.rs
index 93110b64e92..90495aa4384 100644
--- a/code-rs/core/src/environment_context.rs
+++ b/code-rs/core/src/environment_context.rs
@@ -1,3 +1,4 @@
+use chrono::Local;
 use os_info::Type as OsType;
 use os_info::Version;
 use serde::Deserialize;
@@ -33,6 +34,7 @@ pub(crate) struct EnvironmentContext {
     pub operating_system: Option<OperatingSystemInfo>,
     pub common_tools: Option<Vec<String>>,
     pub shell: Option<Shell>,
+    pub current_date: Option<String>,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -145,6 +147,7 @@ impl EnvironmentContext {
             operating_system: detect_operating_system_info(),
             common_tools: detect_common_tools(),
             shell,
+            current_date: Some(Local::now().format("%Y-%m-%d").to_string()),
         }
     }
 
@@ -161,6 +164,7 @@ impl EnvironmentContext {
             writable_roots,
             operating_system,
             common_tools,
+            current_date,
             // should compare all fields except shell
             shell: _,
         } = other;
@@ -172,6 +176,7 @@ impl EnvironmentContext {
             && self.writable_roots == *writable_roots
             && self.operating_system == *operating_system
             && self.common_tools == *common_tools
+            && self.current_date == *current_date
     }
 }
 
@@ -249,6 +254,9 @@ impl EnvironmentContext {
                 lines.push("  ".to_string());
             }
         }
+        if let Some(current_date) = self.current_date {
+            lines.push(format!("  <current_date>{current_date}</current_date>"));
+        }
         if let Some(shell) = self.shell
             && let Some(shell_name) = shell.name()
         {
@@ -360,6 +368,7 @@ mod tests {
         );
         context.operating_system = None;
         context.common_tools = None;
+        context.current_date = Some("2025-01-02".to_string());
 
         let expected = r#"
   /repo
@@ -370,6 +379,7 @@ mod tests {
     /repo
     /tmp
+  <current_date>2025-01-02</current_date>
 "#;
         assert_eq!(context.serialize_to_xml(), expected);
@@ -385,11 +395,13 @@ mod tests {
         );
         context.operating_system = None;
         context.common_tools = None;
+        context.current_date = Some("2025-01-02".to_string());
 
         let expected = r#"
   never
   read-only
   restricted
+  <current_date>2025-01-02</current_date>
 "#;
         assert_eq!(context.serialize_to_xml(), expected);
@@ -405,11 +417,13 @@ mod tests {
         );
         context.operating_system = None;
         context.common_tools = None;
+        context.current_date = Some("2025-01-02".to_string());
 
         let expected = r#"
   on-failure
   danger-full-access
   enabled
+  <current_date>2025-01-02</current_date>
 "#;
         assert_eq!(context.serialize_to_xml(), expected);
@@ -429,6 +443,7 @@ mod tests {
             architecture: Some("aarch64".to_string()),
         });
         context.common_tools = Some(vec!["rg".to_string(), "git".to_string()]);
+        context.current_date = Some("2025-01-02".to_string());
 
         let xml = context.serialize_to_xml();
         assert!(xml.contains(""));
@@ -438,6 +453,7 @@ mod tests {
         assert!(xml.contains(""));
         assert!(xml.contains("rg"));
         assert!(xml.contains("git"));
+        assert!(xml.contains("<current_date>2025-01-02</current_date>"));
     }
 
     #[test]
@@ -455,6 +471,12 @@ mod tests {
             Some(workspace_write_policy(vec!["/repo"], true)),
             None,
         );
+        // ensure current_date doesn't influence this comparison
+        let fixed_date = Some("2025-01-02".to_string());
+        let mut context1 = context1;
+        context1.current_date = fixed_date.clone();
+        let mut context2 = context2;
+        context2.current_date = fixed_date;
 
         assert!(!context1.equals_except_shell(&context2));
     }
@@ -472,6 +494,10 @@ mod tests {
             Some(SandboxPolicy::new_workspace_write_policy()),
             None,
         );
+        let mut context1 = context1;
+        context1.current_date = Some("2025-01-02".to_string());
+        let mut context2 = context2;
+        context2.current_date = Some("2025-01-02".to_string());
 
         assert!(!context1.equals_except_shell(&context2));
     }
@@ -490,6 +516,10 @@ mod tests {
             Some(workspace_write_policy(vec!["/repo", "/tmp"], true)),
             None,
        );
+        let mut context1 = context1;
+        context1.current_date = Some("2025-01-02".to_string());
+        let mut context2 = context2;
+        context2.current_date = Some("2025-01-02".to_string());
 
         assert!(!context1.equals_except_shell(&context2));
     }
@@ -514,7 +544,31 @@ mod tests {
                 zshrc_path: "/home/user/.zshrc".into(),
             })),
         );
+        let mut context1 = context1;
+        context1.current_date = Some("2025-01-02".to_string());
+        let mut context2 = context2;
+        context2.current_date = Some("2025-01-02".to_string());
 
         assert!(context1.equals_except_shell(&context2));
     }
+
+    #[test]
+    fn serialize_environment_context_includes_current_date() {
+        let mut context = EnvironmentContext::new(None, None, None, None);
+        context.current_date = Some("2025-01-02".to_string());
+
+        let xml = context.serialize_to_xml();
+        assert!(xml.contains("<current_date>2025-01-02</current_date>"));
+    }
+
+    #[test]
+    fn current_date_format_is_iso8601() {
+        let context = EnvironmentContext::new(None, None, None, None);
+        let date = context
+            .current_date
+            .expect("current_date should be populated");
+        assert_eq!(date.len(), 10);
+        assert_eq!(date.chars().nth(4), Some('-'));
+        assert_eq!(date.chars().nth(7), Some('-'));
+    }
 }
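For reference, a minimal standalone sketch of the date formatting this patch relies on (not part of the diff). It assumes the `chrono` crate; the `render_current_date_tag` helper name and the exact tag layout are illustrative rather than taken verbatim from the codebase.

```rust
use chrono::Local;

// Illustrative helper mirroring what the serializer above emits for the new field.
fn render_current_date_tag() -> String {
    // `%Y-%m-%d` produces an ISO-8601 calendar date such as "2025-01-02".
    let today = Local::now().format("%Y-%m-%d").to_string();
    format!("  <current_date>{today}</current_date>")
}

fn main() {
    let tag = render_current_date_tag();
    assert!(tag.starts_with("  <current_date>"));
    println!("{tag}");
}
```

Because the date is captured once per `EnvironmentContext::new` call, two contexts built on different days differ only in this field, which is why the tests above pin `current_date` to a fixed value before comparing contexts.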
From cebb2ddfd87083efe1ff2a985f1569949d9e88ed Mon Sep 17 00:00:00 2001
From: James Peter
Date: Fri, 31 Oct 2025 06:19:24 +1000
Subject: [PATCH 2/3] fix(tui/update): ignore cached upstream upgrade metadata (#376)

- reject cached version.json written by the upstream repo so we don't keep prompting for 0.50.0
- stamp new cache entries with the just-every/code origin to avoid future mixups
- add regression tests covering legacy and current cache formats
---
 code-rs/tui/src/updates.rs | 56 +++++++++++++++++++++++++++++++++++++-
 1 file changed, 55 insertions(+), 1 deletion(-)

diff --git a/code-rs/tui/src/updates.rs b/code-rs/tui/src/updates.rs
index ca18f4c6414..aea80fc54de 100644
--- a/code-rs/tui/src/updates.rs
+++ b/code-rs/tui/src/updates.rs
@@ -109,6 +109,8 @@ struct VersionInfo {
     latest_version: String,
     // ISO-8601 timestamp (RFC3339)
     last_checked_at: DateTime<Utc>,
+    #[serde(default)]
+    release_repo: Option<String>,
 }
 
 #[derive(Deserialize, Debug, Clone)]
@@ -118,6 +120,8 @@ struct ReleaseInfo {
 const VERSION_FILENAME: &str = "version.json";
 const LATEST_RELEASE_URL: &str = "https://api.github.com/repos/just-every/code/releases/latest";
+const CURRENT_RELEASE_REPO: &str = "just-every/code";
+const LEGACY_RELEASE_REPO: &str = "openai/codex";
 pub const CODE_RELEASE_URL: &str = "https://github.com/just-every/code/releases/latest";
 
 const AUTO_UPGRADE_LOCK_FILE: &str = "auto-upgrade.lock";
 const AUTO_UPGRADE_LOCK_TTL: Duration = Duration::from_secs(900); // 15 minutes
@@ -460,7 +464,17 @@ fn truncate_for_log(text: &str) -> String {
 
 fn read_version_info(version_file: &Path) -> anyhow::Result<VersionInfo> {
     let contents = std::fs::read_to_string(version_file)?;
-    Ok(serde_json::from_str(&contents)?)
+    let info: VersionInfo = serde_json::from_str(&contents)?;
+    let repo = info
+        .release_repo
+        .as_deref()
+        .unwrap_or(LEGACY_RELEASE_REPO);
+    if repo != CURRENT_RELEASE_REPO {
+        anyhow::bail!(
+            "stale version info from {repo}; discarding cached update metadata"
+        );
+    }
+    Ok(info)
 }
 
 async fn check_for_update(version_file: &Path, originator: &str) -> anyhow::Result<VersionInfo> {
@@ -496,6 +510,7 @@ async fn check_for_update(version_file: &Path, originator: &str) -> anyhow::Result<VersionInfo> {
     let info = VersionInfo {
         latest_version,
         last_checked_at: Utc::now(),
+        release_repo: Some(CURRENT_RELEASE_REPO.to_string()),
     };
 
     let json_line = format!("{}\n", serde_json::to_string(&info)?);
@@ -520,3 +535,42 @@ fn parse_version(v: &str) -> Option<(u64, u64, u64)> {
     let pat = iter.next()?.parse::<u64>().ok()?;
     Some((maj, min, pat))
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use chrono::TimeZone;
+    use tempfile::tempdir;
+
+    #[test]
+    fn read_version_info_rejects_legacy_repo_cache() {
+        let dir = tempdir().unwrap();
+        let path = dir.path().join("version.json");
+        let legacy = serde_json::json!({
+            "latest_version": "0.50.0",
+            "last_checked_at": Utc.timestamp_opt(1_696_000_000, 0).unwrap().to_rfc3339(),
+        });
+        std::fs::write(&path, format!("{}\n", legacy)).unwrap();
+
+        let err = read_version_info(&path).expect_err("legacy cache should be rejected");
+        assert!(err
+            .to_string()
+            .contains("stale version info"));
+    }
+
+    #[test]
+    fn read_version_info_accepts_current_repo_cache() {
+        let dir = tempdir().unwrap();
+        let path = dir.path().join("version.json");
+        let info = serde_json::json!({
+            "latest_version": "0.4.7",
+            "last_checked_at": Utc::now().to_rfc3339(),
+            "release_repo": CURRENT_RELEASE_REPO,
+        });
+        std::fs::write(&path, format!("{}\n", info)).unwrap();
+
+        let parsed = read_version_info(&path).expect("current repo cache should load");
+        assert_eq!(parsed.latest_version, "0.4.7");
+        assert_eq!(parsed.release_repo.as_deref(), Some(CURRENT_RELEASE_REPO));
+    }
+}
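To make the cache rule above concrete, here is a small self-contained sketch of the validation patch 2 applies to `version.json` (assumes `serde`/`serde_json`; the `CachedVersion` struct and `is_trusted_cache` helper are illustrative names, not part of the patch):

```rust
use serde::Deserialize;

const CURRENT_RELEASE_REPO: &str = "just-every/code";
const LEGACY_RELEASE_REPO: &str = "openai/codex";

#[derive(Deserialize)]
struct CachedVersion {
    latest_version: String,
    // Caches written before this patch carry no `release_repo`, hence the default.
    #[serde(default)]
    release_repo: Option<String>,
}

// A cache entry is trusted only when it was stamped by the just-every/code
// release check; entries without a repo are treated as legacy openai/codex data.
fn is_trusted_cache(raw: &str) -> bool {
    serde_json::from_str::<CachedVersion>(raw)
        .map(|cache| {
            cache.release_repo.as_deref().unwrap_or(LEGACY_RELEASE_REPO) == CURRENT_RELEASE_REPO
        })
        .unwrap_or(false)
}

fn main() {
    let legacy = r#"{"latest_version":"0.50.0","last_checked_at":"2023-09-29T00:00:00Z"}"#;
    let stamped = r#"{"latest_version":"0.4.7","last_checked_at":"2023-09-29T00:00:00Z","release_repo":"just-every/code"}"#;
    assert!(!is_trusted_cache(legacy));
    assert!(is_trusted_cache(stamped));

    let cache: CachedVersion = serde_json::from_str(stamped).unwrap();
    println!("trusted cache advertises {}", cache.latest_version);
}
```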
From 7d193cce78bc2725a09323f2b073b3676ad51262 Mon Sep 17 00:00:00 2001
From: James Peter
Date: Fri, 7 Nov 2025 06:27:23 +1000
Subject: [PATCH 3/3] fix(tui/update): harden version cache refresh

---
 code-rs/tui/src/updates.rs | 392 ++++++++++++++++++++++++++++++++++---
 1 file changed, 361 insertions(+), 31 deletions(-)

diff --git a/code-rs/tui/src/updates.rs b/code-rs/tui/src/updates.rs
index aea80fc54de..a9a0f3ce89e 100644
--- a/code-rs/tui/src/updates.rs
+++ b/code-rs/tui/src/updates.rs
@@ -1,4 +1,5 @@
 use chrono::DateTime;
+use chrono::Duration as ChronoDuration;
 use chrono::Utc;
 use serde::Deserialize;
 use serde::Serialize;
@@ -16,9 +17,19 @@ use std::time::UNIX_EPOCH;
 use code_core::config::resolve_code_path_for_read;
 use code_core::config::Config;
 use code_core::default_client::create_client;
+use once_cell::sync::Lazy;
 use tokio::process::Command;
+use tokio::sync::Mutex as AsyncMutex;
+use tokio::task;
 use tracing::{info, warn};
 
+#[cfg(test)]
+use futures::future::BoxFuture;
+#[cfg(test)]
+use std::future::Future;
+#[cfg(test)]
+use std::sync::Arc;
+
 const FORCE_UPGRADE_UNSET: u8 = 0;
 const FORCE_UPGRADE_FALSE: u8 = 1;
 const FORCE_UPGRADE_TRUE: u8 = 2;
@@ -61,19 +72,33 @@ pub fn auto_upgrade_runtime_enabled() -> bool {
 pub fn get_upgrade_version(config: &Config) -> Option<String> {
     let version_file = version_filepath(config);
     let read_path = resolve_code_path_for_read(&config.code_home, Path::new(VERSION_FILENAME));
-    let info = read_version_info(&read_path).ok();
     let originator = config.responses_originator_header.clone();
+    let cached_info = match read_version_info(&read_path) {
+        Ok(info) => info,
+        Err(err) => {
+            warn!(
+                error = %err,
+                path = %read_path.display(),
+                "failed to read cached version info"
+            );
+            None
+        }
+    };
 
-    // Always refresh the cached latest version in the background so TUI startup
-    // isn’t blocked by a network call. The UI reads the previously cached
-    // value (if any) for this run; the next run shows the banner if needed.
-    tokio::spawn(async move {
-        check_for_update(&version_file, &originator)
-            .await
-            .inspect_err(|e| tracing::error!("Failed to update version: {e}"))
-    });
+    let should_refresh = cached_info
+        .as_ref()
+        .map(|info| !is_cache_fresh(info))
+        .unwrap_or(true);
 
-    info.and_then(|info| {
+    if should_refresh {
+        tokio::spawn(async move {
+            check_for_update(&version_file, &originator)
+                .await
+                .inspect_err(|e| tracing::error!("Failed to update version: {e}"))
+        });
+    }
+
+    cached_info.and_then(|info| {
         let current_version = code_version::version();
         if is_newer(&info.latest_version, current_version).unwrap_or(false) {
             Some(info.latest_version)
@@ -124,6 +149,22 @@ const CURRENT_RELEASE_REPO: &str = "just-every/code";
 const LEGACY_RELEASE_REPO: &str = "openai/codex";
 pub const CODE_RELEASE_URL: &str = "https://github.com/just-every/code/releases/latest";
 
+const CACHE_TTL_HOURS: i64 = 20;
+const MAX_CLOCK_SKEW_MINUTES: i64 = 5;
+
+static REFRESH_LOCK: Lazy<AsyncMutex<()>> = Lazy::new(|| AsyncMutex::new(()));
+
+#[cfg(test)]
+type FetchOverrideFn = Arc<dyn Fn(&str) -> BoxFuture<'static, anyhow::Result<VersionInfo>> + Send + Sync>;
+
+#[cfg(test)]
+static FETCH_OVERRIDE: Lazy<std::sync::Mutex<Option<FetchOverrideFn>>> =
+    Lazy::new(|| std::sync::Mutex::new(None));
+
+#[cfg(test)]
+static FETCH_OVERRIDE_TEST_LOCK: Lazy<std::sync::Mutex<()>> =
+    Lazy::new(|| std::sync::Mutex::new(()));
+
 const AUTO_UPGRADE_LOCK_FILE: &str = "auto-upgrade.lock";
 const AUTO_UPGRADE_LOCK_TTL: Duration = Duration::from_secs(900); // 15 minutes
@@ -462,22 +503,91 @@ fn truncate_for_log(text: &str) -> String {
 }
 
-fn read_version_info(version_file: &Path) -> anyhow::Result<VersionInfo> {
-    let contents = std::fs::read_to_string(version_file)?;
-    let info: VersionInfo = serde_json::from_str(&contents)?;
+fn read_version_info(version_file: &Path) -> anyhow::Result<Option<VersionInfo>> {
+    let contents = match std::fs::read_to_string(version_file) {
+        Ok(contents) => contents,
+        Err(err) if err.kind() == ErrorKind::NotFound => return Ok(None),
+        Err(err) => return Err(err.into()),
+    };
+
+    let mut info: VersionInfo = match serde_json::from_str(&contents) {
+        Ok(info) => info,
+        Err(err) => {
+            warn!(
+                error = %err,
+                path = %version_file.display(),
+                "discarding malformed version cache"
+            );
+            return Ok(None);
+        }
+    };
+
     let repo = info
         .release_repo
         .as_deref()
         .unwrap_or(LEGACY_RELEASE_REPO);
     if repo != CURRENT_RELEASE_REPO {
-        anyhow::bail!(
-            "stale version info from {repo}; discarding cached update metadata"
+        warn!(
+            path = %version_file.display(),
+            release_repo = repo,
+            "stale version info from different repository"
         );
+        return Ok(None);
     }
-    Ok(info)
+
+    info
+        .release_repo
+        .get_or_insert_with(|| CURRENT_RELEASE_REPO.to_string());
+    Ok(Some(info))
 }
 
-async fn check_for_update(version_file: &Path, originator: &str) -> anyhow::Result<VersionInfo> {
+fn is_cache_fresh(info: &VersionInfo) -> bool {
+    let now = Utc::now();
+    let ahead = info.last_checked_at - now;
+    if ahead > ChronoDuration::minutes(MAX_CLOCK_SKEW_MINUTES) {
+        return false;
+    }
+
+    if ahead >= ChronoDuration::zero() {
+        return true;
+    }
+
+    let age = now - info.last_checked_at;
+    age < ChronoDuration::hours(CACHE_TTL_HOURS)
+}
+
+async fn write_version_info(version_file: &Path, info: &VersionInfo) -> anyhow::Result<()> {
+    let json_line = format!("{}\n", serde_json::to_string(info)?);
+    if let Some(parent) = version_file.parent() {
+        tokio::fs::create_dir_all(parent).await?;
+    }
+    let path = version_file.to_path_buf();
+    task::spawn_blocking(move || -> anyhow::Result<()> {
+        let parent = path
+            .parent()
+            .map(Path::to_path_buf)
+            .unwrap_or_else(|| PathBuf::from("."));
+        let mut tmp = tempfile::Builder::new()
+            .prefix("version.json.")
+            .tempfile_in(&parent)?;
+        tmp.write_all(json_line.as_bytes())?;
+        tmp.flush()?;
+        tmp.persist(&path).map_err(|err| err.error)?;
+        Ok(())
+    })
+    .await??;
+    Ok(())
+}
+
+async fn fetch_latest_version(originator: &str) -> anyhow::Result<VersionInfo> {
+    #[cfg(test)]
+    {
+        let override_fn = FETCH_OVERRIDE.lock().unwrap().clone();
+        if let Some(fetch) = override_fn {
+            return fetch(originator).await;
+        }
+    }
+
     let ReleaseInfo {
         tag_name: latest_tag_name,
     } = create_client(originator)
@@ -507,20 +617,62 @@ async fn check_for_update(version_file: &Path, originator: &str) -> anyhow::Result<VersionInfo> {
         }
     };
 
-    let info = VersionInfo {
+    Ok(VersionInfo {
         latest_version,
         last_checked_at: Utc::now(),
         release_repo: Some(CURRENT_RELEASE_REPO.to_string()),
-    };
+    })
+}
 
-    let json_line = format!("{}\n", serde_json::to_string(&info)?);
-    if let Some(parent) = version_file.parent() {
-        tokio::fs::create_dir_all(parent).await?;
+async fn check_for_update(version_file: &Path, originator: &str) -> anyhow::Result<VersionInfo> {
+    if let Some(info) = read_version_info(version_file)? {
+        if is_cache_fresh(&info) {
+            return Ok(info);
+        }
+    }
+
+    let _guard = REFRESH_LOCK.lock().await;
+
+    if let Some(info) = read_version_info(version_file)? {
+        if is_cache_fresh(&info) {
+            return Ok(info);
+        }
     }
-    tokio::fs::write(version_file, json_line).await?;
+
+    let info = fetch_latest_version(originator).await?;
+    write_version_info(version_file, &info).await?;
     Ok(info)
 }
 
+#[cfg(test)]
+fn with_fetch_override<F, Fut>(fetch: F) -> FetchOverrideGuard
+where
+    F: Fn(&str) -> Fut + Send + Sync + 'static,
+    Fut: Future<Output = anyhow::Result<VersionInfo>> + Send + 'static,
+{
+    let wrapped: FetchOverrideFn = Arc::new(move |originator: &str| Box::pin(fetch(originator)));
+    let lock = FETCH_OVERRIDE_TEST_LOCK.lock().unwrap();
+    *FETCH_OVERRIDE.lock().unwrap() = Some(wrapped);
+    FetchOverrideGuard {
+        lock: Some(lock),
+    }
+}
+
+#[cfg(test)]
+struct FetchOverrideGuard {
+    lock: Option<std::sync::MutexGuard<'static, ()>>,
+}
+
+#[cfg(test)]
+impl Drop for FetchOverrideGuard {
+    fn drop(&mut self) {
+        *FETCH_OVERRIDE.lock().unwrap() = None;
+        if let Some(guard) = self.lock.take() {
+            drop(guard);
+        }
+    }
+}
+
 fn is_newer(latest: &str, current: &str) -> Option<bool> {
     match (parse_version(latest), parse_version(current)) {
         (Some(l), Some(c)) => Some(l > c),
@@ -540,22 +692,28 @@ mod tests {
     use super::*;
     use chrono::TimeZone;
+    use std::fs;
+    use std::sync::Arc;
     use tempfile::tempdir;
+    use tokio::sync::Mutex as TokioMutex;
+    use tokio::time::{sleep, Duration as TokioDuration};
+
+    fn write_cache(path: &Path, info: &serde_json::Value) {
+        fs::write(path, format!("{}\n", info)).expect("write version cache");
+    }
 
     #[test]
-    fn read_version_info_rejects_legacy_repo_cache() {
+    fn read_version_info_discard_legacy_repo_cache() {
         let dir = tempdir().unwrap();
         let path = dir.path().join("version.json");
         let legacy = serde_json::json!({
             "latest_version": "0.50.0",
             "last_checked_at": Utc.timestamp_opt(1_696_000_000, 0).unwrap().to_rfc3339(),
         });
-        std::fs::write(&path, format!("{}\n", legacy)).unwrap();
+        write_cache(&path, &legacy);
 
-        let err = read_version_info(&path).expect_err("legacy cache should be rejected");
-        assert!(err
-            .to_string()
-            .contains("stale version info"));
+        let result = read_version_info(&path).expect("load cache");
+        assert!(result.is_none(), "legacy repo cache should be dropped");
     }
 
     #[test]
@@ -567,10 +725,182 @@ mod tests {
             "last_checked_at": Utc::now().to_rfc3339(),
             "release_repo": CURRENT_RELEASE_REPO,
         });
-        std::fs::write(&path, format!("{}\n", info)).unwrap();
+        write_cache(&path, &info);
 
-        let parsed = read_version_info(&path).expect("current repo cache should load");
+        let parsed = read_version_info(&path)
+            .expect("load cache")
+            .expect("current repo cache should load");
         assert_eq!(parsed.latest_version, "0.4.7");
         assert_eq!(parsed.release_repo.as_deref(), Some(CURRENT_RELEASE_REPO));
     }
+
+    #[tokio::test]
+    async fn stale_cache_triggers_network_refresh() {
+        let dir = tempdir().unwrap();
+        let version_file = dir.path().join("version.json");
+        let stale = serde_json::json!({
+            "latest_version": "0.4.7",
+            "last_checked_at": (Utc::now() - ChronoDuration::hours(CACHE_TTL_HOURS + 2)).to_rfc3339(),
+            "release_repo": CURRENT_RELEASE_REPO,
+        });
+        write_cache(&version_file, &stale);
+
+        let counter = Arc::new(TokioMutex::new(0usize));
+        let counter_clone = counter.clone();
+        let expected_version = "0.4.8".to_string();
+        let expected_clone = expected_version.clone();
+        let _guard = with_fetch_override(move |_originator| {
+            let counter = counter_clone.clone();
+            let version = expected_clone.clone();
+            async move {
+                let mut hits = counter.lock().await;
+                *hits += 1;
+                Ok(VersionInfo {
+                    latest_version: version,
+                    last_checked_at: Utc::now(),
+                    release_repo: Some(CURRENT_RELEASE_REPO.to_string()),
+                })
+            }
+        });
+
+        let info = check_for_update(&version_file, "test-originator").await.unwrap();
+        assert_eq!(info.latest_version, expected_version);
+        assert!(is_cache_fresh(&info));
+        let persisted = read_version_info(&version_file)
+            .unwrap()
+            .expect("updated cache present");
+        assert_eq!(persisted.latest_version, expected_version);
+        assert_eq!(persisted.release_repo.as_deref(), Some(CURRENT_RELEASE_REPO));
+        assert_eq!(*counter.lock().await, 1);
+    }
+
+    #[tokio::test]
+    async fn fresh_cache_skips_network() {
+        let dir = tempdir().unwrap();
+        let version_file = dir.path().join("version.json");
+        let current = serde_json::json!({
+            "latest_version": "0.4.7",
+            "last_checked_at": Utc::now().to_rfc3339(),
+            "release_repo": CURRENT_RELEASE_REPO,
+        });
+        write_cache(&version_file, &current);
+
+        let counter = Arc::new(TokioMutex::new(0usize));
+        let counter_clone = counter.clone();
+        let _guard = with_fetch_override(move |_originator| {
+            let counter = counter_clone.clone();
+            async move {
+                let mut hits = counter.lock().await;
+                *hits += 1;
+                Ok(VersionInfo {
+                    latest_version: "0.4.99".to_string(),
+                    last_checked_at: Utc::now(),
+                    release_repo: Some(CURRENT_RELEASE_REPO.to_string()),
+                })
+            }
+        });
+
+        let info = check_for_update(&version_file, "test-originator").await.unwrap();
+        assert_eq!(info.latest_version, "0.4.7");
+        assert_eq!(*counter.lock().await, 0, "no network call expected");
+    }
+
+    #[tokio::test]
+    async fn concurrent_refreshes_share_single_fetch() {
+        let dir = tempdir().unwrap();
+        let version_file = dir.path().join("version.json");
+
+        let counter = Arc::new(TokioMutex::new(0usize));
+        let counter_clone = counter.clone();
+        let _guard = with_fetch_override(move |_originator| {
+            let counter = counter_clone.clone();
+            async move {
+                let mut hits = counter.lock().await;
+                *hits += 1;
+                drop(hits);
+                sleep(TokioDuration::from_millis(50)).await;
+                Ok(VersionInfo {
+                    latest_version: "0.5.0".to_string(),
+                    last_checked_at: Utc::now(),
+                    release_repo: Some(CURRENT_RELEASE_REPO.to_string()),
+                })
+            }
+        });
+
+        let tasks: Vec<_> = (0..5)
+            .map(|_| {
+                let path = version_file.clone();
+                async move { check_for_update(&path, "test-originator").await.unwrap() }
+            })
+            .collect();
+        let results = futures::future::join_all(tasks).await;
+        assert!(results.iter().all(|info| info.latest_version == "0.5.0"));
+        assert_eq!(*counter.lock().await, 1, "only one fetch should run");
+    }
+
+    #[tokio::test]
+    async fn malformed_cache_is_replaced() {
+        let dir = tempdir().unwrap();
+        let version_file = dir.path().join("version.json");
+        fs::write(&version_file, "not json").unwrap();
+
+        let counter = Arc::new(TokioMutex::new(0usize));
+        let counter_clone = counter.clone();
+        let _guard = with_fetch_override(move |_originator| {
+            let counter = counter_clone.clone();
+            async move {
+                let mut hits = counter.lock().await;
+                *hits += 1;
+                Ok(VersionInfo {
+                    latest_version: "0.5.1".to_string(),
+                    last_checked_at: Utc::now(),
+                    release_repo: Some(CURRENT_RELEASE_REPO.to_string()),
+                })
+            }
+        });
+
+        let info = check_for_update(&version_file, "test-originator").await.unwrap();
+        assert_eq!(info.latest_version, "0.5.1");
+        assert_eq!(*counter.lock().await, 1);
+        let persisted = read_version_info(&version_file)
+            .unwrap()
+            .expect("cache rewritten");
+        assert_eq!(persisted.latest_version, "0.5.1");
+    }
+
+    #[tokio::test]
+    async fn write_fails_when_parent_is_file() {
+        let dir = tempdir().unwrap();
+        let blocker = dir.path().join("cache");
+        fs::write(&blocker, "not a directory").unwrap();
+        let version_file = blocker.join("version.json");
+
+        let counter = Arc::new(TokioMutex::new(0usize));
+        let counter_clone = counter.clone();
+        let _guard = with_fetch_override(move |_originator| {
+            let counter = counter_clone.clone();
+            async move {
+                let mut hits = counter.lock().await;
+                *hits += 1;
+                Ok(VersionInfo {
+                    latest_version: "0.5.2".to_string(),
+                    last_checked_at: Utc::now(),
+                    release_repo: Some(CURRENT_RELEASE_REPO.to_string()),
+                })
+            }
+        });
+
+        let err = check_for_update(&version_file, "test-originator")
+            .await
+            .expect_err("write should fail");
+        let io_err = err
+            .downcast_ref::<std::io::Error>()
+            .or_else(|| err.root_cause().downcast_ref::<std::io::Error>())
+            .expect("io error expected");
+        assert!(matches!(
+            io_err.kind(),
+            ErrorKind::AlreadyExists | ErrorKind::PermissionDenied | ErrorKind::NotADirectory
+        ));
+        assert_eq!(*counter.lock().await, 0, "fetch should not run on path errors");
+    }
 }
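As a companion to patch 3, a standalone sketch of the freshness rule it introduces (assumes `chrono` 0.4; the constants mirror `CACHE_TTL_HOURS` and `MAX_CLOCK_SKEW_MINUTES` above, and the free function takes the timestamp directly instead of a `VersionInfo`):

```rust
use chrono::{DateTime, Duration, Utc};

const CACHE_TTL_HOURS: i64 = 20;
const MAX_CLOCK_SKEW_MINUTES: i64 = 5;

// Mirrors `is_cache_fresh`: timestamps slightly in the future are tolerated as
// clock skew, far-future timestamps force a refresh, and past timestamps stay
// fresh only while they are younger than the TTL.
fn is_fresh(last_checked_at: DateTime<Utc>, now: DateTime<Utc>) -> bool {
    let ahead = last_checked_at - now;
    if ahead > Duration::minutes(MAX_CLOCK_SKEW_MINUTES) {
        return false;
    }
    if ahead >= Duration::zero() {
        return true;
    }
    (now - last_checked_at) < Duration::hours(CACHE_TTL_HOURS)
}

fn main() {
    let now = Utc::now();
    assert!(is_fresh(now - Duration::hours(1), now)); // checked recently
    assert!(!is_fresh(now - Duration::hours(CACHE_TTL_HOURS + 1), now)); // past the TTL
    assert!(is_fresh(now + Duration::minutes(2), now)); // small skew tolerated
    assert!(!is_fresh(now + Duration::hours(1), now)); // implausible future timestamp
}
```

The double read of `version.json` around `REFRESH_LOCK` in the new `check_for_update` pairs with this rule: whichever task wins the lock performs the single network fetch and persists the result, and tasks that were waiting re-read the now-fresh cache instead of fetching again.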