added corporate quarterly announcements for the last 4 years

This commit is contained in:
2025-11-23 16:02:23 +01:00
parent cd3f47d91f
commit 462f7ca672
12 changed files with 1457 additions and 104 deletions

1008
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -20,6 +20,7 @@ tokio = { version = "1.38", features = ["full"] }
reqwest = { version = "0.12", features = ["json", "gzip", "brotli", "deflate"] } reqwest = { version = "0.12", features = ["json", "gzip", "brotli", "deflate"] }
scraper = "0.19" # HTML parsing for Yahoo earnings pages scraper = "0.19" # HTML parsing for Yahoo earnings pages
fantoccini = { version = "0.20", features = ["rustls-tls"] } # Headless Chrome for finanzen.net fantoccini = { version = "0.20", features = ["rustls-tls"] } # Headless Chrome for finanzen.net
yfinance-rs = "0.7.2"
# Serialization # Serialization
serde = { version = "1.0", features = ["derive"] } serde = { version = "1.0", features = ["derive"] }

260
data/exchanges.json Normal file
View File

@@ -0,0 +1,260 @@
{
"exchanges": [
{
"mic": "XNYS",
"name": "New York Stock Exchange",
"country": "United States",
"city": "New York City",
"market_cap_trillion_usd": 30.92,
"timezone": "America/New_York",
"tz_offset": "-05:00",
"dst": "MarNov",
"open_local": "09:30",
"close_local": "16:00",
"lunch_break": false,
"open_utc": "14:30",
"close_utc": "21:00",
"currency": "USD"
},
{
"mic": "XNAS",
"name": "Nasdaq",
"country": "United States",
"city": "New York City",
"market_cap_trillion_usd": 31.96,
"timezone": "America/New_York",
"tz_offset": "-05:00",
"dst": "MarNov",
"open_local": "09:30",
"close_local": "16:00",
"lunch_break": false,
"open_utc": "14:30",
"close_utc": "21:00",
"currency": "USD"
},
{
"mic": "XSHG",
"name": "Shanghai Stock Exchange",
"country": "China",
"city": "Shanghai",
"market_cap_trillion_usd": 7.96,
"timezone": "Asia/Shanghai",
"tz_offset": "+08:00",
"dst": null,
"open_local": "09:30",
"close_local": "15:00",
    "lunch_break": "11:30-13:00",
"open_utc": "01:30",
"close_utc": "07:00",
"currency": "CNY"
},
{
"mic": "XJPX",
"name": "Japan Exchange Group (Tokyo Stock Exchange)",
"country": "Japan",
"city": "Tokyo",
"market_cap_trillion_usd": 7.06,
"timezone": "Asia/Tokyo",
"tz_offset": "+09:00",
"dst": null,
"open_local": "09:00",
"close_local": "15:00",
    "lunch_break": "11:30-12:30",
"open_utc": "00:00",
"close_utc": "06:00",
"currency": "JPY"
},
{
"mic": "XHKG",
"name": "Hong Kong Stock Exchange",
"country": "Hong Kong",
"city": "Hong Kong",
"market_cap_trillion_usd": 6.41,
"timezone": "Asia/Hong_Kong",
"tz_offset": "+08:00",
"dst": null,
"open_local": "09:30",
"close_local": "16:00",
    "lunch_break": "12:00-13:00",
"open_utc": "01:30",
"close_utc": "08:00",
"currency": "HKD"
},
{
"mic": "XAMS",
"name": "Euronext Amsterdam",
"country": "Netherlands",
"city": "Amsterdam",
"market_cap_trillion_usd": 5.61,
"timezone": "Europe/Amsterdam",
"tz_offset": "+01:00",
"dst": "MarOct",
"open_local": "09:00",
"close_local": "17:30",
"lunch_break": false,
"open_utc": "08:00",
"close_utc": "16:30",
"currency": "EUR"
},
{
    "mic": "XBOM",
"name": "Bombay Stock Exchange",
"country": "India",
"city": "Mumbai",
"market_cap_trillion_usd": 5.25,
"timezone": "Asia/Kolkata",
"tz_offset": "+05:30",
"dst": null,
"open_local": "09:15",
"close_local": "15:30",
"lunch_break": false,
"open_utc": "03:45",
"close_utc": "10:00",
"currency": "INR"
},
{
"mic": "XNSE",
"name": "National Stock Exchange of India",
"country": "India",
"city": "Mumbai",
"market_cap_trillion_usd": 5.32,
"timezone": "Asia/Kolkata",
"tz_offset": "+05:30",
"dst": null,
"open_local": "09:15",
    "close_local": "15:30",
"lunch_break": false,
"open_utc": "03:45",
"close_utc": "10:00",
"currency": "INR"
},
{
"mic": "XSHE",
"name": "Shenzhen Stock Exchange",
"country": "China",
"city": "Shenzhen",
"market_cap_trillion_usd": 5.11,
"timezone": "Asia/Shanghai",
"tz_offset": "+08:00",
"dst": null,
"open_local": "09:30",
"close_local": "15:00",
    "lunch_break": "11:30-13:00",
"open_utc": "01:30",
"close_utc": "07:00",
"currency": "CNY"
},
{
"mic": "XTSE",
"name": "Toronto Stock Exchange",
"country": "Canada",
"city": "Toronto",
"market_cap_trillion_usd": 4.00,
"timezone": "America/Toronto",
"tz_offset": "-05:00",
"dst": "MarNov",
"open_local": "09:30",
"close_local": "16:00",
"lunch_break": false,
"open_utc": "14:30",
"close_utc": "21:00",
"currency": "CAD"
},
{
"mic": "XLON",
"name": "London Stock Exchange",
"country": "United Kingdom",
"city": "London",
"market_cap_trillion_usd": 3.14,
"timezone": "Europe/London",
"tz_offset": "+00:00",
"dst": "MarOct",
"open_local": "08:00",
"close_local": "16:30",
"lunch_break": false,
"open_utc": "08:00",
"close_utc": "16:30",
"currency": "GBP"
},
{
"mic": "XTAI",
"name": "Taiwan Stock Exchange",
"country": "Taiwan",
"city": "Taipei",
"market_cap_trillion_usd": 2.87,
"timezone": "Asia/Taipei",
"tz_offset": "+08:00",
"dst": null,
"open_local": "09:00",
"close_local": "13:30",
"lunch_break": false,
"open_utc": "01:00",
"close_utc": "05:30",
"currency": "TWD"
},
{
"mic": "XSAU",
"name": "Saudi Exchange (Tadawul)",
"country": "Saudi Arabia",
"city": "Riyadh",
"market_cap_trillion_usd": 2.73,
"timezone": "Asia/Riyadh",
"tz_offset": "+03:00",
"dst": null,
"open_local": "10:00",
"close_local": "15:00",
"lunch_break": false,
"open_utc": "07:00",
"close_utc": "12:00",
"currency": "SAR"
},
{
"mic": "XFRA",
"name": "Deutsche Börse (Xetra)",
"country": "Germany",
"city": "Frankfurt",
"market_cap_trillion_usd": 2.04,
"timezone": "Europe/Berlin",
"tz_offset": "+01:00",
"dst": "MarOct",
"open_local": "09:00",
"close_local": "17:30",
"lunch_break": false,
"open_utc": "08:00",
"close_utc": "16:30",
"currency": "EUR"
},
{
"mic": "XSWX",
"name": "SIX Swiss Exchange",
"country": "Switzerland",
"city": "Zürich",
"market_cap_trillion_usd": 1.97,
"timezone": "Europe/Zurich",
"tz_offset": "+01:00",
"dst": "MarOct",
"open_local": "09:00",
"close_local": "17:30",
"lunch_break": false,
"open_utc": "08:00",
"close_utc": "16:30",
"currency": "CHF"
},
{
"mic": "XASX",
"name": "Australian Securities Exchange",
"country": "Australia",
"city": "Sydney",
"market_cap_trillion_usd": 1.89,
"timezone": "Australia/Sydney",
"tz_offset": "+10:00",
"dst": "OctApr",
"open_local": "10:00",
"close_local": "16:00",
"lunch_break": false,
"open_utc": "00:00",
"close_utc": "06:00",
"currency": "AUD"
}
]
}

View File

@@ -15,7 +15,7 @@ impl Default for Config {
fn default() -> Self { fn default() -> Self {
Self { Self {
economic_start_date: "2007-02-13".to_string(), economic_start_date: "2007-02-13".to_string(),
corporate_start_date: "2010-01-01".to_string(), corporate_start_date: "2007-01-01".to_string(),
economic_lookahead_months: 3, economic_lookahead_months: 3,
} }
} }

52
src/corporate/helpers.rs Normal file
View File

@@ -0,0 +1,52 @@
// src/corporate/helpers.rs
use super::types::*;
use chrono::{Local, NaiveDate};
use std::collections::{HashMap, HashSet};
/// Build the unique lookup key for an event: `"<ticker>|<date>|<time>"`.
///
/// Time is part of the key so that the same ticker+date scheduled at a
/// different time is treated as a distinct entry (a reschedule).
pub fn event_key(e: &CompanyEvent) -> String {
    [e.ticker.as_str(), e.date.as_str(), e.time.as_str()].join("|")
}
/// Compare an old and a new snapshot of the same event (same ticker|date|time
/// key) and return one `CompanyEventChange` record per field that differs.
///
/// Events dated on or before `today` are considered final: no changes are
/// reported for them. All records from one call share a single `detected_at`
/// timestamp so related changes group together.
pub fn detect_changes(old: &CompanyEvent, new: &CompanyEvent, today: &str) -> Vec<CompanyEventChange> {
    let mut changes = Vec::new();
    // Past (or same-day) events are never reported as changed.
    if new.date.as_str() <= today {
        return changes;
    }
    let ts = Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
    // Shared constructor for a single field-change record; deduplicates the
    // three previously copy-pasted struct literals.
    let mut push = |field: &str, old_value: String, new_value: String| {
        changes.push(CompanyEventChange {
            ticker: new.ticker.clone(),
            date: new.date.clone(),
            field_changed: field.to_string(),
            old_value,
            new_value,
            detected_at: ts.clone(),
        });
    };
    if old.time != new.time {
        push("time", old.time.clone(), new.time.clone());
    }
    if old.eps_forecast != new.eps_forecast {
        push(
            "eps_forecast",
            format!("{:?}", old.eps_forecast),
            format!("{:?}", new.eps_forecast),
        );
    }
    if old.eps_actual != new.eps_actual {
        push(
            "eps_actual",
            format!("{:?}", old.eps_actual),
            format!("{:?}", new.eps_actual),
        );
    }
    // TODO: add revenue_forecast / revenue_actual comparisons once the scraper
    // populates those fields (they are currently always None).
    changes
}

View File

@@ -3,6 +3,7 @@ pub mod types;
pub mod scraper; pub mod scraper;
pub mod storage; pub mod storage;
pub mod update; pub mod update;
pub mod helpers;
pub use types::*; pub use types::*;
pub use update::run_full_update; pub use update::run_full_update;

View File

@@ -2,8 +2,10 @@
use super::types::{CompanyEvent, CompanyPrice}; use super::types::{CompanyEvent, CompanyPrice};
use fantoccini::{Client, Locator}; use fantoccini::{Client, Locator};
use scraper::{Html, Selector}; use scraper::{Html, Selector};
use chrono::{NaiveDate, Datelike}; use chrono::{NaiveDate};
use tokio::time::{sleep, Duration}; use tokio::time::{sleep, Duration};
use yfinance_rs::{YfClient, Ticker, Range, Interval};
use yfinance_rs::core::conversions::money_to_f64;
const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"; const USER_AGENT: &str = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36";
@@ -54,9 +56,9 @@ pub async fn fetch_earnings_history(client: &Client, ticker: &str) -> anyhow::Re
let cols: Vec<String> = row.select(&Selector::parse("td").unwrap()) let cols: Vec<String> = row.select(&Selector::parse("td").unwrap())
.map(|td| td.text().collect::<Vec<_>>().join(" ").trim().to_string()) .map(|td| td.text().collect::<Vec<_>>().join(" ").trim().to_string())
.collect(); .collect();
if cols.len() < 6 { continue; } if cols.len() < 4 { continue; }
let full_date = &cols[2]; let full_date = &cols[0];
let parts: Vec<&str> = full_date.split(" at ").collect(); let parts: Vec<&str> = full_date.split(" at ").collect();
let raw_date = parts[0].trim(); let raw_date = parts[0].trim();
let time_str = if parts.len() > 1 { parts[1].trim() } else { "" }; let time_str = if parts.len() > 1 { parts[1].trim() } else { "" };
@@ -66,8 +68,8 @@ pub async fn fetch_earnings_history(client: &Client, ticker: &str) -> anyhow::Re
Err(_) => continue, Err(_) => continue,
}; };
let eps_forecast = parse_float(&cols[3]); let eps_forecast = parse_float(&cols[1]);
let eps_actual = if cols[4] == "-" { None } else { parse_float(&cols[4]) }; let eps_actual = if cols[2] == "-" { None } else { parse_float(&cols[2]) };
let surprise_pct = if let (Some(f), Some(a)) = (eps_forecast, eps_actual) { let surprise_pct = if let (Some(f), Some(a)) = (eps_forecast, eps_actual) {
if f.abs() > 0.001 { Some((a - f) / f.abs() * 100.0) } else { None } if f.abs() > 0.001 { Some((a - f) / f.abs() * 100.0) } else { None }
@@ -85,7 +87,7 @@ pub async fn fetch_earnings_history(client: &Client, ticker: &str) -> anyhow::Re
ticker: ticker.to_string(), ticker: ticker.to_string(),
date: date.format("%Y-%m-%d").to_string(), date: date.format("%Y-%m-%d").to_string(),
time, time,
period: "".to_string(), // No period info available, set to empty period: "".to_string(),
eps_forecast, eps_forecast,
eps_actual, eps_actual,
revenue_forecast: None, revenue_forecast: None,
@@ -98,38 +100,46 @@ pub async fn fetch_earnings_history(client: &Client, ticker: &str) -> anyhow::Re
Ok(events) Ok(events)
} }
pub async fn fetch_price_history(client: &Client, ticker: &str, start: &str, end: &str) -> anyhow::Result<Vec<CompanyPrice>> { pub async fn fetch_price_history(
let start_ts = NaiveDate::parse_from_str(start, "%Y-%m-%d")? ticker: &str,
.and_hms_opt(0, 0, 0).unwrap().and_utc() start: &str,
.timestamp(); end: &str,
) -> anyhow::Result<Vec<CompanyPrice>> {
let client = YfClient::default();
let tk = Ticker::new(&client, ticker);
let end_ts = NaiveDate::parse_from_str(end, "%Y-%m-%d")? // We request the maximum range the library will automatically respect Yahoo's limits
.succ_opt().unwrap() let history = tk
.and_hms_opt(0, 0, 0).unwrap().and_utc() .history(Some(Range::Max), Some(Interval::D1), true)
.timestamp(); .await
.map_err(|e| anyhow::anyhow!("Yahoo Finance API error for {ticker}: {e:?}"))?;
let url = format!( let mut prices = Vec::with_capacity(history.len());
"https://query1.finance.yahoo.com/v7/finance/download/{ticker}?period1={start_ts}&period2={end_ts}&interval=1d&events=history&includeAdjustedClose=true"
);
client.goto(&url).await?; for candle in history {
let csv = client.source().await?; let date_str = candle.ts.format("%Y-%m-%d").to_string();
// Filter by user-defined start / end
if date_str < (*start).to_string() || date_str > (*end).to_string() {
continue;
}
let mut prices = Vec::new();
for line in csv.lines().skip(1) {
let cols: Vec<&str> = line.split(',').collect();
if cols.len() < 7 { continue; }
prices.push(CompanyPrice { prices.push(CompanyPrice {
ticker: ticker.to_string(), ticker: ticker.to_string(),
date: cols[0].to_string(), date: date_str,
open: cols[1].parse()?, open: money_to_f64(&candle.open),
high: cols[2].parse()?, high: money_to_f64(&candle.high),
low: cols[3].parse()?, low: money_to_f64(&candle.low),
close: cols[4].parse()?, // close_unadj is the raw (non-adjusted) close; close is the adjusted one
adj_close: cols[5].parse()?, close: money_to_f64(&candle.close_unadj.unwrap_or(candle.close.clone())),
volume: cols[6].parse()?, adj_close: money_to_f64(&candle.close),
volume: candle.volume.unwrap_or(0),
}); });
} }
// Sort just in case (normally already sorted)
prices.sort_by_key(|p| p.date.clone());
Ok(prices) Ok(prices)
} }

View File

@@ -1,11 +1,11 @@
// src/corporate/storage.rs // src/corporate/storage.rs
use super::types::{CompanyEvent, CompanyPrice}; use super::types::{CompanyEvent, CompanyPrice, CompanyEventChange};
use std::collections::{HashMap, HashSet}; use super::helpers::*;
use tokio::fs; use tokio::fs;
use chrono::{Local, NaiveDate}; use chrono::{Local, NaiveDate, Datelike};
use std::collections::HashMap;
/// Load all events from disk into a HashMap<ticker|date, event> pub async fn load_existing_events() -> anyhow::Result<HashMap<String, CompanyEvent>> {
async fn load_all_events_map() -> anyhow::Result<HashMap<String, CompanyEvent>> {
let mut map = HashMap::new(); let mut map = HashMap::new();
let dir = std::path::Path::new("corporate_events"); let dir = std::path::Path::new("corporate_events");
if !dir.exists() { if !dir.exists() {
@@ -16,11 +16,12 @@ async fn load_all_events_map() -> anyhow::Result<HashMap<String, CompanyEvent>>
while let Some(entry) = entries.next_entry().await? { while let Some(entry) = entries.next_entry().await? {
let path = entry.path(); let path = entry.path();
if path.extension().and_then(|s| s.to_str()) == Some("json") { if path.extension().and_then(|s| s.to_str()) == Some("json") {
let content = fs::read_to_string(&path).await?; let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
if let Ok(events) = serde_json::from_str::<Vec<CompanyEvent>>(&content) { if name.starts_with("events_") && name.len() == 17 { // events_yyyy-mm.json
let content = fs::read_to_string(&path).await?;
let events: Vec<CompanyEvent> = serde_json::from_str(&content)?;
for event in events { for event in events {
let key = format!("{}|{}", event.ticker, event.date); map.insert(event_key(&event), event);
map.insert(key, event);
} }
} }
} }
@@ -28,34 +29,68 @@ async fn load_all_events_map() -> anyhow::Result<HashMap<String, CompanyEvent>>
Ok(map) Ok(map)
} }
/// Merge new events with existing ones and save back to disk pub async fn save_optimized_events(events: HashMap<String, CompanyEvent>) -> anyhow::Result<()> {
pub async fn merge_and_save_events(ticker: &str, new_events: Vec<CompanyEvent>) -> anyhow::Result<()> {
let mut existing = load_all_events_map().await?;
// Insert or update
for event in new_events {
let key = format!("{}|{}", event.ticker, event.date);
existing.insert(key, event);
}
// Convert back to Vec and save (simple single file for now)
let all_events: Vec<CompanyEvent> = existing.into_values().collect();
let dir = std::path::Path::new("corporate_events"); let dir = std::path::Path::new("corporate_events");
fs::create_dir_all(dir).await?; fs::create_dir_all(dir).await?;
let path = dir.join("all_events.json");
let json = serde_json::to_string_pretty(&all_events)?; // Delete old files
fs::write(&path, json).await?; let mut entries = fs::read_dir(dir).await?;
while let Some(entry) = entries.next_entry().await? {
let path = entry.path();
let name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
if name.starts_with("events_") && path.extension().map(|e| e == "json").unwrap_or(false) {
fs::remove_file(&path).await?;
}
}
let mut sorted: Vec<_> = events.into_values().collect();
sorted.sort_by_key(|e| (e.ticker.clone(), e.date.clone()));
let mut by_month: HashMap<String, Vec<CompanyEvent>> = HashMap::new();
for e in sorted {
if let Ok(d) = NaiveDate::parse_from_str(&e.date, "%Y-%m-%d") {
let key = format!("{}-{:02}", d.year(), d.month());
by_month.entry(key).or_default().push(e);
}
}
for (month, list) in by_month {
let path = dir.join(format!("events_{}.json", month));
fs::write(&path, serde_json::to_string_pretty(&list)?).await?;
}
Ok(()) Ok(())
} }
/// Save price history for a single ticker (overwrite old file) pub async fn save_changes(changes: &[CompanyEventChange]) -> anyhow::Result<()> {
pub async fn save_prices_for_ticker(ticker: &str, prices: Vec<CompanyPrice>) -> anyhow::Result<()> { if changes.is_empty() { return Ok(()); }
let dir = std::path::Path::new("corporate_event_changes");
fs::create_dir_all(dir).await?;
let mut by_month: HashMap<String, Vec<CompanyEventChange>> = HashMap::new();
for c in changes {
if let Ok(d) = NaiveDate::parse_from_str(&c.date, "%Y-%m-%d") {
let key = format!("{}-{:02}", d.year(), d.month());
by_month.entry(key).or_default().push(c.clone());
}
}
for (month, list) in by_month {
let path = dir.join(format!("changes_{}.json", month));
let mut all = if path.exists() {
let s = fs::read_to_string(&path).await?;
serde_json::from_str(&s).unwrap_or_default()
} else { vec![] };
all.extend(list);
fs::write(&path, serde_json::to_string_pretty(&all)?).await?;
}
Ok(())
}
pub async fn save_prices_for_ticker(ticker: &str, mut prices: Vec<CompanyPrice>) -> anyhow::Result<()> {
let dir = std::path::Path::new("corporate_prices"); let dir = std::path::Path::new("corporate_prices");
fs::create_dir_all(dir).await?; fs::create_dir_all(dir).await?;
let path = dir.join(format!("{}.json", ticker)); let path = dir.join(format!("{}.json", ticker));
// Optional: sort by date
let mut prices = prices;
prices.sort_by_key(|p| p.date.clone()); prices.sort_by_key(|p| p.date.clone());
let json = serde_json::to_string_pretty(&prices)?; let json = serde_json::to_string_pretty(&prices)?;

View File

@@ -31,7 +31,7 @@ pub struct CompanyPrice {
pub struct CompanyEventChange { pub struct CompanyEventChange {
pub ticker: String, pub ticker: String,
pub date: String, pub date: String,
pub field: String, // "time", "eps_forecast", "eps_actual", "new_event" pub field_changed: String, // "time", "eps_forecast", "eps_actual", "new_event"
pub old_value: String, pub old_value: String,
pub new_value: String, pub new_value: String,
pub detected_at: String, pub detected_at: String,

View File

@@ -1,31 +1,83 @@
// src/corporate/update.rs // src/corporate/update.rs
use super::{scraper::*, storage::*, types::*}; use super::{scraper::*, storage::*, helpers::*, types::*};
use crate::config::Config; use crate::config::Config;
use chrono::Local; use chrono::Local;
use std::collections::{HashMap, HashSet}; use std::collections::HashMap;
pub async fn run_full_update(client: &fantoccini::Client, tickers: Vec<String>, config: &Config) -> anyhow::Result<()> { pub async fn run_full_update(client: &fantoccini::Client, tickers: Vec<String>, config: &Config) -> anyhow::Result<()> {
println!("Updating {} tickers (prices from {})", tickers.len(), config.corporate_start_date); println!("Updating {} tickers (prices from {})", tickers.len(), config.corporate_start_date);
let today = chrono::Local::now().format("%Y-%m-%d").to_string(); let today = chrono::Local::now().format("%Y-%m-%d").to_string();
for ticker in tickers { let mut existing = load_existing_events().await?;
for ticker in &tickers {
print!("{:6} ", ticker); print!("{:6} ", ticker);
// Earnings if let Ok(new_events) = fetch_earnings_history(client, ticker).await {
if let Ok(events) = fetch_earnings_history(client, &ticker).await { let result = process_batch(&new_events, &mut existing, &today);
merge_and_save_events(&ticker, events.clone()).await?; save_changes(&result.changes).await?;
println!("{} earnings", events.len()); println!("{} earnings, {} changes", new_events.len(), result.changes.len());
} }
// Prices now using config.corporate_start_date if let Ok(prices) = fetch_price_history(ticker, &config.corporate_start_date, &today).await {
if let Ok(prices) = fetch_price_history(client, &ticker, &config.corporate_start_date, &today).await { save_prices_for_ticker(ticker, prices).await?;
save_prices_for_ticker(&ticker, prices).await?;
} }
tokio::time::sleep(tokio::time::Duration::from_millis(250)).await; tokio::time::sleep(tokio::time::Duration::from_millis(250)).await;
} }
save_optimized_events(existing).await?;
Ok(()) Ok(())
}
/// Outcome of merging one batch of freshly scraped events into the stored set.
pub struct ProcessResult {
/// Field-level differences detected against previously stored events.
pub changes: Vec<CompanyEventChange>,
}
pub fn process_batch(
new_events: &[CompanyEvent],
existing: &mut HashMap<String, CompanyEvent>,
today: &str,
) -> ProcessResult {
let mut changes = Vec::new();
for new in new_events {
let key = event_key(new);
if let Some(old) = existing.get(&key) {
changes.extend(detect_changes(old, new, today));
existing.insert(key, new.clone());
continue;
}
// Check for time change on same date
let date_key = format!("{}|{}", new.ticker, new.date);
let mut found_old = None;
for (k, e) in existing.iter() {
if format!("{}|{}", e.ticker, e.date) == date_key && k != &key {
found_old = Some((k.clone(), e.clone()));
break;
}
}
if let Some((old_key, old_event)) = found_old {
if new.date.as_str() > today {
changes.push(CompanyEventChange {
ticker: new.ticker.clone(),
date: new.date.clone(),
field_changed: "time".to_string(),
old_value: old_event.time.clone(),
new_value: new.time.clone(),
detected_at: Local::now().format("%Y-%m-%d %H:%M:%S").to_string(),
});
}
existing.remove(&old_key);
}
existing.insert(key, new.clone());
}
ProcessResult { changes }
} }