added function aggregating price data from multiple ticker sources

2025-11-24 17:19:36 +01:00
parent 7b680f960f
commit 9cfcae84ea
14 changed files with 443 additions and 44 deletions


@@ -0,0 +1,127 @@
// src/corporate/aggregation.rs
use super::types::CompanyPrice;
use super::storage::*;
use tokio::fs;
use std::collections::HashMap;

/// Accumulator for one bucket: a date (daily) or date+time (5min).
#[derive(Debug)]
struct DayData {
    sources: Vec<CompanyPrice>,
    total_volume: u64,
    /// Running sum of close · volume; divided by `total_volume` at the end to get the VWAP.
    vwap: f64,
    open: f64,
    high: f64,
    low: f64,
    close: f64,
}

pub async fn aggregate_best_price_data(isin: &str) -> anyhow::Result<()> {
    let company_dir = get_company_dir(isin);
    for timeframe in ["daily", "5min"] {
        let source_dir = company_dir.join(timeframe);
        if !source_dir.exists() {
            continue;
        }

        let mut all_prices: Vec<CompanyPrice> = Vec::new();
        let mut by_date_time: HashMap<String, DayData> = HashMap::new();
        let mut source_count: usize = 0;

        // Load prices.json from every source subdirectory (one per data provider).
        let mut entries = fs::read_dir(&source_dir).await?;
        while let Some(entry) = entries.next_entry().await? {
            let entry_path = entry.path(); // renamed: don't shadow `source_dir`
            if !entry_path.is_dir() { continue; }
            let prices_path = entry_path.join("prices.json");
            if !prices_path.exists() { continue; }
            let content = fs::read_to_string(&prices_path).await?;
            let mut prices: Vec<CompanyPrice> = serde_json::from_str(&content)?;
            all_prices.append(&mut prices);
            source_count += 1;
        }
        if all_prices.is_empty() {
            continue;
        }

        // Group by date + time (for 5min bars) or by date alone (daily bars).
        for p in all_prices {
            let key = if timeframe == "5min" && !p.time.is_empty() {
                format!("{}_{}", p.date, p.time)
            } else {
                p.date.clone()
            };
            let entry = by_date_time.entry(key).or_insert(DayData {
                sources: vec![],
                total_volume: 0,
                vwap: 0.0,
                open: p.open, // first bar seen sets the open
                high: p.high,
                low: p.low,
                close: p.close,
            });
            let volume = p.volume.max(1); // avoid division by zero later
            entry.vwap += p.close * volume as f64; // accumulate the VWAP numerator
            entry.total_volume += volume;
            entry.sources.push(p.clone());
            // First open (set above at insertion), last close, max high, min low.
            entry.close = p.close;
            entry.high = entry.high.max(p.high);
            entry.low = entry.low.min(p.low);
        }
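
        // VWAP for one bucket: vwap = Σ(close_i · volume_i) / Σ(volume_i).
        // Worked example (hypothetical numbers): sources closing at 10.00
        // (volume 300) and 10.10 (volume 100) give
        // (10.00·300 + 10.10·100) / 400 = 10.025.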
        // Convert to USD and finalize each bucket.
        let mut aggregated: Vec<CompanyPrice> = Vec::new();
        for (key, data) in by_date_time {
            let vwap = data.vwap / data.total_volume as f64;
            // Pick the currency from the highest-volume source; this assumes
            // all sources in a bucket quote in the same currency.
            let best_source = data.sources.iter()
                .max_by_key(|p| p.volume)
                .expect("every bucket holds at least one source bar");
            let usd_rate = super::fx::get_usd_rate(&best_source.currency).await.unwrap_or(1.0);
            let (date, time) = match key.split_once('_') {
                Some((d, t)) => (d.to_string(), t.to_string()),
                None => (key, String::new()),
            };
            aggregated.push(CompanyPrice {
                ticker: isin.to_string(), // use the ISIN as the unified ticker
                date,
                time,
                open: data.open * usd_rate,
                high: data.high * usd_rate,
                low: data.low * usd_rate,
                close: data.close * usd_rate,
                adj_close: vwap * usd_rate, // store the VWAP in adj_close
                volume: data.total_volume,
                currency: "USD".to_string(), // aggregated bars are always USD
            });
        }
        aggregated.sort_by_key(|p| (p.date.clone(), p.time.clone()));

        // Save the aggregated result.
        let agg_dir = company_dir.join("aggregated").join(timeframe);
        fs::create_dir_all(&agg_dir).await?;
        let path = agg_dir.join("prices.json");
        fs::write(&path, serde_json::to_string_pretty(&aggregated)?).await?;
        println!("  Aggregated {} {} bars from {} sources", aggregated.len(), timeframe, source_count);
    }
    Ok(())
}
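
The pieces this module leans on — `CompanyPrice` from `super::types`, `get_company_dir` from `super::storage`, and `super::fx::get_usd_rate` — are defined in other files of this commit and not shown above. Below is a minimal sketch of the shapes the aggregation code assumes: the field names are inferred from the struct literal above, the derives are forced by `serde_json::from_str`, `to_string_pretty`, and the `p.clone()` calls, and the helper bodies are hypothetical stubs, not the commit's real implementations.

// Sketch only — inferred from usage in aggregation.rs; the real definitions
// live in super::types, super::storage and super::fx within this commit.
use serde::{Deserialize, Serialize};
use std::path::PathBuf;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CompanyPrice {
    pub ticker: String,
    pub date: String,     // e.g. "2025-11-24"
    pub time: String,     // empty for daily bars, e.g. "09:05" for 5min bars
    pub open: f64,
    pub high: f64,
    pub low: f64,
    pub close: f64,
    pub adj_close: f64,   // the aggregation stores the VWAP here
    pub volume: u64,
    pub currency: String, // ISO code such as "EUR"
}

// Hypothetical stubs matching the call sites above.
pub fn get_company_dir(isin: &str) -> PathBuf {
    PathBuf::from("data/companies").join(isin) // assumed layout
}

pub async fn get_usd_rate(currency: &str) -> Option<f64> {
    // A real implementation would query an FX source; the caller
    // falls back to 1.0 via unwrap_or when this returns None.
    (currency == "USD").then_some(1.0)
}

With those in place, the entry point would be invoked once per company, e.g. `aggregate_best_price_data("US0378331005").await?` inside a tokio runtime (hypothetical ISIN).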