Added GLEIF LEI↔ISIN mapping; corporate update is now LEI-keyed instead of ISIN-keyed

This commit is contained in:
2025-11-25 00:21:51 +01:00
parent bbc19f2110
commit e57a013224
10 changed files with 574 additions and 104 deletions

View File

@@ -6,122 +6,137 @@ use chrono::Local;
use std::collections::HashMap;
pub async fn run_full_update(client: &fantoccini::Client, config: &Config) -> anyhow::Result<()> {
println!("Starting company-centric corporate update (ISIN-based)");
println!("Starting LEI-based corporate update");
// 1. Download fresh GLEIF ISIN↔LEI mapping on every run
let lei_to_isins: HashMap<String, Vec<String>> = match load_isin_lei_csv() {
Ok(map) => map,
Err(e) => {
println!("Warning: Failed to load ISIN↔LEI mapping: {}", e);
HashMap::new()
}
};
//let _isin_to_lei = load_isin_to_lei()?; // optional, useful for migration scripts
let companies = load_companies().await?;
let today = chrono::Local::now().format("%Y-%m-%d").to_string();
let mut existing_events = load_existing_events().await?;
for company in companies {
println!("\nProcessing company: {} ({})", company.name, company.isin);
let companies = load_companies().await?; // Vec<CompanyMetadata> with lei, isins, tickers
ensure_company_dirs(&company.isin).await?;
save_company_metadata(&company).await?;
for mut company in companies {
println!("\nProcessing company: {} (LEI: {})", company.name, company.lei);
// === STEP 1: Discover all available exchanges ===
let mut all_tickers = company.tickers.clone();
// Try to discover additional exchanges using the primary ticker
if let Some(primary_ticker) = company.tickers.iter().find(|t| t.primary) {
println!(" 🔍 Discovering additional exchanges...");
match discover_available_exchanges(&company.isin, &primary_ticker.ticker).await {
Ok(discovered) => {
// Merge discovered tickers with existing ones
for disc in discovered {
if !all_tickers.iter().any(|t| t.ticker == disc.ticker) {
println!(" ✓ Found new exchange: {} ({})", disc.ticker, disc.exchange_mic);
all_tickers.push(disc);
}
}
// === Enrich with ALL ISINs known to GLEIF (includes ADRs, GDRs, etc.) ===
if let Some(all_isins) = lei_to_isins.get(&company.lei) {
let mut seen = company.isins.iter().cloned().collect::<std::collections::HashSet<_>>();
for isin in all_isins {
if !seen.contains(isin) {
company.isins.push(isin.clone());
seen.insert(isin.clone());
}
Err(e) => println!(" ⚠ Discovery failed: {}", e),
}
}
// Update metadata with newly discovered tickers
if all_tickers.len() > company.tickers.len() {
let updated_company = CompanyMetadata {
isin: company.isin.clone(),
name: company.name.clone(),
tickers: all_tickers.clone(),
};
save_company_metadata(&updated_company).await?;
println!(" 📝 Updated metadata with {} total tickers", all_tickers.len());
// Ensure company directory exists (now uses LEI)
ensure_company_dirs(&company.lei).await?;
save_company_metadata(&company).await?;
// === STEP 1: Discover additional exchanges using each known ISIN ===
let mut all_tickers = company.tickers.clone();
if let Some(primary_ticker) = company.tickers.iter().find(|t| t.primary) {
println!(" Discovering additional exchanges across {} ISIN(s)...", company.isins.len());
for isin in &company.isins {
println!(" → Checking ISIN: {}", isin);
match discover_available_exchanges(isin, &primary_ticker.ticker).await {
Ok(discovered) => {
if discovered.is_empty() {
println!(" No new exchanges found for {}", isin);
} else {
for disc in discovered {
if !all_tickers.iter().any(|t| t.ticker == disc.ticker && t.exchange_mic == disc.exchange_mic) {
println!(" Found new listing: {} ({}) [ISIN: {}]", disc.ticker, disc.exchange_mic, isin);
all_tickers.push(disc);
}
}
}
}
Err(e) => println!(" Discovery failed for {}: {}", isin, e),
}
tokio::time::sleep(tokio::time::Duration::from_millis(300)).await;
}
}
// === STEP 2: Fetch data from all available exchanges ===
// Save updated metadata if we found new listings
if all_tickers.len() > company.tickers.len() {
company.tickers = all_tickers.clone();
save_company_metadata(&company).await?;
println!(" Updated metadata: {} total tickers", all_tickers.len());
}
// === STEP 2: Fetch data from ALL available tickers ===
for ticker_info in &all_tickers {
let ticker = &ticker_info.ticker;
println!("Trying ticker: {} ({})", ticker, ticker_info.exchange_mic);
println!("Fetching: {} ({})", ticker, ticker_info.exchange_mic);
let mut daily_success = false;
let mut intraday_success = false;
// Earnings (only from primary ticker to avoid duplicates)
// Earnings: only fetch from primary ticker to avoid duplicates
if ticker_info.primary {
if let Ok(new_events) = fetch_earnings_history(client, ticker).await {
let result = process_batch(&new_events, &mut existing_events, &today);
save_changes(&result.changes).await?;
println!(" {} earnings events", new_events.len());
println!(" Earnings events: {}", new_events.len());
}
}
// Daily prices
match fetch_daily_price_history(ticker, &config.corporate_start_date, &today).await {
Ok(prices) => {
if !prices.is_empty() {
save_prices_by_source(&company.isin, ticker, "daily", prices.clone()).await?;
daily_success = true;
println!(" ✓ Saved {} daily bars ({} currency)",
prices.len(),
prices.first().map(|p| p.currency.as_str()).unwrap_or("?")
);
}
if let Ok(prices) = fetch_daily_price_history(ticker, &config.corporate_start_date, &today).await {
if !prices.is_empty() {
save_prices_by_source(&company.lei, ticker, "daily", prices).await?;
daily_success = true;
}
Err(e) => println!(" ✗ Daily fetch failed: {}", e),
}
// 5-minute prices (last 60 days)
// 5-minute intraday (last 60 days)
let sixty_days_ago = (chrono::Local::now() - chrono::Duration::days(60))
.format("%Y-%m-%d")
.to_string();
match fetch_price_history_5min(ticker, &sixty_days_ago, &today).await {
Ok(prices) => {
if !prices.is_empty() {
save_prices_by_source(&company.isin, ticker, "5min", prices.clone()).await?;
intraday_success = true;
println!(" ✓ Saved {} 5min bars", prices.len());
}
if let Ok(prices) = fetch_price_history_5min(ticker, &sixty_days_ago, &today).await {
if !prices.is_empty() {
save_prices_by_source(&company.lei, ticker, "5min", prices).await?;
intraday_success = true;
}
Err(e) => println!(" ✗ 5min fetch failed: {}", e),
}
// Record success in available_exchanges.json
if daily_success || intraday_success {
update_available_exchange(
&company.isin,
ticker,
&ticker_info.exchange_mic,
daily_success,
intraday_success,
).await?;
}
// Update available_exchanges.json (now under LEI folder)
update_available_exchange(
&company.lei,
ticker,
&ticker_info.exchange_mic,
daily_success,
intraday_success,
).await?;
tokio::time::sleep(tokio::time::Duration::from_millis(800)).await;
}
// === STEP 3: Aggregate prices from all sources ===
println!(" 📊 Aggregating multi-exchange data with FX conversion...");
match aggregate_best_price_data(&company.isin).await {
Ok(_) => println!(" Aggregation complete"),
Err(e) => println!(" ⚠ Aggregation warning: {}", e),
// === STEP 3: Aggregate all sources into unified USD prices ===
println!(" Aggregating multi-source price data (FX-adjusted)...");
if let Err(e) = aggregate_best_price_data(&company.lei).await {
println!(" Aggregation failed: {}", e);
} else {
println!(" Aggregation complete");
}
}
// Final save of optimized earnings events
save_optimized_events(existing_events).await?;
println!("\nCorporate update complete (ISIN-based)");
println!("\nCorporate update complete (LEI-based)");
Ok(())
}