From a681e21fda6b4a4cb01fdf7e76735bbf851a7515 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Feb 2026 05:56:10 +0000
Subject: [PATCH 1/2] Initial plan

From 6fe38d7e219734c53f6b8bb431fa711ec8a3d6d1 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 9 Feb 2026 06:05:01 +0000
Subject: [PATCH 2/2] Fix pipeline errors: rename tvl_ratio->tvl_risk, remove
 deprecated datetime, eliminate redundant API call

Co-authored-by: studiofarzulla <62593503+studiofarzulla@users.noreply.github.com>
---
 src/asri/backtest/backtest.py     |  4 ++--
 src/asri/pipeline/calculate.py    |  4 ++--
 src/asri/pipeline/orchestrator.py | 30 +++++++++++++++---------------
 src/asri/pipeline/transform.py    |  8 ++++----
 src/asri/signals/calculator.py    |  4 ++--
 5 files changed, 25 insertions(+), 25 deletions(-)

diff --git a/src/asri/backtest/backtest.py b/src/asri/backtest/backtest.py
index 6719628..69f515f 100644
--- a/src/asri/backtest/backtest.py
+++ b/src/asri/backtest/backtest.py
@@ -116,7 +116,7 @@ class ASRIBacktester:

     # Sub-index calculation weights (from transform layer)
     STABLECOIN_WEIGHTS = {
-        'tvl_ratio': 0.4,
+        'tvl_risk': 0.4,
         'treasury_stress': 0.3,
         'concentration_hhi': 0.2,
         'peg_volatility': 0.1,
@@ -204,7 +204,7 @@ def _snapshot_to_inputs(
     concentration_risk = normalize_hhi_to_risk(hhi)

     stablecoin_inputs = StablecoinRiskInputs(
-        tvl_ratio=tvl_risk,
+        tvl_risk=tvl_risk,
         treasury_stress=treasury_stress,
         concentration_hhi=concentration_risk,
         peg_volatility=10.0,  # Default - can't easily get historical peg data
diff --git a/src/asri/pipeline/calculate.py b/src/asri/pipeline/calculate.py
index 93a01bf..d110d02 100644
--- a/src/asri/pipeline/calculate.py
+++ b/src/asri/pipeline/calculate.py
@@ -1,7 +1,7 @@
 """ASRI calculation pipeline."""

 import asyncio
-from datetime import datetime
+from datetime import datetime, timezone

 import structlog

@@ -33,7 +33,7 @@ async def calculate_and_store_asri(
         ASRIDaily database record
     """
     if date is None:
-        date = datetime.utcnow()
+        date = datetime.now(timezone.utc)

     logger.info(
         "Calculating ASRI",
diff --git a/src/asri/pipeline/orchestrator.py b/src/asri/pipeline/orchestrator.py
index 7627667..b402608 100644
--- a/src/asri/pipeline/orchestrator.py
+++ b/src/asri/pipeline/orchestrator.py
@@ -10,7 +10,7 @@

 import asyncio
 import os
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone

 import structlog
 from dotenv import load_dotenv
@@ -38,7 +38,7 @@ class ASRIOrchestrator:

     # Weights for converting transformed inputs to sub-index scores
     STABLECOIN_WEIGHTS = {
-        'tvl_ratio': 0.4,
+        'tvl_risk': 0.4,
         'treasury_stress': 0.3,
         'concentration_hhi': 0.2,
         'peg_volatility': 0.1,
@@ -93,11 +93,10 @@ async def fetch_all_data(self) -> dict:

         # Fetch all data concurrently
         results = await asyncio.gather(
-            self.defillama.get_total_tvl(),
+            self.defillama.get_tvl_history(),  # Gets both current and historical TVL
             self.defillama.get_stablecoins(),
             self.defillama.get_protocols(),
             self.defillama.get_bridges(),
-            self.defillama.get_tvl_history(),
             self.fred.fetch_series('DGS10', start_date='2024-01-01'),
             self.fred.fetch_series('VIXCLS', start_date='2024-01-01'),
             self.fred.fetch_series('T10Y2Y', start_date='2024-01-01'),
@@ -109,11 +108,10 @@ async def fetch_all_data(self) -> dict:

         # Unpack results
         (
-            total_tvl,
+            tvl_history,
             stablecoins,
             protocols,
             bridges,
-            tvl_history,
             dgs10_data,
             vix_data,
             spread_data,
@@ -172,17 +170,19 @@ def get_fred_price_series(data):
             crypto_equity_corr = 0.5  # Default if data unavailable
             logger.warning("Using default correlation (data unavailable)")

-        # Calculate max historical TVL
+        # Calculate max historical TVL and extract current TVL
         if not isinstance(tvl_history, Exception) and tvl_history:
             max_tvl = max(p.tvl for p in tvl_history)
+            total_tvl = tvl_history[-1].tvl if tvl_history else 100e9  # Latest TVL
             historical_tvls = [p.tvl for p in tvl_history[-30:]]  # Last 30 days
         else:
-            max_tvl = total_tvl if not isinstance(total_tvl, Exception) else 100e9
+            max_tvl = 100e9
+            total_tvl = 100e9
             historical_tvls = None

         logger.info(
             "Data fetch complete",
-            total_tvl=total_tvl if not isinstance(total_tvl, Exception) else "error",
+            total_tvl=total_tvl,
             num_stables=len(stablecoins) if not isinstance(stablecoins, Exception) else "error",
             num_protocols=len(protocols) if not isinstance(protocols, Exception) else "error",
             treasury_10y=treasury_10y,
@@ -197,7 +197,7 @@ def get_fred_price_series(data):
             reg_sentiment = news_sentiment

         return {
-            'total_tvl': total_tvl if not isinstance(total_tvl, Exception) else 100e9,
+            'total_tvl': total_tvl,
             'max_tvl': max_tvl,
             'stablecoins': stablecoins if not isinstance(stablecoins, Exception) else [],
             'protocols': protocols if not isinstance(protocols, Exception) else [],
@@ -296,7 +296,7 @@ async def calculate_asri(self) -> dict:
         )

         return {
-            'timestamp': datetime.utcnow(),
+            'timestamp': datetime.now(timezone.utc),
             'asri': result.asri,
             'asri_normalized': result.asri_normalized,
             'alert_level': result.alert_level,
@@ -344,7 +344,7 @@ async def save_to_db(self, result: dict, session: AsyncSession | None = None) ->

         async with async_session() as db:
             # Get last 30 days of data for average
-            thirty_days_ago = datetime.utcnow() - timedelta(days=30)
+            thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)
             stmt = select(ASRIDaily).where(ASRIDaily.date >= thirty_days_ago).order_by(ASRIDaily.date)
             history = await db.execute(stmt)
             history_records = history.scalars().all()
@@ -363,7 +363,7 @@ async def save_to_db(self, result: dict, session: AsyncSession | None = None) ->
                 trend = "decreasing"

             # Check if we already have a record for today
-            today = datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
+            today = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)
             stmt = select(ASRIDaily).where(ASRIDaily.date == today)
             existing = await db.execute(stmt)
             record = existing.scalar_one_or_none()
@@ -379,7 +379,7 @@ async def save_to_db(self, result: dict, session: AsyncSession | None = None) ->
                 record.defi_liquidity_risk = result['sub_indices']['defi_liquidity_risk']
                 record.contagion_risk = result['sub_indices']['contagion_risk']
                 record.arbitrage_opacity = result['sub_indices']['arbitrage_opacity']
-                record.updated_at = datetime.utcnow()
+                record.updated_at = datetime.now(timezone.utc)
                 logger.info("Updated existing ASRI record", date=today)
             else:
                 # Create new record
@@ -394,7 +394,7 @@ async def save_to_db(self, result: dict, session: AsyncSession | None = None) ->
                     defi_liquidity_risk=result['sub_indices']['defi_liquidity_risk'],
                     contagion_risk=result['sub_indices']['contagion_risk'],
                     arbitrage_opacity=result['sub_indices']['arbitrage_opacity'],
-                    created_at=datetime.utcnow(),
+                    created_at=datetime.now(timezone.utc),
                 )
                 db.add(record)
                 logger.info("Created new ASRI record", date=today)
diff --git a/src/asri/pipeline/transform.py b/src/asri/pipeline/transform.py
index 7c7ad99..9a16a96 100644
--- a/src/asri/pipeline/transform.py
+++ b/src/asri/pipeline/transform.py
@@ -6,7 +6,7 @@
 """

 from dataclasses import dataclass
-from datetime import datetime
+from datetime import datetime, timezone
 from typing import Any

 import numpy as np
@@ -24,7 +24,7 @@
 @dataclass
 class StablecoinRiskInputs:
     """Inputs for Stablecoin Concentration & Treasury Exposure sub-index."""
-    tvl_ratio: float  # current_tvl / max_historical_tvl
+    tvl_risk: float  # TVL drawdown risk score (0-100)
     treasury_stress: float  # normalized treasury rate stress
     concentration_hhi: float  # Herfindahl-Hirschman Index (0-10000 normalized)
     peg_volatility: float  # weighted average peg deviation
@@ -200,7 +200,7 @@ def transform_stablecoin_risk(
     )

     return StablecoinRiskInputs(
-        tvl_ratio=tvl_risk,
+        tvl_risk=tvl_risk,
         treasury_stress=treasury_stress,
         concentration_hhi=concentration_risk,
         peg_volatility=peg_volatility,
@@ -472,7 +472,7 @@ def transform_all_data(
     )

     return TransformedData(
-        timestamp=datetime.utcnow(),
+        timestamp=datetime.now(timezone.utc),
         stablecoin_risk=stablecoin_inputs,
         defi_liquidity_risk=defi_inputs,
         contagion_risk=contagion_inputs,
diff --git a/src/asri/signals/calculator.py b/src/asri/signals/calculator.py
index a5d29b6..2ec6c1d 100644
--- a/src/asri/signals/calculator.py
+++ b/src/asri/signals/calculator.py
@@ -5,7 +5,7 @@
 """

 from dataclasses import dataclass
-from datetime import datetime
+from datetime import datetime, timezone

 import numpy as np

@@ -222,7 +222,7 @@ def compute_asri(
     alert_level = determine_alert_level(asri_normalized)

     return ASRIResult(
-        timestamp=datetime.utcnow(),
+        timestamp=datetime.now(timezone.utc),
         asri=asri,
         asri_normalized=asri_normalized,
         sub_indices=sub_indices,
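
Standalone illustration (not part of the patch above) of the deprecated-datetime change applied across the files, written as a minimal sketch assuming plain CPython 3.9+ and none of the project's own helpers: datetime.utcnow() is deprecated as of Python 3.12 and yields a naive timestamp, whereas the replacement used throughout the patch yields an explicitly timezone-aware UTC timestamp.

    from datetime import datetime, timezone

    # Deprecated since Python 3.12; result carries no tzinfo (naive timestamp)
    naive = datetime.utcnow()
    print(naive.tzinfo)   # None

    # Replacement used in the patch: timezone-aware UTC timestamp
    aware = datetime.now(timezone.utc)
    print(aware.tzinfo)   # datetime.timezone.utc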