WIP: pivot-point caching and Williams pivot level retrieval

This commit is contained in:
Dobromir Popov
2025-10-20 15:21:44 +03:00
parent ba8813f04f
commit e993bc2831
9 changed files with 1630 additions and 99 deletions

View File

@@ -1894,20 +1894,24 @@ class DataProvider:
# Extract pivot points from all Williams levels
for level_key, level_data in pivot_levels.items():
if level_data and hasattr(level_data, 'swing_points') and level_data.swing_points:
# Get prices from swing points
level_prices = [sp.price for sp in level_data.swing_points]
# Update overall price bounds
price_max = max(price_max, max(level_prices))
price_min = min(price_min, min(level_prices))
# Extract support and resistance levels
if hasattr(level_data, 'support_levels') and level_data.support_levels:
support_levels.extend(level_data.support_levels)
if hasattr(level_data, 'resistance_levels') and level_data.resistance_levels:
resistance_levels.extend(level_data.resistance_levels)
if level_data and hasattr(level_data, 'pivot_points') and level_data.pivot_points:
# Separate pivot points into support and resistance based on type
for pivot in level_data.pivot_points:
price = getattr(pivot, 'price', None)
pivot_type = getattr(pivot, 'pivot_type', 'low')
if price is None:
continue
# Update price bounds
price_max = max(price_max, price)
price_min = min(price_min, price)
# Add to appropriate level list based on pivot type
if pivot_type.lower() == 'high':
resistance_levels.append(price)
else: # 'low'
support_levels.append(price)
# Remove duplicates and sort
support_levels = sorted(list(set(support_levels)))
@@ -1949,6 +1953,8 @@ class DataProvider:
volume_min=float(volume_min),
pivot_support_levels=support_levels,
pivot_resistance_levels=resistance_levels,
pivot_context=pivot_context,
created_timestamp=datetime.now(),
data_period_start=monthly_data['timestamp'].min(),
data_period_end=monthly_data['timestamp'].max(),
total_candles_analyzed=len(monthly_data)
@@ -2242,6 +2248,44 @@ class DataProvider:
"""Get pivot bounds for a symbol"""
return self.pivot_bounds.get(symbol)
def get_williams_pivot_levels(self, symbol: str) -> Dict[int, Any]:
"""Get Williams Market Structure pivot levels with full trend analysis
Returns:
Dictionary mapping level (1-5) to TrendLevel objects containing:
- pivot_points: List of PivotPoint objects with timestamps and prices
- trend_direction: 'up', 'down', or 'sideways'
- trend_strength: 0.0 to 1.0
"""
try:
if symbol not in self.williams_structure:
logger.warning(f"Williams structure not initialized for {symbol}")
return {}
# Calculate fresh pivot points from current cached data
df_1m = self.get_historical_data(symbol, '1m', limit=2000)
if df_1m is None or len(df_1m) < 100:
logger.warning(f"Insufficient 1m data for Williams pivot calculation: {symbol}")
return {}
# Convert DataFrame to numpy array
ohlcv_array = df_1m[['open', 'high', 'low', 'close', 'volume']].copy()
# Add timestamp as first column (convert to seconds)
timestamps = df_1m.index.astype(np.int64) // 10**9 # Convert to seconds
ohlcv_array.insert(0, 'timestamp', timestamps)
ohlcv_array = ohlcv_array.to_numpy()
# Calculate recursive pivot points
williams = self.williams_structure[symbol]
pivot_levels = williams.calculate_recursive_pivot_points(ohlcv_array)
logger.debug(f"Retrieved Williams pivot levels for {symbol}: {len(pivot_levels)} levels")
return pivot_levels
except Exception as e:
logger.error(f"Error getting Williams pivot levels for {symbol}: {e}")
return {}
def get_pivot_normalized_features(self, symbol: str, df: pd.DataFrame) -> Optional[pd.DataFrame]:
"""Get dataframe with pivot-normalized features"""
try: