new bot 2

Dobromir Popov
2025-03-10 10:29:40 +02:00
parent c8b0f77d32
commit 9b6d3f94ed
8 changed files with 1307 additions and 7 deletions

crypto/gogo/_prompts.md Normal file

@@ -0,0 +1,2 @@
Let's extend that to have 32 more values - they will be added later, but we need our model architecture to support them.
We'd also want to have 5 different timeframes at once: 1s (ticks - probably only price and EMAs), 1m, 15m, 1h and 1d. Each module will accept all the data, but will produce a prediction only for its own timeframe.
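A rough sketch of the per-timeframe layout described above, assuming one embedding branch per timeframe feeding a shared hidden size; the class, parameter names, and feature counts here are illustrative, not taken from the repo:

import torch.nn as nn

class MultiTimeframeEmbedding(nn.Module):
    """Illustrative only: one linear embedding branch per timeframe."""
    def __init__(self, feature_dims, d_model=512):
        super().__init__()
        # feature_dims maps timeframe -> number of input features, e.g.
        # {'1s': 2, '1m': 11, '15m': 11, '1h': 11, '1d': 11}
        # (the 1s branch carries only price/EMA-style features, per the note above)
        self.branches = nn.ModuleDict({
            tf: nn.Linear(dim, d_model) for tf, dim in feature_dims.items()
        })

    def forward(self, inputs):
        # inputs: dict mapping timeframe -> tensor [batch, seq_len, features].
        # Each branch embeds its own timeframe; letting every module "see" all
        # timeframes would happen in later (e.g. cross-attention) layers,
        # which are not sketched here.
        return {tf: branch(inputs[tf]) for tf, branch in self.branches.items()}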


@@ -9,6 +9,10 @@ import ccxt.async_support as ccxt
from dotenv import load_dotenv
import platform
# Set Windows event loop policy at module level
if platform.system() == 'Windows':
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
class LiveDataManager:
def __init__(self, symbol, exchange_name='mexc', window_size=120):
load_dotenv() # Load environment variables
@@ -45,12 +49,16 @@ class LiveDataManager:
retries = 3
for attempt in range(retries):
try:
candles = await self.exchange.fetch_ohlcv(self.symbol, '1m', since=since, limit=self.window_size)
candles = await self.exchange.fetch_ohlcv(
self.symbol, '1m', since=since, limit=self.window_size
)
for candle in candles:
self.candles.append(self._format_candle(candle))
if candles:
self.last_candle_time = candles[-1][0]
print(f"Fetched {len(candles)} initial candles.")
print(f"""Fetched {len(candles)} initial candles for period {since} to {now}.
Price range: {min(candle[1] for candle in candles)} to {max(candle[2] for candle in candles)}.
Current price: {candles[-1][4]}. Total volume: {sum(candle[5] for candle in candles)}""")
return # Exit the function if successful
except Exception as e:
print(f"Attempt {attempt + 1} failed: {e}")


@@ -133,6 +133,7 @@ class Transformer(nn.Module):
def __init__(self, input_dim, d_model, num_heads, num_layers, d_ff, dropout=0.1):
super(Transformer, self).__init__()
self.input_dim = input_dim
self.candle_embedding = nn.Linear(input_dim, d_model)
self.tick_embedding = nn.Linear(2, d_model) # Each tick has price and quantity
@@ -152,10 +153,11 @@ class Transformer(nn.Module):
self.future_ticks_decoder = Decoder(num_layers, d_model, num_heads, d_ff, dropout)
self.future_ticks_projection = nn.Linear(d_model, 60) # 30 ticks * (price, quantity) = 60
def forward(self, candle_data, tick_data, future_candle_mask, future_ticks_mask):
# candle_data: [batch_size, seq_len, input_dim]
# tick_data: [batch_size, tick_seq_len, 2]
def forward(self, candle_data, tick_data, future_candle_mask=None, future_ticks_mask=None):
# Print shapes for debugging
# print(f"Candle data shape: {candle_data.shape}, Expected input dim: {self.input_dim}")
# Embed candle data
candle_embedded = self.candle_embedding(candle_data)
candle_embedded = self.positional_encoding(candle_embedded) # Add positional info
@@ -189,7 +191,7 @@ class Transformer(nn.Module):
# Example instantiation (adjust parameters for ~1B parameters)
if __name__ == '__main__':
input_dim = 6 + len([5, 10, 20, 60, 120, 200]) # OHLCV + EMAs
input_dim = 11 # Changed from 12 to 11 to match the actual input data
d_model = 512 # Hidden dimension
num_heads = 8
num_layers = 6 # Number of encoder/decoder layers
@@ -220,3 +222,22 @@ if __name__ == '__main__':
print("Future Candle Prediction Shape:", future_candle_pred.shape) # Expected: [batch_size, 1, 5]
print("Future Volume Prediction Shape:", future_volume_pred.shape) # Expected: [batch_size, 1, 1]
print("Future Ticks Prediction Shape:", future_ticks_pred.shape) # Expected: [batch_size, 30, 2]
# Make sure to use this when instantiating the model
def create_model(input_dim=11):
d_model = 512 # Hidden dimension
num_heads = 8
num_layers = 6 # Number of encoder/decoder layers
d_ff = 2048 # Feedforward dimension
dropout = 0.1
model = Transformer(
input_dim=input_dim,
d_model=d_model,
num_heads=num_heads,
num_layers=num_layers,
d_ff=d_ff,
dropout=dropout
)
return model
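A quick way to sanity-check create_model is to run dummy tensors through it, shaped like the candle and tick inputs used in the __main__ example above. This is a hypothetical snippet, assuming the forward pass accepts the default None masks and returns the three prediction tensors whose shapes are printed earlier; batch size and tick sequence length are arbitrary:

import torch

model = create_model(input_dim=11)
candle_data = torch.randn(4, 120, 11)  # [batch_size, seq_len, input_dim]
tick_data = torch.randn(4, 30, 2)      # [batch_size, tick_seq_len, (price, quantity)]
candle_pred, volume_pred, ticks_pred = model(candle_data, tick_data)
print(candle_pred.shape, volume_pred.shape, ticks_pred.shape)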