reduce cob model to 400m
config.yaml | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
@@ -199,13 +199,13 @@ memory:
 # Real-time RL COB Trader Configuration
 realtime_rl:
-  # Model parameters for 1B parameter network
+  # Model parameters for 400M parameter network (faster startup)
   model:
     input_size: 2000  # COB feature dimensions
-    hidden_size: 4096  # Massive hidden layer size
-    num_layers: 12  # Deep transformer layers
-    learning_rate: 0.00001  # Very low for stability
-    weight_decay: 0.000001  # L2 regularization
+    hidden_size: 2048  # Optimized hidden layer size for 400M params
+    num_layers: 8  # Efficient transformer layers for faster training
+    learning_rate: 0.0001  # Higher learning rate for faster convergence
+    weight_decay: 0.00001  # Balanced L2 regularization
 
   # Inference configuration
   inference_interval_ms: 200  # Inference every 200ms
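Why these values land near the 400M label: in a standard transformer layout, each layer costs roughly 12 * hidden_size^2 parameters (4h^2 for the attention projections plus 8h^2 for a feed-forward block with 4x expansion). This is a sanity-check sketch only; the commit does not show the actual network class, so the layer layout and the single input projection below are assumptions:

    # Rough parameter count for the realtime_rl model settings.
    def transformer_params(hidden_size: int, num_layers: int, input_size: int) -> int:
        attn = 4 * hidden_size * hidden_size        # Q, K, V, output projections
        ffn = 8 * hidden_size * hidden_size         # up/down projections, 4x expansion
        embed = input_size * hidden_size            # input projection for COB features
        return num_layers * (attn + ffn) + embed    # biases and norms ignored

    # hidden_size=2048, num_layers=8, input_size=2000 -> ~407M parameters
    print(transformer_params(2048, 8, 2000))        # 406749184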
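For the inference side, a minimal sketch of consuming this section and holding the 200ms cadence, assuming PyYAML and the key names from the diff (the config plumbing and the run_inference hook are hypothetical, not from this repo):

    import time
    import yaml

    with open("config.yaml") as f:
        cfg = yaml.safe_load(f)

    rl = cfg["realtime_rl"]
    model_cfg = rl["model"]                         # input_size, hidden_size, ...
    interval = rl["inference_interval_ms"] / 1000   # 200ms -> 0.2s

    while True:
        start = time.monotonic()
        # run_inference(model_cfg)                  # placeholder for the model call
        elapsed = time.monotonic() - start
        time.sleep(max(0.0, interval - elapsed))    # sleep off the remainder of the 200ms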