#148 #158 config refactoring to support profiles/reloading (#165)

Author: David Brazda
Date: 2024-03-06 14:30:24 +01:00
Committed by: GitHub
Parent: 9cff5fe6a1
Commit: 9391d89aab
21 changed files with 425 additions and 181 deletions

CODEOWNERS (new file, 1 line)

@@ -0,0 +1 @@
* @drew2323

run.sh (new file, 106 lines)

@@ -0,0 +1,106 @@
#!/bin/bash
# file: runstop.sh
#----
# Simple script to start / stop / restart a python script in the background.
#----
#----
# To Use:
# Run "./run.sh start" to start, "./run.sh stop" to stop, and "./run.sh restart" to restart.
#----
#----BEGIN EDITABLE VARS----
SCRIPT_TO_EXECUTE_PLUS_ARGS='v2realbot/main.py -u'
OUTPUT_PID_FILE=running.pid
OUTPUT_PID_PATH=$HOME
PYTHON_TO_USE="python3"
# If using 'virtualenv' with python, specify the local virtualenv dir.
#VIRTUAL_ENV_DIR=""
#----END EDITABLE VARS-------
# Additions for handling strat.log backup
HISTORY_DIR="$HOME/stratlogs"
TIMESTAMP=$(date +"%Y%m%d-%H%M%S")
LOG_FILE="strat.log"
BACKUP_LOG_FILE="$HISTORY_DIR/${TIMESTAMP}_$LOG_FILE"
# If virtualenv specified & exists, using that version of python instead.
if [ -d "$VIRTUAL_ENV_DIR" ]; then
PYTHON_TO_USE="$VIRTUAL_ENV_DIR/bin/python"
fi
start() {
# Check and create history directory if it doesn't exist
[ ! -d "$HISTORY_DIR" ] && mkdir -p "$HISTORY_DIR"
# Check if strat.log exists and back it up
if [ -f "$LOG_FILE" ]; then
mv "$LOG_FILE" "$BACKUP_LOG_FILE"
echo "Backed up log to $BACKUP_LOG_FILE"
fi
if [ ! -e "$OUTPUT_PID_PATH/$OUTPUT_PID_FILE" ]; then
nohup "$PYTHON_TO_USE" ./$SCRIPT_TO_EXECUTE_PLUS_ARGS > strat.log 2>&1 & echo $! > "$OUTPUT_PID_PATH/$OUTPUT_PID_FILE"
echo "Started $SCRIPT_TO_EXECUTE_PLUS_ARGS @ Process: $!"
sleep .7
echo "Created $OUTPUT_PID_FILE file in $OUTPUT_PID_PATH dir"
else
echo "$SCRIPT_TO_EXECUTE_PLUS_ARGS is already running."
fi
}
stop() {
if [ -e "$OUTPUT_PID_PATH/$OUTPUT_PID_FILE" ]; then
the_pid=$(<$OUTPUT_PID_PATH/$OUTPUT_PID_FILE)
rm "$OUTPUT_PID_PATH/$OUTPUT_PID_FILE"
echo "Deleted $OUTPUT_PID_FILE file in $OUTPUT_PID_PATH dir"
kill "$the_pid"
COUNTER=1
while [ -e /proc/$the_pid ]
do
echo "$SCRIPT_TO_EXECUTE_PLUS_ARGS @: $the_pid is still running"
sleep .7
COUNTER=$[$COUNTER +1]
if [ $COUNTER -eq 20 ]; then
kill -9 "$the_pid"
fi
if [ $COUNTER -eq 40 ]; then
exit 1
fi
done
echo "$SCRIPT_TO_EXECUTE_PLUS_ARGS @: $the_pid has finished"
else
echo "$SCRIPT_TO_EXECUTE_PLUS_ARGS is not running."
fi
}
restart() {
stop
sleep 1
start
}
case "$1" in
start)
start
;;
stop)
stop
;;
restart)
restart
;;
*)
echo "Usage: $0 {start|stop|restart}"
exit 1
esac
exit 0

View File

@@ -17,7 +17,7 @@ from v2realbot.strategyblocks.activetrade.activetrade_hub import manage_active_t
 from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
 from v2realbot.strategyblocks.inits.init_directives import intialize_directive_conditions
 from alpaca.trading.client import TradingClient
-from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE
+from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
 from alpaca.trading.models import Calendar
 from v2realbot.indicators.oscillators import rsi
 from v2realbot.indicators.moving_averages import sma

View File

@@ -43,7 +43,7 @@ from v2realbot.common.model import TradeUpdate, Order
 #from rich import print
 import threading
 import asyncio
-from v2realbot.config import BT_DELAYS, DATA_DIR, BT_FILL_CONDITION_BUY_LIMIT, BT_FILL_CONDITION_SELL_LIMIT, BT_FILL_LOG_SURROUNDING_TRADES, BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_PRICE_MARKET_ORDER_PREMIUM
+from v2realbot.config import DATA_DIR
 from v2realbot.utils.utils import AttributeDict, ltp, zoneNY, trunc, count_decimals, print
 from v2realbot.utils.tlog import tlog
 from v2realbot.enums.enums import FillCondition
@@ -60,6 +60,7 @@ from v2realbot.utils.dash_save_html import make_static
 import dash_bootstrap_components as dbc
 from dash.dependencies import Input, Output
 from dash import dcc, html, dash_table, Dash
+import v2realbot.utils.config_handler as cfh
 """"
 LATENCY DELAYS
 .000 trigger - last_trade_time (.4246266)
@@ -171,7 +172,7 @@ class Backtester:
 todel.append(order)
 elif not self.symbol or order.symbol == self.symbol:
 #pricteme mininimalni latency od submittu k fillu
-if order.submitted_at.timestamp() + BT_DELAYS.sub_to_fill > float(intime):
+if order.submitted_at.timestamp() + cfh.config_handler.get_val('BT_DELAYS','sub_to_fill') > float(intime):
 print(f"too soon for {order.id}")
 #try to execute
 else:
@@ -197,7 +198,7 @@ class Backtester:
 #Mazeme, jinak je to hruza
 #nechavame na konci trady, které muzeme potrebovat pro consekutivni pravidlo
 #osetrujeme, kdy je malo tradu a oriznuti by slo do zaporu
-del_to_index = index_end-2-BT_FILL_CONS_TRADES_REQUIRED
+del_to_index = index_end-2-cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED')
 del_to_index = del_to_index if del_to_index > 0 else 0
 del self.btdata[0:del_to_index]
 ##ic("after delete",len(self.btdata[0:index_end]))
@@ -218,7 +219,7 @@ class Backtester:
 fill_time = None
 fill_price = None
-order_min_fill_time = o.submitted_at.timestamp() + BT_DELAYS.sub_to_fill
+order_min_fill_time = o.submitted_at.timestamp() + cfh.config_handler.get_val('BT_DELAYS','sub_to_fill')
 #ic(order_min_fill_time)
 #ic(len(work_range))
@@ -240,17 +241,18 @@ class Backtester:
 #NASTVENI PODMINEK PLNENI
 fast_fill_condition = i[1] <= o.limit_price
 slow_fill_condition = i[1] < o.limit_price
-if BT_FILL_CONDITION_BUY_LIMIT == FillCondition.FAST:
+fill_cond_buy_limit = cfh.config_handler.get_val('BT_FILL_CONDITION_BUY_LIMIT')
+if fill_cond_buy_limit == FillCondition.FAST:
 fill_condition = fast_fill_condition
-elif BT_FILL_CONDITION_BUY_LIMIT == FillCondition.SLOW:
+elif fill_cond_buy_limit == FillCondition.SLOW:
 fill_condition = slow_fill_condition
 else:
 print("unknow fill condition")
 return -1
-if float(i[0]) > float(order_min_fill_time+BT_DELAYS.limit_order_offset) and fill_condition:
+if float(i[0]) > float(order_min_fill_time+cfh.config_handler.get_val('BT_DELAYS','limit_order_offset')) and fill_condition:
 consec_cnt += 1
-if consec_cnt == BT_FILL_CONS_TRADES_REQUIRED:
+if consec_cnt == cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED'):
 #(1679081919.381649, 27.88)
 #ic(i)
@@ -261,10 +263,10 @@ class Backtester:
 #fill_price = i[1]
 print("FILL LIMIT BUY at", fill_time, datetime.fromtimestamp(fill_time).astimezone(zoneNY), "at",i[1])
-if BT_FILL_LOG_SURROUNDING_TRADES != 0:
+if cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES') != 0:
 #TODO loguru
-print("FILL SURR TRADES: before",work_range[index-BT_FILL_LOG_SURROUNDING_TRADES:index])
+print("FILL SURR TRADES: before",work_range[index-cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES'):index])
-print("FILL SURR TRADES: fill and after",work_range[index:index+BT_FILL_LOG_SURROUNDING_TRADES])
+print("FILL SURR TRADES: fill and after",work_range[index:index+cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES')])
 break
 else:
 consec_cnt = 0
@@ -275,17 +277,18 @@ class Backtester:
 #NASTVENI PODMINEK PLNENI
 fast_fill_condition = i[1] >= o.limit_price
 slow_fill_condition = i[1] > o.limit_price
-if BT_FILL_CONDITION_SELL_LIMIT == FillCondition.FAST:
+fill_conf_sell_cfg = cfh.config_handler.get_val('BT_FILL_CONDITION_SELL_LIMIT')
+if fill_conf_sell_cfg == FillCondition.FAST:
 fill_condition = fast_fill_condition
-elif BT_FILL_CONDITION_SELL_LIMIT == FillCondition.SLOW:
+elif fill_conf_sell_cfg == FillCondition.SLOW:
 fill_condition = slow_fill_condition
 else:
 print("unknown fill condition")
 return -1
-if float(i[0]) > float(order_min_fill_time+BT_DELAYS.limit_order_offset) and fill_condition:
+if float(i[0]) > float(order_min_fill_time+cfh.config_handler.get_val('BT_DELAYS','limit_order_offset')) and fill_condition:
 consec_cnt += 1
-if consec_cnt == BT_FILL_CONS_TRADES_REQUIRED:
+if consec_cnt == cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED'):
 #(1679081919.381649, 27.88)
 #ic(i)
 fill_time = i[0]
@@ -297,10 +300,11 @@ class Backtester:
 #fill_price = i[1]
 print("FILL LIMIT SELL at", fill_time, datetime.fromtimestamp(fill_time).astimezone(zoneNY), "at",i[1])
-if BT_FILL_LOG_SURROUNDING_TRADES != 0:
+surr_trades_cfg = cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES')
+if surr_trades_cfg != 0:
 #TODO loguru
-print("FILL SELL SURR TRADES: before",work_range[index-BT_FILL_LOG_SURROUNDING_TRADES:index])
+print("FILL SELL SURR TRADES: before",work_range[index-surr_trades_cfg:index])
-print("FILL SELL SURR TRADES: fill and after",work_range[index:index+BT_FILL_LOG_SURROUNDING_TRADES])
+print("FILL SELL SURR TRADES: fill and after",work_range[index:index+surr_trades_cfg])
 break
 else:
 consec_cnt = 0
@@ -314,11 +318,16 @@ class Backtester:
 #ic(i)
 fill_time = i[0]
 fill_price = i[1]
-#přičteme MARKET PREMIUM z konfigurace (do budoucna mozna rozdilne pro BUY/SELL a nebo mozna z konfigurace pro dany itutl)
+#přičteme MARKET PREMIUM z konfigurace (je v pct nebo abs) (do budoucna mozna rozdilne pro BUY/SELL a nebo mozna z konfigurace pro dany titul)
+cfg_premium = cfh.config_handler.get_val('BT_FILL_PRICE_MARKET_ORDER_PREMIUM')
+if cfg_premium < 0: #configured as percentage
+premium = abs(cfg_premium) * fill_price / 100.0
+else: #configured as absolute value
+premium = cfg_premium
 if o.side == OrderSide.BUY:
-fill_price = fill_price + BT_FILL_PRICE_MARKET_ORDER_PREMIUM
+fill_price = fill_price + premium
 elif o.side == OrderSide.SELL:
-fill_price = fill_price - BT_FILL_PRICE_MARKET_ORDER_PREMIUM
+fill_price = fill_price - premium
 print("FILL ",o.side,"MARKET at", fill_time, datetime.fromtimestamp(fill_time).astimezone(zoneNY), "cena", i[1])
 break
@@ -367,7 +376,7 @@ class Backtester:
 def _do_notification_with_callbacks(self, tradeupdate: TradeUpdate, time: float):
 #do callbacku je třeba zpropagovat filltime čas (včetně latency pro notifikaci), aby se pripadne akce v callbacku udály s tímto časem
-self.time = time + float(BT_DELAYS.fill_to_not)
+self.time = time + float(cfh.config_handler.get_val('BT_DELAYS','fill_to_not'))
 print("current bt.time",self.time)
 #print("FILL NOTIFICATION: ", tradeupdate)
 res = asyncio.run(self.order_fill_callback(tradeupdate))
@@ -820,10 +829,10 @@ class Backtester:
 Trades:''' + str(len(self.trades)))
 textik8 = html.Div('''
 Profit:''' + str(state.profit))
-textik9 = html.Div(f"{BT_FILL_CONS_TRADES_REQUIRED=}")
+textik9 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED')=}")
-textik10 = html.Div(f"{BT_FILL_LOG_SURROUNDING_TRADES=}")
+textik10 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES')=}")
-textik11 = html.Div(f"{BT_FILL_CONDITION_BUY_LIMIT=}")
+textik11 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_CONDITION_BUY_LIMIT')=}")
-textik12 = html.Div(f"{BT_FILL_CONDITION_SELL_LIMIT=}")
+textik12 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_CONDITION_SELL_LIMIT')=}")
 orders_title = dcc.Markdown('## Open orders')
 trades_title = dcc.Markdown('## Trades')
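The market-premium logic introduced above treats a negative BT_FILL_PRICE_MARKET_ORDER_PREMIUM as a percentage of the fill price and a positive value as an absolute offset. A minimal standalone sketch of that rule (illustrative numbers; in the commit the value comes from cfh.config_handler.get_val('BT_FILL_PRICE_MARKET_ORDER_PREMIUM')):

# Sketch of the premium rule added to Backtester; not the class itself.
def market_order_premium(cfg_premium: float, fill_price: float) -> float:
    if cfg_premium < 0:   # negative value = percentage of the fill price
        return abs(cfg_premium) * fill_price / 100.0
    return cfg_premium    # positive value = absolute price offset

print(market_order_premium(0.005, 30.0))    # 0.005  (absolute half cent, the default)
print(market_order_premium(-0.0167, 30.0))  # ~0.005 (0.0167 % of a 30.00 base price)

Buy fills add the premium, sell fills subtract it, mirroring the OrderSide branch above.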

View File

@@ -8,9 +8,6 @@ from collections import defaultdict
 #the first occurence of using it will load it globally
 _ml_module_loaded = False
-#TBD - konfiguracni dict issue #148
-#CFG: defaultdict = defaultdict(None)
 #directory for generated images and basic reports
 MEDIA_DIRECTORY = Path(__file__).parent.parent.parent / "media"
 RUNNER_DETAIL_DIRECTORY = Path(__file__).parent.parent.parent / "runner_detail"
@@ -20,58 +17,16 @@ LOG_PATH = Path(__file__).parent.parent
 LOG_FILE = Path(__file__).parent.parent / "strat.log"
 JOB_LOG_FILE = Path(__file__).parent.parent / "job.log"
-#'0.0.0.0',
-#currently only prod server has acces to LIVE
-PROD_SERVER_HOSTNAMES = ['tradingeastcoast','David-MacBook-Pro.local'] #,'David-MacBook-Pro.local'
-TEST_SERVER_HOSTNAMES = ['tradingtest']
-#TODO vybrane dat do config db a managovat pres GUI
-#DEFAULT AGGREGATOR filter trades
-#NOTE pridana F - Inter Market Sweep Order - obcas vytvarela spajky
-AGG_EXCLUDED_TRADES = ['C','O','4','B','7','V','P','W','U','Z','F']
-OFFLINE_MODE = False
-# ilog lvls = 0,1 - 0 debug, 1 info
-ILOG_SAVE_LEVEL_FROM = 1
-#minimalni vzdalenost mezi trady, kterou agregator pousti pro CBAR(0.001 - blokuje mensi nez 1ms)
-GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN = 0.003
-#normalized price for tick 0.01
-NORMALIZED_TICK_BASE_PRICE = 30.00
-LOG_RUNNER_EVENTS = False
-#no print in console
-QUIET_MODE = True
-#how many consecutive trades with the fill price are necessary for LIMIT fill to happen in backtesting
-#0 - optimistic, every knot high will fill the order
-#N - N consecutive trades required
-#not impl.yet
-#minimum is 1, na alpace live to vetsinou vychazi 7-8 u BAC, je to hodne podobne tomu, nez je cena překonaná pul centu. tzn. 7-8 a nebo FillCondition.SLOW
-BT_FILL_CONS_TRADES_REQUIRED = 2
-#during bt trade execution logs X-surrounding trades of the one that triggers the fill
-BT_FILL_LOG_SURROUNDING_TRADES = 10
-#fill condition for limit order in bt
-# fast - price has to be equal or bigger <=
-# slow - price has to be bigger <
-BT_FILL_CONDITION_BUY_LIMIT = FillCondition.SLOW
-BT_FILL_CONDITION_SELL_LIMIT = FillCondition.SLOW
-#TBD TODO not implemented yet
-BT_FILL_PRICE_MARKET_ORDER_PREMIUM = 0.005
-#backend counter of api requests
-COUNT_API_REQUESTS = False
 #stratvars that cannot be changed in gui
 STRATVARS_UNCHANGEABLES = ['pendingbuys', 'blockbuy', 'jevylozeno', 'limitka']
-DATA_DIR = user_data_dir("v2realbot", "david")
+DATA_DIR = user_data_dir("v2realbot", False)
 MODEL_DIR = Path(DATA_DIR)/"models"
 #BT DELAYS
 #profiling
 PROFILING_NEXT_ENABLED = False
 PROFILING_OUTPUT_DIR = DATA_DIR
-#FILL CONFIGURATION CLASS FOR BACKTESTING
+#WIP - FILL CONFIGURATION CLASS FOR BACKTESTING
-#WIP
 class BT_FILL_CONF:
 """"
 Trida pro konfiguraci backtesting fillu pro dany symbol, pokud neexistuje tak fallback na obecny viz vyse-
@@ -85,24 +40,6 @@ class BT_FILL_CONF:
 self.BT_FILL_CONDITION_SELL_LIMIT=BT_FILL_CONDITION_SELL_LIMIT
 self.BT_FILL_PRICE_MARKET_ORDER_PREMIUM=BT_FILL_PRICE_MARKET_ORDER_PREMIUM
-""""
-LATENCY DELAYS for LIVE eastcoast
-.000 trigger - last_trade_time (.4246266)
-+.020 vstup do strategie a BUY (.444606)
-+.023 submitted (.469198)
-+.008 filled (.476695552)
-+.023 fill not(.499888)
-"""
-#TODO změnit názvy delay promennych vystizneji a obecneji
-class BT_DELAYS:
-trigger_to_strat: float = 0.020
-strat_to_sub: float = 0.023
-sub_to_fill: float = 0.008
-fill_to_not: float = 0.023
-#doplnit dle live
-limit_order_offset: float = 0
 class Keys:
 def __init__(self, api_key, secret_key, paper, feed) -> None:
 self.API_KEY = api_key
@@ -209,19 +146,7 @@ except ValueError:
 #zatim jsou LIVE_DATA nastaveny jako z account1_paper
 LIVE_DATA_API_KEY = ACCOUNT1_PAPER_API_KEY
 LIVE_DATA_SECRET_KEY = ACCOUNT1_PAPER_SECRET_KEY
+#LIVE_DATA_FEED je nastaveny v config_handleru
-# Load the data feed type from environment variable
-data_feed_type_str = os.environ.get('LIVE_DATA_FEED', 'iex') # Default to 'sip' if not set
-# Convert the string to DataFeed enum
-try:
-LIVE_DATA_FEED = DataFeed(data_feed_type_str)
-except ValueError:
-# Handle the case where the environment variable does not match any enum member
-print(f"Invalid data feed type: {data_feed_type_str} in LIVE_DATA_FEED defaulting to 'iex'")
-LIVE_DATA_FEED = DataFeed.IEX
-# LIVE_DATA_FEED = DataFeed.IEX
 class KW:
 activate: str = "activate"

View File

@@ -1,30 +1,31 @@
-import config as cfg
-from v2realbot.common.db import pool
+import v2realbot.common.db as db
 from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
 import orjson
+import v2realbot.utils.config_handler as ch
 # region CONFIG db services
 #TODO vytvorit modul pro dotahovani z pythonu (get_from_config(var_name, def_value) {)- stejne jako v js
 #TODO zvazit presunuti do TOML z JSONu
 def get_all_config_items():
-conn = pool.get_connection()
+conn = db.pool.get_connection()
 try:
 cursor = conn.cursor()
 cursor.execute('SELECT id, item_name, json_data FROM config_table')
 config_items = [{"id": row[0], "item_name": row[1], "json_data": row[2]} for row in cursor.fetchall()]
 finally:
-pool.release_connection(conn)
+db.pool.release_connection(conn)
 return 0, config_items
 # Function to get a config item by ID
 def get_config_item_by_id(item_id):
-conn = pool.get_connection()
+conn = db.pool.get_connection()
 try:
 cursor = conn.cursor()
 cursor.execute('SELECT item_name, json_data FROM config_table WHERE id = ?', (item_id,))
 row = cursor.fetchone()
 finally:
-pool.release_connection(conn)
+db.pool.release_connection(conn)
 if row is None:
 return -2, "not found"
 else:
@@ -33,7 +34,7 @@ def get_config_item_by_id(item_id):
 # Function to get a config item by ID
 def get_config_item_by_name(item_name):
 #print(item_name)
-conn = pool.get_connection()
+conn = db.pool.get_connection()
 try:
 cursor = conn.cursor()
 query = f"SELECT item_name, json_data FROM config_table WHERE item_name = '{item_name}'"
@@ -42,7 +43,7 @@ def get_config_item_by_name(item_name):
 row = cursor.fetchone()
 #print(row)
 finally:
-pool.release_connection(conn)
+db.pool.release_connection(conn)
 if row is None:
 return -2, "not found"
 else:
@@ -50,7 +51,7 @@ def get_config_item_by_name(item_name):
 # Function to create a new config item
 def create_config_item(config_item: ConfigItem):
-conn = pool.get_connection()
+conn = db.pool.get_connection()
 try:
 try:
 cursor = conn.cursor()
@@ -59,7 +60,7 @@ def create_config_item(config_item: ConfigItem):
 conn.commit()
 print(item_id)
 finally:
-pool.release_connection(conn)
+db.pool.release_connection(conn)
 return 0, {"id": item_id, "item_name":config_item.item_name, "json_data":config_item.json_data}
 except Exception as e:
@@ -67,27 +68,31 @@ create_config_item(config_item: ConfigItem):
 # Function to update a config item by ID
 def update_config_item(item_id, config_item: ConfigItem):
-conn = pool.get_connection()
+conn = db.pool.get_connection()
 try:
 try:
 cursor = conn.cursor()
 cursor.execute('UPDATE config_table SET item_name = ?, json_data = ? WHERE id = ?', (config_item.item_name, config_item.json_data, item_id))
 conn.commit()
+#refresh active item je zatím řešena takto natvrdo při updatu položky "active_profile" a při startu aplikace
+if config_item.item_name == "active_profile":
+ch.config_handler.activate_profile()
 finally:
-pool.release_connection(conn)
+db.pool.release_connection(conn)
 return 0, {"id": item_id, **config_item.dict()}
 except Exception as e:
 return -2, str(e)
 # Function to delete a config item by ID
 def delete_config_item(item_id):
-conn = pool.get_connection()
+conn = db.pool.get_connection()
 try:
 cursor = conn.cursor()
 cursor.execute('DELETE FROM config_table WHERE id = ?', (item_id,))
 conn.commit()
 finally:
-pool.release_connection(conn)
+db.pool.release_connection(conn)
 return 0, {"id": item_id}
 # endregion
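The `if config_item.item_name == "active_profile"` branch above is what makes profile switching take effect at runtime: saving that config item re-activates the handler. The call sites elsewhere in this commit read values either by a single key or by key plus field name (for grouped settings such as BT_DELAYS). A short usage sketch, assuming only what the call sites show:

import v2realbot.utils.config_handler as cfh

offline = cfh.config_handler.get_val('OFFLINE_MODE')                  # plain scalar value
sub_to_fill = cfh.config_handler.get_val('BT_DELAYS', 'sub_to_fill')  # field inside a grouped value

# Updating the config item named "active_profile" through update_config_item above
# triggers activate_profile(), so subsequent get_val calls see the new profile
# without restarting the application.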

View File

@@ -7,7 +7,7 @@ from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeSt
 from datetime import datetime
 from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer
 from threading import Thread, current_thread, Event, enumerate
-from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY, OFFLINE_MODE
+from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY
 import importlib
 from alpaca.trading.requests import GetCalendarRequest
 from alpaca.trading.client import TradingClient

View File

@@ -14,7 +14,7 @@ from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeSt
 from datetime import datetime
 from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer
 from threading import Thread, current_thread, Event, enumerate
-from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_PAPER_FEED, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, ACCOUNT1_LIVE_FEED, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY, OFFLINE_MODE, LIVE_DATA_FEED
+from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_PAPER_FEED, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, ACCOUNT1_LIVE_FEED, DATA_DIR, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY
 import importlib
 from alpaca.trading.requests import GetCalendarRequest
 from alpaca.trading.client import TradingClient
@@ -29,7 +29,8 @@ import pandas as pd
 from traceback import format_exc
 from datetime import timedelta, time
 from threading import Lock
-from v2realbot.common.db import pool, execute_with_retry, row_to_runarchive, row_to_runarchiveview
+from v2realbot.common.db import pool, execute_with_retry
+import v2realbot.common.transform as tr
 from sqlite3 import OperationalError, Row
 import v2realbot.strategyblocks.indicators.custom as ci
 from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
@@ -40,6 +41,7 @@ import v2realbot.reporting.metricstoolsimage as mt
 import gzip
 import os
 import msgpack
+import v2realbot.utils.config_handler as cfh
 #import gc
 #from pyinstrument import Profiler
 #adding lock to ensure thread safety of TinyDB (in future will be migrated to proper db)
@@ -886,14 +888,9 @@ def archive_runner(runner: Runner, strat: StrategyInstance, inter_batch_params:
 rectype=strat.state.rectype,
 cache_used=strat.dataloader.cache_used if isinstance(strat.dataloader, Trade_Offline_Streamer) else None,
 configs=dict(
-LIVE_DATA_FEED=str(LIVE_DATA_FEED),
+CONFIG_HANDLER=dict(profile=cfh.config_handler.active_profile, values=cfh.config_handler.active_config)))
-GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN=GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN,
-BT_FILL_CONS_TRADES_REQUIRED=BT_FILL_CONS_TRADES_REQUIRED,
-BT_FILL_LOG_SURROUNDING_TRADES=BT_FILL_LOG_SURROUNDING_TRADES,
-BT_FILL_CONDITION_BUY_LIMIT=BT_FILL_CONDITION_BUY_LIMIT,
-BT_FILL_CONDITION_SELL_LIMIT=BT_FILL_CONDITION_SELL_LIMIT))
 #add profit of this batch iteration to batch_sum_profit
 if inter_batch_params is not None:
 inter_batch_params["batch_profit"] += round(float(strat.state.profit),2)
@@ -1009,7 +1006,7 @@ def get_all_archived_runners() -> list[RunArchiveView]:
 rows = c.fetchall()
 results = []
 for row in rows:
-results.append(row_to_runarchiveview(row))
+results.append(tr.row_to_runarchiveview(row))
 finally:
 conn.row_factory = None
 pool.release_connection(conn)
@@ -1039,7 +1036,7 @@ def get_all_archived_runners() -> list[RunArchiveView]:
 # c.execute(paginated_query)
 # rows = c.fetchall()
-# results = [row_to_runarchiveview(row) for row in rows]
+# results = [tr.row_to_runarchiveview(row) for row in rows]
 # finally:
 # conn.row_factory = None
@@ -1089,7 +1086,7 @@ def get_all_archived_runners_p_original(request: DataTablesRequest) -> Tuple[int
 c.execute(filtered_count_query, {'search_value': f'%{search_value}%'})
 filtered_count = c.fetchone()[0]
-results = [row_to_runarchiveview(row) for row in rows]
+results = [tr.row_to_runarchiveview(row) for row in rows]
 finally:
 conn.row_factory = None
@@ -1162,7 +1159,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
 c.execute(filtered_count_query, {'search_value': f'%{search_value}%'})
 filtered_count = c.fetchone()[0]
-results = [row_to_runarchiveview(row) for row in rows]
+results = [tr.row_to_runarchiveview(row) for row in rows]
 finally:
 conn.row_factory = None
@@ -1197,7 +1194,7 @@ def get_archived_runner_header_byID(id: UUID) -> RunArchive:
 row = c.fetchone()
 if row:
-return 0, row_to_runarchive(row)
+return 0, tr.row_to_runarchive(row)
 else:
 return -2, "not found"
@@ -1903,7 +1900,7 @@ def get_alpaca_history_bars(symbol: str, datetime_object_from: datetime, datetim
 return 0, result
 else:
 print(str(e) + format_exc())
-if OFFLINE_MODE:
+if cfh.config_handler.get_val('OFFLINE_MODE'):
 print("OFFLINE MODE ENABLED")
 return 0, []
 return -2, str(e)
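archive_runner above no longer snapshots individual constants into the run archive; it stores the whole active profile instead. The resulting structure, with placeholder values (only the keys come from the diff):

# Placeholder illustration of the configs dict written by archive_runner:
configs = dict(
    CONFIG_HANDLER=dict(
        profile="example_profile",        # cfh.config_handler.active_profile
        values={                          # cfh.config_handler.active_config (illustrative excerpt)
            "OFFLINE_MODE": False,
            "BT_FILL_CONS_TRADES_REQUIRED": 2,
        },
    )
)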

View File

@@ -2,9 +2,9 @@ from alpaca.trading.enums import OrderSide, OrderType
 from threading import Lock
 from v2realbot.interfaces.general_interface import GeneralInterface
 from v2realbot.backtesting.backtester import Backtester
-from v2realbot.config import BT_DELAYS, COUNT_API_REQUESTS
 from datetime import datetime
 from v2realbot.utils.utils import zoneNY
+import v2realbot.utils.config_handler as cfh
 """"
 backtester methods can be called
@@ -19,7 +19,7 @@ class BacktestInterface(GeneralInterface):
 def __init__(self, symbol, bt: Backtester) -> None:
 self.symbol = symbol
 self.bt = bt
-self.count_api_requests = COUNT_API_REQUESTS
+self.count_api_requests = cfh.config_handler.get_val('COUNT_API_REQUESTS')
 self.mincnt = list([dict(minute=0,count=0)])
 #TODO time v API nejspis muzeme dat pryc a BT bude si to brat primo ze self.time (nezapomenout na + BT_DELAYS)
 # self.time = self.bt.time
@@ -43,33 +43,33 @@ class BacktestInterface(GeneralInterface):
 def buy(self, size = 1, repeat: bool = False):
 self.count()
 #add REST API latency
-return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.BUY,size=size,order_type = OrderType.MARKET)
+return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.BUY,size=size,order_type = OrderType.MARKET)
 """buy limit"""
 def buy_l(self, price: float, size: int = 1, repeat: bool = False, force: int = 0):
 self.count()
-return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.BUY,size=size,price=price,order_type = OrderType.LIMIT)
+return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.BUY,size=size,price=price,order_type = OrderType.LIMIT)
 """sell market"""
 def sell(self, size = 1, repeat: bool = False):
 self.count()
-return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.SELL,size=size,order_type = OrderType.MARKET)
+return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.SELL,size=size,order_type = OrderType.MARKET)
 """sell limit"""
 async def sell_l(self, price: float, size = 1, repeat: bool = False):
 self.count()
-return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.SELL,size=size,price=price,order_type = OrderType.LIMIT)
+return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.SELL,size=size,price=price,order_type = OrderType.LIMIT)
 """replace order"""
 async def repl(self, orderid: str, price: float = None, size: int = None, repeat: bool = False):
 self.count()
-return self.bt.replace_order(time=self.bt.time + BT_DELAYS.strat_to_sub,id=orderid,size=size,price=price)
+return self.bt.replace_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),id=orderid,size=size,price=price)
 """cancel order"""
 #TBD exec predtim?
 def cancel(self, orderid: str):
 self.count()
-return self.bt.cancel_order(time=self.bt.time + BT_DELAYS.strat_to_sub, id=orderid)
+return self.bt.cancel_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'), id=orderid)
 """get positions ->(size,avgp)"""
 #TBD exec predtim?
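Every order call above now adds the strat_to_sub latency via the config handler instead of the removed BT_DELAYS class. For orientation, the former defaults chain together as follows (standalone illustration; with profiles each field is read as cfh.config_handler.get_val('BT_DELAYS', <field>)):

# Former BT_DELAYS defaults from config.py, reproduced for illustration only.
BT_DELAYS = dict(trigger_to_strat=0.020, strat_to_sub=0.023,
                 sub_to_fill=0.008, fill_to_not=0.023, limit_order_offset=0.0)

# Simulated path of one backtested order: trigger -> strategy -> submit -> fill -> notification.
total = (BT_DELAYS["trigger_to_strat"] + BT_DELAYS["strat_to_sub"]
         + BT_DELAYS["sub_to_fill"] + BT_DELAYS["fill_to_not"])
print(round(total, 3))  # 0.074 seconds end to end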

View File

@@ -11,10 +11,10 @@ import threading
 from copy import deepcopy
 from msgpack import unpackb
 import os
-from v2realbot.config import DATA_DIR, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, AGG_EXCLUDED_TRADES
+from v2realbot.config import DATA_DIR
-import pickle
 import dill
 import gzip
+import v2realbot.utils.config_handler as cfh
 class TradeAggregator:
 def __init__(self,
@@ -25,7 +25,7 @@ class TradeAggregator:
 align: StartBarAlign = StartBarAlign.ROUND,
 mintick: int = 0,
 exthours: bool = False,
-excludes: list = AGG_EXCLUDED_TRADES,
+excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES'),
 skip_cache: bool = False):
 """
 UPDATED VERSION - vrací více záznamů
@@ -293,7 +293,7 @@ class TradeAggregator:
 self.diff_price = True
 self.last_price = data['p']
-if float(data['t']) - float(self.lasttimestamp) < GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN:
+if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
 self.trades_too_close = True
 else:
 self.trades_too_close = False
@@ -540,7 +540,7 @@ class TradeAggregator:
 self.diff_price = True
 self.last_price = data['p']
-if float(data['t']) - float(self.lasttimestamp) < GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN:
+if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
 self.trades_too_close = True
 else:
 self.trades_too_close = False
@@ -712,7 +712,7 @@ class TradeAggregator:
 self.diff_price = True
 self.last_price = data['p']
-if float(data['t']) - float(self.lasttimestamp) < GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN:
+if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
 self.trades_too_close = True
 else:
 self.trades_too_close = False
@@ -872,7 +872,7 @@ class TradeAggregator:
 self.diff_price = True
 self.last_price = data['p']
-if float(data['t']) - float(self.lasttimestamp) < GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN:
+if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
 self.trades_too_close = True
 else:
 self.trades_too_close = False
@@ -968,7 +968,7 @@ class TradeAggregator2Queue(TradeAggregator):
 Child of TradeAggregator - sends items to given queue
 In the future others will be added - TradeAggToTxT etc.
 """
-def __init__(self, symbol: str, queue: Queue, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = AGG_EXCLUDED_TRADES, skip_cache: bool = False):
+def __init__(self, symbol: str, queue: Queue, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES'), skip_cache: bool = False):
 super().__init__(rectype=rectype, resolution=resolution, minsize=minsize, update_ltp=update_ltp, align=align, mintick=mintick, exthours=exthours, excludes=excludes, skip_cache=skip_cache)
 self.queue = queue
 self.symbol = symbol
@@ -1013,7 +1013,7 @@ class TradeAggregator2List(TradeAggregator):
 """"
 stores records to the list
 """
-def __init__(self, symbol: str, btdata: list, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = AGG_EXCLUDED_TRADES, skip_cache: bool = False):
+def __init__(self, symbol: str, btdata: list, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES'), skip_cache: bool = False):
 super().__init__(rectype=rectype, resolution=resolution, minsize=minsize, update_ltp=update_ltp, align=align, mintick=mintick, exthours=exthours, excludes=excludes, skip_cache=skip_cache)
 self.btdata = btdata
 self.symbol = symbol
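The aggregator's default excludes list is now read from the config handler. Note that a default argument such as excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES') is evaluated once at import time, so a later profile reload only affects callers that pass excludes explicitly. A hypothetical illustration of how such a condition-code filter is typically applied (the actual check inside TradeAggregator is not part of this diff):

# Hypothetical filter by trade condition codes; illustrative only.
AGG_EXCLUDED_TRADES = ['C', 'O', '4', 'B', '7', 'V', 'P', 'W', 'U', 'Z', 'F']

def is_excluded(trade_conditions, excludes=AGG_EXCLUDED_TRADES):
    return any(code in excludes for code in trade_conditions)

print(is_excluded(['@', 'F']))  # True  - 'F' (Intermarket Sweep Order) is filtered out
print(is_excluded(['@']))       # False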

View File

@@ -2,7 +2,7 @@ from v2realbot.loader.aggregator import TradeAggregator, TradeAggregator2List, T
 #from v2realbot.loader.cacher import get_cached_agg_data
 from alpaca.trading.requests import GetCalendarRequest
 from alpaca.data.live import StockDataStream
-from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE, LIVE_DATA_FEED
+from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
 from alpaca.data.enums import DataFeed
 from alpaca.data.historical import StockHistoricalDataClient
 from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
@@ -26,6 +26,7 @@ import time
 from traceback import format_exc
 from collections import defaultdict
 import requests
+import v2realbot.utils.config_handler as cfh
 """
 Trade offline data streamer, based on Alpaca historical data.
 """
@@ -103,6 +104,8 @@ class Trade_Offline_Streamer(Thread):
 print("call add streams to queue first")
 return 0
+cfh.config_handler.print_current_config()
 #iterujeme nad streamy
 for i in self.streams:
 self.uniquesymbols.add(i.symbol)
@@ -136,8 +139,8 @@
 #datetime.fromtimestamp(data['updated']).astimezone(zoneNY))
 #REFACTOR STARTS HERE
 #print(f"{self.time_from=} {self.time_to=}")
-if OFFLINE_MODE:
+if cfh.config_handler.get_val('OFFLINE_MODE'):
 #just one day - same like time_from
 den = str(self.time_to.date())
 bt_day = Calendar(date=den,open="9:30",close="16:00")
@@ -149,6 +152,8 @@
 #zatim podpora pouze main session
+live_data_feed = cfh.config_handler.get_val('LIVE_DATA_FEED')
 #zatim podpora pouze 1 symbolu, predelat na froloop vsech symbolu ze symbpole
 #minimalni jednotka pro CACHE je 1 den - a to jen marketopen to marketclose (extended hours not supported yet)
 for day in cal_dates:
@@ -194,7 +199,7 @@
 #cache resime jen kdyz backtestujeme cely den a mame sip datapoint (iex necachujeme)
 #pokud ne tak ani necteme, ani nezapisujeme do cache
-if (self.time_to >= day.close and self.time_from <= day.open) and LIVE_DATA_FEED == DataFeed.SIP:
+if (self.time_to >= day.close and self.time_from <= day.open) and live_data_feed == DataFeed.SIP:
 #tento odstavec obchazime pokud je nastaveno "dont_use_cache"
 stream_btdata = self.to_run[symbpole[0]][0]
 cache_btdata, file_btdata = stream_btdata.get_cache(day.open, day.close)
@@ -251,7 +256,7 @@
 print("Remote Fetch DAY DATA Complete", day.open, day.close)
 #pokud jde o dnešní den a nebyl konec trhu tak cache neukládáme, pripadne pri iex datapointu necachujeme
-if (day.open < datetime.now().astimezone(zoneNY) < day.close) or LIVE_DATA_FEED == DataFeed.IEX:
+if (day.open < datetime.now().astimezone(zoneNY) < day.close) or live_data_feed == DataFeed.IEX:
 print("not saving trade cache, market still open today or IEX datapoint")
 #ic(datetime.now().astimezone(zoneNY))
 #ic(day.open, day.close)
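The two changed conditions above define when day trade data may be cached: only a fully covered session on the SIP feed is read from or written to cache, and nothing is saved while the market is still open or when the feed is IEX. Restated as a standalone predicate:

from alpaca.data.enums import DataFeed  # same enum the loader compares against

def cache_eligible(time_from, time_to, day_open, day_close, feed) -> bool:
    # full session covered and a SIP datapoint -> the day cache may be used
    return time_to >= day_close and time_from <= day_open and feed == DataFeed.SIP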

View File

@@ -4,7 +4,7 @@
 """
 from v2realbot.loader.aggregator import TradeAggregator2Queue
 from alpaca.data.live import StockDataStream
-from v2realbot.config import LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY, LIVE_DATA_FEED
+from v2realbot.config import LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY
 from alpaca.data.historical import StockHistoricalDataClient
 from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
 from threading import Thread, current_thread
@@ -12,6 +12,7 @@ from v2realbot.utils.utils import parse_alpaca_timestamp, ltp
 from datetime import datetime, timedelta
 from threading import Thread, Lock
 from msgpack import packb
+import v2realbot.utils.config_handler as cfh
 """
 Shared streamer (can be shared amongst concurrently running strategies)
@@ -19,11 +20,12 @@ from msgpack import packb
 by strategies
 """
 class Trade_WS_Streamer(Thread):
+live_data_feed = cfh.config_handler.get_val('LIVE_DATA_FEED')
 ##tento ws streamer je pouze jeden pro vsechny, tzn. vyuziváme natvrdo placena data primarniho uctu (nezalezi jestli paper nebo live)
-msg = f"Realtime Websocket connection will use FEED: {LIVE_DATA_FEED} and credential of ACCOUNT1"
+msg = f"Realtime Websocket connection will use FEED: {live_data_feed} and credential of ACCOUNT1"
 print(msg)
-client = StockDataStream(LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY, raw_data=True, websocket_params={}, feed=LIVE_DATA_FEED)
+#cfh.config_handler.print_current_config()
+client = StockDataStream(LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY, raw_data=True, websocket_params={}, feed=live_data_feed)
 #uniquesymbols = set()
 _streams = []
 #to_run = dict()
@@ -45,6 +47,18 @@
 Trade_WS_Streamer._streams.append(obj)
 if Trade_WS_Streamer.client._running is False:
 print("websocket zatim nebezi, pouze pridavame do pole")
+#zde delame refresh clienta (pokud se zmenilo live_data_feed)
+# live_data_feed = cfh.config_handler.get_val('LIVE_DATA_FEED')
+# #po otestování přepnout jen pokud se live_data_feed změnil
+# #if live_data_feed != Trade_WS_Streamer.live_data_feed:
+# # Trade_WS_Streamer.live_data_feed = live_data_feed
+# msg = f"REFRESH OF CLIENT! Realtime Websocket connection will use FEED: {live_data_feed} and credential of ACCOUNT1"
+# print(msg)
+# #cfh.config_handler.print_current_config()
+# Trade_WS_Streamer.client = StockDataStream(LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY, raw_data=True, websocket_params={}, feed=live_data_feed)
 else:
 print("websocket client bezi")
 if self.symbol_exists(obj.symbol):

View File

@@ -9,8 +9,6 @@ from fastapi import FastAPI, Depends, HTTPException, status, File, UploadFile, R
 from fastapi.security import APIKeyHeader
 import uvicorn
 from uuid import UUID
-import v2realbot.controller.services as cs
-import v2realbot.controller.configs as cf
 from v2realbot.utils.ilog import get_log_window
 from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs
 from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query
@@ -44,6 +42,8 @@ from typing import List
 import v2realbot.controller.run_manager as rm
 import v2realbot.scheduler.ap_scheduler as aps
 import re
+import v2realbot.controller.configs as cf
+import v2realbot.controller.services as cs
 #from async io import Queue, QueueEmpty
 #
 # install()
@@ -813,7 +813,7 @@ def update_item(item_id: int, config_item: ConfigItem) -> ConfigItem:
 if res != 0:
 raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
-res, sada = cs.update_config_item(item_id, config_item)
+res, sada = cf.update_config_item(item_id, config_item)
 if res == 0: return sada
 else:
 raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not created: {res}:{id}")

View File

@@ -6,7 +6,7 @@ from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunDay, S
 from v2realbot.utils.utils import validate_and_format_time, AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data
 from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
 from datetime import datetime
-from v2realbot.config import JOB_LOG_FILE, STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY, OFFLINE_MODE
+from v2realbot.config import JOB_LOG_FILE, STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY
 import numpy as np
 from rich import print as richprint
 import v2realbot.controller.services as cs

View File

@ -6,7 +6,7 @@ from v2realbot.utils.utils import AttributeDict, zoneNY, is_open_rush, is_close_
from v2realbot.utils.tlog import tlog from v2realbot.utils.tlog import tlog
from v2realbot.utils.ilog import insert_log, insert_log_multiple_queue from v2realbot.utils.ilog import insert_log, insert_log_multiple_queue
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Order, Account from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Order, Account
from v2realbot.config import BT_DELAYS, get_key, HEARTBEAT_TIMEOUT, QUIET_MODE, LOG_RUNNER_EVENTS, ILOG_SAVE_LEVEL_FROM,PROFILING_NEXT_ENABLED, PROFILING_OUTPUT_DIR, AGG_EXCLUDED_TRADES from v2realbot.config import get_key, HEARTBEAT_TIMEOUT, PROFILING_NEXT_ENABLED, PROFILING_OUTPUT_DIR
import queue import queue
#from rich import print #from rich import print
from v2realbot.loader.aggregator import TradeAggregator2Queue, TradeAggregator2List, TradeAggregator from v2realbot.loader.aggregator import TradeAggregator2Queue, TradeAggregator2List, TradeAggregator
@ -29,6 +29,7 @@ from rich import print as printnow
from collections import defaultdict from collections import defaultdict
import v2realbot.strategyblocks.activetrade.sl.optimsl as optimsl import v2realbot.strategyblocks.activetrade.sl.optimsl as optimsl
from tqdm import tqdm from tqdm import tqdm
import v2realbot.utils.config_handler as cfh
if PROFILING_NEXT_ENABLED: if PROFILING_NEXT_ENABLED:
from pyinstrument import Profiler from pyinstrument import Profiler
@ -93,7 +94,7 @@ class Strategy:
align: StartBarAlign = StartBarAlign.ROUND, align: StartBarAlign = StartBarAlign.ROUND,
mintick: int = 0, mintick: int = 0,
exthours: bool = False, exthours: bool = False,
excludes: list = AGG_EXCLUDED_TRADES): excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES')):
##TODO vytvorit self.datas_here containing dict - queue - SYMBOL - RecType - ##TODO vytvorit self.datas_here containing dict - queue - SYMBOL - RecType -
##zatim natvrdo ##zatim natvrdo
@ -327,8 +328,8 @@ class Strategy:
elif self.rectype == RecordType.TRADE:
    self.state.last_trade_time = item['t']
    if self.mode == Mode.BT or self.mode == Mode.PREP:
-       self.bt.time = self.state.last_trade_time + BT_DELAYS.trigger_to_strat
-       self.state.time = self.state.last_trade_time + BT_DELAYS.trigger_to_strat
+       self.bt.time = self.state.last_trade_time + cfh.config_handler.get_val('BT_DELAYS','trigger_to_strat')
+       self.state.time = self.state.last_trade_time + cfh.config_handler.get_val('BT_DELAYS','trigger_to_strat')
    elif self.mode == Mode.LIVE or self.mode == Mode.PAPER:
        self.state.time = datetime.now().timestamp()
        #ic('time updated')
@ -805,7 +806,7 @@ class StrategyState:
self.iter_log_list = None
def ilog(self, e: str = None, msg: str = None, lvl: int = 1, **kwargs):
-   if lvl < ILOG_SAVE_LEVEL_FROM:
+   if lvl < cfh.config_handler.get_val('ILOG_SAVE_LEVEL_FROM'):
        return
    if self.mode == Mode.LIVE or self.mode == Mode.PAPER:
@ -829,6 +830,4 @@ class StrategyState:
row = dict(time=time, event=e, message=msg, details=kwargs)
self.iter_log_list.append(row)
row["name"] = self.name
print(row)
-#for now a global parameter - rework to be per RUN?
-#if LOG_RUNNER_EVENTS: insert_log(self.runner_id, time=self.time, logdict=row)
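The hunks above swap module-level constants (AGG_EXCLUDED_TRADES, BT_DELAYS, ILOG_SAVE_LEVEL_FROM) for lookups through the config handler at the point of use, so values overridden by a reloaded profile take effect without restarting the strategy. A minimal sketch of the nested BT_DELAYS lookup, assuming the defaults from config_defaults below; the timestamp is illustrative only:

import v2realbot.utils.config_handler as cfh

# key 'BT_DELAYS', subkey 'trigger_to_strat' -> 0.020 unless a profile overrides it
delay = cfh.config_handler.get_val('BT_DELAYS', 'trigger_to_strat')
last_trade_time = 1709736600.4246266  # illustrative epoch timestamp
bt_time = last_trade_time + delay     # mirrors the backtest time bookkeeping above
print(bt_time)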


@ -21,7 +21,7 @@ from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
import mlroom.utils.ext_services as es
-from v2realbot.common.db import pool, execute_with_retry, row_to_runarchive, row_to_runarchiveview
+from v2realbot.common.db import pool, execute_with_retry
from v2realbot.utils.utils import ltp, isrising, isfalling,trunc,AttributeDict
import tqdm


@ -0,0 +1,64 @@
from alpaca.data.enums import DataFeed
from v2realbot.enums.enums import FillCondition
#Separate file that contains default values for all config variables.
#They are loaded by the config_handler and can then be overridden on the fly
#by configuration profiles.
#Note: if the type is not a simple one (enum etc.), don't forget to add it to the config_handler get_val function for transformation.
#Premium for MARKET orders: a positive value means an absolute amount (0.005), a negative value means a pct (0.0167) #0.005 is approximately 0.0167% of the base price 30.
BT_FILL_PRICE_MARKET_ORDER_PREMIUM=0.005
#suppress verbose printing to the console
QUIET_MODE=True
LIVE_DATA_FEED=DataFeed.IEX
OFFLINE_MODE = False
#minimum distance between trades that the aggregator lets through for CBAR (0.001 blocks anything smaller than 1 ms)
GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN = 0.003
#base price at which the normalized tick equals 0.01
NORMALIZED_TICK_BASE_PRICE = 30.00
#DEFAULT AGGREGATOR excluded trade conditions
#NOTE F added - Intermarket Sweep Order - it occasionally created spikes
AGG_EXCLUDED_TRADES = ['C','O','4','B','7','V','P','W','U','Z','F']
#how many consecutive trades at the fill price are necessary for a LIMIT fill to happen in backtesting
#0 - optimistic, every wick high fills the order
#N - N consecutive trades required
#not implemented yet
#minimum is 1; on Alpaca live it usually comes out to 7-8 for BAC, which is roughly how long it takes the price to move past half a cent, i.e. 7-8 or FillCondition.SLOW
BT_FILL_CONS_TRADES_REQUIRED = 2
#during BT trade execution, logs the X surrounding trades around the one that triggers the fill
BT_FILL_LOG_SURROUNDING_TRADES = 10
#fill condition for limit order in bt
# fast - price has to be equal or bigger <=
# slow - price has to be bigger <
BT_FILL_CONDITION_BUY_LIMIT = FillCondition.SLOW
BT_FILL_CONDITION_SELL_LIMIT = FillCondition.SLOW
#backend counter of api requests
COUNT_API_REQUESTS = False
# ilog lvls = 0,1 - 0 debug, 1 info
ILOG_SAVE_LEVEL_FROM = 1
#currently only the prod server has access to LIVE
PROD_SERVER_HOSTNAMES = ['tradingeastcoast','David-MacBook-Pro.local'] #,'David-MacBook-Pro.local'
TEST_SERVER_HOSTNAMES = ['tradingtest']
""""
LATENCY DELAYS for LIVE eastcoast
.000 trigger - last_trade_time (.4246266)
+.020 vstup do strategie a BUY (.444606)
+.023 submitted (.469198)
+.008 filled (.476695552)
+.023 fill not(.499888)
"""
BT_DELAYS = {
"trigger_to_strat": 0.020,
"strat_to_sub": 0.023,
"sub_to_fill": 0.008,
"fill_to_not": 0.023,
#fill in based on live measurements
"limit_order_offset": 0,
}
#cfh.config_handler.get_val('BT_DELAYS','trigger_to_strat')
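Only the UPPERCASE names in this module become configuration keys. A minimal sketch of how they end up in the handler's default dictionary (it mirrors aggregate_configurations in config_handler below and uses only the values defined above):

import v2realbot.utils.config_defaults as config_defaults

# every UPPERCASE attribute of the module becomes a config key
defaults = {key: getattr(config_defaults, key) for key in dir(config_defaults) if key.isupper()}
print(defaults["BT_FILL_CONS_TRADES_REQUIRED"])   # 2
print(defaults["BT_DELAYS"]["trigger_to_strat"])  # 0.020
print(defaults["LIVE_DATA_FEED"])                 # DataFeed.IEX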


@ -0,0 +1,118 @@
import v2realbot.controller.configs as cfgservices
import orjson
from traceback import format_exc
from alpaca.data.enums import DataFeed
import v2realbot.utils.config_defaults as config_defaults
from v2realbot.enums.enums import FillCondition
from rich import print
def aggregate_configurations(module):
return {key: getattr(module, key) for key in dir(module) if key.isupper()}
#config handler - singleton pattern
#details https://chat.openai.com/share/e056af70-76da-4dbe-93a1-ecf99f0b0f29
#it is initialized on app start, loading the defaults and updating them based on the active_profile setting
#there is also a handler for updating active_profile which applies the change immediately (in controller.config.update_config_item)
class ConfigHandler:
_instance = None
#this ensures that it is created only once
def __new__(cls):
if cls._instance is None:
cls._instance = super(ConfigHandler, cls).__new__(cls)
# Initialize your default config here in __new__, since it's only done once
# Default configuration
# Dynamically create the configuration dictionary
cls.default_config = aggregate_configurations(config_defaults)
cls._instance.active_config = cls._instance.default_config.copy()
cls._instance.active_profile = "default"
#if there is a profile to be activated, it is loaded, overriding the defaults
cls._instance.activate_profile()
return cls._instance
def load_profile(self, profile_name):
"""
Load a configuration profile; the JSON with all profiles is stored in the config item 'profiles'
"""
try:
config_directive = "profiles"
ret, res = cfgservices.get_config_item_by_name(config_directive)
if ret < 0:
print(f"CONFIG OVERRIDE {config_directive} Error {res}")
return
else:
fetched_dict = orjson.loads(res["json_data"])
override_configuration = fetched_dict.get(profile_name, None)
if override_configuration is not None:
#first reset to the defaults, then apply the profile overrides on top
self.active_config = self.default_config.copy()
self.active_config.update(override_configuration)
self.active_profile = profile_name
print(f"Profile {profile_name} loaded successfully.")
print("Current values:", self.active_config)
else:
print(f"Profile {profile_name} does not exist in config item: {config_directive}")
except Exception as e:
print(f"Error while fetching {profile_name} error:" + str(e) + format_exc())
def activate_profile(self):
"""
Activates the profile that is stored in the configuration as currently active.
"""
try:
config_directive = "active_profile"
ret, res = cfgservices.get_config_item_by_name(config_directive)
if ret < 0:
print(f"ERROR fetching item {config_directive} Error {res}")
return
else:
fetched_dict = orjson.loads(res["json_data"])
active_profile = fetched_dict.get("ACTIVE_PROFILE", None)
if active_profile is not None:
print("Activating profile", active_profile)
self.load_profile(active_profile)
else:
print("No ACTIVE_PROFILE element in config item: " + config_directive)
except Exception as e:
print(f"Error while activating profile:" + str(e) + format_exc())
def get_val(self, key, subkey=None):
"""
Retrieve a configuration value by key and optionally transform it to the appropriate type.
Also supports nested dictionaries via a subkey.
"""
value = self.active_config.get(key, None)
if subkey and isinstance(value, dict):
return value.get(subkey, None)
match key:
case "LIVE_DATA_FEED":
return DataFeed(value) # Convert to DataFeed enum
case "BT_FILL_CONDITION_BUY_LIMIT":
return FillCondition(value)
case "BT_FILL_CONDITION_SELL_LIMIT":
return FillCondition(value)
# Add cases for other enumeration conversions as needed
case _:
return value
def print_current_config(self):
print(f"Active profile {self.active_profile} conf_values: {str(self.active_config)}")
# Global configuration - it is imported by modules that need it. In the future this can be changed to Dependency Injection (each service would receive the config instance as an input parameter)
config_handler = ConfigHandler()
print(f"{config_handler.active_profile=}")
print("config handler initialized")
#this is how to get a value
#config_handler.get_val('BT_FILL_PRICE_MARKET_ORDER_PREMIUM')
# config_handler.load_profile('profile1') # assuming 'profile1' exists in the 'profiles' config item
# print(f"{config_handler.active_profile=}")
# config_handler.load_profile('profile2') # assuming 'profile2' exists in the 'profiles' config item
# print(f"{config_handler.active_profile=}")
# config_handler.activate_profile() # switch to the profile given by the active_profile directive
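A short usage sketch of the singleton above; the 'scalping' profile name is hypothetical and would have to exist in the 'profiles' config item:

import v2realbot.utils.config_handler as cfh

handler = cfh.config_handler              # module-level singleton
assert handler is cfh.ConfigHandler()     # __new__ always returns the same instance
print(handler.get_val("QUIET_MODE"))      # True unless the active profile overrides it
print(handler.get_val("LIVE_DATA_FEED"))  # converted to a DataFeed enum by get_val
handler.load_profile("scalping")          # hypothetical profile stored under 'profiles'
handler.print_current_config()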


@ -6,7 +6,7 @@ import json
from datetime import datetime
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
from v2realbot.common.db import pool, insert_queue
import sqlite3
#by default returns a list of tuples, where the tuple members are the columns


@ -1,11 +1,11 @@
import socket
from v2realbot.enums.enums import Env
-from v2realbot.config import PROD_SERVER_HOSTNAMES, TEST_SERVER_HOSTNAMES
+import v2realbot.utils.config_handler as cfh
def get_environment():
    """Determine if the current server is production or test based on hostname."""
    hostname = socket.gethostname()
-   if hostname in PROD_SERVER_HOSTNAMES:
+   if hostname in cfh.config_handler.get_val('PROD_SERVER_HOSTNAMES'):
        return Env.PROD
    else:
        return Env.TEST
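Because the hostname lists are now resolved through the config handler at call time, a profile switch changes the result of get_environment without a restart. A sketch, assuming the module path v2realbot.utils.environment and a hypothetical 'prod_like' profile that overrides PROD_SERVER_HOSTNAMES:

import v2realbot.utils.config_handler as cfh
from v2realbot.utils.environment import get_environment  # module path assumed

print(get_environment())                      # Env.TEST unless this hostname is in PROD_SERVER_HOSTNAMES
cfh.config_handler.load_profile("prod_like")  # hypothetical profile overriding the hostname lists
print(get_environment())                      # re-evaluated against the reloaded configuration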


@ -13,7 +13,7 @@ from v2realbot.common.model import StrategyInstance, Runner, RunArchive, RunArch
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
from typing import List
import tomli
-from v2realbot.config import DATA_DIR, QUIET_MODE,NORMALIZED_TICK_BASE_PRICE,ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY
+from v2realbot.config import DATA_DIR, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY
import requests
from uuid import UUID
#from decimal import Decimal
@ -34,6 +34,7 @@ import re
import tempfile
import shutil
from filelock import FileLock
+import v2realbot.utils.config_handler as cfh
def validate_and_format_time(time_string):
    """
@ -456,11 +457,11 @@ def get_tick(price: float, normalized_ticks: float = 0.01):
    for prices below 30 it returns 0.01; for prices above 30 it returns a proportionally scaled value,
    """
-   if price<NORMALIZED_TICK_BASE_PRICE:
+   if price<cfh.config_handler.get_val('NORMALIZED_TICK_BASE_PRICE'):
        return normalized_ticks
    else:
        #ratio of price vs base price
-       ratio = price/NORMALIZED_TICK_BASE_PRICE
+       ratio = price/cfh.config_handler.get_val('NORMALIZED_TICK_BASE_PRICE')
        return price2dec(ratio*normalized_ticks)
def eval_cond_dict(cond: dict) -> tuple[bool, str]:
@ -681,7 +682,7 @@ zoneUTC = pytz.utc
zonePRG = pytz.timezone('Europe/Amsterdam')
def print(*args, **kwargs):
-   if QUIET_MODE:
+   if cfh.config_handler.get_val('QUIET_MODE'):
        pass
    else:
        ####ic(*args, **kwargs)
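A worked example of the tick scaling in get_tick above, with the default NORMALIZED_TICK_BASE_PRICE of 30.00; the import assumes this file is v2realbot/utils/utils.py, as the imports elsewhere in the commit suggest, and exact rounding of the scaled value is handled by price2dec:

from v2realbot.utils.utils import get_tick

print(get_tick(29.50))   # below the base price the normalized tick is kept -> 0.01
print(get_tick(60.00))   # above it the tick scales proportionally: 60/30 * 0.01 -> 0.02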