testy/getrunnerdetail.py (new file, 89 lines)
@@ -0,0 +1,89 @@
+from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
+import v2realbot.controller.services as cs
+from v2realbot.utils.utils import slice_dict_lists, zoneUTC, safe_get, AttributeDict
+
+id = "b11c66d9-a9b6-475a-9ac1-28b11e1b4edf"
+state = AttributeDict(vars={}, runner_id=id)
+
+##basis for init_attached_data in strategy.init
+
+# def get_previous_runner(state):
+#     runner : Runner
+#     res, runner = cs.get_runner(state.runner_id)
+#     if res < 0:
+#         print(f"Not running {id}")
+#         return 0, None
+#
+#     return 0, runner.batch_id
+
+def attach_previous_data(state):
+    runner : Runner
+    #get batch_id of the current runner
+    res, runner = cs.get_runner(state.runner_id)
+    if res < 0 or runner.batch_id is None:
+        print(f"Couldn't get previous runner {state.runner_id}")
+        return None
+
+    batch_id = runner.batch_id
+    #batch_id = "6a6b0bcf"
+
+    res, runner_ids = cs.get_archived_runnerslist_byBatchID(batch_id, "desc")
+    if res < 0:
+        msg = f"error when fetching runners of batch {batch_id} {runner_ids}"
+        print(msg)
+        return None
+
+    if runner_ids is None or len(runner_ids) == 0:
+        print(f"no runners found for batch {batch_id} {runner_ids}")
+        return None
+
+    last_runner = runner_ids[0]
+    print("Previous runner identified:", last_runner)
+
+    #get details from the runner
+    res, val = cs.get_archived_runner_details_byID(last_runner)
+    if res < 0:
+        print(f"no archived runner {last_runner}")
+        return None
+
+    detail = RunArchiveDetail(**val)
+    #print("this is what we fetched", detail.bars)
+
+    # from stratvars directives
+    attach_previous_bars_indicators = safe_get(state.vars, "attach_previous_bars_indicators", 50)
+    attach_previous_cbar_indicators = safe_get(state.vars, "attach_previous_cbar_indicators", 50)
+    # [stratvars]
+    # attach_previous_bars_indicators = 50
+    # attach_previous_cbar_indicators = 50
+
+    #indicators: time as datetime utc
+    indicators = slice_dict_lists(d=detail.indicators[0], last_item=attach_previous_bars_indicators, time_to_datetime=True)
+
+    #time - datetime utc, updated - timestamp float
+    bars = slice_dict_lists(d=detail.bars, last_item=attach_previous_bars_indicators, time_to_datetime=True)
+
+    #cbar_indicators: time as float
+    cbar_inds = slice_dict_lists(d=detail.indicators[1], last_item=attach_previous_cbar_indicators)
+
+    #USE these as INITs - STOP HERE AND COMPARE FIRST
+    print(f"{state.indicators=} NEW:{indicators=}")
+    state.indicators = indicators
+    print(f"{state.bars=} NEW:{bars=}")
+    state.bars = bars
+    print(f"{state.cbar_indicators=} NEW:{cbar_inds=}")
+    state.cbar_indicators = cbar_inds
+
+    print("BARS and INDS INITIALIZED")
+    #bars
+
+    #any further initializations from ext_data would go here
+    print("EXT_DATA", detail.ext_data)
+    #depending on a setting, e.g. in the configuration, specific variables are used
+
+    #adding dailyBars from ext_data
+    # if hasattr(detail, "ext_data") and "dailyBars" in detail.ext_data:
+    #     state.dailyBars = detail.ext_data["dailyBars"]
+
+if __name__ == "__main__":
+    attach_previous_data(state)
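
safe_get above is used like dict.get with a default when reading the attach_* directives from stratvars. A minimal sketch of the assumed behavior (the real helper lives in v2realbot.utils.utils and is not shown in this change):

def safe_get(collection, key, default=None):
    # Assumed semantics: return collection[key] if present, otherwise the default.
    try:
        return collection[key]
    except (KeyError, TypeError):
        return default

stratvars = {"attach_previous_bars_indicators": 30}
print(safe_get(stratvars, "attach_previous_bars_indicators", 50))  # 30
print(safe_get(stratvars, "attach_previous_cbar_indicators", 50))  # 50, falls back to the default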
@@ -16,6 +16,7 @@ from v2realbot.strategyblocks.newtrade.signals import signal_search
 from v2realbot.strategyblocks.activetrade.activetrade_hub import manage_active_trade
 from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
 from v2realbot.strategyblocks.inits.init_directives import intialize_directive_conditions
+from v2realbot.strategyblocks.inits.init_attached_data import attach_previous_data
 from alpaca.trading.client import TradingClient
 from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
 from alpaca.trading.models import Calendar

@@ -116,7 +117,8 @@ def init(state: StrategyState):
     state.vars.loaded_models = {}

     #state attributes for martingale sizing management
-    state.vars["martingale"] = dict(cont_loss_series_cnt=0)
+    state.vars["transferables"] = {}
+    state.vars["transferables"]["martingale"] = dict(cont_loss_series_cnt=0)

     #INITIALIZE CBAR INDICATORS - move into its own function
     #state.cbar_indicators['ivwap'] = []

@@ -131,6 +133,9 @@ def init(state: StrategyState):
     initialize_dynamic_indicators(state)
     intialize_directive_conditions(state)
+
+    #attach part of yesterday's data: bars, indicators, cbar_indicators
+    attach_previous_data(state)

     #initialize indicator mapping (for use in operation) - maybe move to a separate function or put into base once it proves itself
     local_dict_cbar_inds = {key: state.cbar_indicators[key] for key in state.cbar_indicators.keys() if key != "time"}
     local_dict_inds = {key: state.indicators[key] for key in state.indicators.keys() if key != "time"}
@@ -301,8 +301,7 @@ class RunArchive(BaseModel):
     bt_from: Optional[datetime] = None
     bt_to: Optional[datetime] = None
     strat_json: Optional[str] = None
-    ##to be decommissioned, stratvars_toml instead
-    stratvars: Optional[dict] = None
+    transferables: Optional[dict] = None #variables that are transferable to the next run
     settings: Optional[dict] = None
     ilog_save: Optional[bool] = False
     profit: float = 0
@@ -83,5 +83,6 @@ def row_to_runarchive(row: dict) -> RunArchive:
         end_positions=int(row['end_positions']),
         end_positions_avgp=float(row['end_positions_avgp']),
         metrics=orjson.loads(row['metrics']),
-        stratvars_toml=row['stratvars_toml']
+        stratvars_toml=row['stratvars_toml'],
+        transferables=orjson.loads(row['transferables']) if row['transferables'] else None
     )
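
The new transferables column round-trips through orjson the same way metrics does: insert_archive_header serializes the dict to a UTF-8 string and row_to_runarchive parses it back, tolerating an empty column. A small standalone sketch of that path (the payload shape mirrors what init() puts into state.vars["transferables"]):

import orjson

transferables = {"martingale": {"cont_loss_series_cnt": 2}}

# write path (as in insert_archive_header): dump to a UTF-8 string for the TEXT column
stored = orjson.dumps(transferables).decode('utf-8')

# read path (as in row_to_runarchive): parse back, tolerating an empty/NULL value
row = {"transferables": stored}
loaded = orjson.loads(row['transferables']) if row['transferables'] else None
assert loaded == transferables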
@@ -1,7 +1,6 @@
 
 import v2realbot.common.db as db
-from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
-import orjson
+from v2realbot.common.model import ConfigItem
 import v2realbot.utils.config_handler as ch
 
 # region CONFIG db services
@@ -35,6 +35,7 @@ from sqlite3 import OperationalError, Row
 import v2realbot.strategyblocks.indicators.custom as ci
 from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
 from v2realbot.strategyblocks.indicators.indicators_hub import populate_dynamic_indicators
+from v2realbot.strategyblocks.inits.init_attached_data import attach_previous_data
 from v2realbot.interfaces.backtest_interface import BacktestInterface
 import os
 import v2realbot.reporting.metricstoolsimage as mt

@@ -412,7 +413,7 @@ def run_batch_stratin(id: UUID, runReq: RunRequest):
 def get_market_days_in_interval(datefrom, dateto, note = None, id = None):
     #getting dates from the calendar
     clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False, paper=True)
-    calendar_request = GetCalendarRequest(start=datefrom,end=dateto)
+    calendar_request = GetCalendarRequest(start=datefrom.date(),end=dateto.date())
     cal_dates = clientTrading.get_calendar(calendar_request)
     #list(Calendar)
     # Calendar

@@ -925,7 +926,8 @@ def archive_runner(runner: Runner, strat: StrategyInstance, inter_batch_params:
         end_positions=strat.state.positions,
         end_positions_avgp=round(float(strat.state.avgp),3),
         metrics=results_metrics,
-        stratvars_toml=runner.run_stratvars_toml
+        stratvars_toml=runner.run_stratvars_toml,
+        transferables=strat.state.vars["transferables"]
     )
 
     #flatten indicators from numpy array
@@ -1220,17 +1222,43 @@ def get_archived_runner_header_byID(id: UUID) -> RunArchive:
 # else:
 #     return 0, res
 
-#returns the list of runners with the given batch_id
-def get_archived_runnerslist_byBatchID(batch_id: str):
+# #returns the list of runners with the given batch_id
+# def get_archived_runnerslist_byBatchID(batch_id: str):
+#     conn = pool.get_connection()
+#     try:
+#         cursor = conn.cursor()
+#         cursor.execute(f"SELECT runner_id FROM runner_header WHERE batch_id='{str(batch_id)}'")
+#         runner_list = [row[0] for row in cursor.fetchall()]
+#     finally:
+#         pool.release_connection(conn)
+#     return 0, runner_list
+
+#updated version that allows sorting
+def get_archived_runnerslist_byBatchID(batch_id: str, sort_order: str = "asc"):
+    """
+    Fetches all runner records by batch_id, sorted by the 'started' column.
+
+    :param batch_id: The batch ID to filter runners by.
+    :param sort_order: The sort order of the 'started' column. Defaults to 'asc'.
+                       Accepts 'asc' for ascending or 'desc' for descending order.
+    :return: A tuple with the first element being a status code and the second being the list of runner_ids.
+    """
+    # Validate sort_order
+    if sort_order.lower() not in ['asc', 'desc']:
+        return -1, [] # Returning an error code and an empty list in case of an invalid sort_order
+
     conn = pool.get_connection()
     try:
         cursor = conn.cursor()
-        cursor.execute(f"SELECT runner_id FROM runner_header WHERE batch_id='{str(batch_id)}'")
+        query = f"""SELECT runner_id FROM runner_header
+                    WHERE batch_id=?
+                    ORDER BY datetime(started) {sort_order.upper()}"""
+        cursor.execute(query, (batch_id,))
         runner_list = [row[0] for row in cursor.fetchall()]
     finally:
         pool.release_connection(conn)
     return 0, runner_list
 
 def insert_archive_header(archeader: RunArchive):
     conn = pool.get_connection()
     try:
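
A short usage sketch of the updated helper, mirroring how init_attached_data.py picks the most recent run of a batch (the example batch id is the commented-out test value used elsewhere in this change):

import v2realbot.controller.services as cs

batch_id = "6a6b0bcf"
res, runner_ids = cs.get_archived_runnerslist_byBatchID(batch_id, "desc")
if res == 0 and runner_ids:
    last_runner = runner_ids[0]  # newest first thanks to the 'desc' sort on started
    print("Previous runner identified:", last_runner)
else:
    print("No archived runners for batch", batch_id)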
@@ -1239,11 +1267,11 @@ def insert_archive_header(archeader: RunArchive):
 
         res = c.execute("""
             INSERT INTO runner_header
-            (runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, strat_json, settings, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics, stratvars_toml)
+            (runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, strat_json, settings, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics, stratvars_toml, transferables)
             VALUES
-            (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+            (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
             """,
-            (str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml))
+            (str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml, orjson.dumps(archeader.transferables).decode('utf-8')))
 
         #retry not yet supported for statement format above
         #res = execute_with_retry(c,statement)
@@ -1664,10 +1692,15 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
 
     ##initialize required vars from strat init
     state.vars["loaded_models"] = {}
+    #state attributes for martingale sizing management
+    state.vars["transferables"] = {}
+    state.vars["transferables"]["martingale"] = dict(cont_loss_series_cnt=0)
+
     ##initialize dynamic indicators
     initialize_dynamic_indicators(state)
+    #TODO consider attaching data here (only the transferables are needed, i.e. somehow find the previous runner and copy its transferables over at the start)
+    #probably adapt attach_previous_data or create a special version of it
+    #attach_previous_data(state)
 
     # print("subtype")
     # function = "ci."+subtype+"."+subtype
@@ -704,6 +704,10 @@
             <label for="stratvars" class="form-label">Stratvars</label>
             <textarea class="form-control" rows="8" id="editstratvars" name="stratvars"></textarea>
           </div>
+          <div class="form-group">
+            <label for="stratvars" class="form-label">Transferables</label>
+            <textarea class="form-control" rows="8" id="edittransferables" name="stratvars"></textarea>
+          </div>
           <div class="form-group">
             <label for="strat_json" class="form-label">Strat JSON</label>
             <textarea class="form-control" rows="6" id="editstratjson" name="stratjson"></textarea>

@@ -1164,7 +1168,7 @@
     <script src="/static/js/tables/archivetable/init.js?v=1.12"></script>
     <script src="/static/js/tables/archivetable/functions.js?v=1.10"></script>
     <script src="/static/js/tables/archivetable/modals.js?v=1.07"></script>
-    <script src="/static/js/tables/archivetable/handlers.js?v=1.08"></script>
+    <script src="/static/js/tables/archivetable/handlers.js?v=1.09"></script>
 
     <!-- Runmanager functionality -->
     <script src="/static/js/tables/runmanager/init.js?v=1.1"></script>
@@ -638,7 +638,7 @@ $(document).ready(function () {
         else{
             $('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
         }
+        $('#edittransferables').val(JSON.stringify(row.transferables,null,2));
 
         $('#editstratjson').val(row.strat_json);
     }
@@ -445,7 +445,7 @@ $(document).ready(function () {
             $('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
         }
 
+        $('#edittransferables').val(JSON.stringify(row.transferables,null,2));
         $('#editstratjson').val(row.strat_json);
     }
 });
@@ -154,8 +154,8 @@ class StrategyClassicSL(Strategy):
             rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
 
             #for martingale we update loss_series_cnt
-            self.state.vars["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["martingale"]["cont_loss_series_cnt"]+1
-            self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt to {self.state.vars['martingale']['cont_loss_series_cnt']}")
+            self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"]+1
+            self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt to {self.state.vars['transferables']['martingale']['cont_loss_series_cnt']}")
 
             self.state.ilog(e=f"BUY notif - SHORT PROFIT: {partial_exit=} {partial_last=} {round(float(trade_profit),3)} total:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum=str(self.state.rel_profit_cum), bought_amount=bought_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
 

@@ -303,8 +303,8 @@ class StrategyClassicSL(Strategy):
             rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
 
             #for martingale we update loss_series_cnt
-            self.state.vars["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["martingale"]["cont_loss_series_cnt"]+1
-            self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt to {self.state.vars['martingale']['cont_loss_series_cnt']}")
+            self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"]+1
+            self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt to {self.state.vars['transferables']['martingale']['cont_loss_series_cnt']}")
 
             self.state.ilog(e=f"SELL notif - LONG PROFIT {partial_exit=} {partial_last=}:{round(float(trade_profit),3)} total:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum = str(self.state.rel_profit_cum), sold_amount=sold_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
 
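
The update in both notification handlers resets the streak on any profitable exit and extends it otherwise. The same rule as a self-contained sketch:

def update_loss_streak(cont_loss_series_cnt: int, rel_profit: float) -> int:
    # reset on profit, otherwise extend the losing streak (same expression as above)
    return 0 if rel_profit > 0 else cont_loss_series_cnt + 1

streak = 0
for rel_profit in (-0.4, -0.1, 0.3, -0.2):
    streak = update_loss_streak(streak, rel_profit)
    print(rel_profit, "->", streak)  # 1, 2, 0, 1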
v2realbot/strategyblocks/inits/init_attached_data.py (new file, 103 lines)
@@ -0,0 +1,103 @@
+from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
+import v2realbot.controller.services as cs
+from v2realbot.utils.utils import slice_dict_lists, zoneUTC, safe_get, AttributeDict, filter_timeseries_by_timestamp
+#id = "b11c66d9-a9b6-475a-9ac1-28b11e1b4edf"
+#state = AttributeDict(vars={})
+from rich import print
+
+def attach_previous_data(state):
+    """
+    Attaches data from the previous runner of the same batch.
+    """
+    print("ATTACHING PREVIOUS DATA")
+    runner : Runner
+    #get batch_id of the current runner
+    res, runner = cs.get_runner(state.runner_id)
+    if res < 0:
+        if runner.batch_id is None:
+            print(f"No batch_id found for runner {runner.id}")
+        else:
+            print(f"Couldn't get previous runner {state.runner_id} error: {runner}")
+        return None
+
+    batch_id = runner.batch_id
+    #batch_id = "6a6b0bcf"
+    res, runner_ids = cs.get_archived_runnerslist_byBatchID(batch_id, "desc")
+    if res < 0:
+        msg = f"error when fetching runners of batch {batch_id} {runner_ids}"
+        print(msg)
+        return None
+
+    if runner_ids is None or len(runner_ids) == 0:
+        print(f"NO runners found for batch {batch_id} {runner_ids}")
+        return None
+
+    last_runner = runner_ids[0]
+    print("Previous runner identified:", last_runner)
+
+    #get archived header - to get transferables
+    runner_header : RunArchive = None
+    res, runner_header = cs.get_archived_runner_header_byID(last_runner)
+    if res < 0:
+        print(f"Error when fetching runner header {last_runner}")
+        return None
+
+    state.vars["transferables"] = runner_header.transferables
+    print("INITIALIZED transferables", state.vars["transferables"])
+
+    #get details from the runner
+    print(f"Fetching runner details of {last_runner}")
+    res, val = cs.get_archived_runner_details_byID(last_runner)
+    if res < 0:
+        print(f"no archived runner {last_runner}")
+        return None
+
+    detail = RunArchiveDetail(**val)
+    #print("this is what we fetched", detail.bars)
+
+    # from stratvars directives
+    attach_previous_bar_data = safe_get(state.vars, "attach_previous_bar_data", 50)
+    attach_previous_tick_data = safe_get(state.vars, "attach_previous_tick_data", None)
+
+    #indicators: time as datetime utc
+    indicators = slice_dict_lists(d=detail.indicators[0], last_item=attach_previous_bar_data, time_to_datetime=True)
+
+    #time - datetime utc, updated - timestamp float
+    bars = slice_dict_lists(d=detail.bars, last_item=attach_previous_bar_data, time_to_datetime=True)
+
+    #align the tick data with the bar data
+    if attach_previous_tick_data is None:
+        oldest_timestamp = bars["updated"][0]
+
+        #returns only values newer than oldest_timestamp
+        cbar_inds = filter_timeseries_by_timestamp(detail.indicators[1], oldest_timestamp)
+    else:
+        cbar_inds = slice_dict_lists(d=detail.indicators[1], last_item=attach_previous_tick_data)
+
+    #USE these as INITs - STOP HERE AND COMPARE FIRST
+    #print("state.indicators:", state.indicators, "NEW:", indicators)
+    state.indicators = AttributeDict(**indicators)
+    print("transferred indicators:", len(state.indicators["time"]))
+    #print("state.bars", state.bars, "NEW:", bars)
+    state.bars = AttributeDict(bars)
+    print("transferred bars:", len(state.bars["time"]))
+    #print("state.cbar_indicators", state.cbar_indicators, "NEW:", cbar_inds)
+    state.cbar_indicators = AttributeDict(cbar_inds)
+    print("transferred ticks:", len(state.cbar_indicators["time"]))
+
+    print("TRANSFERABLES INITIALIZED")
+    #bars
+    #transferable_state_vars = ["martingale", "batch_profit"]
+    #1. at init these keys in state.vars are mapped into ext_data: ext_data["transferables"]["martingale"] = state.vars["martingale"]
+    #2. on transfer, everything from ext_data["transferables"] is copied into the same-named state.vars["martingale"]
+    #3. at the end of the day it is stored in the transferables column of RunArchive
+
+    #adding dailyBars from ext_data
+    # if hasattr(detail, "ext_data") and "dailyBars" in detail.ext_data:
+    #     state.dailyBars = detail.ext_data["dailyBars"]
+    return
+
+# if __name__ == "__main__":
+#     attach_previous_data(state)
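
Taken together with the changes above, the handoff between runs reduces to copying one dict per batch day. A sketch with in-memory stand-ins for the archive calls (shapes taken from this change set):

# day N, end of run: archive_runner() stores the live dict in the transferables column
state_vars_day_n = {"transferables": {"martingale": {"cont_loss_series_cnt": 3}}}
archived_transferables = state_vars_day_n["transferables"]

# day N+1, strategy init(): defaults first, then attach_previous_data() overwrites them
state_vars_day_n1 = {"transferables": {"martingale": {"cont_loss_series_cnt": 0}}}
state_vars_day_n1["transferables"] = archived_transferables
assert state_vars_day_n1["transferables"]["martingale"]["cont_loss_series_cnt"] == 3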
@@ -151,7 +151,7 @@ def get_multiplier(state: StrategyState, data, signaloptions: dict, direction: T
 
     #the number of consecutive losing trades gives the multiplier (0 - 1x, 1 loss - 2x, 3 in a row - 4x etc.)
     if martingale_enabled:
-        cont_loss_series_cnt = state.vars["martingale"]["cont_loss_series_cnt"]
+        cont_loss_series_cnt = state.vars["transferables"]["martingale"]["cont_loss_series_cnt"]
         if cont_loss_series_cnt == 0:
             multiplier = 1
         else:
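
The else branch is outside this hunk, so the exact sizing formula is not shown here; one mapping consistent with the comment (0 gives 1x, 1 loss gives 2x, 3 in a row gives 4x) is streak plus one. A hedged sketch, not the actual implementation:

def assumed_position_multiplier(cont_loss_series_cnt: int) -> int:
    # assumption: multiplier = consecutive losses + 1, matching the examples in the comment;
    # the real else branch in get_multiplier may use a different progression
    return 1 if cont_loss_series_cnt == 0 else cont_loss_series_cnt + 1

print([assumed_position_multiplier(n) for n in range(4)])  # [1, 2, 3, 4]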
@@ -112,21 +112,45 @@ def concatenate_weekdays(weekday_filter):
     # Concatenate the weekday strings
     return ','.join(weekday_strings)
 
-def slice_dict_lists(d, last_item, to_tmstp = False):
+def filter_timeseries_by_timestamp(timeseries, timestamp):
+    """
+    Filter a timeseries dictionary, returning a new dictionary with entries
+    where the time value is greater than the provided timestamp.
+
+    Parameters:
+    - timeseries (dict): The original timeseries dictionary.
+    - timestamp (float): The timestamp to filter the timeseries by.
+
+    Returns:
+    - dict: A new timeseries dictionary filtered based on the provided timestamp.
+    """
+    # Find indices where time values are greater than the provided timestamp
+    indices = [i for i, time in enumerate(timeseries['time']) if time > timestamp]
+
+    # Create a new dictionary with values filtered by the indices
+    filtered_timeseries = {key: [value[i] for i in indices] for key, value in timeseries.items()}
+
+    return filtered_timeseries
+
+def slice_dict_lists(d, last_item, to_tmstp = False, time_to_datetime = False):
     """Slices every list in the dictionary to the last last_item items.
 
     Args:
       d: A dictionary.
       last_item: The number of items to keep at the end of each list.
-      to_tmstp: For "time" elements change it to timestamp from datetime if required.
+      to_tmstp: For "time" elements change it from datetime to timestamp if required.
+      time_to_datetime: For "time" elements change it from timestamp to datetime UTC if required,
+        i.e. datetime.fromtimestamp(data['updated']).astimezone(zoneUTC).
     Returns:
       A new dictionary with the sliced lists.
     """
     sliced_d = {}
     for key in d.keys():
         if key == "time" and to_tmstp:
             sliced_d[key] = [datetime.timestamp(t) for t in d[key][-last_item:]]
+        elif key == "time" and time_to_datetime:
+            sliced_d[key] = [datetime.fromtimestamp(t).astimezone(zoneUTC) for t in d[key][-last_item:]]
         else:
             sliced_d[key] = d[key][-last_item:]
     return sliced_d
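
Both helpers operate on column-oriented dicts of parallel lists, which is how attach_previous_data() aligns yesterday's tick data with the attached bars. A quick usage sketch (run inside the project so the import resolves):

from v2realbot.utils.utils import slice_dict_lists, filter_timeseries_by_timestamp

ticks = {"time": [1.0, 2.0, 3.0], "price": [10.0, 10.5, 11.0]}

# entries strictly newer than t=1.0
print(filter_timeseries_by_timestamp(ticks, 1.0))
# {'time': [2.0, 3.0], 'price': [10.5, 11.0]}

# last two items per column, with "time" converted to UTC datetimes
print(slice_dict_lists(d=ticks, last_item=2, time_to_datetime=True))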