Compare commits
6 Commits
feature/tr
...
bug/attach
| Author | SHA1 | Date | |
|---|---|---|---|
| 50ad60fa12 | |||
| adc7c3c1b6 | |||
| a6343abe88 | |||
| 075984fcff | |||
| 5fce627fe3 | |||
| 8de1356aa8 |
89
testy/getrunnerdetail.py
Normal file
89
testy/getrunnerdetail.py
Normal file
@ -0,0 +1,89 @@
|
||||
|
||||
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
|
||||
import v2realbot.controller.services as cs
|
||||
from v2realbot.utils.utils import slice_dict_lists,zoneUTC,safe_get, AttributeDict
|
||||
id = "b11c66d9-a9b6-475a-9ac1-28b11e1b4edf"
|
||||
state = AttributeDict(vars={})
|
||||
|
||||
##základ pro init_attached_data in strategy.init
|
||||
|
||||
# def get_previous_runner(state):
|
||||
# runner : Runner
|
||||
# res, runner = cs.get_runner(state.runner_id)
|
||||
# if res < 0:
|
||||
# print(f"Not running {id}")
|
||||
# return 0, None
|
||||
|
||||
# return 0, runner.batch_id
|
||||
|
||||
def attach_previous_data(state):
|
||||
runner : Runner
|
||||
#get batch_id of current runer
|
||||
res, runner = cs.get_runner(state.runner_id)
|
||||
if res < 0 or runner.batch_id is None:
|
||||
print(f"Couldnt get previous runner {val}")
|
||||
return None
|
||||
|
||||
batch_id = runner.batch_id
|
||||
#batch_id = "6a6b0bcf"
|
||||
|
||||
res, runner_ids =cs.get_archived_runnerslist_byBatchID(batch_id, "desc")
|
||||
if res < 0:
|
||||
msg = f"error whne fetching runners of batch {batch_id} {runner_ids}"
|
||||
print(msg)
|
||||
return None
|
||||
|
||||
if runner_ids is None or len(runner_ids) == 0:
|
||||
print(f"no runners found for batch {batch_id} {runner_ids}")
|
||||
return None
|
||||
|
||||
last_runner = runner_ids[0]
|
||||
print("Previous runner identified:", last_runner)
|
||||
|
||||
#get details from the runner
|
||||
res, val = cs.get_archived_runner_details_byID(last_runner)
|
||||
if res < 0:
|
||||
print(f"no archived runner {last_runner}")
|
||||
|
||||
detail = RunArchiveDetail(**val)
|
||||
#print("toto jsme si dotahnuli", detail.bars)
|
||||
|
||||
# from stratvars directives
|
||||
attach_previous_bars_indicators = safe_get(state.vars, "attach_previous_bars_indicators", 50)
|
||||
attach_previous_cbar_indicators = safe_get(state.vars, "attach_previous_cbar_indicators", 50)
|
||||
# [stratvars]
|
||||
# attach_previous_bars_indicators = 50
|
||||
# attach_previous_cbar_indicators = 50
|
||||
|
||||
#indicators datetime utc
|
||||
indicators = slice_dict_lists(d=detail.indicators[0],last_item=attach_previous_bars_indicators, time_to_datetime=True)
|
||||
|
||||
#time -datetime utc, updated - timestamp float
|
||||
bars = slice_dict_lists(d=detail.bars, last_item=attach_previous_bars_indicators, time_to_datetime=True)
|
||||
|
||||
#cbar_indicatzors #float
|
||||
cbar_inds = slice_dict_lists(d=detail.indicators[1],last_item=attach_previous_cbar_indicators)
|
||||
|
||||
#USE these as INITs - TADY SI TO JESTE ZASTAVIT a POROVNAT
|
||||
print(f"{state.indicators=} NEW:{indicators=}")
|
||||
state.indicators = indicators
|
||||
print(f"{state.bars=} NEW:{bars=}")
|
||||
state.bars = bars
|
||||
print(f"{state.cbar_indicators=} NEW:{cbar_inds=}")
|
||||
state.cbar_indicators = cbar_inds
|
||||
|
||||
print("BARS and INDS INITIALIZED")
|
||||
#bars
|
||||
|
||||
|
||||
#tady budou pripadne dalsi inicializace, z ext_data
|
||||
print("EXT_DATA", detail.ext_data)
|
||||
#podle urciteho nastaveni napr.v konfiguraci se pouziji urcite promenne
|
||||
|
||||
#pridavame dailyBars z extData
|
||||
# if hasattr(detail, "ext_data") and "dailyBars" in detail.ext_data:
|
||||
# state.dailyBars = detail.ext_data["dailyBars"]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
attach_previous_data(state)
|
||||
@ -16,6 +16,7 @@ from v2realbot.strategyblocks.newtrade.signals import signal_search
|
||||
from v2realbot.strategyblocks.activetrade.activetrade_hub import manage_active_trade
|
||||
from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
|
||||
from v2realbot.strategyblocks.inits.init_directives import intialize_directive_conditions
|
||||
from v2realbot.strategyblocks.inits.init_attached_data import attach_previous_data
|
||||
from alpaca.trading.client import TradingClient
|
||||
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
|
||||
from alpaca.trading.models import Calendar
|
||||
@ -116,7 +117,8 @@ def init(state: StrategyState):
|
||||
state.vars.loaded_models = {}
|
||||
|
||||
#state attributes for martingale sizing mngmt
|
||||
state.vars["martingale"] = dict(cont_loss_series_cnt=0)
|
||||
state.vars["transferables"] = {}
|
||||
state.vars["transferables"]["martingale"] = dict(cont_loss_series_cnt=0)
|
||||
|
||||
#INITIALIZE CBAR INDICATORS - do vlastni funkce
|
||||
#state.cbar_indicators['ivwap'] = []
|
||||
@ -131,6 +133,9 @@ def init(state: StrategyState):
|
||||
initialize_dynamic_indicators(state)
|
||||
intialize_directive_conditions(state)
|
||||
|
||||
#attach part of yesterdays data, bars, indicators, cbar_indicators
|
||||
attach_previous_data(state)
|
||||
|
||||
#intitialize indicator mapping (for use in operation) - mozna presunout do samostatne funkce prip dat do base kdyz se osvedci
|
||||
local_dict_cbar_inds = {key: state.cbar_indicators[key] for key in state.cbar_indicators.keys() if key != "time"}
|
||||
local_dict_inds = {key: state.indicators[key] for key in state.indicators.keys() if key != "time"}
|
||||
|
||||
@ -524,7 +524,7 @@ class Backtester:
|
||||
if actual_minus_reserved <= 0:
|
||||
cena = price if price else self.get_last_price(time, self.symbol)
|
||||
if (self.cash - reserved_price - float(int(size)*float(cena))) < 0:
|
||||
printanyway("not enough cash for shorting. cash",self.cash,"reserved",reserved,"available",self.cash-reserved,"needed",float(int(size)*float(cena)))
|
||||
printanyway("ERROR: not enough cash for shorting. cash",self.cash,"reserved",reserved,"available",self.cash-reserved,"needed",float(int(size)*float(cena)))
|
||||
return -1
|
||||
|
||||
#check for available cash
|
||||
@ -550,7 +550,7 @@ class Backtester:
|
||||
if actual_plus_reserved_qty >= 0:
|
||||
cena = price if price else self.get_last_price(time, self.symbol)
|
||||
if (self.cash - reserved_price - float(int(size)*float(cena))) < 0:
|
||||
printanyway("not enough cash to buy long. cash",self.cash,"reserved_qty",reserved_qty,"reserved_price",reserved_price, "available",self.cash-reserved_price,"needed",float(int(size)*float(cena)))
|
||||
printanyway("ERROR: not enough cash to buy long. cash",self.cash,"reserved_qty",reserved_qty,"reserved_price",reserved_price, "available",self.cash-reserved_price,"needed",float(int(size)*float(cena)))
|
||||
return -1
|
||||
|
||||
id = str(uuid4())
|
||||
|
||||
@ -301,8 +301,7 @@ class RunArchive(BaseModel):
|
||||
bt_from: Optional[datetime] = None
|
||||
bt_to: Optional[datetime] = None
|
||||
strat_json: Optional[str] = None
|
||||
##bude decomiss, misto toho stratvars_toml
|
||||
stratvars: Optional[dict] = None
|
||||
transferables: Optional[dict] = None #varaibles that are transferrable to next run
|
||||
settings: Optional[dict] = None
|
||||
ilog_save: Optional[bool] = False
|
||||
profit: float = 0
|
||||
|
||||
@ -83,5 +83,6 @@ def row_to_runarchive(row: dict) -> RunArchive:
|
||||
end_positions=int(row['end_positions']),
|
||||
end_positions_avgp=float(row['end_positions_avgp']),
|
||||
metrics=orjson.loads(row['metrics']),
|
||||
stratvars_toml=row['stratvars_toml']
|
||||
stratvars_toml=row['stratvars_toml'],
|
||||
transferables=orjson.loads(row['transferables']) if row['transferables'] else None
|
||||
)
|
||||
@ -1,7 +1,6 @@
|
||||
|
||||
import v2realbot.common.db as db
|
||||
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
|
||||
import orjson
|
||||
from v2realbot.common.model import ConfigItem
|
||||
import v2realbot.utils.config_handler as ch
|
||||
|
||||
# region CONFIG db services
|
||||
|
||||
@ -3,7 +3,7 @@ from uuid import UUID, uuid4
|
||||
import pickle
|
||||
from alpaca.data.historical import StockHistoricalDataClient
|
||||
from alpaca.data.requests import StockTradesRequest, StockBarsRequest
|
||||
from alpaca.data.enums import DataFeed
|
||||
from alpaca.data.enums import DataFeed
|
||||
from alpaca.data.timeframe import TimeFrame
|
||||
from v2realbot.strategy.base import StrategyState
|
||||
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
|
||||
@ -35,6 +35,7 @@ from sqlite3 import OperationalError, Row
|
||||
import v2realbot.strategyblocks.indicators.custom as ci
|
||||
from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
|
||||
from v2realbot.strategyblocks.indicators.indicators_hub import populate_dynamic_indicators
|
||||
from v2realbot.strategyblocks.inits.init_attached_data import attach_previous_data
|
||||
from v2realbot.interfaces.backtest_interface import BacktestInterface
|
||||
import os
|
||||
import v2realbot.reporting.metricstoolsimage as mt
|
||||
@ -102,10 +103,10 @@ def create_stratin(si: StrategyInstance):
|
||||
#validate toml
|
||||
res, stp = parse_toml_string(si.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1,"stratvars invalid")
|
||||
return (-1,f"stratvars invalid: {stp}")
|
||||
res, adp = parse_toml_string(si.add_data_conf)
|
||||
if res < 0:
|
||||
return (-1, "None")
|
||||
return (-1, f"add data conf invalid {adp}")
|
||||
si.id = uuid4()
|
||||
#print(si)
|
||||
db.stratins.append(si)
|
||||
@ -119,10 +120,10 @@ def modify_stratin(si: StrategyInstance, id: UUID):
|
||||
return (-1, "strat is running, use modify_stratin_running")
|
||||
res, stp = parse_toml_string(si.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "stratvars invalid")
|
||||
return (-1, f"stratvars invalid {stp}")
|
||||
res, adp = parse_toml_string(si.add_data_conf)
|
||||
if res < 0:
|
||||
return (-1, "add data conf invalid")
|
||||
return (-1, f"add data conf invalid {adp}")
|
||||
for i in db.stratins:
|
||||
if str(i.id) == str(id):
|
||||
#print("removing",i)
|
||||
@ -180,14 +181,14 @@ def modify_stratin_running(si: StrategyInstance, id: UUID):
|
||||
#validate toml
|
||||
res,stp = parse_toml_string(si.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "new stratvars format invalid")
|
||||
return (-1, f"new stratvars format invalid {stp}")
|
||||
for i in db.stratins:
|
||||
if str(i.id) == str(id):
|
||||
if not is_stratin_running(id=str(id)):
|
||||
return (-1, "not running")
|
||||
res,stp_old = parse_toml_string(i.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "current stratin stratvars invalid")
|
||||
return (-1, f"current stratin stratvars invalid {stp_old}")
|
||||
#TODO reload running strat
|
||||
#print(stp)
|
||||
#print("starting injection", stp)
|
||||
@ -412,7 +413,7 @@ def run_batch_stratin(id: UUID, runReq: RunRequest):
|
||||
def get_market_days_in_interval(datefrom, dateto, note = None, id = None):
|
||||
#getting dates from calendat
|
||||
clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False, paper=True)
|
||||
calendar_request = GetCalendarRequest(start=datefrom,end=dateto)
|
||||
calendar_request = GetCalendarRequest(start=datefrom.date(),end=dateto.date())
|
||||
cal_dates = clientTrading.get_calendar(calendar_request)
|
||||
#list(Calendar)
|
||||
# Calendar
|
||||
@ -446,7 +447,7 @@ def run_batch_stratin(id: UUID, runReq: RunRequest):
|
||||
cal_list.append(RunDay(start = start_time, end = end_time, note = note, id = id))
|
||||
|
||||
print(f"Getting interval dates from - to - RESULT ({len(cal_list)}):")
|
||||
print(cal_list)
|
||||
#print(cal_list)
|
||||
return cal_list
|
||||
|
||||
#getting days to run into RunDays format
|
||||
@ -618,10 +619,10 @@ def run_stratin(id: UUID, runReq: RunRequest, synchronous: bool = False, inter_b
|
||||
#validate toml
|
||||
res, stp = parse_toml_string(i.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "stratvars invalid")
|
||||
return (-1, f"stratvars invalid {stp}")
|
||||
res, adp = parse_toml_string(i.add_data_conf)
|
||||
if res < 0:
|
||||
return (-1, "add data conf invalid")
|
||||
return (-1, f"add data conf invalid {adp}")
|
||||
id = uuid4()
|
||||
print(f"RUN {id} INITIATED")
|
||||
name = i.name
|
||||
@ -925,7 +926,8 @@ def archive_runner(runner: Runner, strat: StrategyInstance, inter_batch_params:
|
||||
end_positions=strat.state.positions,
|
||||
end_positions_avgp=round(float(strat.state.avgp),3),
|
||||
metrics=results_metrics,
|
||||
stratvars_toml=runner.run_stratvars_toml
|
||||
stratvars_toml=runner.run_stratvars_toml,
|
||||
transferables=strat.state.vars["transferables"]
|
||||
)
|
||||
|
||||
#flatten indicators from numpy array
|
||||
@ -1112,7 +1114,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
|
||||
# Total count query
|
||||
total_count_query = """
|
||||
SELECT COUNT(*) FROM runner_header
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value OR symbol like :search_value OR name like :search_value)
|
||||
"""
|
||||
c.execute(total_count_query, {'search_value': f'%{search_value}%'})
|
||||
total_count = c.fetchone()[0]
|
||||
@ -1127,7 +1129,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
|
||||
SUM(profit) OVER (PARTITION BY batch_id) AS batch_profit,
|
||||
COUNT(*) OVER (PARTITION BY batch_id) AS batch_count
|
||||
FROM runner_header
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value OR symbol like :search_value OR name like :search_value)
|
||||
),
|
||||
InterleavedGroups AS (
|
||||
SELECT *,
|
||||
@ -1154,7 +1156,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
|
||||
# Filtered count query
|
||||
filtered_count_query = """
|
||||
SELECT COUNT(*) FROM runner_header
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value OR symbol like :search_value OR name like :search_value)
|
||||
"""
|
||||
c.execute(filtered_count_query, {'search_value': f'%{search_value}%'})
|
||||
filtered_count = c.fetchone()[0]
|
||||
@ -1220,17 +1222,43 @@ def get_archived_runner_header_byID(id: UUID) -> RunArchive:
|
||||
# else:
|
||||
# return 0, res
|
||||
|
||||
#vrátí seznam runneru s danym batch_id
|
||||
def get_archived_runnerslist_byBatchID(batch_id: str):
|
||||
# #vrátí seznam runneru s danym batch_id
|
||||
# def get_archived_runnerslist_byBatchID(batch_id: str):
|
||||
# conn = pool.get_connection()
|
||||
# try:
|
||||
# cursor = conn.cursor()
|
||||
# cursor.execute(f"SELECT runner_id FROM runner_header WHERE batch_id='{str(batch_id)}'")
|
||||
# runner_list = [row[0] for row in cursor.fetchall()]
|
||||
# finally:
|
||||
# pool.release_connection(conn)
|
||||
# return 0, runner_list
|
||||
|
||||
#update that allows to sort
|
||||
def get_archived_runnerslist_byBatchID(batch_id: str, sort_order: str = "asc"):
|
||||
"""
|
||||
Fetches all runner records by batch_id, sorted by the 'started' column.
|
||||
|
||||
:param batch_id: The batch ID to filter runners by.
|
||||
:param sort_order: The sort order of the 'started' column. Defaults to 'asc'.
|
||||
Accepts 'asc' for ascending or 'desc' for descending order.
|
||||
:return: A tuple with the first element being a status code and the second being the list of runner_ids.
|
||||
"""
|
||||
# Validate sort_order
|
||||
if sort_order.lower() not in ['asc', 'desc']:
|
||||
return -1, [] # Returning an error code and an empty list in case of invalid sort_order
|
||||
|
||||
conn = pool.get_connection()
|
||||
try:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(f"SELECT runner_id FROM runner_header WHERE batch_id='{str(batch_id)}'")
|
||||
query = f"""SELECT runner_id FROM runner_header
|
||||
WHERE batch_id=?
|
||||
ORDER BY datetime(started) {sort_order.upper()}"""
|
||||
cursor.execute(query, (batch_id,))
|
||||
runner_list = [row[0] for row in cursor.fetchall()]
|
||||
finally:
|
||||
pool.release_connection(conn)
|
||||
return 0, runner_list
|
||||
|
||||
|
||||
def insert_archive_header(archeader: RunArchive):
|
||||
conn = pool.get_connection()
|
||||
try:
|
||||
@ -1239,11 +1267,11 @@ def insert_archive_header(archeader: RunArchive):
|
||||
|
||||
res = c.execute("""
|
||||
INSERT INTO runner_header
|
||||
(runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, strat_json, settings, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics, stratvars_toml)
|
||||
(runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, strat_json, settings, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics, stratvars_toml, transferables)
|
||||
VALUES
|
||||
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml))
|
||||
(str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml, orjson.dumps(archeader.transferables).decode('utf-8')))
|
||||
|
||||
#retry not yet supported for statement format above
|
||||
#res = execute_with_retry(c,statement)
|
||||
@ -1567,7 +1595,7 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
||||
# print(row)
|
||||
res, toml_parsed = parse_toml_string(tomlino)
|
||||
if res < 0:
|
||||
return (-2, "toml invalid")
|
||||
return (-2, f"toml invalid: {toml_parsed}")
|
||||
|
||||
#print("parsed toml", toml_parsed)
|
||||
|
||||
@ -1664,10 +1692,15 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
||||
|
||||
##intialize required vars from strat init
|
||||
state.vars["loaded_models"] = {}
|
||||
#state attributes for martingale sizing mngmt
|
||||
state.vars["transferables"] = {}
|
||||
state.vars["transferables"]["martingale"] = dict(cont_loss_series_cnt=0)
|
||||
|
||||
##intialize dynamic indicators
|
||||
initialize_dynamic_indicators(state)
|
||||
|
||||
#TODO vazit attached data (z toho potrebuji jen transferables, tzn. najit nejak predchozi runner a prelipnout transferables od zacatku)
|
||||
#nejspis upravit attach_previous_data a nebo udelat specialni verzi
|
||||
#attach_previous_data(state)
|
||||
|
||||
# print("subtype")
|
||||
# function = "ci."+subtype+"."+subtype
|
||||
@ -1808,10 +1841,10 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
||||
|
||||
#vracime list, kde pozice 0 je bar indicators, pozice 1 je ticks indicators
|
||||
if output == "bar":
|
||||
return 0, [output_dict, []]
|
||||
return 0, [output_dict, {}]
|
||||
#return 0, [new_inds[indicator.name], []]
|
||||
else:
|
||||
return 0, [[], output_dict]
|
||||
return 0, [{}, output_dict]
|
||||
#return 0, [[], new_tick_inds[indicator.name]]
|
||||
|
||||
except Exception as e:
|
||||
|
||||
@ -667,14 +667,14 @@
|
||||
</div>
|
||||
<div class="form-group mt-3">
|
||||
<label for="logHere" class="form-label">Log</label>
|
||||
<div id="log-container">
|
||||
<pre id="log-content"></pre>
|
||||
<div id="log-container"style="height:700px;border:1px solid black;">
|
||||
<!-- <pre id="log-content"></pre> -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-primary" id="logRefreshButton" value="Refresh">Refresh</button>
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
<button type="button" class="btn btn-secondary" id="closeLogModal" data-bs-dismiss="modal">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@ -704,6 +704,10 @@
|
||||
<label for="stratvars" class="form-label">Stratvars</label>
|
||||
<textarea class="form-control" rows="8" id="editstratvars" name="stratvars"></textarea>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="stratvars" class="form-label">Transferables</label>
|
||||
<textarea class="form-control" rows="8" id="edittransferables" name="stratvars"></textarea>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="strat_json" class="form-label">Strat JSON</label>
|
||||
<textarea class="form-control" rows="6" id="editstratjson" name="stratjson"></textarea>
|
||||
@ -1162,9 +1166,9 @@
|
||||
<!-- <script src="/static/js/archivetables.js?v=1.05"></script> -->
|
||||
<!-- archiveTables split into separate files -->
|
||||
<script src="/static/js/tables/archivetable/init.js?v=1.12"></script>
|
||||
<script src="/static/js/tables/archivetable/functions.js?v=1.10"></script>
|
||||
<script src="/static/js/tables/archivetable/functions.js?v=1.11"></script>
|
||||
<script src="/static/js/tables/archivetable/modals.js?v=1.07"></script>
|
||||
<script src="/static/js/tables/archivetable/handlers.js?v=1.08"></script>
|
||||
<script src="/static/js/tables/archivetable/handlers.js?v=1.11"></script>
|
||||
|
||||
<!-- Runmanager functionality -->
|
||||
<script src="/static/js/tables/runmanager/init.js?v=1.1"></script>
|
||||
@ -1174,7 +1178,7 @@
|
||||
|
||||
<script src="/static/js/livewebsocket.js?v=1.02"></script>
|
||||
<script src="/static/js/realtimechart.js?v=1.02"></script>
|
||||
<script src="/static/js/mytables.js?v=1.02"></script>
|
||||
<script src="/static/js/mytables.js?v=1.03"></script>
|
||||
<script src="/static/js/testlist.js?v=1.01"></script>
|
||||
<script src="/static/js/ml.js?v=1.02"></script>
|
||||
<script src="/static/js/common.js?v=1.01"></script>
|
||||
|
||||
@ -638,7 +638,7 @@ $(document).ready(function () {
|
||||
else{
|
||||
$('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
|
||||
}
|
||||
|
||||
$('#edittransferables').val(JSON.stringify(row.transferables,null,2));
|
||||
|
||||
$('#editstratjson').val(row.strat_json);
|
||||
}
|
||||
|
||||
@ -90,9 +90,55 @@ $(document).ready(function () {
|
||||
|
||||
monaco.languages.register({ id: 'python' });
|
||||
monaco.languages.register({ id: 'json' });
|
||||
//Register mylogs language
|
||||
monaco.languages.register({ id: 'mylogs' });
|
||||
// Register the TOML language
|
||||
monaco.languages.setLanguageConfiguration('mylogs', {
|
||||
comments: {
|
||||
lineComment: '//', // Adjust if your logs use a different comment symbol
|
||||
},
|
||||
brackets: [['[', ']'], ['{', '}']], // Array and object brackets
|
||||
autoClosingPairs: [
|
||||
{ open: '{', close: '}', notIn: ['string'] },
|
||||
{ open: '"', close: '"', notIn: ['string', 'comment'] },
|
||||
{ open: "'", close: "'", notIn: ['string', 'comment'] },
|
||||
],
|
||||
});
|
||||
monaco.languages.setMonarchTokensProvider('mylogs', {
|
||||
tokenizer: {
|
||||
root: [
|
||||
[/#.*/, 'comment'], // Comments (if applicable)
|
||||
|
||||
// Timestamps
|
||||
[/\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d+/, 'timestamp'],
|
||||
|
||||
// Log Levels
|
||||
[/\b(INFO|DEBUG|WARNING|ERROR|CRITICAL)\b/, 'log-level'],
|
||||
|
||||
// Strings
|
||||
[/".*"/, 'string'],
|
||||
[/'.*'/, 'string'],
|
||||
|
||||
// Key-Value Pairs
|
||||
[/[A-Za-z_]+\s*:/, 'key'],
|
||||
[/-?\d+\.\d+/, 'number.float'], // Floating-point
|
||||
[/-?\d+/, 'number.integer'], // Integers
|
||||
[/\btrue\b/, 'boolean.true'],
|
||||
[/\bfalse\b/, 'boolean.false'],
|
||||
|
||||
// Other Words and Symbols
|
||||
[/[A-Za-z_]+/, 'identifier'],
|
||||
[/[ \t\r\n]+/, 'white'],
|
||||
[/[\[\]{}(),]/, 'delimiter'], // Expand if more delimiters exist
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
monaco.languages.register({ id: 'toml' });
|
||||
|
||||
|
||||
|
||||
// Define the TOML language configuration
|
||||
monaco.languages.setLanguageConfiguration('toml', {
|
||||
comments: {
|
||||
|
||||
@ -6,6 +6,7 @@ let editor_diff_arch1
|
||||
let editor_diff_arch2
|
||||
var archData = null
|
||||
var batchHeaders = []
|
||||
var editorLog = null
|
||||
|
||||
function refresh_arch_and_callback(row, callback) {
|
||||
//console.log("entering refresh")
|
||||
@ -462,7 +463,7 @@ function display_batch_report(batch_id) {
|
||||
|
||||
function refresh_logfile() {
|
||||
logfile = $("#logFileSelect").val()
|
||||
lines = 700
|
||||
lines = 1200
|
||||
$.ajax({
|
||||
url:"/log?lines="+lines+"&logfile="+logfile,
|
||||
beforeSend: function (xhr) {
|
||||
@ -472,13 +473,34 @@ function refresh_logfile() {
|
||||
contentType: "application/json",
|
||||
dataType: "json",
|
||||
success:function(response){
|
||||
if (editorLog) {
|
||||
editorLog.dispose();
|
||||
}
|
||||
if (response.lines.length == 0) {
|
||||
$('#log-content').html("no records");
|
||||
value = "no records";
|
||||
// $('#log-content').html("no records");
|
||||
}
|
||||
else {
|
||||
var escapedLines = response.lines.map(line => escapeHtml(line));
|
||||
$('#log-content').html(escapedLines.join('\n'));
|
||||
}
|
||||
//console.log(response.lines)
|
||||
//var escapedLines = response.lines.map(line => escapeHtml(line));
|
||||
value = response.lines.join('\n')
|
||||
// $('#log-content').html(escapedLines.join('\n'));
|
||||
}
|
||||
require(["vs/editor/editor.main"], () => {
|
||||
editorLog = monaco.editor.create(document.getElementById('log-container'), {
|
||||
value: value,
|
||||
language: 'mylogs',
|
||||
theme: 'tomlTheme-dark',
|
||||
automaticLayout: true,
|
||||
readOnly: true
|
||||
});
|
||||
});
|
||||
// Focus at the end of the file:
|
||||
const model = editorLog.getModel();
|
||||
const lastLineNumber = model.getLineCount();
|
||||
const lastLineColumn = model.getLineMaxColumn(lastLineNumber);
|
||||
editorLog.setPosition({ lineNumber: lastLineNumber, column: lastLineColumn });
|
||||
editorLog.revealPosition({ lineNumber: lastLineNumber, column: lastLineColumn });
|
||||
},
|
||||
error: function(xhr, status, error) {
|
||||
var err = eval("(" + xhr.responseText + ")");
|
||||
|
||||
@ -265,8 +265,8 @@ $(document).ready(function () {
|
||||
|
||||
$('#diff_first').text(record1.name);
|
||||
$('#diff_second').text(record2.name);
|
||||
$('#diff_first_id').text(data1.id);
|
||||
$('#diff_second_id').text(data2.id);
|
||||
$('#diff_first_id').text(data1.id + ' Batch: ' + data1.batch_id);
|
||||
$('#diff_second_id').text(data2.id + ' Batch: ' + data2.batch_id);
|
||||
|
||||
//monaco
|
||||
require(["vs/editor/editor.main"], () => {
|
||||
@ -358,8 +358,13 @@ $(document).ready(function () {
|
||||
})
|
||||
});
|
||||
|
||||
$('#closeLogModal').click(function () {
|
||||
editorLog.dispose()
|
||||
});
|
||||
|
||||
//button to query log
|
||||
$('#logRefreshButton').click(function () {
|
||||
editorLog.dispose()
|
||||
refresh_logfile()
|
||||
});
|
||||
|
||||
@ -445,7 +450,7 @@ $(document).ready(function () {
|
||||
$('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
|
||||
}
|
||||
|
||||
|
||||
$('#edittransferables').val(JSON.stringify(row.transferables,null,2));
|
||||
$('#editstratjson').val(row.strat_json);
|
||||
}
|
||||
});
|
||||
|
||||
@ -35,40 +35,62 @@ class StrategyClassicSL(Strategy):
|
||||
|
||||
max_sum_profit_to_quit_rel = safe_get(self.state.vars, "max_sum_profit_to_quit_rel", None)
|
||||
max_sum_loss_to_quit_rel = safe_get(self.state.vars, "max_sum_loss_to_quit_rel", None)
|
||||
#load typ direktivy hard/soft cutoff
|
||||
hard_cutoff = safe_get(self.state.vars, "hard_cutoff", False)
|
||||
|
||||
rel_profit = round(float(np.sum(self.state.rel_profit_cum)),5)
|
||||
if max_sum_profit_to_quit_rel is not None:
|
||||
if rel_profit >= float(max_sum_profit_to_quit_rel):
|
||||
self.state.ilog(e=f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
msg = f"QUITTING {hard_cutoff=} MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}"
|
||||
printanyway(msg)
|
||||
self.state.ilog(e=msg)
|
||||
self.state.vars.pending = "max_sum_profit_to_quit_rel"
|
||||
if self.mode not in [Mode.BT, Mode.PREP]:
|
||||
send_to_telegram(f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
self.signal_stop = True
|
||||
send_to_telegram(msg)
|
||||
if hard_cutoff:
|
||||
self.hard_stop = True
|
||||
else:
|
||||
self.soft_stop = True
|
||||
return True
|
||||
if max_sum_loss_to_quit_rel is not None:
|
||||
if rel_profit < 0 and rel_profit <= float(max_sum_loss_to_quit_rel):
|
||||
self.state.ilog(e=f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
msg=f"QUITTING {hard_cutoff=} MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}"
|
||||
printanyway(msg)
|
||||
self.state.ilog(e=msg)
|
||||
self.state.vars.pending = "max_sum_loss_to_quit_rel"
|
||||
if self.mode not in [Mode.BT, Mode.PREP]:
|
||||
send_to_telegram(f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
self.signal_stop = True
|
||||
send_to_telegram(msg)
|
||||
if hard_cutoff:
|
||||
self.hard_stop = True
|
||||
else:
|
||||
self.soft_stop = True
|
||||
return True
|
||||
|
||||
if max_sum_profit_to_quit is not None:
|
||||
if float(self.state.profit) >= float(max_sum_profit_to_quit):
|
||||
self.state.ilog(e=f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
msg = f"QUITTING {hard_cutoff=} MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}"
|
||||
printanyway(msg)
|
||||
self.state.ilog(e=msg)
|
||||
self.state.vars.pending = "max_sum_profit_to_quit"
|
||||
if self.mode not in [Mode.BT, Mode.PREP]:
|
||||
send_to_telegram(f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
self.signal_stop = True
|
||||
send_to_telegram(msg)
|
||||
if hard_cutoff:
|
||||
self.hard_stop = True
|
||||
else:
|
||||
self.soft_stop = True
|
||||
return True
|
||||
if max_sum_loss_to_quit is not None:
|
||||
if float(self.state.profit) < 0 and float(self.state.profit) <= float(max_sum_loss_to_quit):
|
||||
self.state.ilog(e=f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
msg = f"QUITTING {hard_cutoff=} MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}"
|
||||
printanyway(msg)
|
||||
self.state.ilog(e=msg)
|
||||
self.state.vars.pending = "max_sum_loss_to_quit"
|
||||
if self.mode not in [Mode.BT, Mode.PREP]:
|
||||
send_to_telegram(f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
|
||||
self.signal_stop = True
|
||||
send_to_telegram(msg)
|
||||
if hard_cutoff:
|
||||
self.hard_stop = True
|
||||
else:
|
||||
self.soft_stop = True
|
||||
return True
|
||||
|
||||
return False
|
||||
@ -154,8 +176,8 @@ class StrategyClassicSL(Strategy):
|
||||
rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
|
||||
|
||||
#pro martingale updatujeme loss_series_cnt
|
||||
self.state.vars["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["martingale"]["cont_loss_series_cnt"]+1
|
||||
self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt na {self.state.vars['martingale']['cont_loss_series_cnt']}")
|
||||
self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"]+1
|
||||
self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt na {self.state.vars['transferables']['martingale']['cont_loss_series_cnt']}")
|
||||
|
||||
self.state.ilog(e=f"BUY notif - SHORT PROFIT: {partial_exit=} {partial_last=} {round(float(trade_profit),3)} celkem:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum=str(self.state.rel_profit_cum), bought_amount=bought_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
|
||||
|
||||
@ -303,8 +325,8 @@ class StrategyClassicSL(Strategy):
|
||||
rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
|
||||
|
||||
#pro martingale updatujeme loss_series_cnt
|
||||
self.state.vars["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["martingale"]["cont_loss_series_cnt"]+1
|
||||
self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt na {self.state.vars['martingale']['cont_loss_series_cnt']}")
|
||||
self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"] = 0 if rel_profit > 0 else self.state.vars["transferables"]["martingale"]["cont_loss_series_cnt"]+1
|
||||
self.state.ilog(lvl=1, e=f"update cont_loss_series_cnt na {self.state.vars['transferables']['martingale']['cont_loss_series_cnt']}")
|
||||
|
||||
self.state.ilog(e=f"SELL notif - LONG PROFIT {partial_exit=} {partial_last=}:{round(float(trade_profit),3)} celkem:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum = str(self.state.rel_profit_cum), sold_amount=sold_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
|
||||
|
||||
@ -414,7 +436,7 @@ class StrategyClassicSL(Strategy):
|
||||
populate_all_indicators(item, self.state)
|
||||
|
||||
#pro přípravu dat next nevoláme
|
||||
if self.mode == Mode.PREP:
|
||||
if self.mode == Mode.PREP or self.soft_stop:
|
||||
return
|
||||
else:
|
||||
self.next(item, self.state)
|
||||
|
||||
@ -80,7 +80,8 @@ class Strategy:
|
||||
self.pe = pe
|
||||
self.se = se
|
||||
#signal stop - internal
|
||||
self.signal_stop = False
|
||||
self.hard_stop = False #indikuje hard stop, tedy vypnuti strategie
|
||||
self.soft_stop = False #indikuje soft stop (napr. při dosažení max zisku/ztráty), tedy pokracovani strategie, vytvareni dat, jen bez obchodu
|
||||
|
||||
#prdelat queue na dynamic - podle toho jak bud uchtit pracovat s multiresolutions
|
||||
#zatim jen jedna q1
|
||||
@ -433,7 +434,7 @@ class Strategy:
|
||||
#printnow(current_thread().name, "Items waiting in queue:", self.q1.qsize())
|
||||
except queue.Empty:
|
||||
#check internal signals - for profit/loss optim etc - valid for runner
|
||||
if self.signal_stop:
|
||||
if self.hard_stop:
|
||||
print(current_thread().name, "Stopping signal - internal")
|
||||
break
|
||||
|
||||
@ -454,7 +455,7 @@ class Strategy:
|
||||
if item == "last" or self.se.is_set():
|
||||
print(current_thread().name, "stopping")
|
||||
break
|
||||
elif self.signal_stop:
|
||||
elif self.hard_stop:
|
||||
print(current_thread().name, "Stopping signal - internal")
|
||||
break
|
||||
elif self.pe.is_set():
|
||||
|
||||
112
v2realbot/strategyblocks/inits/init_attached_data.py
Normal file
112
v2realbot/strategyblocks/inits/init_attached_data.py
Normal file
@ -0,0 +1,112 @@
|
||||
|
||||
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
|
||||
import v2realbot.controller.services as cs
|
||||
from v2realbot.utils.utils import slice_dict_lists,zoneUTC,safe_get, AttributeDict, filter_timeseries_by_timestamp
|
||||
#id = "b11c66d9-a9b6-475a-9ac1-28b11e1b4edf"
|
||||
#state = AttributeDict(vars={})
|
||||
from rich import print
|
||||
from traceback import format_exc
|
||||
|
||||
def attach_previous_data(state):
    """Attach data from the previous runner of the same batch.

    Looks up the current runner's batch, fetches the most recent archived
    runner of that batch and seeds ``state`` in place with its
    transferables, bars, bar indicators and tick (cbar) indicators so the
    strategy can continue where the previous run left off.

    Args:
        state: Strategy state object; reads ``state.runner_id`` and
            ``state.vars`` directives ``attach_previous_bar_data`` /
            ``attach_previous_tick_data``, writes ``state.vars["transferables"]``,
            ``state.indicators``, ``state.bars`` and ``state.cbar_indicators``.

    Returns:
        None on any failure (reported via print); returns without a value
        after successfully mutating ``state``.
    """
    print("ATTACHING PREVIOUS DATA")
    try:
        runner: Runner
        # get batch_id of the current runner
        res, runner = cs.get_runner(state.runner_id)
        if res < 0:
            # on error the second return value carries the error payload, not a Runner
            print(f"Couldnt get previous runner {state.runner_id} error: {runner}")
            return None
        if runner.batch_id is None:
            # a runner without a batch has nothing to attach from
            print(f"No batch_id found for runner {runner.id}")
            return None

        batch_id = runner.batch_id

        res, runner_ids = cs.get_archived_runnerslist_byBatchID(batch_id, "desc")
        if res < 0:
            msg = f"error when fetching runners of batch {batch_id} {runner_ids}"
            print(msg)
            return None

        if runner_ids is None or len(runner_ids) == 0:
            print(f"NO runners found for batch {batch_id} {runner_ids}")
            return None

        # list is ordered descending, so index 0 is the latest archived runner
        last_runner = runner_ids[0]
        print("Previous runner identified:", last_runner)

        # get archived header - it carries the transferables
        runner_header: RunArchive = None
        res, runner_header = cs.get_archived_runner_header_byID(last_runner)
        if res < 0:
            print(f"Error when fetching runner header {last_runner}")
            return None

        state.vars["transferables"] = runner_header.transferables
        print("INITIALIZED transferables", state.vars["transferables"])

        # get details (bars/indicators) from the runner
        print(f"Fetching runner details of {last_runner}")
        res, val = cs.get_archived_runner_details_byID(last_runner)
        if res < 0:
            print(f"no archived runner {last_runner}")
            return None

        detail = RunArchiveDetail(**val)

        if len(detail.bars["time"]) == 0:
            print(f"no bars for runner {last_runner}")
            return None

        # from stratvars directives
        attach_previous_bar_data = safe_get(state.vars, "attach_previous_bar_data", 50)
        attach_previous_tick_data = safe_get(state.vars, "attach_previous_tick_data", None)

        # indicators: "time" converted to UTC datetime
        indicators = slice_dict_lists(d=detail.indicators[0], last_item=attach_previous_bar_data, time_to_datetime=True)

        # bars: "time" -> UTC datetime, "updated" stays a float timestamp
        bars = slice_dict_lists(d=detail.bars, last_item=attach_previous_bar_data, time_to_datetime=True)

        # align tick data with the attached bar data
        if attach_previous_tick_data is None:
            oldest_timestamp = bars["updated"][0]
            # keep only tick values newer than the oldest attached bar
            cbar_inds = filter_timeseries_by_timestamp(detail.indicators[1], oldest_timestamp)
        else:
            cbar_inds = slice_dict_lists(d=detail.indicators[1], last_item=attach_previous_tick_data)

        # use these as inits for the new day's state
        state.indicators = AttributeDict(**indicators)
        print("transfered indicators:", len(state.indicators["time"]))
        state.bars = AttributeDict(bars)
        print("transfered bars:", len(state.bars["time"]))
        state.cbar_indicators = AttributeDict(cbar_inds)
        print("transfered ticks:", len(state.cbar_indicators["time"]))

        print("TRANSFERABLEs INITIALIZED")
        # NOTE(design): transferable state vars (e.g. "martingale", "batch_profit")
        # are mapped into state.vars["transferables"] at init, carried through the
        # day and persisted into the RunArchive "transferables" column at the end.
        # dailyBars from ext_data could be attached here as well:
        # if hasattr(detail, "ext_data") and "dailyBars" in detail.ext_data:
        #     state.dailyBars = detail.ext_data["dailyBars"]
        return
    except Exception as e:
        print(str(e) + format_exc())
        return None
|
||||
# if __name__ == "__main__":
|
||||
# attach_previous_data(state)
|
||||
@ -78,7 +78,7 @@ def execute_signal_generator(state, data, name):
|
||||
last_update=datetime.fromtimestamp(state.time).astimezone(zoneNY),
|
||||
status=TradeStatus.READY,
|
||||
generated_by=name,
|
||||
size=multiplier*state.vars.chunk,
|
||||
size=int(multiplier*state.vars.chunk),
|
||||
size_multiplier = multiplier,
|
||||
direction=TradeDirection.LONG,
|
||||
entry_price=None,
|
||||
@ -90,7 +90,7 @@ def execute_signal_generator(state, data, name):
|
||||
last_update=datetime.fromtimestamp(state.time).astimezone(zoneNY),
|
||||
status=TradeStatus.READY,
|
||||
generated_by=name,
|
||||
size=multiplier*state.vars.chunk,
|
||||
size=int(multiplier*state.vars.chunk),
|
||||
size_multiplier = multiplier,
|
||||
direction=TradeDirection.SHORT,
|
||||
entry_price=None,
|
||||
|
||||
@ -151,13 +151,17 @@ def get_multiplier(state: StrategyState, data, signaloptions: dict, direction: T
|
||||
|
||||
#pocet ztrátových obchodů v řadě mi udává multiplikátor (0 - 1, 1 ztráta 2x, 3 v řadě - 4x atp.)
|
||||
if martingale_enabled:
|
||||
cont_loss_series_cnt = state.vars["martingale"]["cont_loss_series_cnt"]
|
||||
|
||||
#martingale base - základ umocňování - klasicky 2
|
||||
base = float(utls.safe_get(options, "martingale_base", 2))
|
||||
#pocet aktuálních konsekutivních ztrát
|
||||
cont_loss_series_cnt = state.vars["transferables"]["martingale"]["cont_loss_series_cnt"]
|
||||
if cont_loss_series_cnt == 0:
|
||||
multiplier = 1
|
||||
else:
|
||||
multiplier = 2 ** cont_loss_series_cnt
|
||||
multiplier = base ** cont_loss_series_cnt
|
||||
state.ilog(lvl=1,e=f"SIZER - MARTINGALE {multiplier}", options=options, time=state.time, cont_loss_series_cnt=cont_loss_series_cnt)
|
||||
|
||||
|
||||
if (martingale_enabled is False and multiplier > 1) or multiplier <= 0:
|
||||
state.ilog(lvl=1,e=f"SIZER - Mame nekde problem MULTIPLIER mimo RANGE ERROR {multiplier}", options=options, time=state.time)
|
||||
multiplier = 1
|
||||
|
||||
@ -112,21 +112,45 @@ def concatenate_weekdays(weekday_filter):
|
||||
# Concatenate the weekday strings
|
||||
return ','.join(weekday_strings)
|
||||
|
||||
def slice_dict_lists(d, last_item, to_tmstp = False):
|
||||
def filter_timeseries_by_timestamp(timeseries, timestamp):
    """
    Filter a timeseries dictionary, returning a new dictionary with entries
    where the time value is greater than the provided timestamp.

    Parameters:
    - timeseries (dict): The original timeseries dictionary.
    - timestamp (float): The timestamp to filter the timeseries by.

    Returns:
    - dict: A new timeseries dictionary filtered based on the provided timestamp.
    """
    # Boolean mask over the "time" axis: True where the entry is newer
    # than the cutoff timestamp.
    keep = [t > timestamp for t in timeseries['time']]

    # Apply the same mask to every parallel list in the dictionary.
    return {
        key: [item for item, selected in zip(values, keep) if selected]
        for key, values in timeseries.items()
    }
|
||||
|
||||
def slice_dict_lists(d, last_item, to_tmstp=False, time_to_datetime=False):
    """Slices every list in the dictionary to the last last_item items.

    Args:
        d: A dictionary whose values are parallel lists.
        last_item: The number of items to keep at the end of each list.
        to_tmstp: For "time" elements change it from datetime to timestamp if required.
        time_to_datetime: For "time" elements change it from timestamp to datetime UTC if required.

    Returns:
        A new dictionary with the sliced (and possibly converted) lists.
    """
    # lst[-0:] is lst[0:] (the whole list), so last_item == 0 must be
    # handled explicitly to really mean "keep nothing".
    if last_item == 0:
        return {key: [] for key in d}
    sliced_d = {}
    for key in d:
        tail = d[key][-last_item:]
        if key == "time" and to_tmstp:
            sliced_d[key] = [datetime.timestamp(t) for t in tail]
        elif key == "time" and time_to_datetime:
            # timestamps are floats; convert to timezone-aware UTC datetimes
            sliced_d[key] = [datetime.fromtimestamp(t).astimezone(zoneUTC) for t in tail]
        else:
            sliced_d[key] = tail
    return sliced_d
|
||||
@ -654,8 +678,9 @@ def parse_toml_string(tomlst: str):
|
||||
try:
|
||||
tomlst = tomli.loads(tomlst)
|
||||
except tomli.TOMLDecodeError as e:
|
||||
print("Not valid TOML.", str(e))
|
||||
return (-1, None)
|
||||
msg = f"Not valid TOML: " + str(e)
|
||||
richprint(msg)
|
||||
return (-1, msg)
|
||||
return (0, dict_replace_value(tomlst,"None",None))
|
||||
|
||||
#class to persist
|
||||
|
||||
Reference in New Issue
Block a user