Bugfix: remote stop signal, server-side search, GUI fixes

This commit is contained in:
David Brazda
2023-11-25 13:38:43 +01:00
parent 8963a26ed6
commit 04ca9adeac
7 changed files with 198 additions and 89 deletions

View File

@ -8,6 +8,34 @@ from pydantic import BaseModel
from v2realbot.enums.enums import Mode, Account from v2realbot.enums.enums import Mode, Account
from alpaca.data.enums import Exchange from alpaca.data.enums import Exchange
#models for server side datatables
# Model for one column entry of a DataTables server-side request payload.
class ColumnData(BaseModel):
    data: str         # field name this column is bound to in the row data
    name: str         # column name as configured client-side (may be empty)
    searchable: bool  # whether this column participates in searching
    orderable: bool   # whether the client may order by this column
    search: dict      # per-column search spec — presumably {"value": ..., "regex": ...} like SearchValue; verify against client payload
# Model for the global search value sent by DataTables.
class SearchValue(BaseModel):
    value: str   # the search term entered by the user (empty string when no filter)
    regex: bool  # True if the client flagged the term as a regular expression
# One ordering directive from the DataTables request.
class OrderValue(BaseModel):
    column: int  # zero-based index into the request's columns list
    dir: str     # sort direction — presumably "asc"/"desc" per DataTables convention; confirm against client
# Model for an incoming DataTables server-side processing request.
class DataTablesRequest(BaseModel):
    draw: int                  # request sequence counter, echoed back in the paginated response
    start: int                 # paging offset (used as SQL OFFSET by the server)
    length: int                # page size (used as SQL LIMIT by the server)
    search: SearchValue        # global search specification
    order: List[OrderValue]    # requested ordering directives
    columns: List[ColumnData]  # per-column metadata sent by the client
#tu samou variantu pak UpdateStrategyInstanceWhileRunning #tu samou variantu pak UpdateStrategyInstanceWhileRunning
#only those that can be changed UUID id prijde v parametru #only those that can be changed UUID id prijde v parametru

View File

@ -1,4 +1,4 @@
from typing import Any, List from typing import Any, List, Tuple
from uuid import UUID, uuid4 from uuid import UUID, uuid4
import pickle import pickle
from alpaca.data.historical import StockHistoricalDataClient from alpaca.data.historical import StockHistoricalDataClient
@ -7,7 +7,7 @@ from alpaca.data.enums import DataFeed
from alpaca.data.timeframe import TimeFrame from alpaca.data.timeframe import TimeFrame
from v2realbot.strategy.base import StrategyState from v2realbot.strategy.base import StrategyState
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
from v2realbot.utils.utils import AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays from v2realbot.utils.utils import AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays
from v2realbot.utils.ilog import delete_logs from v2realbot.utils.ilog import delete_logs
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
@ -308,14 +308,16 @@ def capsule(target: object, db: object, inter_batch_params: dict = None):
print("Strategy instance stopped. Update runners") print("Strategy instance stopped. Update runners")
reason = None reason = None
# if target.se.is_set(): if target.se.is_set():
# print("STOP FLAG IS SET - cancel BATCH") print("EXTERNAL STOP FLAG IS SET - cancel BATCH")
# inter_batch_params["stop"] = True inter_batch_params["stop"] = True
# reason = "STOP Signal received" reason = "STOP Signal received"
except Exception as e: except Exception as e:
reason = "SHUTDOWN Exception:" + str(e) + format_exc() reason = "SHUTDOWN Exception:" + str(e) + format_exc()
#raise RuntimeError('Exception v runneru POZOR') from e #raise RuntimeError('Exception v runneru POZOR') from e
if inter_batch_params is not None:
inter_batch_params["error"] = reason
print(str(e)) print(str(e))
print(reason) print(reason)
send_to_telegram(reason) send_to_telegram(reason)
@ -521,7 +523,10 @@ def batch_run_manager(id: UUID, runReq: RunRequest, rundays: list[RunDay]):
#protoze jsme v ridicim vlaknu, poustime za sebou jednotlive stratiny v synchronnim modu #protoze jsme v ridicim vlaknu, poustime za sebou jednotlive stratiny v synchronnim modu
res, id_val = run_stratin(id=id, runReq=runReq, synchronous=True, inter_batch_params=inter_batch_params) res, id_val = run_stratin(id=id, runReq=runReq, synchronous=True, inter_batch_params=inter_batch_params)
if res < 0: if res < 0:
print(f"CHyba v runu #{cnt} od:{runReq.bt_from} do {runReq.bt_to} -> {id_val}") msg = f"Chyba v runu #{cnt} od:{runReq.bt_from} do {runReq.bt_to} -> {id_val}"
if inter_batch_params is not None:
inter_batch_params["error"] = msg
print(msg)
break break
if "stop" in inter_batch_params and inter_batch_params["stop"] is True: if "stop" in inter_batch_params and inter_batch_params["stop"] is True:
@ -976,29 +981,79 @@ def get_all_archived_runners() -> list[RunArchiveView]:
return 0, results return 0, results
#with pagination #with pagination
def get_all_archived_runners_p(start: int, length: int, draw: int) -> list[RunArchiveViewPagination]: # def get_all_archived_runners_p(request: DataTablesRequest) -> list[RunArchiveViewPagination]:
# conn = pool.get_connection()
# try:
# conn.row_factory = Row
# c = conn.cursor()
# # Query to get the total count of records
# total_count_query = "SELECT COUNT(*) FROM runner_header"
# c.execute(total_count_query)
# total_count = c.fetchone()[0]
# # Query to get the paginated data
# paginated_query = f"""
# SELECT runner_id, strat_id, batch_id, symbol, name, note, started,
# stopped, mode, account, bt_from, bt_to, ilog_save, profit,
# trade_count, end_positions, end_positions_avgp, metrics
# FROM runner_header
# ORDER BY stopped DESC
# LIMIT {request.length} OFFSET {request.start}
# """
# c.execute(paginated_query)
# rows = c.fetchall()
# results = [row_to_runarchiveview(row) for row in rows]
# finally:
# conn.row_factory = None
# pool.release_connection(conn)
# try:
# obj = RunArchiveViewPagination(draw=request.draw,recordsTotal=total_count, recordsFiltered=total_count,data=results)
# return 0, obj
# except Exception as e:
# return -2, str(e)+format_exc()
#new version to support search and ordering
#TODO index nad strat_id a batch_id mam?
def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArchiveViewPagination]:
conn = pool.get_connection() conn = pool.get_connection()
search_value = request.search.value # Extract the search value from the request
try: try:
conn.row_factory = Row conn.row_factory = Row
c = conn.cursor() c = conn.cursor()
# Query to get the total count of records # Modify the total count query to include the search filter if search_value is provided
total_count_query = "SELECT COUNT(*) FROM runner_header" total_count_query = """
c.execute(total_count_query) SELECT COUNT(*) FROM runner_header
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
"""
c.execute(total_count_query, {'search_value': f'%{search_value}%'})
total_count = c.fetchone()[0] total_count = c.fetchone()[0]
# Query to get the paginated data # Modify the paginated query to include the search filter if search_value is provided
paginated_query = f""" paginated_query = f"""
SELECT runner_id, strat_id, batch_id, symbol, name, note, started, SELECT runner_id, strat_id, batch_id, symbol, name, note, started,
stopped, mode, account, bt_from, bt_to, ilog_save, profit, stopped, mode, account, bt_from, bt_to, ilog_save, profit,
trade_count, end_positions, end_positions_avgp, metrics trade_count, end_positions, end_positions_avgp, metrics
FROM runner_header FROM runner_header
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
ORDER BY stopped DESC ORDER BY stopped DESC
LIMIT {length} OFFSET {start} LIMIT {request.length} OFFSET {request.start}
""" """
c.execute(paginated_query) c.execute(paginated_query, {'search_value': f'%{search_value}%'})
rows = c.fetchall() rows = c.fetchall()
# Filtered count might be different from total count when search is applied
filtered_count_query = """
SELECT COUNT(*) FROM runner_header
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
"""
c.execute(filtered_count_query, {'search_value': f'%{search_value}%'})
filtered_count = c.fetchone()[0]
results = [row_to_runarchiveview(row) for row in rows] results = [row_to_runarchiveview(row) for row in rows]
finally: finally:
@ -1006,27 +1061,12 @@ def get_all_archived_runners_p(start: int, length: int, draw: int) -> list[RunAr
pool.release_connection(conn) pool.release_connection(conn)
try: try:
obj = RunArchiveViewPagination(draw=draw,recordsTotal=total_count, recordsFiltered=total_count,data=results) obj = RunArchiveViewPagination(draw=request.draw, recordsTotal=total_count, recordsFiltered=filtered_count, data=results)
return 0, obj return 0, obj
except Exception as e: except Exception as e:
return -2, str(e) + format_exc() return -2, str(e) + format_exc()
conn = pool.get_connection()
try:
conn.row_factory = Row
c = conn.cursor()
c.execute(f"SELECT runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics FROM runner_header")
rows = c.fetchall()
results = []
for row in rows:
results.append(row_to_runarchiveview(row))
finally:
conn.row_factory = None
pool.release_connection(conn)
return 0, results
#DECOMMS #DECOMMS
# def get_all_archived_runners(): # def get_all_archived_runners():
# conn = pool.get_connection() # conn = pool.get_connection()

View File

@ -11,7 +11,7 @@ import uvicorn
from uuid import UUID from uuid import UUID
import v2realbot.controller.services as cs import v2realbot.controller.services as cs
from v2realbot.utils.ilog import get_log_window from v2realbot.utils.ilog import get_log_window
from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query
from fastapi.responses import FileResponse, StreamingResponse from fastapi.responses import FileResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles from fastapi.staticfiles import StaticFiles
@ -426,9 +426,11 @@ def _get_all_archived_runners() -> list[RunArchiveView]:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found") raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
#get all archived runners headers - just RunArchiveView - with pagination #get all archived runners headers - just RunArchiveView - with pagination
@app.get("/archived_runners_p/", dependencies=[Depends(api_key_auth)]) @app.post("/archived_runners_p/", dependencies=[Depends(api_key_auth)])
def _get_all_archived_runners_p(start: int = 0, length: int = 10, draw: int = 1) -> RunArchiveViewPagination: def _get_all_archived_runners_p(req: DataTablesRequest) -> RunArchiveViewPagination:
res, set =cs.get_all_archived_runners_p(start, length, draw) #print(req)
#DataTablesRequest
res, set =cs.get_all_archived_runners_p(req)
if res == 0: if res == 0:
return set return set
else: else:

View File

@ -921,15 +921,16 @@ var archiveRecords =
$('#archiveTable').DataTable( { $('#archiveTable').DataTable( {
ajax: { ajax: {
url: '/archived_runners_p/', url: '/archived_runners_p/',
// dataSrc: 'data', dataSrc: 'data',
method:"POST",
contentType: "application/json",
// dataType: "json",
beforeSend: function (xhr) { beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key', xhr.setRequestHeader('X-API-Key',
API_KEY); }, API_KEY); },
// data: function(d) { data: function (d) {
// d.start = d.start; return JSON.stringify(d);
// d.length = d.length; },
// d.draw = d.draw;
// },
error: function(xhr, status, error) { error: function(xhr, status, error) {
//var err = eval("(" + xhr.responseText + ")"); //var err = eval("(" + xhr.responseText + ")");
//window.alert(JSON.stringify(xhr)); //window.alert(JSON.stringify(xhr));
@ -1125,11 +1126,29 @@ var archiveRecords =
// } // }
//} //}
// Add row grouping based on 'batch_id' // Add row grouping based on 'batch_id'
//TODO projit a zrevidovat - pripadne optimalizovat
//NOTE zde jse skoncil
rowGroup: { rowGroup: {
dataSrc: 'batch_id', dataSrc: 'batch_id',
//toto je volano pri renderovani groupy //toto je volano pri renderovani headeru grupy
startRender: function (rows, group) { startRender: function (rows, group) {
var groupId = group ? group : 'no-batch-id'; var firstRowData = rows.data()[0];
//pro no-batch-id je idcko prvni id
var groupId = group ? group : 'no-batch-id-' + firstRowData.id;
var stateKey = 'dt-group-state-' + groupId;
var state = localStorage.getItem(stateKey);
// Iterate over each row in the group to set the data attribute
// zaroven pro kazdy node nastavime viditelnost podle nastaveni
rows.every(function (rowIdx, tableLoop, rowLoop) {
var rowNode = $(this.node());
rowNode.attr('data-group-name', groupId);
if (state == 'collapsed') {
rowNode.hide();
} else {
rowNode.show();
}
});
// Initialize variables for the group // Initialize variables for the group
var itemCount = 0; var itemCount = 0;
@ -1163,7 +1182,6 @@ var archiveRecords =
//pokud nemame tak si ho loadneme //pokud nemame tak si ho loadneme
if (group) { if (group) {
const existingBatch = batchHeaders.find(batch => batch.batch_id == group); const existingBatch = batchHeaders.find(batch => batch.batch_id == group);
var firstRowData = rows.data()[0];
//jeste neni v poli batchu - udelame hlavicku //jeste neni v poli batchu - udelame hlavicku
if (!existingBatch) { if (!existingBatch) {
itemCount = extractNumbersFromString(firstRowData.note); itemCount = extractNumbersFromString(firstRowData.note);
@ -1193,37 +1211,47 @@ var archiveRecords =
} }
} }
// Construct the group header //zaroven nastavime u vsech childu
// Construct the GROUP HEADER - sem pripadna tlačítka atp.
//var groupHeaderContent = '<strong>' + (group ? 'Batch ID: ' + group : 'No Batch') + '</strong>';
var groupHeaderContent = '<strong>' + (group ? 'Batch ID: ' + group : 'No Batch')+'</strong>'; var groupHeaderContent = '<strong>' + (group ? 'Batch ID: ' + group : 'No Batch')+'</strong>';
groupHeaderContent += (group ? ' <span class="batchheader-count-info">(' + itemCount + ')</span>' + ' <span class="batchheader-period-info">' + period + '</span> <span class="batchheader-profit-info">Profit: ' + profit + '</span>' : ''); groupHeaderContent += (group ? ' <span class="batchheader-count-info">(' + itemCount + ')</span>' + ' <span class="batchheader-period-info">' + period + '</span> <span class="batchheader-profit-info">Profit: ' + profit + '</span>' : '');
return $('<tr/>') return $('<tr/>')
.append('<td colspan="18">' + groupHeaderContent + '</td>') .append('<td colspan="18">' + groupHeaderContent + '</td>')
.attr('data-name', groupId) .attr('data-name', groupId)
.addClass('group-header collapsed'); .addClass('group-header')
.addClass(state);
} }
}, },
drawCallback: function (settings) { // drawCallback: function (settings) {
var api = this.api(); // var api = this.api();
var rows = api.rows({ page: 'current' }).nodes(); // var rows = api.rows({ page: 'current' }).nodes();
// Iterate over all rows in the current page // api.column(17, { page: 'current' }).data().each(function (group, i) {
api.column(17, { page: 'current' }).data().each(function (group, i) { // console.log("drawCallabck i",i)
var groupName = group ? group : 'no-batch-id'; // console.log("rows", $(rows).eq(i))
var stateKey = 'dt-group-state-' + groupName; // var groupName = group ? group : $(rows).eq(i).attr('data-name');
var state = localStorage.getItem(stateKey); // console.log("groupName", groupName)
// var stateKey = 'dt-group-state-' + groupName;
// var state = localStorage.getItem(stateKey);
if (state === 'collapsed') { // if (state === 'collapsed') {
// Hide all rows in the collapsed group // $(rows).eq(i).hide();
$(rows).eq(i).hide(); // } else {
$('tr[data-name="' + groupName + '"]').addClass('collapsed'); // $(rows).eq(i).show();
} else { // }
// Show all rows in the expanded group
$(rows).eq(i).show(); // // Set the unique identifier as a data attribute on each row
$('tr[data-name="' + groupName + '"]').removeClass('collapsed'); // //$(rows).eq(i).attr('data-group-name', groupName);
}
}); // // // Add or remove the 'collapsed' class based on the state
} // // if (groupName.startsWith('no-batch-id-')) {
// // $('tr[data-name="' + groupName + '"]').toggleClass('collapsed', state === 'collapsed');
// // }
// });
// }
}); });
function extractNumbersFromString(str) { function extractNumbersFromString(str) {
@ -1250,29 +1278,31 @@ function generateStorageKey(batchId) {
// Expand/Collapse functionality // Expand/Collapse functionality
$('#archiveTable tbody').on('click', 'tr.group-header', function () { $('#archiveTable tbody').on('click', 'tr.group-header', function () {
var name = $(this).data('name'); var headerRow = $(this);
var collapsed = $(this).hasClass('collapsed'); var name = headerRow.data('name');
$(this).toggleClass('collapsed'); var collapsed = headerRow.hasClass('collapsed');
headerRow.toggleClass('collapsed');
archiveRecords.rows().every(function () { archiveRecords.rows().every(function () {
var rowGroup = this.data().batch_id ? this.data().batch_id : 'no-batch-id'; var row = $(this.node());
var rowGroup = row.attr('data-group-name');
if (rowGroup == name) { if (rowGroup == name) {
if (collapsed) { row.toggle();
this.node().style.display = '';
} else {
this.node().style.display = 'none';
}
} }
}); });
// Save the state // Save the state
if (collapsed) { if (collapsed) {
localStorage.setItem(generateStorageKey(name), 'expanded'); localStorage.setItem('dt-group-state-' + name, 'expanded');
} else { } else {
localStorage.setItem(generateStorageKey(name), 'collapsed'); localStorage.setItem('dt-group-state-' + name, 'collapsed');
} }
}); });
//WIP buttons to hide datatable columns //WIP buttons to hide datatable columns
// document.querySelectorAll('a.toggle-vis').forEach((el) => { // document.querySelectorAll('a.toggle-vis').forEach((el) => {
// el.addEventListener('click', function (e) { // el.addEventListener('click', function (e) {

View File

@ -42,14 +42,14 @@ class StrategyClassicSL(Strategy):
self.state.ilog(e=f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") self.state.ilog(e=f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.state.vars.pending = "max_sum_profit_to_quit_rel" self.state.vars.pending = "max_sum_profit_to_quit_rel"
send_to_telegram(f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") send_to_telegram(f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.se.set() self.signal_stop = True
return True return True
if max_sum_loss_to_quit_rel is not None: if max_sum_loss_to_quit_rel is not None:
if rel_profit < 0 and rel_profit <= float(max_sum_loss_to_quit_rel): if rel_profit < 0 and rel_profit <= float(max_sum_loss_to_quit_rel):
self.state.ilog(e=f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") self.state.ilog(e=f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.state.vars.pending = "max_sum_loss_to_quit_rel" self.state.vars.pending = "max_sum_loss_to_quit_rel"
send_to_telegram(f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") send_to_telegram(f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.se.set() self.signal_stop = True
return True return True
if max_sum_profit_to_quit is not None: if max_sum_profit_to_quit is not None:
@ -57,14 +57,14 @@ class StrategyClassicSL(Strategy):
self.state.ilog(e=f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") self.state.ilog(e=f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.state.vars.pending = "max_sum_profit_to_quit" self.state.vars.pending = "max_sum_profit_to_quit"
send_to_telegram(f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") send_to_telegram(f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.se.set() self.signal_stop = True
return True return True
if max_sum_loss_to_quit is not None: if max_sum_loss_to_quit is not None:
if float(self.state.profit) < 0 and float(self.state.profit) <= float(max_sum_loss_to_quit): if float(self.state.profit) < 0 and float(self.state.profit) <= float(max_sum_loss_to_quit):
self.state.ilog(e=f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") self.state.ilog(e=f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.state.vars.pending = "max_sum_loss_to_quit" self.state.vars.pending = "max_sum_loss_to_quit"
send_to_telegram(f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}") send_to_telegram(f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
self.se.set() self.signal_stop = True
return True return True
return False return False

View File

@ -78,6 +78,8 @@ class Strategy:
#pause event and end event #pause event and end event
self.pe = pe self.pe = pe
self.se = se self.se = se
#signal stop - internal
self.signal_stop = False
#prdelat queue na dynamic - podle toho jak bud uchtit pracovat s multiresolutions #prdelat queue na dynamic - podle toho jak bud uchtit pracovat s multiresolutions
#zatim jen jedna q1 #zatim jen jedna q1
@ -429,9 +431,13 @@ class Strategy:
item = self.q1.get(timeout=HEARTBEAT_TIMEOUT) item = self.q1.get(timeout=HEARTBEAT_TIMEOUT)
#printnow(current_thread().name, "Items waiting in queue:", self.q1.qsize()) #printnow(current_thread().name, "Items waiting in queue:", self.q1.qsize())
except queue.Empty: except queue.Empty:
#check signals #check internal signals - for profit/loss optim etc - valid for runner
if self.signal_stop:
print(current_thread().name, "Stopping signal - internal")
break
#check signals - stops also batch
if self.se.is_set(): if self.se.is_set():
print(current_thread().name, "Stopping signal") print(current_thread().name, "External stopping signal")
break break
if self.pe.is_set(): if self.pe.is_set():
print(current_thread().name, "Paused.") print(current_thread().name, "Paused.")
@ -443,6 +449,9 @@ class Strategy:
if item == "last" or self.se.is_set(): if item == "last" or self.se.is_set():
print(current_thread().name, "stopping") print(current_thread().name, "stopping")
break break
elif self.signal_stop:
print(current_thread().name, "Stopping signal - internal")
break
elif self.pe.is_set(): elif self.pe.is_set():
print(current_thread().name, "Paused.") print(current_thread().name, "Paused.")
continue continue

View File

@ -35,7 +35,7 @@ def concatenate_weekdays(weekday_filter):
weekday_strings = [weekdays[day] for day in weekday_filter] weekday_strings = [weekdays[day] for day in weekday_filter]
# Concatenate the weekday strings # Concatenate the weekday strings
return '-'.join(weekday_strings) return ','.join(weekday_strings)
def slice_dict_lists(d, last_item, to_tmstp = False): def slice_dict_lists(d, last_item, to_tmstp = False):
"""Slices every list in the dictionary to the last last_item items. """Slices every list in the dictionary to the last last_item items.