bugfix: remote stop signal, server-side search, GUI fixes
@@ -8,6 +8,34 @@ from pydantic import BaseModel
from v2realbot.enums.enums import Mode, Account
from alpaca.data.enums import Exchange


# models for server-side DataTables
# Model for individual column data
class ColumnData(BaseModel):
    data: str
    name: str
    searchable: bool
    orderable: bool
    search: dict

# Model for the search value
class SearchValue(BaseModel):
    value: str
    regex: bool

class OrderValue(BaseModel):
    column: int
    dir: str

# Model for the incoming DataTables request
class DataTablesRequest(BaseModel):
    draw: int
    start: int
    length: int
    search: SearchValue
    order: List[OrderValue]
    columns: List[ColumnData]

# the same variant then for UpdateStrategyInstanceWhileRunning

# only those that can be changed; the UUID id comes in as a parameter
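For orientation, a minimal sketch (not part of the commit) of the DataTables-style payload these models are meant to validate; it assumes the models above are in scope, and all field values are illustrative:

# Illustrative only - a DataTables-style request body shaped for the models above.
sample = {
    "draw": 1,
    "start": 0,
    "length": 10,
    "search": {"value": "batch", "regex": False},
    "order": [{"column": 7, "dir": "desc"}],
    "columns": [
        {"data": "strat_id", "name": "strat_id",
         "searchable": True, "orderable": True, "search": {}},
    ],
}
req = DataTablesRequest(**sample)   # raises pydantic.ValidationError on malformed input
assert req.search.value == "batch"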
@@ -1,4 +1,4 @@
from typing import Any, List
from typing import Any, List, Tuple
from uuid import UUID, uuid4
import pickle
from alpaca.data.historical import StockHistoricalDataClient
@@ -7,7 +7,7 @@ from alpaca.data.enums import DataFeed
from alpaca.data.timeframe import TimeFrame
from v2realbot.strategy.base import StrategyState
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
from v2realbot.utils.utils import AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays
from v2realbot.utils.ilog import delete_logs
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
@@ -308,14 +308,16 @@ def capsule(target: object, db: object, inter_batch_params: dict = None):
        print("Strategy instance stopped. Update runners")
        reason = None

        # if target.se.is_set():
        #     print("STOP FLAG IS SET - cancel BATCH")
        #     inter_batch_params["stop"] = True
        #     reason = "STOP Signal received"
        if target.se.is_set():
            print("EXTERNAL STOP FLAG IS SET - cancel BATCH")
            inter_batch_params["stop"] = True
            reason = "STOP Signal received"

    except Exception as e:
        reason = "SHUTDOWN Exception:" + str(e) + format_exc()
        # raise RuntimeError('Exception in runner, ATTENTION') from e
        if inter_batch_params is not None:
            inter_batch_params["error"] = reason
        print(str(e))
        print(reason)
        send_to_telegram(reason)
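For context, the batch cancellation above rides on a shared dict that the control thread inspects after each run; a minimal sketch of that pattern (run_once is a hypothetical stand-in for run_stratin):

import threading

# Sketch of the shared-dict stop propagation; `run_once` is hypothetical.
def run_once(stop_event: threading.Event, inter_batch_params: dict) -> None:
    if stop_event.is_set():                 # external stop flag (target.se)
        inter_batch_params["stop"] = True   # propagated to the batch manager
        inter_batch_params["error"] = "STOP Signal received"

def batch_manager(rundays: list, stop_event: threading.Event) -> None:
    inter_batch_params: dict = {}
    for day in rundays:
        run_once(stop_event, inter_batch_params)
        if inter_batch_params.get("stop") is True:
            break                           # cancel the remaining runs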
@@ -521,7 +523,10 @@ def batch_run_manager(id: UUID, runReq: RunRequest, rundays: list[RunDay]):
        # we are in the control thread, so we run the individual strategy instances one after another in synchronous mode
        res, id_val = run_stratin(id=id, runReq=runReq, synchronous=True, inter_batch_params=inter_batch_params)
        if res < 0:
            print(f"Error in run #{cnt} from:{runReq.bt_from} to:{runReq.bt_to} -> {id_val}")
            msg = f"Error in run #{cnt} from:{runReq.bt_from} to:{runReq.bt_to} -> {id_val}"
            if inter_batch_params is not None:
                inter_batch_params["error"] = msg
            print(msg)
            break

        if "stop" in inter_batch_params and inter_batch_params["stop"] is True:
@@ -976,29 +981,79 @@ def get_all_archived_runners() -> list[RunArchiveView]:
    return 0, results

# with pagination
def get_all_archived_runners_p(start: int, length: int, draw: int) -> list[RunArchiveViewPagination]:
# def get_all_archived_runners_p(request: DataTablesRequest) -> list[RunArchiveViewPagination]:
#     conn = pool.get_connection()
#     try:
#         conn.row_factory = Row
#         c = conn.cursor()

#         # Query to get the total count of records
#         total_count_query = "SELECT COUNT(*) FROM runner_header"
#         c.execute(total_count_query)
#         total_count = c.fetchone()[0]

#         # Query to get the paginated data
#         paginated_query = f"""
#             SELECT runner_id, strat_id, batch_id, symbol, name, note, started,
#                    stopped, mode, account, bt_from, bt_to, ilog_save, profit,
#                    trade_count, end_positions, end_positions_avgp, metrics
#             FROM runner_header
#             ORDER BY stopped DESC
#             LIMIT {request.length} OFFSET {request.start}
#         """
#         c.execute(paginated_query)
#         rows = c.fetchall()

#         results = [row_to_runarchiveview(row) for row in rows]

#     finally:
#         conn.row_factory = None
#         pool.release_connection(conn)

#     try:
#         obj = RunArchiveViewPagination(draw=request.draw, recordsTotal=total_count, recordsFiltered=total_count, data=results)
#         return 0, obj
#     except Exception as e:
#         return -2, str(e)+format_exc()
# new version to support search and ordering
# TODO do I have an index on strat_id and batch_id?
def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArchiveViewPagination]:
    conn = pool.get_connection()
    search_value = request.search.value  # Extract the search value from the request
    try:
        conn.row_factory = Row
        c = conn.cursor()

        # Query to get the total count of records
        total_count_query = "SELECT COUNT(*) FROM runner_header"
        c.execute(total_count_query)
        # Modify the total count query to include the search filter if search_value is provided
        total_count_query = """
            SELECT COUNT(*) FROM runner_header
            WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
        """
        c.execute(total_count_query, {'search_value': f'%{search_value}%'})
        total_count = c.fetchone()[0]

        # Query to get the paginated data
        # Modify the paginated query to include the search filter if search_value is provided
        paginated_query = f"""
            SELECT runner_id, strat_id, batch_id, symbol, name, note, started,
                   stopped, mode, account, bt_from, bt_to, ilog_save, profit,
                   trade_count, end_positions, end_positions_avgp, metrics
            FROM runner_header
            WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
            ORDER BY stopped DESC
            LIMIT {length} OFFSET {start}
            LIMIT {request.length} OFFSET {request.start}
        """
        c.execute(paginated_query)
        c.execute(paginated_query, {'search_value': f'%{search_value}%'})
        rows = c.fetchall()

        # Filtered count might be different from total count when search is applied
        filtered_count_query = """
            SELECT COUNT(*) FROM runner_header
            WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
        """
        c.execute(filtered_count_query, {'search_value': f'%{search_value}%'})
        filtered_count = c.fetchone()[0]

        results = [row_to_runarchiveview(row) for row in rows]

    finally:
@@ -1006,25 +1061,10 @@ def get_all_archived_runners_p(start: int, length: int, draw: int) -> list[RunAr
        pool.release_connection(conn)

    try:
        obj = RunArchiveViewPagination(draw=draw,recordsTotal=total_count, recordsFiltered=total_count,data=results)
        obj = RunArchiveViewPagination(draw=request.draw, recordsTotal=total_count, recordsFiltered=filtered_count, data=results)
        return 0, obj
    except Exception as e:
        return -2, str(e)+format_exc()


    conn = pool.get_connection()
    try:
        conn.row_factory = Row
        c = conn.cursor()
        c.execute(f"SELECT runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics FROM runner_header")
        rows = c.fetchall()
        results = []
        for row in rows:
            results.append(row_to_runarchiveview(row))
    finally:
        conn.row_factory = None
        pool.release_connection(conn)
    return 0, results
    return -2, str(e) + format_exc()


# DECOMMS
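As an aside, the `:search_value = '' OR ... LIKE :search_value` idiom above can be exercised standalone; a minimal sqlite3 sketch (not part of the commit, table and data are hypothetical):

import sqlite3

# Standalone illustration of the named-parameter search filter used above.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE runner_header (strat_id TEXT, batch_id TEXT)")
conn.executemany("INSERT INTO runner_header VALUES (?, ?)",
                 [("alpha", "b1"), ("beta", "b2")])

query = """
    SELECT COUNT(*) FROM runner_header
    WHERE (:search_value = '' OR strat_id LIKE :search_value
           OR batch_id LIKE :search_value)
"""
# The caller wraps the raw value in %...% , so an empty search becomes
# LIKE '%%' and matches every row; a non-empty value does a substring match.
for sv in ("", "alp"):
    n = conn.execute(query, {"search_value": f"%{sv}%"}).fetchone()[0]
    print(repr(sv), "->", n)   # '' -> 2, 'alp' -> 1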
@@ -11,7 +11,7 @@ import uvicorn
from uuid import UUID
import v2realbot.controller.services as cs
from v2realbot.utils.ilog import get_log_window
from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator
from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query
from fastapi.responses import FileResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
@@ -426,9 +426,11 @@ def _get_all_archived_runners() -> list[RunArchiveView]:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")

# get all archived runners headers - just RunArchiveView - with pagination
@app.get("/archived_runners_p/", dependencies=[Depends(api_key_auth)])
def _get_all_archived_runners_p(start: int = 0, length: int = 10, draw: int = 1) -> RunArchiveViewPagination:
    res, set = cs.get_all_archived_runners_p(start, length, draw)
@app.post("/archived_runners_p/", dependencies=[Depends(api_key_auth)])
def _get_all_archived_runners_p(req: DataTablesRequest) -> RunArchiveViewPagination:
    #print(req)
    #DataTablesRequest
    res, set = cs.get_all_archived_runners_p(req)
    if res == 0:
        return set
    else:
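For reference, the endpoint is now POST with a JSON body, so a client call would look roughly like the sketch below (not part of the commit); the host, port, and API key are placeholders:

import requests

# Hypothetical client call to the new POST endpoint.
payload = {
    "draw": 1, "start": 0, "length": 10,
    "search": {"value": "", "regex": False},
    "order": [], "columns": [],
}
resp = requests.post(
    "http://localhost:8000/archived_runners_p/",   # placeholder host/port
    json=payload,
    headers={"X-API-Key": "<your-api-key>"},       # same header the GUI sends
)
resp.raise_for_status()
page = resp.json()
print(page["recordsTotal"], page["recordsFiltered"], len(page["data"]))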
@@ -921,15 +921,16 @@ var archiveRecords =
$('#archiveTable').DataTable( {
    ajax: {
        url: '/archived_runners_p/',
        // dataSrc: 'data',
        dataSrc: 'data',
        method: "POST",
        contentType: "application/json",
        // dataType: "json",
        beforeSend: function (xhr) {
            xhr.setRequestHeader('X-API-Key',
                API_KEY); },
        // data: function(d) {
        //     d.start = d.start;
        //     d.length = d.length;
        //     d.draw = d.draw;
        // },
        data: function (d) {
            return JSON.stringify(d);
        },
        error: function(xhr, status, error) {
            //var err = eval("(" + xhr.responseText + ")");
            //window.alert(JSON.stringify(xhr));
@@ -1125,12 +1126,30 @@ var archiveRecords =
    // }
    //}
    // Add row grouping based on 'batch_id'
    // TODO go through and review - optimize if needed
    // NOTE stopped here
    rowGroup: {
        dataSrc: 'batch_id',
        // this is called when rendering the group
        // this is called when rendering the group header
        startRender: function (rows, group) {
            var groupId = group ? group : 'no-batch-id';

            var firstRowData = rows.data()[0];
            // for no-batch-id the id is taken from the first row
            var groupId = group ? group : 'no-batch-id-' + firstRowData.id;
            var stateKey = 'dt-group-state-' + groupId;
            var state = localStorage.getItem(stateKey);

            // Iterate over each row in the group to set the data attribute
            // and set each node's visibility according to the stored setting
            rows.every(function (rowIdx, tableLoop, rowLoop) {
                var rowNode = $(this.node());
                rowNode.attr('data-group-name', groupId);
                if (state == 'collapsed') {
                    rowNode.hide();
                } else {
                    rowNode.show();
                }
            });

            // Initialize variables for the group
            var itemCount = 0;
            var period = '';
@@ -1163,7 +1182,6 @@ var archiveRecords =
            // if we don't have it yet, load it
            if (group) {
                const existingBatch = batchHeaders.find(batch => batch.batch_id == group);
                var firstRowData = rows.data()[0];
                // not yet in the batch array - build the header
                if (!existingBatch) {
                    itemCount = extractNumbersFromString(firstRowData.note);
@@ -1193,37 +1211,47 @@ var archiveRecords =
                }
            }

            // Construct the group header
            var groupHeaderContent = '<strong>' + (group ? 'Batch ID: ' + group : 'No Batch') + '</strong>';
            // and set it on all children as well

            // Construct the GROUP HEADER - any buttons etc. go here
            //var groupHeaderContent = '<strong>' + (group ? 'Batch ID: ' + group : 'No Batch') + '</strong>';
            var groupHeaderContent = '<strong>' + (group ? 'Batch ID: ' + group : 'No Batch')+'</strong>';
            groupHeaderContent += (group ? ' <span class="batchheader-count-info">(' + itemCount + ')</span>' + ' <span class="batchheader-period-info">' + period + '</span> <span class="batchheader-profit-info">Profit: ' + profit + '</span>' : '');

            return $('<tr/>')
                .append('<td colspan="18">' + groupHeaderContent + '</td>')
                .attr('data-name', groupId)
                .addClass('group-header collapsed');
                .addClass('group-header')
                .addClass(state);
        }
    },
    drawCallback: function (settings) {
        var api = this.api();
        var rows = api.rows({ page: 'current' }).nodes();

        // Iterate over all rows in the current page
        api.column(17, { page: 'current' }).data().each(function (group, i) {
            var groupName = group ? group : 'no-batch-id';
            var stateKey = 'dt-group-state-' + groupName;
            var state = localStorage.getItem(stateKey);

            if (state === 'collapsed') {
                // Hide all rows in the collapsed group
                $(rows).eq(i).hide();
                $('tr[data-name="' + groupName + '"]').addClass('collapsed');
            } else {
                // Show all rows in the expanded group
                $(rows).eq(i).show();
                $('tr[data-name="' + groupName + '"]').removeClass('collapsed');
            }
        });
    }
    // drawCallback: function (settings) {
    //     var api = this.api();
    //     var rows = api.rows({ page: 'current' }).nodes();

    //     api.column(17, { page: 'current' }).data().each(function (group, i) {
    //         console.log("drawCallback i", i)
    //         console.log("rows", $(rows).eq(i))
    //         var groupName = group ? group : $(rows).eq(i).attr('data-name');
    //         console.log("groupName", groupName)
    //         var stateKey = 'dt-group-state-' + groupName;
    //         var state = localStorage.getItem(stateKey);

    //         if (state === 'collapsed') {
    //             $(rows).eq(i).hide();
    //         } else {
    //             $(rows).eq(i).show();
    //         }

    //         // Set the unique identifier as a data attribute on each row
    //         //$(rows).eq(i).attr('data-group-name', groupName);

    //         // // Add or remove the 'collapsed' class based on the state
    //         // if (groupName.startsWith('no-batch-id-')) {
    //         //     $('tr[data-name="' + groupName + '"]').toggleClass('collapsed', state === 'collapsed');
    //         // }
    //     });
    // }
});

function extractNumbersFromString(str) {
@@ -1250,29 +1278,31 @@ function generateStorageKey(batchId) {

// Expand/Collapse functionality
$('#archiveTable tbody').on('click', 'tr.group-header', function () {
    var name = $(this).data('name');
    var collapsed = $(this).hasClass('collapsed');
    $(this).toggleClass('collapsed');
    var headerRow = $(this);
    var name = headerRow.data('name');
    var collapsed = headerRow.hasClass('collapsed');
    headerRow.toggleClass('collapsed');

    archiveRecords.rows().every(function () {
        var rowGroup = this.data().batch_id ? this.data().batch_id : 'no-batch-id';
        var row = $(this.node());
        var rowGroup = row.attr('data-group-name');
        if (rowGroup == name) {
            if (collapsed) {
                this.node().style.display = '';
            } else {
                this.node().style.display = 'none';
            }
            row.toggle();
        }
    });

    // Save the state
    if (collapsed) {
        localStorage.setItem(generateStorageKey(name), 'expanded');
        localStorage.setItem('dt-group-state-' + name, 'expanded');
    } else {
        localStorage.setItem(generateStorageKey(name), 'collapsed');
        localStorage.setItem('dt-group-state-' + name, 'collapsed');
    }

});



// WIP buttons to hide datatable columns
// document.querySelectorAll('a.toggle-vis').forEach((el) => {
//     el.addEventListener('click', function (e) {
@@ -42,14 +42,14 @@ class StrategyClassicSL(Strategy):
                self.state.ilog(e=f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.state.vars.pending = "max_sum_profit_to_quit_rel"
                send_to_telegram(f"QUITTING MAX SUM REL PROFIT REACHED {max_sum_profit_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.se.set()
                self.signal_stop = True
                return True
        if max_sum_loss_to_quit_rel is not None:
            if rel_profit < 0 and rel_profit <= float(max_sum_loss_to_quit_rel):
                self.state.ilog(e=f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.state.vars.pending = "max_sum_loss_to_quit_rel"
                send_to_telegram(f"QUITTING MAX SUM REL LOSS REACHED {max_sum_loss_to_quit_rel=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.se.set()
                self.signal_stop = True
                return True

        if max_sum_profit_to_quit is not None:
@@ -57,14 +57,14 @@ class StrategyClassicSL(Strategy):
                self.state.ilog(e=f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.state.vars.pending = "max_sum_profit_to_quit"
                send_to_telegram(f"QUITTING MAX SUM ABS PROFIT REACHED {max_sum_profit_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.se.set()
                self.signal_stop = True
                return True
        if max_sum_loss_to_quit is not None:
            if float(self.state.profit) < 0 and float(self.state.profit) <= float(max_sum_loss_to_quit):
                self.state.ilog(e=f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.state.vars.pending = "max_sum_loss_to_quit"
                send_to_telegram(f"QUITTING MAX SUM ABS LOSS REACHED {max_sum_loss_to_quit=} {self.state.profit=} {rel_profit=} relprofits:{str(self.state.rel_profit_cum)}")
                self.se.set()
                self.signal_stop = True
                return True

        return False
@@ -78,6 +78,8 @@ class Strategy:
        # pause event and stop event
        self.pe = pe
        self.se = se
        # stop signal - internal
        self.signal_stop = False

        # TODO rework the queue to be dynamic, depending on how we want to work with multi-resolutions
        # for now just a single q1
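For context, the strategy now distinguishes an external stop (the shared threading.Event `se`, which also cancels the whole batch) from an internal stop (`signal_stop`, which ends only this runner); a minimal sketch of that two-flag consumer loop (not part of the commit, queue and timeout are illustrative):

import queue
import threading

HEARTBEAT_TIMEOUT = 1.0  # seconds; illustrative value

# Sketch of the two-flag stop design used by Strategy.
class Worker:
    def __init__(self) -> None:
        self.se = threading.Event()     # external stop (also cancels the batch)
        self.signal_stop = False        # internal stop (e.g. profit target hit)
        self.q1: queue.Queue = queue.Queue()

    def loop(self) -> None:
        while True:
            try:
                item = self.q1.get(timeout=HEARTBEAT_TIMEOUT)
            except queue.Empty:
                if self.signal_stop:    # internal: stop this runner only
                    break
                if self.se.is_set():    # external: stop runner and batch
                    break
                continue
            if item == "last" or self.se.is_set() or self.signal_stop:
                break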
@@ -429,9 +431,13 @@ class Strategy:
                item = self.q1.get(timeout=HEARTBEAT_TIMEOUT)
                #printnow(current_thread().name, "Items waiting in queue:", self.q1.qsize())
            except queue.Empty:
                # check signals
                # check internal signals - for profit/loss optimization etc. - valid for this runner only
                if self.signal_stop:
                    print(current_thread().name, "Stopping signal - internal")
                    break
                # check signals - stops the batch as well
                if self.se.is_set():
                    print(current_thread().name, "Stopping signal")
                    print(current_thread().name, "External stopping signal")
                    break
                if self.pe.is_set():
                    print(current_thread().name, "Paused.")
@@ -443,6 +449,9 @@ class Strategy:
            if item == "last" or self.se.is_set():
                print(current_thread().name, "stopping")
                break
            elif self.signal_stop:
                print(current_thread().name, "Stopping signal - internal")
                break
            elif self.pe.is_set():
                print(current_thread().name, "Paused.")
                continue
@@ -35,7 +35,7 @@ def concatenate_weekdays(weekday_filter):
    weekday_strings = [weekdays[day] for day in weekday_filter]

    # Concatenate the weekday strings
    return '-'.join(weekday_strings)
    return ','.join(weekday_strings)

def slice_dict_lists(d, last_item, to_tmstp = False):
    """Slices every list in the dictionary to the last last_item items.
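A quick illustration of the separator change above (not part of the commit; the weekdays mapping here is hypothetical, the real one lives in utils):

# Hypothetical weekdays mapping for illustration.
weekdays = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri"}

def concatenate_weekdays(weekday_filter):
    weekday_strings = [weekdays[day] for day in weekday_filter]
    return ','.join(weekday_strings)

print(concatenate_weekdays([0, 2, 4]))  # before: "Mon-Wed-Fri", now: "Mon,Wed,Fri"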