gui - delete archived runners for multiple ids (backend support)

This commit is contained in:
David Brazda
2023-09-23 06:48:20 +02:00
parent ff2d539fbf
commit b2365cc318
12 changed files with 232 additions and 56 deletions

testy/isfalling.py Normal file

@@ -0,0 +1,46 @@
def isfalling_optimized(pole: list, pocet: int = None):
    if pocet is None: pocet = len(pole)
    if len(pole) < pocet: return False
    # Prepare the list - all same consecutive values in the list are considered as one value.
    new_pole = []
    current_value = pole[0]
    for i in range(1, len(pole)):
        if pole[i] == current_value:
            continue
        else:
            new_pole.append(current_value)
            current_value = pole[i]
    new_pole.append(current_value)
    new_pole = new_pole[-pocet:]
    print(new_pole)
    # Perform the current calculation on this list.
    res = all(i > j for i, j in zip(new_pole, new_pole[1:]))
    return res

def isfalling_optimizedgpt(pole: list, pocet: int = None):
    if pocet is None:
        pocet = len(pole)
    if len(pole) < pocet:
        return False
    # Prepare the list - all same consecutive values in the list are considered as one value.
    new_pole = [pole[0]]
    for i in range(1, len(pole)):
        if pole[i] != pole[i - 1]:
            new_pole.append(pole[i])
    if len(new_pole) < pocet:
        return False
    new_pole = new_pole[-pocet:]
    print(new_pole)
    # Perform the current calculation on this list.
    res = all(i > j for i, j in zip(new_pole, new_pole[1:]))
    return res

pole = [8,2,8,1,4,4,4,3,3,3,2,1]
print(isfalling_optimizedgpt(pole,5))
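
For orientation, the consecutive-duplicate collapse used by both variants turns the sample input into [8, 2, 8, 1, 4, 3, 2, 1]; a minimal standalone check of just that step (a sketch, not part of the commit):

# Standalone sketch of the consecutive-duplicate collapse on the sample input above.
pole = [8, 2, 8, 1, 4, 4, 4, 3, 3, 3, 2, 1]
dedup = [pole[0]] + [x for prev, x in zip(pole, pole[1:]) if x != prev]
print(dedup)  # [8, 2, 8, 1, 4, 3, 2, 1]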


@@ -98,46 +98,84 @@ def next(data, state: StrategyState):
            state.ilog(lvl=1,e=f"Neexistuje indikator s nazvem {value} vracime 0" + str(e) + format_exc())
        return ret
    # OPTIMIZED BY CHATGPT
    # builds the conditions (either for AND or OR) from the working dict
    def evaluate_directive_conditions(work_dict, cond_type):
        def rev(kw, condition):
            if directive.endswith(kw):
                return not condition
            else:
                return condition
        cond = {}
        cond[cond_type] = {}
        # Create a dictionary to map directives to functions
        directive_functions = {
            "above": lambda ind, val: get_source_or_MA(ind)[-1] > value_or_indicator(val),
            "equals": lambda ind, val: get_source_or_MA(ind)[-1] == value_or_indicator(val),
            "below": lambda ind, val: get_source_or_MA(ind)[-1] < value_or_indicator(val),
            "falling": lambda ind, val: isfalling(get_source_or_MA(ind), val),
            "rising": lambda ind, val: isrising(get_source_or_MA(ind), val),
            "crossed_down": lambda ind, val: buy_if_crossed_down(ind, value_or_indicator(val)),
            "crossed_up": lambda ind, val: buy_if_crossed_up(ind, value_or_indicator(val)),
            "crossed": lambda ind, val: buy_if_crossed_down(ind, value_or_indicator(val)) or buy_if_crossed_up(ind, value_or_indicator(val)),
            "pivot_a": lambda ind, val: is_pivot(source=get_source_or_MA(ind), leg_number=val, type="A"),
            "pivot_v": lambda ind, val: is_pivot(source=get_source_or_MA(ind), leg_number=val, type="V"),
            "still_for": lambda ind, val: is_still(get_source_or_MA(ind), val, 2),
        }
        for indname, directive, value in work_dict[cond_type]:
            for keyword, func in directive_functions.items():
                if directive.endswith(keyword):
                    cond[cond_type][directive + "_" + indname + "_" + str(value)] = rev("not_" + keyword, func(indname, value))
        return eval_cond_dict(cond)
    # builds the conditions (either for AND or OR) from the working dict
    def evaluate_directive_conditions_old(work_dict, cond_type):
        # used for nots, reverses the condition for not_ keywords
        def rev(kw, condition):
            if directive.endswith(kw):
                return not condition
            else:
                return condition
        cond = {}
        cond[cond_type] = {}
        for indname, directive, value in work_dict[cond_type]:
            # directives are generalized in the form prefix_ACTION
            # ACTIONS = is_above, is_below, is_falling, is_rising, crossed_up, crossed_down, is_pivot_a, is_pivot_v
            res = None
            # GENERIC DIRECTIVES - REUSABLE
            if directive.endswith("above"):
                # reverse if it ends with "not_above"
                res = rev("not_above", get_source_or_MA(indname)[-1] > value_or_indicator(value))
            elif directive.endswith("equals"):
                res = rev("not_equals", get_source_or_MA(indname)[-1] == value_or_indicator(value))
            elif directive.endswith("below"):
                res = rev("not_below", get_source_or_MA(indname)[-1] < value_or_indicator(value))
            elif directive.endswith("falling"):
                res = rev("not_falling", isfalling(get_source_or_MA(indname), value))
            elif directive.endswith("rising"):
                res = rev("not_rising", isrising(get_source_or_MA(indname), value))
            elif directive.endswith("crossed_down"):
                res = rev("not_crossed_down", buy_if_crossed_down(indname, value_or_indicator(value)))
            elif directive.endswith("crossed_up"):
                res = rev("not_crossed_up", buy_if_crossed_up(indname, value_or_indicator(value)))
            # doesn't work very well
            elif directive.endswith("crossed"):
                res = rev("not_crossed", buy_if_crossed_down(indname, value_or_indicator(value)) or buy_if_crossed_up(indname, value_or_indicator(value)))
            elif directive.endswith("pivot_a"):
                res = rev("not_pivot_a", is_pivot(source=get_source_or_MA(indname), leg_number=value, type="A"))
            elif directive.endswith("pivot_v"):
                res = rev("not_pivot_v", is_pivot(source=get_source_or_MA(indname), leg_number=value, type="V"))
            elif directive.endswith("still_for"):
                # for 2 decimals
                res = rev("not_still_for", is_still(get_source_or_MA(indname), value, 2))
            if res is not None:
                cond[cond_type][directive+"_"+indname+"_"+str(value)] = res
            # ANY FURTHER SPECIFIC DIRECTIVES GO HERE
            # elif directive == "buy_if_necospecifckeho":
            #     pass
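
The refactored evaluate_directive_conditions above replaces this if/elif chain with a keyword-to-lambda dispatch table plus a "not_" reversal; a standalone sketch of that pattern, using dummy directives and thresholds rather than the strategy's real indicators:

# Sketch of the suffix-dispatch pattern with "not_" reversal (dummy data, hypothetical names).
def evaluate(directives):
    table = {
        "above": lambda v, t: v > t,   # last value above threshold
        "below": lambda v, t: v < t,   # last value below threshold
    }
    out = {}
    for name, directive, value, threshold in directives:
        for keyword, func in table.items():
            if directive.endswith(keyword):
                res = func(value, threshold)
                if directive.endswith("not_" + keyword):
                    res = not res      # "not_" keywords reverse the condition
                out[directive + "_" + name] = res
    return out

print(evaluate([("ema", "buy_if_above", 10, 5),
                ("rsi", "buy_if_not_above", 70, 60)]))
# -> {'buy_if_above_ema': True, 'buy_if_not_above_rsi': False}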
@@ -1728,6 +1766,7 @@ def next(data, state: StrategyState):
        res = state.buy(size=size)
        if isinstance(res, int) and res < 0:
            raise Exception(f"error in required operation LONG {res}")
        # SL setting is deferred until the notification, when the value is known
        # if SL is not set in prescribe, set the default from stratvars
        if state.vars.activeTrade.stoploss_value is None:
            sl_defvalue = get_default_sl_value(direction=state.vars.activeTrade.direction)


@@ -35,6 +35,9 @@ COUNT_API_REQUESTS = False
STRATVARS_UNCHANGEABLES = ['pendingbuys', 'blockbuy', 'jevylozeno', 'limitka']
DATA_DIR = user_data_dir("v2realbot")
#BT DELAYS
#profiling
PROFILING_NEXT_ENABLED = False
PROFILING_OUTPUT_DIR = DATA_DIR
""""
LATENCY DELAYS for LIVE eastcoast


@@ -107,11 +107,11 @@ def modify_stratin(si: StrategyInstance, id: UUID):
        return (-1, "add data conf invalid")
    for i in db.stratins:
        if str(i.id) == str(id):
            #print("removing",i)
            db.stratins.remove(i)
            #print("adding",si)
            db.stratins.append(si)
            #print(db.stratins)
            db.save()
            return (0, i.id)
    return (-2, "not found")
@@ -824,21 +824,25 @@ def edit_archived_runners(runner_id: UUID, archChange: RunArchiveChange):
#delete runner in archive and archive detail and runner logs
#reworked into a SINGLE TRANSACTION
def delete_archived_runners_byIDs(ids: list[UUID]):
    try:
        conn = pool.get_connection()
        out = []
        for id in ids:
            c = conn.cursor()
            print(str(id))
            resh = c.execute(f"DELETE from runner_header WHERE runner_id='{str(id)}';")
            print("header deleted",resh.rowcount)
            resd = c.execute(f"DELETE from runner_detail WHERE runner_id='{str(id)}';")
            print("detail deleted",resd.rowcount)
            resl = c.execute(f"DELETE from runner_logs WHERE runner_id='{str(id)}';")
            print("log deleted",resl.rowcount)
            out.append(str(id) + ": " + str(resh.rowcount) + " " + str(resd.rowcount) + " " + str(resl.rowcount))
        conn.commit()
        print("commit")
        # if resh.rowcount == 0 or resd.rowcount == 0:
        #     return -1, "not found "+str(resh.rowcount) + " " + str(resd.rowcount) + " " + str(resl.rowcount)
        return 0, out
    except Exception as e:
        conn.rollback()


@@ -319,9 +319,9 @@ def _get_all_archived_runners() -> list[RunArchive]:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")

#delete archive runner from header and detail
@app.delete("/archived_runners/", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK)
def _delete_archived_runners_byIDs(runner_ids: list[UUID]):
    res, id = cs.delete_archived_runners_byIDs(ids=runner_ids)
    if res == 0: return id
    elif res < 0:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Error: {res}:{id}")
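
With the route switched from a path parameter to a request body, a client now sends the runner IDs as a JSON list. A hypothetical call (host, port, API key and UUIDs are placeholders, not taken from the commit):

# Hypothetical client call to the new bulk-delete endpoint.
import requests

resp = requests.delete(
    "http://localhost:8000/archived_runners/",
    headers={"X-API-Key": "<api-key>"},
    json=["11111111-1111-1111-1111-111111111111",
          "22222222-2222-2222-2222-222222222222"],  # runner UUIDs to delete
)
print(resp.status_code, resp.json())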


@@ -342,19 +342,21 @@ $("#editModalArchive").on('submit','#editFormArchive', function(event){
    })
});

function delete_arch_rows(ids) {
    $.ajax({
        url:"/archived_runners/",
        beforeSend: function (xhr) {
            xhr.setRequestHeader('X-API-Key',
                API_KEY); },
        method:"DELETE",
        contentType: "application/json",
        dataType: "json",
        data: JSON.stringify(ids),
        success:function(data){
            $('#delFormArchive')[0].reset();
            window.$('#delModalArchive').modal('hide');
            $('#deletearchive').attr('disabled', false);
            console.log(data)
            archiveRecords.ajax.reload();
        },
        error: function(xhr, status, error) {
@@ -373,11 +375,13 @@ $("#delModalArchive").on('submit','#delFormArchive', function(event){
    $('#deletearchive').attr('disabled','disabled');
    //rows = archiveRecords.rows('.selected');
    if(rows.data().length > 0 ) {
        runnerIds = []
        // Loop through the selected rows and collect each row's ID
        rows.every(function (rowIdx, tableLoop, rowLoop ) {
            var data = this.data()
            runnerIds.push(data.id);
        });
        delete_arch_rows(runnerIds)
    }
});


@@ -3,8 +3,8 @@ API_KEY = localStorage.getItem("api-key")
var chart = null
// var colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957"]
// var reset_colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957"]
var colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#7B0E60","#9B2888","#BD38A0","#A30F68","#6E0B50","#CA2183","#E6319B","#A04C54","#643848","#CA7474","#E68D8D","#4F9C34","#3B7128","#73DF4D","#95EF65","#A857A4","#824690","#D087CC","#E2A1DF","#79711B","#635D17","#99912B","#B1A73D","#3779C9","#2B68B3","#5599ED","#77A9F7","#003A4C","#002F3B","#004C67","#00687D","#A1C6B5","#8CC6A5","#C9E6D5","#E4F6EA","#D2144A","#A60F3B","#FA2463","#FF3775"];
var reset_colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#7B0E60","#9B2888","#BD38A0","#A30F68","#6E0B50","#CA2183","#E6319B","#A04C54","#643848","#CA7474","#E68D8D","#4F9C34","#3B7128","#73DF4D","#95EF65","#A857A4","#824690","#D087CC","#E2A1DF","#79711B","#635D17","#99912B","#B1A73D","#3779C9","#2B68B3","#5599ED","#77A9F7","#003A4C","#002F3B","#004C67","#00687D","#A1C6B5","#8CC6A5","#C9E6D5","#E4F6EA","#D2144A","#A60F3B","#FA2463","#FF3775"];
var indList = []


@@ -6,7 +6,7 @@ from v2realbot.utils.utils import AttributeDict, zoneNY, is_open_rush, is_close_
from v2realbot.utils.tlog import tlog
from v2realbot.utils.ilog import insert_log, insert_log_multiple_queue
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Order, Account
from v2realbot.config import BT_DELAYS, get_key, HEARTBEAT_TIMEOUT, QUIET_MODE, LOG_RUNNER_EVENTS, ILOG_SAVE_LEVEL_FROM, PROFILING_NEXT_ENABLED, PROFILING_OUTPUT_DIR
import queue
#from rich import print
from v2realbot.loader.aggregator import TradeAggregator2Queue, TradeAggregator2List, TradeAggregator
@@ -25,9 +25,10 @@ from threading import Event, current_thread
import json
from uuid import UUID
from rich import print as printnow
if PROFILING_NEXT_ENABLED:
    from pyinstrument import Profiler
    profiler = Profiler()

# generic Parent strategy supporting queues
class Strategy:
    def __init__(self, name: str, symbol: str, next: callable, init: callable, account: Account, mode: str = Mode.PAPER, stratvars: AttributeDict = None, open_rush: int = 30, close_rush: int = 30, pe: Event = None, se: Event = None, runner_id: UUID = None, ilog_save: bool = False) -> None:
@@ -328,9 +329,11 @@ class Strategy:
                #self.state.ilog(e="Rush hour - skipping")
            else:
                # Profile the function
                if PROFILING_NEXT_ENABLED:
                    profiler.start()
                self.next(item, self.state)
                if PROFILING_NEXT_ENABLED:
                    profiler.stop()
                self.after_iteration(item)

    ##run strategy live
@@ -393,10 +396,11 @@ class Strategy:
        tlog(f"FINISHED")
        print(40*"*",self.mode, "STRATEGY ", self.name,"STOPPING",40*"*")
        if PROFILING_NEXT_ENABLED:
            now = datetime.now()
            results_file = PROFILING_OUTPUT_DIR + "/"+"profiler"+now.strftime("%Y-%m-%d_%H-%M-%S")+".html"
            with open(results_file, "w", encoding="utf-8") as f_html:
                f_html.write(profiler.output_html())
        self.stop()
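
For reference, the pyinstrument flow guarded by PROFILING_NEXT_ENABLED boils down to this minimal standalone sketch (placeholder workload and output path, not the bot's configuration):

# Minimal pyinstrument sketch: start/stop around the profiled work, then write an HTML report.
from pyinstrument import Profiler

profiler = Profiler()
profiler.start()
total = sum(i * i for i in range(100_000))  # placeholder workload
profiler.stop()
with open("profiler_example.html", "w", encoding="utf-8") as f:
    f.write(profiler.output_html())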


@@ -286,12 +286,41 @@ def send_to_telegram(message):
    except Exception as e:
        print(e)

#OPTIMIZED BY BARD
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code
    https://stackoverflow.com/questions/11875770/how-to-overcome-datetime-datetime-not-json-serializable
    """
    type_map = {
        datetime: lambda obj: obj.timestamp(),
        UUID: lambda obj: str(obj),
        Enum: lambda obj: str(obj),
        np.int64: lambda obj: int(obj),
        Order: lambda obj: obj.__dict__,
        TradeUpdate: lambda obj: obj.__dict__,
        btOrder: lambda obj: obj.__dict__,
        btTradeUpdate: lambda obj: obj.__dict__,
        RunArchive: lambda obj: obj.__dict__,
        Trade: lambda obj: obj.__dict__,
        RunArchiveDetail: lambda obj: obj.__dict__,
        Intervals: lambda obj: obj.__dict__,
        SLHistory: lambda obj: obj.__dict__,
    }
    serializer = type_map.get(type(obj))
    if serializer is not None:
        return serializer(obj)
    raise TypeError(str(obj) + "Type %s not serializable" % type(obj))

#datetime to timestamp
def json_serial_old(obj):
    """JSON serializer for objects not serializable by default json code
    https://stackoverflow.com/questions/11875770/how-to-overcome-datetime-datetime-not-json-serializable
    """
    if isinstance(obj, (datetime, date)):
        return obj.timestamp()
    if isinstance(obj, UUID):
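
One behavioral difference worth noting: type_map.get(type(obj)) matches exact types only, so values whose type is a subclass of a mapped type (a plain date, or a member of a concrete Enum subclass) fall through to the TypeError, whereas the isinstance checks in json_serial_old also caught them. A short usage sketch, assuming json_serial above is in scope:

# Usage sketch: let json.dumps fall back to json_serial for non-JSON types (hypothetical payload).
import json
from datetime import datetime
from uuid import uuid4

payload = {"runner_id": uuid4(), "started": datetime.now(), "profit": 1.25}
print(json.dumps(payload, default=json_serial))  # UUID -> str, datetime -> POSIX timestamp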
@@ -508,21 +537,68 @@ def is_open_hours(dt, business_hours: dict = None):
        and business_hours["from"] <= dt.time() < business_hours["to"]

#returns whether the given array is falling (either the whole array or just the last pocet values)
def isfalling_old(pole: list, pocet: int = None):
    if pocet is None: pocet = len(pole)
    if len(pole)<pocet: return False
    pole = pole[-pocet:]
    res = all(i > j for i, j in zip(pole, pole[1:]))
    return res

#optimized by gpt; consecutive equal values are treated as one
def isfalling(pole: list, pocet: int = None):
    if pocet is None:
        pocet = len(pole)
    if len(pole) < pocet:
        return False
    # Prepare the list - all same consecutive values in the list are considered as one value.
    new_pole = [pole[0]]
    for i in range(1, len(pole)):
        if pole[i] != pole[i - 1]:
            new_pole.append(pole[i])
    if len(new_pole) < pocet:
        return False
    new_pole = new_pole[-pocet:]
    #print(new_pole)
    # Perform the current calculation on this list.
    res = all(i > j for i, j in zip(new_pole, new_pole[1:]))
    return res

#returns whether the given array is rising (either the whole array or just the last pocet values)
def isrising_old(pole: list, pocet: int = None):
    if pocet is None: pocet = len(pole)
    if len(pole)<pocet: return False
    pole = pole[-pocet:]
    res = all(i < j for i, j in zip(pole, pole[1:]))
    return res

#optimized by gpt; consecutive equal values are treated as one
def isrising(pole: list, pocet: int = None):
    if pocet is None:
        pocet = len(pole)
    if len(pole) < pocet:
        return False
    # Prepare the list - all same consecutive values in the list are considered as one value.
    new_pole = [pole[0]]
    for i in range(1, len(pole)):
        if pole[i] != pole[i - 1]:
            new_pole.append(pole[i])
    if len(new_pole) < pocet:
        return False
    new_pole = new_pole[-pocet:]
    #print(new_pole)
    # Perform the current calculation on this list.
    res = all(i < j for i, j in zip(new_pole, new_pole[1:]))
    return res

def parse_alpaca_timestamp(value: Timestamp):
    return value.seconds + (value.nanoseconds * float(1e-9))
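
The practical effect of the new isfalling/isrising is that a plateau no longer breaks a trend; a small check, assuming the two variants above are importable:

# Consecutive equal values count as a single step in the new version.
series = [5, 4, 4, 3]
print(isfalling_old(series, 3))  # False - the tail [4, 4, 3] is not strictly decreasing
print(isfalling(series, 3))      # True  - collapses to [5, 4, 3], which is strictly decreasing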