gui model metadata view + backend json optimization (orjson)

David Brazda
2023-12-10 15:02:25 +01:00
parent 523905ece6
commit 232f32467e
38 changed files with 224 additions and 142 deletions
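
Most of the diff below is a mechanical swap of the stdlib json module for orjson across the backend, plus a raw-JSON pass-through endpoint and a GUI fallback for model metadata. orjson is not a drop-in replacement, which is why many call sites also gain option= flags and .decode('utf-8') calls. A minimal sketch of the differences the migration has to account for (json_serial here is a stand-in for the repo's helper of the same name):

    import orjson
    from datetime import datetime

    def json_serial(obj):
        # stand-in for v2realbot.utils.utils.json_serial: stringify types
        # that orjson does not serialize natively (or that we pass through)
        if isinstance(obj, datetime):
            return obj.isoformat()
        raise TypeError(f"{type(obj)} not serializable")

    payload = {"started": datetime.now(), 1: "non-string key"}

    raw = orjson.dumps(
        payload,
        default=json_serial,  # called for types orjson refuses to serialize
        # route datetimes through `default` instead of native RFC 3339 output,
        # and allow the non-str dict key:
        option=orjson.OPT_PASSTHROUGH_DATETIME | orjson.OPT_NON_STR_KEYS,
    )
    text = raw.decode("utf-8")  # orjson.dumps returns bytes, not str
    data = orjson.loads(raw)    # orjson.loads accepts bytes or str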

View File

@@ -12,6 +12,7 @@ better-exceptions==0.3.3
 bleach==6.0.0
 blinker==1.5
 cachetools==5.3.0
+CD==1.1.0
 certifi==2022.12.7
 chardet==5.1.0
 charset-normalizer==3.0.1
@@ -58,7 +59,7 @@ Jinja2==3.1.2
 joblib==1.3.2
 jsonschema==4.17.3
 jupyterlab-widgets==3.0.9
-keras==2.13.1
+keras==2.15.0
 kiwisolver==1.4.4
 libclang==16.0.6
 llvmlite==0.39.1
@@ -68,7 +69,8 @@ MarkupSafe==2.1.2
 matplotlib==3.8.2
 matplotlib-inline==0.1.6
 mdurl==0.1.2
-mlroom @ git+https://github.com/drew2323/mlroom.git@967b1e3b5071854910ea859eca68bf0c3e67f951
+ml-dtypes==0.2.0
+mlroom @ git+https://github.com/drew2323/mlroom.git@768c88348a0bd24c244a8720c67abb20fcb1403e
 mplfinance==0.12.10b0
 msgpack==1.0.4
 mypy-extensions==1.0.0
@@ -77,6 +79,7 @@ numba==0.56.4
 numpy==1.23.5
 oauthlib==3.2.2
 opt-einsum==3.3.0
+orjson==3.9.10
 packaging==23.0
 pandas==1.5.3
 param==1.13.0
@@ -114,7 +117,7 @@ requests-oauthlib==1.3.1
 rich==13.3.1
 rsa==4.9
 schedule==1.2.1
-scikit-learn==1.3.1
+scikit-learn==1.3.2
 scipy==1.11.2
 seaborn==0.12.2
 semver==2.13.0
@@ -128,10 +131,10 @@ streamlit==1.20.0
 structlog==23.1.0
 TA-Lib==0.4.28
 tenacity==8.2.2
-tensorboard==2.13.0
+tensorboard==2.15.1
 tensorboard-data-server==0.7.1
-tensorflow==2.13.0
-tensorflow-estimator==2.13.0
+tensorflow==2.15.0
+tensorflow-estimator==2.15.0
 tensorflow-io-gcs-filesystem==0.34.0
 termcolor==2.3.0
 threadpoolctl==3.2.0
@@ -149,12 +152,12 @@ tzdata==2023.2
 tzlocal==4.3
 urllib3==1.26.14
 uvicorn==0.21.1
--e git+https://github.com/drew2323/v2trading.git@d38bf0600fbadbffba78ae23625eaecd1febc7f4#egg=v2realbot
+-e git+https://github.com/drew2323/v2trading.git@523905ece6d99bf48a8952d39ced6a13f3b9a84e#egg=v2realbot
 validators==0.20.0
 wcwidth==0.2.9
 webencodings==0.5.1
 websockets==10.4
 Werkzeug==2.2.3
 widgetsnbextension==4.0.9
-wrapt==1.15.0
+wrapt==1.14.1
 zipp==3.15.0

View File

@@ -1,7 +1,7 @@
 from setuptools import find_packages, setup
 setup(name='v2realbot',
-      version='0.9',
+      version='0.91',
       description='Realbot trader',
       author='David Brazda',
       author_email='davidbrazda61@gmail.com',

View File

@@ -23,7 +23,7 @@ from rich import print
 from collections import defaultdict
 from pandas import to_datetime
 from msgpack.ext import Timestamp
-from v2realbot.utils.historicals import convert_daily_bars
+from v2realbot.utils.historicals import convert_historical_bars
 def get_last_close():
     pass
@@ -38,7 +38,7 @@ def get_historical_bars(symbol: str, time_from: datetime, time_to: datetime, tim
     bars: BarSet = stock_client.get_stock_bars(bar_request)
     print("original bars", bars["BAC"])
     print(bars)
-    return convert_daily_bars(bars[symbol])
+    return convert_historical_bars(bars[symbol])
 #in init we fill the requested historical data into historicals[]

File diff suppressed because one or more lines are too long

View File

@@ -2,7 +2,7 @@ import sqlite3
 from v2realbot.config import DATA_DIR
 from v2realbot.utils.utils import json_serial
 from uuid import UUID, uuid4
-import json
+import orjson
 from datetime import datetime
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
 from v2realbot.common.model import RunArchiveDetail, RunArchive, RunArchiveView
@@ -35,14 +35,14 @@ def row_to_object(row: dict) -> RunArchive:
         end_positions=row.get('end_positions'),
         end_positions_avgp=row.get('end_positions_avgp'),
         metrics=row.get('open_orders'),
-        #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None,
+        #metrics=orjson.loads(row.get('metrics')) if row.get('metrics') else None,
         stratvars_toml=row.get('stratvars_toml')
     )
 def get_all_archived_runners():
     conn = pool.get_connection()
     try:
-        conn.row_factory = lambda c, r: json.loads(r[0])
+        conn.row_factory = lambda c, r: orjson.loads(r[0])
         c = conn.cursor()
         res = c.execute(f"SELECT data FROM runner_header")
     finally:
@@ -54,7 +54,7 @@ def insert_archive_header(archeader: RunArchive):
     conn = pool.get_connection()
     try:
         c = conn.cursor()
-        json_string = json.dumps(archeader, default=json_serial)
+        json_string = orjson.dumps(archeader, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
         if archeader.batch_id is not None:
             statement = f"INSERT INTO runner_header (runner_id, batch_id, ra) VALUES ('{str(archeader.id)}','{str(archeader.batch_id)}','{json_string}')"
         else:
@@ -103,7 +103,7 @@ def migrate_to_columns(ra: RunArchive):
             SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=?
             WHERE runner_id=?
             ''',
-            (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))
+            (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, orjson.dumps(ra.strat_json), orjson.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, orjson.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))
         conn.commit()
     finally:
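
One caveat with the first hunk of this file: json_string is interpolated into an f-string SQL statement, and orjson.dumps returns bytes, so the statement would embed a b'...' repr. A sketch of the safer shape (hypothetical minimal table; parameter binding instead of interpolation):

    import sqlite3
    import orjson

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE runner_header (runner_id TEXT, ra TEXT)")

    def insert_header(runner_id: str, header: dict) -> None:
        json_string = orjson.dumps(header).decode("utf-8")  # bytes -> str
        conn.execute(
            "INSERT INTO runner_header (runner_id, ra) VALUES (?, ?)",
            (runner_id, json_string),
        )
        conn.commit()

    insert_header("run-1", {"profit": 12.5})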

View File

@@ -2,7 +2,7 @@ import sqlite3
 from v2realbot.config import DATA_DIR
 from v2realbot.utils.utils import json_serial
 from uuid import UUID, uuid4
-import json
+import orjson
 from datetime import datetime
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
 from v2realbot.common.model import RunArchiveDetail
@@ -11,7 +11,7 @@ from tinydb import TinyDB, Query, where
 sqlite_db_file = DATA_DIR + "/v2trading.db"
 conn = sqlite3.connect(sqlite_db_file)
 #by default returns a list of tuples whose members are the columns
-#conn.row_factory = lambda c, r: json.loads(r[0])
+#conn.row_factory = lambda c, r: orjson.loads(r[0])
 #conn.row_factory = lambda c, r: r[0]
 #conn.row_factory = sqlite3.Row
@@ -28,7 +28,7 @@ insert_list = [dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordT
 def insert_log(runner_id: UUID, time: float, logdict: dict):
     c = conn.cursor()
-    json_string = json.dumps(logdict, default=json_serial)
+    json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
     conn.commit()
     return res.rowcount
@@ -37,14 +37,14 @@ def insert_log_multiple(runner_id: UUID, loglist: list):
     c = conn.cursor()
     insert_data = []
     for i in loglist:
-        row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
+        row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))
         insert_data.append(row)
     c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
     conn.commit()
     return c.rowcount
     # c = conn.cursor()
-    # json_string = json.dumps(logdict, default=json_serial)
+    # json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     # res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
     # print(res)
     # conn.commit()
@@ -52,7 +52,7 @@ def insert_log_multiple(runner_id: UUID, loglist: list):
 #returns list of ilog jsons
 def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float):
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={ts_from} AND time <={ts_to}")
     return res.fetchall()
@@ -94,21 +94,21 @@ def delete_logs(runner_id: UUID):
 def insert_archive_detail(archdetail: RunArchiveDetail):
     c = conn.cursor()
-    json_string = json.dumps(archdetail, default=json_serial)
+    json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = c.execute("INSERT INTO runner_detail VALUES (?,?)",[str(archdetail["id"]), json_string])
     conn.commit()
     return res.rowcount
 #returns list of details
 def get_all_archive_detail():
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_detail")
     return res.fetchall()
 #returns a specific one
 def get_archive_detail_byID(runner_id: UUID):
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(runner_id)}'")
     return res.fetchone()
@@ -123,7 +123,7 @@ def delete_archive_detail(runner_id: UUID):
 def get_all_archived_runners_detail():
     arch_detail_file = DATA_DIR + "/arch_detail.json"
-    db_arch_d = TinyDB(arch_detail_file, default=json_serial)
+    db_arch_d = TinyDB(arch_detail_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = db_arch_d.all()
     return 0, res
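
The read helpers above rely on a row_factory that parses each stored row into a dict as it is fetched; a self-contained sketch of that pattern follows. (Separately, note that TinyDB's default JSONStorage forwards extra keyword arguments to stdlib json.dump, so the orjson-specific option= kwarg in the last hunk would not be understood there; that line looks like a leftover of the mechanical replace.)

    import sqlite3
    import orjson

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE runner_logs (data TEXT)")
    conn.execute(
        "INSERT INTO runner_logs VALUES (?)",
        (orjson.dumps({"e": "fill", "qty": 5}).decode("utf-8"),),
    )

    # parse every fetched row straight into a dict; orjson.loads takes str or bytes
    conn.row_factory = lambda cursor, row: orjson.loads(row[0])
    print(conn.execute("SELECT data FROM runner_logs").fetchall())
    # -> [{'e': 'fill', 'qty': 5}]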

View File

@@ -4,7 +4,7 @@ from keras.models import Sequential
 from keras.layers import LSTM, Dense
 from v2realbot.controller.services import get_archived_runner_details_byID
 from v2realbot.common.model import RunArchiveDetail
-import json
+import orjson
 runner_id = "838e918e-9be0-4251-a968-c13c83f3f173"
 result = None

View File

@@ -2,7 +2,7 @@ import sqlite3
 from v2realbot.config import DATA_DIR
 from v2realbot.utils.utils import json_serial
 from uuid import UUID, uuid4
-import json
+import orjson
 from datetime import datetime
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
 from v2realbot.common.model import RunArchiveDetail
@@ -11,7 +11,7 @@ from tinydb import TinyDB, Query, where
 sqlite_db_file = DATA_DIR + "/v2trading.db"
 conn = sqlite3.connect(sqlite_db_file)
 #by default returns a list of tuples whose members are the columns
-#conn.row_factory = lambda c, r: json.loads(r[0])
+#conn.row_factory = lambda c, r: orjson.loads(r[0])
 #conn.row_factory = lambda c, r: r[0]
 #conn.row_factory = sqlite3.Row
@@ -28,7 +28,7 @@ insert_list = [dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordT
 def insert_log(runner_id: UUID, time: float, logdict: dict):
     c = conn.cursor()
-    json_string = json.dumps(logdict, default=json_serial)
+    json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
     conn.commit()
     return res.rowcount
@@ -37,14 +37,14 @@ def insert_log_multiple(runner_id: UUID, loglist: list):
     c = conn.cursor()
     insert_data = []
     for i in loglist:
-        row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
+        row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))
         insert_data.append(row)
     c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
     conn.commit()
     return c.rowcount
     # c = conn.cursor()
-    # json_string = json.dumps(logdict, default=json_serial)
+    # json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     # res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
     # print(res)
     # conn.commit()
@@ -52,7 +52,7 @@ def insert_log_multiple(runner_id: UUID, loglist: list):
 #returns list of ilog jsons
 def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float):
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={ts_from} AND time <={ts_to}")
     return res.fetchall()
@@ -94,21 +94,21 @@ def delete_logs(runner_id: UUID):
 def insert_archive_detail(archdetail: RunArchiveDetail):
     c = conn.cursor()
-    json_string = json.dumps(archdetail, default=json_serial)
+    json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = c.execute("INSERT INTO runner_detail VALUES (?,?)",[str(archdetail["id"]), json_string])
     conn.commit()
     return res.rowcount
 #returns list of details
 def get_all_archive_detail():
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_detail")
     return res.fetchall()
 #returns a specific one
 def get_archive_detail_byID(runner_id: UUID):
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(runner_id)}'")
     return res.fetchone()
@@ -123,7 +123,7 @@ def delete_archive_detail(runner_id: UUID):
 def get_all_archived_runners_detail():
     arch_detail_file = DATA_DIR + "/arch_detail.json"
-    db_arch_d = TinyDB(arch_detail_file, default=json_serial)
+    db_arch_d = TinyDB(arch_detail_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = db_arch_d.all()
     return 0, res

View File

@@ -46,7 +46,7 @@ db.save()
 # b = 2
 # def toJson(self):
-#     return json.dumps(self, default=lambda o: o.__dict__)
+#     return orjson.dumps(self, default=lambda o: o.__dict__)
 # db.append(Neco.a)

View File

@@ -1,12 +1,12 @@
 import timeit
 setup = '''
 import msgpack
-import json
+import orjson
 from copy import deepcopy
 data = {'name':'John Doe','ranks':{'sports':13,'edu':34,'arts':45},'grade':5}'''
 print(timeit.timeit('deepcopy(data)', setup=setup))
 # 12.0860249996
-print(timeit.timeit('json.loads(json.dumps(data))', setup=setup))
+print(timeit.timeit('orjson.loads(orjson.dumps(data))', setup=setup))
 # 9.07182312012
 print(timeit.timeit('msgpack.unpackb(msgpack.packb(data))', setup=setup))
 # 1.42743492126
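
The commented timings above predate the switch (they were measured with the stdlib json round-trip), so they likely overstate the JSON cost now. A sketch that reruns all three side by side; no numbers are claimed here, but on small dicts orjson's compiled round-trip is typically several times faster than stdlib json's, narrowing the gap to msgpack:

    import timeit

    setup = '''
    import json, msgpack, orjson
    data = {'name': 'John Doe', 'ranks': {'sports': 13, 'edu': 34, 'arts': 45}, 'grade': 5}'''

    for label, stmt in [
        ("json   ", "json.loads(json.dumps(data))"),
        ("orjson ", "orjson.loads(orjson.dumps(data))"),
        ("msgpack", "msgpack.unpackb(msgpack.packb(data))"),
    ]:
        print(label, timeit.timeit(stmt, setup=setup))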

View File

@@ -16,7 +16,7 @@ import importlib
 from queue import Queue
 from tinydb import TinyDB, Query, where
 from tinydb.operations import set
-import json
+import orjson
 from rich import print
@@ -29,7 +29,7 @@ class RunnerLogger:
     def __init__(self, runner_id: UUID) -> None:
         self.runner_id = runner_id
 runner_log_file = DATA_DIR + "/runner_log.json"
-db_runner_log = TinyDB(runner_log_file, default=json_serial)
+db_runner_log = TinyDB(runner_log_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 def insert_log_multiple(runner_id: UUID, logList: list):
     runner_table = db_runner_log.table(str(runner_id))

View File

@@ -16,7 +16,7 @@ import importlib
 from queue import Queue
 #from tinydb import TinyDB, Query, where
 #from tinydb.operations import set
-import json
+import orjson
 from rich import print
 from tinyflux import Point, TinyFlux
@@ -26,7 +26,7 @@ runner_log_file = DATA_DIR + "/runner_flux__log.json"
 db_runner_log = TinyFlux(runner_log_file)
 insert_dict = {'datum': datetime.now(), 'side': "dd", 'name': 'david','id': uuid4(), 'order': "neco"}
-#json.dumps(insert_dict, default=json_serial)
+#orjson.dumps(insert_dict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 p1 = Point(time=datetime.now(), tags=insert_dict)
 db_runner_log.insert(p1)

View File

@@ -13,7 +13,7 @@ from v2realbot.common.model import Order, TradeUpdate as btTradeUpdate
 from alpaca.trading.models import TradeUpdate
 from alpaca.trading.enums import TradeEvent, OrderType, OrderSide, OrderType, OrderStatus
 from rich import print
-import json
+import orjson
 #storage_with_injected_serialization = JSONStorage()
@@ -110,7 +110,7 @@ a = Order(id=uuid4(),
           limit_price=22.4)
 db_file = DATA_DIR + "/db.json"
-db = TinyDB(db_file, default=json_serial)
+db = TinyDB(db_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 db.truncate()
 insert = {'datum': datetime.now(), 'side': OrderSide.BUY, 'name': 'david','id': uuid4(), 'order': orderList}

View File

@@ -6,7 +6,7 @@ import secrets
 from typing import Annotated
 import os
 import uvicorn
-import json
+import orjson
 from datetime import datetime
 from v2realbot.utils.utils import zoneNY
@@ -103,7 +103,7 @@ async def websocket_endpoint(
                 'vwap': 123,
                 'updated': 123,
                 'index': 123}
-        await websocket.send_text(json.dumps(data))
+        await websocket.send_text(orjson.dumps(data))
     except WebSocketDisconnect:
         print("CLIENT DISCONNECTED for", runner_id)
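
Note that Starlette's WebSocket.send_text expects a str, while orjson.dumps returns bytes, so the hunk above would need one more step. A sketch of the two working variants:

    from fastapi import WebSocket
    import orjson

    async def send_json_text(websocket: WebSocket, data: dict) -> None:
        # decode the orjson bytes so the frame goes out as text
        await websocket.send_text(orjson.dumps(data).decode("utf-8"))

    async def send_json_binary(websocket: WebSocket, data: dict) -> None:
        # or keep the bytes and send a binary frame, if the client expects one
        await websocket.send_bytes(orjson.dumps(data))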

View File

@@ -6,7 +6,7 @@ import secrets
 from typing import Annotated
 import os
 import uvicorn
-import json
+import orjson
 from datetime import datetime
 from v2realbot.utils.utils import zoneNY
@@ -101,7 +101,7 @@ async def websocket_endpoint(websocket: WebSocket, client_id: int):
             # 'close': 123,
             # 'open': 123,
             # 'time': "2019-05-25"}
-            await manager.send_personal_message(json.dumps(data), websocket)
+            await manager.send_personal_message(orjson.dumps(data), websocket)
             #await manager.broadcast(f"Client #{client_id} says: {data}")
     except WebSocketDisconnect:
         manager.disconnect(websocket)

View File

@@ -5,7 +5,7 @@ import threading
 import time
 from v2realbot.common.model import RunArchive, RunArchiveView
 from datetime import datetime
-import json
+import orjson
 sqlite_db_file = DATA_DIR + "/v2trading.db"
 # Define the connection pool
@@ -82,7 +82,7 @@ def row_to_runarchiveview(row: dict) -> RunArchiveView:
         trade_count=int(row['trade_count']),
         end_positions=int(row['end_positions']),
         end_positions_avgp=float(row['end_positions_avgp']),
-        metrics=json.loads(row['metrics']) if row['metrics'] else None
+        metrics=orjson.loads(row['metrics']) if row['metrics'] else None
     )
 #converts a row dict back into an object, including re-typing
@@ -100,13 +100,13 @@ def row_to_runarchive(row: dict) -> RunArchive:
         account=row['account'],
         bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
         bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
-        strat_json=json.loads(row['strat_json']),
-        settings=json.loads(row['settings']),
+        strat_json=orjson.loads(row['strat_json']),
+        settings=orjson.loads(row['settings']),
         ilog_save=bool(row['ilog_save']),
         profit=float(row['profit']),
         trade_count=int(row['trade_count']),
         end_positions=int(row['end_positions']),
         end_positions_avgp=float(row['end_positions_avgp']),
-        metrics=json.loads(row['metrics']),
+        metrics=orjson.loads(row['metrics']),
         stratvars_toml=row['stratvars_toml']
     )

View File

@@ -8,10 +8,11 @@ from alpaca.data.timeframe import TimeFrame
 from v2realbot.strategy.base import StrategyState
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
 from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
-from v2realbot.utils.utils import AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays
+from v2realbot.utils.utils import AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data
 from v2realbot.utils.ilog import delete_logs
 from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
 from datetime import datetime
+from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer
 from threading import Thread, current_thread, Event, enumerate
 from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY
 import importlib
@@ -21,7 +22,7 @@ from alpaca.trading.client import TradingClient
 from queue import Queue
 from tinydb import TinyDB, Query, where
 from tinydb.operations import set
-import json
+import orjson
 import numpy as np
 from numpy import ndarray
 from rich import print
@@ -45,8 +46,8 @@ lock = Lock()
 arch_header_file = DATA_DIR + "/arch_header.json"
 #arch_detail_file = DATA_DIR + "/arch_detail.json"
 #db layer to store runner archive
-db_arch_h = TinyDB(arch_header_file, default=json_serial)
-#db_arch_d = TinyDB(arch_detail_file, default=json_serial)
+db_arch_h = TinyDB(arch_header_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
+#db_arch_d = TinyDB(arch_detail_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 #db layer to store stratins, TBD migrate to TinyDB
 db = Store()
@@ -368,7 +369,7 @@ def get_testlist_byID(record_id: str):
     if row is None:
         return -2, "not found"
     else:
-        return 0, TestList(id=row[0], name=row[1], dates=json.loads(row[2]))
+        return 0, TestList(id=row[0], name=row[1], dates=orjson.loads(row[2]))
 ##STOPPED REVIEWING HERE - finish the timezone settings
@@ -405,6 +406,11 @@ def run_batch_stratin(id: UUID, runReq: RunRequest):
         #for the first item
         if day == cal_dates[0]:
+            #if the from-time is after market close, skip this day
+            if datefrom > end_time:
+                print("From-time is after market close, skipping this day")
+                continue
             #if the from-time is later than the first day's market open, use the later time
             if datefrom > start_time:
                 start_time = datefrom
@@ -738,7 +744,7 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param
     res = dict(profit={})
     #filt = max_positions['side'] == 'OrderSide.BUY'
-    res["pos_cnt"] = dict(zip(max_positions['qty'], max_positions['count']))
+    res["pos_cnt"] = dict(zip(map(str, max_positions['qty']), max_positions['count']))
     #populate the batch sum profit
     if inter_batch_params is not None:
@@ -827,7 +833,7 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param
         res["profit"]["daily_rel_profit_list"] = strat.state.rel_profit_cum
         #insert the whole list
-        res["prescr_trades"]=json.loads(json.dumps(strat.state.vars.prescribedTrades, default=json_serial))
+        res["prescr_trades"]=transform_data(strat.state.vars.prescribedTrades, json_serial)
     except NameError:
         pass
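
transform_data comes from v2realbot.utils.utils (see the import hunk above) and its body is not part of this diff. A plausible sketch of such a helper, assuming it replaces the old json.loads(json.dumps(...)) deep-conversion idiom one-to-one:

    import orjson

    def transform_data(obj, serializer):
        # Hypothetical reimplementation: deep-convert obj into plain JSON types
        # (dicts, lists, strings, numbers) by round-tripping through orjson,
        # delegating datetimes and other unsupported types to `serializer`.
        return orjson.loads(
            orjson.dumps(
                obj,
                default=serializer,
                option=orjson.OPT_PASSTHROUGH_DATETIME | orjson.OPT_NON_STR_KEYS,
            )
        )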
@@ -852,10 +858,10 @@ def archive_runner(runner: Runner, strat: StrategyInstance, inter_batch_params:
     #store information about the settings
     # if self.mode in [Mode.BT, Mode.PREP]:
     #     str(self.dataloader.cache_used)
     settings = dict(resolution=strat.state.resolution,
                     rectype=strat.state.rectype,
-                    cache_used=strat.dataloader.cache_used,
+                    cache_used=strat.dataloader.cache_used if isinstance(strat.dataloader, Trade_Offline_Streamer) else None,
                     configs=dict(
                         GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN=GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN,
                         BT_FILL_CONS_TRADES_REQUIRED=BT_FILL_CONS_TRADES_REQUIRED,
@@ -1076,7 +1082,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
 # def get_all_archived_runners():
 #     conn = pool.get_connection()
 #     try:
-#         conn.row_factory = lambda c, r: json.loads(r[0])
+#         conn.row_factory = lambda c, r: orjson.loads(r[0])
 #         c = conn.cursor()
 #         res = c.execute(f"SELECT data FROM runner_header")
 #     finally:
@@ -1108,7 +1114,7 @@ def get_archived_runner_header_byID(id: UUID) -> RunArchive:
 # def get_archived_runner_header_byID(id: UUID):
 #     conn = pool.get_connection()
 #     try:
-#         conn.row_factory = lambda c, r: json.loads(r[0])
+#         conn.row_factory = lambda c, r: orjson.loads(r[0])
 #         c = conn.cursor()
 #         result = c.execute(f"SELECT data FROM runner_header WHERE runner_id='{str(id)}'")
 #         res= result.fetchone()
@@ -1135,7 +1141,7 @@ def insert_archive_header(archeader: RunArchive):
     conn = pool.get_connection()
     try:
         c = conn.cursor()
-        #json_string = json.dumps(archeader, default=json_serial)
+        #json_string = orjson.dumps(archeader, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
         res = c.execute("""
             INSERT INTO runner_header
@@ -1143,7 +1149,7 @@ def insert_archive_header(archeader: RunArchive):
             VALUES
             (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
             """,
-            (str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, json.dumps(archeader.strat_json), json.dumps(archeader.settings), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, json.dumps(archeader.metrics, default=json_serial), archeader.stratvars_toml))
+            (str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml))
         #retry not yet supported for statement format above
         #res = execute_with_retry(c,statement)
@@ -1308,7 +1314,7 @@ def delete_archive_detail_byID(id: UUID):
 def get_all_archived_runners_detail():
     conn = pool.get_connection()
     try:
-        conn.row_factory = lambda c, r: json.loads(r[0])
+        conn.row_factory = lambda c, r: orjson.loads(r[0])
         c = conn.cursor()
         res = c.execute(f"SELECT data FROM runner_detail")
     finally:
@@ -1324,26 +1330,45 @@ def get_all_archived_runners_detail():
 #     return 0, res
 #returns a specific one
-def get_archived_runner_details_byID(id: UUID):
-    conn = pool.get_connection()
-    try:
-        conn.row_factory = lambda c, r: json.loads(r[0])
-        c = conn.cursor()
-        result = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(id)}'")
-        res= result.fetchone()
-    finally:
-        conn.row_factory = None
-        pool.release_connection(conn)
-    if res==None:
-        return -2, "not found"
-    else:
-        return 0, res
+# def get_archived_runner_details_byID(id: UUID):
+#     conn = pool.get_connection()
+#     try:
+#         conn.row_factory = lambda c, r: orjson.loads(r[0])
+#         c = conn.cursor()
+#         result = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(id)}'")
+#         res= result.fetchone()
+#     finally:
+#         conn.row_factory = None
+#         pool.release_connection(conn)
+#     if res==None:
+#         return -2, "not found"
+#     else:
+#         return 0, res
+
+#version allowing return of parsed (json) or raw json-string data
+def get_archived_runner_details_byID(id: UUID, parsed: bool = True):
+    conn = pool.get_connection()
+    try:
+        c = conn.cursor()
+        result = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(id)}'")
+        res = result.fetchone()
+    finally:
+        pool.release_connection(conn)
+    if res is None:
+        return -2, "not found"
+    else:
+        if parsed:
+            return 0, orjson.loads(res[0])
+        else:
+            # Return the JSON string directly
+            return 0, res[0]
 def update_archive_detail(id: UUID, archdetail: RunArchiveDetail):
     conn = pool.get_connection()
     try:
         c = conn.cursor()
-        json_string = json.dumps(archdetail, default=json_serial)
+        json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8')
         statement = "UPDATE runner_detail SET data = ? WHERE runner_id = ?"
         params = (json_string, str(id))
         ##statement = f"UPDATE runner_detail SET data = '{json_string}' WHERE runner_id='{str(id)}'"
@@ -1358,13 +1383,16 @@ def insert_archive_detail(archdetail: RunArchiveDetail):
     conn = pool.get_connection()
     try:
         c = conn.cursor()
-        json_string = json.dumps(archdetail, default=json_serial)
-        statement = f"INSERT INTO runner_detail VALUES ('{str(archdetail.id)}','{json_string}')"
-        res = execute_with_retry(c,statement)
+        json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8')
+        # Use parameterized queries instead of string formatting
+        statement = "INSERT INTO runner_detail VALUES (?, ?)"
+        params = (str(archdetail.id), json_string)
+        res = execute_with_retry(cursor=c, statement=statement, params=params)
         conn.commit()
     finally:
         pool.release_connection(conn)
     return res.rowcount
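
The comment in the hunk above names the motivation: with ? placeholders the driver quotes and escapes values itself, so a JSON payload containing a single quote can no longer break the statement. A minimal contrast, independent of the repo's execute_with_retry helper:

    import sqlite3
    import orjson

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE runner_detail (runner_id TEXT, data TEXT)")

    # A payload with a single quote would break naive f-string SQL such as
    #   f"INSERT INTO runner_detail VALUES ('{rid}','{json_string}')"
    json_string = orjson.dumps({"note": "O'Hara"}).decode("utf-8")

    # With placeholders the driver binds (and escapes) the values itself:
    conn.execute("INSERT INTO runner_detail VALUES (?, ?)", ("run-1", json_string))
    conn.commit()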
 # endregion
 # region TESTLISTS db services
@@ -1380,7 +1408,7 @@ def get_testlists():
     testlists = []
     for row in rows:
         #print(row)
-        testlist = TestList(id=row[0], name=row[1], dates=json.loads(row[2]))
+        testlist = TestList(id=row[0], name=row[1], dates=orjson.loads(row[2]))
         testlists.append(testlist)
     return 0, testlists

View File

@@ -5,7 +5,7 @@ from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
 from datetime import datetime
 import os
 from rich import print
-from fastapi import FastAPI, Depends, HTTPException, status, File, UploadFile
+from fastapi import FastAPI, Depends, HTTPException, status, File, UploadFile, Response
 from fastapi.security import APIKeyHeader
 import uvicorn
 from uuid import UUID
@@ -20,7 +20,7 @@ from v2realbot.enums.enums import Env, Mode
 from typing import Annotated
 import os
 import uvicorn
-import json
+import orjson
 from queue import Queue, Empty
 from threading import Thread
 import asyncio
@@ -329,14 +329,14 @@ def migrate():
             end_positions=row.get('end_positions'),
             end_positions_avgp=row.get('end_positions_avgp'),
             metrics=row.get('open_orders'),
-            #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None,
+            #metrics=orjson.loads(row.get('metrics')) if row.get('metrics') else None,
             stratvars_toml=row.get('stratvars_toml')
         )
     def get_all_archived_runners():
         conn = pool.get_connection()
         try:
-            conn.row_factory = lambda c, r: json.loads(r[0])
+            conn.row_factory = lambda c, r: orjson.loads(r[0])
             c = conn.cursor()
             res = c.execute(f"SELECT data FROM runner_header")
         finally:
@@ -381,7 +381,7 @@ def migrate():
             SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=?
             WHERE runner_id=?
            ''',
-            (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))
+            (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, orjson.dumps(ra.strat_json).decode('utf-8'), orjson.dumps(ra.settings).decode('utf-8'), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, orjson.dumps(ra.metrics).decode('utf-8'), ra.stratvars_toml, str(ra.id)))
         conn.commit()
     finally:
@@ -524,13 +524,23 @@ def _get_all_archived_runners_detail() -> list[RunArchiveDetail]:
     raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
 #get archived runners detail by id
+# @app.get("/archived_runners_detail/{runner_id}", dependencies=[Depends(api_key_auth)])
+# def _get_archived_runner_details_byID(runner_id) -> RunArchiveDetail:
+#     res, set = cs.get_archived_runner_details_byID(runner_id)
+#     if res == 0:
+#         return set
+#     else:
+#         raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id} a {set}")
+
+#variant of the above that skips JSON parsing and returns the JSON string straight from the db
 @app.get("/archived_runners_detail/{runner_id}", dependencies=[Depends(api_key_auth)])
-def _get_archived_runner_details_byID(runner_id) -> RunArchiveDetail:
-    res, set = cs.get_archived_runner_details_byID(runner_id)
+def _get_archived_runner_details_byID(runner_id: UUID):
+    res, data = cs.get_archived_runner_details_byID(id=runner_id, parsed=False)
     if res == 0:
-        return set
+        # Return the raw JSON string as a plain Response
+        return Response(content=data, media_type="application/json")
     else:
-        raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id} a {set}")
+        raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id}. {data}")
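
The rewritten endpoint exploits the fact that the detail row is already a JSON string in SQLite: parsing it with orjson.loads only for FastAPI to re-serialize the result would be wasted work. A condensed sketch of the pass-through pattern (hypothetical route and storage):

    from fastapi import FastAPI, HTTPException, Response

    app = FastAPI()
    FAKE_DB = {"run-1": '{"id": "run-1", "bars": []}'}  # rows already hold JSON text

    @app.get("/details/{runner_id}")
    def get_detail(runner_id: str):
        raw = FAKE_DB.get(runner_id)
        if raw is None:
            raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id}")
        # hand the stored JSON string to the client without parse + re-serialize
        return Response(content=raw, media_type="application/json")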
 #get archived runners detail by id
 @app.get("/archived_runners_log/{runner_id}", dependencies=[Depends(api_key_auth)])
@@ -647,7 +657,7 @@ def create_record(testlist: TestList):
     # Insert the record into the database
     conn = pool.get_connection()
     cursor = conn.cursor()
-    cursor.execute("INSERT INTO test_list (id, name, dates) VALUES (?, ?, ?)", (testlist.id, testlist.name, json.dumps(testlist.dates, default=json_serial)))
+    cursor.execute("INSERT INTO test_list (id, name, dates) VALUES (?, ?, ?)", (testlist.id, testlist.name, orjson.dumps(testlist.dates, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8')))
     conn.commit()
     pool.release_connection(conn)
     return testlist
@@ -685,7 +695,7 @@ def update_testlist(record_id: str, testlist: TestList):
         raise HTTPException(status_code=404, detail='Record not found')
     # Update the record in the database
-    cursor.execute("UPDATE test_list SET name = ?, dates = ? WHERE id = ?", (testlist.name, json.dumps(testlist.dates, default=json_serial), record_id))
+    cursor.execute("UPDATE test_list SET name = ?, dates = ? WHERE id = ?", (testlist.name, orjson.dumps(testlist.dates, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), record_id))
     conn.commit()
     pool.release_connection(conn)
@@ -849,6 +859,11 @@ def get_metadata(model_name: str):
     model_instance = ml.load_model(file=model_name, directory=MODEL_DIR)
     try:
         metadata = model_instance.metadata
+    except AttributeError:
+        metadata = model_instance.__dict__
+        del metadata["scalerX"]
+        del metadata["scalerY"]
+        del metadata["model"]
     except Exception as e:
         metadata = "No Metadata: " + str(e) + format_exc()
     return metadata
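
The AttributeError fallback above serves older model files that predate the metadata attribute. Two caveats worth noting: model_instance.__dict__ is the live attribute dict, so the del calls mutate the loaded model, and del raises KeyError if a key is absent. A defensive variant (assuming the same attribute names):

    def extract_metadata(model_instance):
        try:
            return model_instance.metadata
        except AttributeError:
            # copy so the live instance stays untouched; pop() tolerates missing keys
            meta = dict(model_instance.__dict__)
            for key in ("scalerX", "scalerY", "model"):
                meta.pop(key, None)
            return meta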
@@ -879,7 +894,7 @@ def insert_queue2db():
         c = insert_conn.cursor()
         insert_data = []
         for i in loglist:
-            row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
+            row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME|orjson.OPT_NON_STR_KEYS).decode('utf-8'))
             insert_data.append(row)
         c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
         insert_conn.commit()
@@ -891,7 +906,10 @@ def insert_queue2db():
             insert_queue.put(data)  # Put the data back into the queue for retry
             sleep(1)  # You can adjust the sleep duration
         else:
             raise  # If it's another error, raise it
+    except Exception as e:
+        print("ERROR INSERT LOGQUEUE MODULE:" + str(e) + format_exc())
+        print(data)
 #join: wait for all runners to finish
 for i in cs.db.runners:

View File

@@ -1,4 +1,3 @@
-import json
 import numpy as np
 import matplotlib
 matplotlib.use('Agg')  # Set the Matplotlib backend to 'Agg'

View File

@@ -348,9 +348,9 @@ def generate_trading_report_image(runner_ids: list = None, batch_id: str = None,
     #Plot 8 Cumulative profit - either 1 day or multiple days + add the price development below it
     # Extract the closing prices and times
-    closing_prices = bars['close']
+    closing_prices = bars.get('close', []) if bars is not None else []
     #times = bars['time']  # Assuming this is a list of pandas Timestamp objects
-    times = pd.to_datetime(bars['time'])  # Ensure this is a Pandas datetime series
+    times = pd.to_datetime(bars['time']) if bars is not None else []  # Ensure this is a Pandas datetime series
     # # Plot the closing prices over time
     # axs[0, 4].plot(times, closing_prices, color='blue')
     # axs[0, 4].tick_params(axis='x', rotation=45)  # Rotate date labels if necessary

View File

@@ -916,7 +916,7 @@
 <script src="/static/js/realtimechart.js?v=1.01"></script>
 <script src="/static/js/mytables.js?v=1.01"></script>
 <script src="/static/js/testlist.js?v=1.01"></script>
-<script src="/static/js/ml.js?v=1.01"></script>
+<script src="/static/js/ml.js?v=1.02"></script>
 <script src="/static/js/common.js?v=1.01"></script>
 <script src="/static/js/configform.js?v=1.01"></script>

View File

@@ -110,6 +110,7 @@ $(document).ready(function() {
         },
         error: function(xhr, status, error) {
             $('#metadata-container').html('Error fetching metadata: ' + error + xhr.responseText + status);
+            show_metadata(xhr)
         }
     });
 }
@@ -124,14 +125,14 @@ $(document).ready(function() {
     require(["vs/editor/editor.main"], () => {
         model_editor_json = monaco.editor.create(document.getElementById('toml-editor-container'), {
-            value: response.cfg_toml,
+            value: response.cfg_toml ? response.cfg_toml : JSON.stringify(response,null,4),
             language: 'toml',
             theme: 'tomlTheme-dark',
             automaticLayout: true,
             readOnly: true
         });
         model_editor_python = monaco.editor.create(document.getElementById('python-editor-container'), {
-            value: response.arch_function,
+            value: response.arch_function ? response.arch_function : '',
             language: 'python',
             theme: 'tomlTheme-dark',
             automaticLayout: true,

View File

@@ -1,5 +1,5 @@
 from v2realbot.strategy.base import Strategy
-from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get, get_tick, send_to_telegram
+from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get, get_tick, send_to_telegram, transform_data
 from v2realbot.utils.tlog import tlog, tlog_exception
 from v2realbot.enums.enums import Mode, Order, Account, RecordType, Followup
 #from alpaca.trading.models import TradeUpdate
@@ -7,7 +7,7 @@ from v2realbot.common.model import TradeUpdate
 from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus
 from alpaca.trading.enums import TradeEvent, OrderStatus
 from v2realbot.indicators.indicators import ema
-import json
+import orjson
 from datetime import datetime
 #from rich import print
 from random import randrange
@@ -90,7 +90,7 @@ class StrategyClassicSL(Strategy):
         o: Order = data.order
         signal_name = None
         ##figure out how to make it possible to send the whole Trade and have it serialize
-        self.state.ilog(e="Incoming BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial)))
+        self.state.ilog(e="Incoming BUY notif", msg=o.status, trade=transform_data(data, json_serial))

         if data.event == TradeEvent.FILL or data.event == TradeEvent.PARTIAL_FILL:
@@ -180,7 +180,7 @@ class StrategyClassicSL(Strategy):
         setattr(tradeData, "profit_sum", self.state.profit)
         setattr(tradeData, "signal_name", signal_name)
         setattr(tradeData, "prescribed_trade_id", self.state.vars.pending)
-        #self.state.ilog(f"tradeList updated with profit", tradeData=json.loads(json.dumps(tradeData, default=json_serial)))
+        #self.state.ilog(f"tradeList updated with profit", tradeData=orjson.loads(orjson.dumps(tradeData, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)))
         setattr(tradeData, "rel_profit", rel_profit)
         setattr(tradeData, "rel_profit_cum", rel_profit_cum_calculated)
@@ -233,8 +233,8 @@ class StrategyClassicSL(Strategy):
     async def orderUpdateSell(self, data: TradeUpdate):
-        self.state.ilog(e="Incoming SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial)))
+        self.state.ilog(e="Incoming SELL notif", msg=data.order.status, trade=transform_data(data, json_serial))
         #costs are computed from the average price we hold in positions
         if data.event == TradeEvent.FILL or data.event == TradeEvent.PARTIAL_FILL:

View File

@ -1,11 +1,11 @@
from v2realbot.strategy.base import Strategy from v2realbot.strategy.base import Strategy
from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get, transform_data
from v2realbot.utils.tlog import tlog, tlog_exception from v2realbot.utils.tlog import tlog, tlog_exception
from v2realbot.enums.enums import Mode, Order, Account from v2realbot.enums.enums import Mode, Order, Account
from alpaca.trading.models import TradeUpdate from alpaca.trading.models import TradeUpdate
from alpaca.trading.enums import TradeEvent, OrderStatus from alpaca.trading.enums import TradeEvent, OrderStatus
from v2realbot.indicators.indicators import ema from v2realbot.indicators.indicators import ema
import json import orjson
#from rich import print #from rich import print
from random import randrange from random import randrange
from alpaca.common.exceptions import APIError from alpaca.common.exceptions import APIError
@ -21,7 +21,7 @@ class StrategyOrderLimitVykladaci(Strategy):
async def orderUpdateBuy(self, data: TradeUpdate): async def orderUpdateBuy(self, data: TradeUpdate):
o: Order = data.order o: Order = data.order
##figure out how to send the whole Trade and have it serialized ##figure out how to send the whole Trade and have it serialized
self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial))) self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=transform_data(data, json_serial))
if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED: if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED:
#if an order exists in pendingbuys - drop it #if an order exists in pendingbuys - drop it
@ -73,7 +73,7 @@ class StrategyOrderLimitVykladaci(Strategy):
async def orderUpdateSell(self, data: TradeUpdate): async def orderUpdateSell(self, data: TradeUpdate):
self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial))) self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=transform_data(data, json_serial))
#PROFIT #PROFIT
#profit is computed from TradeUpdate.price and TradeUpdate.qty - the quantity and price just executed #profit is computed from TradeUpdate.price and TradeUpdate.qty - the quantity and price just executed
#costs are computed from the average price held in the positions #costs are computed from the average price held in the positions

View File

@ -5,7 +5,7 @@ from v2realbot.enums.enums import Mode, Order, Account
from alpaca.trading.models import TradeUpdate from alpaca.trading.models import TradeUpdate
from alpaca.trading.enums import TradeEvent, OrderStatus from alpaca.trading.enums import TradeEvent, OrderStatus
from v2realbot.indicators.indicators import ema from v2realbot.indicators.indicators import ema
import json import orjson
#from rich import print #from rich import print
from random import randrange from random import randrange
from alpaca.common.exceptions import APIError from alpaca.common.exceptions import APIError
@ -21,7 +21,7 @@ class StrategyOrderLimitVykladaciNormalized(Strategy):
async def orderUpdateBuy(self, data: TradeUpdate): async def orderUpdateBuy(self, data: TradeUpdate):
o: Order = data.order o: Order = data.order
##figure out how to send the whole Trade and have it serialized ##figure out how to send the whole Trade and have it serialized
self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial))) self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)))
if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED: if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED:
#if an order exists in pendingbuys - drop it #if an order exists in pendingbuys - drop it
@ -73,7 +73,7 @@ class StrategyOrderLimitVykladaciNormalized(Strategy):
async def orderUpdateSell(self, data: TradeUpdate): async def orderUpdateSell(self, data: TradeUpdate):
self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial))) self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)))
#PROFIT #PROFIT
#profit is computed from TradeUpdate.price and TradeUpdate.qty - the quantity and price just executed #profit is computed from TradeUpdate.price and TradeUpdate.qty - the quantity and price just executed
#costs are computed from the average price held in the positions #costs are computed from the average price held in the positions

View File

@ -5,7 +5,7 @@ from v2realbot.enums.enums import Mode, Order, Account, RecordType
from alpaca.trading.models import TradeUpdate from alpaca.trading.models import TradeUpdate
from alpaca.trading.enums import TradeEvent, OrderStatus from alpaca.trading.enums import TradeEvent, OrderStatus
from v2realbot.indicators.indicators import ema from v2realbot.indicators.indicators import ema
import json import orjson
#from rich import print #from rich import print
from random import randrange from random import randrange
from alpaca.common.exceptions import APIError from alpaca.common.exceptions import APIError
@ -21,7 +21,7 @@ class StrategyOrderLimitVykladaciNormalizedMYSELL(Strategy):
async def orderUpdateBuy(self, data: TradeUpdate): async def orderUpdateBuy(self, data: TradeUpdate):
o: Order = data.order o: Order = data.order
##figure out how to send the whole Trade and have it serialized ##figure out how to send the whole Trade and have it serialized
self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial))) self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)))
if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED: if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED:
#if an order exists in pendingbuys - drop it #if an order exists in pendingbuys - drop it
@ -42,7 +42,7 @@ class StrategyOrderLimitVykladaciNormalizedMYSELL(Strategy):
async def orderUpdateSell(self, data: TradeUpdate): async def orderUpdateSell(self, data: TradeUpdate):
self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial))) self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)))
#PROFIT #PROFIT
#profit is computed from TradeUpdate.price and TradeUpdate.qty - the quantity and price just executed #profit is computed from TradeUpdate.price and TradeUpdate.qty - the quantity and price just executed
#costs are computed from the average price held in the positions #costs are computed from the average price held in the positions

View File

@ -23,7 +23,7 @@ from v2realbot.backtesting.backtester import Backtester
from v2realbot.common.model import TradeUpdate from v2realbot.common.model import TradeUpdate
from alpaca.trading.enums import TradeEvent, OrderStatus from alpaca.trading.enums import TradeEvent, OrderStatus
from threading import Event, current_thread from threading import Event, current_thread
import json import orjson
from uuid import UUID from uuid import UUID
from rich import print as printnow from rich import print as printnow
from collections import defaultdict from collections import defaultdict
@ -660,7 +660,7 @@ class Strategy:
#send current values to Realtime display on frontend #send current values to Realtime display on frontend
#all datetime values are converted to timestamp #all datetime values are converted to timestamp
if self.rtqueue is not None: if self.rtqueue is not None:
self.rtqueue.put(json.dumps(rt_out, default=json_serial)) self.rtqueue.put(orjson.dumps(rt_out, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))
print("RTQUEUE", self.rtqueue) print("RTQUEUE", self.rtqueue)
#cleaning iterlog list #cleaning iterlog list

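One behavioral difference worth flagging in this hunk: json.dumps returns str, while orjson.dumps returns bytes, so whatever consumes rtqueue now receives bytes unless it decodes them. A sketch of decoding at the boundary, assuming the frontend expects text frames (to_rt_payload is a hypothetical helper name; json_serial is the project's serializer):

import orjson
from v2realbot.utils.utils import json_serial

def to_rt_payload(rt_out: dict) -> str:
    # orjson.dumps yields bytes, unlike the str json.dumps used to produce;
    # OPT_PASSTHROUGH_DATETIME routes datetimes through json_serial instead
    # of orjson's native RFC 3339 formatting
    raw = orjson.dumps(rt_out, default=json_serial,
                       option=orjson.OPT_PASSTHROUGH_DATETIME)
    return raw.decode("utf-8")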
View File

@ -9,7 +9,7 @@ from v2realbot.config import KW
from uuid import uuid4 from uuid import uuid4
from datetime import datetime from datetime import datetime
#import random #import random
import json import orjson
import numpy as np import numpy as np
#from icecream import install, ic #from icecream import install, ic
from rich import print as printanyway from rich import print as printanyway

View File

@ -9,7 +9,7 @@ from v2realbot.config import KW
from uuid import uuid4 from uuid import uuid4
from datetime import datetime from datetime import datetime
#import random #import random
import json import orjson
import numpy as np import numpy as np
#from icecream import install, ic #from icecream import install, ic
from rich import print as printanyway from rich import print as printanyway

View File

@ -8,7 +8,7 @@ from uuid import uuid4
from datetime import datetime from datetime import datetime
from v2realbot.strategyblocks.indicators.helpers import value_or_indicator from v2realbot.strategyblocks.indicators.helpers import value_or_indicator
#import random #import random
import json import orjson
import numpy as np import numpy as np
#from icecream import install, ic #from icecream import install, ic
from rich import print as printanyway from rich import print as printanyway

View File

@ -9,8 +9,8 @@ from v2realbot.strategyblocks.indicators.RSI import populate_dynamic_RSI_indicat
from v2realbot.strategyblocks.indicators.natr import populate_dynamic_natr_indicator from v2realbot.strategyblocks.indicators.natr import populate_dynamic_natr_indicator
from v2realbot.strategyblocks.indicators.atr import populate_dynamic_atr_indicator from v2realbot.strategyblocks.indicators.atr import populate_dynamic_atr_indicator
import numpy as np import numpy as np
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists, transform_data
import json import orjson
def populate_all_indicators(data, state: StrategyState): def populate_all_indicators(data, state: StrategyState):
@ -55,7 +55,7 @@ def populate_all_indicators(data, state: StrategyState):
#TODO this log belongs rather into the next of classic SL - it is tied to the strategy type #TODO this log belongs rather into the next of classic SL - it is tied to the strategy type
#TODO revisit this, too much JSON-encoding there and back - #TODO revisit this, too much JSON-encoding there and back -
#PERF PROBLEM #PERF PROBLEM
state.ilog(lvl=1,e="ENTRY", msg=f"LP:{lp} P:{state.positions}/{round(float(state.avgp),3)} SL:{state.vars.activeTrade.stoploss_value if state.vars.activeTrade is not None else None} GP:{state.vars.activeTrade.goal_price if state.vars.activeTrade is not None else None} profit:{round(float(state.profit),2)} profit_rel:{round(np.sum(state.rel_profit_cum),6) if len(state.rel_profit_cum)>0 else 0} Trades:{len(state.tradeList)} pend:{state.vars.pending}", rel_profit_cum=str(state.rel_profit_cum), activeTrade=json.loads(json.dumps(state.vars.activeTrade, default=json_serial)), prescribedTrades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial)), pending=str(state.vars.pending)) state.ilog(lvl=1,e="ENTRY", msg=f"LP:{lp} P:{state.positions}/{round(float(state.avgp),3)} SL:{state.vars.activeTrade.stoploss_value if state.vars.activeTrade is not None else None} GP:{state.vars.activeTrade.goal_price if state.vars.activeTrade is not None else None} profit:{round(float(state.profit),2)} profit_rel:{round(np.sum(state.rel_profit_cum),6) if len(state.rel_profit_cum)>0 else 0} Trades:{len(state.tradeList)} pend:{state.vars.pending}", rel_profit_cum=str(state.rel_profit_cum), activeTrade=transform_data(state.vars.activeTrade, json_serial), prescribedTrades=transform_data(state.vars.prescribedTrades, json_serial), pending=str(state.vars.pending))
#steps for CONFIRMED BAR only #steps for CONFIRMED BAR only
if conf_bar == 1: if conf_bar == 1:

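The #PERF PROBLEM note above is what the transform_data swap targets: the ENTRY log previously JSON-encoded the active and prescribed trades and immediately parsed them back on every iteration. A rough way to measure the saving, as a sketch (assumes json_serial raises TypeError for leaves it has no serializer for, which transform_data then keeps as-is; the payload is hypothetical):

import json
import timeit
from datetime import datetime
from v2realbot.utils.utils import json_serial, transform_data

payload = {"ts": datetime.now(), "vals": list(range(500))}  # hypothetical trade-like blob

roundtrip = timeit.timeit(lambda: json.loads(json.dumps(payload, default=json_serial)), number=1000)
direct = timeit.timeit(lambda: transform_data(payload, json_serial), number=1000)
print(f"json round-trip: {roundtrip:.3f}s, transform_data: {direct:.3f}s")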
View File

@ -9,7 +9,7 @@ from v2realbot.config import KW
from uuid import uuid4 from uuid import uuid4
from datetime import datetime from datetime import datetime
#import random #import random
import json import orjson
import numpy as np import numpy as np
#from icecream import install, ic #from icecream import install, ic
from rich import print as printanyway from rich import print as printanyway

View File

@ -10,7 +10,7 @@ from v2realbot.config import KW, MODEL_DIR
from uuid import uuid4 from uuid import uuid4
from datetime import datetime from datetime import datetime
#import random #import random
import json import orjson
import numpy as np import numpy as np
#from icecream import install, ic #from icecream import install, ic
from rich import print as printanyway from rich import print as printanyway

View File

@ -10,7 +10,7 @@ from uuid import uuid4
from datetime import datetime from datetime import datetime
from v2realbot.strategyblocks.indicators.helpers import value_or_indicator from v2realbot.strategyblocks.indicators.helpers import value_or_indicator
#import random #import random
import json import orjson
import numpy as np import numpy as np
#from icecream import install, ic #from icecream import install, ic
from rich import print as printanyway from rich import print as printanyway

View File

@ -1,9 +1,9 @@
from v2realbot.strategy.base import StrategyState from v2realbot.strategy.base import StrategyState
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus
from v2realbot.utils.utils import zoneNY, json_serial from v2realbot.utils.utils import zoneNY, json_serial,transform_data
from datetime import datetime from datetime import datetime
#import random #import random
import json import orjson
from v2realbot.strategyblocks.activetrade.helpers import insert_SL_history, get_default_sl_value, normalize_tick, get_profit_target_price from v2realbot.strategyblocks.activetrade.helpers import insert_SL_history, get_default_sl_value, normalize_tick, get_profit_target_price
from v2realbot.strategyblocks.indicators.helpers import value_or_indicator from v2realbot.strategyblocks.indicators.helpers import value_or_indicator
@ -14,12 +14,12 @@ def execute_prescribed_trades(state: StrategyState, data):
if state.vars.activeTrade is not None or len(state.vars.prescribedTrades) == 0: if state.vars.activeTrade is not None or len(state.vars.prescribedTrades) == 0:
return return
#evaluate long (price/market) #evaluate long (price/market)
state.ilog(lvl=1,e="evaluating prescr trades", trades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial))) state.ilog(lvl=1,e="evaluating prescr trades", trades=transform_data(state.vars.prescribedTrades, json_serial))
for trade in state.vars.prescribedTrades: for trade in state.vars.prescribedTrades:
if trade.status == TradeStatus.READY and trade.direction == TradeDirection.LONG and (trade.entry_price is None or trade.entry_price >= data['close']): if trade.status == TradeStatus.READY and trade.direction == TradeDirection.LONG and (trade.entry_price is None or trade.entry_price >= data['close']):
trade.status = TradeStatus.ACTIVATED trade.status = TradeStatus.ACTIVATED
trade.last_update = datetime.fromtimestamp(state.time).astimezone(zoneNY) trade.last_update = datetime.fromtimestamp(state.time).astimezone(zoneNY)
state.ilog(lvl=1,e=f"evaluated LONG", trade=json.loads(json.dumps(trade, default=json_serial)), prescrTrades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial))) state.ilog(lvl=1,e=f"evaluated LONG", trade=transform_data(trade, json_serial), prescrTrades=transform_data(state.vars.prescribedTrades, json_serial))
state.vars.activeTrade = trade state.vars.activeTrade = trade
state.vars.last_buy_index = data["index"] state.vars.last_buy_index = data["index"]
state.vars.last_in_index = data["index"] state.vars.last_in_index = data["index"]
@ -28,7 +28,7 @@ def execute_prescribed_trades(state: StrategyState, data):
if not state.vars.activeTrade: if not state.vars.activeTrade:
for trade in state.vars.prescribedTrades: for trade in state.vars.prescribedTrades:
if trade.status == TradeStatus.READY and trade.direction == TradeDirection.SHORT and (trade.entry_price is None or trade.entry_price <= data['close']): if trade.status == TradeStatus.READY and trade.direction == TradeDirection.SHORT and (trade.entry_price is None or trade.entry_price <= data['close']):
state.ilog(lvl=1,e=f"evaluaed SHORT", trade=json.loads(json.dumps(trade, default=json_serial)), prescTrades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial))) state.ilog(lvl=1,e=f"evaluaed SHORT", trade=transform_data(trade, json_serial), prescrTrades=transform_data(state.vars.prescribedTrades, json_serial))
trade.status = TradeStatus.ACTIVATED trade.status = TradeStatus.ACTIVATED
trade.last_update = datetime.fromtimestamp(state.time).astimezone(zoneNY) trade.last_update = datetime.fromtimestamp(state.time).astimezone(zoneNY)
state.vars.activeTrade = trade state.vars.activeTrade = trade
@ -39,7 +39,7 @@ def execute_prescribed_trades(state: StrategyState, data):
#sending ORDER + SETTING STOPLOSS (hardcoded for now) #sending ORDER + SETTING STOPLOSS (hardcoded for now)
if state.vars.activeTrade: if state.vars.activeTrade:
if state.vars.activeTrade.direction == TradeDirection.LONG: if state.vars.activeTrade.direction == TradeDirection.LONG:
state.ilog(lvl=1,e="odesilame LONG ORDER", trade=json.loads(json.dumps(state.vars.activeTrade, default=json_serial))) state.ilog(lvl=1,e="odesilame LONG ORDER", trade=transform_data(state.vars.activeTrade, json_serial))
if state.vars.activeTrade.size is not None: if state.vars.activeTrade.size is not None:
size = state.vars.activeTrade.size size = state.vars.activeTrade.size
else: else:
@ -71,7 +71,7 @@ def execute_prescribed_trades(state: StrategyState, data):
insert_SL_history(state) insert_SL_history(state)
state.vars.pending = state.vars.activeTrade.id state.vars.pending = state.vars.activeTrade.id
elif state.vars.activeTrade.direction == TradeDirection.SHORT: elif state.vars.activeTrade.direction == TradeDirection.SHORT:
state.ilog(lvl=1,e="odesilame SHORT ORDER",trade=json.loads(json.dumps(state.vars.activeTrade, default=json_serial))) state.ilog(lvl=1,e="odesilame SHORT ORDER", trade=transform_data(state.vars.activeTrade, json_serial))
if state.vars.activeTrade.size is not None: if state.vars.activeTrade.size is not None:
size = state.vars.activeTrade.size size = state.vars.activeTrade.size
else: else:

View File

@ -13,7 +13,7 @@ from collections import defaultdict
from pandas import to_datetime from pandas import to_datetime
from msgpack.ext import Timestamp from msgpack.ext import Timestamp
def convert_daily_bars(daily_bars): def convert_historical_bars(daily_bars):
"""Converts a list of daily bars into a dictionary with the specified keys. """Converts a list of daily bars into a dictionary with the specified keys.
Args: Args:
@ -89,4 +89,6 @@ def get_historical_bars(symbol: str, time_from: datetime, time_to: datetime, tim
bar_request = StockBarsRequest(symbol_or_symbols=symbol,timeframe=timeframe, start=time_from, end=time_to, feed=DataFeed.SIP) bar_request = StockBarsRequest(symbol_or_symbols=symbol,timeframe=timeframe, start=time_from, end=time_to, feed=DataFeed.SIP)
bars: BarSet = stock_client.get_stock_bars(bar_request) bars: BarSet = stock_client.get_stock_bars(bar_request)
#print("puvodni bars", bars["BAC"]) #print("puvodni bars", bars["BAC"])
return convert_daily_bars(bars[symbol]) if bars[symbol][0] is None:
return None
return convert_historical_bars(bars[symbol])

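With this guard, get_historical_bars can now return None instead of a converted bar dictionary, so callers should check before indexing into the result. A usage sketch under those assumptions (symbol and window are examples; TimeFrame is alpaca-py's, and the helper is assumed importable from v2realbot.utils.historicals):

from datetime import datetime
from alpaca.data.timeframe import TimeFrame
from v2realbot.utils.historicals import get_historical_bars

bars = get_historical_bars("BAC", datetime(2023, 12, 1), datetime(2023, 12, 8), TimeFrame.Day)
if bars is None:
    # nothing came back for the window (e.g. a holiday span); skip or widen the range
    ...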
View File

@ -1,7 +1,7 @@
from v2realbot.config import DATA_DIR from v2realbot.config import DATA_DIR
from v2realbot.utils.utils import json_serial from v2realbot.utils.utils import json_serial
from uuid import UUID, uuid4 from uuid import UUID, uuid4
import json import orjson
from datetime import datetime from datetime import datetime
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
from v2realbot.common.db import pool, insert_queue from v2realbot.common.db import pool, insert_queue
@ -9,7 +9,7 @@ import sqlite3
#by default returns an array of tuples, where the tuple members are the columns #by default returns an array of tuples, where the tuple members are the columns
#conn.row_factory = lambda c, r: json.loads(r[0]) #conn.row_factory = lambda c, r: orjson.loads(r[0])
#conn.row_factory = lambda c, r: r[0] #conn.row_factory = lambda c, r: r[0]
#conn.row_factory = sqlite3.Row #conn.row_factory = sqlite3.Row
@ -32,7 +32,7 @@ def insert_log(runner_id: UUID, time: float, logdict: dict):
conn = pool.get_connection() conn = pool.get_connection()
try: try:
c = conn.cursor() c = conn.cursor()
json_string = json.dumps(logdict, default=json_serial) json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8')
res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string]) res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
conn.commit() conn.commit()
finally: finally:
@ -49,7 +49,7 @@ def insert_log_multiple_queue(runner_id:UUID, loglist: list):
# c = conn.cursor() # c = conn.cursor()
# insert_data = [] # insert_data = []
# for i in loglist: # for i in loglist:
# row = (str(runner_id), i["time"], json.dumps(i, default=json_serial)) # row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))
# insert_data.append(row) # insert_data.append(row)
# c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data) # c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
# conn.commit() # conn.commit()
@ -59,7 +59,7 @@ def insert_log_multiple_queue(runner_id:UUID, loglist: list):
def get_log_window(runner_id: UUID, timestamp_from: float = 0, timestamp_to: float = 9682851459): def get_log_window(runner_id: UUID, timestamp_from: float = 0, timestamp_to: float = 9682851459):
conn = pool.get_connection() conn = pool.get_connection()
try: try:
conn.row_factory = lambda c, r: json.loads(r[0]) conn.row_factory = lambda c, r: orjson.loads(r[0])
c = conn.cursor() c = conn.cursor()
res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={timestamp_from} AND time <={timestamp_to} ORDER BY time") res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={timestamp_from} AND time <={timestamp_to} ORDER BY time")
finally: finally:

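Because orjson.dumps produces bytes, insert_log decodes to UTF-8 so runner_logs.data stays a TEXT column, and the orjson.loads row factory accepts the stored str on the way back out. A self-contained sketch of that round trip (in-memory DB and a sample record, not the project's connection pool):

import sqlite3
import orjson

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE runner_logs (runner_id TEXT, time REAL, data TEXT)")

record = {"event": "ENTRY", "profit": 1.25}
# decode the bytes from orjson so sqlite stores TEXT, matching the
# orjson.loads(r[0]) row factory used on the read path
conn.execute("INSERT INTO runner_logs VALUES (?,?,?)",
             ("runner-1", 0.0, orjson.dumps(record).decode("utf-8")))
conn.commit()

conn.row_factory = lambda c, r: orjson.loads(r[0])
print(conn.execute("SELECT data FROM runner_logs").fetchall())
# -> [{'event': 'ENTRY', 'profit': 1.25}]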
View File

@ -329,6 +329,36 @@ def send_to_telegram(message):
except Exception as e: except Exception as e:
print(e) print(e)
def transform_data(data, transform_function):
"""
Recursively transform the data in a dictionary, list of dictionaries, or nested dictionaries
using a specified transformation function.
This function applies the transformation function to each value in the data structure.
It handles nested dictionaries and lists of dictionaries.
Parameters:
data (dict or list): The dictionary, list of dictionaries, or nested dictionary to be transformed.
transform_function (function): The function to be applied to each value in the data. This function
should accept a single value and return a transformed value.
Returns:
dict or list: The transformed dictionary, list of dictionaries, or nested dictionary with each value
processed by the transform_function.
Notes:
If transform_function raises TypeError for a value, the original value is kept instead of the error propagating.
"""
if isinstance(data, dict):
return {key: transform_data(value, transform_function) for key, value in data.items()}
elif isinstance(data, list):
return [transform_data(element, transform_function) for element in data]
else:
try:
return transform_function(data)
except TypeError:
return data
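A quick usage example of the helper above paired with json_serial (hypothetical payload; relies on json_serial raising TypeError for types it has no serializer for, which the try/except keeps untouched):

from datetime import datetime
from uuid import uuid4

payload = {"id": uuid4(),
           "filled_at": datetime.now(),
           "fills": [{"qty": 10, "price": 32.5}]}

plain = transform_data(payload, json_serial)
# 'id' and 'filled_at' are converted by json_serial; the plain ints and
# floats raise TypeError inside json_serial and pass through unchanged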
#OPTIMIZED BY BARD #OPTIMIZED BY BARD
def json_serial(obj): def json_serial(obj):
"""JSON serializer for objects not serializable by default json code """JSON serializer for objects not serializable by default json code
@ -341,6 +371,7 @@ def json_serial(obj):
UUID: lambda obj: str(obj), UUID: lambda obj: str(obj),
Enum: lambda obj: str(obj), Enum: lambda obj: str(obj),
np.int64: lambda obj: int(obj), np.int64: lambda obj: int(obj),
np.float64: lambda obj: float(obj),
Order: lambda obj: obj.__dict__, Order: lambda obj: obj.__dict__,
TradeUpdate: lambda obj: obj.__dict__, TradeUpdate: lambda obj: obj.__dict__,
btOrder: lambda obj: obj.__dict__, btOrder: lambda obj: obj.__dict__,