From 232f32467eb3ea1578665d4504da3bfc977b838b Mon Sep 17 00:00:00 2001 From: David Brazda Date: Sun, 10 Dec 2023 15:02:25 +0100 Subject: [PATCH] gui model metadata view + backend json optimalization orjson --- requirements.txt | 19 +++-- setup.py | 2 +- testy/archive/alpacasnapshot.py | 4 +- testy/ascending.py | 4 +- testy/migrace/migracerunnerheader.py | 10 +-- testy/migrace/migracesql.py | 20 ++--- testy/ml/test.py | 2 +- testy/testSqlite3.py | 20 ++--- testy/testStore.py | 2 +- testy/testTIMIT | 4 +- testy/tinyDBselect.py | 4 +- testy/tinyFLUXtest.py | 4 +- testy/tinydbTest.py | 4 +- testy/websocketFastApi.py | 4 +- testy/websocketFastApiConnManager.py | 4 +- v2realbot/common/db.py | 10 +-- v2realbot/controller/services.py | 78 +++++++++++++------ v2realbot/main.py | 44 +++++++---- v2realbot/reporting/metricstools.py | 1 - v2realbot/reporting/metricstoolsimage.py | 4 +- v2realbot/static/index.html | 2 +- v2realbot/static/js/ml.js | 5 +- v2realbot/strategy/StrategyClassicSL.py | 12 +-- .../strategy/StrategyOrderLimitVykladaci.py | 8 +- .../StrategyOrderLimitVykladaciNormalized.py | 6 +- ...tegyOrderLimitVykladaciNormalizedMYSELL.py | 6 +- v2realbot/strategy/base.py | 4 +- .../activetrade/close/close_position.py | 2 +- .../activetrade/close/eod_exit.py | 2 +- .../strategyblocks/activetrade/helpers.py | 2 +- .../indicators/indicators_hub.py | 6 +- .../strategyblocks/inits/init_directives.py | 2 +- .../strategyblocks/inits/init_indicators.py | 2 +- .../strategyblocks/newtrade/conditions.py | 2 +- .../newtrade/prescribedtrades.py | 14 ++-- v2realbot/utils/historicals.py | 6 +- v2realbot/utils/ilog.py | 10 +-- v2realbot/utils/utils.py | 31 ++++++++ 38 files changed, 224 insertions(+), 142 deletions(-) diff --git a/requirements.txt b/requirements.txt index 22fb813..69509be 100644 --- a/requirements.txt +++ b/requirements.txt @@ -12,6 +12,7 @@ better-exceptions==0.3.3 bleach==6.0.0 blinker==1.5 cachetools==5.3.0 +CD==1.1.0 certifi==2022.12.7 chardet==5.1.0 charset-normalizer==3.0.1 @@ -58,7 +59,7 @@ Jinja2==3.1.2 joblib==1.3.2 jsonschema==4.17.3 jupyterlab-widgets==3.0.9 -keras==2.13.1 +keras==2.15.0 kiwisolver==1.4.4 libclang==16.0.6 llvmlite==0.39.1 @@ -68,7 +69,8 @@ MarkupSafe==2.1.2 matplotlib==3.8.2 matplotlib-inline==0.1.6 mdurl==0.1.2 -mlroom @ git+https://github.com/drew2323/mlroom.git@967b1e3b5071854910ea859eca68bf0c3e67f951 +ml-dtypes==0.2.0 +mlroom @ git+https://github.com/drew2323/mlroom.git@768c88348a0bd24c244a8720c67abb20fcb1403e mplfinance==0.12.10b0 msgpack==1.0.4 mypy-extensions==1.0.0 @@ -77,6 +79,7 @@ numba==0.56.4 numpy==1.23.5 oauthlib==3.2.2 opt-einsum==3.3.0 +orjson==3.9.10 packaging==23.0 pandas==1.5.3 param==1.13.0 @@ -114,7 +117,7 @@ requests-oauthlib==1.3.1 rich==13.3.1 rsa==4.9 schedule==1.2.1 -scikit-learn==1.3.1 +scikit-learn==1.3.2 scipy==1.11.2 seaborn==0.12.2 semver==2.13.0 @@ -128,10 +131,10 @@ streamlit==1.20.0 structlog==23.1.0 TA-Lib==0.4.28 tenacity==8.2.2 -tensorboard==2.13.0 +tensorboard==2.15.1 tensorboard-data-server==0.7.1 -tensorflow==2.13.0 -tensorflow-estimator==2.13.0 +tensorflow==2.15.0 +tensorflow-estimator==2.15.0 tensorflow-io-gcs-filesystem==0.34.0 termcolor==2.3.0 threadpoolctl==3.2.0 @@ -149,12 +152,12 @@ tzdata==2023.2 tzlocal==4.3 urllib3==1.26.14 uvicorn==0.21.1 --e git+https://github.com/drew2323/v2trading.git@d38bf0600fbadbffba78ae23625eaecd1febc7f4#egg=v2realbot +-e git+https://github.com/drew2323/v2trading.git@523905ece6d99bf48a8952d39ced6a13f3b9a84e#egg=v2realbot validators==0.20.0 wcwidth==0.2.9 webencodings==0.5.1 websockets==10.4 
Werkzeug==2.2.3 widgetsnbextension==4.0.9 -wrapt==1.15.0 +wrapt==1.14.1 zipp==3.15.0 diff --git a/setup.py b/setup.py index 7e3d135..7734a92 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ from setuptools import find_packages, setup setup(name='v2realbot', - version='0.9', + version='0.91', description='Realbot trader', author='David Brazda', author_email='davidbrazda61@gmail.com', diff --git a/testy/archive/alpacasnapshot.py b/testy/archive/alpacasnapshot.py index b7d360c..043efca 100644 --- a/testy/archive/alpacasnapshot.py +++ b/testy/archive/alpacasnapshot.py @@ -23,7 +23,7 @@ from rich import print from collections import defaultdict from pandas import to_datetime from msgpack.ext import Timestamp -from v2realbot.utils.historicals import convert_daily_bars +from v2realbot.utils.historicals import convert_historical_bars def get_last_close(): pass @@ -38,7 +38,7 @@ def get_historical_bars(symbol: str, time_from: datetime, time_to: datetime, tim bars: BarSet = stock_client.get_stock_bars(bar_request) print("puvodni bars", bars["BAC"]) print(bars) - return convert_daily_bars(bars[symbol]) + return convert_historical_bars(bars[symbol]) #v initu plnime pozadovana historicka data do historicals[] diff --git a/testy/ascending.py b/testy/ascending.py index 3980cd9..c7cf49d 100644 --- a/testy/ascending.py +++ b/testy/ascending.py @@ -1,6 +1,6 @@ -import json +import orjson a = """[{"time":1699021800.019723,"value":28.325},{"time":1699021830.05652,"value":28.315},{"time":1699021860.305877,"value":28.31},{"time":1699021890.396815,"value":28.305},{"time":1699021920.767856,"value":28.295},{"time":1699021951.04426,"value":28.31},{"time":1699021982.011999,"value":28.32},{"time":1699022012.571608,"value":28.3},{"time":1699022042.970093,"value":28.32},{"time":1699022073.277922,"value":28.335},{"time":1699022103.336126,"value":28.3401},{"time":1699022134.041774,"value":28.33},{"time":1699022166.753206,"value":28.31},{"time":1699022197.699501,"value":28.305},{"time":1699022228.250299,"value":28.3033},{"time":1699022258.417441,"value":28.305},{"time":1699022289.977942,"value":28.285},{"time":1699022320.095033,"value":28.3168},{"time":1699022353.873671,"value":28.31},{"time":1699022385.189729,"value":28.3383},{"time":1699022415.544463,"value":28.345},{"time":1699022445.59419,"value":28.32},{"time":1699022476.329711,"value":28.33},{"time":1699022512.414328,"value":28.315},{"time":1699022542.768232,"value":28.32},{"time":1699022574.459283,"value":28.28},{"time":1699022606.940348,"value":28.295},{"time":1699022637.404558,"value":28.29},{"time":1699022667.65586,"value":28.305},{"time":1699022700.075183,"value":28.325},{"time":1699022730.456352,"value":28.32},{"time":1699022761.189056,"value":28.335},{"time":1699022791.503085,"value":28.34},{"time":1699022822.939517,"value":28.33},{"time":1699022853.489484,"value":28.355},{"time":1699022884.086032,"value":28.345},{"time":1699022915.034083,"value":28.3599},{"time":1699022945.090479,"value":28.345},{"time":1699022975.854524,"value":28.3501},{"time":1699023007.06636,"value":28.355},{"time":1699023037.257665,"value":28.379},{"time":1699023067.38282,"value":28.3775},{"time":1699023098.046355,"value":28.355},{"time":1699023129.624668,"value":28.343},{"time":1699023159.831437,"value":28.37},{"time":1699023191.562167,"value":28.3503},{"time":1699023222.129191,"value":28.315},{"time":1699023252.141027,"value":28.335},{"time":1699023282.732192,"value":28.325},{"time":1699023314.273008,"value":28.325},{"time":1699023344.476307,"value":28.3316},{"time":1699023375.1288
45,"value":28.38},{"time":1699023405.654461,"value":28.39},{"time":1699023436.298197,"value":28.395},{"time":1699023467.849952,"value":28.39},{"time":1699023498.607222,"value":28.4},{"time":1699023528.88649,"value":28.41},{"time":1699023559.248959,"value":28.405},{"time":1699023590.036323,"value":28.41},{"time":1699023620.057996,"value":28.43},{"time":1699023650.174218,"value":28.45},{"time":1699023680.550277,"value":28.445},{"time":1699023710.8486,"value":28.435},{"time":1699023742.003348,"value":28.425},{"time":1699023773.361168,"value":28.425},{"time":1699023803.73621,"value":28.4418},{"time":1699023835.004908,"value":28.425},{"time":1699023865.310641,"value":28.42},{"time":1699023895.496276,"value":28.44},{"time":1699023925.818644,"value":28.473},{"time":1699023957.306206,"value":28.4818},{"time":1699023987.438431,"value":28.46},{"time":1699024020.011762,"value":28.46},{"time":1699024050.197554,"value":28.45},{"time":1699024081.520005,"value":28.43},{"time":1699024114.582807,"value":28.445},{"time":1699024146.610304,"value":28.4301},{"time":1699024176.96372,"value":28.425},{"time":1699024209.39291,"value":28.4301},{"time":1699024241.215645,"value":28.42},{"time":1699024271.411628,"value":28.41},{"time":1699024301.419847,"value":28.41},{"time":1699024331.490853,"value":28.425},{"time":1699024361.964565,"value":28.43},{"time":1699024393.914504,"value":28.41},{"time":1699024423.945862,"value":28.405},{"time":1699024458.32594,"value":28.43},{"time":1699024489.974228,"value":28.425},{"time":1699024520.388356,"value":28.45},{"time":1699024550.743977,"value":28.46},{"time":1699024581.591713,"value":28.44},{"time":1699024611.606568,"value":28.41},{"time":1699024642.212998,"value":28.415},{"time":1699024674.862401,"value":28.435},{"time":1699024705.509538,"value":28.435},{"time":1699024735.983247,"value":28.45},{"time":1699024766.575683,"value":28.445},{"time":1699024797.695507,"value":28.445},{"time":1699024828.083169,"value":28.44},{"time":1699024858.758286,"value":28.45},{"time":1699024888.794182,"value":28.45},{"time":1699024919.277686,"value":28.44},{"time":1699024950.819448,"value":28.445},{"time":1699024980.977291,"value":28.455},{"time":1699025011.288962,"value":28.485},{"time":1699025041.837843,"value":28.4786},{"time":1699025072.220646,"value":28.49},{"time":1699025102.310644,"value":28.4899},{"time":1699025132.750789,"value":28.495},{"time":1699025163.385008,"value":28.515},{"time":1699025194.110701,"value":28.52},{"time":1699025225.140705,"value":28.53},{"time":1699025255.623126,"value":28.525},{"time":1699025286.32314,"value":28.525},{"time":1699025318.177313,"value":28.54},{"time":1699025348.351411,"value":28.52},{"time":1699025380.699046,"value":28.46},{"time":1699025411.317513,"value":28.46},{"time":1699025441.334031,"value":28.495},{"time":1699025477.224306,"value":28.51},{"time":1699025508.461651,"value":28.5},{"time":1699025539.933449,"value":28.495},{"time":1699025570.844385,"value":28.505},{"time":1699025601.852059,"value":28.485},{"time":1699025631.875163,"value":28.49},{"time":1699025664.199609,"value":28.48},{"time":1699025697.732327,"value":28.495},{"time":1699025728.404528,"value":28.49},{"time":1699025760.023252,"value":28.485},{"time":1699025791.299449,"value":28.485},{"time":1699025821.362548,"value":28.495},{"time":1699025853.135535,"value":28.49},{"time":1699025884.363134,"value":28.485},{"time":1699025914.712172,"value":28.475},{"time":1699025945.858376,"value":28.495},{"time":1699025979.710519,"value":28.51},{"time":1699026010.889225,"value":28.505},{"time":16990
26043.756471,"value":28.505},{"time":1699026074.317655,"value":28.515},{"time":1699026104.755859,"value":28.505},{"time":1699026135.691582,"value":28.51},{"time":1699026165.941072,"value":28.5299},{"time":1699026196.243042,"value":28.515},{"time":1699026226.244767,"value":28.515},{"time":1699026256.991419,"value":28.53},{"time":1699026287.152732,"value":28.515},{"time":1699026317.249027,"value":28.5192},{"time":1699026348.015012,"value":28.52},{"time":1699026381.142881,"value":28.515},{"time":1699026412.066925,"value":28.53},{"time":1699026442.250968,"value":28.52},{"time":1699026475.363941,"value":28.535},{"time":1699026505.738539,"value":28.5},{"time":1699026535.932925,"value":28.4829},{"time":1699026567.468561,"value":28.49},{"time":1699026597.92468,"value":28.5},{"time":1699026628.0574,"value":28.51},{"time":1699026658.116916,"value":28.51},{"time":1699026690.27825,"value":28.515},{"time":1699026720.330602,"value":28.515},{"time":1699026751.926044,"value":28.5101},{"time":1699026782.80328,"value":28.53},{"time":1699026813.872945,"value":28.53},{"time":1699026843.911938,"value":28.535},{"time":1699026874.21488,"value":28.515},{"time":1699026904.435069,"value":28.53},{"time":1699026934.857691,"value":28.53},{"time":1699026965.892694,"value":28.5299},{"time":1699026996.17253,"value":28.5256},{"time":1699027026.770303,"value":28.525},{"time":1699027058.74448,"value":28.52},{"time":1699027088.784821,"value":28.53},{"time":1699027121.945087,"value":28.54},{"time":1699027152.051352,"value":28.53},{"time":1699027182.120519,"value":28.54},{"time":1699027212.641808,"value":28.535},{"time":1699027243.815996,"value":28.54},{"time":1699027274.439364,"value":28.51},{"time":1699027305.789608,"value":28.51},{"time":1699027336.811255,"value":28.49},{"time":1699027368.509487,"value":28.5101},{"time":1699027398.683574,"value":28.53},{"time":1699027429.065187,"value":28.51},{"time":1699027459.517296,"value":28.5031},{"time":1699027489.744654,"value":28.485},{"time":1699027520.114365,"value":28.4987},{"time":1699027550.612786,"value":28.49},{"time":1699027580.67624,"value":28.49},{"time":1699027611.978115,"value":28.4831},{"time":1699027650.886537,"value":28.47},{"time":1699027681.180903,"value":28.4691},{"time":1699027711.893932,"value":28.465},{"time":1699027742.449302,"value":28.48},{"time":1699027775.144211,"value":28.485},{"time":1699027805.996057,"value":28.4875},{"time":1699027836.365314,"value":28.49},{"time":1699027867.176529,"value":28.485},{"time":1699027897.730371,"value":28.495},{"time":1699027929.20047,"value":28.505},{"time":1699027960.400628,"value":28.505},{"time":1699027991.98,"value":28.485},{"time":1699028025.659939,"value":28.475},{"time":1699028058.974496,"value":28.485},{"time":1699028089.503743,"value":28.49},{"time":1699028122.582512,"value":28.485},{"time":1699028153.656545,"value":28.465},{"time":1699028183.8974,"value":28.46},{"time":1699028214.973744,"value":28.47},{"time":1699028245.796393,"value":28.485},{"time":1699028277.769018,"value":28.495},{"time":1699028308.429092,"value":28.4901},{"time":1699028338.880168,"value":28.49},{"time":1699028369.994881,"value":28.495},{"time":1699028401.792756,"value":28.49},{"time":1699028431.970538,"value":28.49},{"time":1699028463.715935,"value":28.47},{"time":1699028494.034909,"value":28.44},{"time":1699028524.570011,"value":28.465},{"time":1699028555.07556,"value":28.485},{"time":1699028585.549326,"value":28.485},{"time":1699028617.013388,"value":28.48},{"time":1699028647.801635,"value":28.475},{"time":1699028677.998473,"value":28.485},{
"time":1699028712.12153,"value":28.4954},{"time":1699028743.984318,"value":28.48},{"time":1699028775.623592,"value":28.4748},{"time":1699028806.543861,"value":28.475},{"time":1699028837.050434,"value":28.475},{"time":1699028867.212235,"value":28.465},{"time":1699028897.591785,"value":28.465},{"time":1699028928.264984,"value":28.47},{"time":1699028959.867069,"value":28.475},{"time":1699028991.542011,"value":28.455},{"time":1699029022.549037,"value":28.45},{"time":1699029054.259436,"value":28.46},{"time":1699029084.545072,"value":28.465},{"time":1699029114.900542,"value":28.46},{"time":1699029146.620521,"value":28.46},{"time":1699029179.589725,"value":28.45},{"time":1699029209.826802,"value":28.445},{"time":1699029240.148804,"value":28.435},{"time":1699029271.583122,"value":28.44},{"time":1699029302.460706,"value":28.455},{"time":1699029333.333215,"value":28.455},{"time":1699029364.235944,"value":28.445},{"time":1699029395.488928,"value":28.47},{"time":1699029425.619811,"value":28.435},{"time":1699029456.659012,"value":28.435},{"time":1699029487.403637,"value":28.435},{"time":1699029517.525669,"value":28.435},{"time":1699029547.728824,"value":28.435},{"time":1699029579.479949,"value":28.435},{"time":1699029613.898446,"value":28.4269},{"time":1699029644.27319,"value":28.435},{"time":1699029675.770214,"value":28.425},{"time":1699029705.793104,"value":28.425},{"time":1699029739.392305,"value":28.43},{"time":1699029769.406754,"value":28.425},{"time":1699029800.904643,"value":28.41},{"time":1699029832.43415,"value":28.405},{"time":1699029865.048612,"value":28.42},{"time":1699029896.112643,"value":28.405},{"time":1699029926.712455,"value":28.4},{"time":1699029956.920044,"value":28.395},{"time":1699029987.476193,"value":28.4095},{"time":1699030019.914236,"value":28.425},{"time":1699030051.474218,"value":28.4199},{"time":1699030081.790332,"value":28.425},{"time":1699030113.279521,"value":28.405},{"time":1699030145.058974,"value":28.415},{"time":1699030175.437213,"value":28.42},{"time":1699030205.761275,"value":28.425},{"time":1699030238.219352,"value":28.42},{"time":1699030268.316424,"value":28.425},{"time":1699030298.587675,"value":28.44},{"time":1699030329.148758,"value":28.45},{"time":1699030359.443807,"value":28.455},{"time":1699030389.805798,"value":28.46},{"time":1699030419.920336,"value":28.455},{"time":1699030450.006731,"value":28.455},{"time":1699030480.423882,"value":28.46},{"time":1699030510.492008,"value":28.46},{"time":1699030541.499704,"value":28.46},{"time":1699030573.265158,"value":28.4699},{"time":1699030603.661525,"value":28.485},{"time":1699030634.046372,"value":28.485},{"time":1699030665.943711,"value":28.5099},{"time":1699030697.001381,"value":28.51},{"time":1699030727.215985,"value":28.53},{"time":1699030757.566023,"value":28.55},{"time":1699030788.235149,"value":28.565},{"time":1699030820.008994,"value":28.57},{"time":1699030851.303736,"value":28.5759},{"time":1699030881.666398,"value":28.58},{"time":1699030914.139462,"value":28.6},{"time":1699030945.988479,"value":28.605},{"time":1699030976.119158,"value":28.62},{"time":1699031007.780077,"value":28.61},{"time":1699031038.730324,"value":28.615},{"time":1699031070.374924,"value":28.635},{"time":1699031100.754679,"value":28.595},{"time":1699031132.23774,"value":28.577},{"time":1699031162.755814,"value":28.6},{"time":1699031196.045974,"value":28.595},{"time":1699031226.95054,"value":28.59},{"time":1699031259.33105,"value":28.575},{"time":1699031290.585698,"value":28.565},{"time":1699031321.58565,"value":28.56},{"time":1699031351.6
30394,"value":28.57},{"time":1699031382.064823,"value":28.565},{"time":1699031412.572971,"value":28.58},{"time":1699031443.50161,"value":28.5695},{"time":1699031474.43398,"value":28.555},{"time":1699031504.614992,"value":28.545},{"time":1699031535.93342,"value":28.55},{"time":1699031567.581363,"value":28.55},{"time":1699031598.169505,"value":28.56},{"time":1699031631.176986,"value":28.54},{"time":1699031663.701909,"value":28.545},{"time":1699031696.887383,"value":28.53},{"time":1699031728.596345,"value":28.545},{"time":1699031762.371156,"value":28.5599},{"time":1699031792.588913,"value":28.565},{"time":1699031823.316266,"value":28.58},{"time":1699031853.321177,"value":28.591},{"time":1699031883.417252,"value":28.585},{"time":1699031914.680392,"value":28.56},{"time":1699031946.898342,"value":28.5692},{"time":1699031977.632583,"value":28.565},{"time":1699032007.711922,"value":28.5599},{"time":1699032037.902385,"value":28.56},{"time":1699032070.087226,"value":28.565},{"time":1699032101.920787,"value":28.57},{"time":1699032132.408531,"value":28.57},{"time":1699032164.124163,"value":28.5785},{"time":1699032194.941612,"value":28.58},{"time":1699032225.926233,"value":28.565},{"time":1699032260.046657,"value":28.56},{"time":1699032290.968033,"value":28.565},{"time":1699032325.746135,"value":28.555},{"time":1699032359.734661,"value":28.547},{"time":1699032391.682556,"value":28.53},{"time":1699032423.44457,"value":28.55},{"time":1699032455.05434,"value":28.545},{"time":1699032487.116926,"value":28.52},{"time":1699032518.664223,"value":28.5001},{"time":1699032549.069279,"value":28.505},{"time":1699032579.861928,"value":28.4811},{"time":1699032609.894083,"value":28.51},{"time":1699032641.179908,"value":28.5259},{"time":1699032671.876168,"value":28.535},{"time":1699032703.667801,"value":28.535},{"time":1699032734.588127,"value":28.53},{"time":1699032768.754475,"value":28.535},{"time":1699032799.186966,"value":28.545},{"time":1699032830.156905,"value":28.545},{"time":1699032862.27748,"value":28.5566},{"time":1699032893.11523,"value":28.565},{"time":1699032924.311403,"value":28.5602},{"time":1699032954.384318,"value":28.55},{"time":1699032986.072031,"value":28.56},{"time":1699033016.893769,"value":28.55},{"time":1699033052.66013,"value":28.545},{"time":1699033084.291394,"value":28.545},{"time":1699033114.515934,"value":28.5421},{"time":1699033145.197537,"value":28.545},{"time":1699033176.333519,"value":28.57},{"time":1699033206.63401,"value":28.56},{"time":1699033236.686076,"value":28.55},{"time":1699033268.171251,"value":28.5501},{"time":1699033298.418461,"value":28.56},{"time":1699033328.998694,"value":28.5499},{"time":1699033360.438221,"value":28.545},{"time":1699033391.117448,"value":28.55},{"time":1699033421.897801,"value":28.545},{"time":1699033454.051103,"value":28.545},{"time":1699033484.532679,"value":28.542},{"time":1699033516.350274,"value":28.545},{"time":1699033546.521525,"value":28.5492},{"time":1699033577.562039,"value":28.55},{"time":1699033607.737315,"value":28.555},{"time":1699033638.54558,"value":28.5678},{"time":1699033668.554351,"value":28.575},{"time":1699033699.302301,"value":28.5633},{"time":1699033730.460731,"value":28.565},{"time":1699033762.673027,"value":28.575},{"time":1699033794.148161,"value":28.575},{"time":1699033824.773173,"value":28.575},{"time":1699033854.849909,"value":28.585},{"time":1699033885.142025,"value":28.59},{"time":1699033921.045471,"value":28.585},{"time":1699033952.331513,"value":28.6},{"time":1699033982.83254,"value":28.5791},{"time":1699034013.501093,"val
ue":28.58},{"time":1699034044.087637,"value":28.58},{"time":1699034078.677498,"value":28.575},{"time":1699034109.697928,"value":28.58},{"time":1699034141.55465,"value":28.585},{"time":1699034174.903248,"value":28.5975},{"time":1699034204.905456,"value":28.5988},{"time":1699034235.990239,"value":28.605},{"time":1699034266.372305,"value":28.62},{"time":1699034296.999459,"value":28.605},{"time":1699034327.163801,"value":28.6175},{"time":1699034357.599528,"value":28.61},{"time":1699034387.616422,"value":28.605},{"time":1699034418.346748,"value":28.615},{"time":1699034449.262557,"value":28.6163},{"time":1699034480.980216,"value":28.605},{"time":1699034511.700762,"value":28.615},{"time":1699034541.790667,"value":28.6081},{"time":1699034573.475685,"value":28.605},{"time":1699034603.900366,"value":28.605},{"time":1699034634.470582,"value":28.605},{"time":1699034665.088378,"value":28.605},{"time":1699034695.338479,"value":28.61},{"time":1699034727.668928,"value":28.6175},{"time":1699034759.330002,"value":28.615},{"time":1699034789.431709,"value":28.59},{"time":1699034819.464919,"value":28.57},{"time":1699034849.504792,"value":28.56},{"time":1699034880.634613,"value":28.545},{"time":1699034914.906698,"value":28.56},{"time":1699034946.525526,"value":28.575},{"time":1699034979.65996,"value":28.585},{"time":1699035009.83088,"value":28.585},{"time":1699035042.159775,"value":28.585},{"time":1699035072.263417,"value":28.59},{"time":1699035102.268049,"value":28.575},{"time":1699035134.984447,"value":28.5811},{"time":1699035165.124667,"value":28.545},{"time":1699035199.434971,"value":28.555},{"time":1699035231.165084,"value":28.565},{"time":1699035262.242613,"value":28.565},{"time":1699035295.228003,"value":28.5528},{"time":1699035325.812834,"value":28.5501},{"time":1699035359.048305,"value":28.54},{"time":1699035389.562774,"value":28.54},{"time":1699035419.568435,"value":28.535},{"time":1699035450.536662,"value":28.535},{"time":1699035481.812288,"value":28.535},{"time":1699035512.149093,"value":28.5329},{"time":1699035543.462682,"value":28.5303},{"time":1699035574.398519,"value":28.535},{"time":1699035604.533554,"value":28.545},{"time":1699035634.734566,"value":28.555},{"time":1699035665.129892,"value":28.5599},{"time":1699035698.020783,"value":28.55},{"time":1699035728.810689,"value":28.56},{"time":1699035759.208408,"value":28.545},{"time":1699035792.110835,"value":28.535},{"time":1699035822.833673,"value":28.5301},{"time":1699035853.453077,"value":28.54},{"time":1699035883.526002,"value":28.5286},{"time":1699035913.537874,"value":28.5202},{"time":1699035945.394027,"value":28.525},{"time":1699035977.757063,"value":28.53},{"time":1699036008.613806,"value":28.5203},{"time":1699036038.64446,"value":28.525},{"time":1699036069.245159,"value":28.535},{"time":1699036102.598537,"value":28.535},{"time":1699036134.079266,"value":28.54},{"time":1699036165.760358,"value":28.545},{"time":1699036196.196676,"value":28.535},{"time":1699036227.618383,"value":28.545},{"time":1699036257.918857,"value":28.545},{"time":1699036290.277819,"value":28.545},{"time":1699036331.975068,"value":28.56},{"time":1699036362.10073,"value":28.555},{"time":1699036392.152704,"value":28.565},{"time":1699036422.904539,"value":28.565},{"time":1699036453.098398,"value":28.575},{"time":1699036483.339595,"value":28.575},{"time":1699036515.753578,"value":28.5701},{"time":1699036552.597911,"value":28.575},{"time":1699036583.135742,"value":28.575},{"time":1699036615.344277,"value":28.58},{"time":1699036649.409152,"value":28.57},{"time":1699036679.82476
4,"value":28.57},{"time":1699036709.86081,"value":28.565},{"time":1699036740.120599,"value":28.57},{"time":1699036770.392548,"value":28.565},{"time":1699036800.486605,"value":28.5768},{"time":1699036831.729728,"value":28.575},{"time":1699036862.24438,"value":28.59},{"time":1699036892.517737,"value":28.57},{"time":1699036925.424913,"value":28.565},{"time":1699036955.898461,"value":28.57},{"time":1699036987.941833,"value":28.58},{"time":1699037018.421444,"value":28.58},{"time":1699037048.642141,"value":28.58},{"time":1699037079.20277,"value":28.575},{"time":1699037110.456448,"value":28.58},{"time":1699037149.692173,"value":28.575},{"time":1699037179.869723,"value":28.5699},{"time":1699037210.177633,"value":28.575},{"time":1699037240.952886,"value":28.57},{"time":1699037271.543123,"value":28.565},{"time":1699037302.371874,"value":28.565},{"time":1699037332.5291,"value":28.57},{"time":1699037363.232299,"value":28.575},{"time":1699037393.540813,"value":28.575},{"time":1699037426.488602,"value":28.575},{"time":1699037457.202756,"value":28.58},{"time":1699037490.960922,"value":28.585},{"time":1699037522.157414,"value":28.58},{"time":1699037552.463968,"value":28.59},{"time":1699037583.609046,"value":28.59},{"time":1699037614.263874,"value":28.575},{"time":1699037644.894987,"value":28.58},{"time":1699037675.108289,"value":28.58},{"time":1699037705.125869,"value":28.59},{"time":1699037736.407845,"value":28.59},{"time":1699037767.103615,"value":28.585},{"time":1699037797.955553,"value":28.585},{"time":1699037830.323193,"value":28.58},{"time":1699037861.914812,"value":28.59},{"time":1699037891.919869,"value":28.59},{"time":1699037921.958853,"value":28.585},{"time":1699037952.964111,"value":28.595},{"time":1699037984.792681,"value":28.595},{"time":1699038014.896635,"value":28.595},{"time":1699038048.026312,"value":28.595},{"time":1699038079.006321,"value":28.61},{"time":1699038109.560174,"value":28.5966},{"time":1699038140.403307,"value":28.5902},{"time":1699038170.622657,"value":28.595},{"time":1699038201.157202,"value":28.59},{"time":1699038232.079862,"value":28.595},{"time":1699038262.299407,"value":28.595},{"time":1699038292.751656,"value":28.595},{"time":1699038322.909942,"value":28.59},{"time":1699038361.448157,"value":28.58},{"time":1699038391.53072,"value":28.586},{"time":1699038422.809932,"value":28.585},{"time":1699038453.970595,"value":28.55},{"time":1699038487.556973,"value":28.55},{"time":1699038517.575851,"value":28.525},{"time":1699038547.7414,"value":28.53},{"time":1699038579.729595,"value":28.53},{"time":1699038612.61915,"value":28.53},{"time":1699038645.050427,"value":28.525},{"time":1699038675.706518,"value":28.515},{"time":1699038706.093003,"value":28.525},{"time":1699038736.140121,"value":28.49},{"time":1699038767.224909,"value":28.51},{"time":1699038797.866001,"value":28.52},{"time":1699038828.147159,"value":28.525},{"time":1699038858.149396,"value":28.53},{"time":1699038892.621333,"value":28.53},{"time":1699038923.833832,"value":28.5336},{"time":1699038954.372786,"value":28.525},{"time":1699038985.052729,"value":28.525},{"time":1699039016.260912,"value":28.525},{"time":1699039047.500972,"value":28.525},{"time":1699039079.490137,"value":28.5232},{"time":1699039110.546254,"value":28.525},{"time":1699039141.30159,"value":28.525},{"time":1699039174.438761,"value":28.515},{"time":1699039204.445207,"value":28.5099},{"time":1699039235.635853,"value":28.4953},{"time":1699039266.199143,"value":28.505},{"time":1699039299.155788,"value":28.5062},{"time":1699039329.700352,"value":28.515},{"ti
me":1699039362.083176,"value":28.51},{"time":1699039398.665357,"value":28.505},{"time":1699039429.606721,"value":28.5144},{"time":1699039460.044345,"value":28.515},{"time":1699039491.337512,"value":28.5164},{"time":1699039521.756531,"value":28.5299},{"time":1699039552.309251,"value":28.515},{"time":1699039584.559271,"value":28.51},{"time":1699039617.034631,"value":28.515},{"time":1699039647.406552,"value":28.51},{"time":1699039677.525766,"value":28.5},{"time":1699039707.725356,"value":28.495},{"time":1699039738.204286,"value":28.485},{"time":1699039769.23429,"value":28.5},{"time":1699039799.757555,"value":28.5},{"time":1699039830.122355,"value":28.485},{"time":1699039860.599216,"value":28.495},{"time":1699039890.922102,"value":28.515},{"time":1699039921.006488,"value":28.5199},{"time":1699039951.052634,"value":28.51},{"time":1699039981.223662,"value":28.51},{"time":1699040011.874327,"value":28.515},{"time":1699040042.328555,"value":28.52},{"time":1699040073.565195,"value":28.515},{"time":1699040104.660631,"value":28.505},{"time":1699040135.58441,"value":28.495},{"time":1699040165.665001,"value":28.51},{"time":1699040195.75476,"value":28.5},{"time":1699040226.529336,"value":28.51},{"time":1699040257.412581,"value":28.5029},{"time":1699040287.571532,"value":28.52},{"time":1699040318.114847,"value":28.51},{"time":1699040348.224811,"value":28.525},{"time":1699040378.848563,"value":28.515},{"time":1699040409.315775,"value":28.52},{"time":1699040439.40886,"value":28.5},{"time":1699040471.794074,"value":28.49},{"time":1699040501.80612,"value":28.49},{"time":1699040536.469521,"value":28.49},{"time":1699040567.188042,"value":28.4823},{"time":1699040597.521292,"value":28.48},{"time":1699040627.634646,"value":28.46},{"time":1699040658.060501,"value":28.485},{"time":1699040688.31039,"value":28.48},{"time":1699040718.657189,"value":28.5},{"time":1699040749.244547,"value":28.495},{"time":1699040779.2772,"value":28.485},{"time":1699040809.289405,"value":28.4922},{"time":1699040840.252596,"value":28.485},{"time":1699040870.294358,"value":28.485},{"time":1699040902.092143,"value":28.475},{"time":1699040932.518442,"value":28.465},{"time":1699040962.781592,"value":28.4699},{"time":1699040994.042496,"value":28.46},{"time":1699041024.134933,"value":28.455},{"time":1699041054.542756,"value":28.45},{"time":1699041085.064692,"value":28.445},{"time":1699041115.464525,"value":28.455},{"time":1699041145.51358,"value":28.435},{"time":1699041175.622529,"value":28.43},{"time":1699041206.204989,"value":28.44},{"time":1699041236.379647,"value":28.41},{"time":1699041266.580813,"value":28.425},{"time":1699041296.703474,"value":28.425},{"time":1699041327.020957,"value":28.415},{"time":1699041357.276606,"value":28.415},{"time":1699041387.289358,"value":28.43},{"time":1699041417.974708,"value":28.415},{"time":1699041447.979434,"value":28.4242},{"time":1699041478.023777,"value":28.405},{"time":1699041508.292258,"value":28.4},{"time":1699041538.353258,"value":28.41},{"time":1699041568.360393,"value":28.425},{"time":1699041598.747699,"value":28.42}] -f = json.loads(a) +f = orjson.loads(a) print(f) \ No newline at end of file diff --git a/testy/migrace/migracerunnerheader.py b/testy/migrace/migracerunnerheader.py index e025246..0e67bcf 100644 --- a/testy/migrace/migracerunnerheader.py +++ b/testy/migrace/migracerunnerheader.py @@ -2,7 +2,7 @@ import sqlite3 from v2realbot.config import DATA_DIR from v2realbot.utils.utils import json_serial from uuid import UUID, uuid4 -import json +import orjson from datetime import datetime from 
v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account from v2realbot.common.model import RunArchiveDetail, RunArchive, RunArchiveView @@ -35,14 +35,14 @@ def row_to_object(row: dict) -> RunArchive: end_positions=row.get('end_positions'), end_positions_avgp=row.get('end_positions_avgp'), metrics=row.get('open_orders'), - #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None, + #metrics=orjson.loads(row.get('metrics')) if row.get('metrics') else None, stratvars_toml=row.get('stratvars_toml') ) def get_all_archived_runners(): conn = pool.get_connection() try: - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_header") finally: @@ -54,7 +54,7 @@ def insert_archive_header(archeader: RunArchive): conn = pool.get_connection() try: c = conn.cursor() - json_string = json.dumps(archeader, default=json_serial) + json_string = orjson.dumps(archeader, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) if archeader.batch_id is not None: statement = f"INSERT INTO runner_header (runner_id, batch_id, ra) VALUES ('{str(archeader.id)}','{str(archeader.batch_id)}','{json_string}')" else: @@ -103,7 +103,7 @@ def migrate_to_columns(ra: RunArchive): SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=? WHERE runner_id=? ''', - (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id))) + (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, orjson.dumps(ra.strat_json), orjson.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, orjson.dumps(ra.metrics), ra.stratvars_toml, str(ra.id))) conn.commit() finally: diff --git a/testy/migrace/migracesql.py b/testy/migrace/migracesql.py index 7965878..a6b69c6 100644 --- a/testy/migrace/migracesql.py +++ b/testy/migrace/migracesql.py @@ -2,7 +2,7 @@ import sqlite3 from v2realbot.config import DATA_DIR from v2realbot.utils.utils import json_serial from uuid import UUID, uuid4 -import json +import orjson from datetime import datetime from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account from v2realbot.common.model import RunArchiveDetail @@ -11,7 +11,7 @@ from tinydb import TinyDB, Query, where sqlite_db_file = DATA_DIR + "/v2trading.db" conn = sqlite3.connect(sqlite_db_file) #standardne vraci pole tuplů, kde clen tuplu jsou sloupce -#conn.row_factory = lambda c, r: json.loads(r[0]) +#conn.row_factory = lambda c, r: orjson.loads(r[0]) #conn.row_factory = lambda c, r: r[0] #conn.row_factory = sqlite3.Row @@ -28,7 +28,7 @@ insert_list = [dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordT def insert_log(runner_id: UUID, time: float, logdict: dict): c = conn.cursor() - json_string = json.dumps(logdict, default=json_serial) + json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string]) conn.commit() 
return res.rowcount @@ -37,14 +37,14 @@ def insert_log_multiple(runner_id: UUID, loglist: list): c = conn.cursor() insert_data = [] for i in loglist: - row = (str(runner_id), i["time"], json.dumps(i, default=json_serial)) + row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)) insert_data.append(row) c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data) conn.commit() return c.rowcount # c = conn.cursor() - # json_string = json.dumps(logdict, default=json_serial) + # json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) # res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string]) # print(res) # conn.commit() @@ -52,7 +52,7 @@ def insert_log_multiple(runner_id: UUID, loglist: list): #returns list of ilog jsons def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float): - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={ts_from} AND time <={ts_to}") return res.fetchall() @@ -94,21 +94,21 @@ def delete_logs(runner_id: UUID): def insert_archive_detail(archdetail: RunArchiveDetail): c = conn.cursor() - json_string = json.dumps(archdetail, default=json_serial) + json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) res = c.execute("INSERT INTO runner_detail VALUES (?,?)",[str(archdetail["id"]), json_string]) conn.commit() return res.rowcount #returns list of details def get_all_archive_detail(): - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_detail") return res.fetchall() #vrátí konkrétní def get_archive_detail_byID(runner_id: UUID): - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(runner_id)}'") return res.fetchone() @@ -123,7 +123,7 @@ def delete_archive_detail(runner_id: UUID): def get_all_archived_runners_detail(): arch_detail_file = DATA_DIR + "/arch_detail.json" - db_arch_d = TinyDB(arch_detail_file, default=json_serial) + db_arch_d = TinyDB(arch_detail_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) res = db_arch_d.all() return 0, res diff --git a/testy/ml/test.py b/testy/ml/test.py index 332a52c..3f341cb 100644 --- a/testy/ml/test.py +++ b/testy/ml/test.py @@ -4,7 +4,7 @@ from keras.models import Sequential from keras.layers import LSTM, Dense from v2realbot.controller.services import get_archived_runner_details_byID from v2realbot.common.model import RunArchiveDetail -import json +import orjson runner_id = "838e918e-9be0-4251-a968-c13c83f3f173" result = None diff --git a/testy/testSqlite3.py b/testy/testSqlite3.py index 7965878..a6b69c6 100644 --- a/testy/testSqlite3.py +++ b/testy/testSqlite3.py @@ -2,7 +2,7 @@ import sqlite3 from v2realbot.config import DATA_DIR from v2realbot.utils.utils import json_serial from uuid import UUID, uuid4 -import json +import orjson from datetime import datetime from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account from v2realbot.common.model import RunArchiveDetail @@ -11,7 +11,7 @@ from tinydb import TinyDB, Query, where sqlite_db_file = DATA_DIR + "/v2trading.db" conn = 
sqlite3.connect(sqlite_db_file) #standardne vraci pole tuplů, kde clen tuplu jsou sloupce -#conn.row_factory = lambda c, r: json.loads(r[0]) +#conn.row_factory = lambda c, r: orjson.loads(r[0]) #conn.row_factory = lambda c, r: r[0] #conn.row_factory = sqlite3.Row @@ -28,7 +28,7 @@ insert_list = [dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordT def insert_log(runner_id: UUID, time: float, logdict: dict): c = conn.cursor() - json_string = json.dumps(logdict, default=json_serial) + json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string]) conn.commit() return res.rowcount @@ -37,14 +37,14 @@ def insert_log_multiple(runner_id: UUID, loglist: list): c = conn.cursor() insert_data = [] for i in loglist: - row = (str(runner_id), i["time"], json.dumps(i, default=json_serial)) + row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)) insert_data.append(row) c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data) conn.commit() return c.rowcount # c = conn.cursor() - # json_string = json.dumps(logdict, default=json_serial) + # json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) # res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string]) # print(res) # conn.commit() @@ -52,7 +52,7 @@ def insert_log_multiple(runner_id: UUID, loglist: list): #returns list of ilog jsons def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float): - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={ts_from} AND time <={ts_to}") return res.fetchall() @@ -94,21 +94,21 @@ def delete_logs(runner_id: UUID): def insert_archive_detail(archdetail: RunArchiveDetail): c = conn.cursor() - json_string = json.dumps(archdetail, default=json_serial) + json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) res = c.execute("INSERT INTO runner_detail VALUES (?,?)",[str(archdetail["id"]), json_string]) conn.commit() return res.rowcount #returns list of details def get_all_archive_detail(): - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_detail") return res.fetchall() #vrátí konkrétní def get_archive_detail_byID(runner_id: UUID): - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(runner_id)}'") return res.fetchone() @@ -123,7 +123,7 @@ def delete_archive_detail(runner_id: UUID): def get_all_archived_runners_detail(): arch_detail_file = DATA_DIR + "/arch_detail.json" - db_arch_d = TinyDB(arch_detail_file, default=json_serial) + db_arch_d = TinyDB(arch_detail_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) res = db_arch_d.all() return 0, res diff --git a/testy/testStore.py b/testy/testStore.py index 3f31631..e2aa0be 100644 --- a/testy/testStore.py +++ b/testy/testStore.py @@ -46,7 +46,7 @@ db.save() # b = 2 # def toJson(self): -# return json.dumps(self, default=lambda o: o.__dict__) +# return orjson.dumps(self, default=lambda o: o.__dict__) # 
db.append(Neco.a) diff --git a/testy/testTIMIT b/testy/testTIMIT index 1070325..eecf2b3 100644 --- a/testy/testTIMIT +++ b/testy/testTIMIT @@ -1,12 +1,12 @@ import timeit setup = ''' import msgpack -import json +import orjson from copy import deepcopy data = {'name':'John Doe','ranks':{'sports':13,'edu':34,'arts':45},'grade':5}''' print(timeit.timeit('deepcopy(data)', setup=setup)) # 12.0860249996 -print(timeit.timeit('json.loads(json.dumps(data))', setup=setup)) +print(timeit.timeit('orjson.loads(orjson.dumps(data))', setup=setup)) # 9.07182312012 print(timeit.timeit('msgpack.unpackb(msgpack.packb(data))', setup=setup)) # 1.42743492126 \ No newline at end of file diff --git a/testy/tinyDBselect.py b/testy/tinyDBselect.py index ff247ce..824ee1f 100644 --- a/testy/tinyDBselect.py +++ b/testy/tinyDBselect.py @@ -16,7 +16,7 @@ import importlib from queue import Queue from tinydb import TinyDB, Query, where from tinydb.operations import set -import json +import orjson from rich import print @@ -29,7 +29,7 @@ class RunnerLogger: def __init__(self, runner_id: UUID) -> None: self.runner_id = runner_id runner_log_file = DATA_DIR + "/runner_log.json" - db_runner_log = TinyDB(runner_log_file, default=json_serial) + db_runner_log = TinyDB(runner_log_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) def insert_log_multiple(runner_id: UUID, logList: list): runner_table = db_runner_log.table(str(runner_id)) diff --git a/testy/tinyFLUXtest.py b/testy/tinyFLUXtest.py index fe911e0..5028135 100644 --- a/testy/tinyFLUXtest.py +++ b/testy/tinyFLUXtest.py @@ -16,7 +16,7 @@ import importlib from queue import Queue #from tinydb import TinyDB, Query, where #from tinydb.operations import set -import json +import orjson from rich import print from tinyflux import Point, TinyFlux @@ -26,7 +26,7 @@ runner_log_file = DATA_DIR + "/runner_flux__log.json" db_runner_log = TinyFlux(runner_log_file) insert_dict = {'datum': datetime.now(), 'side': "dd", 'name': 'david','id': uuid4(), 'order': "neco"} -#json.dumps(insert_dict, default=json_serial) +#orjson.dumps(insert_dict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) p1 = Point(time=datetime.now(), tags=insert_dict) db_runner_log.insert(p1) diff --git a/testy/tinydbTest.py b/testy/tinydbTest.py index fb3ef5e..63869a9 100644 --- a/testy/tinydbTest.py +++ b/testy/tinydbTest.py @@ -13,7 +13,7 @@ from v2realbot.common.model import Order, TradeUpdate as btTradeUpdate from alpaca.trading.models import TradeUpdate from alpaca.trading.enums import TradeEvent, OrderType, OrderSide, OrderType, OrderStatus from rich import print -import json +import orjson #storage_with_injected_serialization = JSONStorage() @@ -110,7 +110,7 @@ a = Order(id=uuid4(), limit_price=22.4) db_file = DATA_DIR + "/db.json" -db = TinyDB(db_file, default=json_serial) +db = TinyDB(db_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) db.truncate() insert = {'datum': datetime.now(), 'side': OrderSide.BUY, 'name': 'david','id': uuid4(), 'order': orderList} diff --git a/testy/websocketFastApi.py b/testy/websocketFastApi.py index d9ad3e4..91d24c5 100644 --- a/testy/websocketFastApi.py +++ b/testy/websocketFastApi.py @@ -6,7 +6,7 @@ import secrets from typing import Annotated import os import uvicorn -import json +import orjson from datetime import datetime from v2realbot.utils.utils import zoneNY @@ -103,7 +103,7 @@ async def websocket_endpoint( 'vwap': 123, 'updated': 123, 'index': 123} - await websocket.send_text(json.dumps(data)) + await 
websocket.send_text(orjson.dumps(data)) except WebSocketDisconnect: print("CLIENT DISCONNECTED for", runner_id) diff --git a/testy/websocketFastApiConnManager.py b/testy/websocketFastApiConnManager.py index 53bebcb..63b340c 100644 --- a/testy/websocketFastApiConnManager.py +++ b/testy/websocketFastApiConnManager.py @@ -6,7 +6,7 @@ import secrets from typing import Annotated import os import uvicorn -import json +import orjson from datetime import datetime from v2realbot.utils.utils import zoneNY @@ -101,7 +101,7 @@ async def websocket_endpoint(websocket: WebSocket, client_id: int): # 'close': 123, # 'open': 123, # 'time': "2019-05-25"} - await manager.send_personal_message(json.dumps(data), websocket) + await manager.send_personal_message(orjson.dumps(data), websocket) #await manager.broadcast(f"Client #{client_id} says: {data}") except WebSocketDisconnect: manager.disconnect(websocket) diff --git a/v2realbot/common/db.py b/v2realbot/common/db.py index afb85e7..4e143d0 100644 --- a/v2realbot/common/db.py +++ b/v2realbot/common/db.py @@ -5,7 +5,7 @@ import threading import time from v2realbot.common.model import RunArchive, RunArchiveView from datetime import datetime -import json +import orjson sqlite_db_file = DATA_DIR + "/v2trading.db" # Define the connection pool @@ -82,7 +82,7 @@ def row_to_runarchiveview(row: dict) -> RunArchiveView: trade_count=int(row['trade_count']), end_positions=int(row['end_positions']), end_positions_avgp=float(row['end_positions_avgp']), - metrics=json.loads(row['metrics']) if row['metrics'] else None + metrics=orjson.loads(row['metrics']) if row['metrics'] else None ) #prevede dict radku zpatky na objekt vcetme retypizace @@ -100,13 +100,13 @@ def row_to_runarchive(row: dict) -> RunArchive: account=row['account'], bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None, bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None, - strat_json=json.loads(row['strat_json']), - settings=json.loads(row['settings']), + strat_json=orjson.loads(row['strat_json']), + settings=orjson.loads(row['settings']), ilog_save=bool(row['ilog_save']), profit=float(row['profit']), trade_count=int(row['trade_count']), end_positions=int(row['end_positions']), end_positions_avgp=float(row['end_positions_avgp']), - metrics=json.loads(row['metrics']), + metrics=orjson.loads(row['metrics']), stratvars_toml=row['stratvars_toml'] ) \ No newline at end of file diff --git a/v2realbot/controller/services.py b/v2realbot/controller/services.py index 4c2d5eb..4529bc3 100644 --- a/v2realbot/controller/services.py +++ b/v2realbot/controller/services.py @@ -8,10 +8,11 @@ from alpaca.data.timeframe import TimeFrame from v2realbot.strategy.base import StrategyState from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest -from v2realbot.utils.utils import AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays +from v2realbot.utils.utils import AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data from v2realbot.utils.ilog import delete_logs from 
v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType from datetime import datetime +from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer from threading import Thread, current_thread, Event, enumerate from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY import importlib @@ -21,7 +22,7 @@ from alpaca.trading.client import TradingClient from queue import Queue from tinydb import TinyDB, Query, where from tinydb.operations import set -import json +import orjson import numpy as np from numpy import ndarray from rich import print @@ -45,8 +46,8 @@ lock = Lock() arch_header_file = DATA_DIR + "/arch_header.json" #arch_detail_file = DATA_DIR + "/arch_detail.json" #db layer to store runner archive -db_arch_h = TinyDB(arch_header_file, default=json_serial) -#db_arch_d = TinyDB(arch_detail_file, default=json_serial) +db_arch_h = TinyDB(arch_header_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) +#db_arch_d = TinyDB(arch_detail_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) #db layer to store stratins, TBD zmigrovat do TinyDB db = Store() @@ -368,7 +369,7 @@ def get_testlist_byID(record_id: str): if row is None: return -2, "not found" else: - return 0, TestList(id=row[0], name=row[1], dates=json.loads(row[2])) + return 0, TestList(id=row[0], name=row[1], dates=orjson.loads(row[2])) ##TADY JSEM SKONCIL PROJIT - dodelat nastavni timezone @@ -405,6 +406,11 @@ def run_batch_stratin(id: UUID, runReq: RunRequest): #u prvni polozky if day == cal_dates[0]: + #pokud je cas od po konci marketa, nedavame tento den + if datefrom > end_time: + print("Cas od je po konci marketu, vynechavame tento den") + continue + #pokud je cas od od vetsi nez open marketu prvniho dne, pouzijeme tento pozdejis cas if datefrom > start_time: start_time = datefrom @@ -738,7 +744,7 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param res = dict(profit={}) #filt = max_positions['side'] == 'OrderSide.BUY' - res["pos_cnt"] = dict(zip(max_positions['qty'], max_positions['count'])) + res["pos_cnt"] = dict(zip(str(max_positions['qty']), max_positions['count'])) #naplneni batch sum profitu if inter_batch_params is not None: @@ -827,7 +833,7 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param res["profit"]["daily_rel_profit_list"] = strat.state.rel_profit_cum #vlozeni celeho listu - res["prescr_trades"]=json.loads(json.dumps(strat.state.vars.prescribedTrades, default=json_serial)) + res["prescr_trades"]=transform_data(strat.state.vars.prescribedTrades, json_serial) except NameError: pass @@ -852,10 +858,10 @@ def archive_runner(runner: Runner, strat: StrategyInstance, inter_batch_params: #ulozime informace o nastavení # if self.mode in [Mode.BT, Mode.PREP]: # str(self.dataloader.cache_used) - + settings = dict(resolution=strat.state.resolution, rectype=strat.state.rectype, - cache_used=strat.dataloader.cache_used, + cache_used=strat.dataloader.cache_used if isinstance(strat.dataloader, Trade_Offline_Streamer) else None, configs=dict( GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN=GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, BT_FILL_CONS_TRADES_REQUIRED=BT_FILL_CONS_TRADES_REQUIRED, @@ -1076,7 +1082,7 @@ def 
get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch # def get_all_archived_runners(): # conn = pool.get_connection() # try: -# conn.row_factory = lambda c, r: json.loads(r[0]) +# conn.row_factory = lambda c, r: orjson.loads(r[0]) # c = conn.cursor() # res = c.execute(f"SELECT data FROM runner_header") # finally: @@ -1108,7 +1114,7 @@ def get_archived_runner_header_byID(id: UUID) -> RunArchive: # def get_archived_runner_header_byID(id: UUID): # conn = pool.get_connection() # try: -# conn.row_factory = lambda c, r: json.loads(r[0]) +# conn.row_factory = lambda c, r: orjson.loads(r[0]) # c = conn.cursor() # result = c.execute(f"SELECT data FROM runner_header WHERE runner_id='{str(id)}'") # res= result.fetchone() @@ -1135,7 +1141,7 @@ def insert_archive_header(archeader: RunArchive): conn = pool.get_connection() try: c = conn.cursor() - #json_string = json.dumps(archeader, default=json_serial) + #json_string = orjson.dumps(archeader, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME) res = c.execute(""" INSERT INTO runner_header @@ -1143,7 +1149,7 @@ def insert_archive_header(archeader: RunArchive): VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) """, - (str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, json.dumps(archeader.strat_json), json.dumps(archeader.settings), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, json.dumps(archeader.metrics, default=json_serial), archeader.stratvars_toml)) + (str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml)) #retry not yet supported for statement format above #res = execute_with_retry(c,statement) @@ -1308,7 +1314,7 @@ def delete_archive_detail_byID(id: UUID): def get_all_archived_runners_detail(): conn = pool.get_connection() try: - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_detail") finally: @@ -1324,26 +1330,45 @@ def get_all_archived_runners_detail(): # return 0, res #vrátí konkrétní -def get_archived_runner_details_byID(id: UUID): +# def get_archived_runner_details_byID(id: UUID): +# conn = pool.get_connection() +# try: +# conn.row_factory = lambda c, r: orjson.loads(r[0]) +# c = conn.cursor() +# result = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(id)}'") +# res= result.fetchone() +# finally: +# conn.row_factory = None +# pool.release_connection(conn) +# if res==None: +# return -2, "not found" +# else: +# return 0, res + +#version allowing return of parsed(json) or json string data +def get_archived_runner_details_byID(id: UUID, parsed: bool = True): conn = pool.get_connection() try: - conn.row_factory = lambda c, r: json.loads(r[0]) c = conn.cursor() result = c.execute(f"SELECT data FROM 
runner_detail WHERE runner_id='{str(id)}'") - res= result.fetchone() + res = result.fetchone() finally: - conn.row_factory = None pool.release_connection(conn) - if res==None: + + if res is None: return -2, "not found" else: - return 0, res + # Return parsed JSON, or the raw JSON string when parsed=False + if parsed: + return 0, orjson.loads(res[0]) + else: + return 0, res[0] def update_archive_detail(id: UUID, archdetail: RunArchiveDetail): conn = pool.get_connection() try: c = conn.cursor() - json_string = json.dumps(archdetail, default=json_serial) + json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8') statement = "UPDATE runner_detail SET data = ? WHERE runner_id = ?" params = (json_string, str(id)) ##statement = f"UPDATE runner_detail SET data = '{json_string}' WHERE runner_id='{str(id)}'" @@ -1358,13 +1383,16 @@ def insert_archive_detail(archdetail: RunArchiveDetail): conn = pool.get_connection() try: c = conn.cursor() - json_string = json.dumps(archdetail, default=json_serial) + json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8') + # Use parameterized queries instead of string formatting + statement = "INSERT INTO runner_detail VALUES (?, ?)" + params = (str(archdetail.id), json_string) + res = execute_with_retry(cursor=c, statement=statement, params=params) conn.commit() finally: pool.release_connection(conn) return res.rowcount + # endregion # region TESTLISTS db services @@ -1380,7 +1408,7 @@ def get_testlists(): testlists = [] for row in rows: #print(row) - testlist = TestList(id=row[0], name=row[1], dates=json.loads(row[2])) + testlist = TestList(id=row[0], name=row[1], dates=orjson.loads(row[2])) testlists.append(testlist) return 0, testlists diff --git a/v2realbot/main.py b/v2realbot/main.py index 70f2821..1658d84 100644 --- a/v2realbot/main.py +++ b/v2realbot/main.py @@ -5,7 +5,7 @@ from alpaca.data.timeframe import TimeFrame, TimeFrameUnit from datetime import datetime import os from rich import print -from fastapi import FastAPI, Depends, HTTPException, status, File, UploadFile +from fastapi import FastAPI, Depends, HTTPException, status, File, UploadFile, Response from fastapi.security import APIKeyHeader import uvicorn from uuid import UUID @@ -20,7 +20,7 @@ from v2realbot.enums.enums import Env, Mode from typing import Annotated import os import uvicorn -import json +import orjson from queue import Queue, Empty from threading import Thread import asyncio @@ -329,14 +329,14 @@ def migrate(): end_positions=row.get('end_positions'), end_positions_avgp=row.get('end_positions_avgp'), metrics=row.get('open_orders'), - #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None, + #metrics=orjson.loads(row.get('metrics')) if row.get('metrics') else None, stratvars_toml=row.get('stratvars_toml') ) def get_all_archived_runners(): conn = pool.get_connection() try: - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_header") finally: @@ -381,7 +381,7 @@ def migrate(): SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=? WHERE runner_id=?
''', - (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id))) + (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, orjson.dumps(ra.strat_json).decode('utf-8'), orjson.dumps(ra.settings).decode('utf-8'), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, orjson.dumps(ra.metrics).decode('utf-8'), ra.stratvars_toml, str(ra.id))) conn.commit() finally: @@ -524,13 +524,23 @@ def _get_all_archived_runners_detail() -> list[RunArchiveDetail]: raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found") #get archived runners detail by id +# @app.get("/archived_runners_detail/{runner_id}", dependencies=[Depends(api_key_auth)]) +# def _get_archived_runner_details_byID(runner_id) -> RunArchiveDetail: +# res, set = cs.get_archived_runner_details_byID(runner_id) +# if res == 0: +# return set +# else: +# raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id} a {set}") + +#this is the variant of above that skips parsing of json and returns JSON string returned from db @app.get("/archived_runners_detail/{runner_id}", dependencies=[Depends(api_key_auth)]) -def _get_archived_runner_details_byID(runner_id) -> RunArchiveDetail: - res, set = cs.get_archived_runner_details_byID(runner_id) +def _get_archived_runner_details_byID(runner_id: UUID): + res, data = cs.get_archived_runner_details_byID(id=runner_id, parsed=False) if res == 0: - return set + # Return the raw JSON string as a plain Response + return Response(content=data, media_type="application/json") else: - raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id} a {set}") + raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id}. {data}") #get archived runners detail by id @app.get("/archived_runners_log/{runner_id}", dependencies=[Depends(api_key_auth)]) @@ -647,7 +657,7 @@ def create_record(testlist: TestList): # Insert the record into the database conn = pool.get_connection() cursor = conn.cursor() - cursor.execute("INSERT INTO test_list (id, name, dates) VALUES (?, ?, ?)", (testlist.id, testlist.name, json.dumps(testlist.dates, default=json_serial))) + cursor.execute("INSERT INTO test_list (id, name, dates) VALUES (?, ?, ?)", (testlist.id, testlist.name, orjson.dumps(testlist.dates, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'))) conn.commit() pool.release_connection(conn) return testlist @@ -685,7 +695,7 @@ def update_testlist(record_id: str, testlist: TestList): raise HTTPException(status_code=404, detail='Record not found') # Update the record in the database - cursor.execute("UPDATE test_list SET name = ?, dates = ? WHERE id = ?", (testlist.name, json.dumps(testlist.dates, default=json_serial), record_id)) + cursor.execute("UPDATE test_list SET name = ?, dates = ? 
WHERE id = ?", (testlist.name, orjson.dumps(testlist.dates, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), record_id)) conn.commit() pool.release_connection(conn) @@ -849,6 +859,11 @@ def get_metadata(model_name: str): model_instance = ml.load_model(file=model_name, directory=MODEL_DIR) try: metadata = model_instance.metadata + except AttributeError: + metadata = model_instance.__dict__ + del metadata["scalerX"] + del metadata["scalerY"] + del metadata["model"] except Exception as e: metadata = "No Metada" + str(e) + format_exc() return metadata @@ -879,7 +894,7 @@ def insert_queue2db(): c = insert_conn.cursor() insert_data = [] for i in loglist: - row = (str(runner_id), i["time"], json.dumps(i, default=json_serial)) + row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME|orjson.OPT_NON_STR_KEYS).decode('utf-8')) insert_data.append(row) c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data) insert_conn.commit() @@ -891,7 +906,10 @@ def insert_queue2db(): insert_queue.put(data) # Put the data back into the queue for retry sleep(1) # You can adjust the sleep duration else: - raise # If it's another error, raise it + raise # If it's another error, raise it + except Exception as e: + print("ERROR INSERT LOGQUEUE MODULE:" + str(e)+format_exc()) + print(data) #join cekej na dokonceni vsech for i in cs.db.runners: diff --git a/v2realbot/reporting/metricstools.py b/v2realbot/reporting/metricstools.py index 1517428..d5cef2c 100644 --- a/v2realbot/reporting/metricstools.py +++ b/v2realbot/reporting/metricstools.py @@ -1,4 +1,3 @@ -import json import numpy as np import matplotlib matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg' diff --git a/v2realbot/reporting/metricstoolsimage.py b/v2realbot/reporting/metricstoolsimage.py index dd62d5b..14a9089 100644 --- a/v2realbot/reporting/metricstoolsimage.py +++ b/v2realbot/reporting/metricstoolsimage.py @@ -348,9 +348,9 @@ def generate_trading_report_image(runner_ids: list = None, batch_id: str = None, #Plot 8 Cumulative profit - bud 1 den nebo vice dni + pridame pod to vyvoj ceny # Extract the closing prices and times - closing_prices = bars['close'] + closing_prices = bars.get('close',[]) #times = bars['time'] # Assuming this is a list of pandas Timestamp objects - times = pd.to_datetime(bars['time']) # Ensure this is a Pandas datetime series + times = pd.to_datetime(bars['time']) if bars is not None else [] # Ensure this is a Pandas datetime series # # Plot the closing prices over time # axs[0, 4].plot(times, closing_prices, color='blue') # axs[0, 4].tick_params(axis='x', rotation=45) # Rotate date labels if necessar diff --git a/v2realbot/static/index.html b/v2realbot/static/index.html index 97451b3..f0ee8b1 100644 --- a/v2realbot/static/index.html +++ b/v2realbot/static/index.html @@ -916,7 +916,7 @@ - + diff --git a/v2realbot/static/js/ml.js b/v2realbot/static/js/ml.js index cce5853..5894c59 100644 --- a/v2realbot/static/js/ml.js +++ b/v2realbot/static/js/ml.js @@ -110,6 +110,7 @@ $(document).ready(function() { }, error: function(xhr, status, error) { $('#metadata-container').html('Error fetching metadata: ' + error + xhr.responseText + status); + show_metadata(xhr) } }); } @@ -124,14 +125,14 @@ $(document).ready(function() { require(["vs/editor/editor.main"], () => { model_editor_json = monaco.editor.create(document.getElementById('toml-editor-container'), { - value: response.cfg_toml, + value: response.cfg_toml ? 
response.cfg_toml : JSON.stringify(response,null,4), language: 'toml', theme: 'tomlTheme-dark', automaticLayout: true, readOnly: true }); model_editor_python = monaco.editor.create(document.getElementById('python-editor-container'), { - value: response.arch_function, + value: response.arch_function ? response.arch_function : '', language: 'python', theme: 'tomlTheme-dark', automaticLayout: true, diff --git a/v2realbot/strategy/StrategyClassicSL.py b/v2realbot/strategy/StrategyClassicSL.py index 2db84dc..3d54365 100644 --- a/v2realbot/strategy/StrategyClassicSL.py +++ b/v2realbot/strategy/StrategyClassicSL.py @@ -1,5 +1,5 @@ from v2realbot.strategy.base import Strategy -from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get, get_tick, send_to_telegram +from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get, get_tick, send_to_telegram, transform_data from v2realbot.utils.tlog import tlog, tlog_exception from v2realbot.enums.enums import Mode, Order, Account, RecordType, Followup #from alpaca.trading.models import TradeUpdate @@ -7,7 +7,7 @@ from v2realbot.common.model import TradeUpdate from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus from alpaca.trading.enums import TradeEvent, OrderStatus from v2realbot.indicators.indicators import ema -import json +import orjson from datetime import datetime #from rich import print from random import randrange @@ -90,7 +90,7 @@ class StrategyClassicSL(Strategy): o: Order = data.order signal_name = None ##nejak to vymyslet, aby se dal poslat cely Trade a serializoval se - self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial))) + self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=transform_data(data, json_serial)) if data.event == TradeEvent.FILL or data.event == TradeEvent.PARTIAL_FILL: @@ -180,7 +180,7 @@ class StrategyClassicSL(Strategy): setattr(tradeData, "profit_sum", self.state.profit) setattr(tradeData, "signal_name", signal_name) setattr(tradeData, "prescribed_trade_id", self.state.vars.pending) - #self.state.ilog(f"updatnut tradeList o profit", tradeData=json.loads(json.dumps(tradeData, default=json_serial))) + #self.state.ilog(f"updatnut tradeList o profit", tradeData=orjson.loads(orjson.dumps(tradeData, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))) setattr(tradeData, "rel_profit", rel_profit) setattr(tradeData, "rel_profit_cum", rel_profit_cum_calculated) @@ -233,8 +233,8 @@ class StrategyClassicSL(Strategy): async def orderUpdateSell(self, data: TradeUpdate): - - self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial))) + + self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=transform_data(data, json_serial)) #naklady vypocteme z prumerne ceny, kterou mame v pozicich if data.event == TradeEvent.FILL or data.event == TradeEvent.PARTIAL_FILL: diff --git a/v2realbot/strategy/StrategyOrderLimitVykladaci.py b/v2realbot/strategy/StrategyOrderLimitVykladaci.py index 6c9a6c9..8d1a28a 100644 --- a/v2realbot/strategy/StrategyOrderLimitVykladaci.py +++ b/v2realbot/strategy/StrategyOrderLimitVykladaci.py @@ -1,11 +1,11 @@ from v2realbot.strategy.base import Strategy -from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get +from 
v2realbot.utils.utils import parse_alpaca_timestamp, ltp, AttributeDict,trunc,price2dec, zoneNY, print, json_serial, safe_get, transform_data from v2realbot.utils.tlog import tlog, tlog_exception from v2realbot.enums.enums import Mode, Order, Account from alpaca.trading.models import TradeUpdate from alpaca.trading.enums import TradeEvent, OrderStatus from v2realbot.indicators.indicators import ema -import json +import orjson #from rich import print from random import randrange from alpaca.common.exceptions import APIError @@ -21,7 +21,7 @@ class StrategyOrderLimitVykladaci(Strategy): async def orderUpdateBuy(self, data: TradeUpdate): o: Order = data.order ##nejak to vymyslet, aby se dal poslat cely Trade a serializoval se - self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial))) + self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=transform_data(data, json_serial)) if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED: #pokud existuje objednavka v pendingbuys - vyhodime ji @@ -73,7 +73,7 @@ class StrategyOrderLimitVykladaci(Strategy): async def orderUpdateSell(self, data: TradeUpdate): - self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial))) + self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=transform_data(data, json_serial)) #PROFIT #profit pocitame z TradeUpdate.price a TradeUpdate.qty - aktualne provedene mnozstvi a cena #naklady vypocteme z prumerne ceny, kterou mame v pozicich diff --git a/v2realbot/strategy/StrategyOrderLimitVykladaciNormalized.py b/v2realbot/strategy/StrategyOrderLimitVykladaciNormalized.py index b8ac5ad..af862a9 100644 --- a/v2realbot/strategy/StrategyOrderLimitVykladaciNormalized.py +++ b/v2realbot/strategy/StrategyOrderLimitVykladaciNormalized.py @@ -5,7 +5,7 @@ from v2realbot.enums.enums import Mode, Order, Account from alpaca.trading.models import TradeUpdate from alpaca.trading.enums import TradeEvent, OrderStatus from v2realbot.indicators.indicators import ema -import json +import orjson #from rich import print from random import randrange from alpaca.common.exceptions import APIError @@ -21,7 +21,7 @@ class StrategyOrderLimitVykladaciNormalized(Strategy): async def orderUpdateBuy(self, data: TradeUpdate): o: Order = data.order ##nejak to vymyslet, aby se dal poslat cely Trade a serializoval se - self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial))) + self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))) if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED: #pokud existuje objednavka v pendingbuys - vyhodime ji @@ -73,7 +73,7 @@ class StrategyOrderLimitVykladaciNormalized(Strategy): async def orderUpdateSell(self, data: TradeUpdate): - self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial))) + self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))) #PROFIT #profit pocitame z TradeUpdate.price a TradeUpdate.qty - aktualne provedene mnozstvi a cena #naklady vypocteme z prumerne ceny, kterou mame v pozicich diff --git a/v2realbot/strategy/StrategyOrderLimitVykladaciNormalizedMYSELL.py b/v2realbot/strategy/StrategyOrderLimitVykladaciNormalizedMYSELL.py 
index aa779de..331169d 100644 --- a/v2realbot/strategy/StrategyOrderLimitVykladaciNormalizedMYSELL.py +++ b/v2realbot/strategy/StrategyOrderLimitVykladaciNormalizedMYSELL.py @@ -5,7 +5,7 @@ from v2realbot.enums.enums import Mode, Order, Account, RecordType from alpaca.trading.models import TradeUpdate from alpaca.trading.enums import TradeEvent, OrderStatus from v2realbot.indicators.indicators import ema -import json +import orjson #from rich import print from random import randrange from alpaca.common.exceptions import APIError @@ -21,7 +21,7 @@ class StrategyOrderLimitVykladaciNormalizedMYSELL(Strategy): async def orderUpdateBuy(self, data: TradeUpdate): o: Order = data.order ##nejak to vymyslet, aby se dal poslat cely Trade a serializoval se - self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=json.loads(json.dumps(data, default=json_serial))) + self.state.ilog(e="Příchozí BUY notif", msg=o.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))) if o.status == OrderStatus.FILLED or o.status == OrderStatus.CANCELED: #pokud existuje objednavka v pendingbuys - vyhodime ji @@ -42,7 +42,7 @@ class StrategyOrderLimitVykladaciNormalizedMYSELL(Strategy): async def orderUpdateSell(self, data: TradeUpdate): - self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=json.loads(json.dumps(data, default=json_serial))) + self.state.ilog(e="Příchozí SELL notif", msg=data.order.status, trade=orjson.loads(orjson.dumps(data, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))) #PROFIT #profit pocitame z TradeUpdate.price a TradeUpdate.qty - aktualne provedene mnozstvi a cena #naklady vypocteme z prumerne ceny, kterou mame v pozicich diff --git a/v2realbot/strategy/base.py b/v2realbot/strategy/base.py index e7ac173..31bccff 100644 --- a/v2realbot/strategy/base.py +++ b/v2realbot/strategy/base.py @@ -23,7 +23,7 @@ from v2realbot.backtesting.backtester import Backtester from v2realbot.common.model import TradeUpdate from alpaca.trading.enums import TradeEvent, OrderStatus from threading import Event, current_thread -import json +import orjson from uuid import UUID from rich import print as printnow from collections import defaultdict @@ -660,7 +660,7 @@ class Strategy: #send current values to Realtime display on frontend #all datetime values are converted to timestamp if self.rtqueue is not None: - self.rtqueue.put(json.dumps(rt_out, default=json_serial)) + self.rtqueue.put(orjson.dumps(rt_out, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)) print("RTQUEUE", self.rtqueue) #cleaning iterlog lsit diff --git a/v2realbot/strategyblocks/activetrade/close/close_position.py b/v2realbot/strategyblocks/activetrade/close/close_position.py index 0eb5fec..502b7b6 100644 --- a/v2realbot/strategyblocks/activetrade/close/close_position.py +++ b/v2realbot/strategyblocks/activetrade/close/close_position.py @@ -9,7 +9,7 @@ from v2realbot.config import KW from uuid import uuid4 from datetime import datetime #import random -import json +import orjson import numpy as np #from icecream import install, ic from rich import print as printanyway diff --git a/v2realbot/strategyblocks/activetrade/close/eod_exit.py b/v2realbot/strategyblocks/activetrade/close/eod_exit.py index fa49f99..1967032 100644 --- a/v2realbot/strategyblocks/activetrade/close/eod_exit.py +++ b/v2realbot/strategyblocks/activetrade/close/eod_exit.py @@ -9,7 +9,7 @@ from v2realbot.config import KW from uuid import uuid4 from datetime import datetime #import random 
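# --- Illustrative sketch (not applied by this patch) -------------------------------
# The orjson calls that replace json throughout these files differ from the stdlib
# in two ways: orjson.dumps() returns bytes (hence the .decode('utf-8') before
# writing to sqlite TEXT columns), and datetime objects are only routed through the
# default= callback when OPT_PASSTHROUGH_DATETIME is set. The fallback() below is a
# stand-in for the project's json_serial, included only to make the sketch runnable.
import orjson
from datetime import datetime
from uuid import UUID, uuid4

def fallback(obj):
    # Convert non-JSON-native values; raise for anything genuinely unserializable.
    if isinstance(obj, datetime):
        return obj.isoformat()
    if isinstance(obj, UUID):
        return str(obj)
    raise TypeError(f"unserializable type: {type(obj)}")

payload = {"id": uuid4(), "filled_at": datetime(2023, 12, 10, 15, 2), "qty": 10}

raw = orjson.dumps(payload, default=fallback, option=orjson.OPT_PASSTHROUGH_DATETIME)
as_dict = orjson.loads(raw)     # the loads(dumps(...)) round-trip used for ilog payloads
as_text = raw.decode("utf-8")   # the str form stored in sqlite TEXT columns
# OPT_NON_STR_KEYS (used in insert_queue2db) additionally allows non-string dict keys.
# ------------------------------------------------------------------------------------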
-import json +import orjson import numpy as np #from icecream import install, ic from rich import print as printanyway diff --git a/v2realbot/strategyblocks/activetrade/helpers.py b/v2realbot/strategyblocks/activetrade/helpers.py index 1a5ba04..02fd4da 100644 --- a/v2realbot/strategyblocks/activetrade/helpers.py +++ b/v2realbot/strategyblocks/activetrade/helpers.py @@ -8,7 +8,7 @@ from uuid import uuid4 from datetime import datetime from v2realbot.strategyblocks.indicators.helpers import value_or_indicator #import random -import json +import orjson import numpy as np #from icecream import install, ic from rich import print as printanyway diff --git a/v2realbot/strategyblocks/indicators/indicators_hub.py b/v2realbot/strategyblocks/indicators/indicators_hub.py index 223da13..49eca6a 100644 --- a/v2realbot/strategyblocks/indicators/indicators_hub.py +++ b/v2realbot/strategyblocks/indicators/indicators_hub.py @@ -9,8 +9,8 @@ from v2realbot.strategyblocks.indicators.RSI import populate_dynamic_RSI_indicat from v2realbot.strategyblocks.indicators.natr import populate_dynamic_natr_indicator from v2realbot.strategyblocks.indicators.atr import populate_dynamic_atr_indicator import numpy as np -from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists -import json +from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists, transform_data +import orjson def populate_all_indicators(data, state: StrategyState): @@ -55,7 +55,7 @@ def populate_all_indicators(data, state: StrategyState): #TODO tento lof patri spis do nextu classic SL - je poplatny typu stratefie #TODO na toto se podivam, nejak moc zajasonovani a zpatky - #PERF PROBLEM - state.ilog(lvl=1,e="ENTRY", msg=f"LP:{lp} P:{state.positions}/{round(float(state.avgp),3)} SL:{state.vars.activeTrade.stoploss_value if state.vars.activeTrade is not None else None} GP:{state.vars.activeTrade.goal_price if state.vars.activeTrade is not None else None} profit:{round(float(state.profit),2)} profit_rel:{round(np.sum(state.rel_profit_cum),6) if len(state.rel_profit_cum)>0 else 0} Trades:{len(state.tradeList)} pend:{state.vars.pending}", rel_profit_cum=str(state.rel_profit_cum), activeTrade=json.loads(json.dumps(state.vars.activeTrade, default=json_serial)), prescribedTrades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial)), pending=str(state.vars.pending)) + state.ilog(lvl=1,e="ENTRY", msg=f"LP:{lp} P:{state.positions}/{round(float(state.avgp),3)} SL:{state.vars.activeTrade.stoploss_value if state.vars.activeTrade is not None else None} GP:{state.vars.activeTrade.goal_price if state.vars.activeTrade is not None else None} profit:{round(float(state.profit),2)} profit_rel:{round(np.sum(state.rel_profit_cum),6) if len(state.rel_profit_cum)>0 else 0} Trades:{len(state.tradeList)} pend:{state.vars.pending}", rel_profit_cum=str(state.rel_profit_cum), activeTrade=transform_data(state.vars.activeTrade, json_serial), prescribedTrades=transform_data(state.vars.prescribedTrades, json_serial), pending=str(state.vars.pending)) #kroky pro CONFIRMED BAR only if conf_bar == 1: diff --git a/v2realbot/strategyblocks/inits/init_directives.py b/v2realbot/strategyblocks/inits/init_directives.py index 
0097af8..bb3dc1b 100644 --- a/v2realbot/strategyblocks/inits/init_directives.py +++ b/v2realbot/strategyblocks/inits/init_directives.py @@ -9,7 +9,7 @@ from v2realbot.config import KW from uuid import uuid4 from datetime import datetime #import random -import json +import orjson import numpy as np #from icecream import install, ic from rich import print as printanyway diff --git a/v2realbot/strategyblocks/inits/init_indicators.py b/v2realbot/strategyblocks/inits/init_indicators.py index 0083a56..fa33b0d 100644 --- a/v2realbot/strategyblocks/inits/init_indicators.py +++ b/v2realbot/strategyblocks/inits/init_indicators.py @@ -10,7 +10,7 @@ from v2realbot.config import KW, MODEL_DIR from uuid import uuid4 from datetime import datetime #import random -import json +import orjson import numpy as np #from icecream import install, ic from rich import print as printanyway diff --git a/v2realbot/strategyblocks/newtrade/conditions.py b/v2realbot/strategyblocks/newtrade/conditions.py index f6d1555..44d7aad 100644 --- a/v2realbot/strategyblocks/newtrade/conditions.py +++ b/v2realbot/strategyblocks/newtrade/conditions.py @@ -10,7 +10,7 @@ from uuid import uuid4 from datetime import datetime from v2realbot.strategyblocks.indicators.helpers import value_or_indicator #import random -import json +import orjson import numpy as np #from icecream import install, ic from rich import print as printanyway diff --git a/v2realbot/strategyblocks/newtrade/prescribedtrades.py b/v2realbot/strategyblocks/newtrade/prescribedtrades.py index e6f05be..54e4a36 100644 --- a/v2realbot/strategyblocks/newtrade/prescribedtrades.py +++ b/v2realbot/strategyblocks/newtrade/prescribedtrades.py @@ -1,9 +1,9 @@ from v2realbot.strategy.base import StrategyState from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus -from v2realbot.utils.utils import zoneNY, json_serial +from v2realbot.utils.utils import zoneNY, json_serial,transform_data from datetime import datetime #import random -import json +import orjson from v2realbot.strategyblocks.activetrade.helpers import insert_SL_history, get_default_sl_value, normalize_tick, get_profit_target_price from v2realbot.strategyblocks.indicators.helpers import value_or_indicator @@ -14,12 +14,12 @@ def execute_prescribed_trades(state: StrategyState, data): if state.vars.activeTrade is not None or len(state.vars.prescribedTrades) == 0: return #evaluate long (price/market) - state.ilog(lvl=1,e="evaluating prescr trades", trades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial))) + state.ilog(lvl=1,e="evaluating prescr trades", trades=transform_data(state.vars.prescribedTrades, json_serial)) for trade in state.vars.prescribedTrades: if trade.status == TradeStatus.READY and trade.direction == TradeDirection.LONG and (trade.entry_price is None or trade.entry_price >= data['close']): trade.status = TradeStatus.ACTIVATED trade.last_update = datetime.fromtimestamp(state.time).astimezone(zoneNY) - state.ilog(lvl=1,e=f"evaluated LONG", trade=json.loads(json.dumps(trade, default=json_serial)), prescrTrades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial))) + state.ilog(lvl=1,e=f"evaluated LONG", trade=transform_data(trade, json_serial), prescrTrades=transform_data(state.vars.prescribedTrades, json_serial)) state.vars.activeTrade = trade state.vars.last_buy_index = data["index"] state.vars.last_in_index = data["index"] @@ -28,7 +28,7 @@ def execute_prescribed_trades(state: StrategyState, data): if not state.vars.activeTrade: for trade in 
state.vars.prescribedTrades: if trade.status == TradeStatus.READY and trade.direction == TradeDirection.SHORT and (trade.entry_price is None or trade.entry_price <= data['close']): - state.ilog(lvl=1,e=f"evaluaed SHORT", trade=json.loads(json.dumps(trade, default=json_serial)), prescTrades=json.loads(json.dumps(state.vars.prescribedTrades, default=json_serial))) + state.ilog(lvl=1,e=f"evaluaed SHORT", trade=transform_data(trade, json_serial), prescrTrades=transform_data(state.vars.prescribedTrades, json_serial)) trade.status = TradeStatus.ACTIVATED trade.last_update = datetime.fromtimestamp(state.time).astimezone(zoneNY) state.vars.activeTrade = trade @@ -39,7 +39,7 @@ def execute_prescribed_trades(state: StrategyState, data): #odeslani ORDER + NASTAVENI STOPLOSS (zatim hardcoded) if state.vars.activeTrade: if state.vars.activeTrade.direction == TradeDirection.LONG: - state.ilog(lvl=1,e="odesilame LONG ORDER", trade=json.loads(json.dumps(state.vars.activeTrade, default=json_serial))) + state.ilog(lvl=1,e="odesilame LONG ORDER", trade=transform_data(state.vars.activeTrade, json_serial)) if state.vars.activeTrade.size is not None: size = state.vars.activeTrade.size else: @@ -71,7 +71,7 @@ def execute_prescribed_trades(state: StrategyState, data): insert_SL_history(state) state.vars.pending = state.vars.activeTrade.id elif state.vars.activeTrade.direction == TradeDirection.SHORT: - state.ilog(lvl=1,e="odesilame SHORT ORDER",trade=json.loads(json.dumps(state.vars.activeTrade, default=json_serial))) + state.ilog(lvl=1,e="odesilame SHORT ORDER", trade=transform_data(state.vars.activeTrade, json_serial)) if state.vars.activeTrade.size is not None: size = state.vars.activeTrade.size else: diff --git a/v2realbot/utils/historicals.py b/v2realbot/utils/historicals.py index 88f6c7c..44c0a46 100644 --- a/v2realbot/utils/historicals.py +++ b/v2realbot/utils/historicals.py @@ -13,7 +13,7 @@ from collections import defaultdict from pandas import to_datetime from msgpack.ext import Timestamp -def convert_daily_bars(daily_bars): +def convert_historical_bars(daily_bars): """Converts a list of daily bars into a dictionary with the specified keys. 
Args: @@ -89,4 +89,6 @@ def get_historical_bars(symbol: str, time_from: datetime, time_to: datetime, tim bar_request = StockBarsRequest(symbol_or_symbols=symbol,timeframe=timeframe, start=time_from, end=time_to, feed=DataFeed.SIP) bars: BarSet = stock_client.get_stock_bars(bar_request) #print("puvodni bars", bars["BAC"]) - return convert_daily_bars(bars[symbol]) + if bars[symbol][0] is None: + return None + return convert_historical_bars(bars[symbol]) diff --git a/v2realbot/utils/ilog.py b/v2realbot/utils/ilog.py index b016c7e..b06a63f 100644 --- a/v2realbot/utils/ilog.py +++ b/v2realbot/utils/ilog.py @@ -1,7 +1,7 @@ from v2realbot.config import DATA_DIR from v2realbot.utils.utils import json_serial from uuid import UUID, uuid4 -import json +import orjson from datetime import datetime from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account from v2realbot.common.db import pool, insert_queue @@ -9,7 +9,7 @@ import sqlite3 #standardne vraci pole tuplů, kde clen tuplu jsou sloupce -#conn.row_factory = lambda c, r: json.loads(r[0]) +#conn.row_factory = lambda c, r: orjson.loads(r[0]) #conn.row_factory = lambda c, r: r[0] #conn.row_factory = sqlite3.Row @@ -32,7 +32,7 @@ def insert_log(runner_id: UUID, time: float, logdict: dict): conn = pool.get_connection() try: c = conn.cursor() - json_string = json.dumps(logdict, default=json_serial) + json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8') res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string]) conn.commit() finally: @@ -49,7 +49,7 @@ def insert_log_multiple_queue(runner_id:UUID, loglist: list): # c = conn.cursor() # insert_data = [] # for i in loglist: -# row = (str(runner_id), i["time"], json.dumps(i, default=json_serial)) +# row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)) # insert_data.append(row) # c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data) # conn.commit() @@ -59,7 +59,7 @@ def insert_log_multiple_queue(runner_id:UUID, loglist: list): def get_log_window(runner_id: UUID, timestamp_from: float = 0, timestamp_to: float = 9682851459): conn = pool.get_connection() try: - conn.row_factory = lambda c, r: json.loads(r[0]) + conn.row_factory = lambda c, r: orjson.loads(r[0]) c = conn.cursor() res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={timestamp_from} AND time <={timestamp_to} ORDER BY time") finally: diff --git a/v2realbot/utils/utils.py b/v2realbot/utils/utils.py index e1baf03..b695851 100644 --- a/v2realbot/utils/utils.py +++ b/v2realbot/utils/utils.py @@ -329,6 +329,36 @@ def send_to_telegram(message): except Exception as e: print(e) +def transform_data(data, transform_function): + """ + Recursively transform the data in a dictionary, list of dictionaries, or nested dictionaries + using a specified transformation function. + + This function applies the transformation function to each value in the data structure. + It handles nested dictionaries and lists of dictionaries. + + Parameters: + data (dict or list): The dictionary, list of dictionaries, or nested dictionary to be transformed. + transform_function (function): The function to be applied to each value in the data. This function + should accept a single value and return a transformed value. 
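+
+    Example (illustrative; any callable can serve as transform_function):
+        >>> transform_data({"a": 1, "b": [{"c": 2}]}, lambda v: v * 10)
+        {'a': 10, 'b': [{'c': 20}]}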
+
+    Returns:
+    dict or list: The transformed dictionary, list of dictionaries, or nested dictionary with each value
+                  processed by the transform_function.
+
+    Note:
+    If transform_function raises a TypeError for a value, that value is returned unchanged.
+    """
+    if isinstance(data, dict):
+        return {key: transform_data(value, transform_function) for key, value in data.items()}
+    elif isinstance(data, list):
+        return [transform_data(element, transform_function) for element in data]
+    else:
+        try:
+            return transform_function(data)
+        except TypeError:
+            return data
+
 #OPTIMIZED BY BARD
 def json_serial(obj):
     """JSON serializer for objects not serializable by default json code
@@ -341,6 +371,7 @@ def json_serial(obj):
         UUID: lambda obj: str(obj),
         Enum: lambda obj: str(obj),
         np.int64: lambda obj: int(obj),
+        np.float64: lambda obj: float(obj),
         Order: lambda obj: obj.__dict__,
         TradeUpdate: lambda obj: obj.__dict__,
         btOrder: lambda obj: obj.__dict__,