91 Commits

Author SHA1 Message Date
9830cbee71 archrunner db query searches for symbol, name 2024-03-15 10:03:35 +01:00
5fce627fe3 toml validation to frontend (#174) 2024-03-14 17:39:52 +01:00
8de1356aa8 #163 transferables (#172) 2024-03-14 14:16:01 +01:00
7f47890cad #168 #166 and additional fixes (#169) 2024-03-13 12:31:06 +01:00
8cf1aea2a8 run update 2024-03-07 14:07:46 +01:00
9231c1d273 bugfix - the maxloss check is now performed at the FILL event, when the total amount is known 2024-03-06 15:50:16 +01:00
9391d89aab #148 #158 config refactoring to support profiles/reloading (#165) 2024-03-06 14:30:24 +01:00
9cff5fe6a1 #155 + moved row_to from db.py to transform.py 2024-03-06 13:31:09 +01:00
0e5cf5f3e0 Merge pull request #161 from drew2323/local
Minor changes for installation on windows
2024-03-04 17:03:50 +01:00
90c33c0528 Delete run.sh 2024-03-04 17:01:47 +01:00
e9e6534d2b primary live account api and secret changed 2024-03-04 16:57:10 +01:00
5874528d23 line 29: removed integrity and crossorigin values 2024-02-28 08:08:21 +01:00
985445d814 user_data_dir function has a second parameter author; ACCOUNT1_LIVE still has PAPER_API_KEY and SECRET_KEY 2024-02-28 08:04:02 +01:00
6c1f7f0e2e changed VIRTUAL_ENV_DIR and PYTHON_TO_USE 2024-02-27 18:15:35 +01:00
20aaa2ac23 #135 -> BT same period button 2024-02-27 12:03:57 +07:00
691514b102 all dates in gui are in market time zone (even start/stop) 2024-02-27 10:53:30 +07:00
84903aff77 batchprofit/batchcount columns hidden from archiverunners gui 2024-02-27 08:15:07 +07:00
4887e32665 #149 2024-02-26 22:42:03 +07:00
ce99448a48 moved config related services into separated package 2024-02-26 19:35:19 +07:00
887ea0ef00 #147 2024-02-26 11:30:13 +07:00
af7b678699 debug condition restored 2024-02-24 21:23:17 +07:00
04c63df045 temporarily disabled for testing 2024-02-24 21:17:10 +07:00
ebac207489 #143 2024-02-24 20:32:01 +07:00
9f99ddc86a live_data_feed stored in runner_archive 2024-02-23 21:20:07 +07:00
e75fbc7194 bugfix 2024-02-23 21:04:23 +07:00
c4d05f47ff #139 LIVE_DATA_FEED configuration 2024-02-23 12:35:02 +07:00
f6e31f45f9 #136 bugfix properly closing ws 2024-02-23 10:30:12 +07:00
c42b1c4e1e fix 2024-02-22 23:23:20 +07:00
1bf11d0dc4 fix 2024-02-22 23:20:54 +07:00
1abbb07390 Scheduler support #24sched 2024-02-22 23:05:49 +07:00
b58639454b unknown symbol msg 2024-02-12 10:45:23 +07:00
a7e83fe051 bugfix create batch image (check for None from Alpaca) 2024-02-11 15:26:15 +07:00
6795338eba createbatch image tool + send to telegram enrichment 2024-02-11 12:37:19 +07:00
9aa8b58877 updated requirements.txt 2024-02-10 21:35:53 +07:00
eff78e8157 keys to env variables, optimizations 2024-02-10 21:02:00 +07:00
d8bcc4bb8f Merge branch 'master' of https://github.com/drew2323/v2trading 2024-02-06 11:16:58 +07:00
7abdf47545 ok 2024-02-06 11:16:09 +07:00
1f8afef042 calendar wrapper with retry, histo bars with retry 2024-02-06 11:14:38 +07:00
df60d16eb4 Update README.md 2024-02-06 09:52:53 +07:00
535c2824b0 Update README.md 2024-02-06 09:34:33 +07:00
9cf936672d Update README.md 2024-02-06 09:30:56 +07:00
c1ad713a12 bugfix None in trade response 2024-02-05 10:22:20 +07:00
e9bb8b84ec fixes 2024-02-04 17:55:43 +07:00
603736d441 Merge branch 'master' of https://github.com/drew2323/v2trading 2024-02-04 17:54:09 +07:00
8456e6d739 tulipy_ind support, multioutput scale and gzip cache support #106 #115 #114 #112 #107 2024-02-04 17:54:03 +07:00
2c968691d1 Update README.md 2024-01-31 13:39:33 +07:00
435b4d899a Create README.md 2024-01-31 13:37:45 +07:00
c1145fec5b multioutput indicators #15 + talib custom indicator support 2024-01-16 15:17:14 +01:00
5d47a7ac58 dailyBars ratio features added 2024-01-11 14:04:09 +01:00
cd461c701e dailyBars inds extended+ gui tick inds disable button 2024-01-09 15:11:56 +01:00
a7df38c61b fix targetema beginning 2024-01-03 17:08:09 +01:00
b21bd9487a fix 2024-01-02 16:22:58 +01:00
c3b466c4c0 targetema labeler 2023-12-29 16:35:38 +01:00
0909fa947f weekday daytime dailybars ind support 2023-12-28 11:33:19 +01:00
77faa919c0 jax support added/multiinput 2023-12-26 18:25:25 +01:00
17b9859a73 bugfix 2023-12-17 18:43:46 +01:00
85d4916320 cbar indicators + ml enhancements 2023-12-15 18:02:45 +01:00
a70e2adf45 bugfix 2023-12-12 21:26:24 +01:00
527c3139f2 bugfix 2023-12-12 18:28:04 +01:00
5bbb95eeac bugfix 2023-12-12 15:53:20 +01:00
3158cdb68b tick based support including gui preview, custom support, new classed tickbased inds, #85 2023-12-11 19:24:06 +01:00
5cc3a1c318 bugfix json gui parsing 2023-12-10 20:32:21 +01:00
232f32467e gui model metadata view + backend json optimization with orjson 2023-12-10 15:02:25 +01:00
523905ece6 gui ml modal view 2023-12-08 19:11:08 +01:00
ac11c37e77 bugfix backtesting fewer trades BLK 2023-12-08 10:50:09 +01:00
90b202cfdd classed indicators draft 2023-12-07 09:46:17 +01:00
8abebcc910 download model 2023-12-06 19:47:03 +01:00
5a5e94eeb5 upload download model from gui 2023-12-06 15:23:05 +01:00
01ff23907f bugfix 2023-12-06 11:12:25 +01:00
6cdc0a45c5 decomm ml, target algorithm and other tweaks 2023-12-06 10:51:50 +01:00
d38bf0600f bugfix endmarket stop 2023-12-01 23:59:43 +01:00
0f0b816c7a bugfix live runner crashes after clicking stop 2023-12-01 23:35:48 +01:00
7344e49591 bugfix 2023-11-30 17:04:25 +01:00
116700f3e4 batch header row symbol added 2023-11-30 14:55:05 +01:00
d06faa4c9b checkbox bugfix 2023-11-30 14:41:43 +01:00
95cd7ead8a cache 2023-11-30 14:24:12 +01:00
8e1fa604a5 cache 2023-11-30 14:21:18 +01:00
db210e6be7 upd cachers 2023-11-30 14:14:20 +01:00
2ecb90d83f dynamic toolbuttons on json and plugin report system 2023-11-30 14:11:03 +01:00
648489b0f4 bugfix 2023-11-28 17:19:25 +01:00
b54861bb62 bugfix css 2023-11-28 17:17:54 +01:00
804f4450a8 martin paper account add 2023-11-28 15:38:04 +01:00
c6504043ed martin paper add 2023-11-28 15:36:57 +01:00
c7d7ca96a3 refactor archiveRunner js files + genanal preparation 2023-11-28 13:52:06 +01:00
6a459cd745 profit color tweaks 2023-11-27 18:19:51 +01:00
208a1acae5 batch profits color-coded 2023-11-27 18:10:34 +01:00
5fab264493 another nobatch expands tweak - transparent color 2023-11-27 18:00:10 +01:00
a1e4d8d726 browser cache refresh 2023-11-27 17:50:28 +01:00
78c40f6d1a expand/collapse in archrun gui (experimental) 2023-11-27 17:48:02 +01:00
a520c2fd2f css finalization 2023-11-27 17:03:23 +01:00
e9c3849bbc bugfix 2023-11-27 14:52:20 +01:00
171 changed files with 13778 additions and 3258 deletions

CODEOWNERS (new file, 1 line)

@@ -0,0 +1 @@
* @drew2323

README.md (new file, 53 lines)

@@ -0,0 +1,53 @@
**README - V2TRADING - Advanced Algorithmic Trading Platform**
**Overview**
A custom-built algorithmic trading platform for research, backtesting, and automated trading. Its trading engine processes tick data, manages trades, and supports backtesting with high accuracy and efficiency.
**Key Features**
- **Trading Engine**: At the core of the platform is a trading engine that processes tick data in real time. This engine is responsible for aggregating data and managing the execution of trades, ensuring precision and speed in trade placement and execution.
- **High-Fidelity Backtesting Environment**: The ability to backtest strategies with 1:1, tick-by-tick precision. This level of precision, down to millisecond accuracy, mirrors live trading environments and is vital for developing and testing high-frequency trading strategies.
- **Custom Data Aggregation:** The platform includes a data aggregator that allows for custom aggregation rules. This flexibility supports a variety of data analysis approaches, including non-time-based bars and other unique criteria.
- **Indicators:** Includes built-in [tulipy](https://tulipindicators.org/list) and [ta-lib](https://ta-lib.github.io/ta-lib-python/) indicators, plus templates for custom-built multi-output stateful indicators (a minimal sketch of such an indicator follows this list).
- **Machine Learning Integration:** Recently, the platform has expanded to incorporate machine learning capabilities. This includes modules for both training and inference, supporting the complete ML lifecycle. These ML models can be utilized within trading strategies for classification and exploiting statistical advantages.
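For a flavor of the custom indicator template mentioned above, here is a minimal, illustrative sketch of a stateful multi-output indicator (the platform's real template and registration API may differ):

```python
# Illustrative sketch only - not the platform's actual indicator template.
# A stateful, multi-output indicator: an incrementally updated EMA plus the
# distance of the latest price from it, computed tick by tick.
class EmaDistance:
    def __init__(self, length: int):
        self.alpha = 2 / (length + 1)
        self.ema = None  # state carried across ticks

    def update(self, price: float) -> tuple[float, float]:
        # the first tick seeds the EMA; later ticks blend in incrementally
        if self.ema is None:
            self.ema = price
        else:
            self.ema = self.alpha * price + (1 - self.alpha) * self.ema
        return self.ema, price - self.ema  # two outputs per tick

ind = EmaDistance(length=10)
for tick_price in (100.0, 100.5, 99.8, 101.2):
    ema, dist = ind.update(tick_price)
    print(f"ema={ema:.3f} distance={dist:.3f}")
```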
**Technology Stack**
**Backend and API:** The backbone of the platform is built with Python, utilizing libraries such as FastAPI, NumPy, Keras, and JAX, ensuring high performance and scalability.
**Frontend:** The client side is developed in vanilla JavaScript and jQuery, using LightweightCharts for charting. Additional modules enhance the platform's functionality. The frontend is slated for a future refactoring to modern frameworks such as Vue.js and Vuetify for a more robust user interface.
While the platform is fully functional and growing, ongoing development is planned, particularly in the realm of frontend enhancements and further integration of advanced machine learning techniques.
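For illustration, a minimal endpoint in the style such a FastAPI backend might expose (the route, model fields, and handler body are assumptions made for this sketch, not the project's actual API):

```python
# Hypothetical sketch - route and model are illustrative, not the project's API.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class RunRequest(BaseModel):
    strategy_id: str
    mode: str = "backtest"

@app.post("/runners/")
async def start_runner(req: RunRequest):
    # a real implementation would enqueue and start the strategy runner here
    return {"status": "accepted", "strategy_id": req.strategy_id, "mode": req.mode}
```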
**Contributions**
Contributions to this project are welcome. Whether it's improving the frontend, enhancing the backend capabilities, or experimenting with new trading strategies and machine learning models, your input can help take this platform to the next level.
This repository represents a sophisticated and evolving tool for algorithmic traders, offering precision, speed, and a level of customization that is unparalleled in open-source systems. Join us in shaping the future of algorithmic trading.
<p align="center">
Main screen with entry/exit points and stoploss lines<br>
<img width="700" alt="Main screen with entry/exit points and stoploss lines" src="https://github.com/drew2323/v2trading/assets/28433232/751d5b0e-ef64-453f-8e76-89a39db679c5">
</p>
<p align="center">
Main screen with tick based indicators<br>
<img width="700" alt="Main screen with tick based indicators" src="https://github.com/drew2323/v2trading/assets/28433232/4bf6128c-9b36-4e88-9da1-5a33319976a1">
</p>
<p align="center">
Indicator editor<br>
<img width="700" alt="Indicator editor" src="https://github.com/drew2323/v2trading/assets/28433232/cc417393-7b88-4eea-afcb-3a00402d0a8d">
</p>
<p align="center">
Strategy editor<br>
<img width="700" alt="Strategy editor" src="https://github.com/drew2323/v2trading/assets/28433232/74f67e7a-1efc-4f63-b763-7827b2337b6a">
</p>
<p align="center">
Strategy analytical tools<br>
<img width="700" alt="Strategy analytical tools" src="https://github.com/drew2323/v2trading/assets/28433232/4bf8b3c3-e430-4250-831a-e5876bb6b743">
</p>

_run_scheduler.sh (new executable file, 51 lines)

@@ -0,0 +1,51 @@
#!/bin/bash
# Approach: (https://chat.openai.com/c/43be8685-b27b-4e3b-bd18-0856f8d23d7e)
# cron runs this script every minute between 9:20 and 16:20 New York time.
# The script also writes a "heartbeat" message to the log file, so the user
# knows that cron is running.
# Installation steps required:
#   chmod +x run_scheduler.sh
#   install the tzdata package: sudo apt-get install tzdata
#   crontab -e
#     CRON_TZ=America/New_York
#     * 9-16 * * 1-5 /home/david/v2trading/run_scheduler.sh
#
# (Runs every minute of hours 9-16 on every day-of-week from Monday to Friday, US Eastern time)
# Path to the Python script
PYTHON_SCRIPT="v2realbot/scheduler/scheduler.py"
# Log file path
LOG_FILE="job.log"
# Timezone for New York
TZ='America/New_York'
NY_DATE_TIME=$(TZ=$TZ date +'%Y-%m-%d %H:%M:%S')
echo "NY_DATE_TIME: $NY_DATE_TIME"
# Check if log file exists, create it if it doesn't
if [ ! -f "$LOG_FILE" ]; then
    touch "$LOG_FILE"
fi
# Check the last line of the log file
LAST_LINE=$(tail -n 1 "$LOG_FILE")
# Cron trigger message
CRON_TRIGGER="Cron trigger: $NY_DATE_TIME"
# Update the log
if [[ "$LAST_LINE" =~ "Cron trigger:".* ]]; then
    # Replace the last line with the new trigger message
    # (GNU sed; on macOS/BSD sed the in-place flag needs an argument: sed -i '' '$ d')
    sed -i '$ d' "$LOG_FILE"
    echo "$CRON_TRIGGER" >> "$LOG_FILE"
else
    # Append a new cron trigger message
    echo "$CRON_TRIGGER" >> "$LOG_FILE"
fi
# FOR DEBUG - Run the Python script and append output to log file
python3 "$PYTHON_SCRIPT" >> "$LOG_FILE" 2>&1

deployall.sh (new executable file, 7 lines)

@@ -0,0 +1,7 @@
#!/bin/bash
# Run this from your git repository directory
# Execute git commands
git push deploytest master
git push deploy master

job.log (new file, 1251 lines)

File diff suppressed because it is too large

jobs.log (new file, 1 line)

@@ -0,0 +1 @@
Current 0 scheduled jobs: []

requirements.txt (modified)

@@ -1,18 +1,24 @@
+absl-py==2.0.0
 alpaca==1.0.0
 alpaca-py==0.7.1
 altair==4.2.2
 anyio==3.6.2
 appdirs==1.4.4
+appnope==0.1.3
 asttokens==2.2.1
+astunparse==1.6.3
 attrs==22.2.0
 better-exceptions==0.3.3
 bleach==6.0.0
 blinker==1.5
 cachetools==5.3.0
+CD==1.1.0
 certifi==2022.12.7
 chardet==5.1.0
 charset-normalizer==3.0.1
 click==8.1.3
+colorama==0.4.6
+comm==0.1.4
 contourpy==1.0.7
 cycler==0.11.0
 dash==2.9.1
@@ -20,35 +26,82 @@ dash-bootstrap-components==1.4.1
 dash-core-components==2.0.0
 dash-html-components==2.0.0
 dash-table==5.0.0
+dateparser==1.1.8
 decorator==5.1.1
+defusedxml==0.7.1
+dill==0.3.7
+dm-tree==0.1.8
 entrypoints==0.4
+exceptiongroup==1.1.3
 executing==1.2.0
 fastapi==0.95.0
+filelock==3.13.1
 Flask==2.2.3
+flatbuffers==23.5.26
 fonttools==4.39.0
+fpdf2==2.7.6
+gast==0.4.0
 gitdb==4.0.10
 GitPython==3.1.31
+google-auth==2.23.0
+google-auth-oauthlib==1.0.0
+google-pasta==0.2.0
+grpcio==1.58.0
 h11==0.14.0
+h5py==3.10.0
 icecream==2.1.3
 idna==3.4
+imageio==2.31.6
 importlib-metadata==6.1.0
+ipython==8.17.2
+ipywidgets==8.1.1
 itsdangerous==2.1.2
+jax==0.4.23
+jaxlib==0.4.23
+jedi==0.19.1
+Jinja2==3.1.2
+joblib==1.3.2
 jsonschema==4.17.3
+jupyterlab-widgets==3.0.9
+keras==3.0.2
+keras-core==0.1.7
+keras-nightly==3.0.3.dev2024010203
+keras-nlp-nightly==0.7.0.dev2024010203
+keras-tcn @ git+https://github.com/drew2323/keras-tcn.git@4bddb17a02cb2f31c9fe2e8f616b357b1ddb0e11
 kiwisolver==1.4.4
+libclang==16.0.6
+llvmlite==0.39.1
 Markdown==3.4.3
 markdown-it-py==2.2.0
 MarkupSafe==2.1.2
+matplotlib==3.8.2
+matplotlib-inline==0.1.6
 mdurl==0.1.2
+ml-dtypes==0.3.1
+mlroom @ git+https://github.com/drew2323/mlroom.git@692900e274c4e0542d945d231645c270fc508437
+mplfinance==0.12.10b0
 msgpack==1.0.4
+mypy-extensions==1.0.0
+namex==0.0.7
 newtulipy==0.4.6
-numpy==1.24.2
+numba==0.56.4
+numpy==1.23.5
+oauthlib==3.2.2
+opt-einsum==3.3.0
+orjson==3.9.10
 packaging==23.0
 pandas==1.5.3
 param==1.13.0
+parso==0.8.3
+patsy==0.5.6
+pexpect==4.8.0
 Pillow==9.4.0
 plotly==5.13.1
+prompt-toolkit==3.0.39
 proto-plus==1.22.2
 protobuf==3.20.3
+ptyprocess==0.7.0
+pure-eval==0.2.2
 pyarrow==11.0.0
 pyasn1==0.4.8
 pyasn1-modules==0.2.8
@@ -56,41 +109,72 @@ pyct==0.5.0
 pydantic==1.10.5
 pydeck==0.8.0
 Pygments==2.14.0
+pyinstrument==4.5.3
 Pympler==1.0.1
 pyparsing==3.0.9
 pyrsistent==0.19.3
 pysos==1.3.0
 python-dateutil==2.8.2
 python-dotenv==1.0.0
+python-multipart==0.0.6
 pytz==2022.7.1
 pytz-deprecation-shim==0.1.0.post0
 pyviz-comms==2.2.1
+PyWavelets==1.5.0
 PyYAML==6.0
-requests==2.28.2
+regex==2023.10.3
+requests==2.31.0
+requests-oauthlib==1.3.1
 rich==13.3.1
 rsa==4.9
+schedule==1.2.1
+scikit-learn==1.3.2
+scipy==1.11.2
 seaborn==0.12.2
 semver==2.13.0
 six==1.16.0
 smmap==5.0.0
 sniffio==1.3.0
 sseclient-py==1.7.2
+stack-data==0.6.3
 starlette==0.26.1
+statsmodels==0.14.1
 streamlit==1.20.0
 structlog==23.1.0
+TA-Lib==0.4.28
+tb-nightly==2.16.0a20240102
 tenacity==8.2.2
+tensorboard==2.15.1
+tensorboard-data-server==0.7.1
+tensorflow-addons==0.23.0
+tensorflow-estimator==2.15.0
+tensorflow-io-gcs-filesystem==0.34.0
+termcolor==2.3.0
+tf-estimator-nightly==2.14.0.dev2023080308
+tf-nightly==2.16.0.dev20240101
+tf_keras-nightly==2.16.0.dev2023123010
+threadpoolctl==3.2.0
+tinydb==4.7.1
+tinydb-serialization==2.1.0
+tinyflux==0.4.0
 toml==0.10.2
 tomli==2.0.1
 toolz==0.12.0
 tornado==6.2
 tqdm==4.65.0
+traitlets==5.13.0
+typeguard==2.13.3
 typing_extensions==4.5.0
 tzdata==2023.2
 tzlocal==4.3
 urllib3==1.26.14
 uvicorn==0.21.1
+-e git+https://github.com/drew2323/v2trading.git@b58639454be921f9f0c9dd1880491cfcfdfdf3b7#egg=v2realbot
 validators==0.20.0
+wcwidth==0.2.9
 webencodings==0.5.1
 websockets==10.4
 Werkzeug==2.2.3
+widgetsnbextension==4.0.9
+wrapt==1.14.1
 zipp==3.15.0

res_pred_act.png (new binary file, 26 KiB; not shown)

res_target.png (new binary file, 20 KiB; not shown)

restart.sh (new executable file, 45 lines)

@@ -0,0 +1,45 @@
#!/bin/bash
# file: restart.sh
# Usage: ./restart.sh [test|prod|all]
# Define server addresses
TEST_SERVER="david@142.132.188.109"
PROD_SERVER="david@5.161.179.223"
# Define the remote directory where the script is located
REMOTE_DIR="v2trading"
# Check for argument
if [ "$#" -ne 1 ]; then
echo "Usage: $0 [test|prod|all]"
exit 1
fi
# Function to restart a server
restart_server() {
local server=$1
echo "Connecting to $server to restart the Python app..."
ssh -t $server "cd $REMOTE_DIR && . ~/.bashrc && ./run.sh restart" # Sourcing .bashrc here
echo "Operation completed on $server."
}
# Select the server based on the input argument
case $1 in
test)
restart_server $TEST_SERVER
;;
prod)
restart_server $PROD_SERVER
;;
all)
restart_server $TEST_SERVER
restart_server $PROD_SERVER
;;
*)
echo "Invalid argument: $1. Use 'test', 'prod', or 'all'."
exit 1
;;
esac

run.sh (modified)

@@ -26,12 +26,27 @@ PYTHON_TO_USE="python3"
 #----END EDITABLE VARS-------
+# Additions for handling strat.log backup
+HISTORY_DIR="$HOME/stratlogs"
+TIMESTAMP=$(date +"%Y%m%d-%H%M%S")
+LOG_FILE="strat.log"
+BACKUP_LOG_FILE="$HISTORY_DIR/${TIMESTAMP}_$LOG_FILE"
 # If virtualenv specified & exists, using that version of python instead.
 if [ -d "$VIRTUAL_ENV_DIR" ]; then
     PYTHON_TO_USE="$VIRTUAL_ENV_DIR/bin/python"
 fi
 start() {
+    # Check and create history directory if it doesn't exist
+    [ ! -d "$HISTORY_DIR" ] && mkdir -p "$HISTORY_DIR"
+    # Check if strat.log exists and back it up
+    if [ -f "$LOG_FILE" ]; then
+        mv "$LOG_FILE" "$BACKUP_LOG_FILE"
+        echo "Backed up log to $BACKUP_LOG_FILE"
+    fi
     if [ ! -e "$OUTPUT_PID_PATH/$OUTPUT_PID_FILE" ]; then
         nohup "$PYTHON_TO_USE" ./$SCRIPT_TO_EXECUTE_PLUS_ARGS > strat.log 2>&1 & echo $! > "$OUTPUT_PID_PATH/$OUTPUT_PID_FILE"
         echo "Started $SCRIPT_TO_EXECUTE_PLUS_ARGS @ Process: $!"

setup.py (modified)

@@ -1,7 +1,7 @@
 from setuptools import find_packages, setup
 setup(name='v2realbot',
-      version='0.9',
+      version='0.91',
       description='Realbot trader',
       author='David Brazda',
       author_email='davidbrazda61@gmail.com',

tested_runner.png (new binary file, 21 KiB; not shown)

(modified file)

@@ -23,12 +23,12 @@ clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY,
 #get previous days bar
-datetime_object_from = datetime.datetime(2023, 10, 11, 4, 0, 00, tzinfo=datetime.timezone.utc)
-datetime_object_to = datetime.datetime(2023, 10, 16, 16, 1, 00, tzinfo=datetime.timezone.utc)
-calendar_request = GetCalendarRequest(start=datetime_object_from,end=datetime_object_to)
-cal_dates = clientTrading.get_calendar(calendar_request)
-print(cal_dates)
-bar_request = StockBarsRequest(symbol_or_symbols="BAC",timeframe=TimeFrame.Day, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)
+datetime_object_from = datetime.datetime(2024, 3, 9, 13, 29, 00, tzinfo=datetime.timezone.utc)
+datetime_object_to = datetime.datetime(2024, 3, 11, 20, 1, 00, tzinfo=datetime.timezone.utc)
+# calendar_request = GetCalendarRequest(start=datetime_object_from,end=datetime_object_to)
+# cal_dates = clientTrading.get_calendar(calendar_request)
+# print(cal_dates)
+bar_request = StockBarsRequest(symbol_or_symbols="BAC",timeframe=TimeFrame.Minute, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)
 # bars = client.get_stock_bars(bar_request).df

(modified file)

@@ -23,7 +23,7 @@ from rich import print
 from collections import defaultdict
 from pandas import to_datetime
 from msgpack.ext import Timestamp
-from v2realbot.utils.historicals import convert_daily_bars
+from v2realbot.utils.historicals import convert_historical_bars
 def get_last_close():
     pass
@@ -38,7 +38,7 @@ def get_historical_bars(symbol: str, time_from: datetime, time_to: datetime, tim
     bars: BarSet = stock_client.get_stock_bars(bar_request)
     print("puvodni bars", bars["BAC"])
     print(bars)
-    return convert_daily_bars(bars[symbol])
+    return convert_historical_bars(bars[symbol])
 #in init we fill the requested historical data into historicals[]

(modified file)

@@ -1,12 +1,14 @@
 import scipy.interpolate as spi
 import matplotlib.pyplot as plt
+import numpy as np
-x = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
-y = [4, 7, 11, 16, 22, 29, 38, 49, 63, 80]
+# x = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
+# y = [4, 7, 11, 16, 22, 29, 38, 49, 63, 80]
+val = 10
+new = np.interp(val, [0, 50, 100], [0, 1, 2])
+print(new)
-y_interp = spi.interp1d(x, y)
+# y_interp = spi.interp1d(x, y)
 #find y-value associated with x-value of 13
 #print(y_interp(13))
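For context on the change above: `np.interp` performs piecewise-linear interpolation over the given sample points, so the committed call evaluates to 0.2:

```python
import numpy as np

# 10 lies 1/5 of the way from 0 to 50, so the result is 1/5 of the way from 0 to 1
print(np.interp(10, [0, 50, 100], [0, 1, 2]))  # 0.2
```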

File diff suppressed because one or more lines are too long

testy/createbatchimage.py (new file, 18 lines)

@@ -0,0 +1,18 @@
import argparse
import v2realbot.reporting.metricstoolsimage as mt

# Parse the command-line arguments
# parser = argparse.ArgumentParser(description="Generate trading report image with batch ID")
# parser.add_argument("batch_id", type=str, help="The batch ID for the report")
# args = parser.parse_args()
# batch_id = args.batch_id

# Generate the report image
res, val = mt.generate_trading_report_image(batch_id="4d7dc163")

# Print the result
if res == 0:
    print("BATCH REPORT CREATED")
else:
    print(f"BATCH REPORT ERROR - {val}")

testy/getrunnerdetail.py (new file, 89 lines)

@@ -0,0 +1,89 @@
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
import v2realbot.controller.services as cs
from v2realbot.utils.utils import slice_dict_lists, zoneUTC, safe_get, AttributeDict

id = "b11c66d9-a9b6-475a-9ac1-28b11e1b4edf"
state = AttributeDict(vars={})

## basis for init_attached_data in strategy.init
# def get_previous_runner(state):
#     runner : Runner
#     res, runner = cs.get_runner(state.runner_id)
#     if res < 0:
#         print(f"Not running {id}")
#         return 0, None
#     return 0, runner.batch_id

def attach_previous_data(state):
    runner : Runner
    #get batch_id of the current runner
    res, runner = cs.get_runner(state.runner_id)
    if res < 0 or runner.batch_id is None:
        print(f"Couldn't get previous runner {runner}")
        return None
    batch_id = runner.batch_id
    #batch_id = "6a6b0bcf"
    res, runner_ids = cs.get_archived_runnerslist_byBatchID(batch_id, "desc")
    if res < 0:
        msg = f"error when fetching runners of batch {batch_id} {runner_ids}"
        print(msg)
        return None
    if runner_ids is None or len(runner_ids) == 0:
        print(f"no runners found for batch {batch_id} {runner_ids}")
        return None
    last_runner = runner_ids[0]
    print("Previous runner identified:", last_runner)
    #get details from the runner
    res, val = cs.get_archived_runner_details_byID(last_runner)
    if res < 0:
        print(f"no archived runner {last_runner}")
        return None
    detail = RunArchiveDetail(**val)
    #print("this is what we fetched", detail.bars)
    # from stratvars directives
    attach_previous_bars_indicators = safe_get(state.vars, "attach_previous_bars_indicators", 50)
    attach_previous_cbar_indicators = safe_get(state.vars, "attach_previous_cbar_indicators", 50)
    # [stratvars]
    # attach_previous_bars_indicators = 50
    # attach_previous_cbar_indicators = 50
    #indicators: datetime utc
    indicators = slice_dict_lists(d=detail.indicators[0], last_item=attach_previous_bars_indicators, time_to_datetime=True)
    #time - datetime utc, updated - timestamp float
    bars = slice_dict_lists(d=detail.bars, last_item=attach_previous_bars_indicators, time_to_datetime=True)
    #cbar_indicators #float
    cbar_inds = slice_dict_lists(d=detail.indicators[1], last_item=attach_previous_cbar_indicators)
    #USE these as INITs - TODO: pause here and compare first
    print(f"{state.indicators=} NEW:{indicators=}")
    state.indicators = indicators
    print(f"{state.bars=} NEW:{bars=}")
    state.bars = bars
    print(f"{state.cbar_indicators=} NEW:{cbar_inds=}")
    state.cbar_indicators = cbar_inds
    print("BARS and INDS INITIALIZED")
    #bars
    #any further initializations from ext_data go here
    print("EXT_DATA", detail.ext_data)
    #depending on configuration settings, specific variables are used
    #adding dailyBars from extData
    # if hasattr(detail, "ext_data") and "dailyBars" in detail.ext_data:
    #     state.dailyBars = detail.ext_data["dailyBars"]

if __name__ == "__main__":
    attach_previous_data(state)

(modified file)

@@ -2,7 +2,7 @@ import sqlite3
 from v2realbot.config import DATA_DIR
 from v2realbot.utils.utils import json_serial
 from uuid import UUID, uuid4
-import json
+import orjson
 from datetime import datetime
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
 from v2realbot.common.model import RunArchiveDetail, RunArchive, RunArchiveView
@@ -35,14 +35,14 @@ def row_to_object(row: dict) -> RunArchive:
         end_positions=row.get('end_positions'),
         end_positions_avgp=row.get('end_positions_avgp'),
         metrics=row.get('open_orders'),
-        #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None,
+        #metrics=orjson.loads(row.get('metrics')) if row.get('metrics') else None,
         stratvars_toml=row.get('stratvars_toml')
     )
 def get_all_archived_runners():
     conn = pool.get_connection()
     try:
-        conn.row_factory = lambda c, r: json.loads(r[0])
+        conn.row_factory = lambda c, r: orjson.loads(r[0])
         c = conn.cursor()
         res = c.execute(f"SELECT data FROM runner_header")
     finally:
@@ -54,7 +54,7 @@ def insert_archive_header(archeader: RunArchive):
     conn = pool.get_connection()
     try:
         c = conn.cursor()
-        json_string = json.dumps(archeader, default=json_serial)
+        json_string = orjson.dumps(archeader, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
         if archeader.batch_id is not None:
             statement = f"INSERT INTO runner_header (runner_id, batch_id, ra) VALUES ('{str(archeader.id)}','{str(archeader.batch_id)}','{json_string}')"
         else:
@@ -103,7 +103,7 @@ def migrate_to_columns(ra: RunArchive):
         SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=?
         WHERE runner_id=?
         ''',
-        (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))
+        (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, orjson.dumps(ra.strat_json), orjson.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, orjson.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))
     conn.commit()
 finally:
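A side note on the json-to-orjson migration above (an observation about the libraries, not part of the commit): `orjson.dumps()` returns `bytes` where `json.dumps()` returns `str`, and `orjson` serializes `datetime` natively unless `OPT_PASSTHROUGH_DATETIME` routes it through the `default` callback. Code that embeds the result into an SQL string therefore typically needs a decode step; a minimal illustration:

```python
import orjson
from datetime import datetime, timezone

payload = {"runner_id": "abc", "started": datetime.now(timezone.utc)}

raw = orjson.dumps(payload)        # orjson returns bytes, json.dumps returns str
json_string = raw.decode("utf-8")  # decode before formatting into an SQL statement

print(type(raw).__name__, type(json_string).__name__)  # bytes str
```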

(modified file)

@@ -2,7 +2,7 @@ import sqlite3
 from v2realbot.config import DATA_DIR
 from v2realbot.utils.utils import json_serial
 from uuid import UUID, uuid4
-import json
+import orjson
 from datetime import datetime
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
 from v2realbot.common.model import RunArchiveDetail
@@ -11,7 +11,7 @@ from tinydb import TinyDB, Query, where
 sqlite_db_file = DATA_DIR + "/v2trading.db"
 conn = sqlite3.connect(sqlite_db_file)
 #by default returns a list of tuples, where tuple members are the columns
-#conn.row_factory = lambda c, r: json.loads(r[0])
+#conn.row_factory = lambda c, r: orjson.loads(r[0])
 #conn.row_factory = lambda c, r: r[0]
 #conn.row_factory = sqlite3.Row
@@ -28,7 +28,7 @@ insert_list = [dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordT
 def insert_log(runner_id: UUID, time: float, logdict: dict):
     c = conn.cursor()
-    json_string = json.dumps(logdict, default=json_serial)
+    json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)", [str(runner_id), time, json_string])
     conn.commit()
     return res.rowcount
@@ -37,14 +37,14 @@ def insert_log_multiple(runner_id: UUID, loglist: list):
     c = conn.cursor()
     insert_data = []
     for i in loglist:
-        row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
+        row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME))
         insert_data.append(row)
     c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
     conn.commit()
     return c.rowcount
 # c = conn.cursor()
-# json_string = json.dumps(logdict, default=json_serial)
+# json_string = orjson.dumps(logdict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 # res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
 # print(res)
 # conn.commit()
@@ -52,7 +52,7 @@ def insert_log_multiple(runner_id: UUID, loglist: list):
 #returns list of ilog jsons
 def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float):
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_logs WHERE runner_id='{str(runner_id)}' AND time >={ts_from} AND time <={ts_to}")
     return res.fetchall()
@@ -94,21 +94,21 @@ def delete_logs(runner_id: UUID):
 def insert_archive_detail(archdetail: RunArchiveDetail):
     c = conn.cursor()
-    json_string = json.dumps(archdetail, default=json_serial)
+    json_string = orjson.dumps(archdetail, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = c.execute("INSERT INTO runner_detail VALUES (?,?)", [str(archdetail["id"]), json_string])
     conn.commit()
     return res.rowcount
 #returns list of details
 def get_all_archive_detail():
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_detail")
     return res.fetchall()
 #returns a specific one
 def get_archive_detail_byID(runner_id: UUID):
-    conn.row_factory = lambda c, r: json.loads(r[0])
+    conn.row_factory = lambda c, r: orjson.loads(r[0])
     c = conn.cursor()
     res = c.execute(f"SELECT data FROM runner_detail WHERE runner_id='{str(runner_id)}'")
     return res.fetchone()
@@ -123,7 +123,7 @@ def delete_archive_detail(runner_id: UUID):
 def get_all_archived_runners_detail():
     arch_detail_file = DATA_DIR + "/arch_detail.json"
-    db_arch_d = TinyDB(arch_detail_file, default=json_serial)
+    db_arch_d = TinyDB(arch_detail_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
     res = db_arch_d.all()
     return 0, res

(modified file)

@@ -4,7 +4,7 @@ from keras.models import Sequential
 from keras.layers import LSTM, Dense
 from v2realbot.controller.services import get_archived_runner_details_byID
 from v2realbot.common.model import RunArchiveDetail
-import json
+import orjson
 runner_id = "838e918e-9be0-4251-a968-c13c83f3f173"
 result = None

testy/pickle.py (new file, 39 lines)

@@ -0,0 +1,39 @@
import pickle
import os
from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR, BT_FILL_CONS_TRADES_REQUIRED, BT_FILL_LOG_SURROUNDING_TRADES, BT_FILL_CONDITION_BUY_LIMIT, BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY

# #class to persist
# class Store:
#     stratins : List[StrategyInstance] = []
#     runners: List[Runner] = []
#     def __init__(self) -> None:
#         self.db_file = DATA_DIR + "/strategyinstances.cache"
#         if os.path.exists(self.db_file):
#             with open(self.db_file, 'rb') as fp:
#                 self.stratins = pickle.load(fp)
#     def save(self):
#         with open(self.db_file, 'wb') as fp:
#             pickle.dump(self.stratins, fp)
# db = Store()

def try_reading_after_skipping_bytes(file_path, skip_bytes, chunk_size=1024):
    with open(file_path, 'rb') as file:
        file.seek(skip_bytes)  # Skip initial bytes
        while True:
            try:
                data = pickle.load(file)
                print("Recovered data:", data)
                break  # Exit loop if successful
            except EOFError:
                print("Reached end of file without recovering data.")
                break
            except pickle.UnpicklingError:
                # Move ahead in the file by chunk_size bytes and try again
                file.seek(file.tell() + chunk_size, os.SEEK_SET)

file_path = DATA_DIR + "/strategyinstances.cache"
try_reading_after_skipping_bytes(file_path, 1)

testy/tablesizes.py (new file, 74 lines)

@@ -0,0 +1,74 @@
import queue
import sqlite3
import threading
from appdirs import user_data_dir

DATA_DIR = user_data_dir("v2realbot")
sqlite_db_file = DATA_DIR + "/v2trading.db"

class ConnectionPool:
    def __init__(self, max_connections):
        self.max_connections = max_connections
        self.connections = queue.Queue(max_connections)
        self.lock = threading.Lock()

    def get_connection(self):
        with self.lock:
            if self.connections.empty():
                return self.create_connection()
            else:
                return self.connections.get()

    def release_connection(self, connection):
        with self.lock:
            self.connections.put(connection)

    def create_connection(self):
        connection = sqlite3.connect(sqlite_db_file, check_same_thread=False)
        return connection

pool = ConnectionPool(10)

def get_table_sizes_in_mb():
    # Connect to the SQLite database
    conn = pool.get_connection()
    cursor = conn.cursor()
    # Get the list of tables
    cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
    tables = cursor.fetchall()
    # Dictionary to store table sizes
    table_sizes = {}
    for table in tables:
        table_name = table[0]
        # Get total number of rows in the table
        cursor.execute(f"SELECT COUNT(*) FROM {table_name};")
        row_count = cursor.fetchone()[0]
        if row_count > 0:
            # Sample a few rows (e.g., 10 rows) and calculate average row size
            cursor.execute(f"SELECT * FROM {table_name} LIMIT 10;")
            sample_rows = cursor.fetchall()
            total_sample_size = sum(sum(len(str(cell)) for cell in row) for row in sample_rows)
            avg_row_size = total_sample_size / len(sample_rows)
            # Estimate table size in megabytes
            size_in_mb = (avg_row_size * row_count) / (1024 * 1024)
        else:
            size_in_mb = 0
        table_sizes[table_name] = {'size_mb': size_in_mb, 'rows': row_count}
    conn.close()
    return table_sizes

# Usage example
db_path = 'path_to_your_database.db'
table_sizes = get_table_sizes_in_mb()
for table, info in table_sizes.items():
    print(f"Table: {table}, Size: {info['size_mb']} MB, Rows: {info['rows']}")


(modified file)

@@ -46,7 +46,7 @@ db.save()
 # b = 2
 # def toJson(self):
-#     return json.dumps(self, default=lambda o: o.__dict__)
+#     return orjson.dumps(self, default=lambda o: o.__dict__)
 # db.append(Neco.a)

(modified file)

@@ -1,12 +1,12 @@
 import timeit
 setup = '''
 import msgpack
-import json
+import orjson
 from copy import deepcopy
 data = {'name':'John Doe','ranks':{'sports':13,'edu':34,'arts':45},'grade':5}'''
 print(timeit.timeit('deepcopy(data)', setup=setup))
 # 12.0860249996
-print(timeit.timeit('json.loads(json.dumps(data))', setup=setup))
+print(timeit.timeit('orjson.loads(orjson.dumps(data))', setup=setup))
 # 9.07182312012
 print(timeit.timeit('msgpack.unpackb(msgpack.packb(data))', setup=setup))
 # 1.42743492126

(modified file)

@@ -16,7 +16,7 @@ import importlib
 from queue import Queue
 from tinydb import TinyDB, Query, where
 from tinydb.operations import set
-import json
+import orjson
 from rich import print
@@ -29,7 +29,7 @@ class RunnerLogger:
     def __init__(self, runner_id: UUID) -> None:
         self.runner_id = runner_id
 runner_log_file = DATA_DIR + "/runner_log.json"
-db_runner_log = TinyDB(runner_log_file, default=json_serial)
+db_runner_log = TinyDB(runner_log_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 def insert_log_multiple(runner_id: UUID, logList: list):
     runner_table = db_runner_log.table(str(runner_id))

(modified file)

@@ -16,7 +16,7 @@ import importlib
 from queue import Queue
 #from tinydb import TinyDB, Query, where
 #from tinydb.operations import set
-import json
+import orjson
 from rich import print
 from tinyflux import Point, TinyFlux
@@ -26,7 +26,7 @@ runner_log_file = DATA_DIR + "/runner_flux__log.json"
 db_runner_log = TinyFlux(runner_log_file)
 insert_dict = {'datum': datetime.now(), 'side': "dd", 'name': 'david', 'id': uuid4(), 'order': "neco"}
-#json.dumps(insert_dict, default=json_serial)
+#orjson.dumps(insert_dict, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 p1 = Point(time=datetime.now(), tags=insert_dict)
 db_runner_log.insert(p1)

(modified file)

@@ -13,7 +13,7 @@ from v2realbot.common.model import Order, TradeUpdate as btTradeUpdate
 from alpaca.trading.models import TradeUpdate
 from alpaca.trading.enums import TradeEvent, OrderType, OrderSide, OrderType, OrderStatus
 from rich import print
-import json
+import orjson
 #storage_with_injected_serialization = JSONStorage()
@@ -110,7 +110,7 @@ a = Order(id=uuid4(),
     limit_price=22.4)
 db_file = DATA_DIR + "/db.json"
-db = TinyDB(db_file, default=json_serial)
+db = TinyDB(db_file, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME)
 db.truncate()
 insert = {'datum': datetime.now(), 'side': OrderSide.BUY, 'name': 'david', 'id': uuid4(), 'order': orderList}

(modified file)

@@ -6,7 +6,7 @@ import secrets
 from typing import Annotated
 import os
 import uvicorn
-import json
+import orjson
 from datetime import datetime
 from v2realbot.utils.utils import zoneNY
@@ -103,7 +103,7 @@ async def websocket_endpoint(
             'vwap': 123,
             'updated': 123,
             'index': 123}
-        await websocket.send_text(json.dumps(data))
+        await websocket.send_text(orjson.dumps(data))
     except WebSocketDisconnect:
         print("CLIENT DISCONNECTED for", runner_id)

(modified file)

@@ -6,7 +6,7 @@ import secrets
 from typing import Annotated
 import os
 import uvicorn
-import json
+import orjson
 from datetime import datetime
 from v2realbot.utils.utils import zoneNY
@@ -101,7 +101,7 @@ async def websocket_endpoint(websocket: WebSocket, client_id: int):
     #             'close': 123,
     #             'open': 123,
     #             'time': "2019-05-25"}
-    await manager.send_personal_message(json.dumps(data), websocket)
+    await manager.send_personal_message(orjson.dumps(data), websocket)
     #await manager.broadcast(f"Client #{client_id} says: {data}")
 except WebSocketDisconnect:
     manager.disconnect(websocket)

View File

@ -3,7 +3,7 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from v2realbot.strategy.base import StrategyState from v2realbot.strategy.base import StrategyState
from v2realbot.strategy.StrategyOrderLimitVykladaciNormalizedMYSELL import StrategyOrderLimitVykladaciNormalizedMYSELL from v2realbot.strategy.StrategyOrderLimitVykladaciNormalizedMYSELL import StrategyOrderLimitVykladaciNormalizedMYSELL
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
from v2realbot.utils.utils import zoneNY, print from v2realbot.utils.utils import zoneNY, print, fetch_calendar_data, send_to_telegram
from v2realbot.utils.historicals import get_historical_bars from v2realbot.utils.historicals import get_historical_bars
from datetime import datetime, timedelta from datetime import datetime, timedelta
from rich import print as printanyway from rich import print as printanyway
@ -16,10 +16,13 @@ from v2realbot.strategyblocks.newtrade.signals import signal_search
from v2realbot.strategyblocks.activetrade.activetrade_hub import manage_active_trade from v2realbot.strategyblocks.activetrade.activetrade_hub import manage_active_trade
from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
from v2realbot.strategyblocks.inits.init_directives import intialize_directive_conditions from v2realbot.strategyblocks.inits.init_directives import intialize_directive_conditions
from alpaca.trading.requests import GetCalendarRequest from v2realbot.strategyblocks.inits.init_attached_data import attach_previous_data
from alpaca.trading.client import TradingClient from alpaca.trading.client import TradingClient
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
from alpaca.trading.models import Calendar from alpaca.trading.models import Calendar
from v2realbot.indicators.oscillators import rsi
from v2realbot.indicators.moving_averages import sma
import numpy as np
print(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) print(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
"""" """"
@ -97,9 +100,7 @@ def init(state: StrategyState):
#pripadne udelat refresh kazdych x-iterací #pripadne udelat refresh kazdych x-iterací
state.vars['sell_in_progress'] = False state.vars['sell_in_progress'] = False
state.vars.mode = None state.vars.mode = None
state.vars.last_tick_price = 0
state.vars.last_50_deltas = [] state.vars.last_50_deltas = []
state.vars.last_tick_volume = 0
state.vars.next_new = 0 state.vars.next_new = 0
state.vars.last_buy_index = None state.vars.last_buy_index = None
state.vars.last_exit_index = None state.vars.last_exit_index = None
@ -114,19 +115,33 @@ def init(state: StrategyState):
state.vars.blockbuy = 0 state.vars.blockbuy = 0
#models #models
state.vars.loaded_models = {} state.vars.loaded_models = {}
#state attributes for martingale sizing mngmt
state.vars["transferables"] = {}
state.vars["transferables"]["martingale"] = dict(cont_loss_series_cnt=0)
#INITIALIZE CBAR INDICATORS - do vlastni funkce
#state.cbar_indicators['ivwap'] = [] #state.cbar_indicators['ivwap'] = []
state.vars.last_tick_price = 0
state.vars.last_tick_volume = 0
state.vars.last_tick_trades = 0
state.cbar_indicators['tick_price'] = [] state.cbar_indicators['tick_price'] = []
state.cbar_indicators['tick_volume'] = [] state.cbar_indicators['tick_volume'] = []
state.cbar_indicators['tick_trades'] = []
state.cbar_indicators['CRSI'] = [] state.cbar_indicators['CRSI'] = []
initialize_dynamic_indicators(state) initialize_dynamic_indicators(state)
intialize_directive_conditions(state) intialize_directive_conditions(state)
#attach part of yesterdays data, bars, indicators, cbar_indicators
attach_previous_data(state)
#intitialize indicator mapping (for use in operation) - mozna presunout do samostatne funkce prip dat do base kdyz se osvedci #intitialize indicator mapping (for use in operation) - mozna presunout do samostatne funkce prip dat do base kdyz se osvedci
local_dict_cbar_inds = {key: state.cbar_indicators[key] for key in state.cbar_indicators.keys() if key != "time"}
local_dict_inds = {key: state.indicators[key] for key in state.indicators.keys() if key != "time"} local_dict_inds = {key: state.indicators[key] for key in state.indicators.keys() if key != "time"}
local_dict_bars = {key: state.bars[key] for key in state.bars.keys() if key != "time"} local_dict_bars = {key: state.bars[key] for key in state.bars.keys() if key != "time"}
state.ind_mapping = {**local_dict_inds, **local_dict_bars} state.ind_mapping = {**local_dict_inds, **local_dict_bars, **local_dict_cbar_inds}
print("IND MAPPING DONE:", state.ind_mapping) print("IND MAPPING DONE:", state.ind_mapping)
#30 DAYS historicall data fill - pridat do base pokud se osvedci #30 DAYS historicall data fill - pridat do base pokud se osvedci
@ -144,7 +159,8 @@ def init(state: StrategyState):
time_to = state.bt.bp_from
-#TBD also add hourly data - for computing RSI on the hour
+#TBD THE FOLLOWING SECTION WILL BE REWORKED TO ALLOW ARBITRARY RESOLUTIONS
+#INDICATORS WILL ALSO BE TAKEN FROM THE CONFIGURATION
#get 30 days (history_datetime_from must be at least -2 so that yesterday is included)
#history_datetime_from = time_to - timedelta(days=40)
#get previous market day
@@ -156,17 +172,25 @@ def init(state: StrategyState):
#time_to = time_to.date()
today = time_to.date()
-several_days_ago = today - timedelta(days=40)
+several_days_ago = today - timedelta(days=60)
#printanyway(f"{today=}",f"{several_days_ago=}")
-clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)
+#clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)
#get all market days from here to 40 days ago
-calendar_request = GetCalendarRequest(start=several_days_ago,end=today)
-cal_dates = clientTrading.get_calendar(calendar_request)
+#calendar_request = GetCalendarRequest(start=several_days_ago,end=today)
+cal_dates = fetch_calendar_data(several_days_ago, today)
+#cal_dates = clientTrading.get_calendar(calendar_request)
#find the first market day - 40 days ago
#history_datetime_from = zoneNY.localize(cal_dates[0].open)
history_datetime_from = cal_dates[0].open
+#store today's market close
+#for automatic termination
+#TODO possibly make this switchable via a parameter
+state.today_market_close = zoneNY.localize(cal_dates[-1].close)
# Find the previous market day
history_datetime_to = None
for session in reversed(cal_dates):
@@ -180,6 +204,74 @@ def init(state: StrategyState):
#printanyway(history_datetime_from, history_datetime_to)
#up to the previous market day
state.dailyBars = get_historical_bars(state.symbol, history_datetime_from, history_datetime_to, TimeFrame.Day)
+#NOTE for now additional indicators are added into the bars like this
+#WILL BE REWORKED - as part of custom resolutions and static indicators
+if state.dailyBars is None:
+print("Failed to load daily bars")
+err_msg = f"Failed to load daily bars (get_historical_bars) for {state.symbol} from {history_datetime_from} to {history_datetime_to} in strat.init. Probably wrong symbol?"
+send_to_telegram(err_msg)
+raise Exception(err_msg)
+#RSI is returned only for the bars it can be computed on - the first N values (per RSI length) are missing, so prepend zeros
+rsi_calculated = rsi(state.dailyBars["vwap"], 14).tolist()
+num_zeros_to_prepend = len(state.dailyBars["vwap"]) - len(rsi_calculated)
+state.dailyBars["rsi"] = [0]*num_zeros_to_prepend + rsi_calculated
+#VOLUME
+volume_sma = sma(state.dailyBars["volume"], 10) #returns total count minus 10 values
+items_to_prepend = len(state.dailyBars["volume"]) - len(volume_sma)
+volume_sma = np.hstack((np.full(items_to_prepend, np.nan), volume_sma))
+#normalized divergence (currvol - smavolume) / (currvol + smavolume)
+volume_data = np.array(state.dailyBars["volume"])
+normalized_divergence = (volume_data - volume_sma) / (volume_data + volume_sma)
+# Replace NaN values with 0 or some other placeholder if needed
+normalized_divergence = np.nan_to_num(normalized_divergence)
+volume_sma = np.nan_to_num(volume_sma)
+state.dailyBars["volume_sma_divergence"] = normalized_divergence.tolist()
+state.dailyBars["volume_sma"] = volume_sma.tolist()
+#vwap_cum and divergence
+volume_np = np.array(state.dailyBars["volume"])
+close_np = np.array(state.dailyBars["close"])
+high_np = np.array(state.dailyBars["high"])
+low_np = np.array(state.dailyBars["low"])
+vwap_cum_np = np.cumsum(((high_np + low_np + close_np) / 3) * volume_np) / np.cumsum(volume_np)
+state.dailyBars["vwap_cum"] = vwap_cum_np.tolist()
+normalized_divergence = (close_np - vwap_cum_np) / (close_np + vwap_cum_np)
+#divergence of the close price and the cumulative vwap
+state.dailyBars["div_vwap_cum"] = normalized_divergence.tolist()
+#create log returns for open, close, high and low
+open_np = np.array(state.dailyBars["open"])
+state.dailyBars["open_log_return"] = np.log(open_np[1:] / open_np[:-1]).tolist()
+state.dailyBars["close_log_return"] = np.log(close_np[1:] / close_np[:-1]).tolist()
+state.dailyBars["high_log_return"] = np.log(high_np[1:] / high_np[:-1]).tolist()
+state.dailyBars["low_log_return"] = np.log(low_np[1:] / low_np[:-1]).tolist()
+#Features to emphasize the shape characteristics of each candlestick. For use in ML https://chat.openai.com/c/c1a22550-643b-4037-bace-3e810dbce087
+# Calculate the candle shape ratios
+total_range = high_np - low_np
+upper_shadow = (high_np - np.maximum(open_np, close_np)) / total_range
+lower_shadow = (np.minimum(open_np, close_np) - low_np) / total_range
+body_size = np.abs(close_np - open_np) / total_range
+body_position = np.where(close_np >= open_np,
+(close_np - low_np) / total_range,
+(open_np - low_np) / total_range)
+#other possibilities
+# Open to Close Change: (close[-1] - open[-1]) / open[-1]
+# High to Low Range: (high[-1] - low[-1]) / low[-1]
+# Store the ratios in the bars dictionary
+state.dailyBars['upper_shadow_ratio'] = upper_shadow.tolist()
+state.dailyBars['lower_shadow_ratio'] = lower_shadow.tolist()
+state.dailyBars['body_size_ratio'] = body_size.tolist()
+state.dailyBars['body_position_ratio'] = body_position.tolist()
#printanyway("daily bars FILLED", state.dailyBars)
#for now stored in extData - for instant indicators and the gui
state.extData["dailyBars"] = state.dailyBars
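The padding seen above (zeros for the RSI, NaN for the volume SMA) keeps every dailyBars column the same length as the bar series, since warm-up indicators return fewer values than they consume. A minimal, self-contained sketch of that pattern (illustrative names only, not repository code):

import numpy as np

def left_pad(values, target_len, fill=0.0):
    # prepend fill values so the indicator aligns with the price series
    return [fill] * (target_len - len(values)) + list(values)

prices = [30.0, 30.2, 30.1, 30.4, 30.3]
indicator = [0.5, 0.6, 0.4]               # e.g. a 3-bar warm-up leaves 2 values missing
aligned = left_pad(indicator, len(prices))
assert len(aligned) == len(prices)        # [0.0, 0.0, 0.5, 0.6, 0.4]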

View File

@@ -1,102 +0,0 @@
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
import v2realbot.ml.mlutils as mu
from keras.layers import LSTM, Dense
import matplotlib.pyplot as plt
from v2realbot.ml.ml import ModelML
from v2realbot.enums.enums import PredOutput, Source, TargetTRFM
from v2realbot.controller.services import get_archived_runner_details_byID, update_archive_detail
# from collections import defaultdict
# from operator import itemgetter
from joblib import load
#TODO - MOVE INTO THE API
# in ml an atomic api for evaluation (of a runner, a batch)
# in services: model.add_vector_prediction_to_archrunner_as_new_indicator (essentially returns an enriched archDetail) - or also store it in the db? we will see
# exposed through the rest api
# db support: for now just a catalog of models + their basic settings, a management api, model loading from file for now
cfg: ModelML = mu.load_model("model1", "0.1")
#EVALUATE SPECIFIC RUNNER - VECTOR BASED (put this into a standalone API, later maybe add a layer over batches and runners)
#tests the model on an unseen runner, a list of runners or a batch_id
runner_id = "a38fc269-8df3-4374-9506-f0280d798854"
save_new_ind = True
source_data, target_data, rows_in_day = cfg.load_data(runners_ids=[runner_id])
if len(rows_in_day) > 1:
#for more runners this whole service would be called in a loop
raise Exception("Creating the indicator is currently available for a single runner only")
#scale X
source_data = cfg.scalerX.fit_transform(source_data)
#here I will also try it across multiple runners
X_eval, y_eval, y_eval_ref = cfg.create_sequences(combined_data=source_data, target_data=target_data,remove_cross_sequences=True, rows_in_day=rows_in_day)
#is this necessary?
X_eval = np.array(X_eval)
y_eval = np.array(y_eval)
y_eval_ref = np.array(y_eval_ref)
#scaling the target - not necessary
#y_eval = cfg.scalerY.fit_transform(y_eval)
X_eval = cfg.model.predict(X_eval)
X_eval = cfg.scalerY.inverse_transform(X_eval)
print("po predikci x_eval shape", X_eval.shape)
#if the target is also available in the runner, add a comparison indicator
difference_mse = None
if len(y_eval) > 0:
#TODO it still spits out just 1 value
difference_mse = mean_squared_error(y_eval, X_eval,multioutput="raw_values")
print("now I have two new columns")
print("X_eval", X_eval.shape)
if difference_mse is not None:
print("difference_mse", difference_mse.shape)
print(f"zplostime je, dopredu pridame {cfg.input_sequences-1} a dozadu nic")
#print(f"a melo by nam to celkem dat {len(bars['time'])}")
#tohle pak nejak doladit, ale vypada to good
#plus do druheho indikatoru pridat ten difference_mse
#TODO jeste je posledni hodnota predikce nejak OFF (2.52... ) - podivat se na to
#TODO na produkci srovnat se skutecnym BT predictem (mozna zde bude treba seq-1) -
# prvni predikce nejspis uz bude na desítce
ind_pred = list(np.concatenate([np.zeros(cfg.input_sequences-1), X_eval.ravel()]))
print(ind_pred)
print(len(ind_pred))
print("tada")
#ted k nim pridame
if save_new_ind:
#novy ind ulozime do archrunnera (na produkci nejspis jen show)
res, sada = get_archived_runner_details_byID(runner_id)
if res == 0:
print("ok")
else:
print("error",res,sada)
raise Exception(f"error loading runner {runner_id} : {res} {sada}")
sada["indicators"][0]["pred_added"] = ind_pred
req, res = update_archive_detail(runner_id, sada)
print(f"indicator pred_added was ADDED to {runner_id}")
# Plot the predicted vs. actual
plt.plot(y_eval, label='Target')
plt.plot(X_eval, label='Predicted')
#TODO come up with a different price line here - like a lightweight chart
if difference_mse is not None:
plt.plot(difference_mse, label='difference')
plt.plot(y_eval_ref, label='reference column - vwap')
plt.plot()
plt.legend()
plt.show()
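A note on the "still spits out just 1 value" TODO above: sklearn's mean_squared_error with multioutput="raw_values" returns one MSE per output column, not per sample, so a single-output target of shape (n, 1) yields a length-1 array. A per-sample error curve for plotting would have to be computed manually; a minimal sketch (illustrative values, not repository data):

import numpy as np

y_true = np.array([[1.0], [2.0], [3.0]])
y_pred = np.array([[1.1], [1.9], [3.2]])
per_sample_se = ((y_true - y_pred) ** 2).ravel()  # one squared error per row
print(per_sample_se)                              # approx. [0.01 0.01 0.04]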

View File

@@ -1,278 +0,0 @@
import numpy as np
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
import v2realbot.ml.mlutils as mu
from keras.layers import LSTM, Dense
import matplotlib.pyplot as plt
from v2realbot.ml.ml import ModelML
from v2realbot.enums.enums import PredOutput, Source, TargetTRFM
# from collections import defaultdict
# from operator import itemgetter
from joblib import load
# region Notes
#BASIS FOR A TRAINING SCRIPT for creating models
# TODO
# support for BINARY TARGET
# support for hyperparameters (activation functions such as sigmoid etc.)
# use a distributed environment - or at least a dedicated VM
# finish daily identifiers such as lastday close, today's open etc.
# random SEARCH and grid search
# create model metadata (e.g. trained on (runners+period), training data settings, number of epochs, hyperparameters, config attributes etc.) - maybe persist in the db
# add some versioning
# rework into a GUI and a module
# use VectorBT to find optimized parameters e.g. for buy, sell etc. Use a data preparation API similar to the model's.
# EVAL MODEL - allow adding an indicator to a runner vector-wise (e.g. prediction in a module, vectorBT, optimization etc.) - build an API for it, similar to the existing one: load the runner, transform, sequence, evaluate, then transform back and add as another indicator. Can then be used in the gui too.
# a new button "Display model prediction" at the archrunner level, which
# - has a model selection + whether to show only the prediction as a new indicator or also the mse from ytarget (target required)
# after launching it then:
# - checks whether the runner has indicators matching the model features (bar_ftrs, ind_ftrs, optionally target)
# - fills in the prediction vector-wise (transforms the data, makes the prediction and transforms Y back)
# - implants the result (as new indicators) into the runnerdetail and displays it
# look at further keras parameters, e.g. false positives etc.
# look at the difference between vector and scalar prediction - why they sometimes differ, trace it - possibly google it
# trace it, there is noise somewhere (try both in one script and compare)
#TODO IDEAS for models
#1. binary trend identification, e.g. whether the next 3 bars rise (0-1) or whether momentum rises over the next bars
#2. focus on models with output 0-1 or -1 to 1
#3. try a single model that identifies trends in both directions - -1 for falling and 1 for rising.
#4. test whether a model built from one part of the day works on the other part (on-the-fly daily models)
#5. try models with and without time (adapt the ModelML code accordingly - especially how to handle crossday sequences) - maybe drop it from the indicators at first?
# Drop all useless features, keep only the essential ones - attention, that is what I aim for.
#6. try using tickprice in some model, later possibly other CBAR indicators. devise tick-based features
#7. try not using standard prices as features, only indicators representing behavior (fastslope, samebarslope, volume, tradecnt)
#8. relative OHLC - a model using (only) bars, but instead of ohlc values build features representing the relations (ratios) between these quantities, i.e. relative ohlc
#9. another approach would be to train the model on the specific chunks I want it to identify, e.g. certain segments. Think it through. For now as test intervals, but in the future maybe just mark them somehow and send them for training. Possibly make some extract.
#10. maybe with the right choice of target I can also teach it only specific things. Specialization. It is enough if it activates once or twice a day.
# 11. build a go-IN model that helps the strategy generate entries - it just needs a slightly better edge than the conditions, the strategy logic takes care of the rest
# 12. a model for non-aggregated, or only filtered or very lightly aggregated trades? - tickprice
# 13. use the Fourier transform as features, on a one-second bar or on tickprice
#IMPORTANT
# in the models focus on predicting the next value, ideally some vector indicating direction (e.g. 0 - 1, where zero means no rise and 1 means a steep rise)
# for predicting some bigger trend, try multiple models at different resolutions, each showing
# the value at its own resolution; their combination can determine the entry. Try whether a single model would work too.
# In any case focus on
# 1) the next value (i.e. the inputs must directly influence it (samebarslope, etc.))
# 2) its level indicates the direction at this resolution
# 3) ideally learn from every bar, i.e. the target value must be known at every bar
# (not binary, I need a linear vector) - although a 1 and 0 target based on rising/falling might be ok,
# but probably too restrictive; rather encode how much, i.e. +0.32, -0.04. It would learn the degree of the rise.
# I need that degree preserved.
# then I can say: when there is a certain probability that it will rise (i.e. the next value) at level 1, 2, 3 - I enter
# at the lowest level also try predicting CBARs, the direction of the next tick. Try it.
##TODO - at home
#bar_features and ind_features into the SL classic documentation, likewise the conditional indicator and the mathop indicator
#TODO - what needs to be developed
# GENERATOR of test intervals (input: name, note, from, to, step)
# write the API, then a simple GUI at home
# use ATR (as a boundary of the historical range) - atr-up, atr-down
# draw in the chart atru = close+atr, atrd = close-atr
# possibly customize the atr computation, e.g. different multipliers for high and low, or compute it my own way
# usage:
# for crossing some line, e.g. ema or yesterdayclose
# - to identify that it moves within its range
# - it is simply a buffer that must be overcome for a certain action to happen
# for learning: to compute a conditional parameter (1,0,-1) for crossing e.g. dailyopen, yesterdayclose, gapclose
# where 1 = crossed up, 0 = within range (atr), -1 = crossed down - this helps the learning
# own supertrend strategy
# plus the option of a sliding or parameterized atr that adjusts itself based on
# certain parameters and deliberately breaks out of COUNTER frequencies, e.g. a randomized multiplier or one influenced somehow by the past
# everywhere in indicators where a reference has source as a value, allow a lookback by default, e.g. comparing price vs the second-to-last ATR value (leave until the need arises)
# start at home by building a supertrend on ATR, see the pinescript on the desktop
#TODO - general improvements
# 1. in the GUI the chart container into n TABs, maybe with draggable order, closable with an X (innerContainer)
# 2. maybe have a special mode for data preparation (aggregation+indicators, i.e. everything just without entries) - then I can use more vectorized data filling
# THIS:: maybe switching off the backtester (i.e. no trades) would suffice - and run only the indicators. maybe it could even be optimized vector-wise.
# the indicators could be moved before next and next would not need to be called at all (only incompatible with previous strategies)
# 3. a combination of fastslope at fibonacci lengths (1,2,3,5..) as a good input for ML
# 4. check whether keras has an attention-based LSTM implementation
# add togglable bar boundaries of certain resolutions to the chart - which just draws lines X apart (good for designing)
# 5. devise an optimized use of the model in production (have it compiled somehow so scalar prediction is lightning fast) - currently it slows the backtest down 4x
# 6. CONVNETS for time series forecasting - small 1D convnets can offer a fast alternative to RNNs for simple tasks such as text classification and timeseries forecasting.
# try a small conv1D to identify the churn before a trend, e.g. just 6 bars - pick the target well, it must be a sure thing on the target
# for the convnet try cbar price, volume and time. Maybe it captures the ripples
# Other areas to predict are ripples - the harbinger of some mightier move. And it is true that some clues may appear beforehand. Try to capture those.
# Add bt_from, bt_to to runner_headers - for order_by sorting, so that runners are always returned ascending by date (for machine learning)
#TODO
# model development workflow with LSTMtrain.py
# 1) POC - only here in the script, over 1-2 runners, immediate display in the plot,
# optimization of basic features and hyperparams. For binary also display the price somehow.
# 2) REALITY CHECK - train the model on a batch of test intervals, verify in the strategy in BT, display the prediction in the RT chart
# 3) FINAL TRAINING
# prediction testing
#TODO here
# train model
# - train data - batch or runners
# - test data - batch or runners (what to compare/validate against)
# - architecture selection
# - the script may also include a comparison or some search for optimal parameters
#lstmtrain - support the individual steps above
#modelML - build better CONDITIONS
#frontend? worth it? probably yes - a GUI for the model - new - train/retrain-change
# (figure out how to smartly select the model architecture and hyperparams, loss, optim in the gui - maybe some template?)
# maybe a catalog of architectures with an editable code field - just a few lines (.add, .compile) added in the editor
# figure out how to do it pythonically
#testlist generator api
# endregion
#if null, the validation is made on 10% of the train data
#runners for testing
validation_runners = ["a38fc269-8df3-4374-9506-f0280d798854"]
#for binary the target will either be a ready-made indicator or created on the fly
cfg = ModelML(name="model1",
version = "0.1",
note = None,
pred_output=PredOutput.LINEAR,
input_sequences = 10,
use_bars = True,
bar_features = ["volume","trades"],
ind_features = ["slope20", "ema20","emaFast","samebarslope","fastslope","fastslope4"],
target='target', #reference value for the target - e.g. for the chart
target_reference='vwap',
train_target_steps=3,
train_target_transformation=TargetTRFM.KEEPVAL,
train_runner_ids = ["08b7f96e-79bc-4849-9142-19d5b28775a8"],
train_batch_id = None,
train_epochs = 10,
train_remove_cross_sequences = True,
)
#TODO put all of this into a TRAIN method - including the potential loop and API support
test_size = None
#when the input is not provided, training data is loaded automatically from the class settings
source_data, target_data, rows_in_day = cfg.load_data()
if len(target_data) == 0:
raise Exception("target is empty - required for TRAINING - check target column name")
np.set_printoptions(threshold=10,edgeitems=5)
#print("source_data", source_data)
#print("target_data", target_data)
print("rows_in_day", rows_in_day)
source_data = cfg.scalerX.fit_transform(source_data)
#TODO maybe drop the UNTR
#TODO probably drop the target reference too and redesign it
#creating sequences from the input sets (e.g. 10 bars) - 3D output, e.g. #X_train (6205, 10, 14)
#plus the transformation of the target data
X_train, y_train, y_train_ref = cfg.create_sequences(combined_data=source_data,
target_data=target_data,
remove_cross_sequences=cfg.train_remove_cross_sequences,
rows_in_day=rows_in_day)
#display the transformed target and its reference column
#HOMOGENIZE THE AXES
plt.plot(y_train, label='Transf target')
plt.plot(y_train_ref, label='Ref target')
plt.plot()
plt.legend()
plt.show()
print("After sequencing")
print("source:X_train", np.shape(X_train))
print("target:y_train", np.shape(y_train))
print("target:", y_train)
y_train = y_train.reshape(-1, 1)
X_complete = np.array(X_train.copy())
Y_complete = np.array(y_train.copy())
X_train = np.array(X_train)
y_train = np.array(y_train)
#the target is scaled only after the transformation in create_sequences - unlike X the shape stays the same
y_train = cfg.scalerY.fit_transform(y_train)
if len(validation_runners) == 0:
test_size = 0.10
# Split the data into training and test sets - each input array is split in two
#we let the reference column be split this way too
X_train, X_test, y_train, y_test, y_train_ref, y_test_ref = train_test_split(X_train, y_train, y_train_ref, test_size=test_size, shuffle=False) #random_state=42)
print("Splitting the data")
print("X_train", np.shape(X_train))
print("X_test", np.shape(X_test))
print("y_train", np.shape(y_train))
print("y_test", np.shape(y_test))
print("y_test_ref", np.shape(y_test_ref))
print("y_train_ref", np.shape(y_train_ref))
#print(np.shape(X_train))
# Define the input shape of the LSTM layer dynamically based on the reshaped X_train value
input_shape = (X_train.shape[1], X_train.shape[2])
# Build the LSTM model
#cfg.model = Sequential()
cfg.model.add(LSTM(128, input_shape=input_shape))
cfg.model.add(Dense(1, activation="relu"))
#activation: Gelu, relu, elu, sigmoid...
# Compile the model
cfg.model.compile(loss='mse', optimizer='adam')
#loss: mse, binary_crossentropy
# Train the model
cfg.model.fit(X_train, y_train, epochs=cfg.train_epochs)
#save the model
cfg.save()
#TBD db layer
cfg: ModelML = mu.load_model(cfg.name, cfg.version)
# region Live predict
#EVALUATE SIM LIVE - PREDICT SCALAR - based on last X items
barslist, indicatorslist = cfg.load_runners_as_list(runner_id_list=["67b51211-d353-44d7-a58a-5ae298436da7"])
#merge all the data together
bars = mu.merge_dicts(barslist)
indicators = mu.merge_dicts(indicatorslist)
cfg.validate_available_features(bars, indicators)
#THE INPUT IS the standard array from the strategy
value = cfg.predict(bars, indicators)
print("prediction for LIVE SIM:", value)
# endregion
#EVALUATE TEST DATA - VECTOR BASED
#if we have eval runners we use them, otherwise we take part of the test data
if len(validation_runners) > 0:
source_data, target_data, rows_in_day = cfg.load_data(runners_ids=validation_runners)
source_data = cfg.scalerX.fit_transform(source_data)
X_test, y_test, y_test_ref = cfg.create_sequences(combined_data=source_data, target_data=target_data,remove_cross_sequences=True, rows_in_day=rows_in_day)
#switch HERE to test the whole bundle - otherwise we test only unseen data
#X_test = X_complete
#y_test = Y_complete
X_test = cfg.model.predict(X_test)
X_test = cfg.scalerY.inverse_transform(X_test)
#target of the test data; why is there a reshape here? y_test.reshape(-1, 1)
y_test = cfg.scalerY.inverse_transform(y_test)
#overall mean? or rather a vector for the chart?
mse = mean_squared_error(y_test, X_test)
print('Test MSE:', mse)
# Plot the predicted vs. actual
plt.plot(y_test, label='Actual')
plt.plot(X_test, label='Predicted')
#TODO come up with a different price line here - like a lightweight chart
plt.plot(y_test_ref, label='reference column - price')
plt.plot()
plt.legend()
plt.show()
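One detail worth calling out in the split above is shuffle=False: with chronological market data a shuffled split would leak future bars into the training set, so the tail of the series is held out instead. A minimal sketch (not repository code):

import numpy as np
from sklearn.model_selection import train_test_split

X = np.arange(20).reshape(10, 2)  # 10 chronological samples
y = np.arange(10)
X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, shuffle=False)
print(y_tr)  # [0 1 2 3 4 5 6 7] - the past
print(y_te)  # [8 9] - the most recent samples held out for testing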

View File

@@ -40,10 +40,10 @@
from uuid import UUID, uuid4
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent, OrderType
from v2realbot.common.model import TradeUpdate, Order
-#from rich import print
+from rich import print as printanyway
import threading
import asyncio
-from v2realbot.config import BT_DELAYS, DATA_DIR, BT_FILL_CONDITION_BUY_LIMIT, BT_FILL_CONDITION_SELL_LIMIT, BT_FILL_LOG_SURROUNDING_TRADES, BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_PRICE_MARKET_ORDER_PREMIUM
+from v2realbot.config import DATA_DIR
from v2realbot.utils.utils import AttributeDict, ltp, zoneNY, trunc, count_decimals, print
from v2realbot.utils.tlog import tlog
from v2realbot.enums.enums import FillCondition
@@ -60,6 +60,7 @@ from v2realbot.utils.dash_save_html import make_static
import dash_bootstrap_components as dbc
from dash.dependencies import Input, Output
from dash import dcc, html, dash_table, Dash
+import v2realbot.utils.config_handler as cfh
""""
LATENCY DELAYS
.000 trigger - last_trade_time (.4246266)
@@ -171,7 +172,7 @@ class Backtester:
todel.append(order)
elif not self.symbol or order.symbol == self.symbol:
#add the minimal latency from submit to fill
-if order.submitted_at.timestamp() + BT_DELAYS.sub_to_fill > float(intime):
+if order.submitted_at.timestamp() + cfh.config_handler.get_val('BT_DELAYS','sub_to_fill') > float(intime):
print(f"too soon for {order.id}")
#try to execute
else:
@@ -196,7 +197,10 @@ class Backtester:
#TEST try not deleting it and see how it affects performance
#We delete, otherwise it is a mess
#we keep trades at the end that we may need for the consecutive rule
-del self.btdata[0:index_end-2-BT_FILL_CONS_TRADES_REQUIRED]
+#handle the case of too few trades, where the trim would go negative
+del_to_index = index_end-2-cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED')
+del_to_index = del_to_index if del_to_index > 0 else 0
+del self.btdata[0:del_to_index]
##ic("after delete",len(self.btdata[0:index_end])) ##ic("after delete",len(self.btdata[0:index_end]))
if changes: return 1 if changes: return 1
@ -215,7 +219,7 @@ class Backtester:
fill_time = None fill_time = None
fill_price = None fill_price = None
order_min_fill_time = o.submitted_at.timestamp() + BT_DELAYS.sub_to_fill order_min_fill_time = o.submitted_at.timestamp() + cfh.config_handler.get_val('BT_DELAYS','sub_to_fill')
#ic(order_min_fill_time) #ic(order_min_fill_time)
#ic(len(work_range)) #ic(len(work_range))
@@ -237,17 +241,18 @@ class Backtester:
#FILL CONDITION SETUP
fast_fill_condition = i[1] <= o.limit_price
slow_fill_condition = i[1] < o.limit_price
-if BT_FILL_CONDITION_BUY_LIMIT == FillCondition.FAST:
+fill_cond_buy_limit = cfh.config_handler.get_val('BT_FILL_CONDITION_BUY_LIMIT')
+if fill_cond_buy_limit == FillCondition.FAST:
fill_condition = fast_fill_condition
-elif BT_FILL_CONDITION_BUY_LIMIT == FillCondition.SLOW:
+elif fill_cond_buy_limit == FillCondition.SLOW:
fill_condition = slow_fill_condition
else:
print("unknown fill condition")
return -1
-if float(i[0]) > float(order_min_fill_time+BT_DELAYS.limit_order_offset) and fill_condition:
+if float(i[0]) > float(order_min_fill_time+cfh.config_handler.get_val('BT_DELAYS','limit_order_offset')) and fill_condition:
consec_cnt += 1
-if consec_cnt == BT_FILL_CONS_TRADES_REQUIRED:
+if consec_cnt == cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED'):
#(1679081919.381649, 27.88)
#ic(i)
@@ -258,10 +263,10 @@ class Backtester:
#fill_price = i[1]
print("FILL LIMIT BUY at", fill_time, datetime.fromtimestamp(fill_time).astimezone(zoneNY), "at",i[1])
-if BT_FILL_LOG_SURROUNDING_TRADES != 0:
+if cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES') != 0:
#TODO loguru
-print("FILL SURR TRADES: before",work_range[index-BT_FILL_LOG_SURROUNDING_TRADES:index])
+print("FILL SURR TRADES: before",work_range[index-cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES'):index])
-print("FILL SURR TRADES: fill and after",work_range[index:index+BT_FILL_LOG_SURROUNDING_TRADES])
+print("FILL SURR TRADES: fill and after",work_range[index:index+cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES')])
break
else:
consec_cnt = 0
@@ -272,17 +277,18 @@ class Backtester:
#FILL CONDITION SETUP
fast_fill_condition = i[1] >= o.limit_price
slow_fill_condition = i[1] > o.limit_price
-if BT_FILL_CONDITION_SELL_LIMIT == FillCondition.FAST:
+fill_conf_sell_cfg = cfh.config_handler.get_val('BT_FILL_CONDITION_SELL_LIMIT')
+if fill_conf_sell_cfg == FillCondition.FAST:
fill_condition = fast_fill_condition
-elif BT_FILL_CONDITION_SELL_LIMIT == FillCondition.SLOW:
+elif fill_conf_sell_cfg == FillCondition.SLOW:
fill_condition = slow_fill_condition
else:
print("unknown fill condition")
return -1
-if float(i[0]) > float(order_min_fill_time+BT_DELAYS.limit_order_offset) and fill_condition:
+if float(i[0]) > float(order_min_fill_time+cfh.config_handler.get_val('BT_DELAYS','limit_order_offset')) and fill_condition:
consec_cnt += 1
-if consec_cnt == BT_FILL_CONS_TRADES_REQUIRED:
+if consec_cnt == cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED'):
#(1679081919.381649, 27.88)
#ic(i)
fill_time = i[0]
@@ -294,10 +300,11 @@ class Backtester:
#fill_price = i[1]
print("FILL LIMIT SELL at", fill_time, datetime.fromtimestamp(fill_time).astimezone(zoneNY), "at",i[1])
-if BT_FILL_LOG_SURROUNDING_TRADES != 0:
+surr_trades_cfg = cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES')
+if surr_trades_cfg != 0:
#TODO loguru
-print("FILL SELL SURR TRADES: before",work_range[index-BT_FILL_LOG_SURROUNDING_TRADES:index])
+print("FILL SELL SURR TRADES: before",work_range[index-surr_trades_cfg:index])
-print("FILL SELL SURR TRADES: fill and after",work_range[index:index+BT_FILL_LOG_SURROUNDING_TRADES])
+print("FILL SELL SURR TRADES: fill and after",work_range[index:index+surr_trades_cfg])
break
else:
consec_cnt = 0
@@ -311,11 +318,16 @@ class Backtester:
#ic(i)
fill_time = i[0]
fill_price = i[1]
-#add the MARKET PREMIUM from the configuration (in the future maybe different for BUY/SELL, or maybe from the configuration for the given symbol)
+#add the MARKET PREMIUM from the configuration (it is in pct or abs) (in the future maybe different for BUY/SELL, or maybe from the configuration for the given symbol)
+cfg_premium = cfh.config_handler.get_val('BT_FILL_PRICE_MARKET_ORDER_PREMIUM')
+if cfg_premium < 0: #configured as a percentage
+premium = abs(cfg_premium) * fill_price / 100.0
+else: #configured as an absolute value
+premium = cfg_premium
if o.side == OrderSide.BUY:
-fill_price = fill_price + BT_FILL_PRICE_MARKET_ORDER_PREMIUM
+fill_price = fill_price + premium
elif o.side == OrderSide.SELL:
-fill_price = fill_price - BT_FILL_PRICE_MARKET_ORDER_PREMIUM
+fill_price = fill_price - premium
print("FILL ",o.side,"MARKET at", fill_time, datetime.fromtimestamp(fill_time).astimezone(zoneNY), "price", i[1])
break
@@ -364,7 +376,7 @@ class Backtester:
def _do_notification_with_callbacks(self, tradeupdate: TradeUpdate, time: float):
#the filltime (including the notification latency) must be propagated into the callback, so that any actions in the callback happen with this time
-self.time = time + float(BT_DELAYS.fill_to_not)
+self.time = time + float(cfh.config_handler.get_val('BT_DELAYS','fill_to_not'))
print("current bt.time",self.time)
#print("FILL NOTIFICATION: ", tradeupdate)
res = asyncio.run(self.order_fill_callback(tradeupdate))
@@ -467,11 +479,11 @@ class Backtester:
print("BT: submit order entry")
if not time or time < 0:
-print("time must be provided")
+printanyway("time must be provided")
return -1
if not size or int(size) < 0:
-print("size must be greater than 0")
+printanyway("size must be greater than 0")
return -1
if (order_type != OrderType.MARKET) and (order_type != OrderType.LIMIT):
@@ -479,11 +491,11 @@ class Backtester:
return -1
if not side == OrderSide.BUY and not side == OrderSide.SELL:
-print("side buy/sell required")
+printanyway("side buy/sell required")
return -1
if order_type == OrderType.LIMIT and count_decimals(price) > 2:
-print("only 2 decimals supported", price)
+printanyway("only 2 decimals supported", price)
return -1
#if the key does not exist in the account, create it
@@ -505,14 +517,14 @@ class Backtester:
actual_minus_reserved = int(self.account[symbol][0]) - reserved
if actual_minus_reserved > 0 and actual_minus_reserved - int(size) < 0:
-print("not enough shares available to sell or shorting while long position",self.account[symbol][0],"reserved",reserved,"available",int(self.account[symbol][0]) - reserved,"selling",size)
+printanyway("not enough shares available to sell or shorting while long position",self.account[symbol][0],"reserved",reserved,"available",int(self.account[symbol][0]) - reserved,"selling",size)
return -1
#if shorting - check available cash to short
if actual_minus_reserved <= 0:
cena = price if price else self.get_last_price(time, self.symbol)
if (self.cash - reserved_price - float(int(size)*float(cena))) < 0:
-print("not enough cash for shorting. cash",self.cash,"reserved",reserved,"available",self.cash-reserved,"needed",float(int(size)*float(cena)))
+printanyway("not enough cash for shorting. cash",self.cash,"reserved",reserved,"available",self.cash-reserved,"needed",float(int(size)*float(cena)))
return -1
#check for available cash
@@ -531,14 +543,14 @@ class Backtester:
#this is closing a short
if actual_plus_reserved_qty < 0 and (actual_plus_reserved_qty + int(size)) > 0:
-print("the short position must be closed first before a buy; res_qty, size", actual_plus_reserved_qty, size)
+printanyway("the short position must be closed first before a buy; res_qty, size", actual_plus_reserved_qty, size)
return -1
#this is a standard long, check cash
if actual_plus_reserved_qty >= 0:
cena = price if price else self.get_last_price(time, self.symbol)
if (self.cash - reserved_price - float(int(size)*float(cena))) < 0:
-print("not enough cash to buy long. cash",self.cash,"reserved_qty",reserved_qty,"reserved_price",reserved_price, "available",self.cash-reserved_price,"needed",float(int(size)*float(cena)))
+printanyway("not enough cash to buy long. cash",self.cash,"reserved_qty",reserved_qty,"reserved_price",reserved_price, "available",self.cash-reserved_price,"needed",float(int(size)*float(cena)))
return -1
id = str(uuid4())
@@ -565,11 +577,11 @@ class Backtester:
print("BT: replace order entry",id,size,price)
if not price and not size:
-print("size or price required")
+printanyway("size or price required")
return -1
if len(self.open_orders) == 0:
-print("BT: order doesn't exist")
+printanyway("BT: order doesn't exist")
return 0
#with lock:
for o in self.open_orders:
@@ -597,7 +609,7 @@ class Backtester:
"""
print("BT: cancel order entry",id)
if len(self.open_orders) == 0:
-print("BTC: order doesn't exist")
+printanyway("BTC: order doesn't exist")
return 0
#with lock:
for o in self.open_orders:
@@ -817,10 +829,10 @@ class Backtester:
Trades:''' + str(len(self.trades)))
textik8 = html.Div('''
Profit:''' + str(state.profit))
-textik9 = html.Div(f"{BT_FILL_CONS_TRADES_REQUIRED=}")
+textik9 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_CONS_TRADES_REQUIRED')=}")
-textik10 = html.Div(f"{BT_FILL_LOG_SURROUNDING_TRADES=}")
+textik10 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_LOG_SURROUNDING_TRADES')=}")
-textik11 = html.Div(f"{BT_FILL_CONDITION_BUY_LIMIT=}")
+textik11 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_CONDITION_BUY_LIMIT')=}")
-textik12 = html.Div(f"{BT_FILL_CONDITION_SELL_LIMIT=}")
+textik12 = html.Div(f"{cfh.config_handler.get_val('BT_FILL_CONDITION_SELL_LIMIT')=}")
orders_title = dcc.Markdown('## Open orders')
trades_title = dcc.Markdown('## Trades')
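The edits above replace module-level constants with reads through cfh.config_handler.get_val(name, subkey), which supports the profile/reload behavior referenced in the commit log. The handler implementation itself is not part of this diff; a minimal stand-in with the same read interface might look like this (an assumption for illustration, not the actual class):

class ConfigHandlerSketch:
    def __init__(self, values: dict):
        self._values = values  # e.g. loaded from a TOML profile

    def get_val(self, name, subkey=None):
        # a top-level value, or one field of a grouped setting such as BT_DELAYS
        val = self._values[name]
        return val[subkey] if subkey is not None else val

config_handler = ConfigHandlerSketch({
    "BT_FILL_CONS_TRADES_REQUIRED": 2,
    "BT_DELAYS": {"sub_to_fill": 0.008, "limit_order_offset": 0, "fill_to_not": 0.023},
})
assert config_handler.get_val("BT_DELAYS", "sub_to_fill") == 0.008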

View File

@@ -1,11 +1,8 @@
-from v2realbot.config import DATA_DIR
import sqlite3
import queue
import threading
import time
-from v2realbot.common.model import RunArchive, RunArchiveView
+from v2realbot.config import DATA_DIR
-from datetime import datetime
-import json
sqlite_db_file = DATA_DIR + "/v2trading.db"
# Define the connection pool
@@ -31,7 +28,7 @@ class ConnectionPool:
return connection
-def execute_with_retry(cursor: sqlite3.Cursor, statement: str, params = None, retry_interval: int = 1) -> sqlite3.Cursor:
+def execute_with_retry(cursor: sqlite3.Cursor, statement: str, params = None, retry_interval: int = 2) -> sqlite3.Cursor:
"""get connection from pool and execute SQL statement with retry logic if required.
Args:
@@ -60,53 +57,4 @@ def execute_with_retry(cursor: sqlite3.Cursor, statement: str, params = None, re
pool = ConnectionPool(10)
#for one shared connection (used for writes only in WAL mode)
insert_conn = sqlite3.connect(sqlite_db_file, check_same_thread=False)
insert_queue = queue.Queue()
-#converts a row dict back into an object, including retyping
-def row_to_runarchiveview(row: dict) -> RunArchiveView:
-return RunArchive(
-id=row['runner_id'],
-strat_id=row['strat_id'],
-batch_id=row['batch_id'],
-symbol=row['symbol'],
-name=row['name'],
-note=row['note'],
-started=datetime.fromisoformat(row['started']) if row['started'] else None,
-stopped=datetime.fromisoformat(row['stopped']) if row['stopped'] else None,
-mode=row['mode'],
-account=row['account'],
-bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
-bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
-ilog_save=bool(row['ilog_save']),
-profit=float(row['profit']),
-trade_count=int(row['trade_count']),
-end_positions=int(row['end_positions']),
-end_positions_avgp=float(row['end_positions_avgp']),
-metrics=json.loads(row['metrics']) if row['metrics'] else None
-)
-#converts a row dict back into an object, including retyping
-def row_to_runarchive(row: dict) -> RunArchive:
-return RunArchive(
-id=row['runner_id'],
-strat_id=row['strat_id'],
-batch_id=row['batch_id'],
-symbol=row['symbol'],
-name=row['name'],
-note=row['note'],
-started=datetime.fromisoformat(row['started']) if row['started'] else None,
-stopped=datetime.fromisoformat(row['stopped']) if row['stopped'] else None,
-mode=row['mode'],
-account=row['account'],
-bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
-bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
-strat_json=json.loads(row['strat_json']),
-settings=json.loads(row['settings']),
-ilog_save=bool(row['ilog_save']),
-profit=float(row['profit']),
-trade_count=int(row['trade_count']),
-end_positions=int(row['end_positions']),
-end_positions_avgp=float(row['end_positions_avgp']),
-metrics=json.loads(row['metrics']),
-stratvars_toml=row['stratvars_toml']
-)
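The body of execute_with_retry is elided in this diff; the usual shape of such a helper is a loop that retries on sqlite's transient "database is locked" error, sleeping retry_interval seconds between attempts. A generic sketch of the pattern (not the repository implementation):

import sqlite3
import time

def execute_with_retry_sketch(cursor, statement, params=None, retry_interval=2):
    while True:
        try:
            return cursor.execute(statement, params or [])
        except sqlite3.OperationalError as e:
            if "locked" not in str(e):
                raise  # only retry lock contention, re-raise anything else
            time.sleep(retry_interval)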

View File

@@ -1,14 +1,16 @@
-from uuid import UUID
+from uuid import UUID, uuid4
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent,OrderType
#from utils import AttributeDict
from rich import print
from typing import Any, Optional, List, Union
from datetime import datetime, date
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
-from v2realbot.enums.enums import Mode, Account
+from v2realbot.enums.enums import Mode, Account, SchedulerStatus, Moddus
from alpaca.data.enums import Exchange
#models for server side datatables
# Model for individual column data
class ColumnData(BaseModel):
@@ -52,6 +54,15 @@ class DataTablesRequest(BaseModel):
# return user.id
# raise HTTPException(status_code=404, detail=f"Could not find user with id: {id}")
+#generic input for the analyzer (the input can be either a batch_id or a list of runners)
+class AnalyzerInputs(BaseModel):
+function: str
+batch_id: Optional[str] = None
+runner_ids: Optional[List[UUID]] = None
+#additional parameter
+params: Optional[dict] = {}
class RunDay(BaseModel):
"""
Helper object for batch run - carries list of days in format required by run batch manager
@@ -83,12 +94,12 @@ class TestList(BaseModel):
class Trade(BaseModel):
symbol: str
timestamp: datetime
-exchange: Optional[Union[Exchange, str]]
+exchange: Optional[Union[Exchange, str]] = None
price: float
size: float
id: int
-conditions: Optional[List[str]]
+conditions: Optional[List[str]] = None
-tape: Optional[str]
+tape: Optional[str] = None
#persisted object in pickle
@@ -103,8 +114,20 @@ class StrategyInstance(BaseModel):
close_rush: int = 0
stratvars_conf: str
add_data_conf: str
-note: Optional[str]
+note: Optional[str] = None
-history: Optional[str]
+history: Optional[str] = None
+def __setstate__(self, state: dict[Any, Any]) -> None:
+"""
+Hack to allow unpickling models stored from pydantic V1
+"""
+state.setdefault("__pydantic_extra__", {})
+state.setdefault("__pydantic_private__", {})
+if "__pydantic_fields_set__" not in state:
+state["__pydantic_fields_set__"] = state.get("__fields_set__")
+super().__setstate__(state)
class RunRequest(BaseModel):
id: UUID
@@ -114,8 +137,8 @@ class RunRequest(BaseModel):
debug: bool = False
strat_json: Optional[str] = None
ilog_save: bool = False
-bt_from: datetime = None
+bt_from: Optional[datetime] = None
-bt_to: datetime = None
+bt_to: Optional[datetime] = None
#weekdays filter
#if provided, we filter to these days
weekdays_filter: Optional[list] = None
@@ -126,7 +149,33 @@ class RunRequest(BaseModel):
cash: int = 100000
skip_cache: Optional[bool] = False
+#Class that extends RunRequest and is used in the scheduler; it adds just a few fields
+class RunManagerRecord(BaseModel):
+moddus: Moddus
+id: UUID = Field(default_factory=uuid4)
+strat_id: UUID
+symbol: Optional[str] = None
+account: Account
+mode: Mode
+note: Optional[str] = None
+ilog_save: bool = False
+bt_from: Optional[datetime] = None
+bt_to: Optional[datetime] = None
+#weekdays filter
+#if provided, we filter to these days
+weekdays_filter: Optional[list] = None #list of strings 0-6 representing days to run
+#ID GENERATED within the run, ties together all runners in a batch run
+batch_id: Optional[str] = None
+testlist_id: Optional[str] = None
+start_time: str #time (HH:MM) at which the start function is called
+stop_time: Optional[str] = None #time (HH:MM) at which the stop function is called
+status: SchedulerStatus
+last_processed: Optional[datetime] = None
+history: Optional[str] = None
+valid_from: Optional[datetime] = None # US East time zone datetime
+valid_to: Optional[datetime] = None # US East time zone datetime
+runner_id: Optional[UUID] = None #last runner_id from the scheduler after the strategy is started
+strat_running: Optional[bool] = None #automatically updated field based on the status of runner_id above; added by row_to_RunManagerRecord
class RunnerView(BaseModel):
id: UUID
strat_id: UUID
@@ -156,10 +205,10 @@ class Runner(BaseModel):
run_name: Optional[str] = None
run_note: Optional[str] = None
run_ilog_save: Optional[bool] = False
-run_trade_count: Optional[int]
+run_trade_count: Optional[int] = None
-run_profit: Optional[float]
+run_profit: Optional[float] = None
-run_positions: Optional[int]
+run_positions: Optional[int] = None
-run_avgp: Optional[float]
+run_avgp: Optional[float] = None
run_strat_json: Optional[str] = None
run_stopped: Optional[datetime] = None
run_paused: Optional[datetime] = None
@@ -193,41 +242,41 @@ class Bar(BaseModel):
low: float
close: float
volume: float
-trade_count: Optional[float]
+trade_count: Optional[float] = 0
-vwap: Optional[float]
+vwap: Optional[float] = 0
class Order(BaseModel):
id: UUID
submitted_at: datetime
-filled_at: Optional[datetime]
+filled_at: Optional[datetime] = None
-canceled_at: Optional[datetime]
+canceled_at: Optional[datetime] = None
symbol: str
qty: int
status: OrderStatus
order_type: OrderType
-filled_qty: Optional[int]
+filled_qty: Optional[int] = None
-filled_avg_price: Optional[float]
+filled_avg_price: Optional[float] = None
side: OrderSide
-limit_price: Optional[float]
+limit_price: Optional[float] = None
#entity for every complete FILL, linked to a prescribed_trade
class TradeUpdate(BaseModel):
event: Union[TradeEvent, str]
-execution_id: Optional[UUID]
+execution_id: Optional[UUID] = None
order: Order
timestamp: datetime
-position_qty: Optional[float]
+position_qty: Optional[float] = None
-price: Optional[float]
+price: Optional[float] = None
-qty: Optional[float]
+qty: Optional[float] = None
-value: Optional[float]
+value: Optional[float] = None
-cash: Optional[float]
+cash: Optional[float] = None
-pos_avg_price: Optional[float]
+pos_avg_price: Optional[float] = None
-profit: Optional[float]
+profit: Optional[float] = None
-profit_sum: Optional[float]
+profit_sum: Optional[float] = None
-rel_profit: Optional[float]
+rel_profit: Optional[float] = None
-rel_profit_cum: Optional[float]
+rel_profit_cum: Optional[float] = None
-signal_name: Optional[str]
+signal_name: Optional[str] = None
-prescribed_trade_id: Optional[str]
+prescribed_trade_id: Optional[str] = None
class RunArchiveChange(BaseModel):
@@ -252,8 +301,7 @@ class RunArchive(BaseModel):
bt_from: Optional[datetime] = None
bt_to: Optional[datetime] = None
strat_json: Optional[str] = None
-##will be decommissioned, stratvars_toml is used instead
+transferables: Optional[dict] = None #variables that are transferable to the next run
-stratvars: Optional[dict] = None
settings: Optional[dict] = None
ilog_save: Optional[bool] = False
profit: float = 0
@@ -283,6 +331,8 @@ class RunArchiveView(BaseModel):
end_positions: int = 0
end_positions_avgp: float = 0
metrics: Union[dict, str] = None
+batch_profit: float = 0 # Total profit for the batch - now calculated during query
+batch_count: int = 0 # Count of runs in the batch - now calculated during query
#same but with pagination
class RunArchiveViewPagination(BaseModel):
@@ -293,7 +343,7 @@ class RunArchiveViewPagination(BaseModel):
#class for storing stoploss history into ext_data
class SLHistory(BaseModel):
-id: Optional[UUID]
+id: Optional[UUID] = None
time: datetime
sl_val: float
@@ -306,7 +356,7 @@ class RunArchiveDetail(BaseModel):
indicators: List[dict]
statinds: dict
trades: List[TradeUpdate]
-ext_data: Optional[dict]
+ext_data: Optional[dict] = None
class InstantIndicator(BaseModel):
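Background for the many `Optional[...] = None` edits in this file: pydantic v2 no longer treats Optional fields as implicitly defaulting to None, so every field that should stay optional needs an explicit default (the __setstate__ hack above serves the same migration, for models pickled under v1). A minimal illustration, not repository code:

from typing import Optional
from pydantic import BaseModel

class Example(BaseModel):
    maybe_none: Optional[str]             # v2: still required, None merely allowed
    truly_optional: Optional[str] = None  # optional, defaults to None

print(Example(maybe_none=None))  # ok - the required field was supplied explicitly
# Example()                      # would raise: maybe_none is missing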

View File

@@ -0,0 +1,88 @@
from v2realbot.common.model import RunArchive, RunArchiveView, RunManagerRecord
from datetime import datetime
import orjson
import v2realbot.controller.services as cs
#converts a row dict back into an object, including retyping
def row_to_runmanager(row: dict) -> RunManagerRecord:
is_running = cs.is_runner_running(row['runner_id']) if row['runner_id'] else False
res = RunManagerRecord(
moddus=row['moddus'],
id=row['id'],
strat_id=row['strat_id'],
symbol=row['symbol'],
mode=row['mode'],
account=row['account'],
note=row['note'],
ilog_save=bool(row['ilog_save']),
bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
weekdays_filter=[int(x) for x in row['weekdays_filter'].split(',')] if row['weekdays_filter'] else [],
batch_id=row['batch_id'],
testlist_id=row['testlist_id'],
start_time=row['start_time'],
stop_time=row['stop_time'],
status=row['status'],
#last_started=zoneNY.localize(datetime.fromisoformat(row['last_started'])) if row['last_started'] else None,
last_processed=datetime.fromisoformat(row['last_processed']) if row['last_processed'] else None,
history=row['history'],
valid_from=datetime.fromisoformat(row['valid_from']) if row['valid_from'] else None,
valid_to=datetime.fromisoformat(row['valid_to']) if row['valid_to'] else None,
runner_id = row['runner_id'] if row['runner_id'] and is_running else None, #runner_id is only present if it is running
strat_running = is_running) #cannot be relied upon when called from a separate process, as it may not be current
return res
#converts a row dict back into an object, including retyping
def row_to_runarchiveview(row: dict) -> RunArchiveView:
a = RunArchiveView(
id=row['runner_id'],
strat_id=row['strat_id'],
batch_id=row['batch_id'],
symbol=row['symbol'],
name=row['name'],
note=row['note'],
started=datetime.fromisoformat(row['started']) if row['started'] else None,
stopped=datetime.fromisoformat(row['stopped']) if row['stopped'] else None,
mode=row['mode'],
account=row['account'],
bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
ilog_save=bool(row['ilog_save']),
profit=float(row['profit']),
trade_count=int(row['trade_count']),
end_positions=int(row['end_positions']),
end_positions_avgp=float(row['end_positions_avgp']),
metrics=orjson.loads(row['metrics']) if row['metrics'] else None,
batch_profit=int(row['batch_profit']) if row['batch_profit'] and row['batch_id'] else 0,
batch_count=int(row['batch_count']) if row['batch_count'] and row['batch_id'] else 0,
)
return a
#converts a row dict back into an object, including retyping
def row_to_runarchive(row: dict) -> RunArchive:
return RunArchive(
id=row['runner_id'],
strat_id=row['strat_id'],
batch_id=row['batch_id'],
symbol=row['symbol'],
name=row['name'],
note=row['note'],
started=datetime.fromisoformat(row['started']) if row['started'] else None,
stopped=datetime.fromisoformat(row['stopped']) if row['stopped'] else None,
mode=row['mode'],
account=row['account'],
bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
strat_json=orjson.loads(row['strat_json']),
settings=orjson.loads(row['settings']),
ilog_save=bool(row['ilog_save']),
profit=float(row['profit']),
trade_count=int(row['trade_count']),
end_positions=int(row['end_positions']),
end_positions_avgp=float(row['end_positions_avgp']),
metrics=orjson.loads(row['metrics']),
stratvars_toml=row['stratvars_toml'],
transferables=orjson.loads(row['transferables']) if row['transferables'] else None
)
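A sketch of how these converters are typically fed (assumed usage; the calling code is not shown in this diff): sqlite rows are read with a mapping row factory, turned into plain dicts, and then retyped into the pydantic models above:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row  # rows become mapping-like
cur = conn.execute("SELECT 'abc' AS runner_id, 12.5 AS profit")
row = dict(cur.fetchone())      # {'runner_id': 'abc', 'profit': 12.5}
# record = row_to_runarchive(row)  # would then retype the fields as above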

View File

@@ -2,64 +2,31 @@ from alpaca.data.enums import DataFeed
from v2realbot.enums.enums import Mode, Account, FillCondition
from appdirs import user_data_dir
from pathlib import Path
+import os
+from collections import defaultdict
+# Global flag to track if the ml module has been imported (solution for long import times of tensorflow)
+#the first occurrence of using it will load it globally
+_ml_module_loaded = False
#directory for generated images and basic reports
MEDIA_DIRECTORY = Path(__file__).parent.parent.parent / "media"
+RUNNER_DETAIL_DIRECTORY = Path(__file__).parent.parent.parent / "runner_detail"
#location of strat.log - it is used to fetch by gui
+LOG_PATH = Path(__file__).parent.parent
LOG_FILE = Path(__file__).parent.parent / "strat.log"
+JOB_LOG_FILE = Path(__file__).parent.parent / "job.log"
-#'0.0.0.0',
-#currently only the prod server has access to LIVE
-PROD_SERVER_HOSTNAMES = ['tradingeastcoast','David-MacBook-Pro.local'] #,'David-MacBook-Pro.local'
-TEST_SERVER_HOSTNAMES = ['tradingtest']
-#TODO move selected ones to the config db and manage them via the GUI
-#AGGREGATOR filter trades
-#NOTE added F - Inter Market Sweep Order - it occasionally created spikes
-AGG_EXCLUDED_TRADES = ['C','O','4','B','7','V','P','W','U','Z','F']
-OFFLINE_MODE = False
-# ilog lvls = 0,1 - 0 debug, 1 info
-ILOG_SAVE_LEVEL_FROM = 1
-#minimal distance between trades that the aggregator lets through for CBAR (0.001 - blocks anything under 1ms)
-GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN = 0.003
-#normalized price for tick 0.01
-NORMALIZED_TICK_BASE_PRICE = 30.00
-LOG_RUNNER_EVENTS = False
-#no print in console
-QUIET_MODE = True
-#how many consecutive trades with the fill price are necessary for a LIMIT fill to happen in backtesting
-#0 - optimistic, every knot high will fill the order
-#N - N consecutive trades required
-#not impl.yet
-#minimum is 1; on alpaca live it usually comes out at 7-8 for BAC, which is very similar to the price being beaten by half a cent, i.e. 7-8 or FillCondition.SLOW
-BT_FILL_CONS_TRADES_REQUIRED = 2
-#during bt trade execution logs X surrounding trades of the one that triggers the fill
-BT_FILL_LOG_SURROUNDING_TRADES = 10
-#fill condition for limit order in bt
-# fast - price has to be equal or bigger <=
-# slow - price has to be bigger <
-BT_FILL_CONDITION_BUY_LIMIT = FillCondition.SLOW
-BT_FILL_CONDITION_SELL_LIMIT = FillCondition.SLOW
-#TBD TODO not implemented yet
-BT_FILL_PRICE_MARKET_ORDER_PREMIUM = 0.005
-#backend counter of api requests
-COUNT_API_REQUESTS = False
#stratvars that cannot be changed in gui #stratvars that cannot be changed in gui
STRATVARS_UNCHANGEABLES = ['pendingbuys', 'blockbuy', 'jevylozeno', 'limitka'] STRATVARS_UNCHANGEABLES = ['pendingbuys', 'blockbuy', 'jevylozeno', 'limitka']
DATA_DIR = user_data_dir("v2realbot") DATA_DIR = user_data_dir("v2realbot", False)
MODEL_DIR = Path(DATA_DIR)/"models"
#BT DELAYS #BT DELAYS
#profiling #profiling
PROFILING_NEXT_ENABLED = False PROFILING_NEXT_ENABLED = False
PROFILING_OUTPUT_DIR = DATA_DIR PROFILING_OUTPUT_DIR = DATA_DIR
#FILL CONFIGURATION CLASS FOR BACKTESTING #WIP - FILL CONFIGURATION CLASS FOR BACKTESTING
#WIP
class BT_FILL_CONF: class BT_FILL_CONF:
"""" """"
Trida pro konfiguraci backtesting fillu pro dany symbol, pokud neexistuje tak fallback na obecny viz vyse- Trida pro konfiguraci backtesting fillu pro dany symbol, pokud neexistuje tak fallback na obecny viz vyse-
@ -73,24 +40,6 @@ class BT_FILL_CONF:
self.BT_FILL_CONDITION_SELL_LIMIT=BT_FILL_CONDITION_SELL_LIMIT self.BT_FILL_CONDITION_SELL_LIMIT=BT_FILL_CONDITION_SELL_LIMIT
self.BT_FILL_PRICE_MARKET_ORDER_PREMIUM=BT_FILL_PRICE_MARKET_ORDER_PREMIUM self.BT_FILL_PRICE_MARKET_ORDER_PREMIUM=BT_FILL_PRICE_MARKET_ORDER_PREMIUM
""""
LATENCY DELAYS for LIVE eastcoast
.000 trigger - last_trade_time (.4246266)
+.020 vstup do strategie a BUY (.444606)
+.023 submitted (.469198)
+.008 filled (.476695552)
+.023 fill not(.499888)
"""
#TODO změnit názvy delay promennych vystizneji a obecneji
class BT_DELAYS:
trigger_to_strat: float = 0.020
strat_to_sub: float = 0.023
sub_to_fill: float = 0.008
fill_to_not: float = 0.023
#doplnit dle live
limit_order_offset: float = 0
class Keys: class Keys:
def __init__(self, api_key, secret_key, paper, feed) -> None: def __init__(self, api_key, secret_key, paper, feed) -> None:
self.API_KEY = api_key self.API_KEY = api_key
@ -99,7 +48,8 @@ class Keys:
self.FEED = feed self.FEED = feed
# podle modu (PAPER, LIVE) vrati objekt # podle modu (PAPER, LIVE) vrati objekt
# obsahujici klice pro pripojeni k alpace # obsahujici klice pro pripojeni k alpace - používá se pro Trading API a order updates websockets (pristupy relevantni per strategie)
#pro real time data se bere LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY, LIVE_DATA_FEED nize - jelikoz jde o server wide nastaveni
def get_key(mode: Mode, account: Account): def get_key(mode: Mode, account: Account):
if mode not in [Mode.PAPER, Mode.LIVE]: if mode not in [Mode.PAPER, Mode.LIVE]:
print("has to be LIVE or PAPER only") print("has to be LIVE or PAPER only")
@ -121,25 +71,82 @@ HEARTBEAT_TIMEOUT=5
WEB_API_KEY="david" WEB_API_KEY="david"
#PRIMARY PAPER #PRIMARY PAPER
ACCOUNT1_PAPER_API_KEY = 'PKGGEWIEYZOVQFDRY70L' ACCOUNT1_PAPER_API_KEY = os.environ.get('ACCOUNT1_PAPER_API_KEY')
ACCOUNT1_PAPER_SECRET_KEY = 'O5Kt8X4RLceIOvM98i5LdbalItsX7hVZlbPYHy8Y' ACCOUNT1_PAPER_SECRET_KEY = os.environ.get('ACCOUNT1_PAPER_SECRET_KEY')
ACCOUNT1_PAPER_MAX_BATCH_SIZE = 1 ACCOUNT1_PAPER_MAX_BATCH_SIZE = 1
ACCOUNT1_PAPER_PAPER = True ACCOUNT1_PAPER_PAPER = True
ACCOUNT1_PAPER_FEED = DataFeed.SIP #ACCOUNT1_PAPER_FEED = DataFeed.SIP
# Load the data feed type from environment variable
data_feed_type_str = os.environ.get('ACCOUNT1_PAPER_FEED', 'iex') # Default to 'sip' if not set
# Convert the string to DataFeed enum
try:
ACCOUNT1_PAPER_FEED = DataFeed(data_feed_type_str)
except ValueError:
# Handle the case where the environment variable does not match any enum member
print(f"Invalid data feed type: {data_feed_type_str} in ACCOUNT1_PAPER_FEED defaulting to 'iex'")
ACCOUNT1_PAPER_FEED = DataFeed.SIP
#PRIMARY LIVE #PRIMARY LIVE
ACCOUNT1_LIVE_API_KEY = 'AKB5HD32LPDZC9TPUWJT' ACCOUNT1_LIVE_API_KEY = os.environ.get('ACCOUNT1_LIVE_API_KEY')
ACCOUNT1_LIVE_SECRET_KEY = 'Xq1wPSNOtwmlMTAd4cEmdKvNDgfcUYfrOaCccaAs' ACCOUNT1_LIVE_SECRET_KEY = os.environ.get('ACCOUNT1_LIVE_SECRET_KEY')
ACCOUNT1_LIVE_MAX_BATCH_SIZE = 1 ACCOUNT1_LIVE_MAX_BATCH_SIZE = 1
ACCOUNT1_LIVE_PAPER = False ACCOUNT1_LIVE_PAPER = False
ACCOUNT1_LIVE_FEED = DataFeed.SIP #ACCOUNT1_LIVE_FEED = DataFeed.SIP
#SECONDARY PAPER # Load the data feed type from environment variable
ACCOUNT2_PAPER_API_KEY = 'PK0OQHZG03PUZ1SC560V' data_feed_type_str = os.environ.get('ACCOUNT1_LIVE_FEED', 'iex') # Default to 'sip' if not set
ACCOUNT2_PAPER_SECRET_KEY = 'cTglhm7kwRcZfFT27fQWz31sXaxadzQApFDW6Lat'
# Convert the string to DataFeed enum
try:
ACCOUNT1_LIVE_FEED = DataFeed(data_feed_type_str)
except ValueError:
# Handle the case where the environment variable does not match any enum member
print(f"Invalid data feed type: {data_feed_type_str} in ACCOUNT1_LIVE_FEED defaulting to 'iex'")
ACCOUNT1_LIVE_FEED = DataFeed.IEX
#SECONDARY PAPER - Martin
ACCOUNT2_PAPER_API_KEY = os.environ.get('ACCOUNT2_PAPER_API_KEY')
ACCOUNT2_PAPER_SECRET_KEY = os.environ.get('ACCOUNT2_PAPER_SECRET_KEY')
ACCOUNT2_PAPER_MAX_BATCH_SIZE = 1 ACCOUNT2_PAPER_MAX_BATCH_SIZE = 1
ACCOUNT2_PAPER_PAPER = True ACCOUNT2_PAPER_PAPER = True
ACCOUNT2_PAPER_FEED = DataFeed.IEX #ACCOUNT2_PAPER_FEED = DataFeed.IEX
# Load the data feed type from environment variable
data_feed_type_str = os.environ.get('ACCOUNT2_PAPER_FEED', 'iex') # Default to 'sip' if not set
# Convert the string to DataFeed enum
try:
ACCOUNT2_PAPER_FEED = DataFeed(data_feed_type_str)
except ValueError:
# Handle the case where the environment variable does not match any enum member
print(f"Invalid data feed type: {data_feed_type_str} in ACCOUNT2_PAPER_FEED defaulting to 'iex'")
ACCOUNT2_PAPER_FEED = DataFeed.IEX
#SECONDARY LIVE - Martin
# ACCOUNT2_LIVE_API_KEY = os.environ.get('ACCOUNT2_LIVE_API_KEY')
# ACCOUNT2_LIVE_SECRET_KEY = os.environ.get('ACCOUNT2_LIVE_SECRET_KEY')
# ACCOUNT2_LIVE_MAX_BATCH_SIZE = 1
# ACCOUNT2_LIVE_PAPER = True
# #ACCOUNT2_LIVE_FEED = DataFeed.IEX
# # Load the data feed type from environment variable
# data_feed_type_str = os.environ.get('ACCOUNT2_LIVE_FEED', 'iex') # Default to 'sip' if not set
# # Convert the string to DataFeed enum
# try:
# ACCOUNT2_LIVE_FEED = DataFeed(data_feed_type_str)
# except ValueError:
# # Handle the case where the environment variable does not match any enum member
# print(f"Invalid data feed type: {data_feed_type_str} in ACCOUNT2_LIVE_FEED defaulting to 'iex'")
# ACCOUNT2_LIVE_FEED = DataFeed.IEX
#zatim jsou LIVE_DATA nastaveny jako z account1_paper
LIVE_DATA_API_KEY = ACCOUNT1_PAPER_API_KEY
LIVE_DATA_SECRET_KEY = ACCOUNT1_PAPER_SECRET_KEY
#LIVE_DATA_FEED je nastaveny v config_handleru
class KW: class KW:
activate: str = "activate" activate: str = "activate"
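
The same env-parsing block is repeated above for every account feed; a minimal helper sketch to keep them consistent (the name feed_from_env is illustrative, not part of the repo):

import os
from alpaca.data.enums import DataFeed

def feed_from_env(var_name: str, default: str = 'iex') -> DataFeed:
    # hypothetical helper: read a feed from an environment variable, fall back to `default`
    raw = os.environ.get(var_name, default)
    try:
        return DataFeed(raw)
    except ValueError:
        print(f"Invalid data feed type: {raw} in {var_name}, defaulting to '{default}'")
        return DataFeed(default)

# usage: ACCOUNT1_PAPER_FEED = feed_from_env('ACCOUNT1_PAPER_FEED')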

View File

@ -0,0 +1,112 @@
import v2realbot.common.db as db
from v2realbot.common.model import ConfigItem
import v2realbot.utils.config_handler as ch
# region CONFIG db services
#TODO create a module for looking values up from python (get_from_config(var_name, def_value)) - same as in js
#TODO consider moving from JSON to TOML
def get_all_config_items():
    conn = db.pool.get_connection()
    try:
        cursor = conn.cursor()
        cursor.execute('SELECT id, item_name, json_data FROM config_table')
        config_items = [{"id": row[0], "item_name": row[1], "json_data": row[2]} for row in cursor.fetchall()]
    finally:
        db.pool.release_connection(conn)
    return 0, config_items

# Function to get a config item by ID
def get_config_item_by_id(item_id):
    conn = db.pool.get_connection()
    try:
        cursor = conn.cursor()
        cursor.execute('SELECT item_name, json_data FROM config_table WHERE id = ?', (item_id,))
        row = cursor.fetchone()
    finally:
        db.pool.release_connection(conn)
    if row is None:
        return -2, "not found"
    else:
        return 0, {"item_name": row[0], "json_data": row[1]}

# Function to get a config item by name
def get_config_item_by_name(item_name):
    conn = db.pool.get_connection()
    try:
        cursor = conn.cursor()
        # parameterized to avoid SQL injection via item_name
        cursor.execute('SELECT item_name, json_data FROM config_table WHERE item_name = ?', (item_name,))
        row = cursor.fetchone()
    finally:
        db.pool.release_connection(conn)
    if row is None:
        return -2, "not found"
    else:
        return 0, {"item_name": row[0], "json_data": row[1]}

# Function to create a new config item
def create_config_item(config_item: ConfigItem):
    conn = db.pool.get_connection()
    try:
        try:
            cursor = conn.cursor()
            cursor.execute('INSERT INTO config_table (item_name, json_data) VALUES (?, ?)', (config_item.item_name, config_item.json_data))
            item_id = cursor.lastrowid
            conn.commit()
            print(item_id)
        finally:
            db.pool.release_connection(conn)
        return 0, {"id": item_id, "item_name": config_item.item_name, "json_data": config_item.json_data}
    except Exception as e:
        return -2, str(e)

# Function to update a config item by ID
def update_config_item(item_id, config_item: ConfigItem):
    conn = db.pool.get_connection()
    try:
        try:
            cursor = conn.cursor()
            cursor.execute('UPDATE config_table SET item_name = ?, json_data = ? WHERE id = ?', (config_item.item_name, config_item.json_data, item_id))
            conn.commit()
            # refreshing the active item is for now hard-coded here: on update of the "active_profile" item (and at application start)
            if config_item.item_name == "active_profile":
                ch.config_handler.activate_profile()
        finally:
            db.pool.release_connection(conn)
        return 0, {"id": item_id, **config_item.dict()}
    except Exception as e:
        return -2, str(e)

# Function to delete a config item by ID
def delete_config_item(item_id):
    conn = db.pool.get_connection()
    try:
        cursor = conn.cursor()
        cursor.execute('DELETE FROM config_table WHERE id = ?', (item_id,))
        conn.commit()
    finally:
        db.pool.release_connection(conn)
    return 0, {"id": item_id}
# endregion

# Example of using a config directive
# config_directive = "overrides"
# ret, res = get_config_item_by_name(config_directive)
# if ret < 0:
#     print(f"CONFIG OVERRIDE {config_directive} Error {res}")
# else:
#     config = orjson.loads(res["json_data"])
#     print("OVERRIDDEN CFG:", config)
#     for key, value in config.items():
#         if hasattr(cfg, key):
#             print(f"Overriding {key} with {value}")
#             setattr(cfg, key, value)
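
A hedged sketch of the get_from_config helper the first TODO above asks for, reusing this module's own lookup and return conventions (illustrative, not part of this changeset):

import orjson

def get_from_config(var_name, def_value=None):
    # illustrative: resolve a config item by name, falling back to def_value
    ret, res = get_config_item_by_name(var_name)
    if ret < 0:
        return def_value
    try:
        return orjson.loads(res["json_data"])
    except (orjson.JSONDecodeError, TypeError):
        return def_value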

View File

@ -0,0 +1,463 @@
from typing import Any, List, Tuple
from uuid import UUID, uuid4
from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunDay, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
from v2realbot.utils.utils import validate_and_format_time, AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data
from v2realbot.utils.ilog import delete_logs
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
from datetime import datetime
from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer
from threading import Thread, current_thread, Event, enumerate
from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY
import importlib
from alpaca.trading.requests import GetCalendarRequest
from alpaca.trading.client import TradingClient
#from alpaca.trading.models import Calendar
from queue import Queue
from tinydb import TinyDB, Query, where
from tinydb.operations import set
import orjson
import numpy as np
from rich import print
import pandas as pd
from traceback import format_exc
from datetime import timedelta, time
from threading import Lock
import v2realbot.common.db as db
import v2realbot.common.transform as tr
from sqlite3 import OperationalError, Row
import v2realbot.strategyblocks.indicators.custom as ci
from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
from v2realbot.strategyblocks.indicators.indicators_hub import populate_dynamic_indicators
from v2realbot.interfaces.backtest_interface import BacktestInterface
import os
import v2realbot.reporting.metricstoolsimage as mt
import gzip
import os
import msgpack
import v2realbot.controller.services as cs
import v2realbot.scheduler.ap_scheduler as aps
# Functions for the 'run_manager' table
# CREATE TABLE "run_manager" (
# "moddus" TEXT NOT NULL,
# "id" varchar(32),
# "strat_id" varchar(32) NOT NULL,
# "symbol" TEXT,
# "account" TEXT NOT NULL,
# "mode" TEXT NOT NULL,
# "note" TEXT,
# "ilog_save" BOOLEAN,
# "bt_from" TEXT,
# "bt_to" TEXT,
# "weekdays_filter" TEXT,
# "batch_id" TEXT,
# "start_time" TEXT NOT NULL,
# "stop_time" TEXT NOT NULL,
# "status" TEXT NOT NULL,
# "last_processed" TEXT,
# "history" TEXT,
# "valid_from" TEXT,
# "valid_to" TEXT,
# "testlist_id" TEXT,
# "runner_id" varchar2(32),
# PRIMARY KEY("id")
# )
# CREATE INDEX idx_moddus ON run_manager (moddus);
# CREATE INDEX idx_status ON run_manager (status);
# CREATE INDEX idx_status_moddus ON run_manager (status, moddus);
# CREATE INDEX idx_valid_from_to ON run_manager (valid_from, valid_to);
# CREATE INDEX idx_stopped_batch_id ON runner_header (stopped, batch_id);
# CREATE INDEX idx_search_value ON runner_header (strat_id, batch_id);
##weekdays are stored as comma separated values
# Fetching (assume 'weekdays' field is a comma-separated string)
# weekday_str = record['weekdays']
# weekdays = [int(x) for x in weekday_str.split(',')]
# # ... logic to check whether today's weekday is in 'weekdays'
# # Storing
# weekdays = [1, 2, 5] # Example
# weekday_str = ",".join(str(x) for x in weekdays)
# update_data = {'weekdays': weekday_str}
# # ... use in an SQL UPDATE statement
# for row in records:
# row['weekdays_filter'] = [int(x) for x in row['weekdays_filter'].split(',')] if row['weekdays_filter'] else []
#get stratin info return
# strat : StrategyInstance = None
# result, strat = cs.get_stratin("625760ac-6376-47fa-8989-1e6a3f6ab66a")
# if result == 0:
# print(strat)
# else:
# print("Error:", strat)
# Fetch all
#result, records = fetch_all_run_manager_records()
#TODO consider extending the output with strat_status (running/stopped)
def fetch_all_run_manager_records() -> list[RunManagerRecord]:
conn = db.pool.get_connection()
try:
conn.row_factory = Row
cursor = conn.cursor()
cursor.execute('SELECT * FROM run_manager')
rows = cursor.fetchall()
results = []
#Transform row to object
for row in rows:
#add transformed object into result list
results.append(tr.row_to_runmanager(row))
return 0, results
finally:
conn.row_factory = None
db.pool.release_connection(conn)
# Fetch by strategy_id
# result, record = fetch_run_manager_record_by_id('625760ac-6376-47fa-8989-1e6a3f6ab66a')
def fetch_run_manager_record_by_id(strategy_id) -> RunManagerRecord:
conn = db.pool.get_connection()
try:
conn.row_factory = Row
cursor = conn.cursor()
cursor.execute('SELECT * FROM run_manager WHERE id = ?', (str(strategy_id),))
row = cursor.fetchone()
if row is None:
return -2, "not found"
else:
return 0, tr.row_to_runmanager(row)
except Exception as e:
print("ERROR while fetching all records:", str(e) + format_exc())
return -2, str(e) + format_exc()
finally:
conn.row_factory = None
db.pool.release_connection(conn)
def add_run_manager_record(new_record: RunManagerRecord):
#validation/standardization of time
new_record.start_time = validate_and_format_time(new_record.start_time)
if new_record.start_time is None:
return -2, f"Invalid start_time format {new_record.start_time}"
if new_record.stop_time is not None:
new_record.stop_time = validate_and_format_time(new_record.stop_time)
if new_record.stop_time is None:
return -2, f"Invalid stop_time format {new_record.stop_time}"
if new_record.batch_id is None:
new_record.batch_id = str(uuid4())[:8]
conn = db.pool.get_connection()
try:
strat : StrategyInstance = None
result, strat = cs.get_stratin(id=str(new_record.strat_id))
if result == 0:
new_record.symbol = strat.symbol
else:
return -1, f"Strategy {new_record.strat_id} not found"
cursor = conn.cursor()
# Construct a suitable INSERT query based on your RunManagerRecord fields
insert_query = """
INSERT INTO run_manager (moddus, id, strat_id, symbol,account, mode, note,ilog_save,
bt_from, bt_to, weekdays_filter, batch_id,
start_time, stop_time, status, last_processed,
history, valid_from, valid_to, testlist_id)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
"""
values = [
new_record.moddus, str(new_record.id), str(new_record.strat_id), new_record.symbol, new_record.account, new_record.mode, new_record.note,
int(new_record.ilog_save),
new_record.bt_from.isoformat() if new_record.bt_from is not None else None,
new_record.bt_to.isoformat() if new_record.bt_to is not None else None,
",".join(str(x) for x in new_record.weekdays_filter) if new_record.weekdays_filter else None,
new_record.batch_id, new_record.start_time,
new_record.stop_time, new_record.status,
new_record.last_processed.isoformat() if new_record.last_processed is not None else None,
new_record.history,
new_record.valid_from.isoformat() if new_record.valid_from is not None else None,
new_record.valid_to.isoformat() if new_record.valid_to is not None else None,
new_record.testlist_id
]
db.execute_with_retry(cursor, insert_query, values)
conn.commit()
#Add APS scheduler job refresh
res, result = aps.initialize_jobs()
if res < 0:
return -2, f"Error initializing jobs: {res} {result}"
return 0, new_record.id # Assuming success, you might return something more descriptive
except Exception as e:
print("ERROR while adding record:", str(e) + format_exc())
return -2, str(e) + format_exc()
finally:
db.pool.release_connection(conn)
# Update (example)
# update_data = {'last_started': '2024-02-13 10:35:00'}
# result, message = update_run_manager_record('625760ac-6376-47fa-8989-1e6a3f6ab66a', update_data)
def update_run_manager_record(record_id, updated_record: RunManagerRecord):
#validation/standardization of time
updated_record.start_time = validate_and_format_time(updated_record.start_time)
if updated_record.start_time is None:
return -2, f"Invalid start_time format {updated_record.start_time}"
if updated_record.stop_time is not None:
updated_record.stop_time = validate_and_format_time(updated_record.stop_time)
if updated_record.stop_time is None:
return -2, f"Invalid stop_time format {updated_record.stop_time}"
conn = db.pool.get_connection()
try:
cursor = conn.cursor()
#strategy lookup check, if strategy still exists
strat : StrategyInstance = None
result, strat = cs.get_stratin(id=str(updated_record.strat_id))
if result == 0:
updated_record.symbol = strat.symbol
else:
return -1, f"Strategy {updated_record.strat_id} not found"
#remove values with None, so they are not updated
#updated_record_dict = updated_record.dict(exclude_none=True)
# Construct update query and handle weekdays conversion
update_query = 'UPDATE run_manager SET '
update_params = []
for key, value in updated_record.dict().items(): # Iterate over model attributes
if key in ['id', 'strat_running']: # Skip the primary key and non-column fields
continue
update_query += f"{key} = ?, "
if key == "ilog_save":
value = int(value)
elif key in ["strat_id", "runner_id"]:
value = str(value) if value else None
elif key == "weekdays_filter":
value = ",".join(str(x) for x in value) if value else None
elif key in ['valid_from', 'valid_to', 'bt_from', 'bt_to', 'last_processed']:
value = value.isoformat() if value else None
update_params.append(value)
# if 'weekdays_filter' in updated_record.dict():
# updated_record.weekdays_filter = ",".join(str(x) for x in updated_record.weekdays_filter)
update_query = update_query[:-2] # Remove trailing comma and space
update_query += ' WHERE id = ?'
update_params.append(str(record_id))
db.execute_with_retry(cursor, update_query, update_params)
#cursor.execute(update_query, update_params)
conn.commit()
#Add APS scheduler job refresh
res, result = aps.initialize_jobs()
if res < 0:
return -2, f"Error initializing jobs: {res} {result}"
except Exception as e:
print("ERROR while updating record:", str(e) + format_exc())
return -2, str(e) + format_exc()
finally:
db.pool.release_connection(conn)
return 0, record_id
# result, message = delete_run_manager_record('625760ac-6376-47fa-8989-1e6a3f6ab66a')
def delete_run_manager_record(record_id):
conn = db.pool.get_connection()
try:
cursor = conn.cursor()
db.execute_with_retry(cursor, 'DELETE FROM run_manager WHERE id = ?', (str(record_id),))
#cursor.execute('DELETE FROM run_manager WHERE id = ?', (str(strategy_id),))
conn.commit()
except Exception as e:
print("ERROR while deleting record:", str(e) + format_exc())
return -2, str(e) + format_exc()
finally:
db.pool.release_connection(conn)
return 0, record_id
def fetch_scheduled_candidates_for_start_and_stop(market_datetime_now, market) -> tuple[int, dict]:
"""
Fetches all active records from the 'run_manager' table where the mode is 'schedule'. It checks if the current
time in the America/New_York timezone is within the operational intervals specified by 'start_time' and 'stop_time'
for each record. This function is designed to correctly handle scenarios where the operational interval crosses
midnight, as well as intervals contained within a single day.
The function localizes 'valid_from', 'valid_to', 'start_time', and 'stop_time' using the 'zoneNY' timezone object
for accurate comparison with the current time.
Parameters:
market_datetime_now (datetime): The current date and time in the America/New_York timezone.
market (str): The market identifier.
Returns:
Tuple[int, dict]: A tuple where the first element is a status code (0 for success, -2 for error), and the
second element is a dictionary. This dictionary has keys 'start' and 'stop', each containing a list of
RunManagerRecord objects meeting the respective criteria. If an error occurs, the second element is a
descriptive error message.
Note:
- This function assumes that the 'zoneNY' pytz timezone object is properly defined and configured to represent
the America/New_York timezone.
- It also assumes that the 'run_manager' table exists in the database with the required columns.
- 'start_time' and 'stop_time' are expected to be strings representing times in 24-hour format.
- If 'valid_from', 'valid_to', 'start_time', or 'stop_time' are NULL in the database, they are considered as
having unlimited boundaries.
Caveat: there is one more edge case where this may not work: when the times are set so the strategy
runs across midnight, but the record is only switched on later, after midnight
(https://chat.openai.com/c/3c77674a-8a2c-45aa-afbd-ab140f473e07)
"""
conn = db.pool.get_connection()
try:
conn.row_factory = Row
cursor = conn.cursor()
# Get current datetime in America/New York timezone
market_datetime_now_str = market_datetime_now.strftime('%Y-%m-%d %H:%M:%S')
current_time_str = market_datetime_now.strftime('%H:%M')
print("current_market_datetime_str:", market_datetime_now_str)
print("current_time_str:", current_time_str)
# Select also supports scenarios where strategy runs overnight
# SQL query to fetch records with active status and date constraints for both start and stop times
query = """
SELECT *,
CASE
WHEN start_time <= stop_time AND (? >= start_time AND ? < stop_time) OR
start_time > stop_time AND (? >= start_time OR ? < stop_time) THEN 1
ELSE 0
END as is_start_time,
CASE
WHEN start_time <= stop_time AND (? >= stop_time OR ? < start_time) OR
start_time > stop_time AND (? >= stop_time AND ? < start_time) THEN 1
ELSE 0
END as is_stop_time
FROM run_manager
WHERE status = 'active' AND moddus = 'schedule' AND
((valid_from IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_from) <= ?) AND
(valid_to IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_to) >= ?))
"""
cursor.execute(query, (current_time_str, current_time_str, current_time_str, current_time_str,
current_time_str, current_time_str, current_time_str, current_time_str,
market_datetime_now_str, market_datetime_now_str))
rows = cursor.fetchall()
start_candidates = []
stop_candidates = []
for row in rows:
run_manager_record = tr.row_to_runmanager(row)
if row['is_start_time']:
start_candidates.append(run_manager_record)
if row['is_stop_time']:
stop_candidates.append(run_manager_record)
results = {'start': start_candidates, 'stop': stop_candidates}
return 0, results
except Exception as e:
msg_err = f"ERROR while fetching records for start and stop times with datetime {market_datetime_now_str}: {str(e)} {format_exc()}"
print(msg_err)
return -2, msg_err
finally:
conn.row_factory = None
db.pool.release_connection(conn)
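
The CASE expressions above implement start/stop windows that may cross midnight; a minimal pure-Python restatement of the same comparisons (illustrative only, assuming 'HH:MM' strings as stored in the table):

def in_start_window(now: str, start_time: str, stop_time: str) -> bool:
    # same-day window, e.g. 09:30-16:00
    if start_time <= stop_time:
        return start_time <= now < stop_time
    # overnight window, e.g. 22:00-02:00
    return now >= start_time or now < stop_time

assert in_start_window("23:30", "22:00", "02:00")      # overnight run still inside
assert not in_start_window("03:00", "22:00", "02:00")  # past the stop time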
def fetch_startstop_scheduled_candidates(market_datetime_now, time_check, market = "US") -> tuple[int, list[RunManagerRecord]]:
"""
Fetches all active records from the 'run_manager' table where moddus is schedule, the current date and time
in the America/New_York timezone falls between the 'valid_from' and 'valid_to' datetime
fields, and either 'start_time' or 'stop_time' matches the specified condition with the current time.
If 'valid_from', 'valid_to', or the time column ('start_time'/'stop_time') are NULL, they are considered
as having unlimited boundaries.
The function localizes the 'valid_from', 'valid_to', and the time column times using the 'zoneNY'
timezone object for accurate comparison with the current time.
Parameters:
market_datetime_now (datetime): Current datetime in the market timezone.
time_check (str): Either 'start' or 'stop', indicating which time condition to check.
market (str): The market for which to fetch candidates.
Returns:
Tuple[int, list[RunManagerRecord]]: A tuple where the first element is a status code
(0 for success, -2 for error), and the second element is a list of RunManagerRecord
objects meeting the criteria. If an error occurs, the second element is a descriptive
error message.
Note:
This function assumes that the 'zoneNY' pytz timezone object is properly defined and
configured to represent the America/New_York timezone. It also assumes that the
'run_manager' table exists in the database with the columns as described in the
provided schema.
"""
if time_check not in ['start', 'stop']:
return -2, "Invalid time_check parameter. Must be 'start' or 'stop'."
conn = db.pool.get_connection()
try:
conn.row_factory = Row
cursor = conn.cursor()
# Get current datetime in America/New York timezone
market_datetime_now_str = market_datetime_now.strftime('%Y-%m-%d %H:%M:%S')
current_time_str = market_datetime_now.strftime('%H:%M')
print("current_market_datetime_str:", market_datetime_now_str)
print("current_time_str:", current_time_str)
# SQL query to fetch records with active status, date constraints, and time condition
time_column = 'start_time' if time_check == 'start' else 'stop_time'
query = f"""
SELECT * FROM run_manager
WHERE status = 'active' AND moddus = 'schedule' AND
((valid_from IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_from) <= ?) AND
(valid_to IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_to) >= ?)) AND
({time_column} IS NULL OR {time_column} <= ?)
"""
cursor.execute(query, (market_datetime_now_str, market_datetime_now_str, current_time_str))
rows = cursor.fetchall()
results = [tr.row_to_runmanager(row) for row in rows]
return 0, results
except Exception as e:
msg_err = f"ERROR while fetching records based on {time_check} time with datetime {market_datetime_now_str}: {str(e)} {format_exc()}"
print(msg_err)
return -2, msg_err
finally:
conn.row_factory = None
db.pool.release_connection(conn)
if __name__ == "__main__":
res, sada = fetch_startstop_scheduled_candidates(datetime.now().astimezone(zoneNY), "start")
if res == 0:
print(sada)
else:
print("Error:", sada)
# from apscheduler.schedulers.background import BackgroundScheduler
# import time
# def print_hello():
# print("Hello")
# def schedule_job():
# scheduler = BackgroundScheduler()
# scheduler.add_job(print_hello, 'interval', seconds=10)
# scheduler.start()
# schedule_job()

View File

@ -0,0 +1 @@
#PLACEHOLDER TO RUNNER_DETAILS SERVICES - refactored

File diff suppressed because it is too large

View File

@@ -52,6 +52,16 @@ class Account(str, Enum):
    """
    ACCOUNT1 = "ACCOUNT1"
    ACCOUNT2 = "ACCOUNT2"
+class Moddus(str, Enum):
+    """
+    Moddus for a RunManager record
+    schedule - scheduled record
+    queue - queued record
+    """
+    SCHEDULE = "schedule"
+    QUEUE = "queue"
class RecordType(str, Enum):
    """
    Represents output of aggregator
@@ -60,9 +70,19 @@ class RecordType(str, Enum):
    BAR = "bar"
    CBAR = "cbar"
    CBARVOLUME = "cbarvolume"
+    CBARDOLLAR = "cbardollar"
    CBARRENKO = "cbarrenko"
    TRADE = "trade"
+class SchedulerStatus(str, Enum):
+    """
+    ACTIVE - active scheduling
+    SUSPENDED - suspended from scheduling
+    """
+    ACTIVE = "active"
+    SUSPENDED = "suspended"
class Mode(str, Enum):
    """
    LIVE - live on production
@@ -76,7 +96,6 @@ class Mode(str, Enum):
    BT = "backtest"
    PREP = "prep"
-
class StartBarAlign(str, Enum):
    """
    Represents first bar start time alignment according to timeframe
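
Since these enums subclass str, they compare equal to the raw strings stored in the run_manager rows, which is why the SQL filters on moddus = 'schedule' work directly; a quick illustration:

from v2realbot.enums.enums import Moddus, SchedulerStatus

assert Moddus.SCHEDULE == "schedule"                        # matches the moddus column filter
assert SchedulerStatus("active") is SchedulerStatus.ACTIVE  # round-trip from a DB value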

View File

@@ -2,9 +2,9 @@ from alpaca.trading.enums import OrderSide, OrderType
from threading import Lock
from v2realbot.interfaces.general_interface import GeneralInterface
from v2realbot.backtesting.backtester import Backtester
-from v2realbot.config import BT_DELAYS, COUNT_API_REQUESTS
from datetime import datetime
from v2realbot.utils.utils import zoneNY
+import v2realbot.utils.config_handler as cfh
"""
backtester methods can be called
@@ -19,7 +19,7 @@ class BacktestInterface(GeneralInterface):
    def __init__(self, symbol, bt: Backtester) -> None:
        self.symbol = symbol
        self.bt = bt
-        self.count_api_requests = COUNT_API_REQUESTS
+        self.count_api_requests = cfh.config_handler.get_val('COUNT_API_REQUESTS')
        self.mincnt = list([dict(minute=0,count=0)])
        #TODO we can probably drop time from the API; BT would take it directly from self.time (don't forget to add BT_DELAYS)
        # self.time = self.bt.time
@@ -43,33 +43,33 @@
    def buy(self, size = 1, repeat: bool = False):
        self.count()
        #add REST API latency
-        return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.BUY,size=size,order_type = OrderType.MARKET)
+        return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.BUY,size=size,order_type = OrderType.MARKET)
    """buy limit"""
    def buy_l(self, price: float, size: int = 1, repeat: bool = False, force: int = 0):
        self.count()
-        return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.BUY,size=size,price=price,order_type = OrderType.LIMIT)
+        return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.BUY,size=size,price=price,order_type = OrderType.LIMIT)
    """sell market"""
    def sell(self, size = 1, repeat: bool = False):
        self.count()
-        return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.SELL,size=size,order_type = OrderType.MARKET)
+        return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.SELL,size=size,order_type = OrderType.MARKET)
    """sell limit"""
    async def sell_l(self, price: float, size = 1, repeat: bool = False):
        self.count()
-        return self.bt.submit_order(time=self.bt.time + BT_DELAYS.strat_to_sub,symbol=self.symbol,side=OrderSide.SELL,size=size,price=price,order_type = OrderType.LIMIT)
+        return self.bt.submit_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),symbol=self.symbol,side=OrderSide.SELL,size=size,price=price,order_type = OrderType.LIMIT)
    """replace order"""
    async def repl(self, orderid: str, price: float = None, size: int = None, repeat: bool = False):
        self.count()
-        return self.bt.replace_order(time=self.bt.time + BT_DELAYS.strat_to_sub,id=orderid,size=size,price=price)
+        return self.bt.replace_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'),id=orderid,size=size,price=price)
    """cancel order"""
    #TBD execute beforehand?
    def cancel(self, orderid: str):
        self.count()
-        return self.bt.cancel_order(time=self.bt.time + BT_DELAYS.strat_to_sub, id=orderid)
+        return self.bt.cancel_order(time=self.bt.time + cfh.config_handler.get_val('BT_DELAYS','strat_to_sub'), id=orderid)
    """get positions ->(size,avgp)"""
    #TBD execute beforehand?
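
Each order call above re-resolves the delay through config_handler; if that lookup is hot, it could be memoized. A hypothetical micro-optimization, not part of this changeset, assuming the value is stable for the lifetime of a backtest run:

from functools import lru_cache

@lru_cache(maxsize=None)
def cached_val(*path):
    # cache config lookups; a config reload would require cached_val.cache_clear()
    import v2realbot.utils.config_handler as cfh
    return cfh.config_handler.get_val(*path)

# usage: self.bt.time + cached_val('BT_DELAYS', 'strat_to_sub')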

View File

@@ -40,7 +40,9 @@ class LiveInterface(GeneralInterface):
            return market_order.id
        except Exception as e:
-            print("Failed to send buy", str(e))
+            reason = "Failed to send market buy:" + str(e) + format_exc()
+            print(reason)
+            send_to_telegram(reason)
            return -1
    """buy limit"""
@@ -65,7 +67,9 @@ class LiveInterface(GeneralInterface):
            return limit_order.id
        except Exception as e:
-            print("Failed to send the limit order", str(e))
+            reason = "Failed to send the buy limit order:" + str(e) + format_exc()
+            print(reason)
+            send_to_telegram(reason)
            return -1
    """sell market"""
@@ -87,7 +91,9 @@ class LiveInterface(GeneralInterface):
            return market_order.id
        except Exception as e:
-            print("Failed to send sell", str(e))
+            reason = "Failed to send sell:" + str(e) + format_exc()
+            print(reason)
+            send_to_telegram(reason)
            return -1
    """sell limit"""
@@ -112,8 +118,9 @@ class LiveInterface(GeneralInterface):
            return limit_order.id
        except Exception as e:
-            print("Failed to send sell_l", str(e))
-            #raise Exception(e)
+            reason = "Failed to send the sell limit order:" + str(e) + format_exc()
+            print(reason)
+            send_to_telegram(reason)
            return -1
    """order replace"""
@@ -136,7 +143,9 @@ class LiveInterface(GeneralInterface):
            if e.code == 42210000: return orderid
            else:
                ##maybe just always return ok here
-                print("Could not replace the profit order. Problem", str(e))
+                reason = "Could not replace the profit order. Problem:" + str(e) + format_exc()
+                print(reason)
+                send_to_telegram(reason)
                return -1
                #raise Exception(e)
@@ -150,7 +159,9 @@ class LiveInterface(GeneralInterface):
            #order doesn't exist
            if e.code == 40410000: return 0
            else:
-                print("Failed to cancel the order", str(e))
+                reason = "Failed to cancel the order:" + str(e) + format_exc()
+                print(reason)
+                send_to_telegram(reason)
                #raise Exception(e)
                return -1
@@ -162,7 +173,7 @@ class LiveInterface(GeneralInterface):
            return a.avg_entry_price, a.qty
        except (APIError, Exception) as e:
            #no position
-            if e.code == 40410000: return 0,0
+            if hasattr(e, 'code') and e.code == 40410000: return 0,0
            else:
                reason = "Exception when calling LIVE interface pos, REPEATING:" + str(e) + format_exc()
                print("API ERROR: Failed to get the position.", reason)
@@ -178,7 +189,9 @@ class LiveInterface(GeneralInterface):
            #list of Orders (orderlist[0].id)
            return orderlist
        except Exception as e:
-            print("Error while fetching orders.", str(e))
+            reason = "Error while fetching orders:" + str(e) + format_exc()
+            print(reason)
+            send_to_telegram(reason)
            #raise Exception (e)
            return -1
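
The handlers above repeat the same report-and-return pattern; a hedged sketch of a shared helper (report_order_error is illustrative, not a repo function):

from traceback import format_exc
from v2realbot.utils.utils import send_to_telegram

def report_order_error(action: str, e: Exception) -> int:
    # print + telegram + the interface's -1 error convention in one place
    reason = f"Failed to {action}: {e} {format_exc()}"
    print(reason)
    send_to_telegram(reason)
    return -1

# usage inside an except block: return report_order_error("send market buy", e)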

View File

@@ -3,7 +3,7 @@
"""
from v2realbot.enums.enums import RecordType, StartBarAlign
from datetime import datetime, timedelta
-from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, Queue,is_open_hours,zoneNY
+from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, Queue,is_open_hours,zoneNY, zoneUTC
from queue import Queue
from rich import print
from v2realbot.enums.enums import Mode
@@ -11,9 +11,10 @@ import threading
from copy import deepcopy
from msgpack import unpackb
import os
-from v2realbot.config import DATA_DIR, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, AGG_EXCLUDED_TRADES
+from v2realbot.config import DATA_DIR
-import pickle
import dill
+import gzip
+import v2realbot.utils.config_handler as cfh

class TradeAggregator:
    def __init__(self,
@@ -24,7 +25,7 @@ class TradeAggregator:
                 align: StartBarAlign = StartBarAlign.ROUND,
                 mintick: int = 0,
                 exthours: bool = False,
-                 excludes: list = AGG_EXCLUDED_TRADES,
+                 excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES'),
                 skip_cache: bool = False):
        """
        UPDATED VERSION - returns multiple records
@@ -47,7 +48,7 @@
        self.excludes = excludes
        self.skip_cache = skip_cache
-        if mintick >= resolution:
+        if resolution > 0 and mintick >= resolution:
            print("Mintick must be smaller than resolution")
            raise Exception
@@ -149,7 +150,7 @@
        # else:
        data['t'] = parse_alpaca_timestamp(data['t'])
-        if not is_open_hours(datetime.fromtimestamp(data['t'])) and self.exthours is False:
+        if not is_open_hours(datetime.fromtimestamp(data['t'], tz=zoneUTC)) and self.exthours is False:
            #print("AGG: trade not in open hours skipping", datetime.fromtimestamp(data['t']).astimezone(zoneNY))
            return []
@@ -178,14 +179,30 @@
        # return
        # else: pass
-        if self.rectype in (RecordType.BAR, RecordType.CBAR):
-            return await self.calculate_time_bar(data, symbol)
-        if self.rectype == RecordType.CBARVOLUME:
-            return await self.calculate_volume_bar(data, symbol)
-        if self.rectype == RecordType.CBARRENKO:
-            return await self.calculate_renko_bar(data, symbol)
+        # if self.rectype in (RecordType.BAR, RecordType.CBAR):
+        #     return await self.calculate_time_bar(data, symbol)
+        # if self.rectype == RecordType.CBARVOLUME:
+        #     return await self.calculate_volume_bar(data, symbol)
+        # if self.rectype == RecordType.CBARRENKO:
+        #     return await self.calculate_renko_bar(data, symbol)
+        match self.rectype:
+            case RecordType.BAR | RecordType.CBAR:
+                return await self.calculate_time_bar(data, symbol)
+            case RecordType.CBARVOLUME:
+                return await self.calculate_volume_bar(data, symbol)
+            case RecordType.CBARDOLLAR:
+                return await self.calculate_dollar_bar(data, symbol)
+            case RecordType.CBARRENKO:
+                return await self.calculate_renko_bar(data, symbol)

    async def calculate_time_bar(self, data, symbol):
        #print("barstart",datetime.fromtimestamp(self.bar_start))
@@ -276,7 +293,7 @@
            self.diff_price = True
        self.last_price = data['p']
-        if float(data['t']) - float(self.lasttimestamp) < GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN:
+        if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
            self.trades_too_close = True
        else:
            self.trades_too_close = False
@@ -303,13 +320,13 @@
        #TODO: in the future, figure out how to always place trades into the correct interval even when there are few of them
        #align the first bar's time to the timeframe it belongs to (e.g. 5, 10, 15 ...) (ROUND)
        if self.align == StartBarAlign.ROUND and self.bar_start == 0:
-            t = datetime.fromtimestamp(data['t'])
+            t = datetime.fromtimestamp(data['t'], tz=zoneUTC)
            t = t - timedelta(seconds=t.second % self.resolution,microseconds=t.microsecond)
            self.bar_start = datetime.timestamp(t)
        #or use the trade's time rounded to seconds (RANDOM)
        else:
            #store its timestamp (resolution is counted from it)
-            t = datetime.fromtimestamp(int(data['t']))
+            t = datetime.fromtimestamp(int(data['t']), tz=zoneUTC)
            #timestamp
            self.bar_start = int(data['t'])
@@ -359,7 +376,7 @@
        if self.mintick != 0 and self.lastBarConfirmed:
            #x seconds must pass from the start of a new bar before we send an update
            #the start of the new bar + X s must be greater than the current trade
-            if (self.newBar['time'] + timedelta(seconds=self.mintick)) > datetime.fromtimestamp(data['t']):
+            if (self.newBar['time'] + timedelta(seconds=self.mintick)) > datetime.fromtimestamp(data['t'], tz=zoneUTC):
                #print("waiting for mintick")
                return []
            else:
@@ -426,7 +443,7 @@
                "trades": 1,
                "hlcc4": data['p'],
                "confirmed": 0,
-                "time": datetime.fromtimestamp(data['t']),
+                "time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
                "updated": data['t'],
                "vwap": data['p'],
                "index": self.barindex,
@@ -460,7 +477,7 @@
                "trades": 1,
                "hlcc4":data['p'],
                "confirmed": 1,
-                "time": datetime.fromtimestamp(data['t']),
+                "time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
                "updated": data['t'],
                "vwap": data['p'],
                "index": self.barindex,
@@ -523,7 +540,7 @@
            self.diff_price = True
        self.last_price = data['p']
-        if float(data['t']) - float(self.lasttimestamp) < GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN:
+        if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
            self.trades_too_close = True
        else:
            self.trades_too_close = False
@@ -551,6 +568,179 @@
        else:
            return []
    #WIP - review the code and test it
    async def calculate_dollar_bar(self, data, symbol):
        """
        Aggregates DOLLAR BARS -
        main variables
        - self.openedBar (dict) = stateful, holds the active unconfirmed bar
        - confirmedBars (list) = stateless, holds the confirmed bars that are flushed at the end of the function
        """
        #volume_bucket = 10000 #daily MA volume from the 30-day average divided by 50 - move to config
        dollar_bucket = self.resolution
        #confirmed bars ready to be returned
        confirmedBars = []
        #confirms the existing bar and queues it for return
        def confirm_existing():
            self.openedBar['confirmed'] = 1
            self.openedBar['vwap'] = self.vwaphelper / self.openedBar['volume']
            self.vwaphelper = 0
            #store the start of the confirmed bar
            #self.lastBarConfirmed = self.openedBar['time']
            self.openedBar['updated'] = data['t']
            confirmedBars.append(deepcopy(self.openedBar))
            self.openedBar = None
            #TBD after each confirmation, bump the time by a nanosecond (for display in the gui)
            #data['t'] = data['t'] + 0.000001
        #init unconfirmed - the bucket size was checked beforehand
        def initialize_unconfirmed(size):
            #initialize a new bar
            self.vwaphelper += (data['p'] * size)
            self.barindex += 1
            self.openedBar = {
                "close": data['p'],
                "high": data['p'],
                "low": data['p'],
                "open": data['p'],
                "volume": size,
                "trades": 1,
                "hlcc4": data['p'],
                "confirmed": 0,
                "time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
                "updated": data['t'],
                "vwap": data['p'],
                "index": self.barindex,
                "resolution": dollar_bucket
            }
        def update_unconfirmed(size):
            #compute the vwap - we need the previous values
            self.vwaphelper += (data['p'] * size)
            self.openedBar['updated'] = data['t']
            self.openedBar['close'] = data['p']
            self.openedBar['high'] = max(self.openedBar['high'],data['p'])
            self.openedBar['low'] = min(self.openedBar['low'],data['p'])
            self.openedBar['volume'] = self.openedBar['volume'] + size
            self.openedBar['trades'] = self.openedBar['trades'] + 1
            self.openedBar['vwap'] = self.vwaphelper / self.openedBar['volume']
            #play around with this rounding
            self.openedBar['hlcc4'] = round((self.openedBar['high']+self.openedBar['low']+self.openedBar['close']+self.openedBar['close'])/4,3)
        #init new - confirmed
        def initialize_confirmed(size):
            #store the start of the confirmed bar
            #self.lastBarConfirmed = datetime.fromtimestamp(data['t'])
            self.barindex += 1
            confirmedBars.append({
                "close": data['p'],
                "high": data['p'],
                "low": data['p'],
                "open": data['p'],
                "volume": size,
                "trades": 1,
                "hlcc4": data['p'],
                "confirmed": 1,
                "time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
                "updated": data['t'],
                "vwap": data['p'],
                "index": self.barindex,
                "resolution": dollar_bucket
            })
        #current trade dollar value
        trade_dollar_val = int(data['s'])*float(data['p'])
        #an existing bar is open and this trade fits into it
        if self.openedBar is not None and trade_dollar_val + self.openedBar['volume']*self.openedBar['close'] < dollar_bucket:
            #we fit into the existing bar (i.e. we do not exceed the bucket)
            #update the existing unconfirmed bar
            update_unconfirmed(int(data['s']))
        #the trade does not fit, or no previous bar exists
        else:
            #1) a previous bar exists - top it up to the bucket size and mark it confirmed
            if self.openedBar is not None:
                #top it up with the remainder (bucket_left is the remaining volume)
                opened_bar_dollar_val = self.openedBar['volume']*self.openedBar['close']
                bucket_left = int((dollar_bucket - opened_bar_dollar_val)/float(data['p']))
                # - update and confirm the bar
                update_unconfirmed(bucket_left)
                confirm_existing()
                #the remaining quantity goes into further processing
                data['s'] = int(data['s']) - bucket_left
                #bump the time by a nanosecond
                data['t'] = round((data['t']) + 0.000001,6)
            #2) create a new bar (or bars); the trade fits into one
            if int(data['s'])*float(data['p']) < dollar_bucket:
                #create a new unconfirmed bar
                initialize_unconfirmed(int(data['s']))
            #it does not fit - then create 1 to N further bars (the last one unconfirmed)
            else:
                # >>> for i in range(0, 550, 500):
                # ...     print(i)
                # ...
                # 0
                # 500
                #create fully confirmed buckets (as many as fully fit)
                for size in range(int(dollar_bucket/float(data['p'])), int(data['s']), int(dollar_bucket/float(data['p']))):
                    initialize_confirmed(dollar_bucket/float(data['p']))
                    #bump the time by a nanosecond
                    data['t'] = round((data['t']) + 0.000001,6)
                    #create a complete full bucket with the same prices and size
                    #fill into the return list
                #if there is a remainder, create an unconfirmed bar from it
                zbytek = int(data['s'])*float(data['p']) % dollar_bucket
                #create an unconfirmed bar from the remainder (zbytek)
                if zbytek > 0:
                    #convert back to volume
                    zbytek = int(zbytek/float(data['p']))
                    #create a new open bar of size zbytek
                    initialize_unconfirmed(zbytek)
        #is the price unchanged from the previous trade? for an unconfirmed cbar we return only on a price change
        if self.last_price == data['p']:
            self.diff_price = False
        else:
            self.diff_price = True
        self.last_price = data['p']
        if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
            self.trades_too_close = True
        else:
            self.trades_too_close = False
        #store as the previous value (that is how open and close are told apart)
        self.lasttimestamp = data['t']
        self.iterace += 1
        # print(self.iterace, data)
        #if we have confirmed bars, FLUSH them together with any open bar (one was apparently created afterwards)
        if len(confirmedBars) > 0:
            return_set = confirmedBars + ([self.openedBar] if self.openedBar is not None else [])
            confirmedBars = []
            return return_set
        #no confirmed bars - FLUSH the open CBARDOLLAR bar - price changes are not checked, but rapid-fire trades are suppressed (unless they create a confirmed bar)
        if self.openedBar is not None and self.rectype == RecordType.CBARDOLLAR:
            #we also let the same price through (needed for MYSELL), but it was blocking rapid-fire trades, i.e. trades closer than GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN (1 ms)
            #if self.diff_price is True:
            if self.trades_too_close is False:
                return [self.openedBar]
            else:
                return []
        else:
            return []
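
A hedged numeric walk-through of the bucketing above (standalone, not part of the module): with a $10,000 bucket, a 700-share trade at $20.00 carries $14,000, so one full bar is confirmed and the rest stays open:

price, size, dollar_bucket = 20.00, 700, 10_000
shares_per_bar = int(dollar_bucket / price)            # 500 shares fill one $10,000 bar
full_bars, remainder = divmod(size, shares_per_bar)    # -> (1, 200)
print(full_bars, "confirmed bar(s),", remainder, "shares left in the unconfirmed bar")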
async def calculate_renko_bar(self, data, symbol): async def calculate_renko_bar(self, data, symbol):
"""" """"
Agreguje RENKO BARS - dle brick size Agreguje RENKO BARS - dle brick size
@ -566,8 +756,14 @@ class TradeAggregator:
Ve strategii je třeba počítat s tím, že open v nepotvrzeném baru není finální. Ve strategii je třeba počítat s tím, že open v nepotvrzeném baru není finální.
""""" """""
if self.resolution < 0: # Treat as percentage
reference_price = self.lastConfirmedBar['close'] if self.lastConfirmedBar is not None else float(data['p'])
brick_size = abs(self.resolution) * reference_price / 100.0
else: # Treat as absolute value pocet ticku
brick_size = self.resolution
#pocet ticku např. 10ticků, případně pak na procenta #pocet ticku např. 10ticků, případně pak na procenta
brick_size = self.resolution #brick_size = self.resolution
#potvrzene pripravene k vraceni #potvrzene pripravene k vraceni
confirmedBars = [] confirmedBars = []
#potvrdi existujici a nastavi k vraceni #potvrdi existujici a nastavi k vraceni
@ -598,7 +794,7 @@ class TradeAggregator:
"trades": 1, "trades": 1,
"hlcc4": data['p'], "hlcc4": data['p'],
"confirmed": 0, "confirmed": 0,
"time": datetime.fromtimestamp(data['t']), "time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
"updated": data['t'], "updated": data['t'],
"vwap": data['p'], "vwap": data['p'],
"index": self.barindex, "index": self.barindex,
@ -633,7 +829,7 @@ class TradeAggregator:
"trades": 1, "trades": 1,
"hlcc4":data['p'], "hlcc4":data['p'],
"confirmed": 1, "confirmed": 1,
"time": datetime.fromtimestamp(data['t']), "time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
"updated": data['t'], "updated": data['t'],
"vwap": data['p'], "vwap": data['p'],
"index": self.barindex, "index": self.barindex,
@ -676,7 +872,7 @@ class TradeAggregator:
self.diff_price = True self.diff_price = True
self.last_price = data['p'] self.last_price = data['p']
-if float(data['t']) - float(self.lasttimestamp) < GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN:
+if float(data['t']) - float(self.lasttimestamp) < cfh.config_handler.get_val('GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN'):
self.trades_too_close = True
else:
self.trades_too_close = False
@@ -709,7 +905,7 @@ class TradeAggregator:
#and also take excludes result = ''.join(self.excludes.sort())
self.excludes.sort() # Sorts the list in place
excludes_str = ''.join(map(str, self.excludes)) # Joins the sorted elements after converting them to strings
-cache_file = self.__class__.__name__ + '-' + self.symbol + '-' + str(int(date_from.timestamp())) + '-' + str(int(date_to.timestamp())) + '-' + str(self.rectype) + "-" + str(self.resolution) + "-" + str(self.minsize) + "-" + str(self.align) + '-' + str(self.mintick) + str(self.exthours) + excludes_str + '.cache'
+cache_file = self.__class__.__name__ + '-' + self.symbol + '-' + str(int(date_from.timestamp())) + '-' + str(int(date_to.timestamp())) + '-' + str(self.rectype) + "-" + str(self.resolution) + "-" + str(self.minsize) + "-" + str(self.align) + '-' + str(self.mintick) + str(self.exthours) + excludes_str + '.cache.gz'
file_path = DATA_DIR + "/aggcache/" + cache_file
#print(file_path)
return file_path
@@ -719,7 +915,7 @@ class TradeAggregator:
file_path = self.populate_file_name(date_from, date_to)
if self.skip_cache is False and os.path.exists(file_path):
##daily aggregated file exists
-with open (file_path, 'rb') as fp:
+with gzip.open (file_path, 'rb') as fp:
cachedobject = dill.load(fp)
print("AGG CACHE loaded ", file_path)
@@ -752,7 +948,7 @@ class TradeAggregator:
file_path = self.populate_file_name(self.cache_from, self.cache_to)
-with open(file_path, 'wb') as fp:
+with gzip.open(file_path, 'wb') as fp:
dill.dump(self.cached_object, fp)
print(f"AGG CACHE stored ({num}) :{file_path}")
print(f"DATES from:{self.cache_from.strftime('%d.%m.%Y %H:%M')} to:{self.cache_to.strftime('%d.%m.%Y %H:%M')}")
@@ -772,7 +968,7 @@ class TradeAggregator2Queue(TradeAggregator):
Child of TradeAggregator - sends items to given queue
In the future others will be added - TradeAggToTxT etc.
"""
-def __init__(self, symbol: str, queue: Queue, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = AGG_EXCLUDED_TRADES, skip_cache: bool = False):
+def __init__(self, symbol: str, queue: Queue, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES'), skip_cache: bool = False):
super().__init__(rectype=rectype, resolution=resolution, minsize=minsize, update_ltp=update_ltp, align=align, mintick=mintick, exthours=exthours, excludes=excludes, skip_cache=skip_cache)
self.queue = queue
self.symbol = symbol
@@ -817,7 +1013,7 @@ class TradeAggregator2List(TradeAggregator):
""""
stores records to the list
"""
-def __init__(self, symbol: str, btdata: list, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = AGG_EXCLUDED_TRADES, skip_cache: bool = False):
+def __init__(self, symbol: str, btdata: list, rectype: RecordType = RecordType.BAR, resolution: int = 5, minsize: int = 100, update_ltp: bool = False, align: StartBarAlign = StartBarAlign.ROUND, mintick: int = 0, exthours: bool = False, excludes: list = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES'), skip_cache: bool = False):
super().__init__(rectype=rectype, resolution=resolution, minsize=minsize, update_ltp=update_ltp, align=align, mintick=mintick, exthours=exthours, excludes=excludes, skip_cache=skip_cache)
self.btdata = btdata
self.symbol = symbol
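
Note on the hunks above: the aggregator cache now writes gzip-compressed dill files, and the suffix moves from .cache to .cache.gz so old uncompressed caches are never gzip-read by mistake. A minimal sketch of the round-trip, assuming only the gzip + dill usage visible in the diff; the helper names are illustrative:

import gzip
import dill

def store_cache(obj, file_path: str) -> None:
    # gzip.open returns a file object, so dill can serialize straight into it
    with gzip.open(file_path, 'wb') as fp:
        dill.dump(obj, fp)

def load_cache(file_path: str):
    # reading back works the same way; a plain (uncompressed) dill file would
    # fail here, which is why the cache suffix changed to .cache.gz
    with gzip.open(file_path, 'rb') as fp:
        return dill.load(fp)
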

View File

@@ -1,14 +1,13 @@
from v2realbot.loader.aggregator import TradeAggregator, TradeAggregator2List, TradeAggregator2Queue
#from v2realbot.loader.cacher import get_cached_agg_data
from alpaca.trading.requests import GetCalendarRequest
-from alpaca.trading.client import TradingClient
from alpaca.data.live import StockDataStream
-from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE
+from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
from alpaca.data.enums import DataFeed
from alpaca.data.historical import StockHistoricalDataClient
from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
from threading import Thread, current_thread
-from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, zoneNY
+from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, zoneNY, send_to_telegram, fetch_calendar_data
from v2realbot.utils.tlog import tlog
from datetime import datetime, timedelta, date
from threading import Thread
@@ -16,6 +15,7 @@ import asyncio
from msgpack.ext import Timestamp
from msgpack import packb
from pandas import to_datetime
+import gzip
import pickle
import os
from rich import print
@@ -25,13 +25,15 @@ from tqdm import tqdm
import time
from traceback import format_exc
from collections import defaultdict
+import requests
+import v2realbot.utils.config_handler as cfh
"""
Trade offline data streamer, based on Alpaca historical data.
"""
class Trade_Offline_Streamer(Thread):
#for BT we always connect to the primary account - we only pull historical data + the calendar
client = StockHistoricalDataClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=True)
-clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)
+#clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)
def __init__(self, time_from: datetime, time_to: datetime, btdata) -> None:
# Call the Thread class's init function
Thread.__init__(self)
@@ -63,6 +65,35 @@ class Trade_Offline_Streamer(Thread):
def stop(self):
pass
+def fetch_stock_trades(self, symbol, start, end, max_retries=5, backoff_factor=1):
+"""
+Attempts to fetch stock trades with exponential backoff. Raises an exception if all retries fail.
+:param symbol: The stock symbol to fetch trades for.
+:param start: The start time for the trade data.
+:param end: The end time for the trade data.
+:param max_retries: Maximum number of retries.
+:param backoff_factor: Factor to determine the next sleep time.
+:return: TradesResponse object.
+:raises: ConnectionError if all retries fail.
+"""
+stockTradeRequest = StockTradesRequest(symbol_or_symbols=symbol, start=start, end=end)
+last_exception = None
+for attempt in range(max_retries):
+try:
+tradesResponse = self.client.get_stock_trades(stockTradeRequest)
+print("Remote Fetch DAY DATA Complete", start, end)
+return tradesResponse
+except Exception as e:
+print(f"Attempt {attempt + 1} failed: {e}")
+last_exception = e
+time.sleep(backoff_factor * (2 ** attempt))
+print("All attempts to fetch data failed.")
+send_to_telegram(f"Failed to fetch stock trades after {max_retries} retries. Last exception: {str(last_exception)} and {format_exc()}")
+raise ConnectionError(f"Failed to fetch stock trades after {max_retries} retries. Last exception: {str(last_exception)} and {format_exc()}")
# Override the run() function of Thread class
#async removed
def main(self):
@@ -73,6 +104,8 @@ class Trade_Offline_Streamer(Thread):
print("call add streams to queue first")
return 0
+cfh.config_handler.print_current_config()
#iterate over the streams
for i in self.streams:
self.uniquesymbols.add(i.symbol)
@@ -106,25 +139,21 @@ class Trade_Offline_Streamer(Thread):
#datetime.fromtimestamp(data['updated']).astimezone(zoneNY))
#REFACTOR STARTS HERE
#print(f"{self.time_from=} {self.time_to=}")
-if OFFLINE_MODE:
+if cfh.config_handler.get_val('OFFLINE_MODE'):
#just one day - same like time_from
den = str(self.time_to.date())
bt_day = Calendar(date=den,open="9:30",close="16:00")
cal_dates = [bt_day]
else:
-calendar_request = GetCalendarRequest(start=self.time_from,end=self.time_to)
-#workaround for now - move this into a retry function and generally rethink the exception handling, so that I get notified and it shows up immediately in the log and on the frontend
-try:
-cal_dates = self.clientTrading.get_calendar(calendar_request)
-except Exception as e:
-print("CHYBA - retrying in 4s: " + str(e) + format_exc())
-time.sleep(5)
-cal_dates = self.clientTrading.get_calendar(calendar_request)
+start_date = self.time_from # Assuming this is your start date
+end_date = self.time_to # Assuming this is your end date
+cal_dates = fetch_calendar_data(start_date, end_date)
#only the main session is supported for now
+live_data_feed = cfh.config_handler.get_val('LIVE_DATA_FEED')
#only 1 symbol is supported for now, rework into a for-loop over all symbols in symbpole
#the minimal unit for the CACHE is 1 day - and only marketopen to marketclose (extended hours not supported yet)
for day in cal_dates:
@@ -167,9 +196,10 @@ class Trade_Offline_Streamer(Thread):
# stream.send_cache_to_output(cache)
# to_rem.append(stream)
-#we only deal with the cache when backtesting a whole day
+#we only deal with the cache when backtesting a whole day and have a SIP datapoint (IEX is not cached)
#if not, we neither read from nor write to the cache
-if self.time_to >= day.close:
+if (self.time_to >= day.close and self.time_from <= day.open) and live_data_feed == DataFeed.SIP:
#this block is bypassed if "dont_use_cache" is set
stream_btdata = self.to_run[symbpole[0]][0]
cache_btdata, file_btdata = stream_btdata.get_cache(day.open, day.close)
@@ -197,7 +227,7 @@ class Trade_Offline_Streamer(Thread):
stream_main.enable_cache_output(day.open, day.close)
#trade daily file
-daily_file = str(symbpole[0]) + '-' + str(int(day.open.timestamp())) + '-' + str(int(day.close.timestamp())) + '.cache'
+daily_file = str(symbpole[0]) + '-' + str(int(day.open.timestamp())) + '-' + str(int(day.close.timestamp())) + '.cache.gz'
print(daily_file)
file_path = DATA_DIR + "/tradecache/"+daily_file
@@ -207,23 +237,31 @@ class Trade_Offline_Streamer(Thread):
#if start_time < trade < end_time
#we send it to the queue
#otherwise pass
-with open (file_path, 'rb') as fp:
+with gzip.open (file_path, 'rb') as fp:
tradesResponse = pickle.load(fp)
print("Loading from Trade CACHE", file_path)
#daily file doesnt exist
else:
-# TODO refactor to process multiple symbols at once (multithreads), for now we assume only 1
-stockTradeRequest = StockTradesRequest(symbol_or_symbols=symbpole[0], start=day.open,end=day.close)
-tradesResponse = self.client.get_stock_trades(stockTradeRequest)
+#implement retry mechanism
+symbol = symbpole[0] # Assuming symbpole[0] is your target symbol
+day_open = day.open # Assuming day.open is the start time
+day_close = day.close # Assuming day.close is the end time
+tradesResponse = self.fetch_stock_trades(symbol, day_open, day_close)
+# # TODO refactor to process multiple symbols at once (multithreads), for now we assume only 1
+# stockTradeRequest = StockTradesRequest(symbol_or_symbols=symbpole[0], start=day.open,end=day.close)
+# tradesResponse = self.client.get_stock_trades(stockTradeRequest)
print("Remote Fetch DAY DATA Complete", day.open, day.close)
-#if this is today and the market has not closed yet, we do not save the cache
-if day.open < datetime.now().astimezone(zoneNY) < day.close:
-print("not saving trade cache, market still open today")
+#if this is today and the market has not closed yet, we do not save the cache; we also do not cache for the IEX datapoint
+if (day.open < datetime.now().astimezone(zoneNY) < day.close) or live_data_feed == DataFeed.IEX:
+print("not saving trade cache, market still open today or IEX datapoint")
#ic(datetime.now().astimezone(zoneNY))
#ic(day.open, day.close)
else:
-with open(file_path, 'wb') as fp:
+with gzip.open(file_path, 'wb') as fp:
pickle.dump(tradesResponse, fp)
#at this point we already have the daily data
@@ -257,7 +295,7 @@ class Trade_Offline_Streamer(Thread):
cnt = 1
-for t in tqdm(tradesResponse[symbol]):
+for t in tqdm(tradesResponse[symbol], desc="Loading Trades"):
#since the whole day is here, we only pass on the relevant ones
#if start_time < trade < end_time
@@ -270,6 +308,9 @@ class Trade_Offline_Streamer(Thread):
#tmp = to_datetime(t['t'], utc=True).timestamp()
+#occasionally a None row appeared in the response
+if t is None:
+continue
datum = to_datetime(t['t'], utc=True)
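
The retry helper above hard-codes the Alpaca call; the same exponential-backoff pattern can be expressed generically. A sketch only, with illustrative names (with_retries is not part of the repo):

import time

def with_retries(fn, max_retries=5, backoff_factor=1):
    # retry fn(), sleeping backoff_factor * 2**attempt seconds between tries
    last_exception = None
    for attempt in range(max_retries):
        try:
            return fn()
        except Exception as e:
            last_exception = e
            time.sleep(backoff_factor * (2 ** attempt))
    raise ConnectionError(f"all {max_retries} attempts failed: {last_exception}")

# hypothetical usage mirroring fetch_stock_trades:
# tradesResponse = with_retries(lambda: self.client.get_stock_trades(stockTradeRequest))
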

View File

@@ -4,7 +4,7 @@
"""
from v2realbot.loader.aggregator import TradeAggregator2Queue
from alpaca.data.live import StockDataStream
-from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_PAPER_FEED
+from v2realbot.config import LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY
from alpaca.data.historical import StockHistoricalDataClient
from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
from threading import Thread, current_thread
@@ -12,6 +12,7 @@ from v2realbot.utils.utils import parse_alpaca_timestamp, ltp
from datetime import datetime, timedelta
from threading import Thread, Lock
from msgpack import packb
+import v2realbot.utils.config_handler as cfh
"""
Shared streamer (can be shared amongst concurrently running strategies)
@@ -19,9 +20,12 @@ from msgpack import packb
by strategies
"""
class Trade_WS_Streamer(Thread):
+live_data_feed = cfh.config_handler.get_val('LIVE_DATA_FEED')
##this ws streamer is a single shared one for everybody, i.e. we always use the paid data of the primary account (no matter whether paper or live)
-client = StockDataStream(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=True, websocket_params={}, feed=ACCOUNT1_PAPER_FEED)
+msg = f"Realtime Websocket connection will use FEED: {live_data_feed} and credential of ACCOUNT1"
+print(msg)
+#cfh.config_handler.print_current_config()
+client = StockDataStream(LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY, raw_data=True, websocket_params={}, feed=live_data_feed)
#uniquesymbols = set()
_streams = []
#to_run = dict()
@@ -38,10 +42,23 @@ class Trade_WS_Streamer(Thread):
return False
def add_stream(self, obj: TradeAggregator2Queue):
+print(Trade_WS_Streamer.msg)
print("stav pred pridavanim", Trade_WS_Streamer._streams)
Trade_WS_Streamer._streams.append(obj)
if Trade_WS_Streamer.client._running is False:
print("websocket zatim nebezi, pouze pridavame do pole")
+#here we would refresh the client (if live_data_feed has changed)
+# live_data_feed = cfh.config_handler.get_val('LIVE_DATA_FEED')
+# #after testing, switch only if live_data_feed has changed
+# #if live_data_feed != Trade_WS_Streamer.live_data_feed:
+# # Trade_WS_Streamer.live_data_feed = live_data_feed
+# msg = f"REFRESH OF CLIENT! Realtime Websocket connection will use FEED: {live_data_feed} and credential of ACCOUNT1"
+# print(msg)
+# #cfh.config_handler.print_current_config()
+# Trade_WS_Streamer.client = StockDataStream(LIVE_DATA_API_KEY, LIVE_DATA_SECRET_KEY, raw_data=True, websocket_params={}, feed=live_data_feed)
else:
print("websocket client bezi")
if self.symbol_exists(obj.symbol):
@@ -59,7 +76,12 @@ class Trade_WS_Streamer(Thread):
#if it is the last item at all, stop the client from running
if len(Trade_WS_Streamer._streams) == 0:
print("removed last item from WS, stopping the client")
-Trade_WS_Streamer.client.stop()
+#Trade_WS_Streamer.client.stop_ws()
+#Trade_WS_Streamer.client.stop()
+#let's try to explicitly invoke the websocket disconnect steps
+if Trade_WS_Streamer.client._stop_stream_queue.empty():
+Trade_WS_Streamer.client._stop_stream_queue.put_nowait({"should_stop": True})
+Trade_WS_Streamer.client._should_run = False
return
if not self.symbol_exists(obj.symbol):
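
Context for the disconnect change above: the previously used client.stop() call is commented out, and the code instead performs the stop steps explicitly. A sketch of that pattern, with the caveat that _stop_stream_queue and _should_run are private attributes of the alpaca-py stream object and may change between library versions:

def stop_stream(client) -> None:
    # signal the stream coroutine to stop, then prevent automatic reconnects
    if client._stop_stream_queue.empty():
        client._stop_stream_queue.put_nowait({"should_stop": True})
    client._should_run = False
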

View File

@@ -1,26 +1,25 @@
import os,sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY, LOG_FILE
+os.environ["KERAS_BACKEND"] = "jax"
+from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY, LOG_PATH, MODEL_DIR
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from datetime import datetime
-import os
from rich import print
-from fastapi import FastAPI, Depends, HTTPException, status
+from fastapi import FastAPI, Depends, HTTPException, status, File, UploadFile, Response
from fastapi.security import APIKeyHeader
import uvicorn
from uuid import UUID
-import v2realbot.controller.services as cs
from v2realbot.utils.ilog import get_log_window
-from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest
+from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query
-from fastapi.responses import FileResponse, StreamingResponse
+from fastapi.responses import FileResponse, StreamingResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from v2realbot.enums.enums import Env, Mode
from typing import Annotated
import os
import uvicorn
-import json
+import orjson
from queue import Queue, Empty
from threading import Thread
import asyncio
@@ -33,7 +32,18 @@ from time import sleep
import v2realbot.reporting.metricstools as mt
from v2realbot.reporting.metricstoolsimage import generate_trading_report_image
from traceback import format_exc
-from v2realbot.reporting.optimizecutoffs import find_optimal_cutoff
+#from v2realbot.reporting.optimizecutoffs import find_optimal_cutoff
+import v2realbot.reporting.analyzer as ci
+import shutil
+from starlette.responses import JSONResponse
+import mlroom
+import mlroom.utils.mlutils as ml
+from typing import List
+import v2realbot.controller.run_manager as rm
+import v2realbot.scheduler.ap_scheduler as aps
+import re
+import v2realbot.controller.configs as cf
+import v2realbot.controller.services as cs
#from async io import Queue, QueueEmpty
#
# install()
@@ -244,11 +254,13 @@ def _run_stratin(stratin_id: UUID, runReq: RunRequest):
runReq.bt_to = zoneNY.localize(runReq.bt_to)
#if we run over test intervals or more days are requested - we run it as a batch, day by day
#in the future expose this on the FE as a flag
-if runReq.mode != Mode.LIVE and runReq.test_batch_id is not None or (runReq.bt_from.date() != runReq.bt_to.date()):
+#print(runReq)
+if runReq.mode not in [Mode.LIVE, Mode.PAPER] and (runReq.test_batch_id is not None or (runReq.bt_from is not None and runReq.bt_to is not None and runReq.bt_from.date() != runReq.bt_to.date())):
res, id = cs.run_batch_stratin(id=stratin_id, runReq=runReq)
else:
-if runReq.weekdays_filter is not None:
-raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Weekday only for backtest mode with batch (not single day)")
+#not necessary for live/paper, the weekdays are simply ignored; in the future maybe add validation if weekdays are present
+#if runReq.weekdays_filter is not None:
+# raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Weekday only for backtest mode with batch (not single day)")
res, id = cs.run_stratin(id=stratin_id, runReq=runReq)
if res == 0: return id
elif res < 0:
@@ -324,14 +336,14 @@ def migrate():
end_positions=row.get('end_positions'),
end_positions_avgp=row.get('end_positions_avgp'),
metrics=row.get('open_orders'),
-#metrics=json.loads(row.get('metrics')) if row.get('metrics') else None,
+#metrics=orjson.loads(row.get('metrics')) if row.get('metrics') else None,
stratvars_toml=row.get('stratvars_toml')
)
def get_all_archived_runners():
conn = pool.get_connection()
try:
-conn.row_factory = lambda c, r: json.loads(r[0])
+conn.row_factory = lambda c, r: orjson.loads(r[0])
c = conn.cursor()
res = c.execute(f"SELECT data FROM runner_header")
finally:
@@ -376,7 +388,7 @@ def migrate():
SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=?
WHERE runner_id=?
''',
-(str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))
+(str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, orjson.dumps(ra.strat_json).decode('utf-8'), orjson.dumps(ra.settings).decode('utf-8'), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, orjson.dumps(ra.metrics).decode('utf-8'), ra.stratvars_toml, str(ra.id)))
conn.commit()
finally:
@@ -454,6 +466,16 @@ def _delete_archived_runners_byIDs(runner_ids: list[UUID]):
elif res < 0:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Error: {res}:{id}")
+#get runners list based on batch_id
+@app.get("/archived_runners/batch/{batch_id}", dependencies=[Depends(api_key_auth)])
+def _get_archived_runnerslist_byBatchID(batch_id: str) -> list[UUID]:
+res, set =cs.get_archived_runnerslist_byBatchID(batch_id)
+if res == 0:
+return set
+else:
+raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
#delete archive runner from header and detail
@app.delete("/archived_runners/batch/{batch_id}", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK)
def _delete_archived_runners_byBatchID(batch_id: str):
@@ -465,10 +487,11 @@ def _delete_archived_runners_byBatchID(batch_id: str):
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not changed: {res}:{batch_id}:{id}")
-#WIP - TOM indicator preview from frontend
+#WIP - TOM indicator preview from frontend f
-#return indicator value for archived runner
+#return indicator value for archived runner, return values list0 - bar indicators, list1 - ticks indicators
+#TBD maybe rework this into a dict for clarity
@app.put("/archived_runners/{runner_id}/previewindicator", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK)
-def _preview_indicator_byTOML(runner_id: UUID, indicator: InstantIndicator) -> list[float]:
+def _preview_indicator_byTOML(runner_id: UUID, indicator: InstantIndicator) -> list[dict]:
#maybe add name later
res, vals = cs.preview_indicator_byTOML(id=runner_id, indicator=indicator)
if res == 0: return vals
@@ -509,13 +532,23 @@ def _get_all_archived_runners_detail() -> list[RunArchiveDetail]:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
#get archived runners detail by id
+# @app.get("/archived_runners_detail/{runner_id}", dependencies=[Depends(api_key_auth)])
+# def _get_archived_runner_details_byID(runner_id) -> RunArchiveDetail:
+# res, set = cs.get_archived_runner_details_byID(runner_id)
+# if res == 0:
+# return set
+# else:
+# raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id} a {set}")
+#this is the variant of the above that skips parsing of the json and returns the JSON string as returned from the db
@app.get("/archived_runners_detail/{runner_id}", dependencies=[Depends(api_key_auth)])
-def _get_archived_runner_details_byID(runner_id) -> RunArchiveDetail:
+def _get_archived_runner_details_byID(runner_id: UUID):
-res, set = cs.get_archived_runner_details_byID(runner_id)
+res, data = cs.get_archived_runner_details_byID(id=runner_id, parsed=False)
if res == 0:
-return set
+# Return the raw JSON string as a plain Response
+return Response(content=data, media_type="application/json")
else:
-raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id} a {set}")
+raise HTTPException(status_code=404, detail=f"No runner with id: {runner_id}. {data}")
#get archived runners detail by id
@app.get("/archived_runners_log/{runner_id}", dependencies=[Depends(api_key_auth)])
@@ -526,30 +559,68 @@ def _get_archived_runner_log_byID(runner_id: UUID, timestamp_from: float, timest
else:
raise HTTPException(status_code=404, detail=f"No logs found with id: {runner_id} and between {timestamp_from} and {timestamp_to}")
+def remove_ansi_codes(text):
+ansi_escape = re.compile(r'\x1B[@-_][0-?]*[ -/]*[@-~]')
+return ansi_escape.sub('', text)
# endregion
# A simple function to read the last lines of a file
-def tail(file_path, n=10, buffer_size=1024):
-with open(file_path, 'rb') as f:
-f.seek(0, 2) # Move to the end of the file
-file_size = f.tell()
-lines = []
-buffer = bytearray()
-for i in range(file_size // buffer_size + 1):
-read_start = max(-buffer_size * (i + 1), -file_size)
-f.seek(read_start, 2)
-read_size = min(buffer_size, file_size - buffer_size * i)
-buffer[0:0] = f.read(read_size) # Prepend to buffer
-if buffer.count(b'\n') >= n + 1:
-break
-lines = buffer.decode(errors='ignore').splitlines()[-n:]
-return lines
+# def tail(file_path, n=10, buffer_size=1024):
+# try:
+# with open(file_path, 'rb') as f:
+# f.seek(0, 2) # Move to the end of the file
+# file_size = f.tell()
+# lines = []
+# buffer = bytearray()
+# for i in range(file_size // buffer_size + 1):
+# read_start = max(-buffer_size * (i + 1), -file_size)
+# f.seek(read_start, 2)
+# read_size = min(buffer_size, file_size - buffer_size * i)
+# buffer[0:0] = f.read(read_size) # Prepend to buffer
+# if buffer.count(b'\n') >= n + 1:
+# break
+# lines = buffer.decode(errors='ignore').splitlines()[-n:]
+# lines = [remove_ansi_codes(line) for line in lines]
+# return lines
+# except Exception as e:
+# return [str(e) + format_exc()]
+#updated version that reads lines line by line
+def tail(file_path, n=10):
+try:
+with open(file_path, 'rb') as f:
+f.seek(0, 2) # Move to the end of the file
+file_size = f.tell()
+lines = []
+line = b''
+f.seek(-1, 2) # Start at the last byte
+while len(lines) < n and f.tell() != 0:
+byte = f.read(1)
+if byte == b'\n':
+# Decode, remove ANSI codes, and append the line
+lines.append(remove_ansi_codes(line.decode(errors='ignore')))
+line = b''
+else:
+line = byte + line
+f.seek(-2, 1) # Move backwards by two bytes
+if line:
+# Append any remaining line after removing ANSI codes
+lines.append(remove_ansi_codes(line.decode(errors='ignore')))
+return lines[::-1] # Reverse the list to get the lines in correct order
+except Exception as e:
+return [str(e)]
@app.get("/log", dependencies=[Depends(api_key_auth)])
-def read_log(lines: int = 10):
+def read_log(lines: int = 700, logfile: str = "strat.log"):
-log_path = LOG_FILE
+log_path = LOG_PATH / logfile
return {"lines": tail(log_path, lines)}
#get alpaca history bars
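
A quick illustration of the two helpers added above: remove_ansi_codes strips terminal color sequences so the /log endpoint returns clean text, and tail reads the file backwards byte by byte until n lines are collected. Hypothetical usage (the log file name is illustrative):

sample = "\x1b[31mERROR\x1b[0m strategy failed"
print(remove_ansi_codes(sample))  # prints: ERROR strategy failed
print(tail("strat.log", n=20))    # last 20 lines, oldest first, ANSI codes stripped
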
@@ -583,23 +654,46 @@ def _generate_report_image(runner_ids: list[UUID]):
res, stream = generate_trading_report_image(runner_ids=runner_ids,stream=True)
if res == 0: return StreamingResponse(stream, media_type="image/png",headers={"Content-Disposition": "attachment; filename=report.png"})
elif res < 0:
-raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {res}:{id}")
+raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {res}:{stream}")
except Exception as e:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {str(e)}" + format_exc())
#TODO this will become the basis of a generic function that calls various analyses
#the input will be a generic object carrying the analysis name + its attributes
-@app.post("/batches/optimizecutoff/{batch_id}", dependencies=[Depends(api_key_auth)], responses={200: {"content": {"image/png": {}}}})
+@app.post("/batches/optimizecutoff", dependencies=[Depends(api_key_auth)], responses={200: {"content": {"image/png": {}}}})
-def _generate_analysis(batch_id: str):
+def _optimize_cutoff(analyzerInputs: AnalyzerInputs):
try:
-res, stream = find_optimal_cutoff(batch_id=batch_id, steps=50, stream=True)
+if len(analyzerInputs.runner_ids) == 0 and analyzerInputs.batch_id is None:
+raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: batch_id or runner_ids required")
+#will be reworked into a generic analyzer with a generic interface
+res, stream = ci.find_optimal_cutoff.find_optimal_cutoff(runner_ids=analyzerInputs.runner_ids, batch_id=analyzerInputs.batch_id, stream=True, **analyzerInputs.params)
if res == 0: return StreamingResponse(stream, media_type="image/png",headers={"Content-Disposition": "attachment; filename=optimizedcutoff.png"})
elif res < 0:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {res}:{id}")
except Exception as e:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {str(e)}" + format_exc())
+#generic function for analyses
+#the input is a generic object carrying the analysis name + its attributes
+@app.post("/batches/analytics", dependencies=[Depends(api_key_auth)], responses={200: {"content": {"image/png": {}}}})
+def _generate_analysis(analyzerInputs: AnalyzerInputs):
+try:
+if (analyzerInputs.runner_ids is None or len(analyzerInputs.runner_ids) == 0) and analyzerInputs.batch_id is None:
+raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: batch_id or runner_ids required")
+funct = "ci."+analyzerInputs.function+"."+analyzerInputs.function
+custom_function = eval(funct)
+stream = None
+res, stream = custom_function(runner_ids=analyzerInputs.runner_ids, batch_id=analyzerInputs.batch_id, stream=True, **analyzerInputs.params)
+if res == 0: return StreamingResponse(stream, media_type="image/png")
+elif res < 0:
+print("Error when generating analysis: ",str(stream))
+raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {res}:{stream}")
+except Exception as e:
+raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {str(e)}" + format_exc())
#TestList APIs - in the future move the SQL into separate functions
@app.post('/testlists/', dependencies=[Depends(api_key_auth)])
@@ -610,7 +704,7 @@ def create_record(testlist: TestList):
# Insert the record into the database
conn = pool.get_connection()
cursor = conn.cursor()
-cursor.execute("INSERT INTO test_list (id, name, dates) VALUES (?, ?, ?)", (testlist.id, testlist.name, json.dumps(testlist.dates, default=json_serial)))
+cursor.execute("INSERT INTO test_list (id, name, dates) VALUES (?, ?, ?)", (testlist.id, testlist.name, orjson.dumps(testlist.dates, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8')))
conn.commit()
pool.release_connection(conn)
return testlist
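
Why the orjson options above matter: orjson.dumps returns bytes (hence the .decode('utf-8') before the value goes into SQLite), and it serializes datetimes natively unless OPT_PASSTHROUGH_DATETIME routes them through the default= callable, which preserves the behaviour of the old json.dumps(..., default=json_serial) calls. A self-contained sketch, assuming json_serial looks roughly like the repo's helper:

import orjson
from datetime import datetime, date

def json_serial(obj):
    # fallback serializer, as commonly paired with json.dumps(default=...)
    if isinstance(obj, (datetime, date)):
        return obj.isoformat()
    raise TypeError(f"Type {type(obj)} not serializable")

dates = [datetime(2024, 3, 6, 14, 30)]
encoded = orjson.dumps(dates, default=json_serial,
                       option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8')
# encoded == '["2024-03-06T14:30:00"]'
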
@@ -626,7 +720,7 @@ def get_testlists():
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
# API endpoint to retrieve a single record by ID
-@app.get('/testlists/{record_id}')
+@app.get('/testlists/{record_id}', dependencies=[Depends(api_key_auth)])
def get_testlist(record_id: str):
res, testlist = cs.get_testlist_byID(record_id=record_id)
@@ -636,7 +730,7 @@ def get_testlist(record_id: str):
raise HTTPException(status_code=404, detail='Record not found')
# API endpoint to update a record
-@app.put('/testlists/{record_id}')
+@app.put('/testlists/{record_id}', dependencies=[Depends(api_key_auth)])
def update_testlist(record_id: str, testlist: TestList):
# Check if the record exists
conn = pool.get_connection()
@@ -648,7 +742,7 @@ def update_testlist(record_id: str, testlist: TestList):
raise HTTPException(status_code=404, detail='Record not found')
# Update the record in the database
-cursor.execute("UPDATE test_list SET name = ?, dates = ? WHERE id = ?", (testlist.name, json.dumps(testlist.dates, default=json_serial), record_id))
+cursor.execute("UPDATE test_list SET name = ?, dates = ? WHERE id = ?", (testlist.name, orjson.dumps(testlist.dates, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), record_id))
conn.commit()
pool.release_connection(conn)
@@ -656,7 +750,7 @@ def update_testlist(record_id: str, testlist: TestList):
return testlist
# API endpoint to delete a record
-@app.delete('/testlists/{record_id}')
+@app.delete('/testlists/{record_id}', dependencies=[Depends(api_key_auth)])
def delete_testlist(record_id: str):
# Check if the record exists
conn = pool.get_connection()
@@ -679,7 +773,7 @@ def delete_testlist(record_id: str):
# Get all config items
@app.get("/config-items/", dependencies=[Depends(api_key_auth)])
def get_all_items() -> list[ConfigItem]:
-res, sada = cs.get_all_config_items()
+res, sada = cf.get_all_config_items()
if res == 0:
return sada
else:
@@ -689,7 +783,7 @@ def get_all_items() -> list[ConfigItem]:
# Get a config item by ID
@app.get("/config-items/{item_id}", dependencies=[Depends(api_key_auth)])
def get_item(item_id: int)-> ConfigItem:
-res, sada = cs.get_config_item_by_id(item_id)
+res, sada = cf.get_config_item_by_id(item_id)
if res == 0:
return sada
else:
@@ -698,7 +792,7 @@ def get_item(item_id: int)-> ConfigItem:
# Get a config item by Name
@app.get("/config-items-by-name/", dependencies=[Depends(api_key_auth)])
def get_item(item_name: str)-> ConfigItem:
-res, sada = cs.get_config_item_by_name(item_name)
+res, sada = cf.get_config_item_by_name(item_name)
if res == 0:
return sada
else:
@@ -707,7 +801,7 @@ def get_item(item_name: str)-> ConfigItem:
# Create a new config item
@app.post("/config-items/", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK)
def create_item(config_item: ConfigItem) -> ConfigItem:
-res, sada = cs.create_config_item(config_item)
+res, sada = cf.create_config_item(config_item)
if res == 0: return sada
else:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not created: {res}:{id} {sada}")
@@ -716,11 +810,11 @@ def create_item(config_item: ConfigItem) -> ConfigItem:
# Update a config item by ID
@app.put("/config-items/{item_id}", dependencies=[Depends(api_key_auth)])
def update_item(item_id: int, config_item: ConfigItem) -> ConfigItem:
-res, sada = cs.get_config_item_by_id(item_id)
+res, sada = cf.get_config_item_by_id(item_id)
if res != 0:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
-res, sada = cs.update_config_item(item_id, config_item)
+res, sada = cf.update_config_item(item_id, config_item)
if res == 0: return sada
else:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not created: {res}:{id}")
@@ -729,17 +823,171 @@ def update_item(item_id: int, config_item: ConfigItem) -> ConfigItem:
# Delete a config item by ID
@app.delete("/config-items/{item_id}", dependencies=[Depends(api_key_auth)])
def delete_item(item_id: int) -> dict:
-res, sada = cs.get_config_item_by_id(item_id)
+res, sada = cf.get_config_item_by_id(item_id)
if res != 0:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")
-res, sada = cs.delete_config_item(item_id)
+res, sada = cf.delete_config_item(item_id)
if res == 0: return sada
else:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not created: {res}:{id}")
# endregion
+# region scheduler
+# 1. Fetch All RunManagerRecords
+@app.get("/run_manager_records/", dependencies=[Depends(api_key_auth)], response_model=List[RunManagerRecord])
+#TODO consider extending the output with strat_status (running/stopped)
+def get_all_run_manager_records():
+result, records = rm.fetch_all_run_manager_records()
+if result != 0:
+raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Error fetching records")
+return records
+# 2. Fetch RunManagerRecord by ID
+@app.get("/run_manager_records/{record_id}", dependencies=[Depends(api_key_auth)], response_model=RunManagerRecord)
+#TODO consider extending the output with strat_status (running/stopped)
+def get_run_manager_record(record_id: UUID):
+result, record = rm.fetch_run_manager_record_by_id(record_id)
+if result == -2: # Record not found
+raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Record not found")
+elif result != 0:
+raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Error fetching record")
+return record
+# 3. Update RunManagerRecord
+@app.patch("/run_manager_records/{record_id}", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK)
+def update_run_manager_record(record_id: UUID, update_data: RunManagerRecord):
+#make dates zone aware zoneNY
+# if update_data.valid_from is not None:
+# update_data.valid_from = zoneNY.localize(update_data.valid_from)
+# if update_data.valid_to is not None:
+# update_data.valid_to = zoneNY.localize(update_data.valid_to)
+result, message = rm.update_run_manager_record(record_id, update_data)
+if result == -2: # Update failed
+raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
+elif result != 0:
+raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error during update {result} {message}")
+return {"message": "Record updated successfully"}
+# 4. Delete RunManagerRecord
+@app.delete("/run_manager_records/{record_id}", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK)
+def delete_run_manager_record(record_id: UUID):
+result, message = rm.delete_run_manager_record(record_id)
+if result == -2: # Delete failed
+raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message)
+elif result != 0:
+raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error during deletion {result} {message}")
+return {"message": "Record deleted successfully"}
+@app.post("/run_manager_records/", status_code=status.HTTP_201_CREATED)
+def create_run_manager_record(new_record: RunManagerRecord, api_key_auth: Depends = Depends(api_key_auth)):
+#make date zone aware - convert to zoneNY
+# if new_record.valid_from is not None:
+# new_record.valid_from = zoneNY.localize(new_record.valid_from)
+# if new_record.valid_to is not None:
+# new_record.valid_to = zoneNY.localize(new_record.valid_to)
+result, record_id = rm.add_run_manager_record(new_record)
+if result != 0:
+raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error during record creation: {result} {record_id}")
+return {"id": record_id}
+# endregion
+#model section
+#UPLOAD MODEL
+@app.post("/model/upload_model", dependencies=[Depends(api_key_auth)])
+async def _upload_model(file: UploadFile = File(...)):
+# Specify the directory to save the file
+#save_directory = DATA_DIR+'/models/'
+save_directory = MODEL_DIR
+os.makedirs(save_directory, exist_ok=True)
+# Extract just the filename, discarding any path information
+base_filename = os.path.basename(file.filename)
+file_path = os.path.join(save_directory, base_filename)
+# Save the uploaded file
+with open(file_path, "wb") as buffer:
+while True:
+data = await file.read(1024) # Read in chunks
+if not data:
+break
+buffer.write(data)
+print(f"saved to {file_path=} file:{base_filename=}")
+return {"filename": base_filename, "location": file_path}
+#LIST MODELS
+@app.get("/model/list-models", dependencies=[Depends(api_key_auth)])
+def list_models():
+#models_directory = DATA_DIR + '/models/'
+models_directory = MODEL_DIR
+# Ensure the directory exists
+if not os.path.exists(models_directory):
+return {"error": "Models directory does not exist."}
+# List all files in the directory
+model_files = sorted(os.listdir(models_directory))
+return {"models": model_files}
+@app.post("/model/upload-model", dependencies=[Depends(api_key_auth)])
+def upload_model(file: UploadFile = File(...)):
+if not file:
+raise HTTPException(status_code=400, detail="No file uploaded.")
+file_location = os.path.join(MODEL_DIR, file.filename)
+with open(file_location, "wb+") as file_object:
+shutil.copyfileobj(file.file, file_object)
+return JSONResponse(status_code=200, content={"message": "Model uploaded successfully."})
+@app.delete("/model/delete-model/{model_name}", dependencies=[Depends(api_key_auth)])
+def delete_model(model_name: str):
+model_path = os.path.join(MODEL_DIR, model_name)
+if os.path.exists(model_path):
+os.remove(model_path)
+return {"message": "Model deleted successfully."}
+else:
+raise HTTPException(status_code=404, detail="Model not found.")
+@app.get("/model/download-model/{model_name}", dependencies=[Depends(api_key_auth)])
+def download_model(model_name: str):
+model_path = os.path.join(MODEL_DIR, model_name)
+if os.path.exists(model_path):
+return FileResponse(path=model_path, filename=model_name, media_type='application/octet-stream')
+else:
+raise HTTPException(status_code=404, detail="Model not found.")
+@app.get("/model/metadata/{model_name}", dependencies=[Depends(api_key_auth)])
+def get_metadata(model_name: str):
+try:
+#we load only in cfg-only mode
+model_instance = ml.load_model(file=model_name, directory=MODEL_DIR, cfg_only = True)
+try:
+metadata = model_instance.metadata
+except AttributeError:
+metadata = model_instance.__dict__
+del metadata["scalerX"]
+del metadata["scalerY"]
+del metadata["model"]
+except Exception as e:
+metadata = "No Metada" + str(e) + format_exc()
+return metadata
+except Exception as e:
+raise HTTPException(status_code=404, detail="Model not found."+str(e) + format_exc())
+# model_path = os.path.join(MODEL_DIR, model_name)
+# if os.path.exists(model_path):
+# # Example: Retrieve metadata from a file or generate it
+# metadata = {
+# "name": model_name,
+# "size": os.path.getsize(model_path),
+# "last_modified": os.path.getmtime(model_path),
+# # ... other metadata fields ...
+# }
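
A hypothetical client-side call for the upload endpoint above, using the requests library; the URL, API key header name, and file name are assumptions (the header is whatever APIKeyHeader is configured with in this app):

import requests

with open("model.keras", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/model/upload_model",
        headers={"X-API-Key": "<WEB_API_KEY>"},
        files={"file": ("model.keras", f)},
    )
print(resp.json())  # e.g. {"filename": "model.keras", "location": ".../models/model.keras"}
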
# Thread function to insert data from the queue into the database
def insert_queue2db():
@@ -754,7 +1002,7 @@ def insert_queue2db():
c = insert_conn.cursor()
insert_data = []
for i in loglist:
-row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
+row = (str(runner_id), i["time"], orjson.dumps(i, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME|orjson.OPT_NON_STR_KEYS).decode('utf-8'))
insert_data.append(row)
c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
insert_conn.commit()
@@ -766,7 +1014,10 @@ def insert_queue2db():
insert_queue.put(data) # Put the data back into the queue for retry
sleep(1) # You can adjust the sleep duration
else:
raise # If it's another error, raise it
+except Exception as e:
+print("ERROR INSERT LOGQUEUE MODULE:" + str(e)+format_exc())
+print(data)
#join - wait for all of them to finish
for i in cs.db.runners:
@@ -779,15 +1030,25 @@ if __name__ == "__main__":
insert_thread = Thread(target=insert_queue2db)
insert_thread.start()
+#attach debugger to be able to debug scheduler jobs (run in separate threads)
+# debugpy.listen(('localhost', 5678))
+# print("Waiting for debugger to attach...")
+# debugpy.wait_for_client() # Script will pause here until debugger is attached
+#init scheduled tasks from the schedule table
+#Add APS scheduler job refresh
+res, result = aps.initialize_jobs()
+if res < 0:
+#raise exception
+raise Exception(f"Error {res} initializing APS jobs, error {result}")
uvicorn.run("__main__:app", host="0.0.0.0", port=8000, reload=False)
+except Exception as e:
+print("Error intializing app: " + str(e) + format_exc())
+aps.scheduler.shutdown(wait=False)
finally:
print("closing insert_conn connection")
insert_conn.close()
print("closed")
+##TODO add the option to run on PAPER and LIVE per strategy
+# find out whether the order notification websocket can run on both at the same time
+# if not, I could use only live data
+# and for paper trading (live interface) and notifications I would use a separate paper account
+# that would probably work
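
The startup block above delegates job initialization to v2realbot.scheduler.ap_scheduler (aps), which is not shown in this diff. A minimal sketch of the underlying APScheduler pattern it presumably wraps, with illustrative job contents:

from apscheduler.schedulers.background import BackgroundScheduler

scheduler = BackgroundScheduler()

def initialize_jobs():
    # in the real module the jobs would come from the schedule table
    try:
        scheduler.add_job(lambda: print("tick"), "interval", minutes=1)
        scheduler.start()
        return 0, "ok"
    except Exception as e:
        return -1, str(e)

# mirrors the res/result convention used at startup above:
# res, result = initialize_jobs()
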

View File

@@ -1,389 +0,0 @@
# from sklearn.preprocessing import StandardScaler
# # from keras.models import Sequential
# from v2realbot.enums.enums import PredOutput, Source, TargetTRFM
# from v2realbot.config import DATA_DIR
# from joblib import dump
# # import v2realbot.ml.mlutils as mu
# from v2realbot.utils.utils import slice_dict_lists
# import numpy as np
# from copy import deepcopy
# import v2realbot.controller.services as cs
# #Basic classes for machine learning
# #holds the model and its basic settings
# #Sample Data
# sample_bars = {
# 'time': [1, 2, 3, 4, 5,6,7,8,9,10,11,12,13,14,15],
# 'high': [10, 11, 12, 13, 14,10, 11, 12, 13, 14,10, 11, 12, 13, 14],
# 'low': [8, 9, 7, 6, 8,8, 9, 7, 6, 8,8, 9, 7, 6, 8],
# 'volume': [1000, 1200, 900, 1100, 1300,1000, 1200, 900, 1100, 1300,1000, 1200, 900, 1100, 1300],
# 'close': [9, 10, 11, 12, 13,9, 10, 11, 12, 13,9, 10, 11, 12, 13],
# 'open': [9, 10, 8, 8, 8,9, 10, 8, 8, 8,9, 10, 8, 8, 8],
# 'resolution': [1, 1, 1, 1, 1,1, 1, 1, 1, 1,1, 1, 1, 1, 1]
# }
# sample_indicators = {
# 'time': [1, 2, 3, 4, 5,6,7,8,9,10,11,12,13,14,15],
# 'fastslope': [90, 95, 100, 110, 115,90, 95, 100, 110, 115,90, 95, 100, 110, 115],
# 'fsdelta': [90, 95, 100, 110, 115,90, 95, 100, 110, 115,90, 95, 100, 110, 115],
# 'fastslope2': [90, 95, 100, 110, 115,90, 95, 100, 110, 115,90, 95, 100, 110, 115],
# 'ema': [1000, 1200, 900, 1100, 1300,1000, 1200, 900, 1100, 1300,1000, 1200, 900, 1100, 1300]
# }
# #Class that holds an ML model instance and its configuration
# #also used as a tool to prepare data for training and prediction
# #note: the class does not hold the data itself, only the configuration and then the model itself
# class ModelML:
# def __init__(self, name: str,
# pred_output: PredOutput,
# bar_features: list,
# ind_features: list,
# input_sequences: int,
# target: str,
# target_reference: str,
# train_target_steps: int, #train
# train_target_transformation: TargetTRFM, #train
# train_epochs: int, #train
# train_runner_ids: list = None, #train
# train_batch_id: str = None, #train
# version: str = "1",
# note : str = None,
# use_bars: bool = True,
# train_remove_cross_sequences: bool = False, #train
# #StandardScaler by default
# scalerX: StandardScaler = StandardScaler(),
# scalerY: StandardScaler = StandardScaler(),
# model, #Sequential = Sequential()
# )-> None:
# self.name = name
# self.version = version
# self.note = note
# self.pred_output: PredOutput = pred_output
# #the model can also be without bars, i.e. indicators only
# self.use_bars = use_bars
# #ensure the ordering
# bar_features.sort()
# ind_features.sort()
# self.bar_features = bar_features
# self.ind_features = ind_features
# if (train_runner_ids is None or len(train_runner_ids) == 0) and train_batch_id is None:
# raise Exception("train_runner_ids nebo train_batch_id musi byt vyplnene")
# self.train_runner_ids = train_runner_ids
# self.train_batch_id = train_batch_id
# #target - the target column, used directly or transformed to binary
# self.target = target
# self.target_reference = target_reference
# self.train_target_steps = train_target_steps
# self.train_target_transformation = train_target_transformation
# self.input_sequences = input_sequences
# self.train_epochs = train_epochs
# #keep cross sequences between runners
# self.train_remove_cross_sequences = train_remove_cross_sequences
# self.scalerX = scalerX
# self.scalerY = scalerY
# self.model = model
# def save(self):
# filename = mu.get_full_filename(self.name,self.version)
# dump(self, filename)
# print(f"model {self.name} save")
# #create X data with features
# def column_stack_source(self, bars, indicators, verbose = 1) -> np.array:
# #create SOURCE DATA with features
# # bars and indicators dictionary and features as input
# poradi_sloupcu_inds = [feature for feature in self.ind_features if feature in indicators]
# indicator_data = np.column_stack([indicators[feature] for feature in self.ind_features if feature in indicators])
# if len(bars)>0:
# bar_data = np.column_stack([bars[feature] for feature in self.bar_features if feature in bars])
# poradi_sloupcu_bars = [feature for feature in self.bar_features if feature in bars]
# if verbose == 1:
# print("poradi sloupce v source_data", str(poradi_sloupcu_bars + poradi_sloupcu_inds))
# combined_day_data = np.column_stack([bar_data,indicator_data])
# else:
# combined_day_data = indicator_data
# if verbose == 1:
# print("poradi sloupce v source_data", str(poradi_sloupcu_inds))
# return combined_day_data
# #create TARGET(Y) data
# def column_stack_target(self, bars, indicators) -> np.array:
# target_base = []
# target_reference = []
# try:
# try:
# target_base = bars[self.target]
# except KeyError:
# target_base = indicators[self.target]
# try:
# target_reference = bars[self.target_reference]
# except KeyError:
# target_reference = indicators[self.target_reference]
# except KeyError:
# pass
# target_day_data = np.column_stack([target_base, target_reference])
# return target_day_data
# def load_runners_as_list(self, runner_id_list = None, batch_id = None):
# """Loads all runners data (bars, indicators) for given runners into list of dicts.
# List of runners/train_batch_id may be provided, or self.train_runner_ids/train_batch_id is taken instead.
# Returns:
# tuple (barslist, indicatorslist) - lists with dictionaries for each runner
# """
# if runner_id_list is not None:
# runner_ids = runner_id_list
# print("loading runners for ",str(runner_id_list))
# elif batch_id is not None:
# print("Loading runners for train_batch_id:", batch_id)
# res, runner_ids = cs.get_archived_runnerslist_byBatchID(batch_id)
# elif self.train_batch_id is not None:
# print("Loading runners for TRAINING BATCH self.train_batch_id:", self.train_batch_id)
# res, runner_ids = cs.get_archived_runnerslist_byBatchID(self.train_batch_id)
# #otherwise fall back to the configured list of runners
# else:
# runner_ids = self.train_runner_ids
# print("loading runners for TRAINING runners ",str(self.train_runner_ids))
# barslist = []
# indicatorslist = []
# ind_keys = None
# for runner_id in runner_ids:
# bars, indicators = mu.load_runner(runner_id)
# print(f"runner:{runner_id}")
# if self.use_bars:
# barslist.append(bars)
# print(f"bars keys {len(bars)} lng {len(bars[self.bar_features[0]])}")
# indicatorslist.append(indicators)
# print(f"indi keys {len(indicators)} lng {len(indicators[self.ind_features[0]])}")
# if ind_keys is not None and ind_keys != len(indicators):
# raise Exception("V runnerech musi byt stejny pocet indikatoru")
# else:
# ind_keys = len(indicators)
# return barslist, indicatorslist
# #consider splitting this into a TRAIN mode (where settings such as remove cross make sense)
# def create_sequences(self, combined_data, target_data = None, remove_cross_sequences: bool = False, rows_in_day = None):
# """Creates sequences of given length seq and optionally target N steps in the future.
# Returns X(source) a Y(transformed target) - vrací take Y_untransformed - napr. referencni target column pro zobrazeni v grafu (napr. cenu)
# Volby pro transformaci targetu:
# - KEEPVAL (keep value as is)
# - KEEPVAL_MOVE(keep value, move target N steps in the future)
# další na zámysl (nejspíš ale data budu připravovat ve stratu a využívat jen KEEPy nahoře)
# - BINARY_prefix - sloupec založený na podmínce, výsledek je 0,1
# - BINARY_TREND RISING - podmínka založena, že v target columnu stoupají/klesají po target N steps
# (podvarianty BINARY TREND RISING(0-1), FALLING(0-1), BOTH(-1 - ))
# - BINARY_READY - předpřipravený sloupec(vytvořený ve strategii jako indikator), stačí jen posunout o target step
# - BINARY_READY_POSUNUTY - předpřipraveny sloupec (již posunutýo o target M) - stačí brát as is
# Args:
# combined_data: A list of combined data.
# target_data: A list of target data (0-target,1-target ref.column)
# remove_cross_sequences: Whether to remove cross-day sequences
# rows_in_day: helper array of cumulative row counts (bars, inds) per day, used to detect cross-day sequences
# Returns:
# Arrays: X sequences, y targets, and the untransformed y reference values.
# """
# if remove_cross_sequences is True and rows_in_day is None:
# raise Exception("To remove crossday sequences, rows_in_day param required.")
# if target_data is not None and len(target_data) > 0:
# target_data_untr = target_data[:,1]
# target_data = target_data[:,0]
# else:
# target_data_untr = []
# target_data = []
# X_train = []
# y_train = []
# y_untr = []
# #comb data shape (4073, 13)
# #target shape (4073, 1)
# print("Start Sequencing")
# #the sequence range depends on whether a MOVE of the target is requested or not
# if self.train_target_transformation == TargetTRFM.KEEPVAL_MOVE:
# right_offset = self.input_sequences + self.train_target_steps
# else:
# right_offset= self.input_sequences
# for i in range(len(combined_data) - right_offset):
# #also, cross sequences are not handled when the target or rows_in_day is not provided
# if remove_cross_sequences is True and not self.is_same_day(i,i + right_offset, rows_in_day):
# print(f"sequence removed. start {combined_data[i, 0]} end {combined_data[i + right_offset, 0]}")
# continue
# #append the sequence
# X_train.append(combined_data[i:i + self.input_sequences])
# #either keeps the target value as is (the row already carries the final target)
# #or takes the value from N (train_target_steps) bars ahead and assigns it as the row's target
# #this is controlled by the right_offset setting above
# if target_data is not None and len(target_data) > 0:
# y_train.append(target_data[i + right_offset])
# #performs a binary transformation of the target
# # elif self.target_transformation == TargetTRFM.BINARY_TREND_UP:
# # #mini loop from 0 to the number of target steps - checks whether values are successively rising
# # #preferably handled visually via a conditional indicator during data preparation
# # rising = False
# # for step in range(0,self.train_target_steps):
# # if target_data[i + self.input_sequences + step] < target_data[i + self.input_sequences + step + 1]:
# # rising = True
# # else:
# # rising = False
# # break
# # y_train.append([1] if rising else [0])
# # #this commented-out variant only compares the current price with the price at the target bar
# # #y_train.append([1] if target_data[i + self.input_sequences] < target_data[i + self.input_sequences + self.train_target_steps] else [0])
# if target_data is not None and len(target_data) > 0:
# y_untr.append(target_data_untr[i + self.input_sequences])
# return np.array(X_train), np.array(y_train), np.array(y_untr)
# def is_same_day(self, idx_start, idx_end, rows_in_day):
# """Helper for sequencing enables to recognize if the start/end index are from the same day.
# Used for sequences to remove cross runner(day) sequences.
# Args:
# idx_start: Start index
# idx_end: End index
# rows_in_day: 1D array of cumulative row counts (bars, inds) per day;
# each value marks the edge where a day ends, e.g. [10,30,60].
# Returns:
# A boolean
# TODO: refactor to a vectorized check if possible
# (crossing occurs when idx_start < edge and idx_end >= edge, edges e.g. [10,30,60])
# """
# for i in rows_in_day:
# #the sequence straddles a day boundary - discard it
# if idx_start < i and idx_end >= i:
# return False
# if idx_start < i and idx_end < i:
# return True
# return None #fall-through when both indices lie beyond the last edge; the caller's "not" treats this as crossing
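# #A vectorized variant of the same-day check (a sketch added here, not original code;
# #assumes numpy is available as np): np.searchsorted maps an index to its day bucket
# #via the cumsum edges, so start and end are on the same day iff the buckets match.
# # def is_same_day_vectorized(self, idx_start, idx_end, rows_in_day):
# #     return np.searchsorted(rows_in_day, idx_start, side='right') == \
# #            np.searchsorted(rows_in_day, idx_end, side='right')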
# #creates X and Y data from the settings on self
# #downloads data for the selected runners, picks columns per features and target
# #and returns them as columns in a numpy array
# #also returns rows_in_day for subsequent sequencing
# def load_data(self, runners_ids: list = None, batch_id: list = None, source: Source = Source.RUNNERS):
# """Service to load data for the model. Can be used for training or for vector prediction.
# If input data are not provided, it falls back to the training configuration of the model (train_runner_ids, train_batch_id).
# Args:
# runners_ids: optional list of runner ids to load
# batch_id: optional batch id to load runners from
# source: set to Source.SAMPLES to load the sample data instead
# Returns:
# source_data,target_data,rows_in_day
# """
# rows_in_day = []
# indicatorslist = []
# #either load the built-in samples
# if source == Source.SAMPLES:
# if self.use_bars:
# bars = sample_bars
# else:
# bars = {}
# indicators = sample_indicators
# indicatorslist.append(indicators)
# #or fetch the requested runners
# else:
# #load all runners as lists (either from runner ids or by batch id)
# barslist, indicatorslist = self.load_runners_as_list(runner_id_list=runners_ids, batch_id=batch_id)
# #deepcopy so that merge_dicts (which extends lists in place) does not mutate the originals
# bl = deepcopy(barslist)
# il = deepcopy(indicatorslist)
# #and merge their data together
# bars = mu.merge_dicts(bl)
# indicators = mu.merge_dicts(il)
# #also build a helper list holding the number of rows per day (for subsequent sequencing)
# #currently based on indicators - reconsider in the future if something runs on bars only
# for i, val in enumerate(indicatorslist):
# #count rows using the first indicator feature key
# pocet = len(indicatorslist[i][self.ind_features[0]])
# print("pro runner vkladame pocet", pocet)
# rows_in_day.append(pocet)
# rows_in_day = np.array(rows_in_day)
# rows_in_day = np.cumsum(rows_in_day)
# print("celkove pole rows_in_day(cumsum):", rows_in_day)
# print("Data LOADED.")
# print(f"number of indicators {len(indicators)}")
# print(f"number of bar elements{len(bars)}")
# print(f"ind list length {len(indicators['time'])}")
# print(f"bar list length {len(bars['time'])}")
# self.validate_available_features(bars, indicators)
# print("Preparing FEATURES")
# source_data, target_data = self.stack_bars_indicators(bars, indicators)
# return source_data, target_data, rows_in_day
# def validate_available_features(self, bars, indicators):
# for k in self.bar_features:
# if not k in bars.keys():
# raise Exception(f"Missing bar feature {k}")
# for k in self.ind_features:
# if not k in indicators.keys():
# raise Exception(f"Missing ind feature {k}")
# def stack_bars_indicators(self, bars, indicators):
# print("Stacking dicts to numpy")
# print("Source - X")
# source_data = self.column_stack_source(bars, indicators)
# print("shape", np.shape(source_data))
# print("Target - Y", self.target)
# target_data = self.column_stack_target(bars, indicators)
# print("shape", np.shape(target_data))
# return source_data, target_data
# #helper service that performs all transformations plus inverse scaling and yields a prediction
# #input is the standard strategy format (state.bars, state.indicators)
# #output is a single value
# def predict(self, bars, indicators) -> float:
# #trim to the sequence length - as configured on the model
# lastNbars = slice_dict_lists(bars, self.input_sequences)
# lastNindicators = slice_dict_lists(indicators, self.input_sequences)
# # print("last5bars", lastNbars)
# # print("last5indicators",lastNindicators)
# combined_live_data = self.column_stack_source(lastNbars, lastNindicators, verbose=0)
# #print("combined_live_data",combined_live_data)
# combined_live_data = self.scalerX.transform(combined_live_data)
# combined_live_data = np.array(combined_live_data)
# #print("last 5 values combined data shape", np.shape(combined_live_data))
# #converts to a 3D array of shape (samples, sequence length, features):
# # 1 the number of samples in the array
# # 2 the sequence length
# # 3 the number of features in the data
# combined_live_data = combined_live_data.reshape((1, self.input_sequences, combined_live_data.shape[1]))
# # Make a prediction
# prediction = self.model(combined_live_data, training=False)
# #prediction = prediction.reshape((1, 1))
# # Convert the prediction back to the original scale
# prediction = self.scalerY.inverse_transform(prediction)
# return float(prediction)
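
For orientation, a minimal sketch of how the commented-out class above would be wired together once re-enabled. The Keras network, feature names and batch id are illustrative assumptions; only the method names, the sample data keys and the Source/TargetTRFM members come from the code above.

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense

net = Sequential([LSTM(32, input_shape=(5, 4)), Dense(1)])  # 5 sequence steps, 2 bar + 2 ind features
net.compile(optimizer="adam", loss="mse")
model = ModelML(name="demo", version="1",
                pred_output=None,                       # hypothetical; a PredOutput member in real use
                bar_features=["close", "volume"],       # present in sample_bars above
                ind_features=["ema", "fastslope"],      # present in sample_indicators above
                input_sequences=5,
                target="ema", target_reference="close",
                train_target_steps=3,
                train_target_transformation=TargetTRFM.KEEPVAL_MOVE,
                train_epochs=10,
                train_batch_id="some-batch-id",         # hypothetical id, unused with Source.SAMPLES
                model=net)
source_data, target_data, rows_in_day = model.load_data(source=Source.SAMPLES)
source_data = model.scalerX.fit_transform(source_data)
X, y, y_untr = model.create_sequences(source_data, target_data,
                                      remove_cross_sequences=True, rows_in_day=rows_in_day)
y = model.scalerY.fit_transform(y.reshape(-1, 1))
net.fit(X, y, epochs=model.train_epochs)
model.save()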

View File

@ -1,55 +0,0 @@
import numpy as np
import v2realbot.controller.services as cs  # needed by load_runner below
from joblib import load
from v2realbot.config import DATA_DIR
def get_full_filename(name, version = "1"):
return DATA_DIR+'/models/'+name+'_v'+version+'.pkl'
def load_model(name, version = "1"):
filename = get_full_filename(name, version)
return load(filename)
#helper functions for data manipulation
def merge_dicts(dict_list):
# Initialize an empty merged dictionary
merged_dict = {}
# Iterate through the dictionaries in the list
for i,d in enumerate(dict_list):
for key, value in d.items():
if key in merged_dict:
merged_dict[key] += value
else:
merged_dict[key] = value
#TODO: insert an element identifying the runner
return merged_dict
# # Initialize the merged dictionary with the first dictionary in the list
# merged_dict = dict_list[0].copy()
# merged_dict["index"] = []
# # Iterate through the remaining dictionaries and concatenate their lists
# for i, d in enumerate(dict_list[1:]):
# merged_dict["index"] =
# for key, value in d.items():
# if key in merged_dict:
# merged_dict[key] += value
# else:
# merged_dict[key] = value
# return merged_dict
def load_runner(runner_id):
res, sada = cs.get_archived_runner_details_byID(runner_id)
if res == 0:
print("ok")
else:
print("error",res,sada)
raise Exception(f"error loading runner {runner_id} : {res} {sada}")
bars = sada["bars"]
indicators = sada["indicators"][0]
return bars, indicators
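
Since merge_dicts drives the multi-runner loading above, a short illustration of its semantics (an added example, not repo code). Values are lists, so += concatenates them; note that the first-seen list is extended in place, which is why callers pass deep copies:

d1 = {"time": [1, 2], "close": [10, 11]}
d2 = {"time": [3, 4], "close": [12, 13]}
merged = merge_dicts([d1, d2])
# merged == {"time": [1, 2, 3, 4], "close": [10, 11, 12, 13]}
# caveat: d1["time"] is now also [1, 2, 3, 4], because merged_dict[key] aliases
# d1's list and += extends it in place - hence the deepcopy in ModelML.load_data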

View File

@ -0,0 +1,104 @@
import matplotlib
import matplotlib.dates as mdates
matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
from v2realbot.reporting.load_trades import load_trades
from typing import Tuple, Optional, List
from traceback import format_exc
def daily_profit_distribution(runner_ids: list = None, batch_id: str = None, stream: bool = False):
try:
res, trades, days_cnt = load_trades(runner_ids, batch_id)
if res != 0:
raise Exception("Error in loading trades")
#print(trades)
# Convert list of Trade objects to DataFrame
trades_df = pd.DataFrame([t.__dict__ for t in trades if t.status == "closed"])
# Ensure 'exit_time' is a datetime object and convert it to the NY market timezone
trades_df['exit_time'] = pd.to_datetime(trades_df['exit_time']).dt.tz_convert(zoneNY)
trades_df['date'] = trades_df['exit_time'].dt.date
daily_profit = trades_df.groupby(['date', 'direction']).profit.sum().unstack(fill_value=0)
#print("dp",daily_profit)
daily_cumulative_profit = trades_df.groupby('date').profit.sum().cumsum()
# Create the plot
fig, ax1 = plt.subplots(figsize=(10, 6))
# Bar chart for daily profit composition
daily_profit.plot(kind='bar', stacked=True, ax=ax1, color=['green', 'red'], zorder=2)
ax1.set_ylabel('Daily Profit')
ax1.set_xlabel('Date')
#ax1.xaxis.set_major_locator(MaxNLocator(10))
# Line chart for cumulative daily profit
#ax2 = ax1.twinx()
#print(daily_cumulative_profit)
#print(daily_cumulative_profit.index)
#ax2.plot(daily_cumulative_profit.index, daily_cumulative_profit, color='yellow', linestyle='-', linewidth=2, zorder=3)
#ax2.set_ylabel('Cumulative Profit')
# Setting the secondary y-axis range dynamically based on cumulative profit values
# ax2.set_ylim(daily_cumulative_profit.min() - (daily_cumulative_profit.std() * 2),
# daily_cumulative_profit.max() + (daily_cumulative_profit.std() * 2))
# Dark mode settings
ax1.set_facecolor('black')
# ax1.grid(True)
#ax2.set_facecolor('black')
fig.patch.set_facecolor('black')
ax1.tick_params(colors='white')
#ax2.tick_params(colors='white')
# ax1.xaxis_date()
# ax1.xaxis.set_major_formatter(mdates.DateFormatter('%d.%m.', tz=zoneNY))
ax1.tick_params(axis='x', rotation=45)
# Footer
footer_text = f'Days Count: {days_cnt} | Parameters: {{"runner_ids_count": {len(runner_ids) if runner_ids is not None else None}, "batch_id": {batch_id}, "stream": {stream}}}'
plt.figtext(0.5, 0.01, footer_text, wrap=True, horizontalalignment='center', fontsize=8, color='white')
# Save or stream the plot
if stream:
img_stream = BytesIO()
plt.savefig(img_stream, format='png', bbox_inches='tight', facecolor=fig.get_facecolor(), edgecolor='none')
img_stream.seek(0)
plt.close(fig)
return (0, img_stream)
else:
plt.savefig(f'{__name__}.png', bbox_inches='tight', facecolor=fig.get_facecolor(), edgecolor='none')
plt.close(fig)
return (0, None)
except Exception as e:
# Detailed error reporting
return (-1, str(e) + format_exc())
# Local debugging
if __name__ == '__main__':
batch_id = "6f9b012c"
res, val = daily_profit_distribution(batch_id=batch_id)
print(res, val)
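
The analyzer plugins in this change share one return contract: (0, BytesIO) when stream=True, (0, None) after saving to disk, and a negative code with an error string otherwise. A minimal consumer sketch (output path illustrative):

res, payload = daily_profit_distribution(batch_id="6f9b012c", stream=True)
if res == 0 and payload is not None:
    with open("daily_profit.png", "wb") as f:  # illustrative output path
        f.write(payload.getvalue())
else:
    print("analyzer failed:", payload)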

View File

@ -0,0 +1,8 @@
import os
for filename in os.listdir("v2realbot/reporting/analyzer"):
if filename.endswith(".py") and filename != "__init__.py":
# __import__(filename[:-3])
__import__(f"v2realbot.reporting.analyzer.{filename[:-3]}")
#importlib.import_module()
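
The loop above eagerly imports every module in the analyzer package so that each plugin registers on import. An equivalent sketch using pkgutil/importlib instead of os.listdir (an assumed alternative, not repo code):

import importlib
import pkgutil

# iterate modules found on the given path instead of listing .py files by hand
for mod in pkgutil.iter_modules(["v2realbot/reporting/analyzer"]):
    if mod.name != "__init__":
        importlib.import_module(f"v2realbot.reporting.analyzer.{mod.name}")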

View File

@ -0,0 +1,203 @@
import matplotlib
import matplotlib.dates as mdates
#matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
from v2realbot.reporting.load_trades import load_trades
from traceback import format_exc
# Assuming Trade, TradeStatus, TradeDirection, TradeStoplossType classes are defined elsewhere
def example_plugin(runner_ids: list = None, batch_id: str = None, stream: bool = False, rem_outliers:bool = False, file: str = "optimalcutoff.png",steps:int = 50):
try:
res, trades, days = load_trades(runner_ids, batch_id)
if res < 0:
return (res, trades)
cnt_max = days
#in trades is list of Trades
#print(trades)
##THIS IS how you can fetch historical data for given period and for given TimeFrame (if needed in future)
# symbol = sada.symbol
# #hour bars for backtested period
# print(start_date,end_date)
# bars= get_historical_bars(symbol, start_date, end_date, TimeFrame.Hour)
# print("bars for given period",bars)
# """Bars a dictionary with the following keys:
# * high: A list of high prices
# * low: A list of low prices
# * volume: A list of volumes
# * close: A list of close prices
# * hlcc4: A list of HLCC4 indicators
# * open: A list of open prices
# * time: A list of times in UTC (ISO 8601 format)
# * trades: A list of number of trades
# * resolution: A list of resolutions (all set to 'D')
# * confirmed: A list of booleans (all set to True)
# * vwap: A list of VWAP indicator
# * updated: A list of booleans (all set to True)
# * index: A list of integers (from 0 to the length of the list of daily bars)
# """
# Filter to only use trades with status 'CLOSED'
closed_trades = [trade for trade in trades if trade.status == TradeStatus.CLOSED]
#print(closed_trades)
if len(closed_trades) == 0:
return -1, "image generation no closed trades"
# # Group trades by date and calculate daily profits
# trades_by_day = defaultdict(list)
# for trade in trades:
# if trade.status == TradeStatus.CLOSED and trade.exit_time:
# trade_day = trade.exit_time.date()
# trades_by_day[trade_day].append(trade)
# Precompute daily cumulative profits
daily_cumulative_profits = defaultdict(list)
for trade in trades:
if trade.status == TradeStatus.CLOSED and trade.exit_time:
day = trade.exit_time.date()
daily_cumulative_profits[day].append(trade.profit)
for day in daily_cumulative_profits:
daily_cumulative_profits[day] = np.cumsum(daily_cumulative_profits[day])
if rem_outliers:
# Remove outliers based on z-scores
def remove_outliers(cumulative_profits):
all_profits = [profit[-1] for profit in cumulative_profits.values() if len(profit) > 0]
z_scores = zscore(all_profits)
print(z_scores)
filtered_profits = {}
for day, profits in cumulative_profits.items():
if len(profits) > 0:
day_z_score = z_scores[list(cumulative_profits.keys()).index(day)]
if abs(day_z_score) < 3: # Adjust threshold as needed
filtered_profits[day] = profits
return filtered_profits
daily_cumulative_profits = remove_outliers(daily_cumulative_profits)
# OPT2 Calculate profit_range and loss_range based on all cumulative profits
all_cumulative_profits = np.concatenate([profits for profits in daily_cumulative_profits.values()])
max_cumulative_profit = np.max(all_cumulative_profits)
min_cumulative_profit = np.min(all_cumulative_profits)
profit_range = (0, max_cumulative_profit) if max_cumulative_profit > 0 else (0, 0)
loss_range = (min_cumulative_profit, 0) if min_cumulative_profit < 0 else (0, 0)
print("Calculated ranges", profit_range, loss_range)
num_points = steps # Adjust for speed vs accuracy
profit_cutoffs = np.linspace(*profit_range, num_points)
loss_cutoffs = np.linspace(*loss_range, num_points)
# OPT3 Statically define ranges for loss and profit cutoffs
# profit_range = (0, 1000) # Adjust based on your data
# loss_range = (-1000, 0)
# num_points = 20 # Adjust for speed vs accuracy
total_profits_matrix = np.zeros((len(profit_cutoffs), len(loss_cutoffs)))
for i, profit_cutoff in enumerate(profit_cutoffs):
for j, loss_cutoff in enumerate(loss_cutoffs):
total_profit = 0
for daily_profit in daily_cumulative_profits.values():
cutoff_index = np.where((daily_profit >= profit_cutoff) | (daily_profit <= loss_cutoff))[0]
if cutoff_index.size > 0:
total_profit += daily_profit[cutoff_index[0]]
else:
total_profit += daily_profit[-1] if daily_profit.size > 0 else 0
total_profits_matrix[i, j] = total_profit
# Find the optimal combination
optimal_idx = np.unravel_index(total_profits_matrix.argmax(), total_profits_matrix.shape)
optimal_profit_cutoff = profit_cutoffs[optimal_idx[0]]
optimal_loss_cutoff = loss_cutoffs[optimal_idx[1]]
max_profit = total_profits_matrix[optimal_idx]
# Plotting
# Setting up dark mode for the plots
plt.style.use('dark_background')
# Optionally, you can further customize colors, labels, and axes
params = {
'axes.titlesize': 9,
'axes.labelsize': 8,
'xtick.labelsize': 9,
'ytick.labelsize': 9,
'axes.labelcolor': '#a9a9a9', #a1a3aa',
'axes.facecolor': '#121722', #'#0e0e0e', #202020', # Dark background for plot area
'axes.grid': False, # Turn off the grid globally
'grid.color': 'gray', # If the grid is on, set grid line color
'grid.linestyle': '--', # Grid line style
'grid.linewidth': 1,
'xtick.color': '#a9a9a9',
'ytick.color': '#a9a9a9',
'axes.edgecolor': '#a9a9a9'
}
plt.rcParams.update(params)
plt.figure(figsize=(10, 8))
sns.heatmap(total_profits_matrix, xticklabels=np.rint(loss_cutoffs).astype(int), yticklabels=np.rint(profit_cutoffs).astype(int), cmap="viridis")
plt.xticks(rotation=90) # Rotate x-axis labels to be vertical
plt.yticks(rotation=0) # Keep y-axis labels horizontal
plt.gca().invert_yaxis()
plt.gca().invert_xaxis()
plt.suptitle(f"Total Profit for Combinations of Profit/Loss Cutoffs ({cnt_max})", fontsize=16)
plt.title(f"Optimal Profit Cutoff: {optimal_profit_cutoff:.2f}, Optimal Loss Cutoff: {optimal_loss_cutoff:.2f}, Max Profit: {max_profit:.2f}", fontsize=10)
plt.xlabel("Loss Cutoff")
plt.ylabel("Profit Cutoff")
if stream is False:
plt.savefig(file)
plt.close()
print(f"Optimal Profit Cutoff(rem_outliers:{rem_outliers}): {optimal_profit_cutoff}, Optimal Loss Cutoff: {optimal_loss_cutoff}, Max Profit: {max_profit}")
return 0, None
else:
# Return the image as a BytesIO stream
img_stream = BytesIO()
plt.savefig(img_stream, format='png')
plt.close()
img_stream.seek(0) # Rewind the stream to the beginning
return 0, img_stream
except Exception as e:
# Detailed error reporting
return (-1, str(e) + format_exc())
# Example usage
# trades = [list of Trade objects]
if __name__ == '__main__':
# id_list = ["e8938b2e-8462-441a-8a82-d823c6a025cb"]
# generate_trading_report_image(runner_ids=id_list)
batch_id = "73ad1866"
res, val = example_plugin(batch_id=batch_id, file="optimal_cutoff_vectorized.png",steps=20)
#res, val = find_optimal_cutoff(batch_id=batch_id, rem_outliers=True, file="optimal_cutoff_vectorized_nooutliers.png")
print(res,val)
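
The triple loop above costs O(profit cutoffs x loss cutoffs x bars per day). A broadcast-based sketch of the same search (added here as an illustration, not repo code; it reproduces the first-hit-else-last-value rule per day):

import numpy as np

def total_profits_vectorized(daily_cumulative_profits, profit_cutoffs, loss_cutoffs):
    # daily_cumulative_profits: dict of day -> 1D array of cumulative profits
    total = np.zeros((len(profit_cutoffs), len(loss_cutoffs)))
    for p in daily_cumulative_profits.values():
        if p.size == 0:
            continue
        # (P, L, T) mask: True where bar t reaches profit cutoff i or loss cutoff j
        hits = (p[None, None, :] >= profit_cutoffs[:, None, None]) | \
               (p[None, None, :] <= loss_cutoffs[None, :, None])
        first = hits.argmax(axis=2)  # index of the first True (0 where no hit at all)
        total += np.where(hits.any(axis=2), p[first], p[-1])
    return total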

View File

@ -0,0 +1,246 @@
import matplotlib
import matplotlib.dates as mdates
#matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
# Assuming Trade, TradeStatus, TradeDirection, TradeStoplossType classes are defined elsewhere
#LOSS and PROFIT without GRAPH
def find_optimal_cutoff(runner_ids: list = None, batch_id: str = None, stream: bool = False, mode:str="absolute", rem_outliers:bool = False, z_score_threshold:int = 3, file: str = "optimalcutoff.png",steps:int = 50):
#TODO add drawdown and min/max profits (not cumulative), think this through
#TODO list of runner_ids
#TODO hook into runner and batch creation, separate REST API + archived runner removal
if runner_ids is None and batch_id is None:
return -2, f"runner_id or batch_id must be present"
if batch_id is not None:
res, runner_ids =cs.get_archived_runnerslist_byBatchID(batch_id)
if res != 0:
print(f"no batch {batch_id} found")
return -1, f"no batch {batch_id} found"
trades = []
cnt_max = len(runner_ids)
cnt = 0
#for now derive start and end from the min and max days - the input may be a list of runner_ids, not only a batch
end_date = None
start_date = None
for id in runner_ids:
cnt += 1
#get runner
res, sada =cs.get_archived_runner_header_byID(id)
if res != 0:
print(f"no runner {id} found")
return -1, f"no runner {id} found"
#print("archrunner")
#print(sada)
if cnt == 1:
start_date = sada.bt_from if sada.mode in [Mode.BT,Mode.PREP] else sada.started
if cnt == cnt_max:
end_date = sada.bt_to if sada.mode in [Mode.BT, Mode.PREP] else sada.stopped
# Parse trades
trades_dicts = sada.metrics["prescr_trades"]
for trade_dict in trades_dicts:
trade_dict['last_update'] = datetime.fromtimestamp(trade_dict.get('last_update')).astimezone(zoneNY) if trade_dict['last_update'] is not None else None
trade_dict['entry_time'] = datetime.fromtimestamp(trade_dict.get('entry_time')).astimezone(zoneNY) if trade_dict['entry_time'] is not None else None
trade_dict['exit_time'] = datetime.fromtimestamp(trade_dict.get('exit_time')).astimezone(zoneNY) if trade_dict['exit_time'] is not None else None
trades.append(Trade(**trade_dict))
#print(trades)
# symbol = sada.symbol
# #hour bars for backtested period
# print(start_date,end_date)
# bars= get_historical_bars(symbol, start_date, end_date, TimeFrame.Hour)
# print("bars for given period",bars)
# """Bars a dictionary with the following keys:
# * high: A list of high prices
# * low: A list of low prices
# * volume: A list of volumes
# * close: A list of close prices
# * hlcc4: A list of HLCC4 indicators
# * open: A list of open prices
# * time: A list of times in UTC (ISO 8601 format)
# * trades: A list of number of trades
# * resolution: A list of resolutions (all set to 'D')
# * confirmed: A list of booleans (all set to True)
# * vwap: A list of VWAP indicator
# * updated: A list of booleans (all set to True)
# * index: A list of integers (from 0 to the length of the list of daily bars)
# """
# Filter to only use trades with status 'CLOSED'
closed_trades = [trade for trade in trades if trade.status == TradeStatus.CLOSED]
#print(closed_trades)
if len(closed_trades) == 0:
return -1, "image generation no closed trades"
# # Group trades by date and calculate daily profits
# trades_by_day = defaultdict(list)
# for trade in trades:
# if trade.status == TradeStatus.CLOSED and trade.exit_time:
# trade_day = trade.exit_time.date()
# trades_by_day[trade_day].append(trade)
# Precompute daily cumulative profits
daily_cumulative_profits = defaultdict(list)
for trade in trades:
if trade.status == TradeStatus.CLOSED and trade.exit_time:
day = trade.exit_time.date()
if mode == "absolute":
daily_cumulative_profits[day].append(trade.profit)
#relative profit
else:
daily_cumulative_profits[day].append(trade.rel_profit)
for day in daily_cumulative_profits:
daily_cumulative_profits[day] = np.cumsum(daily_cumulative_profits[day])
if rem_outliers:
# Remove outliers based on z-scores
def remove_outliers(cumulative_profits):
all_profits = [profit[-1] for profit in cumulative_profits.values() if len(profit) > 0]
z_scores = zscore(all_profits)
print(z_scores)
filtered_profits = {}
for day, profits in cumulative_profits.items():
if len(profits) > 0:
day_z_score = z_scores[list(cumulative_profits.keys()).index(day)]
if abs(day_z_score) < z_score_threshold: # Adjust threshold as needed
filtered_profits[day] = profits
return filtered_profits
daily_cumulative_profits = remove_outliers(daily_cumulative_profits)
# OPT1 Dynamically calculate profit_range and loss_range - based on eod daily profit
# all_final_profits = [profits[-1] for profits in daily_cumulative_profits.values() if len(profits) > 0]
# max_profit = max(all_final_profits)
# min_profit = min(all_final_profits)
# profit_range = (0, max_profit) if max_profit > 0 else (0, 0)
# loss_range = (min_profit, 0) if min_profit < 0 else (0, 0)
if mode == "absolute":
# OPT2 Calculate profit_range and loss_range based on all cumulative profits
all_cumulative_profits = np.concatenate([profits for profits in daily_cumulative_profits.values()])
max_cumulative_profit = np.max(all_cumulative_profits)
min_cumulative_profit = np.min(all_cumulative_profits)
profit_range = (0, max_cumulative_profit) if max_cumulative_profit > 0 else (0, 0)
loss_range = (min_cumulative_profit, 0) if min_cumulative_profit < 0 else (0, 0)
else:
#for relative - hardcoded
profit_range = (0, 1) # Adjust based on your data
loss_range = (-1, 0)
print("Ranges", profit_range, loss_range)
num_points = steps # Adjust for speed vs accuracy
profit_cutoffs = np.linspace(*profit_range, num_points)
loss_cutoffs = np.linspace(*loss_range, num_points)
total_profits_matrix = np.zeros((len(profit_cutoffs), len(loss_cutoffs)))
for i, profit_cutoff in enumerate(profit_cutoffs):
for j, loss_cutoff in enumerate(loss_cutoffs):
total_profit = 0
for daily_profit in daily_cumulative_profits.values():
cutoff_index = np.where((daily_profit >= profit_cutoff) | (daily_profit <= loss_cutoff))[0]
if cutoff_index.size > 0:
total_profit += daily_profit[cutoff_index[0]]
else:
total_profit += daily_profit[-1] if daily_profit.size > 0 else 0
total_profits_matrix[i, j] = total_profit
# Find the optimal combination
optimal_idx = np.unravel_index(total_profits_matrix.argmax(), total_profits_matrix.shape)
optimal_profit_cutoff = profit_cutoffs[optimal_idx[0]]
optimal_loss_cutoff = loss_cutoffs[optimal_idx[1]]
max_profit = total_profits_matrix[optimal_idx]
# Plotting
# Setting up dark mode for the plots
plt.style.use('dark_background')
# Optionally, you can further customize colors, labels, and axes
params = {
'axes.titlesize': 9,
'axes.labelsize': 8,
'xtick.labelsize': 9,
'ytick.labelsize': 9,
'axes.labelcolor': '#a9a9a9', #a1a3aa',
'axes.facecolor': '#121722', #'#0e0e0e', #202020', # Dark background for plot area
'axes.grid': False, # Turn off the grid globally
'grid.color': 'gray', # If the grid is on, set grid line color
'grid.linestyle': '--', # Grid line style
'grid.linewidth': 1,
'xtick.color': '#a9a9a9',
'ytick.color': '#a9a9a9',
'axes.edgecolor': '#a9a9a9'
}
plt.rcParams.update(params)
plt.figure(figsize=(10, 8))
sns.heatmap(total_profits_matrix, xticklabels=np.rint(loss_cutoffs).astype(int) if mode == "absolute" else np.around(loss_cutoffs, decimals=3), yticklabels=np.rint(profit_cutoffs).astype(int) if mode == "absolute" else np.around(profit_cutoffs, decimals=3), cmap="viridis")
plt.xticks(rotation=90) # Rotate x-axis labels to be vertical
plt.yticks(rotation=0) # Keep y-axis labels horizontal
plt.gca().invert_yaxis()
plt.gca().invert_xaxis()
plt.suptitle(f"Total {mode} Profit for Profit/Loss Cutoffs ({cnt_max})", fontsize=16)
plt.title(f"Optimal Profit Cutoff: {optimal_profit_cutoff:.2f}, Optimal Loss Cutoff: {optimal_loss_cutoff:.2f}, Max Profit: {max_profit:.2f}", fontsize=10)
plt.xlabel("Loss Cutoff")
plt.ylabel("Profit Cutoff")
if stream is False:
plt.savefig(file)
plt.close()
print(f"Optimal Profit Cutoff(rem_outliers:{rem_outliers}): {optimal_profit_cutoff}, Optimal Loss Cutoff: {optimal_loss_cutoff}, Max Profit: {max_profit}")
return 0, None
else:
# Return the image as a BytesIO stream
img_stream = BytesIO()
plt.savefig(img_stream, format='png')
plt.close()
img_stream.seek(0) # Rewind the stream to the beginning
return 0, img_stream
# Example usage
# trades = [list of Trade objects]
if __name__ == '__main__':
# id_list = ["e8938b2e-8462-441a-8a82-d823c6a025cb"]
# generate_trading_report_image(runner_ids=id_list)
batch_id = "c76b4414"
#vstup = AnalyzerInputs(**params)
res, val = find_optimal_cutoff(batch_id=batch_id, mode="relative", z_score_threshold=2, file="optimal_cutoff_vectorized.png",steps=20)
#res, val = find_optimal_cutoff(batch_id=batch_id, rem_outliers=True, file="optimal_cutoff_vectorized_nooutliers.png")
print(res,val)

View File

@ -10,6 +10,7 @@ from enum import Enum
 import numpy as np
 import v2realbot.controller.services as cs
 from rich import print
+from v2realbot.common.model import AnalyzerInputs
 from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
 from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
 from pathlib import Path
@ -23,8 +24,9 @@ from scipy.stats import zscore
 from io import BytesIO
 # Assuming Trade, TradeStatus, TradeDirection, TradeStoplossType classes are defined elsewhere
-#LOSS and PROFIT without GRAPH
-def find_optimal_cutoff(runner_ids: list = None, batch_id: str = None, stream: bool = False, rem_outliers:bool = False, file: str = "optimalcutoff.png",steps:int = 50):
+#HEATMAP for RELATIVE PROFIT - WIP
+#once finished, fold into the same function with just a type parameter
+def find_optimal_cutoff(runner_ids: list = None, batch_id: str = None, stream: bool = False, rem_outliers:bool = False, z_score_threshold:int = 3, file: str = "optimalcutoff.png",steps:int = 50):
 #TODO add drawdown and min/max profits (not cumulative), think this through
 #TODO list of runner_ids
@ -130,7 +132,7 @@ def find_optimal_cutoff(runner_ids: list = None, batch_id: str = None, stream: b
 for day, profits in cumulative_profits.items():
 if len(profits) > 0:
 day_z_score = z_scores[list(cumulative_profits.keys()).index(day)]
-if abs(day_z_score) < 3: # Adjust threshold as needed
+if abs(day_z_score) < z_score_threshold: # Adjust threshold as needed
 filtered_profits[day] = profits
 return filtered_profits
@ -211,7 +213,7 @@ def find_optimal_cutoff(runner_ids: list = None, batch_id: str = None, stream: b
 plt.yticks(rotation=0) # Keep y-axis labels horizontal
 plt.gca().invert_yaxis()
 plt.gca().invert_xaxis()
-plt.suptitle("Total Profit for Combinations of Profit and Loss Cutoffs", fontsize=16)
+plt.suptitle(f"Total Profit for Combinations of Profit/Loss Cutoffs ({cnt_max})", fontsize=16)
 plt.title(f"Optimal Profit Cutoff: {optimal_profit_cutoff:.2f}, Optimal Loss Cutoff: {optimal_loss_cutoff:.2f}, Max Profit: {max_profit:.2f}", fontsize=10)
 plt.xlabel("Loss Cutoff")
 plt.ylabel("Profit Cutoff")
@ -235,6 +237,7 @@ if __name__ == '__main__':
 # id_list = ["e8938b2e-8462-441a-8a82-d823c6a025cb"]
 # generate_trading_report_image(runner_ids=id_list)
 batch_id = "c76b4414"
+vstup = AnalyzerInputs(**params)
 res, val = find_optimal_cutoff(batch_id=batch_id, file="optimal_cutoff_vectorized.png",steps=20)
 #res, val = find_optimal_cutoff(batch_id=batch_id, rem_outliers=True, file="optimal_cutoff_vectorized_nooutliers.png")

View File

@ -0,0 +1,99 @@
import matplotlib
import matplotlib.dates as mdates
#matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
from v2realbot.reporting.load_trades import load_trades
from typing import Tuple, Optional, List
from traceback import format_exc
# Assuming Trade, TradeStatus, TradeDirection, TradeStoplossType classes are defined elsewhere
def ls_profit_distribution(runner_ids: List = None, batch_id: str = None, stream: bool = False) -> Tuple[int, Optional[BytesIO]]:
try:
# Load trades
result, trades, days_cnt = load_trades(runner_ids, batch_id)
# Proceed only if trades are successfully loaded
if result == 0:
# Filter trades based on direction and calculate profit
long_trades = [trade for trade in trades if trade.direction == TradeDirection.LONG]
short_trades = [trade for trade in trades if trade.direction == TradeDirection.SHORT]
long_profits = [trade.profit for trade in long_trades]
short_profits = [trade.profit for trade in short_trades]
# Setting up dark mode for visualization with custom parameters
plt.style.use('dark_background')
custom_params = {
'axes.titlesize': 9,
'axes.labelsize': 8,
'xtick.labelsize': 9,
'ytick.labelsize': 9,
'axes.labelcolor': '#a9a9a9',
'axes.facecolor': '#121722',
'axes.grid': False,
'grid.color': 'gray',
'grid.linestyle': '--',
'grid.linewidth': 1,
'xtick.color': '#a9a9a9',
'ytick.color': '#a9a9a9',
'axes.edgecolor': '#a9a9a9'
}
plt.rcParams.update(custom_params)
plt.figure(figsize=(10, 6))
sns.histplot(long_profits, color='blue', label='Long Trades', kde=True)
sns.histplot(short_profits, color='red', label='Short Trades', kde=True)
plt.xlabel('Profit')
plt.ylabel('Number of Trades')
plt.title('Profit Distribution by Trade Direction')
plt.legend()
# Handling the output
if stream:
img_stream = BytesIO()
plt.savefig(img_stream, format='png')
plt.close()
img_stream.seek(0)
return (0, img_stream)
else:
plt.savefig('profit_distribution.png')
plt.close()
return (0, None)
else:
return (result, trades) # propagate the loader's error code and message
except Exception as e:
# Detailed error reporting
return (-1, str(e) + format_exc())
# Example usage
# trades = [list of Trade objects]
if __name__ == '__main__':
# id_list = ["e8938b2e-8462-441a-8a82-d823c6a025cb"]
# generate_trading_report_image(runner_ids=id_list)
batch_id = "73ad1866"
res, val = ls_profit_distribution(batch_id=batch_id)
#res, val = find_optimal_cutoff(batch_id=batch_id, rem_outliers=True, file="optimal_cutoff_vectorized_nooutliers.png")
print(res,val)

View File

@ -0,0 +1,82 @@
import matplotlib
import matplotlib.dates as mdates
#matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
from v2realbot.reporting.load_trades import load_trades
from typing import Tuple, Optional, List
from traceback import format_exc
# Assuming Trade, TradeStatus, TradeDirection, TradeStoplossType classes are defined elsewhere
def profit_distribution_by_month(runner_ids: List = None, batch_id: str = None, stream: bool = False) -> Tuple[int, Optional[BytesIO]]:
try:
# Load trades
res, trades, days_cnt = load_trades(runner_ids, batch_id)
if res != 0:
raise Exception("Error in loading trades")
# Filter trades by status and create DataFrame
df_trades = pd.DataFrame([t.dict() for t in trades if t.status == 'closed'])
# Extract month and year from trade exit time
df_trades['month'] = df_trades['exit_time'].apply(lambda x: x.strftime('%Y-%m') if x is not None else None)
# Group by direction and month, and sum the profits
grouped = df_trades.groupby(['direction', 'month']).profit.sum().unstack(fill_value=0)
# Visualization
plt.style.use('dark_background')
fig, ax = plt.subplots(figsize=(10, 6))
# Plotting
grouped.T.plot(kind='bar', ax=ax)
# Styling
ax.set_title('Profit Distribution by Month: Long vs Short')
ax.set_xlabel('Month')
ax.set_ylabel('Total Profit')
ax.legend(title='Trade Direction')
# Adding footer
plt.figtext(0.99, 0.01, f'Days Count: {days_cnt}', horizontalalignment='right')
# Save or stream
if stream:
img = BytesIO()
plt.savefig(img, format='png')
plt.close()
img.seek(0)
return (0, img)
else:
plt.savefig('profit_distribution_by_month.png')
plt.close()
return (0, None)
except Exception as e:
# Detailed error reporting
return (-1, str(e) + format_exc())
# Local debugging
if __name__ == '__main__':
batch_id = "73ad1866"
res, val = profit_distribution_by_month(batch_id=batch_id)
print(res, val)

View File

@ -0,0 +1,106 @@
import matplotlib
import matplotlib.dates as mdates
matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
from v2realbot.reporting.load_trades import load_trades
from typing import Tuple, Optional, List
from traceback import format_exc
# Assuming Trade, TradeStatus, TradeDirection, TradeStoplossType classes are defined elsewhere
def profit_sum_by_hour(runner_ids: list = None, batch_id: str = None, stream: bool = False, group_by: str = 'entry_time'):
try:
# Load trades
res, trades, days_cnt = load_trades(runner_ids, batch_id)
if res != 0:
raise Exception("Error in loading trades")
# Filter closed trades
closed_trades = [trade for trade in trades if trade.status == 'closed']
total_closed_trades = len(closed_trades)
# Extract hour and profit/loss based on group_by parameter
hourly_profit_loss = {}
hourly_trade_count = {}
for trade in closed_trades:
# Determine the time attribute to group by
time_attribute = getattr(trade, group_by) if group_by in ['entry_time', 'exit_time'] else trade.entry_time
if time_attribute:
hour = time_attribute.hour
hourly_profit_loss.setdefault(hour, []).append(trade.profit)
hourly_trade_count[hour] = hourly_trade_count.get(hour, 0) + 1
# Aggregate profits and losses by hour
hourly_aggregated = {hour: sum(profits) for hour, profits in hourly_profit_loss.items()}
# Visualization
hours = list(hourly_aggregated.keys())
profits = list(hourly_aggregated.values())
trade_counts = [hourly_trade_count.get(hour, 0) for hour in hours]
plt.style.use('dark_background')
colors = ['blue' if profit >= 0 else 'orange' for profit in profits]
bars = plt.bar(hours, profits, color=colors)
# Make the grid subtler
plt.grid(True, color='gray', linestyle='--', linewidth=0.5, alpha=0.5)
plt.xlabel('Hour of Day')
plt.ylabel('Profit/Loss')
plt.title(f'Distribution of Profit/Loss Sum by Hour ({group_by.replace("_", " ").title()})')
# Add trade count and percentage inside the bars
for bar, count in zip(bars, trade_counts):
height = bar.get_height()
percent = (count / total_closed_trades) * 100
# Position the text inside the bars
position = height - 20 if height > 0 else height + 20
plt.text(bar.get_x() + bar.get_width() / 2., position,
f'{count} Trades\n({percent:.1f}%)', ha='center', va='center', color='white', fontsize=9)
# Adjust footer position and remove large gap
footer_text = f'Days Count: {days_cnt} | Parameters: {{"runner_ids_count": {len(runner_ids) if runner_ids is not None else None}, "batch_id": {batch_id}, "stream": {stream}, "group_by": "{group_by}"}}'
plt.gcf().subplots_adjust(bottom=0.2)
plt.figtext(0.5, 0.02, footer_text, ha="center", fontsize=8, color='gray', bbox=dict(facecolor='black', edgecolor='none', pad=3.0))
# Output
if stream:
img = BytesIO()
plt.savefig(img, format='png', bbox_inches='tight')
plt.close()
img.seek(0)
return (0, img)
else:
plt.savefig('profit_loss_by_hour.png', bbox_inches='tight')
plt.close()
return (0, None)
except Exception as e:
# Detailed error reporting
plt.close()
return (-1, str(e))
# Local debugging
if __name__ == '__main__':
batch_id = "9e990e4b"
# Example usage with group_by parameter
res, val = profit_sum_by_hour(batch_id=batch_id, group_by='exit_time')
print(res, val)

View File

@ -0,0 +1,129 @@
import matplotlib
import matplotlib.dates as mdates
matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
from v2realbot.reporting.load_trades import load_trades
from typing import Tuple, Optional, List
from traceback import format_exc
def summarize_trade_metrics(runner_ids: list = None, batch_id: str = None, stream: bool = False):
try:
res, trades, days_cnt = load_trades(runner_ids, batch_id)
if res != 0:
raise Exception("Error in loading trades")
closed_trades = [trade for trade in trades if trade.status == "closed"]
# Calculate metrics
metrics = calculate_metrics(closed_trades)
# Generate and process image
img_stream = generate_table_image(metrics)
# Add footer to image
#img_stream = add_footer_to_image(img_stream, days_cnt, runner_ids, batch_id, stream)
# Output handling
if stream:
img_stream.seek(0)
return (0, img_stream)
else:
with open(f'summarize_trade_metrics_{batch_id}.png', 'wb') as f:
f.write(img_stream.getbuffer())
return (0, None)
except Exception as e:
# Detailed error reporting
return (-1, str(e)+format_exc())
def calculate_metrics(closed_trades):
if not closed_trades:
return {}
total_profit = sum(trade.profit for trade in closed_trades)
max_profit = max(trade.profit for trade in closed_trades)
min_profit = min(trade.profit for trade in closed_trades)
total_trades = len(closed_trades)
long_trades = sum(1 for trade in closed_trades if trade.direction == "long")
short_trades = sum(1 for trade in closed_trades if trade.direction == "short")
# Daily Metrics Calculation
trades_by_day = {}
for trade in closed_trades:
day = trade.entry_time.date() if trade.entry_time else None
if day:
trades_by_day.setdefault(day, []).append(trade)
avg_trades_per_day = sum(len(trades) for trades in trades_by_day.values()) / len(trades_by_day)
avg_long_trades_per_day = sum(sum(1 for trade in trades if trade.direction == "long") for trades in trades_by_day.values()) / len(trades_by_day)
avg_short_trades_per_day = sum(sum(1 for trade in trades if trade.direction == "short") for trades in trades_by_day.values()) / len(trades_by_day)
return {
"Average Profit": total_profit / total_trades,
"Maximum Profit": max_profit,
"Minimum Profit": min_profit,
"Total Number of Trades": total_trades,
"Number of Long Trades": long_trades,
"Number of Short Trades": short_trades,
"Average Trades per Day": avg_trades_per_day,
"Average Long Trades per Day": avg_long_trades_per_day,
"Average Short Trades per Day": avg_short_trades_per_day
}
def generate_table_image(metrics):
fig, ax = plt.subplots(figsize=(10, 6))
ax.axis('tight')
ax.axis('off')
# Convert metrics to a 2D array where each row is a list
cell_text = [[value] for value in metrics.values()]
# Convert dict keys to a list for row labels
row_labels = list(metrics.keys())
ax.table(cellText=cell_text,
rowLabels=row_labels,
loc='center')
plt.subplots_adjust(left=0.2, top=0.8)
plt.title("Trade Metrics Summary", color='white')
img_stream = BytesIO()
plt.savefig(img_stream, format='png', bbox_inches='tight', pad_inches=0.1, facecolor='black')
plt.close(fig)
return img_stream
def add_footer_to_image(img_stream, days_cnt, runner_ids, batch_id, stream):
# Implementation for adding a footer to the image
# This can be done using PIL (Python Imaging Library) or other image processing libraries
# For simplicity, I'm leaving this as a placeholder
pass
# Local debugging
if __name__ == '__main__':
batch_id = "73ad1866"
res, val = summarize_trade_metrics(batch_id=batch_id)
print(res, val)

View File

@ -0,0 +1,70 @@
import matplotlib
import matplotlib.dates as mdates
#matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.model import AnalyzerInputs
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get#, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
from v2realbot.utils.historicals import get_historical_bars
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from collections import defaultdict
from scipy.stats import zscore
from typing import Tuple, Optional

def load_trades(runner_ids: List = None, batch_id: str = None) -> Tuple[int, List[Trade], int]:
    if runner_ids is None and batch_id is None:
        return -2, "runner_id or batch_id must be present", 0
    if batch_id is not None:
        res, runner_ids = cs.get_archived_runnerslist_byBatchID(batch_id)
        if res != 0:
            print(f"no batch {batch_id} found")
            return -1, f"no batch {batch_id} found", 0
    #DATA PREPARATION
    trades = []
    cnt_max = len(runner_ids)
    cnt = 0
    #for now we derive start and end from the min and max days, since the input
    #may be a plain list of runner_ids and not only a batch
    end_date = None
    start_date = None
    for id in runner_ids:
        cnt += 1
        #get runner
        res, sada = cs.get_archived_runner_header_byID(id)
        if res != 0:
            print(f"no runner {id} found")
            return -1, f"no runner {id} found", 0
        #print("archrunner")
        #print(sada)
        if cnt == 1:
            start_date = sada.bt_from if sada.mode in [Mode.BT, Mode.PREP] else sada.started
        if cnt == cnt_max:
            end_date = sada.bt_to if sada.mode in [Mode.BT, Mode.PREP] else sada.stopped
        # Parse trades
        trades_dicts = sada.metrics["prescr_trades"]
        for trade_dict in trades_dicts:
            trade_dict['last_update'] = datetime.fromtimestamp(trade_dict.get('last_update')).astimezone(zoneNY) if trade_dict['last_update'] is not None else None
            trade_dict['entry_time'] = datetime.fromtimestamp(trade_dict.get('entry_time')).astimezone(zoneNY) if trade_dict['entry_time'] is not None else None
            trade_dict['exit_time'] = datetime.fromtimestamp(trade_dict.get('exit_time')).astimezone(zoneNY) if trade_dict['exit_time'] is not None else None
            trades.append(Trade(**trade_dict))
    return 0, trades, cnt_max
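
# Illustrative note on the conversion above (the timestamp value is made up): epoch seconds
# from the archive become timezone-aware New York datetimes, e.g.
# datetime.fromtimestamp(1709735400).astimezone(zoneNY)  # -> 2024-03-06 09:30:00-05:00 (EST)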


@ -1,4 +1,3 @@
-import json
 import numpy as np
 import matplotlib
 matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'

View File

@ -348,9 +348,9 @@ def generate_trading_report_image(runner_ids: list = None, batch_id: str = None,
    #Plot 8 Cumulative profit - either a single day or several days + price development added below it
    # Extract the closing prices and times
-   closing_prices = bars['close']
+   closing_prices = bars.get('close', []) if bars is not None else []
    #times = bars['time'] # Assuming this is a list of pandas Timestamp objects
-   times = pd.to_datetime(bars['time']) # Ensure this is a Pandas datetime series
+   times = pd.to_datetime(bars['time']) if bars is not None else [] # Ensure this is a Pandas datetime series
    # # Plot the closing prices over time
    # axs[0, 4].plot(times, closing_prices, color='blue')
    # axs[0, 4].tick_params(axis='x', rotation=45) # Rotate date labels if necessary
@ -372,7 +372,8 @@ def generate_trading_report_image(runner_ids: list = None, batch_id: str = None,
    ax2.tick_params(axis='y', labelcolor='orange')
    # Set the limits for the x-axis to cover the full range of 'times'
-   axs[1, 3].set_xlim(times.min(), times.max())
+   if isinstance(times, pd.DatetimeIndex):
+       axs[1, 3].set_xlim(times.min(), times.max())
    sns.lineplot(x=exit_times, y=cumulative_profits, ax=axs[1, 3], color='limegreen')
    axs[1, 3].scatter(max_profit_time, max_profit, color='green', label='Max Profit')
    axs[1, 3].scatter(min_profit_time, min_profit, color='red', label='Min Profit')


@ -0,0 +1,308 @@
from typing import Any, List, Tuple
from uuid import UUID, uuid4
from v2realbot.enums.enums import Moddus, SchedulerStatus, RecordType, StartBarAlign, Mode, Account, OrderSide
from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
from v2realbot.utils.utils import validate_and_format_time, AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
from datetime import datetime
from v2realbot.config import JOB_LOG_FILE, STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY
import numpy as np
from rich import print as richprint
import v2realbot.controller.services as cs
import v2realbot.controller.run_manager as rm
import v2realbot.scheduler.scheduler as sch
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.job import Job
#NOTE: running a strategy across midnight is not supported yet - weekday_filter needs
#to be reworked first; there is currently only one filter for both start_time and stop_time,
#which would not work for strategies running past midnight (the stop would fall on the
#following day and the scheduler would not trigger it)
def format_apscheduler_jobs(jobs: list[Job]) -> list[dict]:
    if not jobs:
        print("No scheduled jobs.")
        return []
    jobs_info = []
    for job in jobs:
        job_info = {
            "Job ID": job.id,
            "Next Run Time": job.next_run_time,
            "Job Function": job.func.__name__,
            "Trigger": str(job.trigger),
            "Job Args": ', '.join(map(str, job.args)),
            "Job Kwargs": ', '.join(f"{k}={v}" for k, v in job.kwargs.items())
        }
        jobs_info.append(job_info)
    return jobs_info

def get_day_of_week(weekdays_filter):
    if not weekdays_filter:
        return '*'  # All days of the week
    return ','.join(map(str, weekdays_filter))
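
# Illustrative usage: APScheduler's CronTrigger day_of_week accepts a comma-separated
# list (0 = Monday in APScheduler), so get_day_of_week([0, 4]) -> "0,4" limits a trigger
# to Mondays and Fridays, while get_day_of_week(None) -> "*" fires every day.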
#initialize_jobs runs:
#- on startup
#- triggered from add/update and delete
#for now it does a full refresh; in the future we will narrow it down to only the changed record - see
#https://chat.openai.com/c/2a1423ee-59df-47ff-b073-0c49ade51ed7
#helper function returning strat_ids that appear in the scheduler more than once (their handling differs)
def stratin_occurences(all_records: list[RunManagerRecord]):
    # Count occurrences
    strat_id_counts = {}
    for record in all_records:
        if record.strat_id in strat_id_counts:
            strat_id_counts[record.strat_id] += 1
        else:
            strat_id_counts[record.strat_id] = 1
    # Find strat_id values that appear twice or more
    repeated_strat_ids = [strat_id for strat_id, count in strat_id_counts.items() if count >= 2]
    return 0, repeated_strat_ids
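
# Equivalent, more compact sketch using collections.Counter (illustration only, not wired in):
# from collections import Counter
# counts = Counter(record.strat_id for record in all_records)
# repeated_strat_ids = [sid for sid, cnt in counts.items() if cnt >= 2]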

def initialize_jobs(run_manager_records: RunManagerRecord = None):
    """
    Initialize all scheduled jobs from RunManagerRecords with moddus = "schedule".
    Triggered on app init and on update of the table.
    It deletes all "scheduler_" prefixed jobs and schedules new ones based on the runmanager table.
    The "scheduler_" prefix in APScheduler allows scheduler-type jobs to be distinguished
    and leaves room for more job categories.

    Parameters
    ----------
    run_manager_records : RunManagerRecord, optional
        RunManagerRecords to initialize the jobs from, by default None

    Returns
    -------
    Tuple[int, Union[List[dict], str]]
        A tuple containing an error code and a message. If there is no error, the
        message will contain a list of dictionaries with information about the
        scheduled jobs, otherwise it will contain an error message.
    """
    if run_manager_records is None:
        res, run_manager_records = rm.fetch_all_run_manager_records()
        if res < 0:
            err_msg = f"Error {res} fetching all runmanager records, error {run_manager_records}"
            print(err_msg)
            return -2, err_msg
    scheduled_jobs = scheduler.get_jobs()
    #print(f"Current {len(scheduled_jobs)} scheduled jobs: {str(scheduled_jobs)}")
    for job in scheduled_jobs:
        if job.id.startswith("scheduler_"):
            scheduler.remove_job(job.id)
    record: RunManagerRecord = None
    for record in run_manager_records:
        if record.status == SchedulerStatus.ACTIVE and record.moddus == Moddus.SCHEDULE:
            day_of_week = get_day_of_week(record.weekdays_filter)
            hour, minute = map(int, record.start_time.split(':'))
            start_trigger = CronTrigger(day_of_week=day_of_week, hour=hour, minute=minute,
                                        start_date=record.valid_from, end_date=record.valid_to, timezone=zoneNY)
            stop_hour, stop_minute = map(int, record.stop_time.split(':'))
            stop_trigger = CronTrigger(day_of_week=day_of_week, hour=stop_hour, minute=stop_minute,
                                       start_date=record.valid_from, end_date=record.valid_to, timezone=zoneNY)
            # Schedule new jobs with the 'scheduler_' prefix
            scheduler.add_job(start_runman_record, start_trigger, id=f"scheduler_start_{record.id}", args=[record.id])
            scheduler.add_job(stop_runman_record, stop_trigger, id=f"scheduler_stop_{record.id}", args=[record.id])
    #scheduler.add_job(print_hello, 'interval', seconds=10, id=f"scheduler_testinterval")
    scheduled_jobs = scheduler.get_jobs()
    print(f"APS jobs refreshed ({len(scheduled_jobs)})")
    current_jobs_dict = format_apscheduler_jobs(scheduled_jobs)
    richprint(current_jobs_dict)
    return 0, current_jobs_dict
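
# Illustrative example: an ACTIVE schedule record with start_time "9:30", stop_time "16:00"
# and weekdays_filter [0, 1, 2, 3, 4] yields two cron jobs (the id prefixes are the real ones
# used above, the values are made up):
#   scheduler_start_<record.id> -> CronTrigger(day_of_week="0,1,2,3,4", hour=9, minute=30, timezone=zoneNY)
#   scheduler_stop_<record.id>  -> CronTrigger(day_of_week="0,1,2,3,4", hour=16, minute=0, timezone=zoneNY)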
#wrapper function handling error reporting and printing
def start_runman_record(id: UUID, market="US", debug_date=None):
    record = None
    res, record, msg = _start_runman_record(id=id, market=market, debug_date=debug_date)
    if record is not None:
        market_time_now = datetime.now().astimezone(zoneNY) if debug_date is None else debug_date
        record.last_processed = market_time_now
        formatted_date = market_time_now.strftime("%y.%m.%d %H:%M:%S")
        history_string = f"{formatted_date}"
        history_string += " STARTED" if res == 0 else " NOTE:" + msg if res == -1 else " ERROR:" + msg
        print(history_string)
        if record.history is None:
            record.history = history_string
        else:
            record.history += "\n" + history_string
        rs, msg_rs = update_runman_record(record)
        if rs < 0:
            msg_rs = f"Error saving result to history: {msg_rs}"
            print(msg_rs)
            send_to_telegram(msg_rs)
    if res < -1:
        msg = f"START JOB: {id} ERROR\n" + msg
        send_to_telegram(msg)
        print(msg)
    else:
        print(f"START JOB: {id} FINISHED {res}")

def update_runman_record(record: RunManagerRecord):
    #update record (probably still needs tweaking - last_run and history)
    res, set = rm.update_run_manager_record(record.id, record)
    if res == 0:
        print(f"Record updated {set}")
        return 0, "OK"
    else:
        err_msg = f"STOP: Error updating {record.id} error {set} with values {record}"
        return -2, err_msg  # this stops processing of further records on error; consider continue instead

def stop_runman_record(id: UUID, market="US", debug_date=None):
    res, record, msg = _stop_runman_record(id=id, market=market, debug_date=debug_date)
    #results: 0 - ok, -1 not running/already running/not specific, -2 error
    #we always write the report to history if record is not None; any later error happened after the record was fetched
    if record is not None:
        market_time_now = datetime.now().astimezone(zoneNY) if debug_date is None else debug_date
        record.last_processed = market_time_now
        formatted_date = market_time_now.strftime("%y.%m.%d %H:%M:%S")
        history_string = f"{formatted_date}"
        history_string += " STOPPED" if res == 0 else " NOTE:" + msg if res == -1 else " ERROR:" + msg
        print(history_string)
        if record.history is None:
            record.history = history_string
        else:
            record.history += "\n" + history_string
        rs, msg_rs = update_runman_record(record)
        if rs < 0:
            msg_rs = f"Error saving result to history: {msg_rs}"
            print(msg_rs)
            send_to_telegram(msg_rs)
    if res < -1:
        msg = f"STOP JOB: {id} ERROR\n" + msg
        send_to_telegram(msg)
        print(msg)
    else:
        print(f"STOP JOB: {id} FINISHED")

#start function that is called from the job
def _start_runman_record(id: UUID, market="US", debug_date=None):
    print(f"Start scheduled record {id}")
    record: RunManagerRecord = None
    res, result = rm.fetch_run_manager_record_by_id(id)
    if res < 0:
        result = "Error fetching run manager record by id: " + str(id) + " Error: " + str(result)
        return res, record, result
    record = result
    if market is not None and market == "US":
        res, sada = sch.get_todays_market_times(market=market, debug_date=debug_date)
        if res == 0:
            market_time_now, market_open_datetime, market_close_datetime = sada
            print(f"OPEN:{market_open_datetime} CLOSE:{market_close_datetime}")
        else:
            sada = f"Market {market} Error getting market times (CLOSED): " + str(sada)
            return res, record, sada
    if cs.is_stratin_running(record.strat_id):
        return -1, record, f"Stratin {record.strat_id} is already running"
    res, result = sch.run_scheduled_strategy(record)
    if res < 0:
        result = "Error running strategy: " + str(result)
        return res, record, result
    else:
        record.runner_id = UUID(result)
        return 0, record, record.runner_id

#stop function that is called from the job
def _stop_runman_record(id: UUID, market="US", debug_date=None):
    record = None
    #get all records
    print(f"Stopping record {id}")
    res, all_records = rm.fetch_all_run_manager_records()
    if res < 0:
        err_msg = f"Error {res} fetching all runmanager records, error {all_records}"
        return -2, record, err_msg
    record: RunManagerRecord = None
    for rec in all_records:
        if rec.id == id:
            record = rec
            break
    if record is None:
        return -2, record, f"Record id {id} not found"
    #strat_ids that are repeated
    res, repeated_strat_ids = stratin_occurences(all_records)
    if res < 0:
        err_msg = f"Error {res} finding repeated strat_ids, error {repeated_strat_ids}"
        return -2, record, err_msg
    if record.strat_running is True:
        #stop based on record.runner_id
        id_to_stop = record.runner_id
    #if the same strategy was started manually and there is only one of it, it is unambiguous - stop it
    elif cs.is_stratin_running(record.strat_id) and record.strat_id not in repeated_strat_ids:
        #stop based on record.strat_id
        id_to_stop = record.strat_id
    else:
        msg = f"strategy {record.strat_id} not RUNNING or not distinctive (manually launched or two strat_ids in scheduler)"
        print(msg)
        return -1, record, msg
    print(f"Requesting STOP {id_to_stop}")
    res, msg = cs.stop_runner(id=id_to_stop)
    if res < 0:
        msg = f"ERROR while STOPPING runner_id/strat_id {id_to_stop} {msg}"
        return -2, record, msg
    else:
        record.runner_id = None
        return 0, record, "finished"
# Global scheduler instance
scheduler = BackgroundScheduler(timezone=zoneNY)
scheduler.start()
if __name__ == "__main__":
#use naive datetoime
debug_date = None
debug_date = datetime(2024, 2, 16, 9, 37, 0, 0)
#debug_date = datetime(2024, 2, 16, 10, 30, 0, 0)
#debug_date = datetime(2024, 2, 16, 16, 1, 0, 0)
id = UUID("bc4ec7d2-249b-4799-a02f-f1ce66f83d4a")
if debug_date is not None:
# Localize the naive datetime object to the Eastern timezone
debug_date = zoneNY.localize(debug_date)
#debugdate formatted as string in format "23.12.2024 9:30"
formatted_date = debug_date.strftime("%d.%m.%Y %H:%M")
print("Scheduler.py NY time: ", formatted_date)
print("ISoformat", debug_date.isoformat())
# res, result = start_runman_record(id=id, market = "US", debug_date = debug_date)
# print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {result}")
res, result = stop_runman_record(id=id, market = "US", debug_date = debug_date)
print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {result}")


@ -0,0 +1,427 @@
import json
import v2realbot.controller.services as cs
import v2realbot.controller.run_manager as rm
from v2realbot.common.model import RunnerView, RunManagerRecord, StrategyInstance, Runner, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs
from uuid import uuid4, UUID
from v2realbot.utils.utils import json_serial, send_to_telegram, zoneNY, zonePRG, fetch_calendar_data
from datetime import datetime, timedelta
from traceback import format_exc
from rich import print
import requests
from v2realbot.config import WEB_API_KEY
#Original variant of the scheduler, which was meant to run at regular intervals
#and launch the scheduled items in RunManagerRecord.
#It has since been refactored to use apscheduler - a Python library
#for job scheduling; each scheduled RunManagerRecord is now
#planned as a standalone job and triggered exactly once at the given time for start and stop.
#The new code lives in aps_scheduler.py (a sketch contrasting the two designs follows).
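
# Editor's sketch of the two designs, for illustration only (the interval value is an assumption):
#   old (this module): one polling job that rescans all records, e.g.
#       scheduler.add_job(startstop_scheduled, 'interval', minutes=1)
#   new (aps_scheduler.py): one cron job per record, fired exactly at its start/stop time, e.g.
#       scheduler.add_job(start_runman_record, CronTrigger(hour=9, minute=30, timezone=zoneNY), args=[record.id])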

def get_todays_market_times(market="US", debug_date=None):
    try:
        if market == "US":
            #check all the conditions - we may loop over them later; the conditions are on the left
            if debug_date is not None:
                nowNY = debug_date
            else:
                nowNY = datetime.now().astimezone(zoneNY)
            nowNY_date = nowNY.date()
            #is the market open - currently US only
            cal_dates = fetch_calendar_data(nowNY_date, nowNY_date)
            if len(cal_dates) == 0:
                print("No Market Day today")
                return -1, "Market Closed"
            #only the main session is supported for now
            market_open_datetime = zoneNY.localize(cal_dates[0].open)
            market_close_datetime = zoneNY.localize(cal_dates[0].close)
            return 0, (nowNY, market_open_datetime, market_close_datetime)
        else:
            return -1, "Market not supported"
    except Exception as e:
        err_msg = f"General error in {e} {format_exc()}"
        print(err_msg)
        return -2, err_msg

def get_running_strategies():
    # Construct the URL for the local REST API endpoint on port 8000
    api_url = "http://localhost:8000/runners/"
    # Headers for the request
    headers = {
        "X-API-Key": WEB_API_KEY
    }
    try:
        # Make the GET request to the API with the headers
        response = requests.get(api_url, headers=headers)
        # Check if the request was successful
        if response.status_code == 200:
            runners = response.json()
            print("Successfully fetched runners.")
            strat_ids = []
            ids = []
            for runner_view in runners:
                strat_ids.append(UUID(runner_view["strat_id"]))
                ids.append(UUID(runner_view["id"]))
            return 0, (strat_ids, ids)
        else:
            err_msg = f"Failed to fetch runners. Status Code: {response.status_code}, Response: {response.text}"
            print(err_msg)
            return -2, err_msg
    except requests.RequestException as e:
        err_msg = f"Request failed: {str(e)}"
        print(err_msg)
        return -2, err_msg

def stop_strategy(runner_id):
    # Construct the URL for the local REST API endpoint on port 8000 #option 127.0.0.1
    api_url = f"http://localhost:8000/runners/{runner_id}/stop"
    # Headers for the request
    headers = {
        "X-API-Key": WEB_API_KEY
    }
    try:
        # Make the PUT request to the API with the headers
        response = requests.put(api_url, headers=headers)
        # Check if the request was successful
        if response.status_code == 200:
            print(f"Runner/strat_id {runner_id} stopped successfully.")
            return 0, runner_id
        else:
            err_msg = f"Failed to stop runner {runner_id}. Status Code: {response.status_code}, Response: {response.text}"
            print(err_msg)
            return -2, err_msg
    except requests.RequestException as e:
        err_msg = f"Request failed: {str(e)}"
        print(err_msg)
        return -2, err_msg

def fetch_stratin(stratin_id):
    # Construct the URL for the REST API endpoint
    api_url = f"http://localhost:8000/stratins/{stratin_id}"
    # Headers for the request
    headers = {
        "X-API-Key": WEB_API_KEY
    }
    try:
        # Make the GET request to the API with the headers
        response = requests.get(api_url, headers=headers)
        # Check if the request was successful
        if response.status_code == 200:
            # Parse the response as a StrategyInstance object
            strategy_instance = response.json()
            #strategy_instance = response # Assuming the response is in JSON format
            print(f"StrategyInstance fetched: {stratin_id}")
            return 0, strategy_instance
        else:
            err_msg = f"Failed to fetch StrategyInstance {stratin_id}. " \
                      f"Status Code: {response.status_code}, Response: {response.text}"
            print(err_msg)
            return -1, err_msg
    except requests.RequestException as e:
        err_msg = f"Request failed: {str(e)}"
        print(err_msg)
        return -2, err_msg
#return list of strat_ids that are in the scheduled table more than once
#TODO this is a workaround until the candidates logic is moved from the select into fetch_all_run_manager_records with the filtering done in Python
def stratin_occurences():
    #get all records
    res, all_records = rm.fetch_all_run_manager_records()
    if res < 0:
        err_msg = f"Error {res} fetching all runmanager records, error {all_records}"
        print(err_msg)
        return -2, err_msg
    # Count occurrences
    strat_id_counts = {}
    for record in all_records:
        if record.strat_id in strat_id_counts:
            strat_id_counts[record.strat_id] += 1
        else:
            strat_id_counts[record.strat_id] = 1
    # Find strat_id values that appear twice or more
    repeated_strat_ids = [strat_id for strat_id, count in strat_id_counts.items() if count >= 2]
    return 0, repeated_strat_ids
# in case debug_date is not provided, it takes current time of the given market
#In the future there will be a loop here for each supported market; currently US only

def startstop_scheduled(debug_date=None, market="US") -> tuple[int, str]:
    res, sada = get_todays_market_times(market=market, debug_date=debug_date)
    if res == 0:
        market_time_now, market_open_datetime, market_close_datetime = sada
        print(f"OPEN:{market_open_datetime} CLOSE:{market_close_datetime}")
    else:
        return res, sada
    #it is a market day
    res, candidates = rm.fetch_scheduled_candidates_for_start_and_stop(market_time_now, market)
    if res == 0:
        print(f"Candidates fetched, start: {len(candidates['start'])} stop: {len(candidates['stop'])}")
    else:
        return res, candidates
    if candidates is None or (len(candidates["start"]) == 0 and len(candidates["stop"]) == 0):
        return -1, f"No candidates found for {market_time_now} and {market}"
    #in the future, once runners are persisted, the state of each strategy will live in RunManagerRecord
    #get current runners (possible optimization: fetch separately for each start/stop section)
    res, sada = get_running_strategies()
    if res < 0:
        err_msg = f"Error fetching running strategies, error {sada}"
        print(err_msg)
        send_to_telegram(err_msg)
        return -2, err_msg
    strat_ids_running, runnerids_running = sada
    print(f"Currently running: {len(strat_ids_running)}")
    #ITERATE over START candidates
    record: RunManagerRecord = None
    print(f"START - Looping over {len(candidates['start'])} candidates")
    for record in candidates['start']:
        print("Candidate: ", record)
        if record.weekdays_filter is not None and len(record.weekdays_filter) > 0:
            curr_weekday = market_time_now.weekday()
            if curr_weekday not in record.weekdays_filter:
                print(f"Strategy {record.strat_id} not started, today {curr_weekday} not in weekdays filter {record.weekdays_filter}")
                continue
        #one strat_id can only run once at a time
        if record.strat_id in strat_ids_running:
            msg = f"strategy {record.strat_id} is already running"
            print(msg)
            continue
        res, result = run_scheduled_strategy(record)
        if res < 0:
            send_to_telegram(result)
            print(result)
        else:
            record.runner_id = UUID(result)
            strat_ids_running.append(record.strat_id)
            runnerids_running.append(record.runner_id)
        record.last_processed = market_time_now
        history_string = f"{market_time_now.isoformat()} " + ("strategy STARTED" if res == 0 else "ERROR:" + result)
        if record.history is None:
            record.history = history_string
        else:
            record.history += "\n" + history_string
        #update record (probably still needs tweaking - last_run and history)
        res, set = rm.update_run_manager_record(record.id, record)
        if res == 0:
            print(f"Record in db updated {set}")
            #return 0, set
        else:
            err_msg = f"Error updating {record.id} error {set} with values {record}. Process stopped."
            print(err_msg)
            send_to_telegram(err_msg)
            return -2, err_msg  # this stops further processing on error; consider continue instead
    #if there are stop candidates, fetch the existing runners
    stop_candidates_cnt = len(candidates['stop'])
    if stop_candidates_cnt > 0:
        res, repeated_strat_ids = stratin_occurences()
        if res < 0:
            err_msg = f"Error {res} in calling stratin_occurences, error {repeated_strat_ids}"
            send_to_telegram(err_msg)
            return -2, err_msg
        #another OPEN ISSUE for STOP:
        # should a strategy's STOP_TIME depend on the day of the week? In other words, if a strategy
        # is scheduled 9:30-10:00 on Monday, can I start it manually on Tuesday without the system shutting it down?
        # Currently it is built so that the record defines a window in which the strategy is allowed to run,
        # and outside that window it is shut down automatically. The other option would be for the scheduler
        # to strictly watch only the strategies it started itself and ignore the rest. In that case a strategy
        # started manually later (e.g. to hotfix a bug) would be ignored by the scheduler and would not be
        # stopped even when its stop time is set.
        # Impacts: weekdays at stop time and stratin_occurences
        #ITERATE over STOP candidates
        record: RunManagerRecord = None
        print(f"STOP - Looping over {stop_candidates_cnt} candidates")
        for record in candidates['stop']:
            print("Candidate: ", record)
            #This contraption with stratin_occurences is here only so that the scheduler also works on manually started strategies (in most cases)
            # When evaluating stop candidates:
            # - if the schedules contain only 1 strategy with a given strat_id, we can go by strat_id - the running strategy with this strat_id will be stopped (even a manually started one)
            # - if there are more of them, we have to go by the runner ids stored in the schedules
            #   (the limitation in this case: a manually started strategy will not be stopped
            #   automatically - the system does not know which one it is)
            #check whether the strategy is running
            #the strategy appears in the scheduler only once, we can use strat_id
            if record.strat_id not in repeated_strat_ids:
                if record.strat_id not in strat_ids_running:
                    msg = f"strategy {record.strat_id} NOT RUNNING"
                    print(msg)
                    continue
                else:
                    #do stop
                    id_to_stop = record.strat_id
            #strat_id is used in the scheduler more than once, we must use runner_id
            elif record.runner_id is not None and record.runner_id in runnerids_running:
                #do stop
                id_to_stop = record.runner_id
            #no distinctive condition
            else:
                #don't do anything
                print(f"strategy {record.strat_id} not RUNNING or not distinctive (manually launched or two strat_ids in scheduler)")
                continue
            print(f"Requesting STOP {id_to_stop}")
            res, msg = stop_strategy(id_to_stop)
            if res < 0:
                msg = f"ERROR while STOPPING runner_id/strat_id {id_to_stop} {msg}"
                send_to_telegram(msg)
            else:
                if record.strat_id in strat_ids_running:
                    strat_ids_running.remove(record.strat_id)
                if record.runner_id is not None and record.runner_id in runnerids_running:
                    runnerids_running.remove(record.runner_id)
                record.runner_id = None
            record.last_processed = market_time_now
            history_string = f"{market_time_now.isoformat()} strategy {record.strat_id} " + ("STOPPED" if res == 0 else "ERROR:" + msg)
            if record.history is None:
                record.history = history_string
            else:
                record.history += "\n" + history_string
            #update record (probably still needs tweaking - last_run and history)
            res, set = rm.update_run_manager_record(record.id, record)
            if res == 0:
                print(f"Record updated {set}")
            else:
                err_msg = f"Error updating {record.id} error {set} with values {record}"
                print(err_msg)
                send_to_telegram(err_msg)
                return -2, err_msg  # this stops processing of further records on error; consider continue instead
    return 0, "DONE"

##LIVE or PAPER
#this version uses the REST API; now that the jobs run under apscheduler it could call cs.run_stratin directly
#TODO rework
def run_scheduled_strategy(record: RunManagerRecord):
    #get strat_json
    sada: StrategyInstance = None
    res, sada = fetch_stratin(record.strat_id)
    if res == 0:
        # #TODO verify that this output is identical to the JS one
        # print("Sada", sada)
        # #strategy_instance = StrategyInstance(**sada)
        strat_json = json.dumps(sada, default=json_serial)
        # Replace escaped characters with their unescaped versions so it matches the JS output
        #strat_json = strat_json.replace('\\r\\n', '\r\n')
        #print(f"Strat_json fetched, {strat_json}")
    else:
        err_msg = f"Strategy {record.strat_id} not found. ERROR {sada}"
        print(err_msg)
        return -2, err_msg
    #TBD maybe customize NOTE
    #if there is no batch_id, generate one and store it in the db
    # if record.batch_id is None:
    #     record.batch_id = str(uuid4())[:8]
    api_url = f"http://localhost:8000/stratins/{record.strat_id}/run"
    # Initialize RunRequest with record values
    runReq = {
        "id": str(record.strat_id),
        "strat_json": strat_json,
        "mode": record.mode,
        "account": record.account,
        "ilog_save": record.ilog_save,
        "weekdays_filter": record.weekdays_filter,
        "test_batch_id": record.testlist_id,
        "batch_id": record.batch_id or str(uuid4())[:8],
        "bt_from": record.bt_from.isoformat() if record.bt_from else None,
        "bt_to": record.bt_to.isoformat() if record.bt_to else None,
        "note": f"SCHED {record.start_time}-" + (record.stop_time if record.stop_time else "") + ((" " + record.note) if record.note is not None else "")
    }
    # Headers for the request
    headers = {
        "X-API-Key": WEB_API_KEY
    }
    try:
        # Make the PUT request to the API with the headers
        response = requests.put(api_url, json=runReq, headers=headers)
        # Check if the request was successful
        if response.status_code == 200:
            print(f"Strategy {record.strat_id} started successfully.")
            return 0, response.json()
        else:
            err_msg = f"Strategy {record.strat_id} NOT started. Status Code: {response.status_code}, Response: {response.text}"
            print(err_msg)
            return -2, err_msg
    except requests.RequestException as e:
        err_msg = f"Request failed: {str(e)}"
        print(err_msg)
        return -2, err_msg
# #initialize RunRequest with record values
# runReq = RunRequest(id=record.strat_id,
# strat_json=strat_json,
# mode=record.mode,
# account=record.account,
# ilog_save=record.ilog_save,
# weekdays_filter=record.weekdays_filter,
# test_batch_id=record.testlist_id,
# batch_id=record.batch_id,
# bt_from=record.bt_from,
# bt_to=record.bt_to,
# note=record.note)
# #call rest API to start strategy
# #start strategy
# res, sada = cs.run_stratin(id=record.strat_id, runReq=runReq, inter_batch_params=None)
# if res == 0:
# print(f"Strategy {sada} started")
# return 0, sada
# else:
# err_msg= f"Strategy {record.strat_id} NOT started. ERROR {sada}"
# print(err_msg)
# return -2, err_msg
if __name__ == "__main__":
#use naive datetoime
debug_date = None
debug_date = datetime(2024, 2, 16, 16, 37, 0, 0)
#debug_date = datetime(2024, 2, 16, 10, 30, 0, 0)
#debug_date = datetime(2024, 2, 16, 16, 1, 0, 0)
if debug_date is not None:
# Localize the naive datetime object to the Eastern timezone
debug_date = zoneNY.localize(debug_date)
#debugdate formatted as string in format "23.12.2024 9:30"
formatted_date = debug_date.strftime("%d.%m.%Y %H:%M")
print("Scheduler.py NY time: ", formatted_date)
print("ISoformat", debug_date.isoformat())
res, msg = startstop_scheduled(debug_date=debug_date, market="US")
print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {msg}")


@ -26,7 +26,7 @@
<!-- <script src="https://code.jquery.com/jquery-3.6.4.js" integrity="sha256-a9jBBRygX1Bh5lt8GZjXDzyOB+bWve9EiO7tROUtj/E=" crossorigin="anonymous"></script> --> <!-- <script src="https://code.jquery.com/jquery-3.6.4.js" integrity="sha256-a9jBBRygX1Bh5lt8GZjXDzyOB+bWve9EiO7tROUtj/E=" crossorigin="anonymous"></script> -->
<script src="/static/js/libs/jquery-3.6.4.js" integrity="sha256-a9jBBRygX1Bh5lt8GZjXDzyOB+bWve9EiO7tROUtj/E=" crossorigin="anonymous"></script> <script src="/static/js/libs/jquery-3.6.4.js"></script>
<!-- <script src="https://cdn.datatables.net/1.13.4/js/jquery.dataTables.min.js"></script> --> <!-- <script src="https://cdn.datatables.net/1.13.4/js/jquery.dataTables.min.js"></script> -->
<script src="/static/js/libs/jquery.dataTables.min.js"></script> <script src="/static/js/libs/jquery.dataTables.min.js"></script>
@ -57,7 +57,7 @@
<!-- <script src="https://code.jquery.com/jquery-3.5.1.js"></script> --> <!-- <script src="https://code.jquery.com/jquery-3.5.1.js"></script> -->
<link rel="stylesheet" href="/static/main.css"> <link rel="stylesheet" href="/static/main.css?v=1.07">
<!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/mousetrap/1.4.6/mousetrap.min.js"></script> --> <!-- <script src="https://cdnjs.cloudflare.com/ajax/libs/mousetrap/1.4.6/mousetrap.min.js"></script> -->
<script src="/static/js/libs/mousetrap.min.js"></script> <script src="/static/js/libs/mousetrap.min.js"></script>
@ -225,7 +225,7 @@
<label>Minsize: <input type="number" id="trade-minsize" autocomplete="off" value="100"/></label>
<label>Filter: C,O,4,B,7,V,P<input type="text" id="trade-filter" autocomplete="off"/></label>
<button id="bt-trade" class="btn btn-outline-success btn-sm">Show</button></div>
-<div id="trades-data" style="display: none" class="collapse show">
+<div id="trades-data" style="display: none" class="collapse show collapsible-section">
<table id="trades-data-table" class="dataTable no-footer" style="width:300px; border-color: #dce1dc; display:contents"></table>
<!-- <table id="trades-data-table" class="dataTable no-footer" style="width: 300px;display: contents;"></table> -->
</div>
@ -234,7 +234,7 @@
<label data-bs-toggle="collapse" data-bs-target="#runner-table-inner"> <label data-bs-toggle="collapse" data-bs-target="#runner-table-inner">
<h4>Running Strategies</h4> <h4>Running Strategies</h4>
</label> </label>
<div id="runner-table-inner" class="collapse show" style="width:58%"> <div id="runner-table-inner" class="collapse show collapsible-section" style="width:58%">
<div id="controls"> <div id="controls">
<label>API-KEY: <input type="password" id="api-key" autocomplete="off"/></label> <label>API-KEY: <input type="password" id="api-key" autocomplete="off"/></label>
<button onclick="store_api_key(event)" id="bt-store" class="btn btn-outline-success btn-sm">Store</button> <button onclick="store_api_key(event)" id="bt-store" class="btn btn-outline-success btn-sm">Store</button>
@ -298,32 +298,279 @@
</div>
</div>
</div>
</div>
<!-- SCHEDULER -->
<div id="runmanager-table" class="flex-items">
<label data-bs-toggle="collapse" data-bs-target="#runmanager-table-inner">
<h4>Run Manager</h4>
</label>
<div id="runmanager-table-inner" class="collapse show collapsible-section" style="width:58%">
<div id="controls">
<button title="Create new" id="button_add_sched" class="btn btn-outline-success btn-sm">Add</button>
<button title="Edit selected" id="button_edit_sched" class="btn btn-outline-success btn-sm">Edit</button>
<button title="Delete selected" id="button_delete_sched" class="btn btn-outline-success btn-sm">Delete</button>
<button title="History" id="button_history_sched" class="btn btn-outline-success btn-sm">History</button>
<button title="Refresh" id="button_refresh_sched" class="btn btn-outline-success btn-sm">Refresh</button>
<div class="btn-group btn-group-toggle" data-toggle="buttons">
<!-- <input type="radio" class="btn-check" name="filterOptions" id="filterNone" autocomplete="off" checked>
<label class="btn btn-outline-primary" for="filterNone">All</label> -->
<input type="radio" class="btn-check" name="filterOptions" id="filterSchedule" autocomplete="off" checked>
<label class="btn btn-outline-primary" for="filterSchedule">Scheduled</label>
<input type="radio" class="btn-check" name="filterOptions" id="filterQueue" autocomplete="off">
<label class="btn btn-outline-primary" for="filterQueue">Queued</label>
</div>
</div>
<table id="runmanagerTable" class="table-striped table dataTable" style="width:100%; border-color: #dce1dc;">
<thead>
<tr>
<th>Id</th>
<th>Type</th>
<th>Strat_Id</th>
<th>Symbol</th>
<th>Account</th>
<th>Mode</th>
<th>Note</th>
<th>Log</th>
<th>BT_from</th>
<th>BT_to</th>
<th>days</th>
<th>batch_id</th>
<th>start</th>
<th>stop</th>
<th>status</th>
<th>last_processed</th>
<th>history</th>
<th>valid_from</th>
<th>valid_to</th>
<th>testlist_id</th>
<th>Running</th>
<th>RunnerId</th>
</tr>
</thead>
<tbody></tbody>
</table>
</div>
<div id="delModalRunmanager" class="modal fade">
<div class="modal-dialog">
<form method="post" id="delFormRunmanager">
<div class="modal-content">
<div class="modal-header">
<h4 class="modal-title"><i class="fa fa-plus"></i> Delete record</h4>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="form-group">
<label for="delidrunmanager" class="form-label">Id</label>
<!-- <div id="listofids"></div> -->
<input type="text" class="form-control" id="delidrunmanager" name="id" placeholder="id" readonly>
</div>
</div>
<div class="modal-footer">
<input type="submit" name="delete" id="deleterunmanager" class="btn btn-primary" value="Delete" />
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</form>
</div>
</div>
<div id="addeditModalRunmanager" class="modal fade">
<div class="modal-dialog">
<form method="post" id="addeditFormRunmanager">
<div class="modal-content">
<div class="modal-header">
<h4 class="modal-title_run"><i class="fa fa-plus"></i> Add scheduler record</h4>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="form-group">
<label for="runmanid" class="form-label">Record Id</label>
<input type="text" class="form-control" id="runmanid" name="id" placeholder="auto generated id" readonly>
</div>
<div class="form-group">
<label for="runmanmoddus" class="form-label">Type</label>
<input type="text" class="form-control" id="runmanmoddus" name="moddus" readonly>
</div>
<div class="form-group">
<label for="runmanstrat_id" class="form-label">StrategyId</label>
<input type="text" class="form-control" id="runmanstrat_id" name="strat_id" placeholder="strategy id">
</div>
<div class="form-group">
<label for="runmode" class="form-label">Mode</label>
<select class="form-control" id="runmanmode" name="mode"><option value="paper">paper</option><option value="live">live</option><option value="backtest">backtest</option><option value="prep">prep</option></select>
</div>
<div class="form-group">
<label for="account" class="form-label">Account</label>
<select class="form-control" id="runmanaccount" name="account"><option value="ACCOUNT1">ACCOUNT1</option><option value="ACCOUNT2">ACCOUNT2</option></select>
</div>
<div class="form-group">
<label for="status" class="form-label">Status</label>
<select class="form-control" id="runmanstatus" name="status"><option value="active">active</option><option value="suspended">suspended</option></select>
</div>
<div class="form-group" id="runmanstart_time_div">
<label for="start" class="form-label">Start Time</label>
<input type="text" class="form-control" id="runmanstart_time" name="start_time" value="9:30" step="1">
</div>
<div class="form-group" id="runmanstop_time_div">
<label for="stop" class="form-label">Stop Time</label>
<input type="text-local" class="form-control" id="runmanstop_time" name="stop_time" value="16:00" step="1">
</div>
<!-- for future backtest queueing -->
<div class="form-group" id="runmanbt_from_div">
<label for="bt_from" class="form-label">bt_from</label>
<input type="datetime-local" class="form-control" id="runmanbt_from" name="bt_from" placeholder="2023-04-06T09:00:00Z" step="1">
</div>
<div class="form-group" id="runmanbt_to_div">
<label for="bt_to" class="form-label">bt_to</label>
<input type="datetime-local" class="form-control" id="runmanbt_to" name="bt_to" placeholder="2023-04-06T09:00:00Z" step="1">
</div>
<div class="form-group" id="runmantestlist_id_div">
<label for="test_batch_id" class="form-label">Test List ID</label>
<input type="text" class="form-control" id="runmantestlist_id" name="testlist_id" placeholder="test intervals ID">
</div>
<!-- for future backtest queueing -->
<!-- Initial Checkbox for Enabling Weekday Selection -->
<div class="form-group">
<div style="display:inline-flex">
<label for="runman_enable_weekdays" class="form-label">Limit to Weekdays</label>
<input type="checkbox" class="form-check" id="runman_enable_weekdays" name="enable_weekdays" aria-label="Enable Weekday Selection">
</div>
</div>
<!-- Weekday Checkboxes -->
<div class="form-group weekday-checkboxes" style="display:none;">
<!-- <label class="form-label">Select Weekdays:</label> -->
<div>
<input type="checkbox" id="monday" name="weekdays" value="monday">
<label for="monday">Monday</label>
</div>
<div>
<input type="checkbox" id="tuesday" name="weekdays" value="tuesday">
<label for="tuesday">Tuesday</label>
</div>
<div>
<input type="checkbox" id="wednesday" name="weekdays" value="wednesday">
<label for="wednesday">Wednesday</label>
</div>
<div>
<input type="checkbox" id="thursday" name="weekdays" value="thursday">
<label for="thursday">Thursday</label>
</div>
<div>
<input type="checkbox" id="friday" name="weekdays" value="friday">
<label for="friday">Friday</label>
</div>
</div>
<div class="form-group" id="runmanvalid_from_div">
<label for="runmanvalid_from" class="form-label">Valid from</label>
<input type="datetime-local" class="form-control" id="runmanvalid_from" name="valid_from" placeholder="2023-04-06T09:00:00Z" step="1">
</div>
<div class="form-group" id="runmanvalid_to_div">
<label for="runmanvalid_to" class="form-label">Valid to</label>
<input type="datetime-local" class="form-control" id="runmanvalid_to" name="valid_to" placeholder="2023-04-06T09:00:00Z" step="1">
</div>
<div class="form-group">
<label for="batch_id" class="form-label">Batch ID</label>
<input type="text" class="form-control" id="runmanbatch_id" name="batch_id" placeholder="batch id">
</div>
<div class="form-group">
<div style="display:inline-flex">
<label for="ilog_save" class="form-label">Enable logs</label>
<input type="checkbox" class="form-check" id="runmanilog_save" name="ilog_save" aria-label="Enable logs">
</div>
</div>
<div class="form-group">
<label for="note" class="form-label">note</label>
<textarea class="form-control" rows="1" id="runmannote" name="note"></textarea>
</div>
</div>
<div class="modal-footer">
<input type="hidden" name="runner_id" id="runmanrunner_id" />
<input type="hidden" name="history" id="runmanhistory" />
<input type="hidden" name="last_processed" id="runmanlast_processed" />
<!--<input type="hidden" name="action" id="action" value="" />-->
<input type="submit" id="runmanagersubmit" class="btn btn-primary" value="Add" />
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</form>
</div>
</div>
<div id="historyModalRunmanager" class="modal fade">
<div class="modal-dialog">
<form method="post" id="historyModalRunmanagerForm">
<div class="modal-content">
<div class="modal-header">
<h4 class="modal-title"><i class="fa fa-plus"></i>View History</h4>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="form-group">
<label for="RunmanId" class="form-label">Id</label>
<input type="text" class="form-control" id="RunmanId" name="id" placeholder="id" readonly>
</div>
<div class="form-group">
<label for="Runmanlast_processed" class="form-label">Last processed</label>
<input type="text" class="form-control" id="Runmanlast_processed" name="last_processed" readonly>
</div>
<div class="form-group">
<label for="Runmanhistory" class="form-label">History</label>
<textarea class="form-control" rows="8" id="Runmanhistory" name="history" readonly></textarea>
</div>
<!-- <div class="form-group">
<label for="metrics" class="form-label">Metrics</label>
<textarea class="form-control" rows="8" id="metrics" name="metrics"></textarea>
</div>
<div class="form-group">
<label for="stratvars" class="form-label">Stratvars</label>
<textarea class="form-control" rows="8" id="editstratvars" name="stratvars"></textarea>
</div>
<div class="form-group">
<label for="strat_json" class="form-label">Strat JSON</label>
<textarea class="form-control" rows="6" id="editstratjson" name="stratjson"></textarea>
</div> -->
</div>
<div class="modal-footer">
<!-- <input type="submit" name="delete" id="editarchive" class="btn btn-primary" value="Edit" /> -->
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</form>
</div>
</div>
</div>
<div id="archive-table" class="flex-items">
<label data-bs-toggle="collapse" data-bs-target="#archive-table-inner">
<h4>Past Runs</h4>
</label>
-<div id="archive-table-inner" class="collapse show" style="width:58%">
+<div id="archive-table-inner" class="collapse show collapsible-section" style="width:58%">
<!-- <div id="archive-chart">
<div id="chartArchive" style="position: relative;"></div>
<div class="legend" id="legendArchive"></div>
</div> -->
<div id="controls">
-<button id="button_edit_arch" class="btn btn-outline-success btn-sm">Edit(a)</button>
+<button title="Edit selected days" id="button_edit_arch" class="btn btn-outline-success btn-sm">Edit(a)</button>
-<button id="button_delete_arch" class="btn btn-outline-success btn-sm">Delete(d)</button>
+<button title="Delete selected days" id="button_delete_arch" class="btn btn-outline-success btn-sm">Delete(d)</button>
-<button id="button_delete_batch" class="btn btn-outline-success btn-sm">Delete Batch(b)</button>
+<!-- <button id="button_delete_batch" class="btn btn-outline-success btn-sm">Delete Batch(b)</button> -->
-<button id="button_show_arch" class="btn btn-outline-success btn-sm">Show(w)</button>
+<button title="Show selected day on the chart" id="button_show_arch" class="btn btn-outline-success btn-sm">Show(w)</button>
<button id="button_refresh" class="refresh btn btn-outline-success btn-sm">Refresh</button>
-<button id="button_compare_arch" class="refresh btn btn-outline-success btn-sm">Compare</button>
+<button title="Compare selected days" id="button_compare_arch" class="refresh btn btn-outline-success btn-sm">Compare</button>
-<button id="button_runagain_arch" class="refresh btn btn-outline-success btn-sm">Run Again(r)</button>
+<button title="Run selected day" id="button_runagain_arch" class="refresh btn btn-outline-success btn-sm">Run Again(r)</button>
-<button id="button_selpage" class="btn btn-outline-success btn-sm">Select all</button>
-<button id="button_export_xml" class="btn btn-outline-success btn-sm">Export xml</button>
-<button id="button_export_csv" class="btn btn-outline-success btn-sm">Export csv</button>
+<button title="Runs LIVE/PAPER in BT mode with same dates" id="button_runbt_arch" class="refresh btn btn-outline-success btn-sm">Backtest same period</button>
+<button title="Select all days on the page" id="button_selpage" class="btn btn-outline-success btn-sm">Select all</button>
+<button title="Export selected days to XML" id="button_export_xml" class="btn btn-outline-success btn-sm">Export xml</button>
+<button title="Export selected days to CSV" id="button_export_csv" class="btn btn-outline-success btn-sm">Export csv</button>
<button title="For selected days generates basic report image." id="button_report" class="btn btn-outline-success btn-sm">Report(q)</button>
-<button title="For selected batch creates heatmap for optimal profit/loss cutoffs" id="button_analyze" class="btn btn-outline-success btn-sm">Optimal cutoffs</button>
+<!-- <button title="For selected days creates heatmap for optimal profit/loss cutoffs" id="button_analyze" class="btn btn-outline-success btn-sm">Cutoffs Heatmap</button> -->
<!-- <button id="button_stopall" class="btn btn-outline-success btn-sm">Stop All</button>
<button id="button_refresh" class="btn btn-outline-success btn-sm">Refresh</button> -->
-<div id="buttons-container"></div>
+<div id="buttons-container" style="display: contents"></div>
</div>
<!-- <div>
@ -349,7 +596,9 @@
<th>pos</th>
<th>avgp</th>
<th>metrics</th>
<th>batchid</th>
+<th>batchprofit</th>
+<th>batchcount</th>
</tr>
</thead>
<tbody></tbody>
@ -402,27 +651,34 @@
</div>
<div id="logModal" class="modal fade" style="--bs-modal-width: 825px;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h4 class="modal-title"><i class="fa fa-plus"></i>Log</h4>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="form-group">
+<label for="logFileSelect" class="form-label">Select Log File</label>
+<select class="form-select" id="logFileSelect" aria-label="Log file select">
+<!-- <option selected>Select a log file</option> -->
+<option value="strat.log" selected>strat.log</option>
+<option value="job.log">job.log</option>
+</select>
+</div>
+<div class="form-group mt-3">
<label for="logHere" class="form-label">Log</label>
<div id="log-container">
<pre id="log-content"></pre>
</div>
-<!-- <input type="text" class="form-control" id="delidarchive" name="delidarchive" placeholder="id"> -->
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-primary" id="logRefreshButton" value="Refresh">Refresh</button>
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<div id="editModalArchive" class="modal fade"> <div id="editModalArchive" class="modal fade">
<div class="modal-dialog"> <div class="modal-dialog">
<form method="post" id="editFormArchive"> <form method="post" id="editFormArchive">
@ -448,6 +704,10 @@
<label for="stratvars" class="form-label">Stratvars</label> <label for="stratvars" class="form-label">Stratvars</label>
<textarea class="form-control" rows="8" id="editstratvars" name="stratvars"></textarea> <textarea class="form-control" rows="8" id="editstratvars" name="stratvars"></textarea>
</div> </div>
<div class="form-group">
<label for="stratvars" class="form-label">Transferables</label>
<textarea class="form-control" rows="8" id="edittransferables" name="stratvars"></textarea>
</div>
<div class="form-group"> <div class="form-group">
<label for="strat_json" class="form-label">Strat JSON</label> <label for="strat_json" class="form-label">Strat JSON</label>
<textarea class="form-control" rows="6" id="editstratjson" name="stratjson"></textarea> <textarea class="form-control" rows="6" id="editstratjson" name="stratjson"></textarea>
@ -466,7 +726,7 @@
<label data-bs-toggle="collapse" data-bs-target="#stratin-table-inner"> <label data-bs-toggle="collapse" data-bs-target="#stratin-table-inner">
<h4>Strategies</h4> <h4>Strategies</h4>
</label> </label>
<div id="stratin-table-inner" class="collapse show" style="width:58%"> <div id="stratin-table-inner" class="collapse show collapsible-section" style="width:40%">
<div id="controlsStratin"> <div id="controlsStratin">
<button id="button_add" class="btn btn-outline-success btn-sm">Add</button> <button id="button_add" class="btn btn-outline-success btn-sm">Add</button>
<button id="button_add_json" class="btn btn-outline-success btn-sm">Add JSON</button> <button id="button_add_json" class="btn btn-outline-success btn-sm">Add JSON</button>
@ -695,8 +955,8 @@
<input type="text" class="form-control" id="runid" name="runid" placeholder="id" readonly> <input type="text" class="form-control" id="runid" name="runid" placeholder="id" readonly>
</div> </div>
<div class="form-group"> <div class="form-group">
<label for="mode" class="form-label">Mode</label> <label for="runmode" class="form-label">Mode</label>
<select class="form-control" id="mode" name="mode"><option value="paper">paper</option><option value="live">live</option><option value="backtest">backtest</option><option value="prep">prep</option></select> <select class="form-control" id="runmode" name="mode"><option value="paper">paper</option><option value="live">live</option><option value="backtest">backtest</option><option value="prep">prep</option></select>
</div> </div>
<div class="form-group"> <div class="form-group">
<label for="account" class="form-label">Account</label> <label for="account" class="form-label">Account</label>
@ -784,11 +1044,51 @@
</div>
</div>
</div>
<div id="MLContainer" class="flex-items">
<label data-bs-toggle="collapse" data-bs-target="#MLContainerInner" aria-expanded="true">
<h4>Model Configuration</h4>
</label>
<div id="MLContainerInner" class="collapse show collapsible-section">
<button id="ml-refresh-button" class="btn btn-outline-success btn-sm">Refresh Models</button>
<div id="model-list" class="scrollable-div"></div>
<!-- Upload Form -->
<form id="upload-form" enctype="multipart/form-data" style="width: 262px;">
<input type="file" class="form-control form-control-sm" id="model-file" name="model-file">
<button type="submit" class="btn btn-outline-success btn-sm">Upload Model</button>
</form>
</div>
<!-- inspection modal -->
<div id="modelModal" class="modal fade" style="--bs-modal-width: 900px;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<h4 class="modal-title_json"><i class="fa fa-plus"></i>Model metadata</h4>
<button type="button" class="btn-close" data-bs-dismiss="modal" aria-label="Close"></button>
</div>
<div class="modal-body">
<div class="form-group">
<label for="metadata-container" id="metadata_label" class="form-label">Metadata</label>
<div id="metadata-container" style="height:700px;border:1px solid black;">
<div id="metadata-container-info"></div>
<div id="toml-editor-container"></div>
<div id="python-editor-container"></div>
</div>
<!-- <div id="metadata-container" style="height:200px;border:1px solid black;"></div> -->
</div>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
</div>
<div id="TestListContainer" class="flex-items"> <div id="TestListContainer" class="flex-items">
<label data-bs-toggle="collapse" data-bs-target="#TestListInner" aria-expanded="true"> <label data-bs-toggle="collapse" data-bs-target="#TestListInner" aria-expanded="true">
<h4>TestList Configuration</h4> <h4>TestList Configuration</h4>
</label> </label>
<div id="TestListInner" class="collapse show"> <div id="TestListInner" class="collapse show collapsible-section">
<div> <div>
<form id="recordFormTestList"> <form id="recordFormTestList">
<input type="hidden" id="recordId"> <input type="hidden" id="recordId">
@ -822,7 +1122,7 @@
<label data-bs-toggle="collapse" data-bs-target="#configInner" aria-expanded="true"> <label data-bs-toggle="collapse" data-bs-target="#configInner" aria-expanded="true">
<h4>Config</h4> <h4>Config</h4>
</label> </label>
<div id="configInner" class="collapse show"> <div id="configInner" class="collapse show collapsible-section">
<form id="configForm"> <form id="configForm">
<label for="configList">Select an Item:</label> <label for="configList">Select an Item:</label>
<select id="configList"></select><br><br> <select id="configList"></select><br><br>
@ -846,22 +1146,43 @@
<BR>
</div>
</div>
<script src="/static/js/config.js?v=1.04"></script>
<!-- temporary local copy of the charting library starts here -->
<!-- <script type="text/javascript" src="https://unpkg.com/lightweight-charts/dist/lightweight-charts.standalone.production.js"></script> --> <!-- <script type="text/javascript" src="https://unpkg.com/lightweight-charts/dist/lightweight-charts.standalone.production.js"></script> -->
<script type="text/javascript" src="/static/js/libs/lightweightcharts/lightweight-charts.standalone.production410.js"></script> <script type="text/javascript" src="/static/js/libs/lightweightcharts/lightweight-charts.standalone.production410.js"></script>
<script src="/static/js/dynamicbuttons.js?v=1.05"></script>
<script src="/static/js/utils.js"></script> <!-- <script src="/static/js/utils.js?v=1.01"></script> -->
<script src="/static/js/instantindicators.js"></script> <!-- new util structure and exports and colors -->
<script src="/static/js/archivechart.js"></script> <script src="/static/js/utils/utils.js?v=1.06"></script>
<script src="/static/js/archivetables.js"></script> <script src="/static/js/utils/exports.js?v=1.04"></script>
<script src="/static/js/livewebsocket.js"></script> <script src="/static/js/utils/colors.js?v=1.04"></script>
<script src="/static/js/realtimechart.js"></script>
<script src="/static/js/mytables.js"></script>
<script src="/static/js/testlist.js"></script> <script src="/static/js/instantindicators.js?v=1.04"></script>
<script src="/static/js/configform.js"></script> <script src="/static/js/archivechart.js?v=1.05"></script>
<!-- <script src="/static/js/dynamicbuttons.js"></script> -->
<!-- <script src="/static/js/archivetables.js?v=1.05"></script> -->
<!-- archiveTables split into separate files -->
<script src="/static/js/tables/archivetable/init.js?v=1.12"></script>
<script src="/static/js/tables/archivetable/functions.js?v=1.10"></script>
<script src="/static/js/tables/archivetable/modals.js?v=1.07"></script>
<script src="/static/js/tables/archivetable/handlers.js?v=1.09"></script>
<!-- Runmanager functionality -->
<script src="/static/js/tables/runmanager/init.js?v=1.1"></script>
<script src="/static/js/tables/runmanager/functions.js?v=1.08"></script>
<script src="/static/js/tables/runmanager/modals.js?v=1.07"></script>
<script src="/static/js/tables/runmanager/handlers.js?v=1.07"></script>
<script src="/static/js/livewebsocket.js?v=1.02"></script>
<script src="/static/js/realtimechart.js?v=1.02"></script>
<script src="/static/js/mytables.js?v=1.02"></script>
<script src="/static/js/testlist.js?v=1.01"></script>
<script src="/static/js/ml.js?v=1.02"></script>
<script src="/static/js/common.js?v=1.01"></script>
<script src="/static/js/configform.js?v=1.01"></script>
<!-- <script src="/static/js/scheduler.js?v=1.01"></script> -->
</body>
</html>


@ -0,0 +1,184 @@
<!DOCTYPE html>
<html lang="en">
<head>
<!-- Bootstrap CSS (Dark Mode Enabled) -->
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.0/dist/css/bootstrap.min.css" rel="stylesheet">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.2/font/bootstrap-icons.min.css">
<!-- Custom CSS -->
<style>
/* Custom styles for dark mode and form offset */
.dropdown-menu-dark .form-control, .dropdown-menu-dark .btn {
background-color: #343a40;
border-color: #6c757d;
color: white;
}
.dropdown-menu-dark .form-control:focus {
box-shadow: none;
border-color: #5cb85c;
}
.dropdown-item {
position: relative;
display: flex;
align-items: center; /* Align play icon vertically */
}
.hover-icon {
margin-left: auto; /* Push play icon to the right */
cursor: pointer; /* Change cursor on hover */
}
.action-form {
display: none; /* Hide form by default */
position: absolute;
left: 100%; /* Position form to the right of the dropdown item */
top: 0;
white-space: nowrap; /* Prevent wrapping on small screens */
width: max-content;
/* Add some space between the dropdown item and the form */
background: #343a40; /* Match the dropdown background color */
border-radius: 0.25rem; /* Match Bootstrap's border radius */
border: 1px solid #6c757d; /* Slight border for the form */
}
.form-group {
display: flex;
gap: 0.5rem; /* Spacing between form fields */
align-items: center;
margin-bottom: 0.5rem; /* Spacing between each form group */
}
/* Floating label styles */
.form-label-group {
position: relative;
/* padding-top: 15px; */
}
.form-label-group label {
position: absolute;
top: 0;
left: 12px;
font-size: 75%;
/* transform: translateY(-50%); */
margin-top: 0; /* Adjusted for font size */
color: #6c757d;
pointer-events: none;
}
.form-label-group input,
.form-label-group select {
padding-top: 18px;
/* padding-bottom: 2px; */
}
</style>
</head>
<body class="bg-dark text-white">
<div class="container mt-5">
<!-- Dropdown Button -->
<div class="dropdown">
<button class="btn btn-secondary dropdown-toggle" type="button" id="actionDropdown" data-bs-toggle="dropdown" aria-expanded="false">
Choose Action
</button>
<ul class="dropdown-menu dropdown-menu-dark" aria-labelledby="actionDropdown">
<!-- Action 1-->
<li>
<a class="dropdown-item" href="#">
Action 1
<i class="bi bi-play-circle float-end hover-icon"></i>
<!-- ... Action 1 content ... -->
<form class="d-none action-form">
<div class="form-label-group">
<input type="text" id="param1-action1" class="form-control form-control-sm" placeholder="Parameter 1" value="Default Value">
<label for="param1-action1">Parameter 1</label>
</div>
<div class="form-label-group">
<input type="text" id="param2-action1" class="form-control form-control-sm" placeholder="Parameter 2">
<label for="param2-action1">Parameter 2</label>
</div>
<div class="form-label-group">
<select class="form-select form-select-sm" id="select-action1">
<option selected>Option 1</option>
<option value="1">Option 2</option>
<option value="2">Option 3</option>
</select>
<label for="select-action1">Select Option</label>
</div>
<button type="submit" class="btn btn-primary btn-sm">Submit</button>
</form>
</a>
</li>
<!-- ... Additional Actions ... -->
<li>
<a class="dropdown-item" href="#">
Action 2
<i class="bi bi-play-circle float-end hover-icon"></i> <!-- Bootstrap Icons for Play -->
<!-- ... Action 2 content ... -->
<form class="d-none action-form">
<div class="form-label-group">
<input type="text" id="param1-action2" class="form-control form-control-sm" placeholder="Parameter 1" value="Default Value">
<label for="param1-action1">Parameter 1</label>
</div>
<div class="form-label-group">
<input type="text" id="param2-action2" class="form-control form-control-sm" placeholder="Parameter 2">
<label for="param2-action2">Parameter 2</label>
</div>
<div class="form-label-group">
<select class="form-select form-select-sm" id="select-action2">
<option selected>Option 1</option>
<option value="1">Option 2</option>
<option value="2">Option 3</option>
</select>
<label for="select-action2">Select Option</label>
</div>
<button type="submit" class="btn btn-primary btn-sm">Submit</button>
</form>
</a>
</li>
</ul>
</div>
</div>
<!-- jQuery and Bootstrap Bundle -->
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.0/dist/js/bootstrap.bundle.min.js"></script>
<script>
$(document).ready(function() {
// Toggle visibility of form on hover for any number of actions
$('.dropdown-menu').on('mouseenter', '.dropdown-item', function() {
$(this).find('.action-form').removeClass('d-none').show();
});
$('.dropdown-menu').on('mouseleave', '.dropdown-item', function() {
setTimeout(() => { // Timeout to prevent flickering effect
if (!$('.action-form:hover').length) {
$(this).find('.action-form').addClass('d-none').hide();
}
}, 100);
});
// // Show the form when hovering over the play icon
// $('.dropdown-menu').on('mouseenter', '.hover-icon', function() {
// $(this).siblings('.action-form').removeClass('d-none').show();
// });
// // Hide the form when hovering out of the play icon and form area
// $('.dropdown-menu').on('mouseleave', '.hover-icon, .action-form', function() {
// setTimeout(() => { // Timeout to prevent flickering effect
// if (!$('.action-form:hover').length) {
// $('.action-form').hide();
// }
// }, 100);
// });
// Hide form when mouse leaves the form area
$('.dropdown-menu').on('mouseleave', '.action-form', function() {
$(this).hide();
});
// Submit form logic
$('.dropdown-menu').on('submit', '.action-form', function(e) {
e.preventDefault();
// Add logic to process form submission
console.log('Form submitted for', $(this).closest('.dropdown-item').text().trim());
});
});
</script>
</body>
</html>


@ -16,6 +16,7 @@ var slLine = []
//input array object bars = { high: [1,2,3], time: [1,2,3], close: [2,2,2]...}
//output array [{ time: 111, open: 11, high: 33, low: 333, close: 333},..]
function transform_data(data) {
//console.log(data)
var SHOW_SL_DIGITS = get_from_config("SHOW_SL_DIGITS", true)
transformed = []
//get basic bars, volume and vvwap
@ -174,7 +175,9 @@ function transform_data(data) {
data.trades.forEach((trade, index, array) => {
obj = {};
a_markers = {}
//in older versions filled_at can be a string (an ISO date); newer versions already store a timestamp
timestamp = (typeof trade.order.filled_at === 'string') ? Date.parse(trade.order.filled_at)/1000 : trade.order.filled_at
//lightweight-charts cannot handle multiple records at the same time
//since in BT several trades can happen at the same time, we test for equal values and add one ms if needed
//there can be several consecutive trades with the same time, hence the iterator
@ -266,9 +269,10 @@ function transform_data(data) {
markers.push(marker)
//convert ISO dates to timestamps
//each can be an ISO string (older versions) or already a float - support both
trade.order.submitted_at = (typeof trade.order.submitted_at === 'string') ? Date.parse(trade.order.submitted_at)/1000 : trade.order.submitted_at
trade.order.filled_at = (typeof trade.order.filled_at === 'string') ? Date.parse(trade.order.filled_at)/1000 : trade.order.filled_at
trade.timestamp = (typeof trade.order.timestamp === 'string') ? Date.parse(trade.order.timestamp)/1000 : trade.order.timestamp
tradeDetails.set(timestamp, trade)
//line for buy/sell markers
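
A minimal consolidation sketch (not part of the diff): the string-or-number check above repeats for filled_at, submitted_at and timestamp, so a single helper could normalize all of them; the name toEpochSeconds is hypothetical.

// normalize either an ISO string (older archives) or an epoch value (newer ones)
function toEpochSeconds(value) {
    return (typeof value === 'string') ? Date.parse(value) / 1000 : value;
}
// toEpochSeconds("2024-03-06T14:30:00Z") and toEpochSeconds(1709735400) both yield 1709735400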
@ -363,6 +367,7 @@ function prepare_data(archRunner, timeframe_amount, timeframe_unit, archivedRunn
//helper service that fills indList and charts the indicators
function chart_indicators(data, visible, offset) {
console.log(data)
//console.log("indikatory", JSON.stringify(data.indicators,null,2)) //console.log("indikatory", JSON.stringify(data.indicators,null,2))
//podobne v livewebsokcets.js - dat do jedne funkce //podobne v livewebsokcets.js - dat do jedne funkce
if (data.hasOwnProperty("indicators")) { if (data.hasOwnProperty("indicators")) {
@ -381,10 +386,13 @@ function chart_indicators(data, visible, offset) {
//console.log("ZPETNE STRINGIFIED", TOML.stringify(TOML.parse(data.archRecord.stratvars_toml), {newline: '\n'})) //console.log("ZPETNE STRINGIFIED", TOML.stringify(TOML.parse(data.archRecord.stratvars_toml), {newline: '\n'}))
//indicatory //indicatory
//console.log("indicatory TOML", stratvars_toml.stratvars.indicators) //console.log("indicatory TOML", stratvars_toml.stratvars.indicators)
indId = 1
var multiOutsCnf = {}
indicatorList.forEach((indicators, index, array) => {
//var indicators = data.indicators
//index 0 - bar indicators
//index 1 - tick based indicators
//if there are indicators it means there must be at least two keys (time which is always present)
if (Object.keys(indicators).length > 1) {
for (const [key, value] of Object.entries(indicators)) {
@ -394,6 +402,7 @@ function chart_indicators(data, visible, offset) {
//if a scale is set in the configuration, use it
var scale = null
var instant = null
var returns = null
//console.log(key)
//check whether this is an instant indicator from the arch runner
if ((data.ext_data !== null) && (data.ext_data.instantindicators)) {
@ -403,6 +412,7 @@ function chart_indicators(data, visible, offset) {
cnf = instantIndicator.toml
scale = TOML.parse(cnf).scale
instant = 1
returns = TOML.parse(cnf).returns
}
}
//if not found, fall back to the standard configuration
@ -411,6 +421,7 @@ function chart_indicators(data, visible, offset) {
if (stratvars_toml.stratvars.indicators[key]) {
cnf = "#[stratvars.indicators."+key+"]"+TOML.stringify(stratvars_toml.stratvars.indicators[key], {newline: '\n'})
scale = stratvars_toml.stratvars.indicators[key].scale
returns = stratvars_toml.stratvars.indicators[key].returns
}
}
// //check in addedInds
@ -430,13 +441,31 @@ function chart_indicators(data, visible, offset) {
// }
// }
//for multioutput children, pull the scale from the parent
if (multiOutsCnf.hasOwnProperty(key)) {
scale = multiOutsCnf[key];
}
//initialize indicator and store reference to array
var obj = {name: key, type: index, series: null, cnf:cnf, instant: instant, returns: returns, indId:indId++}
//if this is a multioutput parent, store the parent's scale for its children
//variants - scale is a single value: use it for every output
// - scale is a list: each output uses the scale at the matching index in the list
if (returns) {
returns.forEach((returned, index, array) => {
//
if (Array.isArray(scale)) {
multiOutsCnf[returned] = scale[index]
}
else {
multiOutsCnf[returned] = scale
}
})
}
//console.log(key)
//get configuration of the indicator to display
conf = get_ind_config(key, index)
//if not present in the configuration, display by default
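
Worked example (hypothetical values) of the scale propagation above: a multioutput parent fills multiOutsCnf so that each child series later picks up its own scale.

var multiOutsCnfExample = {};
var parentReturns = ["macd", "macd_signal"]; // parent's declared outputs
var parentScale = [2, 3];                    // per-output scales from the TOML config
parentReturns.forEach(function (returned, index) {
    multiOutsCnfExample[returned] = Array.isArray(parentScale) ? parentScale[index] : parentScale;
});
// multiOutsCnfExample -> { macd: 2, macd_signal: 3 }; a scalar scale would map both children to the same value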
@ -591,7 +620,12 @@ function chart_indicators(data, visible, offset) {
//console.log("true",active?active:conf.display) //console.log("true",active?active:conf.display)
active = true active = true
} }
else {active = false} else {active = false}
//do not display the main series of a multioutput indicator
if (returns) {
active = false
}
//add options
obj.series.applyOptions({
visible: active?active:visible,
@ -615,19 +649,67 @@ function chart_indicators(data, visible, offset) {
})
}
//sort by type first (0-bar,1-cbar inds) and then alphabetically
// indList.sort((a, b) => {
// if (a.type !== b.type) {
// return a.type - b.type;
// } else {
// let nameA = a.name.toUpperCase();
// let nameB = b.name.toUpperCase();
// if (nameA < nameB) {
// return -1;
// } else if (nameA > nameB) {
// return 1;
// } else {
// // If uppercase names are equal, compare original names to prioritize uppercase
// return a.name < b.name ? -1 : 1;
// }
// }
// });
//SORTING so that multioutput attributes always end up at the end of their group (i.e. in the display they are processed by their parents)
// Step 1: Create a Set of all names in 'returns' arrays
const namesInReturns = new Set();
indList.forEach(item => {
if (Array.isArray(item.returns)) {
item.returns.forEach(name => namesInReturns.add(name));
}
});
// Step 2: Custom sort function
indList.sort((a, b) => {
// First, sort by 'type'
if (a.type !== b.type) {
return a.type - b.type;
}
// For items with the same 'type', apply secondary sorting
const aInReturns = namesInReturns.has(a.name);
const bInReturns = namesInReturns.has(b.name);
if (aInReturns && !bInReturns) return 1; // 'a' goes after 'b'
if (!aInReturns && bInReturns) return -1; // 'a' goes before 'b'
// If both or neither are in 'returns', sort alphabetically by 'name'
return a.name.localeCompare(b.name);
});
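
Quick check of the comparator above on hypothetical data: outputs listed in some parent's returns sink to the end of their type group, everything else stays alphabetical within the group.

var demoReturns = new Set(["macd_signal"]);
var demoList = [
    { name: "macd_signal", type: 0 },
    { name: "ema", type: 0 },
    { name: "tickvol", type: 1 },
    { name: "macd", type: 0 }
];
demoList.sort(function (a, b) {
    if (a.type !== b.type) return a.type - b.type;
    var aIn = demoReturns.has(a.name), bIn = demoReturns.has(b.name);
    if (aIn && !bIn) return 1;
    if (!aIn && bIn) return -1;
    return a.name.localeCompare(b.name);
});
// resulting order: ema, macd, macd_signal, tickvol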
//original function
// indList.sort((a, b) => {
// const nameA = a.name.toUpperCase(); // ignore upper and lowercase
// const nameB = b.name.toUpperCase(); // ignore upper and lowercase
// if (nameA < nameB) {
// return -1;
// }
// if (nameA > nameB) {
// return 1;
// }
// // names must be equal
// return 0;
// });
//vwap and volume only in the detail view for now
if (!offset) {
//display vwap and volume

File diff suppressed because it is too large


@ -638,7 +638,7 @@ $(document).ready(function () {
else{
$('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
}
$('#edittransferables').val(JSON.stringify(row.transferables,null,2));
$('#editstratjson').val(row.strat_json);
}


@ -0,0 +1,30 @@
$(document).ready(function() {
// Function to handle the state of each collapsible section
function handleCollapsibleState() {
$('.collapsible-section').each(function() {
var sectionId = $(this).attr('id');
var isExpanded = localStorage.getItem(sectionId + 'State') === 'true';
if (isExpanded) {
$(this).addClass('show');
$(this).attr('aria-expanded', 'true');
} else {
$(this).removeClass('show');
$(this).attr('aria-expanded', 'false');
}
// Set up event listener for the toggle
$('[data-bs-target="#' + sectionId + '"]').click(function() {
setTimeout(function() { // Set timeout to wait for the toggle action to complete
var currentState = $('#' + sectionId).hasClass('show');
localStorage.setItem(sectionId + 'State', currentState);
}, 350); // Adjust timeout as needed based on the collapse animation duration
});
});
}
// Apply the function to all elements with the 'collapsible-section' class
handleCollapsibleState();
// Additional functionality such as fetching models (as previously defined)
});
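
A possible alternative sketch (not in the commit): Bootstrap 5 emits collapse lifecycle events, so the state could be persisted without guessing the animation duration that the 350 ms timeout approximates.

document.querySelectorAll('.collapsible-section').forEach(function (el) {
    ['shown.bs.collapse', 'hidden.bs.collapse'].forEach(function (evt) {
        el.addEventListener(evt, function () {
            // same key naming as above: sectionId + 'State'
            localStorage.setItem(el.id + 'State', el.classList.contains('show'));
        });
    });
});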


@ -9,9 +9,9 @@
// PRIMARY KEY("id" AUTOINCREMENT)
// ); //new comment
let configData = {}
//service that pulls the requested data from the global variable holding the JS configuration
function get_from_config(name, def_value) {
def_value = def_value ? def_value : null
console.log("required", name, configData)
@ -25,52 +25,58 @@ function get_from_config(name, def_value) {
}
}
function loadConfig(configName) {
return new Promise((resolve, reject) => {
const rec = new Object();
rec.item_name = configName;
$.ajax({
url: `/config-items-by-name/`,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key', API_KEY);
},
method: 'GET',
contentType: "application/json",
dataType: "json",
data: rec,
success: function (data) {
try {
var configData = JSON.parse(data.json_data);
resolve(configData); // Resolve the promise with configData
}
catch (error) {
reject(error); // Reject the promise if there's an error
}
},
error: function(xhr, status, error) {
reject(new Error(xhr.responseText)); // Reject the promise on AJAX error
}
});
});
}
function getConfiguration(area) {
return loadConfig(area).then(configData => {
console.log("Config loaded for", area, configData);
return configData;
}).catch(error => {
console.error('Error loading config for', area, error);
throw error; // Re-throw to allow caller to handle
});
}
//asynchronously fills the global variable
async function loadConfigData(jsConfigName) {
try {
configData[jsConfigName] = await getConfiguration(jsConfigName);
console.log("jsConfigName", jsConfigName);
} catch (error) {
console.error('Failed to load button configuration:', jsConfigName, error);
}
}
$(document).ready(function () {
var jsConfigName = "JS"
loadConfigData(jsConfigName)
});
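
Hypothetical usage sketch: code that reads the "JS" config should wait for the asynchronous load instead of calling get_from_config immediately, otherwise configData may still be empty.

$(document).ready(async function () {
    await loadConfigData("JS");
    // safe to read now; "CHART_SHOW_TEXT" and the fallback value are illustrative
    var showText = get_from_config("CHART_SHOW_TEXT", false);
    console.log("CHART_SHOW_TEXT =", showText);
});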


@ -1,66 +1,285 @@
//equivalent of ready
$(function(){
//load the buttons config
loadConfig("dynamic_buttons").then(config => {
console.log("Config loaded for dynamic_buttons", config);
// $(targetElement).append(dropdownHtml)
// // Find the ul element within the dropdown
// var dropdownMenu = $(targetElement).find('.dropdown-menu');
configData["dynamic_buttons"] = config
//this part is generic, sits above the table buttons
console.log("conf data z buttonu po loadu", configData)
populate_dynamic_buttons($("#buttons-container"), config);
}).catch(error => {
console.error('Error loading config for', "dynamic_buttons", error);
});
})
//the input is a #some dropdown menu (TODO it might make sense to create the element beforehand
//rather than inside the following function, as it is done now)
function populate_dynamic_buttons(targetElement, config, batch_id = null) {
//console.log("buttonConfig",config)
// Function to create form inputs based on the configuration
function createFormInputs(additionalParameters, batch_id = null) {
var formHtml = ''
// else
// {
// $.each(runner_ids, function(index, id) {
// formHtml += '<input type="hidden" name="runner_ids[]" value="' + id + '">';
// });
// }
$.each(additionalParameters, function(key, param) {
// Include 'name' attribute in each input element
var id_prefix = batch_id ? batch_id : ''
var id = id_prefix + key
switch(param.type) {
case 'select':
formHtml += '<div class="form-label-group"><select class="form-select form-select-sm" name="' + key + '" id="' + id + '">';
$.each(param.options, function(index, option) {
var selected = (option == param.defval) ? 'selected' : '';
formHtml += '<option ' + selected + '>' + option + '</option>';
});
formHtml += '</select><label for="' + id + '">' + key + '</label></div>';
break;
case 'string':
formHtml += '<div class="form-label-group"><input type="text" name="' + key + '" id="' + id + '" class="form-control form-control-sm" placeholder="' + key + '" value="' + param.default + '"><label for="' + id + '">' + key + '</label></div>';
break;
case 'number':
formHtml += '<div class="form-label-group"><input type="number" name="' + key + '" id="' + id + '" class="form-control form-control-sm" placeholder="' + key + '" value="' + param.default + '"><label for="' + id + '">' + key + '</label></div>';
break;
case 'boolean':
formHtml += '<div class="form-label-group"><input type="checkbox" name="' + key + '" id="' + id + '" class="form-check" ' + (param.default? 'checked' : '') + '><label for="' + id + '">' + key + '</label></div>';
break
} }
});
return formHtml;
}
//fill the generic element (maybe do this in the calling function)
//for batches it is an icon
if (batch_id) {
dropdownHtml = '<div class="dropdown stat_div" id="dd'+batch_id+'"><span class="material-symbols-outlined tool-icon dropdown-toggle" id="actionDropdown'+batch_id+'" data-bs-toggle="dropdown" aria-expanded="false">query_stats</span><ul class="dropdown-menu dropdown-menu-dark" aria-labelledby="actionDropdown'+batch_id+'" id="ul'+batch_id+'"></ul></div>'
}
//for runners it is a button
else {
dropdownHtml = '<div class="dropdown stat_div" id="dd'+batch_id+'"><button title="Available analysis to run on selected days" class="btn btn-outline-success btn-sm dropdown-toggle" type="button" id="actionDropdown'+batch_id+'" data-bs-toggle="dropdown" aria-expanded="false">Analytics</button><ul class="dropdown-menu dropdown-menu-dark" aria-labelledby="actionDropdown'+batch_id+'" id="ul'+batch_id+'"></ul></div>'
}
targetElement.append(dropdownHtml)
//console.log("po pridani", targetElement)
// Find the ul element within the dropdown
var dropdownMenu = targetElement.find('.dropdown-menu');
// Dynamically create buttons and forms based on the configuration
$.each(config, function(index, buttonConfig) {
var formHtml = createFormInputs(buttonConfig.additionalParameters, batch_id);
var batchInputHtml = batch_id ? '<input type="hidden" name="batch_id" id="batch'+buttonConfig.function+batch_id+'" value="'+batch_id+'">': ''
var buttonHtml = '<li><a class="dropdown-item" href="#">' + buttonConfig.label +
'<i class="bi bi-play-circle float-end hover-icon"></i><form class="d-none action-form" data-endpoint="' + buttonConfig.apiEndpoint + '"><div class="spinner-border text-primary d-none" role="status" id="formSpinner"><span class="visually-hidden">Loading...</span></div><input type="hidden" name="function" id="func'+buttonConfig.function+batch_id+'" value="'+buttonConfig.function+'"></input>' +
batchInputHtml + formHtml + '<button type="submit" class="btn btn-primary btn-sm">Submit</button></form></a></li>';
dropdownMenu.append(buttonHtml);
//$(targetElement).append(buttonHtml);
//$('#actionDropdown').next('.dropdown-menu').append(buttonHtml);
});
// Submit form logic
targetElement.find('.dropdown-menu').on('submit', '.action-form', function(e) {
e.preventDefault();
var $form = $(this);
var $submitButton = $form.find('input[type="submit"], button[type="submit"]'); // Locate the submit button
var $spinner = $form.find('#formSpinner');
// Serialize the form data to a JSON object
var formData = $form.serializeArray().reduce(function(obj, item) {
// Handle checkbox, translating to boolean
if ($form.find(`[name="${item.name}"]`).attr('type') === 'checkbox') {
obj[item.name] = item.value === 'on' ? true : false;
} else {
obj[item.name] = item.value;
}
//Number should be numbers, not strings
if ($form.find(`[name="${item.name}"]`).attr('type') === 'number') {
obj[item.name] = Number(item.value)
}
return obj;
}, {});
// original version without the boolean translation
//var formData = $(this).serializeJSON();
//if there is no batch_id, pull the rows from the selected runners
console.log("toto jsou formdata pred submitem", formData)
if (formData.batch_id == undefined) {
console.log("batch undefined")
rows = archiveRecords.rows('.selected');
console.log(rows)
if (rows == undefined || rows.data().length == 0) {
console.log("no selected rows")
alert("no selected rows or batch_id")
return
}
// Creating an array to store the IDs
formData.runner_ids = []
// Iterating over the selected rows to extract the IDs
rows.every(function (rowIdx, tableLoop, rowLoop ) {
var data = this.data()
formData.runner_ids.push(data.id);
});
}
//population of the object that is expected by the endpoint
obj = {}
if (formData.runner_ids) {
obj.runner_ids = formData.runner_ids
delete formData.runner_ids
}
if (formData.batch_id) {
obj.batch_id = formData.batch_id
delete formData.batch_id
}
obj.function = formData.function
delete formData.function
obj.params = {}
obj.params = formData
$submitButton.attr('disabled', true);
$spinner.removeClass('d-none');
console.log("toto jsou transformovana data", obj)
var apiEndpoint = $(this).data('endpoint');
// console.log("formdata", formData)
$.ajax({
url: apiEndpoint,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key', API_KEY);
},
method: 'POST',
//switch the response handling depending on success or failure, so the error message can be read
xhr: function() {
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function() {
if (xhr.readyState === 2) { // Headers have been received
if (xhr.status === 200) {
xhr.responseType = "blob"; // Set responseType to 'blob' for successful image responses
} else {
xhr.responseType = "text"; // Set responseType to 'text' for error messages
}
}
};
return xhr;
},
xhrFields: {
responseType: 'blob'
},
contentType: "application/json",
processData: false,
data: JSON.stringify(obj),
success: function(data, textStatus, xhr) {
if (xhr.getResponseHeader("Content-Type") === "image/png") {
// Process as Blob
var blob = new Blob([data], { type: 'image/png' });
var url = window.URL || window.webkitURL;
display_image(url.createObjectURL(blob));
} else {
// Process as JSON
console.log('Received JSON', data);
}
$submitButton.attr('disabled', false);
$spinner.addClass('d-none');
},
error: function(xhr, status, error) {
$spinner.addClass('d-none');
$submitButton.attr('disabled', false);
console.log(xhr, status, error)
console.log(xhr.responseJSON.message)
if (xhr.responseJSON && xhr.responseJSON.detail) {
console.log('Error:', xhr.responseJSON.detail);
window.alert(xhr.responseJSON.detail);
} else {
// Fallback error message
console.log('Error:', error);
window.alert('An unexpected error occurred');
}
}
});
console.log('Form submitted for', $(this).closest('.dropdown-item').text().trim());
});
//HANDLERS
//CLICKABLE VERSION (remove d-none from the action form)
// Attach click event to each dropdown item
// $('.dropdown-menu').on('click', '.dropdown-item', function(event) {
// event.stopPropagation(); // Stop the event from bubbling up
// var currentForm = $(this).find('.action-form');
// // Hide all other forms
// $('.action-form').not(currentForm).hide();
// // Toggle current form
// currentForm.toggle();
// });
// // Hide form when clicking outside
// $(document).on('click', function(event) {
// if (!$(event.target).closest('.dropdown-item').length) {
// $('.action-form').hide();
// }
// });
// // Prevent global click event from hiding form when clicking inside a form
// $('.dropdown-menu').on('click', '.action-form', function(event) {
// event.stopPropagation();
// });
//ON HOVER version (the d-none class must be added to the action form)
// Toggle visibility of form on hover
targetElement.find('.dropdown-menu').on('mouseenter', '.dropdown-item', function() {
$(this).find('.action-form').removeClass('d-none').show();
}).on('mouseleave', '.dropdown-item', function() {
setTimeout(() => {
if (!$('.action-form:hover').length) {
$(this).find('.action-form').addClass('d-none').hide();
}
}, 50);
});
// // Hide form when mouse leaves the form area
// targetElement.find('.dropdown-menu').on('mouseleave', '.action-form', function() {
// $(this).hide();
// });
// stop propagating click up
targetElement.find('.dropdown').on('click', function(event) {
// Stop the event from propagating to parent elements
event.stopPropagation();
});
// stop propagating click up
targetElement.find('.action-form').on('click', function(event) {
// Stop the event from propagating to parent elements
event.stopPropagation();
// Check if the clicked element or any of its parents is a submit button
if (!$(event.target).closest('input[type="submit"], button[type="submit"], input[type="checkbox"]').length) {
// Stop the event from propagating to parent elements
event.preventDefault();
}
});
}
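
Illustrative (hypothetical) shape of one "dynamic_buttons" config entry, inferred from how populate_dynamic_buttons and createFormInputs read it; every concrete value below is a guess, not taken from the repo.

var exampleButtonConfig = [{
    label: "Profit distribution",      // dropdown item text
    function: "profit_distribution",   // sent to the backend as obj.function
    apiEndpoint: "/analytics/run",     // stored in the form's data-endpoint
    additionalParameters: {
        bins:  { type: "number",  default: 20 },
        scope: { type: "select",  options: ["runner", "batch"], defval: "runner" },
        note:  { type: "string",  default: "" },
        gzip:  { type: "boolean", default: true }
    }
}];
// populate_dynamic_buttons($("#buttons-container"), exampleButtonConfig);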


@ -0,0 +1,140 @@
//equivalent of ready
$(function(){
// Toggle input fields based on the selected button
$('.main-btn, .dropdown-item').on('click', function(e) {
e.preventDefault();
var targetId = $(this).data('target');
// Hide all input groups
$('.input-group').hide();
// Show the corresponding input group
$(targetId).show();
});
// //load configu buttons
// loadConfig("dynamic_buttons").then(configData => {
// console.log("Config loaded for dynamic_buttons", configData);
// populate_dynamic_buttons(configData);
// }).catch(error => {
// console.error('Error loading config for', area, error);
// });
function populate_dynamic_buttons(buttonConfig) {
console.log("buttonConfig",buttonConfig)
buttonConfig.forEach(function(button) {
var modalId = 'modal-' + button.id;
var $btn = $('<button>', {
type: 'button',
class: 'btn btn-primary',
'data-bs-toggle': 'modal',
'data-bs-target': '#' + modalId,
text: button.label
});
// Create and append modal structure
var $modal = createModalStructure(button, modalId);
$('#buttons-container').append($btn).append($modal);
});
// Global event listener for modal form submission
$(document).on('submit', '.modal form', function(event) {
event.preventDefault();
var $form = $(this);
var formData = $form.serializeArray().reduce(function(obj, item) {
obj[item.name] = item.value;
return obj;
}, {});
var apiEndpoint = $form.data('api-endpoint');
$.ajax({
url: apiEndpoint,
method: 'POST',
data: formData,
success: function(response) {
console.log('API Call Successful:', response);
$form.closest('.modal').modal('hide');
},
error: function(error) {
console.error('API Call Failed:', error);
}
});
});
}
});
function createModalStructure(button, modalId) {
var $modal = $('<div>', {
class: 'modal fade',
id: modalId,
tabindex: '-1',
'aria-labelledby': modalId + 'Label',
'aria-hidden': 'true'
});
var $modalDialog = $('<div>', {class: 'modal-dialog'});
var $modalContent = $('<div>', {class: 'modal-content'});
var $modalHeader = $('<div>', {class: 'modal-header'});
$modalHeader.append($('<h5>', {
class: 'modal-title',
id: modalId + 'Label',
text: button.label
}));
$modalHeader.append($('<button>', {
type: 'button',
class: 'btn-close',
'data-bs-dismiss': 'modal',
'aria-label': 'Close'
}));
var $modalBody = $('<div>', {class: 'modal-body'});
var $form = $('<form>', {
'data-api-endpoint': button.apiEndpoint
});
// Handling additional parameters
for (var key in button.additionalParameters) {
var param = button.additionalParameters[key];
var $formGroup = $('<div>', {class: 'mb-3'});
if (param.type === 'select') {
var $label = $('<label>', {class: 'form-label', text: key});
var $select = $('<select>', {class: 'form-select', name: key});
param.options.forEach(function(option) {
$select.append($('<option>', {value: option, text: option}));
});
$formGroup.append($label).append($select);
} else {
$formGroup.append($('<label>', {class: 'form-label', text: key}));
$formGroup.append($('<input>', {
type: param.type === 'number' ? 'number' : 'text',
class: 'form-control',
name: key,
placeholder: key
}));
}
$form.append($formGroup);
}
var $modalFooter = $('<div>', {class: 'modal-footer'});
$modalFooter.append($('<button>', {
type: 'submit',
class: 'btn btn-primary',
text: 'Submit'
}));
$modalBody.append($form);
$modalContent.append($modalHeader).append($modalBody).append($modalFooter);
$modalDialog.append($modalContent);
$modal.append($modalDialog);
return $modal;
}


@ -56,13 +56,16 @@ $(document).ready(function () {
if (archData.indicators[0][indname]) {
delete archData.indicators[0][indname]
}
else if (archData.indicators[1][indname]) {
delete archData.indicators[1][indname]
}
//delete addedInds[indname]
//get active resolution
const element = document.querySelector('.switcher-active-item');
resolution = element.textContent
//console.log("aktivni rozliseni", resolution)
switch_to_interval(resolution, archData)
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
@ -142,11 +145,35 @@ $(document).ready(function () {
success:function(data){
//the update/create code is the same here - we only update the source dictionary
window.$('#indicatorModal').modal('hide');
console.log("navrat",data)
//indName = $('#indicatorName').val()
//update/create the key in the global variable holding all arch data
//TBD will not work once multiple charts are open - rework
//to support multioutput, the response is indname:timeseries,
//as a list for indicators [0] or cbar_indicators [1]
if (Object.keys(data[0]).length > 0) {
for (let key in data[0]) {
if (data[0].hasOwnProperty(key)) {
archData.indicators[0][key] = data[0][key]
console.log("barind updatovan " + key)
//console.log(data[0][key]);
}
}
//archData.indicators[0][indName] = data[0]
} else if (Object.keys(data[1]).length > 0) {
for (let key in data[1]) {
if (data[1].hasOwnProperty(key)) {
archData.indicators[1][key] = data[1][key]
console.log("cbarind updatovan " + key)
//console.log(data[1][key]);
}
}
//archData.indicators[1][indName] = data[1]
}
else {
alert("something is wrong with the response: " + JSON.stringify(data))
}
//add it or update it in ext_data as needed
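
Illustrative (hypothetical) shape of the response consumed above: a two-element list, index 0 for bar-based indicators, index 1 for tick/cbar-based ones, each mapping an indicator name to its time series.

var exampleResponse = [
    { "ema_fast": { time: [1709735400, 1709735460], value: [187.2, 187.4] } }, // bar indicators
    {}                                                                         // cbar indicators - empty here
];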


@ -2,4 +2,4 @@
*
* ©2020 SpryMedia Ltd, all rights reserved.
* License: MIT datatables.net/license/mit
*/table.dataTable{clear:both;margin-top:6px !important;margin-bottom:6px !important;max-width:none !important;border-collapse:separate !important;border-spacing:0}table.dataTable td,table.dataTable th{-webkit-box-sizing:content-box;box-sizing:content-box}table.dataTable td.dataTables_empty,table.dataTable th.dataTables_empty{text-align:center}table.dataTable.nowrap th,table.dataTable.nowrap td{white-space:nowrap}/*table.dataTable.table-striped>tbody>tr:nth-of-type(2n+1)>*{box-shadow:none}*/table.dataTable>tbody>tr{background-color:transparent}table.dataTable>tbody>tr.selected>*{box-shadow:inset 0 0 0 9999px rgb(13, 110, 253);box-shadow:inset 0 0 0 9999px rgb(var(--dt-row-selected));color:rgb(255, 255, 255);color:rgb(var(--dt-row-selected-text))}table.dataTable>tbody>tr.selected a{color:rgb(9, 10, 11);color:rgb(var(--dt-row-selected-link))}table.dataTable.table-striped>tbody>tr.odd>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.05)}table.dataTable.table-striped>tbody>tr.odd.selected>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.95);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.95)}table.dataTable.table-hover>tbody>tr:hover>*{box-shadow:inset 0 0 0 9999px rgba(0, 0, 0, 0.075)}table.dataTable.table-hover>tbody>tr.selected:hover>*{box-shadow:inset 0 0 0 9999px rgba(13, 110, 253, 0.975);box-shadow:inset 0 0 0 9999px rgba(var(--dt-row-selected), 0.975)}div.dataTables_wrapper div.dataTables_length label{font-weight:normal;text-align:left;white-space:nowrap}div.dataTables_wrapper div.dataTables_length select{width:auto;display:inline-block}div.dataTables_wrapper div.dataTables_filter{text-align:right}div.dataTables_wrapper div.dataTables_filter label{font-weight:normal;white-space:nowrap;text-align:left}div.dataTables_wrapper div.dataTables_filter input{margin-left:.5em;display:inline-block;width:auto}div.dataTables_wrapper div.dataTables_info{padding-top:.85em}div.dataTables_wrapper div.dataTables_paginate{margin:0;white-space:nowrap;text-align:right}div.dataTables_wrapper div.dataTables_paginate ul.pagination{margin:2px 0;white-space:nowrap;justify-content:flex-end}div.dataTables_wrapper div.dt-row{position:relative}div.dataTables_scrollHead table.dataTable{margin-bottom:0 !important}div.dataTables_scrollBody>table{border-top:none;margin-top:0 !important;margin-bottom:0 !important}div.dataTables_scrollBody>table>thead .sorting:before,div.dataTables_scrollBody>table>thead .sorting_asc:before,div.dataTables_scrollBody>table>thead .sorting_desc:before,div.dataTables_scrollBody>table>thead .sorting:after,div.dataTables_scrollBody>table>thead .sorting_asc:after,div.dataTables_scrollBody>table>thead .sorting_desc:after{display:none}div.dataTables_scrollBody>table>tbody tr:first-child th,div.dataTables_scrollBody>table>tbody tr:first-child td{border-top:none}div.dataTables_scrollFoot>.dataTables_scrollFootInner{box-sizing:content-box}div.dataTables_scrollFoot>.dataTables_scrollFootInner>table{margin-top:0 !important;border-top:none}@media screen and (max-width: 767px){div.dataTables_wrapper div.dataTables_length,div.dataTables_wrapper div.dataTables_filter,div.dataTables_wrapper div.dataTables_info,div.dataTables_wrapper div.dataTables_paginate{text-align:center}div.dataTables_wrapper div.dataTables_paginate ul.pagination{justify-content:center !important}}table.dataTable.table-sm>thead>tr>th:not(.sorting_disabled){padding-right:20px}table.table-bordered.dataTable{border-right-width:0}table.table-bordered.dataTable thead tr:first-child th,table.table-bordered.dataTable thead tr:first-child td{border-top-width:1px}table.table-bordered.dataTable th,table.table-bordered.dataTable td{border-left-width:0}table.table-bordered.dataTable th:first-child,table.table-bordered.dataTable th:first-child,table.table-bordered.dataTable td:first-child,table.table-bordered.dataTable td:first-child{border-left-width:1px}table.table-bordered.dataTable th:last-child,table.table-bordered.dataTable th:last-child,table.table-bordered.dataTable td:last-child,table.table-bordered.dataTable td:last-child{border-right-width:1px}table.table-bordered.dataTable th,table.table-bordered.dataTable td{border-bottom-width:1px}div.dataTables_scrollHead table.table-bordered{border-bottom-width:0}div.table-responsive>div.dataTables_wrapper>div.row{margin:0}div.table-responsive>div.dataTables_wrapper>div.row>div[class^=col-]:first-child{padding-left:0}div.table-responsive>div.dataTables_wrapper>div.row>div[class^=col-]:last-child{padding-right:0}

File diff suppressed because one or more lines are too long

v2realbot/static/js/ml.js (new file, 227 lines)

@ -0,0 +1,227 @@
//ML Model GUI section
let model_editor_json
let model_editor_python
$(document).ready(function() {
function fetchModels() {
$.ajax({
url: '/model/list-models',
type: 'GET',
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
success: function(response) {
$('#model-list').empty();
if(response.error) {
$('#model-list').html('Error: ' + response.error);
} else {
const models = response.models;
models.forEach(function(model) {
$('#model-list').append(`
<p>${model}
<span class="inspect-model" data-model="${model}">[🔍]</span>
<span class="download-model" data-model="${model}">[↓]</span>
<span class="delete-model" data-model="${model}">[x]</span>
</p>
`);
});
}
},
error: function(xhr, status, error) {
$('#model-list').html('An error occurred: ' + error + xhr.responseText + status);
}
});
}
function deleteModel(modelName) {
$.ajax({
url: '/model/delete-model/' + modelName,
type: 'DELETE',
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
success: function(response) {
fetchModels(); // Refresh the list after deletion
},
error: function(xhr, status, error) {
alert('Error deleting model: ' + error + xhr.responseText + status);
}
});
}
function uploadModel(formData) {
$.ajax({
url: '/model/upload-model',
type: 'POST',
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
data: formData,
processData: false,
contentType: false,
success: function(response) {
fetchModels(); // Refresh the list after uploading
alert('Model uploaded successfully');
},
error: function(xhr, status, error) {
alert('Error uploading model: ' + error + xhr.responseText + status);
}
});
}
// function downloadModel(modelName) {
// $.ajax({
// url: '/model/download-model/' + modelName,
// type: 'GET',
// processData: false,
// contentType: false,
// responseType: 'blob', // This is important
// beforeSend: function (xhr) {
// xhr.setRequestHeader('X-API-Key', API_KEY);
// },
// success: function(data, status, xhr) {
// // Get a URL for the blob to download
// var blob = new Blob([data], { type: 'application/octet-stream' });
// //var blob = new Blob([data], { type: xhr.getResponseHeader('Content-Type') });
// var downloadUrl = URL.createObjectURL(blob);
// var a = document.createElement('a');
// a.href = downloadUrl;
// a.download = modelName;
// document.body.appendChild(a);
// a.click();
// // Clean up
// window.URL.revokeObjectURL(downloadUrl);
// a.remove();
// },
// error: function(xhr, status, error) {
// alert('Error downloading model: ' + error + xhr.responseText + status);
// }
// });
// }
function downloadModel(modelName) {
fetch('/model/download-model/' + modelName, {
method: 'GET', // GET is the default method, but it's good to be explicit
headers: {
'X-API-Key': API_KEY
}
})
.then(response => {
if (response.ok) return response.blob();
throw new Error('Network response was not ok.');
})
.then(blob => {
// Check the size of the blob here; it should match the Content-Length from the server
console.log('Size of downloaded blob:', blob.size);
// Create a link element, use it for download, and remove it
let url = window.URL.createObjectURL(blob);
let a = document.createElement('a');
a.style.display = 'none';
a.href = url;
a.download = modelName;
document.body.appendChild(a);
a.click();
window.setTimeout(() => {
document.body.removeChild(a);
window.URL.revokeObjectURL(url);
}, 100); // Cleanup after a small delay
})
.catch(error => {
console.error('Download error:', error);
});
}
// Function to fetch metadata
function fetchMetadata(modelName) {
$.ajax({
url: '/model/metadata/' + modelName,
type: 'GET',
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key', API_KEY);
},
success: function(response) {
$('#metadata-container-info').html("");
show_metadata(response, modelName)
},
error: function(xhr, status, error) {
$('#metadata-container-info').html('Error fetching metadata: ' + error + xhr.responseText + status);
show_metadata(xhr, modelName, true)
}
});
}
function show_metadata(response, name, error = false) {
// var formattedMetadata = '<pre>cfg:' + JSON.stringify(response.cfg, null, 4) + '</pre>';
// formattedMetadata += '<pre>arch_function:' + response.arch_function + '</pre>';
// $('#metadata-container').html(formattedMetadata);
//console.log(response)
console.log(JSON.stringify(response,null,4))
$('#metadata_label').html(name);
if (!error) {
console.log("init editoru", error)
require(["vs/editor/editor.main"], () => {
model_editor_json = monaco.editor.create(document.getElementById('toml-editor-container'), {
value: response.cfg_toml ? response.cfg_toml + ((response.history) ? "\nHISTORY:\n" + JSON.stringify(response.history,null,4) : "") : JSON.stringify(response,null,4),
language: 'toml',
theme: 'tomlTheme-dark',
automaticLayout: true,
readOnly: true
});
model_editor_python = monaco.editor.create(document.getElementById('python-editor-container'), {
value: response.arch_function ? response.arch_function : '',
language: 'python',
theme: 'tomlTheme-dark',
automaticLayout: true,
readOnly: true
});
});
}
}
// Fetch models on page load
fetchModels();
// Refresh models on button click
$('#ml-refresh-button').click(function() {
fetchModels();
});
$('#model-list').on('click', '.delete-model', function() {
const modelName = $(this).data('model');
if (confirm('Are you sure you want to delete ' + modelName + '?')) {
deleteModel(modelName);
}
});
$('#upload-form').submit(function(e) {
e.preventDefault();
var formData = new FormData(this);
if (!$('#model-file')[0].files[0]) {
console.log("prazdne")
alert("No file selected.")
return
}
formData.append('file', $('#model-file')[0].files[0]); // Make sure 'file' matches the FastAPI parameter
uploadModel(formData);
});
// Event handler for the inspect icon
$('#model-list').on('click', '.inspect-model', function() {
if (model_editor_json) {model_editor_json.dispose()}
if (model_editor_python) {model_editor_python.dispose()}
const modelName = $(this).data('model');
fetchMetadata(modelName);
window.$('#modelModal').modal('show');
});
//Handler to download the model
$('#model-list').on('click', '.download-model', function() {
const modelName = $(this).data('model');
downloadModel(modelName);
});
});


@ -88,6 +88,8 @@ $(document).ready(function () {
require(["vs/editor/editor.main"], () => { require(["vs/editor/editor.main"], () => {
monaco.languages.register({ id: 'python' });
monaco.languages.register({ id: 'json' });
// Register the TOML language
monaco.languages.register({ id: 'toml' });
@ -302,6 +304,7 @@ $(document).ready(function () {
 runnerRecords.ajax.reload();
 stratinRecords.ajax.reload();
 archiveRecords.ajax.reload();
+disable_arch_buttons();
 })
 //button copy
@ -618,7 +621,6 @@ $(document).ready(function () {
 })
 });
-//button run
 $('#button_run').click(function () {
 row = stratinRecords.row('.selected').data();
@ -633,7 +635,7 @@ $(document).ready(function () {
 $('#bt_to').val(localStorage.getItem("bt_to"));
 //console.log(localStorage.getItem("bt_to"))
 $('#test_batch_id').val(localStorage.getItem("test_batch_id"));
-$('#mode').val(localStorage.getItem("mode"));
+$('#runmode').val(localStorage.getItem("runmode"));
 $('#account').val(localStorage.getItem("account"));
 $('#debug').val(localStorage.getItem("debug"));
 $('#ilog_save').val(localStorage.getItem("ilog_save"));
@ -927,18 +929,41 @@ var runnerRecords =
 ],
 paging: false,
 processing: false,
-columnDefs: [ {
-targets: [0,1],
+columnDefs: [
+{
+targets: [0],
 render: function ( data, type, row ) {
 return '<div class="tdnowrap" title="'+data+'">'+data+'</i>'
 },
 },
+{
+targets: 1,
+render: function ( data, type, row ) {
+if (type === 'display') {
+//console.log("arch")
+var color = getColorForId(data);
+return '<div class="tdnowrap" data-bs-toggle="tooltip" data-bs-placement="top" title="'+data+'"><span class="color-tag" style="background-color:' + color + ';"></span>'+data+'</div>';
+}
+return data;
+},
+},
 {
 targets: [2],
 render: function ( data, type, row ) {
 return format_date(data)
 },
 },
+{
+targets: [4], //symbol
+render: function ( data, type, row ) {
+if (type === 'display') {
+//console.log("arch")
+var color = getColorForId(row.strat_id);
+return '<span style="color:' + color + ';">'+data+'</span>';
+}
+return data;
+},
+},
 ],
 // select: {
 // style: 'multi'
@ -950,7 +975,9 @@ $("#runModal").on('submit','#runForm', function(event){
 localStorage.setItem("bt_from", $('#bt_from').val());
 localStorage.setItem("bt_to", $('#bt_to').val());
 localStorage.setItem("test_batch_id", $('#test_batch_id').val());
-localStorage.setItem("mode", $('#mode').val());
+localStorage.setItem("runmode", $('#runmode').val());
+console.log("mode set to", $('#runmode').val())
+console.log("mode loaded value", localStorage.getItem("runmode"))
 localStorage.setItem("account", $('#account').val());
 localStorage.setItem("debug", $('#debug').val());
 localStorage.setItem("ilog_save", $('#ilog_save').val());


@ -0,0 +1,581 @@
//functions and variables specific to archiveTable
//usually work with archiveRecords
//ARCHIVE TABLES
let editor_diff_arch1
let editor_diff_arch2
var archData = null
var batchHeaders = []
function refresh_arch_and_callback(row, callback) {
//console.log("entering refresh")
var request = $.ajax({
url: "/archived_runners/"+row.id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(data){
//console.log("fetched data ok")
//console.log(JSON.stringify(data,null,2));
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
}
});
// Handle the response of the request
$.when(request).then(function(response) {
// The request completed successfully
//console.log("Result from request:", response);
//console.log("Response received. calling callback")
//call callback function
callback(response)
}, function(error) {
// Handle errors from either request here
// Example:
console.error("Error from first request:", error);
console.log("requesting id error")
});
}
//triggers charting
function get_detail_and_chart(row) {
$.ajax({
url:"/archived_runners_detail/"+row.id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(data){
$('#button_show_arch').attr('disabled',false);
$('#chartContainerInner').addClass("show");
//$("#lines").html("<pre>"+JSON.stringify(row.stratvars,null,2)+"</pre>")
//$('#chartArchive').append(JSON.stringify(data,null,2));
//console.log(JSON.stringify(data,null,2));
//if lower res is required call prepare_data otherwise call chart_archived_run()
//get other base resolutions
// console.log("received detail", data)
// data = JSON.parse(data)
// console.log("parsed detail", data)
prepare_data(row, 1, "Min", data)
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
//console.log(JSON.stringify(xhr));
$('#button_show_arch').attr('disabled',false);
}
})
}
//rerun stratin (use to rerun strategy and also to rerun live/paper as bt on same period)
function run_day_again(turnintobt=false) {
row = archiveRecords.row('.selected').data();
var button_name = turnintobt ? '#button_runbt_arch' : '#button_runagain_arch'
$(button_name).attr('disabled',true)
var record1 = new Object()
//console.log(JSON.stringify(rows))
//record1 = JSON.parse(rows[0].strat_json)
//record1.json = rows[0].json
//TBD maybe copy only certain fields?
//getting required data (detail of the archived runner + stratin to be run)
var request1 = $.ajax({
url: "/archived_runners/"+row.id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(data){
//console.log("fetched data ok")
//console.log(JSON.stringify(data,null,2));
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
}
});
//load the data for the strategy
var request2 = $.ajax({
url: "/stratins/"+row.strat_id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(data){
//console.log("fetched data ok")
//console.log(JSON.stringify(data,null,2));
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
}
});
// Handling the responses of both requests
$.when(request1, request2).then(function(response1, response2) {
// Both requests have completed successfully
var result1 = response1[0];
var result2 = response2[0];
//console.log("Result from first request:", result1);
//console.log("Result from second request:", result2);
//console.log("calling compare")
rerun_strategy(result1, result2, turnintobt)
// Perform your action with the results from both requests
// Example:
}, function(error1, error2) {
// Handle errors from either request here
// Example:
console.error("Error from first request:", error1);
console.error("Error from second request:", error2);
});
function rerun_strategy(archRunner, stratData, turnintobt) {
record1 = archRunner
//console.log(record1)
var note_prefix = "RERUN "
if ((turnintobt) && ((record1.mode == 'live') || (record1.mode == 'paper'))) {
record1.mode = 'backtest'
record1.bt_from = record1.started
record1.bt_to = record1.stopped
note_prefix = "BT SAME PERIOD "
}
record1.note = note_prefix + record1.note
//so we will not have to remove fields every time a new attribute is added in the future
//delete what is not needed and add what is
//in the future, refactor this to build a new object instead
delete record1["end_positions"];
delete record1["end_positions_avgp"];
delete record1["profit"];
delete record1["trade_count"];
delete record1["stratvars_toml"];
delete record1["started"];
delete record1["stopped"];
delete record1["metrics"];
delete record1["settings"];
delete record1["stratvars"];
if (record1.bt_from == "") {delete record1["bt_from"];}
if (record1.bt_to == "") {delete record1["bt_to"];}
//delete these - we are only rerunning a single run
delete record1["test_batch_id"];
delete record1["batch_id"];
const rec = new Object()
rec.id2 = parseInt(stratData.id2);
rec.name = stratData.name;
rec.symbol = stratData.symbol;
rec.class_name = stratData.class_name;
rec.script = stratData.script;
rec.open_rush = stratData.open_rush;
rec.close_rush = stratData.close_rush;
rec.stratvars_conf = stratData.stratvars_conf;
rec.add_data_conf = stratData.add_data_conf;
rec.note = stratData.note;
rec.history = "";
strat_json = JSON.stringify(rec, null, 2);
record1.strat_json = strat_json
//copy strat_id into id and delete strat_id
record1.id = record1.strat_id
delete record1["strat_id"];
//console.log("record1 pred odeslanim", record1)
jsonString = JSON.stringify(record1);
$.ajax({
url:"/stratins/"+record1.id+"/run",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"PUT",
contentType: "application/json",
data: jsonString,
success:function(data){
$(button_name).attr('disabled',false);
setTimeout(function () {
runnerRecords.ajax.reload();
stratinRecords.ajax.reload();
}, 1500);
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
//console.log(JSON.stringify(xhr));
$(button_name).attr('disabled',false);
}
})
}
}
function expand_collapse_rows(event) {
event.stopPropagation()
var headerRow = $(this);
var name = headerRow.data('name');
var collapsed = headerRow.hasClass('collapsed');
// Toggle the expand icon name
var expandIcon = headerRow.find('.expand-icon');
if (collapsed) {
expandIcon.text('expand_less');
} else {
expandIcon.text('expand_more');
}
headerRow.toggleClass('collapsed');
archiveRecords.rows().every(function () {
var row = $(this.node());
var rowGroup = row.attr('data-group-name');
if (rowGroup == name) {
row.toggle();
}
});
// Save the state
if (collapsed) {
localStorage.setItem('dt-group-state-' + name, 'expanded');
} else {
localStorage.setItem('dt-group-state-' + name, 'collapsed');
}
}
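// A minimal sketch of the state this handler persists and later reads back during
// rendering (batch id "abc123" is hypothetical):
// localStorage.getItem('dt-group-state-abc123') // -> 'expanded', 'collapsed', or null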
function delete_batch(event){
event.preventDefault();
batch_id = $('#batch_id_del').val();
$('#deletebatch').attr('disabled', 'disabled');
$.ajax({
url:"/archived_runners/batch/"+batch_id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"DELETE",
contentType: "application/json",
dataType: "json",
data: JSON.stringify(batch_id),
success:function(data){
$('#delFormBatch')[0].reset();
window.$('#delModalBatch').modal('hide');
$('#deletebatch').attr('disabled', false);
$('#button_delete_batch').attr('disabled', false);
//console.log(data)
archiveRecords.ajax.reload();
disable_arch_buttons();
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#deletebatch').attr('disabled', false);
$('#button_delete_batch').attr('disabled', false);
archiveRecords.ajax.reload();
disable_arch_buttons();
}
})
}
function analyze_optimal_cutoff(batch_id = null) {
//parameter definition
param_obj = { rem_outliers:false, steps:50}
obj = {function: "analyze_optimal_cutoff", runner_ids:[], batch_id:null, params:param_obj}
//use either the selected runners
if (!batch_id) {
rows = archiveRecords.rows('.selected').data();
if (rows == undefined) {
return
}
$('#button_analyze').attr('disabled','disabled');
// Extract IDs from each row's data and store them in an array
obj.runner_ids = [];
for (var i = 0; i < rows.length; i++) {
obj.runner_ids.push(rows[i].id); // Assuming 'id' is the property that contains the row ID
}
}
//or the whole batch
else {
obj.batch_id = batch_id
}
console.log("analyze cutoff objekt", obj)
// batch_id: Optional[str] = None
// runner_ids: Optional[List[UUID]] = None
// #additional parameter
// params: Optional[dict] = None
$.ajax({
url:"/batches/optimizecutoff/",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"POST",
xhrFields: {
responseType: 'blob'
},
xhr: function() {
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function() {
if (xhr.readyState === 2) { // Headers have been received
if (xhr.status === 200) {
xhr.responseType = "blob"; // Set responseType to 'blob' for successful image responses
} else {
xhr.responseType = "text"; // Set responseType to 'text' for error messages
}
}
};
return xhr;
},
contentType: "application/json",
processData: false,
data: JSON.stringify(obj),
success:function(blob){
var url = window.URL || window.webkitURL;
console.log("vraceny obraz", blob)
console.log("url",url.createObjectURL(blob))
display_image(url.createObjectURL(blob))
if (!batch_id) {
$('#button_analyze').attr('disabled',false);
}
},
error: function(xhr, status, error) {
console.log("proc to skace do erroru?")
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#button_analyze').attr('disabled',false);
if (!batch_id) {
$('#button_analyze').attr('disabled',false);
}
}
})
}
//helper function that returns filtered table rows (either the selected ones or those of a batch)
function get_selected_or_batch(batch_id = null) {
if (!batch_id) {
rows = archiveRecords.rows('.selected');
} else {
rows = archiveRecords.rows( function ( idx, data, node ) {
return data.batch_id == batch_id;
});
//console.log("batch rows",batch_id, rows)
}
return rows
}
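// Usage sketch (batch id hypothetical): both calls return a DataTables rows() API
// object, so callers chain .data() or .every() on the result:
// var selRows = get_selected_or_batch(); // currently selected rows
// var batchRows = get_selected_or_batch("abc123"); // all rows with batch_id == "abc123"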
//prepares export data, either for selected rows or based on batch_id
function prepare_export(batch_id = null) {
rows = get_selected_or_batch(batch_id)
var trdList = []
if(rows.data().length > 0 ) {
//console.log(rows.data())
// Loop through the selected rows and display an alert with each row's ID
rows.every(function (rowIdx, tableLoop, rowLoop ) {
var data = this.data()
data.metrics.prescr_trades.forEach((trade) => {
new_obj = {}
new_obj["entry_time"] = (trade.entry_time) ? new Date(trade.entry_time * 1000) : null
new_obj["entry_time"] = (new_obj["entry_time"]) ? new_obj["entry_time"].toLocaleString('cs-CZ', {
timeZone: 'America/New_York',
}) : null
new_obj["exit_time"] = (trade.exit_time) ? new Date(trade.exit_time * 1000):null
new_obj["exit_time"] = (new_obj["exit_time"]) ? new_obj["exit_time"].toLocaleString('cs-CZ', {
timeZone: 'America/New_York',
}) : null
new_obj["direction"] = trade.direction
new_obj["profit"] = trade.profit
new_obj["rel_profit"] = trade.rel_profit
trdList.push(new_obj)
})
});
}
return trdList
}
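// Shape of one exported row as built above (values illustrative): epoch seconds are
// converted to the America/New_York market time zone before export, e.g.
// { entry_time: "26. 2. 2024 9:31:05", exit_time: "26. 2. 2024 9:45:12",
//   direction: "long", profit: 12.5, rel_profit: 0.4 }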
function download_exported_data(type, batch_id = null) {
filename = batch_id ? "batch"+batch_id+"-trades" : "trades"
if (type == "xml") {
response_type = "application/xml"
output = convertToXml(prepare_export(batch_id))
}
else {
response_type = "text/csv"
output = convertToCsv(prepare_export(batch_id))
}
console.log(output)
downloadFile(response_type,type, filename, output)
}
function display_image(imageUrl) {
// Attempt to load the image
var img = new Image();
img.src = imageUrl;
img.onload = function() {
// If the image loads successfully, display it
$('#previewImg').attr('src', imageUrl);
//$('#imagePreview').show();
window.$('#imageModal').modal('show');
};
img.onerror = function(e) {
console.log("Image load error", e);
console.log("Image object:", img);
console.log("no image available")
// If the image fails to load, do nothing
};
}
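// Usage sketch: the modal opens only when the image actually loads, so a missing
// report fails silently, e.g. display_image('/media/basic/' + batch_id + '.png')
// as done in display_batch_report below.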
function display_batch_report(batch_id) {
//var imageUrl = '/media/report_'+data.id+".png"; // Replace with your logic to get image URL
var imageUrl = '/media/basic/'+batch_id+'.png'; // Replace with your logic to get image URL
//console.log(imageUrl)
display_image(imageUrl)
}
function refresh_logfile() {
logfile = $("#logFileSelect").val()
lines = 1200
$.ajax({
url:"/log?lines="+lines+"&logfile="+logfile,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(response){
if (response.lines.length == 0) {
$('#log-content').html("no records");
}
else {
var escapedLines = response.lines.map(line => escapeHtml(line));
$('#log-content').html(escapedLines.join('\n'));
}
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
}
})
}
function escapeHtml(text) {
return text
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&#039;");
}
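// Example: escapeHtml('<b>"log" & \'more\'</b>')
// -> '&lt;b&gt;&quot;log&quot; &amp; &#039;more&#039;&lt;/b&gt;'
// Ampersands are replaced first so the entities inserted by the later replacements
// are not themselves re-escaped.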
function delete_arch_rows(ids) {
$.ajax({
url:"/archived_runners/",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"DELETE",
contentType: "application/json",
dataType: "json",
data: JSON.stringify(ids),
success:function(data){
$('#delFormArchive')[0].reset();
window.$('#delModalArchive').modal('hide');
$('#deletearchive').attr('disabled', false);
//console.log(data)
archiveRecords.ajax.reload();
disable_arch_buttons()
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#deletearchive').attr('disabled', false);
//archiveRecords.ajax.reload();
}
})
}
function extractNumbersFromString(str) {
// Regular expression to match the pattern #number1/number2
const pattern = /#(\d+)\/(\d+)/;
const match = str.match(pattern);
if (match) {
// Extract number1 and number2 from the match
const number1 = parseInt(match[1], 10);
const number2 = parseInt(match[2], 10);
//return { number1, number2 };
return number2;
} else {
return null;
}
}
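// Example (note string hypothetical): extractNumbersFromString("#3/20 N: test") -> 20,
// i.e. the total from a "#done/total" marker; returns null when no such marker exists.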
// Function to generate a unique key for localStorage based on batch_id
function generateStorageKey(batchId) {
return 'dt-group-state-' + batchId;
}
function disable_arch_buttons() {
//disable buttons (enable on row selection)
$('#button_runagain_arch').attr('disabled','disabled');
$('#button_runbt_arch').attr('disabled','disabled');
$('#button_show_arch').attr('disabled','disabled');
$('#button_delete_arch').attr('disabled','disabled');
$('#button_delete_batch').attr('disabled','disabled');
$('#button_analyze').attr('disabled','disabled');
$('#button_edit_arch').attr('disabled','disabled');
$('#button_compare_arch').attr('disabled','disabled');
$('#button_report').attr('disabled','disabled');
$('#button_export_xml').attr('disabled','disabled');
$('#button_export_csv').attr('disabled','disabled');
}
function enable_arch_buttons() {
$('#button_analyze').attr('disabled',false);
$('#button_show_arch').attr('disabled',false);
$('#button_runagain_arch').attr('disabled',false);
$('#button_delete_arch').attr('disabled',false);
$('#button_delete_batch').attr('disabled',false);
$('#button_edit_arch').attr('disabled',false);
$('#button_compare_arch').attr('disabled',false);
$('#button_report').attr('disabled',false);
$('#button_export_xml').attr('disabled',false);
$('#button_export_csv').attr('disabled',false);
//Backtest same period button is displayed only when row with mode paper/live is selected
row = archiveRecords.row('.selected').data();
if ((row.mode == 'paper') || (row.mode == 'live')) {
$('#button_runbt_arch').attr('disabled',false);
}
}


@ -0,0 +1,487 @@
//event handlers for archiveTables
$(document).ready(function () {
initialize_archiveRecords();
archiveRecords.ajax.reload();
disable_arch_buttons();
// Use 'td:nth-child(2)' to target the second column
$('#archiveTable tbody').on('click', 'td:nth-child(2)', function () {
var data = archiveRecords.row(this).data();
//var imageUrl = '/media/report_'+data.id+".png"; // Replace with your logic to get image URL
var imageUrl = '/media/basic/'+data.id+'.png'; // Replace with your logic to get image URL
//console.log(imageUrl)
display_image(imageUrl)
});
// Use 'td:nth-child(18)' to target the batch_id column
$('#archiveTable tbody').on('click', 'td:nth-child(18)', function () {
var data = archiveRecords.row(this).data();
if (data.batch_id) {
display_batch_report(data.batch_id)
}
});
//selectable rows in archive table
$('#archiveTable tbody').on('click', 'tr[data-group-name]', function () {
if ($(this).hasClass('selected')) {
//$(this).removeClass('selected');
//add a condition here so disable is called only when no other tr[data-group-name] row is selected
// Check if there are no other selected rows before disabling buttons
if ($('#archiveTable tr[data-group-name].selected').length === 1) {
disable_arch_buttons();
}
//disable_arch_buttons()
} else {
//archiveRecords.$('tr.selected').removeClass('selected');
$(this).addClass('selected');
enable_arch_buttons()
}
});
//TOOL BUTTONs on BATCH HEADER
// Event listener for click to display batch report
$('#archiveTable tbody').on('click', 'tr.group-header #batchtool_report_button', function (event) {
event.stopPropagation();
// Get the parent <tr> element
var parentTr = $(this).closest('tr');
// Retrieve the 'data-name' attribute from the parent <tr>
var batch_id = parentTr.data('name');
display_batch_report(batch_id)
});
// Event listener for click to delete batch
$('#archiveTable tbody').on('click', 'tr.group-header #batchtool_delete_button', function (event) {
event.stopPropagation();
// Get the parent <tr> element
var parentTr = $(this).closest('tr');
// Retrieve the 'data-name' attribute from the parent <tr>
var batch_id = parentTr.data('name');
$('#batch_id_del').val(batch_id);
$('#listofids').html("");
window.$('#delModalBatch').modal('show');
});
// Event listener for click to xml export batch
$('#archiveTable tbody').on('click', 'tr.group-header #batchtool_exportxml_button', function (event) {
event.stopPropagation();
// Get the parent <tr> element
var parentTr = $(this).closest('tr');
// Retrieve the 'data-name' attribute from the parent <tr>
var batch_id = parentTr.data('name');
download_exported_data("xml", batch_id);
});
// Event listener for click to csv export batch
$('#archiveTable tbody').on('click', 'tr.group-header #batchtool_exportcsv_button', function (event) {
event.stopPropagation();
// Get the parent <tr> element
var parentTr = $(this).closest('tr');
// Retrieve the 'data-name' attribute from the parent <tr>
var batch_id = parentTr.data('name');
console.log(batch_id)
download_exported_data("csv", batch_id);
});
// Event listener for optimal batch cutoff
$('#archiveTable tbody').on('click', 'tr.group-header #batchtool_cutoff_button', function (event) {
event.stopPropagation();
// Get the parent <tr> element
var parentTr = $(this).closest('tr');
// Retrieve the 'data-name' attribute from the parent <tr>
var batch_id = parentTr.data('name');
console.log(batch_id)
analyze_optimal_cutoff(batch_id)
});
//TOOL BUTTONs above the TABLE - for selected days
//button export
$('#button_export_xml').click(function(event) {
download_exported_data("xml");
});
//button export
$('#button_export_csv').click(function(event) {
download_exported_data("csv");
});
//button select page
$('#button_selpage').click(function () {
if ($('#button_selpage').hasClass('active')) {
$('#button_selpage').removeClass('active');
archiveRecords.rows().deselect();
disable_arch_buttons();
}
else {
$('#button_selpage').addClass('active');
archiveRecords.rows( { page: 'current' } ).select();
enable_arch_buttons();
}
});
//button clear log
$('#button_clearlog').click(function () {
$('#lines').empty();
});
//button compare arch
$('#button_compare_arch').click(function (event) {
if (editor_diff_arch1) {editor_diff_arch1.dispose()}
if (editor_diff_stratin1) {editor_diff_stratin1.dispose()}
if (editor_diff_arch2) {editor_diff_arch2.dispose()}
if (editor_diff_stratin2) {editor_diff_stratin2.dispose()}
window.$('#diffModal').modal('show');
rows = archiveRecords.rows('.selected').data();
id1 = rows[0].id
id2 = rows[1].id
var request1 = $.ajax({
url: "/archived_runners/"+id1,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(data){
//console.log("first request ok")
//console.log(JSON.stringify(data,null,2));
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
console.log("first request error")
}
});
var request2 = $.ajax({
url: "/archived_runners/"+id2,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(data){
//console.log("first request ok")
//console.log(JSON.stringify(data,null,2));
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
console.log("first request error")
}
});
// Handling the responses of both requests
$.when(request1, request2).then(function(response1, response2) {
// Both requests have completed successfully
var result1 = response1[0];
var result2 = response2[0];
//console.log("Result from first request:", result1);
//console.log("Result from second request:", result2);
//console.log("calling compare")
perform_compare(result1, result2)
// Perform your action with the results from both requests
// Example:
}, function(error1, error2) {
// Handle errors from either request here
// Example:
console.error("Error from first request:", error1);
console.error("Error from second request:", error2);
});
//two freshly fetched objects come in here
function perform_compare(data1, data2) {
var record1 = new Object()
//console.log(JSON.stringify(rows))
record1 = JSON.parse(data1.strat_json)
//record1.json = rows[0].json
//record1.id = rows[0].id;
// record1.id2 = parseInt(rows[0].id2);
//record1.name = rows[0].name;
// record1.symbol = rows[0].symbol;
// record1.class_name = rows[0].class_name;
// record1.script = rows[0].script;
// record1.open_rush = rows[0].open_rush;
// record1.close_rush = rows[0].close_rush;
//console.log(record1.stratvars_conf)
//ELEMENTS TO COMPARE
//profit section
//console.log(data1.metrics)
try {
record1["profit"] = JSON.parse(data1.metrics.profit)
}
catch (e) {
console.log(e.message)
}
//record1.stratvars_conf = TOML.parse(record1.stratvars_conf);
//record1.add_data_conf = TOML.parse(record1.add_data_conf);
// record1.note = rows[0].note;
// record1.history = "";
//jsonString1 = JSON.stringify(record1, null, 2);
var record2 = new Object()
record2 = JSON.parse(data2.strat_json)
// record2.id = rows[1].id;
// record2.id2 = parseInt(rows[1].id2);
//record2.name = rows[1].name;
// record2.symbol = rows[1].symbol;
// record2.class_name = rows[1].class_name;
// record2.script = rows[1].script;
// record2.open_rush = rows[1].open_rush;
// record2.close_rush = rows[1].close_rush;
//ELEMENTS TO COMPARE
//console.log(data2.metrics)
try {
record2["profit"] = JSON.parse(data2.metrics.profit)
}
catch (e) {
console.log(e.message)
}
//record2.stratvars_conf = TOML.parse(record2.stratvars_conf);
//record2.add_data_conf = TOML.parse(record2.add_data_conf);
// record2.note = rows[1].note;
// record2.history = "";
//jsonString2 = JSON.stringify(record2, null, 2);
$('#diff_first').text(record1.name);
$('#diff_second').text(record2.name);
$('#diff_first_id').text(data1.id);
$('#diff_second_id').text(data2.id);
//monaco
require(["vs/editor/editor.main"], () => {
editor_diff_arch1 = monaco.editor.createDiffEditor(document.getElementById('diff_content1'),
{
language: 'toml',
theme: 'tomlTheme-dark',
originalEditable: false,
automaticLayout: true
}
);
console.log(record1.stratvars_conf)
console.log(record2.stratvars_conf)
editor_diff_arch1.setModel({
original: monaco.editor.createModel(record1.stratvars_conf, 'toml'),
modified: monaco.editor.createModel(record2.stratvars_conf, 'toml'),
});
editor_diff_arch2 = monaco.editor.createDiffEditor(document.getElementById('diff_content2'),
{
language: 'toml',
theme: 'tomlTheme-dark',
originalEditable: false,
automaticLayout: true
}
);
editor_diff_arch2.setModel({
original: monaco.editor.createModel(record1.add_data_conf, 'toml'),
modified: monaco.editor.createModel(record2.add_data_conf, 'toml'),
});
});
// var delta = compareObjects(record1, record2)
// const htmlMarkup2 = `<pre>{\n${generateHTML(record2, delta)}}\n</pre>`;
// document.getElementById('second').innerHTML = htmlMarkup2;
// const htmlMarkup1 = `<pre>{\n${generateHTML(record1, delta)}}\n</pre>`;
// document.getElementById('first').innerHTML = htmlMarkup1;
event.preventDefault();
//$('#button_compare').attr('disabled','disabled');
}
});
//generate batch optimization cutoff (rework into a button for generic batch analyses)
$('#button_analyze').click(function () {
analyze_optimal_cutoff();
});
//generate report button
$('#button_report').click(function () {
rows = archiveRecords.rows('.selected');
if (rows == undefined) {
return
}
$('#button_report').attr('disabled','disabled');
runnerIds = []
if(rows.data().length > 0 ) {
// Loop through the selected rows and display an alert with each row's ID
rows.every(function (rowIdx, tableLoop, rowLoop ) {
var data = this.data()
runnerIds.push(data.id);
});
}
$.ajax({
url:"/archived_runners/generatereportimage",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"POST",
xhrFields: {
responseType: 'blob'
},
contentType: "application/json",
processData: false,
data: JSON.stringify(runnerIds),
success:function(blob){
var url = window.URL || window.webkitURL;
console.log("vraceny obraz", blob)
console.log("url",url.createObjectURL(blob))
display_image(url.createObjectURL(blob))
$('#button_report').attr('disabled',false);
},
error: function(xhr, status, error) {
console.log("proc to skace do erroru?")
//window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#button_report').attr('disabled',false);
}
})
});
//button to query log
$('#logRefreshButton').click(function () {
refresh_logfile()
});
$('#logFileSelect').change(function() {
refresh_logfile();
});
//button to open log modal
$('#button_show_log').click(function () {
window.$('#logModal').modal('show');
refresh_logfile()
});
//delete batch button - open modal - DECOMMISSIONED - available only for batches
// $('#button_delete_batch').click(function () {
// row = archiveRecords.row('.selected').data();
// if (row == undefined || row.batch_id == undefined) {
// return
// }
// $('#batch_id_del').val(row.batch_id);
// rows = archiveRecords.rows('.selected');
// if (rows == undefined) {
// return
// }
// $('#listofids').html("");
// window.$('#delModalBatch').modal('show');
// });
//delete batch submit modal
$("#delModalBatch").on('submit','#delFormBatch', delete_batch);
//delete arch button - open modal
$('#button_delete_arch').click(function () {
rows = archiveRecords.rows('.selected');
if (rows == undefined) {
return
}
$('#listofids').html("");
if(rows.data().length > 0 ) {
ids_to_del = ""
// Loop through the selected rows and display an alert with each row's ID
rows.every(function (rowIdx, tableLoop, rowLoop ) {
var data = this.data()
ids_to_del = ids_to_del + data.id + "<br>"
});
$('#listofids').html(ids_to_del);
window.$('#delModalArchive').modal('show');
//$('#delidarchive').val(row.id);
}
});
//edit button
$('#button_edit_arch').click(function () {
row = archiveRecords.row('.selected').data();
if (row == undefined) {
return
}
refresh_arch_and_callback(row, display_edit_modal)
function display_edit_modal(row) {
window.$('#editModalArchive').modal('show');
$('#editidarchive').val(row.id);
$('#editnote').val(row.note);
try {
metrics = JSON.parse(row.metrics)
}
catch (e) {
metrics = row.metrics
}
$('#metrics').val(JSON.stringify(metrics,null,2));
//$('#metrics').val(TOML.parse(row.metrics));
if (row.stratvars_toml) {
$('#editstratvars').val(row.stratvars_toml);
}
else{
$('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
}
$('#edittransferables').val(JSON.stringify(row.transferables,null,2));
$('#editstratjson').val(row.strat_json);
}
});
//show button
$('#button_show_arch').click(function () {
row = archiveRecords.row('.selected').data();
if (row == undefined) {
return
}
refresh_arch_and_callback(row, get_detail_and_chart)
});
//run again button
$('#button_runagain_arch').click(run_day_again)
//run in bt mode
$('#button_runbt_arch').click(function() {
run_day_again(true);
});
//workaround for the select state being incorrectly applied to group headers as well
// $('#archiveTable tbody').on('click', 'tr.group-header', function(event) {
// var $row = $(this);
// // Schedule the class removal/addition for the next event loop
// setTimeout(function() {
// if ($row.hasClass("selected")) {
// console.log("Header selected, removing selection");
// $row.removeClass("selected");
// }
// }, 0);
// });
// Expand/Collapse functionality
$('#archiveTable tbody').on('click', 'tr.group-header', expand_collapse_rows);
})


@ -0,0 +1,453 @@
var archiveRecords = null
//equivalent of document ready
function initialize_archiveRecords() {
//archive table
archiveRecords =
$('#archiveTable').DataTable( {
ajax: {
url: '/archived_runners_p/',
dataSrc: 'data',
method:"POST",
contentType: "application/json",
// dataType: "json",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
data: function (d) {
return JSON.stringify(d);
},
error: function(xhr, status, error) {
//var err = eval("(" + xhr.responseText + ")");
//window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
}
},
columns: [{ data: 'id' },
{data: 'strat_id'},
{data: 'name'},
{data: 'symbol'},
{data: 'note'},
{data: 'started'},
{data: 'stopped'},
{data: 'mode'},
{data: 'account', visible: true},
{data: 'bt_from', visible: true},
{data: 'bt_to', visible: true},
{data: 'ilog_save', visible: true},
{data: 'profit'},
{data: 'trade_count', visible: true},
{data: 'end_positions', visible: true},
{data: 'end_positions_avgp', visible: true},
{data: 'metrics', visible: true},
{data: 'batch_id', visible: true},
{data: 'batch_profit', visible: false},
{data: 'batch_count', visible: false},
],
paging: true,
processing: true,
serverSide: true,
columnDefs: [
{
targets: 1,
render: function ( data, type, row ) {
if (type === 'display') {
//console.log("arch")
var color = getColorForId(data);
return '<div class="tdnowrap" data-bs-toggle="tooltip" data-bs-placement="top" title="'+data+'"><span class="color-tag" style="background-color:' + color + ';"></span>'+data+'</div>';
}
return data;
},
},
{
targets: [0,17],
render: function ( data, type, row ) {
if (!data) return data
return '<div class="tdnowrap" title="'+data+'">'+data+'</i>'
},
},
{
targets: [5],
render: function ( data, type, row ) {
if (type == "sort") {
return new Date(data).getTime();
}
//data = "2024-02-26T19:29:13.400621-05:00"
// Create a date object from the string, represents given moment in time in UTC time
var date = new Date(data);
tit = date.toLocaleString('cs-CZ', {
timeZone: 'America/New_York',
})
if (isToday(date)) {
//console.log("volame isToday s", date)
//return local time only
return '<div title="'+tit+'">'+ 'dnes ' + format_date(data,true,true)+'</div>'
}
else
{
//return local datetime
return '<div title="'+tit+'">'+ format_date(data,true,false)+'</div>'
}
},
},
{
targets: [6],
render: function ( data, type, row ) {
if (type == "sort") {
return new Date(data).getTime();
}
var date = new Date(data);
tit = date.toLocaleString('cs-CZ', {
timeZone: 'America/New_York',
})
if (isToday(date)) {
//return local time only
return '<div title="'+tit+'" class="token level comment">'+ 'dnes ' + format_date(data,true,true)+'</div>'
}
else
{
//return local datetime
return '<div title="'+tit+'" class="token level number">'+ format_date(data,true,false)+'</div>'
}
},
},
{
targets: [9,10],
render: function ( data, type, row ) {
if (type == "sort") {
return new Date(data).getTime();
}
//console.log(data)
//market datetime
return data ? format_date(data, true) : data
},
},
{
targets: [2],
render: function ( data, type, row ) {
return '<div class="tdname tdnowrap" title="'+data+'">'+data+'</div>'
},
},
// {
// targets: [4],
// render: function ( data, type, row ) {
// return '<div class="tdname tdnowrap" title="'+data+'">'+data+'</div>'
// },
// },
{
targets: [16],
render: function ( data, type, row ) {
//console.log("metrics", data)
try {
data = JSON.parse(data)
}
catch (error) {
//console.log(error)
}
var res = JSON.stringify(data)
var unquoted = res.replace(/"([^"]+)":/g, '$1:')
//show only a short summary if we have one, otherwise show everything; the title always gets everything
//console.log(data)
short = null
if ((data) && (data.profit) && (data.profit.sum)) {
short = data.profit.sum
}
else {
short = unquoted
}
return '<div class="tdmetrics" title="'+unquoted+'">'+short+'</div>'
},
},
{
targets: [4],
render: function ( data, type, row ) {
return '<div class="tdnote" title="'+data+'">'+data+'</div>'
},
},
{
targets: [13,14,15],
render: function ( data, type, row ) {
return '<div class="tdsmall">'+data+'</div>'
},
},
{
targets: [11],
render: function ( data, type, row ) {
//if ilog_save true
if (data) {
return '<span class="material-symbols-outlined">done_outline</span>'
}
else {
return null
}
},
},
{
targets: [8],
render: function ( data, type, row ) {
//if ilog_save true
if (data == "ACCOUNT1") {
res="ACC1"
}
else if (data == "ACCOUNT2") {
res="ACC2"
}
else { res=data}
return res
},
},
{
targets: [7],
render: function ( data, type, row ) {
//if ilog_save true
if (data == "backtest") {
res="bt"
}
else { res=data}
return res
},
}
],
order: [[6, 'desc']],
select: {
info: true,
style: 'multi',
//selector: 'tbody > tr:not(.group-header)'
selector: 'tbody > tr:not(.group-header)'
},
paging: true,
// lengthChange: false,
// select: true,
// createdRow: function( row, data, dataIndex){
// if (is_running(data.id) ){
// alert("runner");
// $(row).addClass('highlight');
// }
//}
// Add row grouping based on 'batch_id'
//TODO go through and revise - optimize where possible
//NOTE stopped here
rowGroup: {
dataSrc: 'batch_id',
//called when rendering the group header
startRender: function (rows, group) {
var firstRowData = rows.data()[0];
//for no-batch-id rows the group id is the first row's id
var groupId = group ? group : 'no-batch-id-' + firstRowData.id;
var stateKey = 'dt-group-state-' + groupId;
var state = localStorage.getItem(stateKey);
var profit = firstRowData.batch_profit
var itemCount = firstRowData.batch_count
// Iterate over each row in the group to set the data attribute
// and also set each node's visibility according to the saved state
rows.every(function (rowIdx, tableLoop, rowLoop) {
var rowNode = $(this.node());
rowNode.attr('data-group-name', groupId);
//by default batches are collapsed and non-batches expanded, unless stored otherwise
if (state == 'collapsed' || (!state) && group) {
rowNode.hide();
} else {
rowNode.show();
}
});
// Initialize variables for the group
//var itemCount = 0;
var period = '';
var batch_note = '';
//var profit = '';
var started = null;
var stratinId = null;
var symbol = null;
// // Process each item only once
// archiveRecords.rows({ search: 'applied' }).every(function (rowIdx, tableLoop, rowLoop) {
// var data = this.data();
// if ((group && data.batch_id == group)) {
// itemCount++;
// if (itemCount === 1 ) {
// firstNote = data.note ? data.note.substring(0, 14) : '';
// if (data.note) {
// better_counter = extractNumbersFromString(data.note);
// }
// try {
// profit = data.metrics.profit.batch_sum_profit;
// } catch (e) {
// profit = 'N/A';
// }
// }
// }
// });
//if we have a batch_id, check whether its header info is already cached; if so, use it
//if not, load/build it
//This code parses info for the group header from the notes; it is relevant only for
//backtest batches, not for paper and live, where the number of days is unknown and the note can change
//in the future this frontend parsing will be replaced by a batch table in the db that persists
//this data
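//A sketch with a hypothetical note "SCHED daily N: tuned SL": period is the first
//14 characters (normalized to "SCHEDULER" here because it starts with "SCHED"),
//batch_note is the part after "N:" ("tuned SL"), while profit and itemCount come
//from the row's batch_profit/batch_count columns.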
if (group) {
const existingBatch = batchHeaders.find(batch => batch.batch_id == group);
//not yet in the batch array - build its header
if (!existingBatch) {
// itemCount = extractNumbersFromString(firstRowData.note);
// if (!itemCount) {
// itemCount="NA"
// }
// try { profit = firstRowData.metrics.profit.batch_sum_profit;}
// catch (e) {profit = 'NA'}
// if (!profit) {profit = 'NA'}
period = firstRowData.note ? firstRowData.note.substring(0, 14) : '';
try {
batch_note = firstRowData.note ? firstRowData.note.split("N:")[1].trim() : ''
} catch (e) { batch_note = ''}
started = firstRowData.started
stratinId = firstRowData.strat_id
symbol = firstRowData.symbol
if (period.startsWith("SCHED")) {
period = "SCHEDULER";
}
var newBatchHeader = {batch_id:group, batch_note:batch_note, profit:profit, itemCount:itemCount, period:period, started:started, stratinId:stratinId, symbol:symbol};
batchHeaders.push(newBatchHeader)
}
//already in the array, but we have a newer row (e.g. added during a backtest) - update it
else if (new Date(existingBatch.started) < new Date(firstRowData.started)) {
// try {itemCount = extractNumbersFromString(firstRowData.note);}
// catch (e) {itemCount = 'NA'}
// try {profit = firstRowData.metrics.profit.batch_sum_profit;}
// catch (e) {profit = 'NA'}
period = firstRowData.note ? firstRowData.note.substring(0, 14) : '';
if (period.startsWith("SCHED")) {
period = "SCHEDULER";
}
try {
batch_note = firstRowData.note ? firstRowData.note.split("N:")[1].trim() : ''
} catch (e) { batch_note = ''}
started = firstRowData.started
stratinId = firstRowData.strat_id
symbol = firstRowData.symbol
existingBatch.itemCount = itemCount;
existingBatch.profit = profit;
existingBatch.period = period;
existingBatch.started = started;
existingBatch.batch_note = batch_note
}
//already in the batch array - take the values from there
else {
profit = existingBatch.profit
itemCount = existingBatch.itemCount
period = existingBatch.period
started = existingBatch.started
stratinId = existingBatch.stratinId
symbol = existingBatch.symbol
batch_note = existingBatch.batch_note
}
}
//also set this on all children
// Construct the GROUP HEADER - any buttons etc. go here
//var groupHeaderContent = '<strong>' + (group ? 'Batch ID: ' + group : 'No Batch') + '</strong>';
var tools = ''
var icon = ''
icon_color = ''
profit_icon_color = ''
exp_coll_icon_name = ''
exp_coll_icon_name = (state == 'collapsed') ? 'expand_more' : 'expand_less'
if (group) {
tools = '<span class="batchtool">'
tools += '<span id="batchtool_report_button" class="material-symbols-outlined tool-icon" title="Batch Report">lab_profile</span>'
tools += '<span id="batchtool_delete_button" class="material-symbols-outlined tool-icon" title="Delete Batch">delete</span>'
tools += '<span id="batchtool_exportcsv_button" class="material-symbols-outlined tool-icon" title="Export batch to csv">csv</span>'
tools += '<span id="batchtool_exportxml_button" class="material-symbols-outlined tool-icon" title="Export batch to xml">insert_drive_file</span>'
tools += '<span id="batchtool_cutoff_button" class="material-symbols-outlined tool-icon" title="Cutoff heatmap for batch">cut</span>'
//dynamic button placeholder
//tools += '<div class="dropdown"><button class="btn btn-outline-success btn-sm dropdown-toggle" type="button" id="actionDropdown" data-bs-toggle="dropdown" aria-expanded="false">Choose analyzer</button><ul class="dropdown-menu dropdown-menu-dark" aria-labelledby="actionDropdown"></ul></div>'
tools += '<div class="batch_buttons_container" id="bb'+group+'" data-batch-id="'+group+'"></div>'
//final closure
tools += '</span>'
icon_color = getColorForId(stratinId)
profit_icon_color = (profit>0) ? "#4f8966" : "#bb2f5e" //"#d42962"
}
else {
//default color for no-batch rows - semi-transparent
icon_color = "#ced4da17"
}
icon = '<span class="material-symbols-outlined expand-icon" style="background-color:' + icon_color + ';" title="Expand">'+exp_coll_icon_name+'</span>'
//console.log(group, groupId, stratinId)
//var groupHeaderContent = '<span class="batchheader-batch-id">'+(group ? '<span class="color-tag" style="background-color:' + getColorForId(stratinId) + ';"></span>Batch ID: ' + group: 'No Batch')+'</span>';
var groupHeaderContent = '<span class="batchheader-batch-id">'+ icon + (group ? 'Batch ID: ' + group: 'No Batch')+'</span>';
groupHeaderContent += (group ? '<span class="batchheader-symbol-info" style="color:'+icon_color+'">' + symbol + '</span><span class="batchheader-count-info">(' + itemCount + ')</span>' + ' <span class="batchheader-period-info">' + period + '</span> <span class="batchheader-profit-info" style="color:'+profit_icon_color+'">Profit: ' + profit + '</span>' : '');
groupHeaderContent += group ? tools : ""
groupHeaderContent += group ? '<span class="batchheader-note-info">' + batch_note + '</span>' : ''
return $('<tr/>')
.append('<td colspan="18">' + groupHeaderContent + '</td>')
.attr('data-name', groupId)
.addClass('group-header')
.addClass(state);
}
},
lengthMenu: [ [10, 50, 200, 500, -1], [10, 50, 200, 500, "All"] ],
drawCallback: function (settings) {
//console.log("drawcallback", configData)
setTimeout(function(){
//populate all tool buttons on batch header
// Loop over all divs with the class 'batch-buttons-container'
if (configData["dynamic_buttons"]) {
//console.log("jsme tu po cekani")
//console.log("pred loopem")
$('.batch_buttons_container').each((index, element) => {
//console.log("jsme uvnitr foreach");
idecko = $(element).attr('id')
//console.log("idecko", idecko)
var batchId = $(element).data('batch-id'); // Get the data-batch-id attribute
//console.log("nalezeno pred", batchId, $(element));
populate_dynamic_buttons($(element), configData["dynamic_buttons"], batchId);
//console.log("po", $(element));
});
}else {
console.log("no dynamic_buttons configuration loaded")
}
}, 1);
// var api = this.api();
// var rows = api.rows({ page: 'current' }).nodes();
// api.column(17, { page: 'current' }).data().each(function (group, i) {
// console.log("drawCallabck i",i)
// console.log("rows", $(rows).eq(i))
// var groupName = group ? group : $(rows).eq(i).attr('data-name');
// console.log("groupName", groupName)
// var stateKey = 'dt-group-state-' + groupName;
// var state = localStorage.getItem(stateKey);
// if (state === 'collapsed') {
// $(rows).eq(i).hide();
// } else {
// $(rows).eq(i).show();
// }
// Set the unique identifier as a data attribute on each row
//$(rows).eq(i).attr('data-group-name', groupName);
// // Add or remove the 'collapsed' class based on the state
// if (groupName.startsWith('no-batch-id-')) {
// $('tr[data-name="' + groupName + '"]').toggleClass('collapsed', state === 'collapsed');
// }
// });
}
});
}


@ -0,0 +1,55 @@
//maybe move this into document ready and split it into handlers and functions
//edit modal
$("#editModalArchive").on('submit','#editFormArchive', function(event){
event.preventDefault();
$('#editarchive').attr('disabled','disabled');
trow = archiveRecords.row('.selected').data();
note = $('#editnote').val()
var formData = $(this).serializeJSON();
row = {}
row["id"] = trow.id
row["note"] = note
jsonString = JSON.stringify(row);
//console.log("pred odeslanim json string", jsonString)
$.ajax({
url:"/archived_runners/"+trow.id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"PATCH",
contentType: "application/json",
// dataType: "json",
data: jsonString,
success:function(data){
$('#editFormArchive')[0].reset();
window.$('#editModalArchive').modal('hide');
$('#editarchive').attr('disabled', false);
archiveRecords.ajax.reload();
disable_arch_buttons();
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#editarchive').attr('disabled', false);
}
})
});
//delete modal
$("#delModalArchive").on('submit','#delFormArchive', function(event){
event.preventDefault();
$('#deletearchive').attr('disabled','disabled');
rows = archiveRecords.rows('.selected');
if(rows.data().length > 0 ) {
runnerIds = []
// Loop through the selected rows and display an alert with each row's ID
rows.every(function (rowIdx, tableLoop, rowLoop ) {
var data = this.data()
runnerIds.push(data.id);
});
delete_arch_rows(runnerIds)
}
});


@ -0,0 +1,100 @@
function refresh_runmanager_and_callback(row, callback) {
//console.log("entering refresh")
var request = $.ajax({
url: "/run_manager_records/"+row.id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"GET",
contentType: "application/json",
dataType: "json",
success:function(data){
//console.log("fetched data ok")
//console.log(JSON.stringify(data,null,2));
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
}
});
// Handle the response of the request
$.when(request).then(function(response) {
// The request completed successfully
//console.log("Result from request:", response);
//console.log("Response received. calling callback")
//call callback function
callback(response)
}, function(error) {
// Handle errors from either request here
// Example:
console.error("Error from first request:", error);
console.log("requesting id error")
});
}
function delete_runmanager_row(id) {
$.ajax({
url:"/run_manager_records/"+id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"DELETE",
contentType: "application/json",
dataType: "json",
// data: JSON.stringify(ids),
success:function(data){
$('#delFormRunmanager')[0].reset();
window.$('#delModalRunmanager').modal('hide');
$('#deleterunmanager').attr('disabled', false);
//console.log(data)
runmanagerRecords.ajax.reload();
disable_runmanager_buttons()
},
error: function(xhr, status, error) {
var err = eval("(" + xhr.responseText + ")");
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#deleterunmanager').attr('disabled', false);
//archiveRecords.ajax.reload();
}
})
}
//enable/disable depending on whether row(s) are selected
function disable_runmanager_buttons() {
//disable buttons (enable on row selection)
//$('#button_add_sched').attr('disabled','disabled');
$('#button_edit_sched').attr('disabled','disabled');
$('#button_delete_sched').attr('disabled','disabled');
$('#button_history_sched').attr('disabled','disabled');
}
function enable_runmanager_buttons() {
//enable buttons
//$('#button_add_sched').attr('disabled',false);
$('#button_edit_sched').attr('disabled',false);
$('#button_delete_sched').attr('disabled',false);
$('#button_history_sched').attr('disabled',false);
}
// Function to update options
function updateSelectOptions(type) {
var allOptions = {
'paper': '<option value="paper">paper</option>',
'live': '<option value="live">live</option>',
'backtest': '<option value="backtest">backtest</option>',
'prep': '<option value="prep">prep</option>'
};
var allowedOptions = (type === "schedule") ? ['paper', 'live'] : Object.keys(allOptions);
var $select = $('#runmanmode');
$select.empty(); // Clear current options
allowedOptions.forEach(function(opt) {
$select.append(allOptions[opt]); // Append allowed options
});
}
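// Example: updateSelectOptions("schedule") leaves only paper/live in #runmanmode,
// while any other type (e.g. "queue") restores all four options
// (paper, live, backtest, prep).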


@ -0,0 +1,296 @@
/* <button title="Create new" id="button_add_sched" class="btn btn-outline-success btn-sm">Add</button>
<button title="Edit selected" id="button_edit_sched" class="btn btn-outline-success btn-sm">Edit</button>
<button title="Delete selected" id="button_delete_sched" class="btn btn-outline-success btn-sm">Delete</button>
id="delModalRunmanager"
id="addeditModalRunmanager" id="runmanagersubmit" == "Add vs Edit"
*/
// Function to apply filter
function applyFilter(filter) {
switch (filter) {
case 'filterSchedule':
runmanagerRecords.column(1).search('schedule').draw();
break;
case 'filterQueue':
runmanagerRecords.column(1).search('queue').draw();
break;
// default:
// runmanagerRecords.search('').columns().search('').draw();
// break;
}
}
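// Example: applyFilter('filterSchedule') narrows column 1 (presumably the moddus
// column) to rows containing 'schedule'; the default branch is commented out above,
// so an unknown filter id leaves the current search untouched.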
// Function to get the ID of current active filter
function getCurrentFilter() {
var activeFilter = $('input[name="filterOptions"]:checked').attr('id');
console.log("activeFilter", activeFilter)
return activeFilter;
}
// Function to show/hide input fields based on the current filter
function updateInputFields() {
var activeFilter = getCurrentFilter();
switch (activeFilter) {
case 'filterSchedule':
$('#runmantestlist_id_div').hide();
$('#runmanbt_from_div').hide();
$('#runmanbt_to_div').hide();
$('#runmanvalid_from_div').show();
$('#runmanvalid_to_div').show();
$('#runmanstart_time_div').show();
$('#runmanstop_time_div').show();
break;
case 'filterQueue':
$('#runmantestlist_id_div').show();
$('#runmanbt_from_div').show();
$('#runmanbt_to_div').show();
$('#runmanvalid_from_div').hide();
$('#runmanvalid_to_div').hide();
$('#runmanstart_time_div').hide();
$('#runmanstop_time_div').hide();
break;
default:
//$('#inputForSchedule, #inputForQueue').hide();
break;
}
}
//event handlers for runmanager table
$(document).ready(function () {
initialize_runmanagerRecords();
runmanagerRecords.ajax.reload();
disable_runmanager_buttons();
//on click on #button_refresh_sched call runmanagerRecords.ajax.reload()
$('#button_refresh_sched').click(function () {
runmanagerRecords.ajax.reload();
});
// Event listener for changes in the radio buttons
$('input[name="filterOptions"]').on('change', function() {
var selectedFilter = $(this).attr('id');
applyFilter(selectedFilter);
// Save the selected filter to local storage
localStorage.setItem('selectedFilter', selectedFilter);
});
// Load the last selected filter from local storage and apply it
var lastSelectedFilter = localStorage.getItem('selectedFilter');
if (lastSelectedFilter) {
$('#' + lastSelectedFilter).prop('checked', true).change();
}
//listen for changes on weekday enabling button
$('#runman_enable_weekdays').change(function() {
if ($(this).is(':checked')) {
$('.weekday-checkboxes').show();
} else {
$('.weekday-checkboxes').hide();
}
});
//selectable rows in runmanager table
$('#runmanagerTable tbody').on('click', 'tr', function () {
if ($(this).hasClass('selected')) {
//$(this).removeClass('selected');
//add a condition here so disable is called only when no other row is selected
// Check if there are no other selected rows before disabling buttons
if ($('#runmanagerTable tr.selected').length === 1) {
disable_runmanager_buttons();
}
//disable_arch_buttons()
} else {
//archiveRecords.$('tr.selected').removeClass('selected');
$(this).addClass('selected');
enable_runmanager_buttons()
}
});
//delete button
$('#button_delete_sched').click(function () {
row = runmanagerRecords.row('.selected').data();
window.$('#delModalRunmanager').modal('show');
$('#delidrunmanager').val(row.id);
// $('#action').val('delRecord');
// $('#save').val('Delete');
});
//button add
$('#button_add_sched').click(function () {
window.$('#addeditModalRunmanager').modal('show');
$('#addeditFormRunmanager')[0].reset();
//$("#runmanid").prop('readonly', false);
if (getCurrentFilter() == 'filterQueue') {
mode = 'queue';
} else {
mode = 'schedule';
}
//set modus
$('#runmanmoddus').val(mode);
//updates fields according to selected type
updateInputFields();
updateSelectOptions(mode);
// Initially, check the value of "batch" and enable/disable "btfrom" and "btto" accordingly
if ($("#runmantestlist_id").val() !== "") {
$("#runmanbt_from, #runmanbt_to").prop("disabled", true);
} else {
$("#runmanbt_from, #runmanbt_to").prop("disabled", false);
}
// Listen for changes in the "batch" input and diasble/enable "btfrom" and "btto" accordingly
$("#runmantestlist_id").on("input", function() {
if ($(this).val() !== "") {
// If "batch" is not empty, disable "from" and "to"
$("#runmanbt_from, #runmanbt_to").prop("disabled", true);
} else {
// If "batch" is empty, enable "from" and "to"
$("#runmanbt_from, #runmanbt_to").prop("disabled", false);
}
});
$('.modal-title_run').html("<i class='fa fa-plus'></i> Add Record");
$('#runmanagersubmit').val('Add');
$('#runmanager_enable_weekdays').prop('checked', false);
$('.weekday-checkboxes').hide();
});
//edit button
$('#button_edit_sched').click(function () {
row = runmanagerRecords.row('.selected').data();
if (row == undefined) {
return
}
window.$('#addeditModalRunmanager').modal('show');
//set fields as readonly
//$("#runmanid").prop('readonly', true);
//$("#runmanmoddus").prop('readonly', true);
console.log("pred editem puvodni row", row)
refresh_runmanager_and_callback(row, show_edit_modal)
function show_edit_modal(row) {
console.log("pred editem refreshnuta row", row);
$('#addeditFormRunmanager')[0].reset();
$('.modal-title_run').html("<i class='fa fa-plus'></i> Edit Record");
$('#runmanagersubmit').val('Edit');
//updates fields according to selected type
updateInputFields();
// fill in the shared attributes
$('#runmanid').val(row.id);
$('#runmanhistory').val(row.history);
$('#runmanlast_processed').val(row.last_processed);
$('#runmanstrat_id').val(row.strat_id);
$('#runmanmode').val(row.mode);
$('#runmanmoddus').val(row.moddus);
$('#runmanaccount').val(row.account);
$('#runmanstatus').val(row.status);
$('#runmanbatch_id').val(row.batch_id);
$('#runmanrunner_id').val(row.runner_id);
$("#runmanilog_save").prop("checked", row.ilog_save);
$('#runmannote').val(row.note);
$('#runmantestlist_id').val(row.testlist_id);
$('#runmanbt_from').val(row.bt_from);
$('#runmanbt_to').val(row.bt_to);
$('#runmanvalid_from').val(row.valid_from);
$('#runmanvalid_to').val(row.valid_to);
$('#runmanstart_time').val(row.start_time);
$('#runmanstop_time').val(row.stop_time);
// Initially, check the value of "testlist_id" and enable/disable "bt_from" and "bt_to" accordingly
if ($("#runmantestlist_id").val() !== "") {
$("#runmanbt_from, #runmanbt_to").prop("disabled", true);
} else {
$("#runmanbt_from, #runmanbt_to").prop("disabled", false);
}
// Listen for changes in the "batch" input
$("#runmantestlist_id").on("input", function() {
if ($(this).val() !== "") {
// If "batch" is not empty, disable "from" and "to"
$("#runmanbt_from, #runmanbt_to").prop("disabled", true);
} else {
// If "batch" is empty, enable "from" and "to"
$("#runmanbt_from, #runmanbt_to").prop("disabled", false);
}
});
var type = $('#runmanmoddus').val();
updateSelectOptions(type);
// weekdays_filter arrives as an array of day indexes, e.g. [1, 2, 3]
var weekdayFilter = row.weekdays_filter;
if (weekdayFilter) {
$('#runman_enable_weekdays').prop('checked', true);
$(".weekday-checkboxes").show();
// Map day indexes to weekday checkbox ids
var dayOfWeekMap = {
"0": "monday",
"1": "tuesday",
"2": "wednesday",
"3": "thursday",
"4": "friday",
"5": "saturday",
"6": "sunday"
};
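// This map is the inverse of the submit-side switch (monday -> 0) and matches the
// weekdays array used by the table's weekdays_filter render.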
// Iterate through the selected days
$.each(weekdayFilter, function(index, dayIndex) {
var dayOfWeek = dayOfWeekMap[dayIndex];
if (dayOfWeek) { // Make sure the day exists in the map
$("#" + dayOfWeek).prop("checked", true);
}
});
}
else {
$('#runman_enable_weekdays').prop('checked', false);
$(".weekday-checkboxes").hide();
}
}
});
//history button
$('#button_history_sched').click(function () {
var row = runmanagerRecords.row('.selected').data();
if (row == undefined) {
return;
}
window.$('#historyModalRunmanager').modal('show');
//console.log("row before edit (original)", row)
refresh_runmanager_and_callback(row, show_history_modal)
function show_history_modal(row) {
//console.log("pred editem refreshnuta row", row);
$('#historyModalRunmanagerForm')[0].reset();
// fill in the shared attributes
$('#RunmanId').val(row.id);
var date = new Date(row.last_processed);
var formatted = date.toLocaleString('cs-CZ', {
timeZone: 'America/New_York',
});
$('#Runmanlast_processed').val(formatted);
$('#Runmanhistory').val(row.history);
}
});
});
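// refresh_runmanager_and_callback(row, cb) is defined elsewhere: it re-fetches the
// record so the modal shows fresh data before editing. A minimal sketch of the idea,
// assuming the same GET endpoint the table uses (names and shape are assumptions):
// function refresh_runmanager_and_callback(row, cb) {
//     $.ajax({
//         url: '/run_manager_records/',
//         method: 'GET',
//         beforeSend: function (xhr) { xhr.setRequestHeader('X-API-Key', API_KEY); },
//         success: function (records) {
//             cb(records.find(function (r) { return r.id === row.id; }) || row);
//         }
//     });
// }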

@ -0,0 +1,321 @@
var runmanagerRecords = null
//equivalent of a document-ready initializer for this table
function initialize_runmanagerRecords() {
//run manager table
runmanagerRecords =
$('#runmanagerTable').DataTable( {
ajax: {
url: '/run_manager_records/',
dataSrc: '',
method:"GET",
contentType: "application/json",
// dataType: "json",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
data: function (d) {
return JSON.stringify(d);
},
error: function(xhr, status, error) {
//var err = eval("(" + xhr.responseText + ")");
//window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
}
},
columns: [
{ data: 'id' },
{ data: 'moddus' },
{ data: 'strat_id' },
{ data: 'symbol' },
{ data: 'account' },
{ data: 'mode' },
{ data: 'note' },
{ data: 'ilog_save' },
{ data: 'bt_from' },
{ data: 'bt_to' },
{ data: 'weekdays_filter', visible: true },
{ data: 'batch_id', visible: true },
{ data: 'start_time', visible: true },
{ data: 'stop_time', visible: true },
{ data: 'status' },
{ data: 'last_processed', visible: true },
{ data: 'history', visible: false },
{ data: 'valid_from', visible: true },
{ data: 'valid_to', visible: true },
{ data: 'testlist_id', visible: true },
{ data: 'strat_running', visible: true },
{ data: 'runner_id', visible: true },
],
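// Column indexes referenced by columnDefs targets below: 0 id, 1 moddus, 2 strat_id,
// 3 symbol, 4 account, 5 mode, 6 note, 7 ilog_save, 8 bt_from, 9 bt_to,
// 10 weekdays_filter, 11 batch_id, 12 start_time, 13 stop_time, 14 status,
// 15 last_processed, 16 history, 17 valid_from, 18 valid_to, 19 testlist_id,
// 20 strat_running, 21 runner_id.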
paging: true,
processing: true,
serverSide: false,
columnDefs: [
{ //note
targets: [6],
render: function(data, type, row, meta) {
if (!data) return data;
var stateClass = 'truncated-text';
var uniqueId = 'note-' + row.id;
if (localStorage.getItem(uniqueId) === 'expanded') {
stateClass = 'expanded-text';
}
if (type === 'display') {
return '<div class="' + stateClass + '" id="' + uniqueId + '">' + data + '</div>';
}
return data;
},
},
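// The truncated/expanded state is keyed per note id in localStorage; a click
// handler elsewhere presumably toggles the class and persists it.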
{ //ilog_save
targets: [7],
render: function ( data, type, row ) {
//if ilog_save true
if (data) {
return '<span class="material-symbols-outlined">done_outline</span>'
}
else {
return null
}
},
},
{
targets: [10], //weekdays
render: function (data, type, row) {
if (!data) return data;
// Map each number in the array to a weekday
var weekdays = ["monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday"];
return data.map(function(dayNumber) {
return weekdays[dayNumber];
}).join(', ');
},
},
{
targets: [0, 21], //internal id, runner_id
render: function ( data, type, row ) {
if (!data) return data;
if (type === 'display') {
return '<div class="tdnowrap" data-bs-toggle="tooltip" data-bs-placement="top" title="'+data+'">'+data+'</div>';
}
return data;
},
},
{
targets: [2], //strat_id
render: function ( data, type, row ) {
if (type === 'display') {
//console.log("arch")
var color = getColorForId(data);
return '<div class="tdnowrap" data-bs-toggle="tooltip" data-bs-placement="top" title="'+data+'"><span class="color-tag" style="background-color:' + color + ';"></span>'+data+'</div>';
}
return data;
},
},
{
targets: [3,12,13], //symbol, start_time, stop_time
render: function ( data, type, row ) {
if (type === 'display') {
//console.log("arch")
var color = getColorForId(row.strat_id);
return '<span style="color:' + color + ';">'+data+'</span>';
}
return data;
},
},
{
targets: [16], //history
render: function ( data, type, row ) {
if (type === 'display') {
if (!data) data = "";
return '<div data-bs-toggle="tooltip" data-bs-placement="top" title="'+data+'">'+data+'</div>';
}
return data;
},
},
{
targets: [14], //status
render: function ( data, type, row ) {
if (type === 'display') {
//console.log("arch")
var color = data == "active" ? "#3f953f" : "#f84c4c";
return '<span style="color:' + color + ';">'+data+'</span>';
}
return data;
},
},
{
targets: [20], //strat_running
render: function ( data, type, row ) {
if (type === 'display') {
// render a green "running" label with runner_id in the tooltip
data = data ? "running" : "";
return '<div title="' + row.runner_id + '" style="color:#3f953f;">' + data + '</div>';
}
return data;
},
},
// {
// targets: [0,17],
// render: function ( data, type, row ) {
// if (!data) return data
// return '<div class="tdnowrap" title="'+data+'">'+data+'</i>'
// },
// },
{
targets: [15, 17, 18, 8, 9], //last_processed, valid_from, valid_to, bt_from, bt_to
render: function ( data, type, row ) {
if (!data) return data
if (type == "sort") {
return new Date(data).getTime();
}
var date = new Date(data);
var tit = date.toLocaleString('cs-CZ', {
timeZone: 'America/New_York',
})
return '<div title="'+tit+'">'+ format_date(data,true,false)+'</div>'
// if (isToday(now)) {
// //return local time only
// return '<div title="'+tit+'">'+ 'today ' + format_date(data,true,true)+'</div>'
// }
// else
// {
// //return local datetime
// return '<div title="'+tit+'">'+ format_date(data,true,false)+'</div>'
// }
},
},
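// Orthogonal data: the 'sort' type gets the raw epoch so ordering stays
// chronological, while the display shows format_date() output with the
// New York local time in the tooltip.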
// {
// targets: [6],
// render: function ( data, type, row ) {
// now = new Date(data)
// if (type == "sort") {
// return new Date(data).getTime();
// }
// var date = new Date(data);
// tit = date.toLocaleString('cs-CZ', {
// timeZone: 'America/New_York',
// })
// if (isToday(now)) {
// //return local time only
// return '<div title="'+tit+'" class="token level comment">'+ 'dnes ' + format_date(data,false,true)+'</div>'
// }
// else
// {
// //return local datetime
// return '<div title="'+tit+'" class="token level number">'+ format_date(data,false,false)+'</div>'
// }
// },
// },
// {
// targets: [9,10],
// render: function ( data, type, row ) {
// if (type == "sort") {
// return new Date(data).getTime();
// }
// //console.log(data)
// //market datetime
// return data ? format_date(data, true) : data
// },
// },
// {
// targets: [2],
// render: function ( data, type, row ) {
// return '<div class="tdname tdnowrap" title="'+data+'">'+data+'</div>'
// },
// },
// // {
// // targets: [4],
// // render: function ( data, type, row ) {
// // return '<div class="tdname tdnowrap" title="'+data+'">'+data+'</div>'
// // },
// // },
// {
// targets: [16],
// render: function ( data, type, row ) {
// //console.log("metrics", data)
// try {
// data = JSON.parse(data)
// }
// catch (error) {
// //console.log(error)
// }
// var res = JSON.stringify(data)
// var unquoted = res.replace(/"([^"]+)":/g, '$1:')
// //show only a short summary when available, otherwise everything; the title always gets the full value
// //console.log(data)
// short = null
// if ((data) && (data.profit) && (data.profit.sum)) {
// short = data.profit.sum
// }
// else {
// short = unquoted
// }
// return '<div class="tdmetrics" title="'+unquoted+'">'+short+'</div>'
// },
// },
// {
// targets: [4],
// render: function ( data, type, row ) {
// return '<div class="tdnote" title="'+data+'">'+data+'</div>'
// },
// },
// {
// targets: [13,14,15],
// render: function ( data, type, row ) {
// return '<div class="tdsmall">'+data+'</div>'
// },
// },
// {
// targets: [11],
// render: function ( data, type, row ) {
// //if ilog_save true
// if (data) {
// return '<span class="material-symbols-outlined">done_outline</span>'
// }
// else {
// return null
// }
// },
// },
{
targets: [4], //account
render: function ( data, type, row ) {
// abbreviate account name for display
var res;
if (data == "ACCOUNT1") {
res = "ACC1";
}
else if (data == "ACCOUNT2") {
res = "ACC2";
}
else { res = data; }
return res;
},
},
{
targets: [5], //mode
render: function ( data, type, row ) {
// abbreviate mode for display
var res;
if (data == "backtest") {
res = "bt";
}
else { res = data; }
return res;
},
}
],
order: [[1, 'asc']],
select: {
info: true,
style: 'multi',
selector: 'tbody > tr:not(.group-header)'
}
});
}
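// Usage (a minimal sketch; the real call site lives elsewhere in this repo, and
// the page template is assumed to define API_KEY and load jQuery + DataTables first):
// $(document).ready(initialize_runmanagerRecords);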

@ -0,0 +1,195 @@
//delete modal
$("#delModalRunmanager").on('submit','#delFormRunmanager', function(event){
event.preventDefault();
$('#deleterunmanager').attr('disabled','disabled');
//get the value from #delidrunmanager
var id = $('#delidrunmanager').val();
delete_runmanager_row(id);
});
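// delete_runmanager_row(id) is defined elsewhere; a minimal sketch of the likely
// shape, assuming a DELETE route mirroring the POST/PATCH examples kept below
// (endpoint and behavior are assumptions, not the confirmed implementation):
// function delete_runmanager_row(id) {
//     $.ajax({
//         url: '/run_manager_records/' + id,
//         method: 'DELETE',
//         beforeSend: function (xhr) { xhr.setRequestHeader('X-API-Key', API_KEY); },
//         success: function () {
//             window.$('#delModalRunmanager').modal('hide');
//             runmanagerRecords.ajax.reload();
//             disable_runmanager_buttons();
//         }
//     });
// }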
//add/edit API calls as fetch equivalents (kept for reference)
// fetch(`/run_manager_records/`, {
// method: 'POST',
// headers: {
// 'Content-Type': 'application/json',
// 'X-API-Key': API_KEY
// },
// body: JSON.stringify(newRecord)
// })
// fetch(`/run_manager_records/${recordId}`, {
// method: 'PATCH',
// headers: {
// 'Content-Type': 'application/json',
// 'X-API-Key': API_KEY
// },
// body: JSON.stringify(updatedData)
// })
function getCheckedWeekdays() {
const checkboxes = document.querySelectorAll('input[name="weekdays_filter[]"]:checked');
const selectedDays = Array.from(checkboxes).map(checkbox => checkbox.value);
return selectedDays;
}
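// Note: the submit handlers below read input[name="weekdays"] directly and map day
// names to indexes themselves; this helper targets the weekdays_filter[] checkboxes.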
//submit form
$("#addeditModalRunmanager").on('submit','#addeditFormRunmanager', function(event){
//event.preventDefault();
//code for add
if ($('#runmanagersubmit').val() == "Add") {
event.preventDefault();
//disable submit while the request is in flight
$('#runmanagersubmit').attr('disabled','disabled');
//trow = runmanagerRecords.row('.selected').data();
//note = $('#editnote').val()
// Handle weekdays functionality
var weekdays = [];
if ($('#runman_enable_weekdays').is(':checked')) {
$('#addeditFormRunmanager input[name="weekdays"]:checked').each(function() {
var weekday = $(this).val();
switch(weekday) {
case 'monday': weekdays.push(0); break;
case 'tuesday': weekdays.push(1); break;
case 'wednesday': weekdays.push(2); break;
case 'thursday': weekdays.push(3); break;
case 'friday': weekdays.push(4); break;
// Add cases for Saturday and Sunday if needed
}
});
}
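// weekdays now holds numeric day indexes, e.g. checking monday and wednesday
// yields [0, 2]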
console.log("weekdays pole", weekdays)
var formData = $(this).serializeJSON();
console.log("formData", formData)
delete formData["enable_weekdays"]
delete formData["weekdays"]
//apply the filter only when the checkbox is ticked, otherwise leave it unset
if (weekdays.length > 0) {
formData.weekdays_filter = weekdays
}
console.log(formData)
if ($('#runmanilog_save').prop('checked')) {
formData.ilog_save = true;
}
else
{
formData.ilog_save = false;
}
//if (formData.batch_id == "") {delete formData["batch_id"];}
//walk all attributes and drop empty strings; the backend fills in defaults
for (let key in formData) {
if (formData.hasOwnProperty(key) && formData[key] === "") {
delete formData[key];
}
}
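// e.g. { strat_id: "abc", note: "" } is sent as { strat_id: "abc" }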
var jsonString = JSON.stringify(formData);
console.log("formData JSON string before sending", jsonString);
$.ajax({
url:"/run_manager_records/",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"POST",
contentType: "application/json",
// dataType: "json",
data: jsonString,
success:function(data){
$('#addeditFormRunmanager')[0].reset();
window.$('#addeditModalRunmanager').modal('hide');
$('#runmanagersubmit').attr('disabled', false);
runmanagerRecords.ajax.reload();
disable_runmanager_buttons();
},
error: function(xhr, status, error) {
// log the raw xhr; the response body may not be valid JSON
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#runmanagersubmit').attr('disabled', false);
}
})
}
//code for edit
else {
event.preventDefault();
$('#runmanagersubmit').attr('disabled','disabled');
//trow = runmanagerRecords.row('.selected').data();
//note = $('#editnote').val()
// Handle weekdays functionality
var weekdays = [];
if ($('#runman_enable_weekdays').is(':checked')) {
$('#addeditFormRunmanager input[name="weekdays"]:checked').each(function() {
var weekday = $(this).val();
switch(weekday) {
case 'monday': weekdays.push(0); break;
case 'tuesday': weekdays.push(1); break;
case 'wednesday': weekdays.push(2); break;
case 'thursday': weekdays.push(3); break;
case 'friday': weekdays.push(4); break;
// Add cases for Saturday and Sunday if needed
}
});
}
var formData = $(this).serializeJSON();
delete formData["enable_weekdays"]
delete formData["weekdays"]
//apply the filter only when the checkbox is ticked, otherwise leave it unset
if (weekdays.length > 0) {
formData.weekdays_filter = weekdays
}
console.log(formData)
if ($('#runmanilog_save').prop('checked')) {
formData.ilog_save = true;
}
else
{
formData.ilog_save = false;
}
//walk the form attributes and drop empty strings; the backend fills in defaults (i.e. the stored value is cleared)
for (let key in formData) {
if (formData.hasOwnProperty(key) && formData[key] === "") {
delete formData[key];
}
}
var jsonString = JSON.stringify(formData);
console.log("EDIT formData JSON string before sending", jsonString);
$.ajax({
url:"/run_manager_records/"+formData.id,
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"PATCH",
contentType: "application/json",
// dataType: "json",
data: jsonString,
success:function(data){
console.log("EDIT success data", data);
$('#addeditFormRunmanager')[0].reset();
window.$('#addeditModalRunmanager').modal('hide');
$('#runmanagersubmit').attr('disabled', false);
runmanagerRecords.ajax.reload();
disable_runmanager_buttons();
},
error: function(xhr, status, error) {
// log the raw xhr; the response body may not be valid JSON
window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
$('#runmanagersubmit').attr('disabled', false);
}
});
}
});
