From 1abbb073908cf32e65aeee958ca4d077b3d36ae3 Mon Sep 17 00:00:00 2001
From: David Brazda
Date: Thu, 22 Feb 2024 23:05:49 +0700
Subject: [PATCH] Scheduler support #24sched

---
 _run_scheduler.sh | 51 +
 job.log | 1251 +++++++++++++++++
 jobs.log | 1 +
 requirements.txt | 3 +-
 v2realbot/common/db.py | 38 +-
 v2realbot/common/model.py | 37 +-
 v2realbot/config.py | 6 +
 v2realbot/controller/run_manager.py | 466 ++++++
 v2realbot/controller/services.py | 104 +-
 v2realbot/enums/enums.py | 20 +-
 v2realbot/loader/trade_offline_streamer.py | 2 +-
 v2realbot/main.py | 131 +-
 v2realbot/scheduler/__init__.py | 0
 v2realbot/scheduler/ap_scheduler.py | 307 ++++
 v2realbot/scheduler/scheduler.py | 427 ++++++
 v2realbot/static/index.html | 294 +++-
 v2realbot/static/js/mytables.js | 12 +-
 .../js/tables/archivetable/functions.js | 4 +-
 .../static/js/tables/archivetable/handlers.js | 4 +
 .../static/js/tables/archivetable/init.js | 21 +-
 .../static/js/tables/runmanager/functions.js | 100 ++
 .../static/js/tables/runmanager/handlers.js | 296 ++++
 v2realbot/static/js/tables/runmanager/init.js | 322 +++++
 .../static/js/tables/runmanager/modals.js | 195 +++
 v2realbot/static/main.css | 11 +
 v2realbot/strategy/base.py | 4 +-
 .../strategyblocks/inits/init_indicators.py | 13 +-
 v2realbot/utils/utils.py | 48 +-
 28 files changed, 4096 insertions(+), 72 deletions(-)
 create mode 100755 _run_scheduler.sh
 create mode 100644 job.log
 create mode 100644 jobs.log
 create mode 100644 v2realbot/controller/run_manager.py
 create mode 100644 v2realbot/scheduler/__init__.py
 create mode 100644 v2realbot/scheduler/ap_scheduler.py
 create mode 100644 v2realbot/scheduler/scheduler.py
 create mode 100644 v2realbot/static/js/tables/runmanager/functions.js
 create mode 100644 v2realbot/static/js/tables/runmanager/handlers.js
 create mode 100644 v2realbot/static/js/tables/runmanager/init.js
 create mode 100644 v2realbot/static/js/tables/runmanager/modals.js

diff --git a/_run_scheduler.sh b/_run_scheduler.sh
new file mode 100755
index 0000000..fad5a78
--- /dev/null
+++ b/_run_scheduler.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+
+# Approach: (https://chat.openai.com/c/43be8685-b27b-4e3b-bd18-0856f8d23d7e)
+# cron runs this script every minute between 09:00 and 16:59 New York time on weekdays
+# Also, this script writes a "heartbeat" message to the log file, so the user knows
+# that cron is running
+
+# Installation steps required:
+#chmod +x _run_scheduler.sh
+#install tzdata package: sudo apt-get install tzdata
+#crontab -e
+#CRON_TZ=America/New_York
+# * 9-16 * * 1-5 /home/david/v2trading/_run_scheduler.sh
+#
+# (Runs every minute during hours 9-16, Monday to Friday, US Eastern time)
+
+# Path to the Python script
+PYTHON_SCRIPT="v2realbot/scheduler/scheduler.py"
+
+# Log file path
+LOG_FILE="job.log"
+
+# Timezone for New York
+TZ='America/New_York'
+NY_DATE_TIME=$(TZ=$TZ date +'%Y-%m-%d %H:%M:%S')
+echo "NY_DATE_TIME: $NY_DATE_TIME"
+
+# Check if log file exists, create it if it doesn't
+if [ ! -f "$LOG_FILE" ]; then
+    touch "$LOG_FILE"
+fi
+
+# Check the last line of the log file
+LAST_LINE=$(tail -n 1 "$LOG_FILE")
+
+# Cron trigger message
+CRON_TRIGGER="Cron trigger: $NY_DATE_TIME"
+
+# Update the log
+if [[ "$LAST_LINE" =~ "Cron trigger:".* ]]; then
+    # Replace the last line with the new trigger message (portable for GNU/BSD sed)
+    sed '$ d' "$LOG_FILE" > "${LOG_FILE}.tmp" && mv "${LOG_FILE}.tmp" "$LOG_FILE"
+    echo "$CRON_TRIGGER" >> "$LOG_FILE"
+else
+    # Append a new cron trigger message
+    echo "$CRON_TRIGGER" >> "$LOG_FILE"
+fi
+
+
+# FOR DEBUG - Run the Python script and append output to log file
+python3 "$PYTHON_SCRIPT" >> "$LOG_FILE" 2>&1
\ No newline at end of file
diff --git a/job.log b/job.log
new file mode 100644
index 0000000..71a1ed5
--- /dev/null
+++ b/job.log
@@ -0,0 +1,1251 @@
+Cron trigger: 2024-02-16 21:24:36
+2024-02-17 09:24:41.627053: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
+To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
+Calendar data fetch successful 2024-12-31 2024-12-31
+Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00
+current_market_datetime_str: 2024-12-31 09:30:00
+current_time_str: 09:30
+Candidates fetched, start: 0 stop: 0
+CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US
+Cron trigger: 2024-02-16 21:26:20
+2024-02-17 09:26:24.178102: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
+To enable the following instructions: AVX2 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
+Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron trigger: 2024-02-16 21:27:28 +Cron trigger: 2024-02-16 21:27:30 +Cron trigger: 2024-02-16 21:27:31 +Cron trigger: 2024-02-16 21:27:42 +Cron trigger: 2024-02-16 21:27:43 +Cron trigger: 2024-02-16 21:31:28 +Traceback (most recent call last): + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 7, in + import v2realbot.controller.services as cs + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/controller/services.py", line 35, in + from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/strategyblocks/inits/init_indicators.py", line 7, in + import mlroom.utils.mlutils as ml + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/mlroom/utils/mlutils.py", line 9, in + from keras.models import model_from_json + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/__init__.py", line 8, in + from keras import _tf_keras + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/_tf_keras/__init__.py", line 1, in + from keras._tf_keras import keras + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/_tf_keras/keras/__init__.py", line 8, in + from keras import activations + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/activations/__init__.py", line 8, in + from keras.src.activations import deserialize + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/__init__.py", line 1, in + from keras.src import activations + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/activations/__init__.py", line 3, in + from keras.src.activations.activations import elu + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/activations/activations.py", line 1, in + from keras.src import backend + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/__init__.py", line 33, in + from keras.src.backend.tensorflow import * # noqa: F403 + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/tensorflow/__init__.py", line 1, in + from keras.src.backend.tensorflow import core + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/tensorflow/core.py", line 4, in + import tensorflow as tf +ModuleNotFoundError: No module named 'tensorflow' +Cron trigger: 2024-02-16 21:31:52 +Traceback (most recent call last): + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 7, in + import v2realbot.controller.services as cs + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/controller/services.py", line 
35, in + from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/strategyblocks/inits/init_indicators.py", line 7, in + import mlroom.utils.mlutils as ml + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/mlroom/utils/mlutils.py", line 9, in + from keras.models import model_from_json + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/__init__.py", line 8, in + from keras import _tf_keras + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/_tf_keras/__init__.py", line 1, in + from keras._tf_keras import keras + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/_tf_keras/keras/__init__.py", line 8, in + from keras import activations + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/activations/__init__.py", line 8, in + from keras.src.activations import deserialize + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/__init__.py", line 1, in + from keras.src import activations + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/activations/__init__.py", line 3, in + from keras.src.activations.activations import elu + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/activations/activations.py", line 1, in + from keras.src import backend + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/__init__.py", line 33, in + from keras.src.backend.tensorflow import * # noqa: F403 + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/tensorflow/__init__.py", line 1, in + from keras.src.backend.tensorflow import core + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/tensorflow/core.py", line 4, in + import tensorflow as tf +ModuleNotFoundError: No module named 'tensorflow' +Cron trigger: 2024-02-16 21:32:53 +Traceback (most recent call last): + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 7, in + import v2realbot.controller.services as cs + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/controller/services.py", line 35, in + from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/strategyblocks/inits/init_indicators.py", line 7, in + import mlroom.utils.mlutils as ml + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/mlroom/utils/mlutils.py", line 9, in + from keras.models import model_from_json + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/__init__.py", line 8, in + from keras import _tf_keras + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/_tf_keras/__init__.py", line 1, in + from keras._tf_keras import keras + File 
"/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/_tf_keras/keras/__init__.py", line 8, in + from keras import activations + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/activations/__init__.py", line 8, in + from keras.src.activations import deserialize + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/__init__.py", line 1, in + from keras.src import activations + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/activations/__init__.py", line 3, in + from keras.src.activations.activations import elu + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/activations/activations.py", line 1, in + from keras.src import backend + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/__init__.py", line 33, in + from keras.src.backend.tensorflow import * # noqa: F403 + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/tensorflow/__init__.py", line 1, in + from keras.src.backend.tensorflow import core + File "/Users/davidbrazda/Documents/Development/python/v2trading/.venv/lib/python3.10/site-packages/keras/src/backend/tensorflow/core.py", line 4, in + import tensorflow as tf +ModuleNotFoundError: No module named 'tensorflow' +Cron trigger: 2024-02-16 21:38:52 +Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron trigger: 2024-02-16 21:50:14 +Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron trigger: 2024-02-16 21:50:41 +Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron trigger: 2024-02-16 21:58:56 +Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron trigger: 2024-02-16 21:59:08 +Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron 
trigger: 2024-02-16 21:59:14 +Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron trigger: 2024-02-16 22:39:19 +Calendar data fetch successful 2024-12-31 2024-12-31 +Market open at 2024-12-31 09:30:00-05:00 and close at 2024-12-31 16:00:00-05:00 +current_market_datetime_str: 2024-12-31 09:30:00 +current_time_str: 09:30 +Candidates fetched, start: 0 stop: 0 +CALL FINISHED, with 2024-12-31 09:30:00-04:56 RESULT: -1, No candidates found for 2024-12-31 09:30:00-04:56 and US +Cron trigger: 2024-02-16 23:29:34 +Traceback (most recent call last): + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 207, in + print("SCheduler NY time: ", datetime.strptime("debug_date", "%d.%m.%Y %H:%M")) + File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/_strptime.py", line 568, in _strptime_datetime + tt, fraction, gmtoff_fraction = _strptime(data_string, format) + File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/_strptime.py", line 349, in _strptime + raise ValueError("time data %r does not match format %r" % +ValueError: time data 'debug_date' does not match format '%d.%m.%Y %H:%M' +Cron trigger: 2024-02-16 23:29:47 +Traceback (most recent call last): + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 207, in + print("SCheduler NY time: ", datetime.strptime("debug_date", "%d.%m.%Y %H:%M")) + File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/_strptime.py", line 568, in _strptime_datetime + tt, fraction, gmtoff_fraction = _strptime(data_string, format) + File "/Library/Frameworks/Python.framework/Versions/3.10/lib/python3.10/_strptime.py", line 349, in _strptime + raise ValueError("time data %r does not match format %r" % +ValueError: time data 'debug_date' does not match format '%d.%m.%Y %H:%M' +Cron trigger: 2024-02-16 23:31:05 +Scheduler.py NY time: 16.02.2024 23:31 +Calendar data fetch successful 2024-02-16 2024-02-16 +Market open at 2024-02-16 09:30:00-05:00 and close at 2024-02-16 16:00:00-05:00 +current_market_datetime_str: 2024-02-16 23:31:09 +current_time_str: 23:31 +Candidates fetched, start: 1 stop: 3 +START - Looping over 1 candidates +Candidate: start +Traceback (most recent call last): + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 210, in + res, msg = startstop_scheduled(debug_date=debug_date, market="US") + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 87, in startstop_scheduled + if cs.is_stratin_running(record.strat_id): +AttributeError: 'str' object has no attribute 'strat_id' +Cron trigger: 2024-02-17 08:23:05 +Traceback (most recent call last): + File "/Users/davidbrazda/Documents/Development/python/v2trading/v2realbot/scheduler/scheduler.py", line 6, in + import pytz # Install using 'pip install pytz' + ^^^^^^^^^^^ +ModuleNotFoundError: No module named 'pytz' +Current 0 scheduled jobs: [] +Current 0 scheduled jobs: [] +Current 0 scheduled jobs: [] +Hello +Current 0 scheduled jobs: [] +Current 0 scheduled jobs: [] +starting insert_queue2db thread +Current 0 scheduled jobs: [] +Updated 9 
scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Current 0 scheduled jobs: [] +Updated 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Current 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Updated 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Current 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +********** WS Streamer STARTED ********** +Updated 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Current 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Updated 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Current 0 scheduled jobs: [] +Job ID: scheduler_testinterval +Next Run Time: 2024-02-20 22:05:12.162884-05:00 +Job Function: print_hello +Trigger: interval[0:00:10] +Job Args: +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 21:50:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 21:50:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Updated 9 scheduled jobs. 
+Current 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Job ID: scheduler_testinterval +Next Run Time: 2024-02-20 22:06:39.698718-05:00 +Job Function: print_hello +Trigger: interval[0:00:10] +Job Args: +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-20 22:07:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-20 22:07:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Updated 9 scheduled jobs. 
+Current 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, , , ,  +, , , , ] +Job ID: scheduler_testinterval +Next Run Time: 2024-02-20 22:07:11.334985-05:00 +Job Function: print_hello +Trigger: interval[0:00:10] +Job Args: +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:07:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:07:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: start_runman_record +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: stop_runman_record +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Updated 9 scheduled jobs. 
+Current 0 scheduled jobs: [] +Job ID: scheduler_testinterval +Next Run Time: 2024-02-20 22:17:40.327829-05:00 +Job Function: print_hello +Trigger: interval[0:00:10] +Job Args: +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:07:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:07:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Current 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>] +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-20 22:18:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-20 22:18:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_testinterval +Next Run Time: 2024-02-20 22:18:00.488891-05:00 +Job Function: print_hello +Trigger: interval[0:00:10] +Job Args: +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 
8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Startj g record bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Stopping record bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +strategy 15687653-04fb-46aa-8806-ef4af4e45d96 not RUNNING or not distinctive (manually launched or two strat_ids in scheduler) +Hello +Calendar data fetch successful 2024-02-20 2024-02-20 +OPEN:2024-02-20 09:30:00-05:00 CLOSE:2024-02-20 16:00:00-05:00 +StrategyInstance fetched: 15687653-04fb-46aa-8806-ef4af4e45d96 +RunRequest( + id=UUID('15687653-04fb-46aa-8806-ef4af4e45d96'), + account=<Account.ACCOUNT1: 'ACCOUNT1'>, + mode=, + note='SCHED 22:18-22:19', + debug=False, + strat_json='{"id": "15687653-04fb-46aa-8806-ef4af4e45d96", "id2": -2013, "name": "SUPERTREND TV", "symbol": "BAC", "class_name": "StrategyClassicSL", "script":  +"ENTRY_ClassicSL_v01", "open_rush": 0, "close_rush": 0, "stratvars_conf": "[stratvars]\\r\\n#obecne platici pro vsechny signaly \\r\\n #abs profit/loss to suspend  +strategy\\r\\n #max_sum_profit_to_quit = 300\\r\\n #max_sum_loss_to_quit = -300\\r\\n\\r\\n #procenta profit/loss to suspend startegy\\r\\n  +#max_sum_profit_to_quit_rel = 0.3\\r\\n #max_sum_loss_to_quit_rel = -0.3\\r\\n normalize_ticks = true\\r\\n normalized_base_price = 30\\r\\n long_enabled =  +true\\r\\n short_enabled = true\\r\\n chunk = 2000\\r\\n maxpozic = 4000\\r\\n #nastavuje pevny profit\\r\\n profit = 0.25\\r\\n max_profit = 0.30\\r\\n  +consolidation_bar_count = 5\\r\\n #po minutach\\r\\n window_open = 0\\r\\n window_close = 360\\r\\n signal_only_on_confirmed = true\\r\\n  +forced_exit_window_start = 361\\r\\n forced_exit_window_end = 389\\r\\n #pouze posledni minutu prod\\u00e1me, ne\\u010dek\\u00e1me na breakeven\\r\\n  +#forced_exit_breakeven_period = false\\r\\n\\r\\n[stratvars.indicators.atr10]\\r\\n #[stratvars.indicators.atr10]\\r\\n type = \'ATR\'\\r\\n length = 10\\r\\n  +on_confirmed_only = true\\r\\n[stratvars.indicators.MESA]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n returns = [\\"mama\\", \\"fama\\"]\\r\\n  +on_confirmed_only = true\\r\\n cp.defval = \'close\'\\r\\n #cp.lookback = 100\\r\\n cp.type = \'MAMA\'\\r\\n cp.params.series = [\'close\']\\r\\n  +#cp.params.keys.fastlimit = 0\\r\\n #cp.params.keys.slowlimit = 0\\r\\n scale = \\"right\\"\\r\\n[stratvars.indicators.supertrend]\\r\\n output = \'bar\'\\r\\n  +type = \'custom\'\\r\\n returns = [\'basic_up\',\'basic_dn\',\'final_up\', \'final_down\', \'trend\']\\r\\n scale = [\'right\', \'right\', \'right\', \'right\',  +\'signal\']\\r\\n subtype = \'classed\'\\r\\n on_confirmed_only = true\\r\\n cp.class_name = \'SuperTrend\'\\r\\n cp.next = [\'high\', \'low\', \'close\']\\r\\n  +[stratvars.indicators.supertrend.cp.init] #params is send to init, opt\\r\\n atr_period=14\\r\\n multiplier=1.2\\r\\n[stratvars.indicators.supertrend1]\\r\\n output = +\\"bar\\"\\r\\n type = \\"custom\\"\\r\\n returns = [\'dn\',\'up\',\'trend1\']\\r\\n scale = [\'right\',\'right\',\'signal\']\\r\\n subtype = \\"classed\\"\\r\\n  +on_confirmed_only = true\\r\\n cp.class_name = \\"SuperTrend1\\"\\r\\n cp.next = [\'high\',\'low\',\'close\'] #optional, source posilany explicitne do next ve  +stejnojmenn\\u00e9m parametru, note that next has access to state\\r\\n #cp.next_mapping = [\\"data\\"] \\r\\n [stratvars.indicators.supertrend1.cp.init] #params is send to +init, 
opt\\r\\n period=14\\r\\n multiplier=1.2\\r\\n[stratvars.indicators.supertrendTV]\\r\\noutput = \'bar\'\\r\\ntype = \'custom\'\\r\\nreturns = [\'dnB\', \'upB\',  +\'trendB\']\\r\\nscale = [\'right\', \'right\', \'signal\']\\r\\nsubtype = \'classed\'\\r\\non_confirmed_only = true\\r\\ncp.class_name = \'SuperTrendTV\'\\r\\ncp.next =  +[\'high\', \'low\', \'close\']\\r\\n[stratvars.indicators.supertrendBard.cp.init] #params is send to init, opt\\r\\natr_period = 10\\r\\natr_multiplier =  +3\\r\\n[stratvars.indicators.mama_angle3]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n cp.lookback = 10\\r\\n  +cp.type = \'LINEARREG_ANGLE\'\\r\\n cp.params.series = [\'mama\']\\r\\n cp.params.keys.timeperiod = 3\\r\\n scale =  +\'linreg\'\\r\\n[stratvars.indicators.ht_trendline]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n cp.defval =  +\\"close\\" #pokud nejsou hodnoty, vracime close\\r\\n #cp.lookback = 100 #celkovy lookback pro vsechny serie\\r\\n cp.type = \'HT_TRENDLINE\'\\r\\n cp.params.series  += [\\"close\\"] #vstupni serie pro pozicni parametry pro ta-lib\\r\\n #cp.params.keys.timeperiod = 14 #keyword argumenty pro ta-lib\\r\\n scale =  +\\"right\\"\\r\\n[stratvars.indicators.ht_dcperiod]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n #cp.source =  +\'close\'\\r\\n cp.defval = \\"close\\" #pokud nejsou hodnoty, vracime close\\r\\n cp.lookback = 100 #celkovy lookback pro vsechny serie\\r\\n cp.type =  +\'HT_DCPERIOD\'\\r\\n cp.params.series = [\\"close\\"] #vstupni serie pro pozicni parametry pro ta-lib\\r\\n #cp.params.keys.timeperiod = 14 #keyword argumenty pro  +ta-lib\\r\\n scale = \\"period\\"\\r\\n[stratvars.indicators.bbands]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n returns = [\\"upperband\\",  +\\"middleband\\", \\"lowerband\\"]\\r\\n on_confirmed_only = true\\r\\n cp.defval = \'close\'\\r\\n cp.lookback = 100\\r\\n cp.type = \'BBANDS\'\\r\\n  +cp.params.series = [\'close\']\\r\\n cp.params.keys.timeperiod = 5\\r\\n cp.params.keys.nbdevup=2\\r\\n cp.params.keys.nbdevdn=2\\r\\n  +cp.params.keys.matype=0\\r\\n scale = \'right\'\\r\\n[stratvars.indicators.vwap_cum]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only += true\\r\\n cp.convertToNumpy = [\'hlcc4\',\'volume\']\\r\\n cp.expression = \'np.sum((hlcc4 * volume) / np.sum(volume))\'\\r\\n scale =  +\'right\'\\r\\n[stratvars.indicators.vwap_angle5]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n cp.lookback = 5\\r\\n  +cp.type = \'LINEARREG_ANGLE\'\\r\\n cp.params.series = [\'vwap_cum\']\\r\\n cp.params.keys.timeperiod = 5\\r\\n scale =  +\'linreg\'\\r\\n[stratvars.indicators.div_vwap_cum]\\r\\n type = \'custom\'\\r\\n subtype = \'divergence\'\\r\\n on_confirmed_only = true\\r\\n cp.source1 =  +\'close\'\\r\\n cp.source2 = \'vwap_cum\'\\r\\n cp.type = \'reln\'\\r\\n scale = \'left\'\\r\\n[stratvars.indicators.div_vwap_cum_mul] #multiplikator predchoziho  +indikatoru kvuli lepsimu zobrazeni a zaokrouhleni\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n cp.expression = \'div_vwap_cum[-1]*1000\'\\r\\n  +scale = \'own\'\\r\\n[stratvars.indicators.profit]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n cp.expression =  +\'state.profit\'\\r\\n[stratvars.indicators.rsi14]\\r\\n output = \\"bar\\"\\r\\n type = \'custom\'\\r\\n subtype = \'rsi\'\\r\\n MA_length = 10 
#vytvori dalsi  +stejnojmenny indikator s postfixem _MA\\r\\n on_confirmed_only = true\\r\\n cp.source = \'vwap\'\\r\\n cp.length = 15\\r\\n cp.start = \\"linear\\"\\r\\n scale  += \'left\'\\r\\n[stratvars.indicators.weekday]\\r\\n output = \\"bar\\"\\r\\n type = \\"custom\\"\\r\\n subtype = \\"classed\\"\\r\\n on_confirmed_only =  +true\\r\\n cp.class_name = \\"WeekDay\\"\\r\\n[stratvars.indicators.volumebar]\\r\\n type = \'custom\'\\r\\n subtype = \'barparams\'\\r\\n on_confirmed_only =  +true\\r\\n cp.source = \'volume\'\\r\\n[stratvars.indicators.indexbar]\\r\\n type = \'custom\'\\r\\n subtype = \'barparams\'\\r\\n on_confirmed_only = true\\r\\n  +cp.source = \'index\'\\r\\n[stratvars.indicators.volumebarema]\\r\\n type = \'custom\'\\r\\n subtype = \'ma\'\\r\\n on_confirmed_only = true\\r\\n cp.source =  +\'volumebar\'\\r\\n cp.lookback = 30\\r\\n cp.type = \'ema\'\\r\\n[stratvars.indicators.voldiv]\\r\\n type = \'custom\'\\r\\n subtype = \'divergence\'\\r\\n  +on_confirmed_only = true\\r\\n cp.source1 = \'volumebar\'\\r\\n cp.source2 = \'volumebarema\'\\r\\n cp.type = \'reln\'\\r\\n scale =  +\\"left\\"\\r\\n[stratvars.indicators.div_vwap_angle6]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n cp.lookback =  +10\\r\\n cp.type = \'LINEARREG_ANGLE\'\\r\\n cp.params.series = [\'div_vwap_cum\']\\r\\n cp.params.keys.timeperiod = 10\\r\\n scale =  +\'linreg\'\\r\\n[stratvars.indicators.signal]\\r\\n output = \\"bar\\"\\r\\n type = \'custom\'\\r\\n subtype = \'conditional\'\\r\\n on_confirmed_only = true\\r\\n +scale = \'left\'\\r\\n[stratvars.indicators.signal.cp.conditions.buy]\\r\\n #supertrend up\\r\\n trendB.AND.change_val_if_equals = 1\\r\\n #cena je nad vwapem urcitou +divergence\\r\\n div_vwap_cum_mul.AND.change_val_if_above = 4\\r\\n #strmy uhel divergence\\r\\n div_vwap_angle6.AND.change_val_if_above = 0.023\\r\\n true_val =  +1\\r\\n[stratvars.indicators.signal.cp.conditions.sell]\\r\\n trendB.AND.change_val_if_equals = -1\\r\\n #cena je pod vwapem o danou divergen ce\\r\\n  +div_vwap_cum_mul.AND.change_val_if_below = -3\\r\\n\\r\\n #uhel divergence je strm\\u00fd\\r\\n div_vwap_angle6.AND.change_val_if_below = -0.009\\r\\n true_val =  +-1\\r\\n[stratvars.indicators.sl_long]\\r\\n #[stratvars.indicators.freq_period_multiplier]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n  +on_confirmed_only = true\\r\\n cp.expression = \'close[-1]-(atr10[-1]*2)\'\\r\\n scale= \\"right\\"\\r\\n[stratvars.indicators.sl_short]\\r\\n  +#[stratvars.indicators.freq_period_multiplier]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n cp.expression =  +\'close[-1]+(atr10[-1]*2)\'\\r\\n scale= \\"right\\"\\r\\n[stratvars.indicators.long_goal]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n  +on_confirmed_only = true\\r\\n cp.expression = \'close[-1]+(atr10[-1]*3)\'\\r\\n scale = \'right\'\\r\\n[stratvars.indicators.short_goal]\\r\\n type =  +\'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n cp.expression = \'close[-1]-(atr10[-1]*3)\'\\r\\n scale =  +\'right\'\\r\\n[stratvars.signals.main]\\r\\n signal_only_on_confirmed = true\\r\\n next_signal_offset_from_last_exit = 0\\r\\n #6.5h, 360=6h, \\r\\n window_open = +2\\r\\n window_close = 360\\r\\n min_bar_index = 0\\r\\n #activated = true\\r\\n profit = 0.50\\r\\n #profit_short = 0.07\\r\\n #profit_short =  +\\"short_goal\\"\\r\\n #profit_long = 0.07\\r\\n #profit_long = \\"long_goal\\"\\r\\n #ochrana proti extremnim 
hodntoam z indikatoru - fallback\\r\\n  +profit_min_ind_tick_value = 0.03\\r\\n max_profit = 0.50\\r\\n #SL_defval_short = \\"sl_short\\"\\r\\n #SL_defval_short = \\"high1bars\\"\\r\\n #SL_defval_long =  +\\"sl_long\\"\\r\\n SL_defval_short = 0.30\\r\\n SL_defval_long = 0.30\\r\\n SL_trailing_enabled_short = false\\r\\n SL_trailing_enabled_long = false\\r\\n  +SL_trailing_offset_short = 0.04\\r\\n SL_trailing_offset_long = 0.04\\r\\n SL_trailing_step_short = 0.01\\r\\n SL_trailing_step_long = 0.01\\r\\n  +SL_trailing_stop_at_breakeven_short = false\\r\\n SL_trailing_stop_at_breakeven_long = false\\r\\n reverse_for_SL_exit_short = \\"no\\"\\r\\n reverse_for_SL_exit_long += \\"cond\\"\\r\\n #[0.236, 0.382, 0.618]\\r\\n #SL_opt_exit_levels_short = [0.236, 0.382, 0.618]\\r\\n #SL_opt_exit_levels_long = [0.236, 0.382, 0.618]\\r\\n  +##SL_opt_exit_sizes_short = [0.2]\\r\\n #SL_opt_exit_sizes_long = [0.2]\\r\\n[stratvars.signals.main.conditions]\\r\\n #preconditions \\r\\n\\r\\n #SHORT ENTRY\\r\\n  +signal.AND.go_short_if_equals = -1\\r\\n\\r\\n #pouze pri prepnuti\\r\\n #vwap_angle5.AND.go_short_if_below = -0.1\\r\\n \\r\\n #trendB.AND.go_short_if_fallingc =  +2\\r\\n #signal_linreg.go_short_if_equals = -1\\r\\n #signal.go_short_if_equals = -1\\r\\n #signal_conf_short.go_short_if_equals = 1\\r\\n #EXIT PROTECTION\\r\\n  +#drzime dokud stoupame\\r\\n #slopetick5.dont_exit_long_if_above = 0\\r\\n #mama_angle3.dont_exit_long_if_above = 0\\r\\n #mama_angle3.dont_exit_short_if_below = 0  +\\r\\n #signal_linreg.dont_exit_short_if_equals = -1 \\r\\n #EXIT - drzime po dobu trvani signalu\\r\\n signal.exit_long_if_not_equals = 1\\r\\n  +signal.exit_short_if_not_equals = -1\\r\\n #okamzity exit, kdyz je pod 0 ??\\r\\n #signal_linreg.exit_long_if_not_equals = 1\\r\\n  +#signal_linreg.exit_short_if_not_equals = -1\\r\\n\\r\\n #LONG ENTRY\\r\\n signal.go_long_if_equals = 1\\r\\n\\r\\n #vwap_angle5.AND.go_long_if_above = 0.1\\r\\n  +#trendB.AND.go_long_if_risingc = 2\\r\\n #signal_linreg.go_long_if_equals = 1\\r\\n\\r\\n #reverse\\r\\n #trend1.AND.reverse_long_if_equals = -1\\r\\n  +#vwap_angle5.AND.reverse_long_if_below = -0.05\\r\\n #trend1.AND.reverse_short_if_equals = 1\\r\\n #vwap_angle5.AND.reverse_short_if_above = 0.05\\r\\n  +#signal_conf_sell.reverse_long_if_equals = 1\\r\\n\\r\\n #CONDITIONAL REVERSE ONLY\\r\\n #slope1.slreverseonly_long_if_below = 0\\r\\n  +#slope1.slreverseonly_short_if_above = 0\\r\\n\\r\\n[stratvars.exit]\\r\\n #maximalni stoploss, fallout pro \\"exit_short_if\\" direktivy\\r\\n SL_defval_short =  +0.10\\r\\n SL_defval_long = 0.02\\r\\n SL_trailing_enabled_short = false\\r\\n SL_trailing_enabled_long = false\\r\\n #minimalni vzdalenost od aktualni SL, aby se  +SL posunula na \\r\\n SL_trailing_offset_short = 0.02\\r\\n SL_trailing_offset_long = 0.02\\r\\n #zda trailing zastavit na brakeeven\\r\\n  +SL_trailing_stop_at_breakeven_short = false\\r\\n SL_trailing_stop_at_breakeven_long = false\\r\\n[stratvars.exit.conditions]\\r\\n #obecne exit conditions platne pro  +vsechny\\r\\n #slope20.exit_long_if_below = 20\\r\\n #slope10.exit_short_if_above = 0.3\\r\\n\\r\\n #TBD - mozna udelat EOD exit jako direktivu\\r\\n",  +"add_data_conf": "[[add_data]]\\r\\n symbol=\\"BAC\\"\\r\\n rectype= \\"cbar\\"\\r\\n resolution=12\\r\\n update_ltp=true\\r\\n align=\\"round\\"\\r\\n  +mintick=0\\r\\n minsize=100\\r\\n exthours=false", "note": "", "history": "\\nREASON:STOP Signal received\\nREASON:STOP Signal received\\nREASON:STOP Signal  +received\\nREASON:STOP Signal received"}', + 
ilog_save=False, + bt_from=None, + bt_to=None, + weekdays_filter=[], + test_batch_id=None, + batch_id='22', + cash=100000, + skip_cache=False +) +RUN 7d8198bd-bbc7-4dbb-a37f-c1df8edde461 INITIATED +instance vytvorena <v2realbot.strategy.StrategyClassicSL.StrategyClassicSL object at 0x144d11cf0> +adding stream +{ + 'symbol': 'BAC', + 'rectype': <RecordType.CBAR: 'cbar'>, + 'resolution': 12, + 'update_ltp': True, + 'align': , + 'mintick': 0, + 'minsize': 100, + 'exthours': False +} +stav pred pridavanim [] +websocket zatim nebezi, pouze pridavame do pole +Starting strategy SUPERTREND TV +SpuÅ”těna SUPERTREND TV +********** WS Streamer - run WS-LDR-SUPERTREND TV ********** +symbol ve streams BAC +subscribed to BAC +WS-LDR-SUPERTREND TV it is not running, starting by calling RUN +********** WS Streamer STARTED ********** +********** WS Order Update Streamer started for SUPERTREND TV ********** +creating workdict for buy value {'AND': [('trendB', 'change_val_if_equals', 1), ('div_vwap_cum_mul', 'change_val_if_above', 4), ('div_vwap_angle6', 'change_val_if_above', +0.023)], 'OR': []} +creating workdict for sell value {'AND': [('trendB', 'change_val_if_equals', -1), ('div_vwap_cum_mul', 'change_val_if_below', -3), ('div_vwap_angle6', 'change_val_if_below', +-0.009)], 'OR': []} +Strategy 15687653-04fb-46aa-8806-ef4af4e45d96 started successfully. +Current 9 scheduled jobs: [<Job (id=scheduler_testinterval name=print_hello)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>] +Job ID: scheduler_testinterval +Next Run Time: 2024-02-20 22:18:11.020523-05:00 +Job Function: print_hello +Trigger: interval[0:00:10] +Job Args: +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:18:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:18:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +APS jobs initialized +Record in db updated bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Current 0 scheduled jobs: [] +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job 
Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:18:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='18'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:18:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='18'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Current 8 scheduled jobs: [.wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>] +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:18:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='18'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:18:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='18'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job 
Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Current 0 scheduled jobs: [] +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:18:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='18'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:29:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='29'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='16', minute='0'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Current 8 scheduled jobs: [.wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>] +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-20 22:32:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='32'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-20 22:33:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='33'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run 
Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='16', minute='0'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Startj g record bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Calendar data fetch successful 2024-02-20 2024-02-20 +OPEN:2024-02-20 09:30:00-05:00 CLOSE:2024-02-20 16:00:00-05:00 +StrategyInstance fetched: 15687653-04fb-46aa-8806-ef4af4e45d96 +RunRequest( + id=UUID('15687653-04fb-46aa-8806-ef4af4e45d96'), + account=<Account.ACCOUNT1: 'ACCOUNT1'>, + mode=, + note='SCHED 22:32-22:33', + debug=False, + strat_json='{"id": "15687653-04fb-46aa-8806-ef4af4e45d96", "id2": -2013, "name": "SUPERTREND TV", "symbol": "BAC", "class_name": "StrategyClassicSL", "script": "ENTRY_ClassicSL_v01", "open_rush": 0,  +"close_rush": 0, "stratvars_conf": "[stratvars]\\r\\n#obecne platici pro vsechny signaly \\r\\n #abs profit/loss to suspend strategy\\r\\n #max_sum_profit_to_quit = 300\\r\\n #max_sum_loss_to_quit =  +-300\\r\\n\\r\\n #procenta profit/loss to suspend startegy\\r\\n #max_sum_profit_to_quit_rel = 0.3\\r\\n #max_sum_loss_to_quit_rel = -0.3\\r\\n normalize_ticks = true\\r\\n normalized_base_price =  +30\\r\\n long_enabled = true\\r\\n short_enabled = true\\r\\n chunk = 2000\\r\\n maxpozic = 4000\\r\\n #nastavuje pevny profit\\r\\n profit = 0.25\\r\\n max_profit = 0.30\\r\\n  +consolidation_bar_count = 5\\r\\n #po minutach\\r\\n window_open = 0\\r\\n window_close = 360\\r\\n signal_only_on_confirmed = true\\r\\n forced_exit_window_start = 361\\r\\n  +forced_exit_window_end = 389\\r\\n #pouze posledni minutu prod\\u00e1me, ne\\u010dek\\u00e1me na breakeven\\r\\n #forced_exit_breakeven_period = false\\r\\n\\r\\n[stratvars.indicators.atr10]\\r\\n  +#[stratvars.indicators.atr10]\\r\\n type = \'ATR\'\\r\\n length = 10\\r\\n on_confirmed_only = true\\r\\n[stratvars.indicators.MESA]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n  +returns = [\\"mama\\", \\"fama\\"]\\r\\n on_confirmed_only = true\\r\\n cp.defval = \'close\'\\r\\n #cp.lookback = 100\\r\\n cp.type = \'MAMA\'\\r\\n cp.params.series = [\'close\']\\r\\n  +#cp.params.keys.fastlimit = 0\\r\\n #cp.params.keys.slowlimit = 0\\r\\n scale = \\"right\\"\\r\\n[stratvars.indicators.supertrend]\\r\\n output = \'bar\'\\r\\n type = \'custom\'\\r\\n returns =  +[\'basic_up\',\'basic_dn\',\'final_up\', \'final_down\', \'trend\']\\r\\n scale = [\'right\', \'right\', \'right\', \'right\', \'signal\']\\r\\n subtype = \'classed\'\\r\\n on_confirmed_only = true\\r\\n  +cp.class_name = \'SuperTrend\'\\r\\n cp.next = [\'high\', \'low\', \'close\']\\r\\n [stratvars.indicators.supertrend.cp.init] #params is send to init, opt\\r\\n atr_period=14\\r\\n  +multiplier=1.2\\r\\n[stratvars.indicators.supertrend1]\\r\\n output = \\"bar\\"\\r\\n type = \\"custom\\"\\r\\n returns = 
[\'dn\',\'up\',\'trend1\']\\r\\n scale = [\'right\',\'right\',\'signal\']\\r\\n +subtype = \\"classed\\"\\r\\n on_confirmed_only = true\\r\\n cp.class_name = \\"SuperTrend1\\"\\r\\n cp.next = [\'high\',\'low\',\'close\'] #optional, source posilany explicitne do next ve  +stejnojmenn\\u00e9m parametru, note that next has access to state\\r\\n #cp.next_mapping = [\\"data\\"] \\r\\n [stratvars.indicators.supertrend1.cp.init] #params is send to init, opt\\r\\n period=14\\r\\n  +multiplier=1.2\\r\\n[stratvars.indicators.supertrendTV]\\r\\noutput = \'bar\'\\r\\ntype = \'custom\'\\r\\nreturns = [\'dnB\', \'upB\', \'trendB\']\\r\\nscale = [\'right\', \'right\', \'signal\']\\r\\nsubtype =  +\'classed\'\\r\\non_confirmed_only = true\\r\\ncp.class_name = \'SuperTrendTV\'\\r\\ncp.next = [\'high\', \'low\', \'close\']\\r\\n[stratvars.indicators.supertrendBard.cp.init] #params is send to init,  +opt\\r\\natr_period = 10\\r\\natr_multiplier = 3\\r\\n[stratvars.indicators.mama_angle3]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n cp.lookback =  +10\\r\\n cp.type = \'LINEARREG_ANGLE\'\\r\\n cp.params.series = [\'mama\']\\r\\n cp.params.keys.timeperiod = 3\\r\\n scale = \'linreg\'\\r\\n[stratvars.indicators.ht_trendline]\\r\\n type =  +\'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n cp.defval = \\"close\\" #pokud nejsou hodnoty, vracime close\\r\\n #cp.lookback = 100 #celkovy lookback pro vsechny  +serie\\r\\n cp.type = \'HT_TRENDLINE\'\\r\\n cp.params.series = [\\"close\\"] #vstupni serie pro pozicni parametry pro ta-lib\\r\\n #cp.params.keys.timeperiod = 14 #keyword argumenty pro ta-lib\\r\\n  +scale = \\"right\\"\\r\\n[stratvars.indicators.ht_dcperiod]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only = true\\r\\n #cp.source = \'close\'\\r\\n cp.defval =  +\\"close\\" #pokud nejsou hodnoty, vracime close\\r\\n cp.lookback = 100 #celkovy lookback pro vsechny serie\\r\\n cp.type = \'HT_DCPERIOD\'\\r\\n cp.params.series = [\\"close\\"] #vstupni serie pro  +pozicni parametry pro ta-lib\\r\\n #cp.params.keys.timeperiod = 14 #keyword argumenty pro ta-lib\\r\\n scale = \\"period\\"\\r\\n[stratvars.indicators.bbands]\\r\\n type = \'custom\'\\r\\n subtype =  +\'talib_ind\'\\r\\n returns = [\\"upperband\\", \\"middleband\\", \\"lowerband\\"]\\r\\n on_confirmed_only = true\\r\\n cp.defval = \'close\'\\r\\n cp.lookback = 100\\r\\n cp.type = \'BBANDS\'\\r\\n +cp.params.series = [\'close\']\\r\\n cp.params.keys.timeperiod = 5\\r\\n cp.params.keys.nbdevup=2\\r\\n cp.params.keys.nbdevdn=2\\r\\n cp.params.keys.matype=0\\r\\n scale =  +\'right\'\\r\\n[stratvars.indicators.vwap_cum]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n cp.convertToNumpy = [\'hlcc4\',\'volume\']\\r\\n  +cp.expression = \'np.sum((hlcc4 * volume) / np.sum(volume))\'\\r\\n scale = \'right\'\\r\\n[stratvars.indicators.vwap_angle5]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n  +on_confirmed_only = true\\r\\n cp.lookback = 5\\r\\n cp.type = \'LINEARREG_ANGLE\'\\r\\n cp.params.series = [\'vwap_cum\']\\r\\n cp.params.keys.timeperiod = 5\\r\\n scale =  +\'linreg\'\\r\\n[stratvars.indicators.div_vwap_cum]\\r\\n type = \'custom\'\\r\\n subtype = \'divergence\'\\r\\n on_confirmed_only = true\\r\\n cp.source1 = \'close\'\\r\\n cp.source2 =  +\'vwap_cum\'\\r\\n cp.type = \'reln\'\\r\\n scale = \'left\'\\r\\n[stratvars.indicators.div_vwap_cum_mul] #multiplikator predchoziho indikatoru kvuli lepsimu 
zobrazeni a zaokrouhleni\\r\\n type =  +\'custom\'\\r\\n subtype = \'expression\'\\r\\n cp.expression = \'div_vwap_cum[-1]*1000\'\\r\\n scale = \'own\'\\r\\n[stratvars.indicators.profit]\\r\\n type = \'custom\'\\r\\n subtype =  +\'expression\'\\r\\n on_confirmed_only = true\\r\\n cp.expression = \'state.profit\'\\r\\n[stratvars.indicators.rsi14]\\r\\n output = \\"bar\\"\\r\\n type = \'custom\'\\r\\n subtype = \'rsi\'\\r\\n  +MA_length = 10 #vytvori dalsi stejnojmenny indikator s postfixem _MA\\r\\n on_confirmed_only = true\\r\\n cp.source = \'vwap\'\\r\\n cp.length = 15\\r\\n cp.start = \\"linear\\"\\r\\n scale =  +\'left\'\\r\\n[stratvars.indicators.weekday]\\r\\n output = \\"bar\\"\\r\\n type = \\"custom\\"\\r\\n subtype = \\"classed\\"\\r\\n on_confirmed_only = true\\r\\n cp.class_name =  +\\"WeekDay\\"\\r\\n[stratvars.indicators.volumebar]\\r\\n type = \'custom\'\\r\\n subtype = \'barparams\'\\r\\n on_confirmed_only = true\\r\\n cp.source =  +\'volume\'\\r\\n[stratvars.indicators.indexbar]\\r\\n type = \'custom\'\\r\\n subtype = \'barparams\'\\r\\n on_confirmed_only = true\\r\\n cp.source =  +\'index\'\\r\\n[stratvars.indicators.volumebarema]\\r\\n type = \'custom\'\\r\\n subtype = \'ma\'\\r\\n on_confirmed_only = true\\r\\n cp.source = \'volumebar\'\\r\\n cp.lookback = 30\\r\\n  +cp.type = \'ema\'\\r\\n[stratvars.indicators.voldiv]\\r\\n type = \'custom\'\\r\\n subtype = \'divergence\'\\r\\n on_confirmed_only = true\\r\\n cp.source1 = \'volumebar\'\\r\\n cp.source2 =  +\'volumebarema\'\\r\\n cp.type = \'reln\'\\r\\n scale = \\"left\\"\\r\\n[stratvars.indicators.div_vwap_angle6]\\r\\n type = \'custom\'\\r\\n subtype = \'talib_ind\'\\r\\n on_confirmed_only =  +true\\r\\n cp.lookback = 10\\r\\n cp.type = \'LINEARREG_ANGLE\'\\r\\n cp.params.series = [\'div_vwap_cum\']\\r\\n cp.params.keys.timeperiod = 10\\r\\n scale =  +\'linreg\'\\r\\n[stratvars.indicators.signal]\\r\\n output = \\"bar\\"\\r\\n type = \'custom\'\\r\\n subtype = \'conditional\'\\r\\n on_confirmed_only = true\\r\\n scale =  +\'left\'\\r\\n[stratvars.indicators.signal.cp.conditions.buy]\\r\\n #supertrend up\\r\\n trendB.AND.change_val_if_equals = 1\\r\\n #cena je nad vwapem urcitou divergence\\r\\n  +div_vwap_cum_mul.AND.change_val_if_above = 4\\r\\n #strmy uhel divergence\\r\\n div_vwap_angle6.AND.change_val_if_above = 0.023\\r\\n true_val =  +1\\r\\n[stratvars.indicators.signal.cp.conditions.sell]\\r\\n trendB.AND.change_val_if_equals = -1\\r\\n #cena je pod vwapem o danou divergen ce\\r\\n div_vwap_cum_mul.AND.change_val_if_below =  +-3\\r\\n\\r\\n #uhel divergence je strm\\u00fd\\r\\n div_vwap_angle6.AND.change_val_if_below = -0.009\\r\\n true_val = -1\\r\\n[stratvars.indicators.sl_long]\\r\\n  +#[stratvars.indicators.freq_period_multiplier]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n cp.expression = \'close[-1]-(atr10[-1]*2)\'\\r\\n scale=  +\\"right\\"\\r\\n[stratvars.indicators.sl_short]\\r\\n #[stratvars.indicators.freq_period_multiplier]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n  +cp.expression = \'close[-1]+(atr10[-1]*2)\'\\r\\n scale= \\"right\\"\\r\\n[stratvars.indicators.long_goal]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n  +cp.expression = \'close[-1]+(atr10[-1]*3)\'\\r\\n scale = \'right\'\\r\\n[stratvars.indicators.short_goal]\\r\\n type = \'custom\'\\r\\n subtype = \'expression\'\\r\\n on_confirmed_only = true\\r\\n  +cp.expression = 
\'close[-1]-(atr10[-1]*3)\'\\r\\n scale = \'right\'\\r\\n[stratvars.signals.main]\\r\\n signal_only_on_confirmed = true\\r\\n next_signal_offset_from_last_exit = 0\\r\\n #6.5h, 360=6h,  +\\r\\n window_open = 2\\r\\n window_close = 360\\r\\n min_bar_index = 0\\r\\n #activated = true\\r\\n profit = 0.50\\r\\n #profit_short = 0.07\\r\\n #profit_short = \\"short_goal\\"\\r\\n  +#profit_long = 0.07\\r\\n #profit_long = \\"long_goal\\"\\r\\n #ochrana proti extremnim hodntoam z indikatoru - fallback\\r\\n profit_min_ind_tick_value = 0.03\\r\\n max_profit = 0.50\\r\\n  +#SL_defval_short = \\"sl_short\\"\\r\\n #SL_defval_short = \\"high1bars\\"\\r\\n #SL_defval_long = \\"sl_long\\"\\r\\n SL_defval_short = 0.30\\r\\n SL_defval_long = 0.30\\r\\n  +SL_trailing_enabled_short = false\\r\\n SL_trailing_enabled_long = false\\r\\n SL_trailing_offset_short = 0.04\\r\\n SL_trailing_offset_long = 0.04\\r\\n SL_trailing_step_short = 0.01\\r\\n  +SL_trailing_step_long = 0.01\\r\\n SL_trailing_stop_at_breakeven_short = false\\r\\n SL_trailing_stop_at_breakeven_long = false\\r\\n reverse_for_SL_exit_short = \\"no\\"\\r\\n reverse_for_SL_exit_long += \\"cond\\"\\r\\n #[0.236, 0.382, 0.618]\\r\\n #SL_opt_exit_levels_short = [0.236, 0.382, 0.618]\\r\\n #SL_opt_exit_levels_long = [0.236, 0.382, 0.618]\\r\\n ##SL_opt_exit_sizes_short = [0.2]\\r\\n  +#SL_opt_exit_sizes_long = [0.2]\\r\\n[stratvars.signals.main.conditions]\\r\\n #preconditions \\r\\n\\r\\n #SHORT ENTRY\\r\\n signal.AND.go_short_if_equals = -1\\r\\n\\r\\n #pouze pri prepnuti\\r\\n  +#vwap_angle5.AND.go_short_if_below = -0.1\\r\\n \\r\\n #trendB.AND.go_short_if_fallingc = 2\\r\\n #signal_linreg.go_short_if_equals = -1\\r\\n #signal.go_short_if_equals = -1\\r\\n  +#signal_conf_short.go_short_if_equals = 1\\r\\n #EXIT PROTECTION\\r\\n #drzime dokud stoupame\\r\\n #slopetick5.dont_exit_long_if_above = 0\\r\\n #mama_angle3.dont_exit_long_if_above = 0\\r\\n  +#mama_angle3.dont_exit_short_if_below = 0 \\r\\n #signal_linreg.dont_exit_short_if_equals = -1 \\r\\n #EXIT - drzime po dobu trvani signalu\\r\\n signal.exit_long_if_not_equals = 1\\r\\n  +signal.exit_short_if_not_equals = -1\\r\\n #okamzity exit, kdyz je pod 0 ??\\r\\n #signal_linreg.exit_long_if_not_equals = 1\\r\\n #signal_linreg.exit_short_if_not_equals = -1\\r\\n\\r\\n #LONG  +ENTRY\\r\\n signal.go_long_if_equals = 1\\r\\n\\r\\n #vwap_angle5.AND.go_long_if_above = 0.1\\r\\n #trendB.AND.go_long_if_risingc = 2\\r\\n #signal_linreg.go_long_if_equals = 1\\r\\n\\r\\n  +#reverse\\r\\n #trend1.AND.reverse_long_if_equals = -1\\r\\n #vwap_angle5.AND.reverse_long_if_below = -0.05\\r\\n #trend1.AND.reverse_short_if_equals = 1\\r\\n #vwap_angle5.AND.reverse_short_if_above = +0.05\\r\\n #signal_conf_sell.reverse_long_if_equals = 1\\r\\n\\r\\n #CONDITIONAL REVERSE ONLY\\r\\n #slope1.slreverseonly_long_if_below = 0\\r\\n #slope1.slreverseonly_short_if_above =  +0\\r\\n\\r\\n[stratvars.exit]\\r\\n #maximalni stoploss, fallout pro \\"exit_short_if\\" direktivy\\r\\n SL_defval_short = 0.10\\r\\n SL_defval_long = 0.02\\r\\n SL_trailing_enabled_short = false\\r\\n +SL_trailing_enabled_long = false\\r\\n #minimalni vzdalenost od aktualni SL, aby se SL posunula na \\r\\n SL_trailing_offset_short = 0.02\\r\\n SL_trailing_offset_long = 0.02\\r\\n #zda trailing  +zastavit na brakeeven\\r\\n SL_trailing_stop_at_breakeven_short = false\\r\\n SL_trailing_stop_at_breakeven_long = false\\r\\n[stratvars.exit.conditions]\\r\\n #obecne exit conditions platne pro  +vsechny\\r\\n #slope20.exit_long_if_below = 20\\r\\n 
#slope10.exit_short_if_above = 0.3\\r\\n\\r\\n #TBD - mozna udelat EOD exit jako direktivu\\r\\n", "add_data_conf": "[[add_data]]\\r\\n  +symbol=\\"BAC\\"\\r\\n rectype= \\"cbar\\"\\r\\n resolution=12\\r\\n update_ltp=true\\r\\n align=\\"round\\"\\r\\n mintick=0\\r\\n minsize=100\\r\\n exthours=false", "note": "", "history":  +"\\nREASON:STOP Signal received\\nREASON:STOP Signal received\\nREASON:STOP Signal received\\nREASON:STOP Signal received"}', + ilog_save=False, + bt_from=None, + bt_to=None, + weekdays_filter=[], + test_batch_id=None, + batch_id='22', + cash=100000, + skip_cache=False +) +RUN 208c9c2b-d25b-4dd9-8654-c19bdd2d4a30 INITIATED +instance vytvorena <v2realbot.strategy.StrategyClassicSL.StrategyClassicSL object at 0x14ac7b310> +adding stream +{'symbol': 'BAC', 'rectype': <RecordType.CBAR: 'cbar'>, 'resolution': 12, 'update_ltp': True, 'align': , 'mintick': 0, 'minsize': 100, 'exthours': False} +stav pred pridavanim [] +websocket zatim nebezi, pouze pridavame do pole +Starting strategy SUPERTREND TV +SpuÅ”těna SUPERTREND TV +********** WS Streamer - run WS-LDR-SUPERTREND TV ********** +symbol ve streams BAC +subscribed to BAC +WS-LDR-SUPERTREND TV it is not running, starting by calling RUN +********** WS Streamer STARTED ********** +********** WS Order Update Streamer started for SUPERTREND TV ********** +creating workdict for buy value {'AND': [('trendB', 'change_val_if_equals', 1), ('div_vwap_cum_mul', 'change_val_if_above', 4), ('div_vwap_angle6', 'change_val_if_above', 0.023)], 'OR': []} +creating workdict for sell value {'AND': [('trendB', 'change_val_if_equals', -1), ('div_vwap_cum_mul', 'change_val_if_below', -3), ('div_vwap_angle6', 'change_val_if_below', -0.009)], 'OR': []} +Strategy 15687653-04fb-46aa-8806-ef4af4e45d96 started successfully. 
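For readability of the job listings above: each run_manager record produces a paired start and stop cron job whose ids are prefixed scheduler_start_/scheduler_stop_, whose trigger is built from the record's weekdays_filter and HH:MM times, and whose only argument is the record id. The following is a minimal editorial sketch of how such a pair could be registered with APScheduler; schedule_record, start_record and stop_record are illustrative placeholders (the patch itself routes these calls through a wrapper function), not functions from this commit.

    from apscheduler.schedulers.background import BackgroundScheduler
    from apscheduler.triggers.cron import CronTrigger

    scheduler = BackgroundScheduler(timezone="America/New_York")

    def start_record(record_id):
        # placeholder for the real start callback
        print("starting record", record_id)

    def stop_record(record_id):
        # placeholder for the real stop callback
        print("stopping record", record_id)

    def schedule_record(record_id, start_hhmm, stop_hhmm, weekdays_filter=None):
        # no weekday filter means every day, i.e. day_of_week='*' as in the listing above
        dow = ",".join(str(d) for d in weekdays_filter) if weekdays_filter else "*"
        start_h, start_m = start_hhmm.split(":")
        stop_h, stop_m = stop_hhmm.split(":")
        scheduler.add_job(start_record,
                          CronTrigger(day_of_week=dow, hour=start_h, minute=start_m),
                          args=[record_id],
                          id=f"scheduler_start_{record_id}",
                          replace_existing=True)
        scheduler.add_job(stop_record,
                          CronTrigger(day_of_week=dow, hour=stop_h, minute=stop_m),
                          args=[record_id],
                          id=f"scheduler_stop_{record_id}",
                          replace_existing=True)

    # example matching the test record seen in the log
    schedule_record("bc4ec7d2-249b-4799-a02f-f1ce66f83d4a", "22:32", "22:33")
    scheduler.start()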
+Current 8 scheduled jobs: [.wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>] +Job ID: scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-20 22:33:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='33'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:32:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='32'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='16', minute='0'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +APS jobs initialized +Record in db updated bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Current 8 scheduled jobs: [.wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>, .wrapper)>] +Job ID: scheduler_start_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_start_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='9', minute='30'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_stop_6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Next Run Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: 6f54c363-e30c-4ad2-b232-ccc5c11d6745 +Job Kwargs: + +Job ID: scheduler_stop_ea6d79f2-d441-4236-8969-90c77b565e10 +Next Run Time: 2024-02-21 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='16', minute='0'] +Job Args: ea6d79f2-d441-4236-8969-90c77b565e10 +Job Kwargs: + +Job ID: scheduler_start_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:32:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='32'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: 
scheduler_stop_bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Next Run Time: 2024-02-21 22:33:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='*', hour='22', minute='33'] +Job Args: bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Job Kwargs: + +Job ID: scheduler_start_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 09:30:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='9', minute='30'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Job ID: scheduler_stop_8aeae4b4-f200-478b-93af-6c8afec1fed9 +Next Run Time: 2024-02-23 16:00:00-05:00 +Job Function: wrapper +Trigger: cron[day_of_week='0,4', hour='16', minute='0'] +Job Args: 8aeae4b4-f200-478b-93af-6c8afec1fed9 +Job Kwargs: + +Stopping record bc4ec7d2-249b-4799-a02f-f1ce66f83d4a +Requesting STOP 208c9c2b-d25b-4dd9-8654-c19bdd2d4a30 +APS jobs initialized +Record updated bc4ec7d2-249b-4799-a02f-f1ce66f83d4a diff --git a/jobs.log b/jobs.log new file mode 100644 index 0000000..14d7726 --- /dev/null +++ b/jobs.log @@ -0,0 +1 @@ +Current 0 scheduled jobs: [] diff --git a/requirements.txt b/requirements.txt index 9578c07..2a1bddf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -35,6 +35,7 @@ entrypoints==0.4 exceptiongroup==1.1.3 executing==1.2.0 fastapi==0.95.0 +filelock==3.13.1 Flask==2.2.3 flatbuffers==23.5.26 fonttools==4.39.0 @@ -168,7 +169,7 @@ tzdata==2023.2 tzlocal==4.3 urllib3==1.26.14 uvicorn==0.21.1 --e git+https://github.com/drew2323/v2trading.git@eff78e8157c44b064c169e80ffa3d0b18cdb3d23#egg=v2realbot +-e git+https://github.com/drew2323/v2trading.git@b58639454be921f9f0c9dd1880491cfcfdfdf3b7#egg=v2realbot validators==0.20.0 wcwidth==0.2.9 webencodings==0.5.1 diff --git a/v2realbot/common/db.py b/v2realbot/common/db.py index 4e143d0..6a44c8a 100644 --- a/v2realbot/common/db.py +++ b/v2realbot/common/db.py @@ -3,9 +3,12 @@ import sqlite3 import queue import threading import time -from v2realbot.common.model import RunArchive, RunArchiveView +from v2realbot.common.model import RunArchive, RunArchiveView, RunManagerRecord from datetime import datetime import orjson +from v2realbot.utils.utils import json_serial, send_to_telegram, zoneNY +import v2realbot.controller.services as cs +from uuid import UUID sqlite_db_file = DATA_DIR + "/v2trading.db" # Define the connection pool @@ -31,7 +34,7 @@ class ConnectionPool: return connection -def execute_with_retry(cursor: sqlite3.Cursor, statement: str, params = None, retry_interval: int = 1) -> sqlite3.Cursor: +def execute_with_retry(cursor: sqlite3.Cursor, statement: str, params = None, retry_interval: int = 2) -> sqlite3.Cursor: """get connection from pool and execute SQL statement with retry logic if required. 
Args: @@ -62,6 +65,37 @@ pool = ConnectionPool(10) insert_conn = sqlite3.connect(sqlite_db_file, check_same_thread=False) insert_queue = queue.Queue() +#prevede dict radku zpatky na objekt vcetme retypizace +def row_to_runmanager(row: dict) -> RunManagerRecord: + + is_running = cs.is_runner_running(row['runner_id']) if row['runner_id'] else False + + res = RunManagerRecord( + moddus=row['moddus'], + id=row['id'], + strat_id=row['strat_id'], + symbol=row['symbol'], + mode=row['mode'], + account=row['account'], + note=row['note'], + ilog_save=bool(row['ilog_save']), + bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None, + bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None, + weekdays_filter=[int(x) for x in row['weekdays_filter'].split(',')] if row['weekdays_filter'] else [], + batch_id=row['batch_id'], + testlist_id=row['testlist_id'], + start_time=row['start_time'], + stop_time=row['stop_time'], + status=row['status'], + #last_started=zoneNY.localize(datetime.fromisoformat(row['last_started'])) if row['last_started'] else None, + last_processed=datetime.fromisoformat(row['last_processed']) if row['last_processed'] else None, + history=row['history'], + valid_from=datetime.fromisoformat(row['valid_from']) if row['valid_from'] else None, + valid_to=datetime.fromisoformat(row['valid_to']) if row['valid_to'] else None, + runner_id = row['runner_id'] if row['runner_id'] and is_running else None, #runner_id is only present if it is running + strat_running = is_running) #cant believe this when called from separate process as not current + return res + #prevede dict radku zpatky na objekt vcetme retypizace def row_to_runarchiveview(row: dict) -> RunArchiveView: return RunArchive( diff --git a/v2realbot/common/model.py b/v2realbot/common/model.py index 7dbc1b9..f2dc6db 100644 --- a/v2realbot/common/model.py +++ b/v2realbot/common/model.py @@ -1,13 +1,16 @@ -from uuid import UUID +from uuid import UUID, uuid4 from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent,OrderType #from utils import AttributeDict from rich import print from typing import Any, Optional, List, Union from datetime import datetime, date -from pydantic import BaseModel -from v2realbot.enums.enums import Mode, Account +from pydantic import BaseModel, Field +from v2realbot.enums.enums import Mode, Account, SchedulerStatus, Moddus from alpaca.data.enums import Exchange + + + #models for server side datatables # Model for individual column data class ColumnData(BaseModel): @@ -134,7 +137,33 @@ class RunRequest(BaseModel): cash: int = 100000 skip_cache: Optional[bool] = False - +#Trida, kterĆ” je nadstavbou runrequestu a pouzivame ji v scheduleru, je zde navic jen par polĆ­ +class RunManagerRecord(BaseModel): + moddus: Moddus + id: UUID = Field(default_factory=uuid4) + strat_id: UUID + symbol: Optional[str] = None + account: Account + mode: Mode + note: Optional[str] = None + ilog_save: bool = False + bt_from: datetime = None + bt_to: datetime = None + #weekdays filter + #pokud je uvedeny filtrujeme tyto dny + weekdays_filter: Optional[list] = None #list of strings 0-6 representing days to run + #GENERATED ID v ramci runu, vaze vsechny runnery v batchovem behu + batch_id: Optional[str] = None + testlist_id: Optional[str] = None + start_time: str #time (HH:MM) that start function is called + stop_time: Optional[str] #time (HH:MM) that stop function is called + status: SchedulerStatus + last_processed: Optional[datetime] + history: Optional[str] = None + valid_from: 
Optional[datetime] = None # US East time zone daetime + valid_to: Optional[datetime] = None # US East time zone daetime + runner_id: Optional[UUID] = None #last runner_id from scheduler after stratefy is started + strat_running: Optional[bool] = None #automatically updated field based on status of runner_id above, it is added by row_to_RunManagerRecord class RunnerView(BaseModel): id: UUID strat_id: UUID diff --git a/v2realbot/config.py b/v2realbot/config.py index 089ee8e..4487e69 100644 --- a/v2realbot/config.py +++ b/v2realbot/config.py @@ -4,12 +4,18 @@ from appdirs import user_data_dir from pathlib import Path import os +# Global flag to track if the ml module has been imported (solution for long import times of tensorflow) +#the first occurence of using it will load it globally +_ml_module_loaded = False + #directory for generated images and basic reports MEDIA_DIRECTORY = Path(__file__).parent.parent.parent / "media" RUNNER_DETAIL_DIRECTORY = Path(__file__).parent.parent.parent / "runner_detail" #location of strat.log - it is used to fetch by gui +LOG_PATH = Path(__file__).parent.parent LOG_FILE = Path(__file__).parent.parent / "strat.log" +JOB_LOG_FILE = Path(__file__).parent.parent / "job.log" #'0.0.0.0', #currently only prod server has acces to LIVE diff --git a/v2realbot/controller/run_manager.py b/v2realbot/controller/run_manager.py new file mode 100644 index 0000000..560177c --- /dev/null +++ b/v2realbot/controller/run_manager.py @@ -0,0 +1,466 @@ +from typing import Any, List, Tuple +from uuid import UUID, uuid4 +import pickle +from alpaca.data.historical import StockHistoricalDataClient +from alpaca.data.requests import StockTradesRequest, StockBarsRequest +from alpaca.data.enums import DataFeed +from alpaca.data.timeframe import TimeFrame +from v2realbot.strategy.base import StrategyState +from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide +from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest +from v2realbot.utils.utils import validate_and_format_time, AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data +from v2realbot.utils.ilog import delete_logs +from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType +from datetime import datetime +from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer +from threading import Thread, current_thread, Event, enumerate +from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY, OFFLINE_MODE +import importlib +from alpaca.trading.requests import GetCalendarRequest +from alpaca.trading.client import TradingClient +#from alpaca.trading.models import Calendar +from queue import Queue +from tinydb import TinyDB, Query, where +from tinydb.operations import set +import orjson +import numpy as np +from rich import print +import pandas as pd +from traceback import format_exc +from datetime import 
timedelta, time +from threading import Lock +import v2realbot.common.db as db +from sqlite3 import OperationalError, Row +import v2realbot.strategyblocks.indicators.custom as ci +from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators +from v2realbot.strategyblocks.indicators.indicators_hub import populate_dynamic_indicators +from v2realbot.interfaces.backtest_interface import BacktestInterface +import os +import v2realbot.reporting.metricstoolsimage as mt +import gzip +import os +import msgpack +import v2realbot.controller.services as cs +import v2realbot.scheduler.ap_scheduler as aps + +# Functions for your 'run_manager' table + +# CREATE TABLE "run_manager" ( +# "moddus" TEXT NOT NULL, +# "id" varchar(32), +# "strat_id" varchar(32) NOT NULL, +# "symbol" TEXT, +# "account" TEXT NOT NULL, +# "mode" TEXT NOT NULL, +# "note" TEXT, +# "ilog_save" BOOLEAN, +# "bt_from" TEXT, +# "bt_to" TEXT, +# "weekdays_filter" TEXT, +# "batch_id" TEXT, +# "start_time" TEXT NOT NULL, +# "stop_time" TEXT NOT NULL, +# "status" TEXT NOT NULL, +# "last_processed" TEXT, +# "history" TEXT, +# "valid_from" TEXT, +# "valid_to" TEXT, +# "testlist_id" TEXT, +# "runner_id" varchar2(32), +# PRIMARY KEY("id") +# ) + +# CREATE INDEX idx_moddus ON run_manager (moddus); +# CREATE INDEX idx_status ON run_manager (status); +# CREATE INDEX idx_status_moddus ON run_manager (status, moddus); +# CREATE INDEX idx_valid_from_to ON run_manager (valid_from, valid_to); +# CREATE INDEX idx_stopped_batch_id ON runner_header (stopped, batch_id); +# CREATE INDEX idx_search_value ON runner_header (strat_id, batch_id); + + +##weekdays are stored as comma separated values +# Fetching (assume 'weekdays' field is a comma-separated string) +# weekday_str = record['weekdays'] +# weekdays = [int(x) for x in weekday_str.split(',')] + +# # ... logic to check whether today's weekday is in 'weekdays' + +# # Storing +# weekdays = [1, 2, 5] # Example +# weekday_str = ",".join(str(x) for x in weekdays) +# update_data = {'weekdays': weekday_str} +# # ... 
use in an SQL UPDATE statement + + # for row in records: + # row['weekdays_filter'] = [int(x) for x in row['weekdays_filter'].split(',')] if row['weekdays_filter'] else [] + + +#get stratin info return +# strat : StrategyInstance = None +# result, strat = cs.get_stratin("625760ac-6376-47fa-8989-1e6a3f6ab66a") +# if result == 0: +# print(strat) +# else: +# print("Error:", strat) + + +# Fetch all +#result, records = fetch_all_run_manager_records() + +#TODO zvazit rozsireni vystupu o strat_status (running/stopped) + + +def fetch_all_run_manager_records() -> list[RunManagerRecord]: + conn = db.pool.get_connection() + try: + conn.row_factory = Row + cursor = conn.cursor() + cursor.execute('SELECT * FROM run_manager') + rows = cursor.fetchall() + results = [] + #Transform row to object + for row in rows: + #add transformed object into result list + results.append(db.row_to_runmanager(row)) + + return 0, results + finally: + conn.row_factory = None + db.pool.release_connection(conn) + +# Fetch by strategy_id +# result, record = fetch_run_manager_record_by_id('625760ac-6376-47fa-8989-1e6a3f6ab66a') +def fetch_run_manager_record_by_id(strategy_id) -> RunManagerRecord: + conn = db.pool.get_connection() + try: + conn.row_factory = Row + cursor = conn.cursor() + cursor.execute('SELECT * FROM run_manager WHERE id = ?', (str(strategy_id),)) + row = cursor.fetchone() + if row is None: + return -2, "not found" + else: + return 0, db.row_to_runmanager(row) + + except Exception as e: + print("ERROR while fetching all records:", str(e) + format_exc()) + return -2, str(e) + format_exc() + finally: + conn.row_factory = None + db.pool.release_connection(conn) + +def add_run_manager_record(new_record: RunManagerRecord): + #validation/standardization of time + new_record.start_time = validate_and_format_time(new_record.start_time) + if new_record.start_time is None: + return -2, f"Invalid start_time format {new_record.start_time}" + + if new_record.stop_time is not None: + new_record.stop_time = validate_and_format_time(new_record.stop_time) + if new_record.stop_time is None: + return -2, f"Invalid stop_time format {new_record.stop_time}" + + conn = db.pool.get_connection() + try: + + strat : StrategyInstance = None + result, strat = cs.get_stratin(id=str(new_record.strat_id)) + if result == 0: + new_record.symbol = strat.symbol + else: + return -1, f"Strategy {new_record.strat_id} not found" + + cursor = conn.cursor() + + # Construct a suitable INSERT query based on your RunManagerRecord fields + insert_query = """ + INSERT INTO run_manager (moddus, id, strat_id, symbol,account, mode, note,ilog_save, + bt_from, bt_to, weekdays_filter, batch_id, + start_time, stop_time, status, last_processed, + history, valid_from, valid_to, testlist_id) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ """ + values = [ + new_record.moddus, str(new_record.id), str(new_record.strat_id), new_record.symbol, new_record.account, new_record.mode, new_record.note, + int(new_record.ilog_save), + new_record.bt_from.isoformat() if new_record.bt_from is not None else None, + new_record.bt_to.isoformat() if new_record.bt_to is not None else None, + ",".join(str(x) for x in new_record.weekdays_filter) if new_record.weekdays_filter else None, + new_record.batch_id, new_record.start_time, + new_record.stop_time, new_record.status, + new_record.last_processed.isoformat() if new_record.last_processed is not None else None, + new_record.history, + new_record.valid_from.isoformat() if new_record.valid_from is not None else None, + new_record.valid_to.isoformat() if new_record.valid_to is not None else None, + new_record.testlist_id + ] + db.execute_with_retry(cursor, insert_query, values) + conn.commit() + + #Add APS scheduler job refresh + res, result = aps.initialize_jobs() + if res < 0: + return -2, f"Error initializing jobs: {res} {result}" + + return 0, new_record.id # Assuming success, you might return something more descriptive + except Exception as e: + print("ERROR while adding record:", str(e) + format_exc()) + return -2, str(e) + format_exc() + finally: + db.pool.release_connection(conn) + +# Update (example) +# update_data = {'last_started': '2024-02-13 10:35:00'} +# result, message = update_run_manager_record('625760ac-6376-47fa-8989-1e6a3f6ab66a', update_data) +def update_run_manager_record(record_id, updated_record: RunManagerRecord): + #validation/standardization of time + updated_record.start_time = validate_and_format_time(updated_record.start_time) + if updated_record.start_time is None: + return -2, f"Invalid start_time format {updated_record.start_time}" + + if updated_record.stop_time is not None: + updated_record.stop_time = validate_and_format_time(updated_record.stop_time) + if updated_record.stop_time is None: + return -2, f"Invalid stop_time format {updated_record.stop_time}" + + conn = db.pool.get_connection() + try: + cursor = conn.cursor() + + #strategy lookup check, if strategy still exists + strat : StrategyInstance = None + result, strat = cs.get_stratin(id=str(updated_record.strat_id)) + if result == 0: + updated_record.symbol = strat.symbol + else: + return -1, f"Strategy {updated_record.strat_id} not found" + + #remove values with None, so they are not updated + #updated_record_dict = updated_record.dict(exclude_none=True) + + # Construct update query and handle weekdays conversion + update_query = 'UPDATE run_manager SET ' + update_params = [] + for key, value in updated_record.dict().items(): # Iterate over model attributes + if key in ['id', 'strat_running']: # Skip updating the primary key + continue + update_query += f"{key} = ?, " + if key == "ilog_save": + value = int(value) + elif key in ["strat_id", "runner_id"]: + value = str(value) if value else None + elif key == "weekdays_filter": + value = ",".join(str(x) for x in value) if value else None + elif key in ['valid_from', 'valid_to', 'bt_from', 'bt_to', 'last_processed']: + value = value.isoformat() if value else None + update_params.append(value) + # if 'weekdays_filter' in updated_record.dict(): + # updated_record.weekdays_filter = ",".join(str(x) for x in updated_record.weekdays_filter) + update_query = update_query[:-2] # Remove trailing comma and space + update_query += ' WHERE id = ?' 
+ update_params.append(str(record_id)) + + db.execute_with_retry(cursor, update_query, update_params) + #cursor.execute(update_query, update_params) + conn.commit() + + #Add APS scheduler job refresh + res, result = aps.initialize_jobs() + if res < 0: + return -2, f"Error initializing jobs: {res} {result}" + + except Exception as e: + print("ERROR while updating record:", str(e) + format_exc()) + return -2, str(e) + format_exc() + finally: + db.pool.release_connection(conn) + return 0, record_id + +# result, message = delete_run_manager_record('625760ac-6376-47fa-8989-1e6a3f6ab66a') +def delete_run_manager_record(record_id): + conn = db.pool.get_connection() + try: + cursor = conn.cursor() + db.execute_with_retry(cursor, 'DELETE FROM run_manager WHERE id = ?', (str(record_id),)) + #cursor.execute('DELETE FROM run_manager WHERE id = ?', (str(strategy_id),)) + conn.commit() + except Exception as e: + print("ERROR while deleting record:", str(e) + format_exc()) + return -2, str(e) + format_exc() + finally: + db.pool.release_connection(conn) + return 0, record_id + +def fetch_scheduled_candidates_for_start_and_stop(market_datetime_now, market) -> tuple[int, dict]: + """ + Fetches all active records from the 'run_manager' table where the mode is 'schedule'. It checks if the current + time in the America/New_York timezone is within the operational intervals specified by 'start_time' and 'stop_time' + for each record. This function is designed to correctly handle scenarios where the operational interval crosses + midnight, as well as intervals contained within a single day. + + The function localizes 'valid_from', 'valid_to', 'start_time', and 'stop_time' using the 'zoneNY' timezone object + for accurate comparison with the current time. + + Parameters: + market_datetime_now (datetime): The current date and time in the America/New_York timezone. + market (str): The market identifier. + + Returns: + Tuple[int, dict]: A tuple where the first element is a status code (0 for success, -2 for error), and the + second element is a dictionary. This dictionary has keys 'start' and 'stop', each containing a list of + RunManagerRecord objects meeting the respective criteria. If an error occurs, the second element is a + descriptive error message. + + Note: + - This function assumes that the 'zoneNY' pytz timezone object is properly defined and configured to represent + the America/New York timezone. + - It also assumes that the 'run_manager' table exists in the database with the required columns. + - 'start_time' and 'stop_time' are expected to be strings representing times in 24-hour format. + - If 'valid_from', 'valid_to', 'start_time', or 'stop_time' are NULL in the database, they are considered as + having unlimited boundaries. 
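+ Illustration (editorial sketch, not part of this module): the SQL CASE expressions used below reduce to the
+ following plain-Python check on zero-padded 'HH:MM' strings, which also covers windows that cross midnight.
+
+ def in_run_window(now, start, stop):
+ # 'HH:MM' strings compare correctly as text, matching the SQLite comparison
+ if start <= stop: # same-day window, e.g. 09:30 -> 16:00
+ return start <= now < stop
+ return now >= start or now < stop # overnight window, e.g. 22:00 -> 02:00
+
+ # records inside the window are start candidates, the rest are stop candidates
+ assert in_run_window("10:00", "09:30", "16:00") is True
+ assert in_run_window("23:30", "22:00", "02:00") is True
+ assert in_run_window("03:00", "22:00", "02:00") is False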
+ + Pozor: je jeste jeden okrajovy pripad, kdy by to nemuselo zafungovat: kdyby casy byly nastaveny pro + beh strategie pres pulnoc, ale zapla by se pozdeji az po pulnoci + (https://chat.openai.com/c/3c77674a-8a2c-45aa-afbd-ab140f473e07) + + """ + conn = db.pool.get_connection() + try: + conn.row_factory = Row + cursor = conn.cursor() + + # Get current datetime in America/New York timezone + market_datetime_now_str = market_datetime_now.strftime('%Y-%m-%d %H:%M:%S') + current_time_str = market_datetime_now.strftime('%H:%M') + print("current_market_datetime_str:", market_datetime_now_str) + print("current_time_str:", current_time_str) + + # Select also supports scenarios where strategy runs overnight + # SQL query to fetch records with active status and date constraints for both start and stop times + query = """ + SELECT *, + CASE + WHEN start_time <= stop_time AND (? >= start_time AND ? < stop_time) OR + start_time > stop_time AND (? >= start_time OR ? < stop_time) THEN 1 + ELSE 0 + END as is_start_time, + CASE + WHEN start_time <= stop_time AND (? >= stop_time OR ? < start_time) OR + start_time > stop_time AND (? >= stop_time AND ? < start_time) THEN 1 + ELSE 0 + END as is_stop_time + FROM run_manager + WHERE status = 'active' AND moddus = 'schedule' AND + ((valid_from IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_from) <= ?) AND + (valid_to IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_to) >= ?)) + """ + cursor.execute(query, (current_time_str, current_time_str, current_time_str, current_time_str, + current_time_str, current_time_str, current_time_str, current_time_str, + market_datetime_now_str, market_datetime_now_str)) + rows = cursor.fetchall() + + start_candidates = [] + stop_candidates = [] + for row in rows: + run_manager_record = db.row_to_runmanager(row) + if row['is_start_time']: + start_candidates.append(run_manager_record) + if row['is_stop_time']: + stop_candidates.append(run_manager_record) + + results = {'start': start_candidates, 'stop': stop_candidates} + + return 0, results + except Exception as e: + msg_err = f"ERROR while fetching records for start and stop times with datetime {market_datetime_now_str}: {str(e)} {format_exc()}" + print(msg_err) + return -2, msg_err + finally: + conn.row_factory = None + db.pool.release_connection(conn) + + +def fetch_startstop_scheduled_candidates(market_datetime_now, time_check, market = "US") -> tuple[int, list[RunManagerRecord]]: + """ + Fetches all active records from the 'run_manager' table where moddus is schedule, the current date and time + in the America/New_York timezone falls between the 'valid_from' and 'valid_to' datetime + fields, and either 'start_time' or 'stop_time' matches the specified condition with the current time. + If 'valid_from', 'valid_to', or the time column ('start_time'/'stop_time') are NULL, they are considered + as having unlimited boundaries. + + The function localizes the 'valid_from', 'valid_to', and the time column times using the 'zoneNY' + timezone object for accurate comparison with the current time. + + Parameters: + market_datetime_now (datetime): Current datetime in the market timezone. + market (str): The market for which to fetch candidates. + time_check (str): Either 'start' or 'stop', indicating which time condition to check. + + Returns: + Tuple[int, list[RunManagerRecord]]: A tuple where the first element is a status code + (0 for success, -2 for error), and the second element is a list of RunManagerRecord + objects meeting the criteria. 
If an error occurs, the second element is a descriptive + error message. + + Note: + This function assumes that the 'zoneNY' pytz timezone object is properly defined and + configured to represent the America/New York timezone. It also assumes that the + 'run_manager' table exists in the database with the columns as described in the + provided schema. + """ + if time_check not in ['start', 'stop']: + return -2, "Invalid time_check parameter. Must be 'start' or 'stop'." + + conn = db.pool.get_connection() + try: + conn.row_factory = Row + cursor = conn.cursor() + + # Get current datetime in America/New York timezone + market_datetime_now_str = market_datetime_now.strftime('%Y-%m-%d %H:%M:%S') + current_time_str = market_datetime_now.strftime('%H:%M') + print("current_market_datetime_str:", market_datetime_now_str) + print("current_time_str:", current_time_str) + + # SQL query to fetch records with active status, date constraints, and time condition + time_column = 'start_time' if time_check == 'start' else 'stop_time' + query = f""" + SELECT * FROM run_manager + WHERE status = 'active' AND moddus = 'schedule' AND + ((valid_from IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_from) <= ?) AND + (valid_to IS NULL OR strftime('%Y-%m-%d %H:%M:%S', valid_to) >= ?)) AND + ({time_column} IS NULL OR {time_column} <= ?) + """ + cursor.execute(query, (market_datetime_now_str, market_datetime_now_str, current_time_str)) + rows = cursor.fetchall() + results = [db.row_to_runmanager(row) for row in rows] + + return 0, results + except Exception as e: + msg_err = f"ERROR while fetching records based on {time_check} time with datetime {market_datetime_now_str}: {str(e)} {format_exc()}" + print(msg_err) + return -2, msg_err + finally: + conn.row_factory = None + db.pool.release_connection(conn) + + +if __name__ == "__main__": + res, sada = fetch_startstop_scheduled_candidates(datetime.now().astimezone(zoneNY), "start") + if res == 0: + print(sada) + else: + print("Error:", sada) + +# from apscheduler.schedulers.background import BackgroundScheduler +# import time + +# def print_hello(): +# print("Hello") + +# def schedule_job(): +# scheduler = BackgroundScheduler() +# scheduler.add_job(print_hello, 'interval', seconds=10) +# scheduler.start() + +# schedule_job() \ No newline at end of file diff --git a/v2realbot/controller/services.py b/v2realbot/controller/services.py index 1fa1b61..790143d 100644 --- a/v2realbot/controller/services.py +++ b/v2realbot/controller/services.py @@ -80,7 +80,7 @@ def get_all_stratins(): else: return (0, []) -def get_stratin(id: UUID): +def get_stratin(id: UUID) -> List[StrategyInstance]: for i in db.stratins: if str(i.id) == str(id): return (0, i) @@ -105,7 +105,7 @@ def create_stratin(si: StrategyInstance): if res < 0: return (-1, "None") si.id = uuid4() - print(si) + #print(si) db.stratins.append(si) db.save() #print(db.stratins) @@ -242,13 +242,14 @@ def pause_runner(id: UUID): return (0, "paused runner " + str(i.id)) print("no ID found") return (-1, "not running instance found") - -def stop_runner(id: UUID = None): +#allows to delete runner based on runner_id, strat_id or all (both none) +#podpruje i hodnotu strat_id v id +def stop_runner(id: UUID = None, strat_id: UUID = None): chng = [] try: for i in db.runners: #print(i['id']) - if id is None or str(i.id) == id: + if (id is None and strat_id is None) or str(i.id) == str(id) or str(i.strat_id) == str(strat_id) or str(i.strat_id) == str(id): chng.append(i.id) print("Sending STOP signal to Runner", i.id) #just sending the 
signal, update is done in stop after plugin @@ -356,7 +357,20 @@ def capsule(target: object, db: object, inter_batch_params: dict = None): except Exception as e: err_msg = "Nepodarilo se vytvorit daily report image" + str(e)+format_exc() send_to_telegram(err_msg) - print(err_msg) + print(err_msg) + #PRO LIVE a PAPER pri vyplnenem batchi vytvarime batchovy soubor zde (pro BT ridi batch_manager) + if i.run_mode in [Mode.LIVE, Mode.PAPER] and i.batch_id is not None: + try: + res, val = mt.generate_trading_report_image(batch_id=i.batch_id) + if res == 0: + print("BATCH REPORT CREATED") + else: + print(f"BATCH REPORT ERROR - {val}") + except Exception as e: + err_msg = f"Nepodarilo se vytvorit batchj report image pro {i.strat_id} a batch{i.batch_id}" + str(e)+format_exc() + send_to_telegram(err_msg) + print(err_msg) + target.release() print("Runner STOPPED") @@ -477,6 +491,9 @@ def run_batch_stratin(id: UUID, runReq: RunRequest): # bud ceka na dokonceni v runners nebo to bude ridit jinak a bude mit jednoho runnera? # nejak vymyslet. # logovani zatim jen do print + +##OFFLINE BATCH RUN MANAGER (generuje batch_id, ridi datove provazani runnerÅÆ(inter_batch_data) a generuje batch report +## a samozrejme spousti jednotlivĆ© dny def batch_run_manager(id: UUID, runReq: RunRequest, rundays: list[RunDay]): #zde muzu iterovat nad intervaly #cekat az dobehne jeden interval a pak spustit druhy @@ -1035,7 +1052,7 @@ def get_all_archived_runners() -> list[RunArchiveView]: #new version to support search and ordering #TODO index nad strat_id a batch_id mam? -def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArchiveViewPagination]: +def get_all_archived_runners_p_original(request: DataTablesRequest) -> Tuple[int, RunArchiveViewPagination]: conn = pool.get_connection() search_value = request.search.value # Extract the search value from the request try: @@ -1084,6 +1101,76 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch return -2, str(e) + format_exc() +#new version with batch_id asc sortin https://chat.openai.com/c/64511445-5181-411b-b9d0-51d16930bf71 +#Tato verze sprĆ”vně groupuje zĆ”znamy se stejnym batch_id (podle maximalniho batche) a non batch zaznamy prolne mezi ne podle jeho stopped date - vlozi zaznam po nebo pred jednotlivou skupinu (dle jejiho max.date) +#diky tomu se mi radi batche a nonbatche spravne a pokud do batche pridame zaznam zobrazi se nam batch nahore +def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArchiveViewPagination]: + conn = pool.get_connection() + search_value = request.search.value # Extract the search value from the request + try: + conn.row_factory = Row + c = conn.cursor() + + # Total count query + total_count_query = """ + SELECT COUNT(*) FROM runner_header + WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value) + """ + c.execute(total_count_query, {'search_value': f'%{search_value}%'}) + total_count = c.fetchone()[0] + + # Paginated query with advanced sorting logic + paginated_query = f""" + WITH GroupedData AS ( + SELECT runner_id, strat_id, batch_id, symbol, name, note, started, + stopped, mode, account, bt_from, bt_to, ilog_save, profit, + trade_count, end_positions, end_positions_avgp, metrics, + MAX(stopped) OVER (PARTITION BY batch_id) AS max_stopped + FROM runner_header + WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value) + ), + InterleavedGroups AS ( + SELECT *, + CASE + WHEN batch_id IS NOT NULL THEN 
max_stopped + ELSE stopped + END AS sort_key + FROM GroupedData + ) + SELECT runner_id, strat_id, batch_id, symbol, name, note, started, + stopped, mode, account, bt_from, bt_to, ilog_save, profit, + trade_count, end_positions, end_positions_avgp, metrics + FROM InterleavedGroups + ORDER BY + sort_key DESC, + CASE WHEN batch_id IS NOT NULL THEN 0 ELSE 1 END, + stopped DESC + LIMIT {request.length} OFFSET {request.start} + """ + c.execute(paginated_query, {'search_value': f'%{search_value}%'}) + rows = c.fetchall() + + # Filtered count query + filtered_count_query = """ + SELECT COUNT(*) FROM runner_header + WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value) + """ + c.execute(filtered_count_query, {'search_value': f'%{search_value}%'}) + filtered_count = c.fetchone()[0] + + results = [row_to_runarchiveview(row) for row in rows] + + finally: + conn.row_factory = None + pool.release_connection(conn) + + try: + obj = RunArchiveViewPagination(draw=request.draw, recordsTotal=total_count, recordsFiltered=filtered_count, data=results) + return 0, obj + except Exception as e: + return -2, str(e) + format_exc() + + #DECOMMS # def get_all_archived_runners(): # conn = pool.get_connection() @@ -1574,6 +1661,9 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool = state.ind_mapping = {**local_dict_inds, **local_dict_bars, **local_dict_cbar_inds} #print("IND MAPPING DONE:", state.ind_mapping) + ##intialize required vars from strat init + state.vars["loaded_models"] = {} + ##intialize dynamic indicators initialize_dynamic_indicators(state) diff --git a/v2realbot/enums/enums.py b/v2realbot/enums/enums.py index 9130c13..bdeb882 100644 --- a/v2realbot/enums/enums.py +++ b/v2realbot/enums/enums.py @@ -52,6 +52,16 @@ class Account(str, Enum): """ ACCOUNT1 = "ACCOUNT1" ACCOUNT2 = "ACCOUNT2" + +class Moddus(str, Enum): + """ + Moddus for RunManager record + + schedule - scheduled record + queue - queued record + """ + SCHEDULE = "schedule" + QUEUE = "queue" class RecordType(str, Enum): """ Represents output of aggregator @@ -64,6 +74,15 @@ class RecordType(str, Enum): CBARRENKO = "cbarrenko" TRADE = "trade" +class SchedulerStatus(str, Enum): + """ + ACTIVE - active scheduling + SUSPENDED - suspended for scheduling + """ + + ACTIVE = "active" + SUSPENDED = "suspended" + class Mode(str, Enum): """ LIVE - live on production @@ -77,7 +96,6 @@ class Mode(str, Enum): BT = "backtest" PREP = "prep" - class StartBarAlign(str, Enum): """ Represents first bar start time alignement according to timeframe diff --git a/v2realbot/loader/trade_offline_streamer.py b/v2realbot/loader/trade_offline_streamer.py index da748d2..c2b6461 100644 --- a/v2realbot/loader/trade_offline_streamer.py +++ b/v2realbot/loader/trade_offline_streamer.py @@ -289,7 +289,7 @@ class Trade_Offline_Streamer(Thread): cnt = 1 - for t in tqdm(tradesResponse[symbol]): + for t in tqdm(tradesResponse[symbol], desc="Loading Trades"): #protoze je zde cely den, poustime dal, jen ty relevantni #pokud je start_time < trade < end_time diff --git a/v2realbot/main.py b/v2realbot/main.py index f90825b..4cb2654 100644 --- a/v2realbot/main.py +++ b/v2realbot/main.py @@ -1,7 +1,7 @@ import os,sys sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ["KERAS_BACKEND"] = "jax" -from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY, LOG_FILE, MODEL_DIR +from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY, LOG_PATH, MODEL_DIR from 
alpaca.data.timeframe import TimeFrame, TimeFrameUnit from datetime import datetime from rich import print @@ -11,7 +11,7 @@ import uvicorn from uuid import UUID import v2realbot.controller.services as cs from v2realbot.utils.ilog import get_log_window -from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs +from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query from fastapi.responses import FileResponse, StreamingResponse, JSONResponse from fastapi.staticfiles import StaticFiles @@ -39,6 +39,9 @@ import shutil from starlette.responses import JSONResponse import mlroom import mlroom.utils.mlutils as ml +from typing import List +import v2realbot.controller.run_manager as rm +import v2realbot.scheduler.ap_scheduler as aps #from async io import Queue, QueueEmpty # # install() @@ -249,11 +252,13 @@ def _run_stratin(stratin_id: UUID, runReq: RunRequest): runReq.bt_to = zoneNY.localize(runReq.bt_to) #pokud jedeme nad test intervaly anebo je požadovĆ”no vĆ­ce dnĆ­ - pouÅ”tĆ­me jako batch day by day #do budoucna dĆ”t na FE jako flag - if runReq.mode != Mode.LIVE and runReq.test_batch_id is not None or (runReq.bt_from.date() != runReq.bt_to.date()): + print(runReq) + if runReq.mode not in [Mode.LIVE, Mode.PAPER] and (runReq.test_batch_id is not None or (runReq.bt_from is not None and runReq.bt_to is not None and runReq.bt_from.date() != runReq.bt_to.date())): res, id = cs.run_batch_stratin(id=stratin_id, runReq=runReq) else: - if runReq.weekdays_filter is not None: - raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Weekday only for backtest mode with batch (not single day)") + #not necessary for live/paper the weekdays are simply ignored, in the future maybe add validation if weekdays are presented + #if runReq.weekdays_filter is not None: + # raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Weekday only for backtest mode with batch (not single day)") res, id = cs.run_stratin(id=stratin_id, runReq=runReq) if res == 0: return id elif res < 0: @@ -555,27 +560,30 @@ def _get_archived_runner_log_byID(runner_id: UUID, timestamp_from: float, timest # endregion # A simple function to read the last lines of a file def tail(file_path, n=10, buffer_size=1024): - with open(file_path, 'rb') as f: - f.seek(0, 2) # Move to the end of the file - file_size = f.tell() - lines = [] - buffer = bytearray() + try: + with open(file_path, 'rb') as f: + f.seek(0, 2) # Move to the end of the file + file_size = f.tell() + lines = [] + buffer = bytearray() - for i in range(file_size // buffer_size + 1): - read_start = max(-buffer_size * (i + 1), -file_size) - f.seek(read_start, 2) - read_size = min(buffer_size, file_size - buffer_size * i) - buffer[0:0] = f.read(read_size) # Prepend to buffer + for i in range(file_size // buffer_size + 1): + read_start = max(-buffer_size * (i + 1), -file_size) + f.seek(read_start, 2) + read_size = min(buffer_size, file_size - buffer_size * i) + buffer[0:0] = f.read(read_size) # Prepend to buffer - if 
buffer.count(b'\n') >= n + 1: - break + if buffer.count(b'\n') >= n + 1: + break - lines = buffer.decode(errors='ignore').splitlines()[-n:] - return lines + lines = buffer.decode(errors='ignore').splitlines()[-n:] + return lines + except Exception as e: + return [str(e) + format_exc()] @app.get("/log", dependencies=[Depends(api_key_auth)]) -def read_log(lines: int = 10): - log_path = LOG_FILE +def read_log(lines: int = 700, logfile: str = "strat.log"): + log_path = LOG_PATH / logfile return {"lines": tail(log_path, lines)} #get alpaca history bars @@ -674,7 +682,7 @@ def get_testlists(): raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found") # API endpoint to retrieve a single record by ID -@app.get('/testlists/{record_id}') +@app.get('/testlists/{record_id}', dependencies=[Depends(api_key_auth)]) def get_testlist(record_id: str): res, testlist = cs.get_testlist_byID(record_id=record_id) @@ -684,7 +692,7 @@ def get_testlist(record_id: str): raise HTTPException(status_code=404, detail='Record not found') # API endpoint to update a record -@app.put('/testlists/{record_id}') +@app.put('/testlists/{record_id}', dependencies=[Depends(api_key_auth)]) def update_testlist(record_id: str, testlist: TestList): # Check if the record exists conn = pool.get_connection() @@ -704,7 +712,7 @@ def update_testlist(record_id: str, testlist: TestList): return testlist # API endpoint to delete a record -@app.delete('/testlists/{record_id}') +@app.delete('/testlists/{record_id}', dependencies=[Depends(api_key_auth)]) def delete_testlist(record_id: str): # Check if the record exists conn = pool.get_connection() @@ -788,6 +796,66 @@ def delete_item(item_id: int) -> dict: # endregion +# region scheduler +# 1. Fetch All RunManagerRecords +@app.get("/run_manager_records/", dependencies=[Depends(api_key_auth)], response_model=List[RunManagerRecord]) +#TODO zvazit rozsireni vystupu o strat_status (running/stopped) +def get_all_run_manager_records(): + result, records = rm.fetch_all_run_manager_records() + if result != 0: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Error fetching records") + return records + +# 2. Fetch RunManagerRecord by ID +@app.get("/run_manager_records/{record_id}", dependencies=[Depends(api_key_auth)], response_model=RunManagerRecord) +#TODO zvazit rozsireni vystupu o strat_status (running/stopped) +def get_run_manager_record(record_id: UUID): + result, record = rm.fetch_run_manager_record_by_id(record_id) + if result == -2: # Record not found + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Record not found") + elif result != 0: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Error fetching record") + return record + +# 3. 
Update RunManagerRecord +@app.patch("/run_manager_records/{record_id}", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK) +def update_run_manager_record(record_id: UUID, update_data: RunManagerRecord): + #make dates zone aware zoneNY + # if update_data.valid_from is not None: + # update_data.valid_from = zoneNY.localize(update_data.valid_from) + # if update_data.valid_to is not None: + # update_data.valid_to = zoneNY.localize(update_data.valid_to) + result, message = rm.update_run_manager_record(record_id, update_data) + if result == -2: # Update failed + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) + elif result != 0: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Error during update {result} {message}") + return {"message": "Record updated successfully"} + +# 4. Delete RunManagerRecord +@app.delete("/run_manager_records/{record_id}", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK) +def delete_run_manager_record(record_id: UUID): + result, message = rm.delete_run_manager_record(record_id) + if result == -2: # Delete failed + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=message) + elif result != 0: + raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error during deletion {result} {message}") + return {"message": "Record deleted successfully"} + +@app.post("/run_manager_records/", status_code=status.HTTP_201_CREATED) +def create_run_manager_record(new_record: RunManagerRecord, api_key_auth: Depends = Depends(api_key_auth)): + #make date zone aware - convert to zoneNY + # if new_record.valid_from is not None: + # new_record.valid_from = zoneNY.localize(new_record.valid_from) + # if new_record.valid_to is not None: + # new_record.valid_to = zoneNY.localize(new_record.valid_to) + + result, record_id = rm.add_run_manager_record(new_record) + if result != 0: + raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error during record creation: {result} {record_id}") + return {"id": record_id} +# endregion + #model section #UPLOAD MODEL @app.post("/model/upload_model", dependencies=[Depends(api_key_auth)]) @@ -924,7 +992,22 @@ if __name__ == "__main__": insert_thread = Thread(target=insert_queue2db) insert_thread.start() + #attach debugGER to be able to debug scheduler jobs (run in separate threads) + # debugpy.listen(('localhost', 5678)) + # print("Waiting for debugger to attach...") + # debugpy.wait_for_client() # Script will pause here until debugger is attached + + #init scheduled tasks from schedule table + #Add APS scheduler job refresh + res, result = aps.initialize_jobs() + if res < 0: + #raise exception + raise Exception(f"Error {res} initializing APS jobs, error {result}") + uvicorn.run("__main__:app", host="0.0.0.0", port=8000, reload=False) + except Exception as e: + print("Error intializing app: " + str(e) + format_exc()) + aps.scheduler.shutdown(wait=False) finally: print("closing insert_conn connection") insert_conn.close() diff --git a/v2realbot/scheduler/__init__.py b/v2realbot/scheduler/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/v2realbot/scheduler/ap_scheduler.py b/v2realbot/scheduler/ap_scheduler.py new file mode 100644 index 0000000..227cd5e --- /dev/null +++ b/v2realbot/scheduler/ap_scheduler.py @@ -0,0 +1,307 @@ +from uuid import UUID +from typing import Any, List, Tuple +from uuid import UUID, uuid4 +from v2realbot.enums.enums import Moddus, SchedulerStatus, RecordType, 
StartBarAlign, Mode, Account, OrderSide +from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest +from v2realbot.utils.utils import validate_and_format_time, AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data +from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType +from datetime import datetime +from v2realbot.config import JOB_LOG_FILE, STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY, OFFLINE_MODE +import numpy as np +from rich import print as richprint +import v2realbot.controller.services as cs +import v2realbot.controller.run_manager as rm +import v2realbot.scheduler.scheduler as sch +from apscheduler.schedulers.background import BackgroundScheduler +from apscheduler.triggers.cron import CronTrigger +from apscheduler.job import Job + +#NOTE zatĆ­m nenĆ­ podporovĆ”no spouÅ”těnĆ­ strategie přes pÅÆlnoc - musĆ­ se dořeÅ”it weekday_filter +#který je zatĆ­m jen jeden jak pro start_time tak stop_time - což by v případě strategiĆ­ běžícĆ­ch +#přes pÅÆlnoc nezafungovalo (stop by byl nĆ”sledujĆ­cĆ­ den a scheduler by jej nespustil) + +def format_apscheduler_jobs(jobs: list[Job]) -> list[dict]: + if not jobs: + print("No scheduled jobs.") + return + + jobs_info = [] + + for job in jobs: + job_info = { + "Job ID": job.id, + "Next Run Time": job.next_run_time, + "Job Function": job.func.__name__, + "Trigger": str(job.trigger), + "Job Args": ', '.join(map(str, job.args)), + "Job Kwargs": ', '.join(f"{k}={v}" for k, v in job.kwargs.items()) + } + jobs_info.append(job_info) + + return jobs_info + +def get_day_of_week(weekdays_filter): + if not weekdays_filter: + return '*' # All days of the week + return ','.join(map(str, weekdays_filter)) + +#initialize_jobs se spousti +#- pri spusteni +#- triggerovano z add/update a delete + +#zatim cely refresh, v budoucnu upravime jen na zmene menene polozky - viz +#https://chat.openai.com/c/2a1423ee-59df-47ff-b073-0c49ade51ed7 + +#pomocna funkce, ktera vraci strat_id, ktera jsou v scheduleru vickrat (logika pro ne se lisi) +def stratin_occurences(all_records: list[RunManagerRecord]): + # Count occurrences + strat_id_counts = {} + for record in all_records: + if record.strat_id in strat_id_counts: + strat_id_counts[record.strat_id] += 1 + else: + strat_id_counts[record.strat_id] = 1 + + # Find strat_id values that appear twice or more + repeated_strat_ids = [strat_id for strat_id, count in strat_id_counts.items() if count >= 2] + + return 0, repeated_strat_ids + + +def initialize_jobs(run_manager_records: RunManagerRecord = None): + """ + Initialize all scheduled jobs from RunManagerRecords with moddus = "schedule" + Triggered on app init and update of table + It deleted all "schedule_" prefixed jobs and schedule new ones base on runmanager table + prefiX of "schedule_" in aps scheduler allows to distinguisd schedule types jobs and allows more jobs categories + + Parameters + ---------- 
+ run_manager_records : RunManagerRecord, optional + RunManagerRecords to initialize the jobs from, by default None + + Returns + ------- + Tuple[int, Union[List[dict], str]] + A tuple containing an error code and a message. If there is no error, the + message will contain a list of dictionaries with information about the + scheduled jobs, otherwise it will contain an error message. + """ + if run_manager_records is None: + res, run_manager_records = rm.fetch_all_run_manager_records() + if res < 0: + err_msg= f"Error {res} fetching all runmanager records, error {run_manager_records}" + print(err_msg) + return -2, err_msg + + scheduled_jobs = scheduler.get_jobs() + + #print(f"Current {len(scheduled_jobs)} scheduled jobs: {str(scheduled_jobs)}") + for job in scheduled_jobs: + if job.id.startswith("scheduler_"): + scheduler.remove_job(job.id) + record : RunManagerRecord = None + for record in run_manager_records: + if record.status == SchedulerStatus.ACTIVE and record.moddus == Moddus.SCHEDULE: + day_of_week = get_day_of_week(record.weekdays_filter) + + hour, minute = map(int, record.start_time.split(':')) + start_trigger = CronTrigger(day_of_week=day_of_week, hour=hour, minute=minute, + start_date=record.valid_from, end_date=record.valid_to, timezone=zoneNY) + stop_hour, stop_minute = map(int, record.stop_time.split(':')) + stop_trigger = CronTrigger(day_of_week=day_of_week, hour=stop_hour, minute=stop_minute, + start_date=record.valid_from, end_date=record.valid_to, timezone=zoneNY) + + # Schedule new jobs with the 'scheduler_' prefix + scheduler.add_job(start_runman_record, start_trigger, id=f"scheduler_start_{record.id}", args=[record.id]) + scheduler.add_job(stop_runman_record, stop_trigger, id=f"scheduler_stop_{record.id}", args=[record.id]) + + #scheduler.add_job(print_hello, 'interval', seconds=10, id=f"scheduler_testinterval") + scheduled_jobs = scheduler.get_jobs() + print(f"APS jobs refreshed ({len(scheduled_jobs)})") + current_jobs_dict = format_apscheduler_jobs(scheduled_jobs) + richprint(current_jobs_dict) + return 0, current_jobs_dict + +#zastresovaci funkce resici error handling a printing +def start_runman_record(id: UUID, market = "US", debug_date = None): + record = None + res, record, msg = _start_runman_record(id=id, market=market, debug_date=debug_date) + + if record is not None: + market_time_now = datetime.now().astimezone(zoneNY) if debug_date is None else debug_date + record.last_processed = market_time_now + formatted_date = market_time_now.strftime("%y.%m.%d %H:%M:%S") + history_string = f"{formatted_date}" + history_string += " STARTED" if res == 0 else "NOTE:" + msg if res == -1 else "ERROR:" + msg + print(history_string) + if record.history is None: + record.history = history_string + else: + record.history += "\n" + history_string + + rs, msg_rs = update_runman_record(record) + if rs < 0: + msg_rs = f"Error saving result to history: {msg_rs}" + print(msg_rs) + send_to_telegram(msg_rs) + + + if res < -1: + msg = f"START JOB: {id} ERROR\n" + msg + send_to_telegram(msg) + print(msg) + else: + print(f"START JOB: {id} FINISHED {res}") + + +def update_runman_record(record: RunManagerRecord): + #update record (nejspis jeste upravit - last_run a history) + res, set = rm.update_run_manager_record(record.id, record) + if res == 0: + print(f"Record updated {set}") + return 0, "OK" + else: + err_msg= f"STOP: Error updating {record.id} errir {set} with values {record}" + return -2, err_msg#toto stopne zpracovani dalsich zaznamu pri chybe, zvazit continue + +def 
stop_runman_record(id: UUID, market = "US", debug_date = None): + res, record, msg = _stop_runman_record(id=id, market=market, debug_date=debug_date) + #results : 0 - ok, -1 not running/already running/not specific, -2 error + + #report vzdy zapiseme do history, pokud je record not None, pripadna chyba se stala po dotazeni recordu + if record is not None: + market_time_now = datetime.now().astimezone(zoneNY) if debug_date is None else debug_date + record.last_processed = market_time_now + formatted_date = market_time_now.strftime("%y.%m.%d %H:%M:%S") + history_string = f"{formatted_date}" + history_string += " STOPPED" if res == 0 else "NOTE:" + msg if res == -1 else "ERROR:" + msg + print(history_string) + if record.history is None: + record.history = history_string + else: + record.history += "\n" + history_string + + rs, msg_rs = update_runman_record(record) + if rs < 0: + msg_rs = f"Error saving result to history: {msg_rs}" + print(msg_rs) + send_to_telegram(msg_rs) + + if res < -1: + msg = f"STOP JOB: {id} ERROR\n" + msg + send_to_telegram(msg) + print(msg) + else: + print(f"STOP JOB: {id} FINISHED") + +#start function that is called from the job +def _start_runman_record(id: UUID, market = "US", debug_date = None): + print(f"Start scheduled record {id}") + + record : RunManagerRecord = None + res, result = rm.fetch_run_manager_record_by_id(id) + if res < 0: + result = "Error fetching run manager record by id: " + str(id) + " Error: " + str(result) + return res, record, result + + record = result + + res, sada = sch.get_todays_market_times(market=market, debug_date=debug_date) + if res == 0: + market_time_now, market_open_datetime, market_close_datetime = sada + print(f"OPEN:{market_open_datetime} CLOSE:{market_close_datetime}") + else: + sada = "Error getting market times (CLOSED): " + str(sada) + return res, record, sada + + if cs.is_stratin_running(record.strat_id): + return -1, record, f"Stratin {record.strat_id} is already running" + + res, result = sch.run_scheduled_strategy(record) + if res < 0: + result = "Error running strategy: " + str(result) + return res, record, result + else: + record.runner_id = UUID(result) + + return 0, record, record.runner_id + +#stop function that is called from the job +def _stop_runman_record(id: UUID, market = "US", debug_date = None): + record = None + #get all records + print(f"Stopping record {id}") + res, all_records = rm.fetch_all_run_manager_records() + if res < 0: + err_msg= f"Error {res} fetching all runmanager records, error {all_records}" + return -2, record, err_msg + + record : RunManagerRecord = None + for rec in all_records: + if rec.id == id: + record = rec + break + + if record is None: + return -2, record, f"Record id {id} not found" + + #strat_ids that are repeated + res, repeated_strat_ids = stratin_occurences(all_records) + if res < 0: + err_msg= f"Error {res} finding repeated strat_ids, error {repeated_strat_ids}" + return -2, record, err_msg + + if record.strat_running is True: + #stopneme na zaklade record.runner_id + #this code + id_to_stop = record.runner_id + + #pokud existuje manualne spustena stejna strategie a neni jich vic - je to jednoznacne - stopneme ji + elif cs.is_stratin_running(record.strat_id) and record.strat_id not in repeated_strat_ids: + #stopneme na zaklade record.strat_id + id_to_stop = record.strat_id + + else: + msg = f"strategy {record.strat_id} not RUNNING or not distinctive (manually launched or two strat_ids in scheduler)" + print(msg) + return -1, record, msg + + print(f"Requesting STOP 
{id_to_stop}") + res, msg = cs.stop_runner(id=id_to_stop) + if res < 0: + msg = f"ERROR while STOPPING runner_id/strat_id {id_to_stop} {msg}" + return -2, record, msg + else: + record.runner_id = None + + return 0, record, "finished" + +# Global scheduler instance +scheduler = BackgroundScheduler(timezone=zoneNY) +scheduler.start() + + +if __name__ == "__main__": + #use naive datetoime + debug_date = None + debug_date = datetime(2024, 2, 16, 9, 37, 0, 0) + #debug_date = datetime(2024, 2, 16, 10, 30, 0, 0) + #debug_date = datetime(2024, 2, 16, 16, 1, 0, 0) + + id = UUID("bc4ec7d2-249b-4799-a02f-f1ce66f83d4a") + + if debug_date is not None: + # Localize the naive datetime object to the Eastern timezone + debug_date = zoneNY.localize(debug_date) + #debugdate formatted as string in format "23.12.2024 9:30" + formatted_date = debug_date.strftime("%d.%m.%Y %H:%M") + print("Scheduler.py NY time: ", formatted_date) + print("ISoformat", debug_date.isoformat()) + + # res, result = start_runman_record(id=id, market = "US", debug_date = debug_date) + # print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {result}") + + + res, result = stop_runman_record(id=id, market = "US", debug_date = debug_date) + print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {result}") \ No newline at end of file diff --git a/v2realbot/scheduler/scheduler.py b/v2realbot/scheduler/scheduler.py new file mode 100644 index 0000000..bfadc34 --- /dev/null +++ b/v2realbot/scheduler/scheduler.py @@ -0,0 +1,427 @@ +import json +import datetime +import v2realbot.controller.services as cs +import v2realbot.controller.run_manager as rm +from v2realbot.common.model import RunnerView, RunManagerRecord, StrategyInstance, Runner, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs +from uuid import uuid4, UUID +from v2realbot.utils.utils import json_serial, send_to_telegram, zoneNY, zonePRG, fetch_calendar_data +from datetime import datetime, timedelta +from traceback import format_exc +from rich import print +import requests +from v2realbot.config import WEB_API_KEY + +#Puvodni varainta schedulera, ktera mela bezet v pravidelnych intervalech +#a spoustet scheduled items v RunManagerRecord +#Nově bylo zrefaktorovĆ”no a využitý apscheduler - knihovna v pythonu +#umožňujĆ­cĆ­ plĆ”novĆ”nĆ­ jobÅÆ, tzn. 
nynĆ­ je každý scheduled zĆ”znam RunManagerRecord +#naplanovany jako samostatni job a triggerovĆ”n pouze jednou v daný čas pro start a stop +#novy kod v aps_scheduler.py + +def get_todays_market_times(market = "US", debug_date = None): + try: + if market == "US": + #zjistit vsechny podminky - mozna loopovat - podminky jsou vlevo + if debug_date is not None: + nowNY = debug_date + else: + nowNY = datetime.now().astimezone(zoneNY) + nowNY_date = nowNY.date() + #is market open - nyni pouze US + cal_dates = fetch_calendar_data(nowNY_date, nowNY_date) + + if len(cal_dates) == 0: + print("No Market Day today") + return -1, "Market Closed" + #zatim podpora pouze main session + + #pouze main session + market_open_datetime = zoneNY.localize(cal_dates[0].open) + market_close_datetime = zoneNY.localize(cal_dates[0].close) + return 0, (nowNY, market_open_datetime, market_close_datetime) + else: + return -1, "Market not supported" + except Exception as e: + err_msg = f"General error in {e} {format_exc()}" + print(err_msg) + return -2, err_msg + +def get_running_strategies(): + # Construct the URL for the local REST API endpoint on port 8000 + api_url = "http://localhost:8000/runners/" + + # Headers for the request + headers = { + "X-API-Key": WEB_API_KEY + } + + try: + # Make the GET request to the API with the headers + response = requests.get(api_url, headers=headers) + + # Check if the request was successful + if response.status_code == 200: + runners = response.json() + print("Successfully fetched runners.") + strat_ids = [] + ids = [] + + for runner_view in runners: + strat_ids.append(UUID(runner_view["strat_id"])) + ids.append(UUID(runner_view["id"])) + + return 0, (strat_ids, ids) + else: + err_msg = f"Failed to fetch runners. Status Code: {response.status_code}, Response: {response.text}" + print(err_msg) + return -2, err_msg + except requests.RequestException as e: + err_msg = f"Request failed: {str(e)}" + print(err_msg) + return -2, err_msg + +def stop_strategy(runner_id): + # Construct the URL for the local REST API endpoint on port 8000 #option 127.0.0.1 + api_url = f"http://localhost:8000/runners/{runner_id}/stop" + + # Headers for the request + headers = { + "X-API-Key": WEB_API_KEY + } + + try: + # Make the PUT request to the API with the headers + response = requests.put(api_url, headers=headers) + + # Check if the request was successful + if response.status_code == 200: + print(f"Runner/strat_id {runner_id} stopped successfully.") + return 0, runner_id + else: + err_msg = f"Failed to stop runner {runner_id}. Status Code: {response.status_code}, Response: {response.text}" + print(err_msg) + return -2, err_msg + except requests.RequestException as e: + err_msg = f"Request failed: {str(e)}" + print(err_msg) + return -2, err_msg + +def fetch_stratin(stratin_id): + # Construct the URL for the REST API endpoint + api_url = f"http://localhost:8000/stratins/{stratin_id}" + + # Headers for the request + headers = { + "X-API-Key": WEB_API_KEY + } + + try: + # Make the GET request to the API with the headers + response = requests.get(api_url, headers=headers) + + # Check if the request was successful + if response.status_code == 200: + # Parse the response as a StrategyInstance object + strategy_instance = response.json() + #strategy_instance = response # Assuming the response is in JSON format + print(f"StrategyInstance fetched: {stratin_id}") + return 0, strategy_instance + else: + err_msg = f"Failed to fetch StrategyInstance {stratin_id}. 
" \ + f"Status Code: {response.status_code}, Response: {response.text}" + print(err_msg) + return -1, err_msg + except requests.RequestException as e: + err_msg = f"Request failed: {str(e)}" + print(err_msg) + return -2, err_msg + +#return list of strat_ids that are in the scheduled table more than once +#TODO toto je workaround dokud nebude canndidates logika ze selectu nyni presunuta na fetch_all_run_manager_records a logiku v pythonu +def stratin_occurences(): +#get all records + res, all_records = rm.fetch_all_run_manager_records() + if res < 0: + err_msg= f"Error {res} fetching all runmanager records, error {all_records}" + print(err_msg) + return -2, err_msg + + # Count occurrences + strat_id_counts = {} + for record in all_records: + if record.strat_id in strat_id_counts: + strat_id_counts[record.strat_id] += 1 + else: + strat_id_counts[record.strat_id] = 1 + + # Find strat_id values that appear twice or more + repeated_strat_ids = [strat_id for strat_id, count in strat_id_counts.items() if count >= 2] + + return 0, repeated_strat_ids + +# in case debug_date is not provided, it takes current time of the given market + #V budoucnu zde bude loopa pro kazdy obsluhovany market, nyni pouze US +def startstop_scheduled(debug_date = None, market = "US") -> tuple[int, str]: + res, sada = get_todays_market_times(market=market, debug_date=debug_date) + if res == 0: + market_time_now, market_open_datetime, market_close_datetime = sada + print(f"OPEN:{market_open_datetime} CLOSE:{market_close_datetime}") + else: + return res, sada + + #its market day + res, candidates = rm.fetch_scheduled_candidates_for_start_and_stop(market_time_now, market) + if res == 0: + print(f"Candidates fetched, start: {len(candidates['start'])} stop: {len(candidates['stop'])}") + else: + return res, candidates + + if candidates is None or (len(candidates["start"]) == 0 and len(candidates["stop"]) == 0): + return -1, f"No candidates found for {market_time_now} and {market}" + #do budoucna, az budou runnery persistovane, bude stav kazde strategie v RunManagerRecord + #get current runners (mozna optimalizace, fetch per each section start/stop) + res, sada = get_running_strategies() + if res < 0: + err_msg= f"Error fetching running strategies, error {sada}" + print(err_msg) + send_to_telegram(err_msg) + return -2, err_msg + strat_ids_running, runnerids_running = sada + print(f"Currently running: {len(strat_ids_running)}") + + #IERATE over START CAndidates + record: RunManagerRecord = None + print(f"START - Looping over {len(candidates['start'])} candidates") + for record in candidates['start']: + print("Candidate: ", record) + + if record.weekdays_filter is not None and len(record.weekdays_filter) > 0: + curr_weekday = market_time_now.weekday() + if curr_weekday not in record.weekdays_filter: + print(f"Strategy {record.strat_id} not started, today{curr_weekday} not in weekdays filter {record.weekdays_filter}") + continue + #one strat_id can run only once at time + if record.strat_id in strat_ids_running: + msg = f"strategy already {record.strat_id} is running" + continue + + res, result = run_scheduled_strategy(record) + if res < 0: + send_to_telegram(result) + print(result) + else: + record.runner_id = UUID(result) + strat_ids_running.append(record.strat_id) + runnerids_running.append(record.runner_id) + + record.last_processed = market_time_now + history_string = f"{market_time_now.isoformat()} strategy STARTED" if res == 0 else "ERROR:" + result + + if record.history is None: + record.history = history_string + else: + 
record.history += "\n" + history_string + + #update record (nejspis jeste upravit - last_run a history) + res, set = rm.update_run_manager_record(record.id, record) + if res == 0: + print(f"Record in db updated {set}") + #return 0, set + else: + err_msg= f"Error updating {record.id} errir {set} with values {record}. Process stopped." + print(err_msg) + send_to_telegram(msg) + return -2, err_msg #toto stopne dalsi zpracovani, zvazit continue + + #if stop candidates, then fetch existing runners + stop_candidates_cnt = len(candidates['stop']) + + if stop_candidates_cnt > 0: + res, repeated_strat_ids = stratin_occurences() + if res < 0: + err_msg= f"Error {res} in callin stratin_occurences, error {repeated_strat_ids}" + send_to_telegram(err_msg) + return -2, err_msg + + #dalsi OPEN ISSUE pri STOPu: + # mĆ” STOP_TIME strategie zĆ”viset na dni v týdnu? jinými slovy pokud je strategie + # nastavenĆ” na 9:30-10 v pondělĆ­. Mohu si ji manuĆ”lně spustit v Ćŗterý a systĆ©m ji neshodĆ­? + # ZatĆ­m to je postaveno, že předpis určuje okno, kde mĆ” strategie běžet a mimo tuto dobu bude + # automaticky shozena. Druhou možnostĆ­ je potom, že scheduler si striktně hlĆ­dĆ” jen strategie, + # kterĆ© byly jĆ­m zapnutĆ© a ostatnĆ­ jsou mu putna. V tomto případě pak např. později ručně spuÅ”těmĆ” + # strategie (např. kvÅÆli opravě bugu) bude scheduler ignorovat a nevypne ji i kdyz je nastavena na vypnuti. + # Dopady: weekdays pri stopu a stratin_occurences + + #IERATE over STOP Candidates + record: RunManagerRecord = None + print(f"STOP - Looping over {stop_candidates_cnt} candidates") + for record in candidates['stop']: + print("Candidate: ", record) + + #Tento Å”elmostroj se stratin_occurences tu je jen proto, aby scheduler zafungoval i na manualne spustene strategie (ve vetsine pripadu) + # Při stopu evaluace kandidĆ”tÅÆ na vypnutĆ­ + # - pokud mĆ”m v schedules jen 1 strategii s konkretnim strat_id, můžu jet přes strat_id - bezici strategie s timto strat_id bude vypnuta (i manualne startnuta) + # - pokud jich mĆ”m vĆ­ce, musĆ­m jet přes runnery uloženĆ© v schedules + # (v tomto případě je omezenĆ­: ručně pouÅ”těna strategii nebude automaticky + # stopnuta - systĆ©m nevĆ­, kterĆ” to je) + + #zjistime zda strategie bezi + + #strategii mame v scheduleru pouze jednou, muzeme pouzit strat_id + if record.strat_id not in repeated_strat_ids: + if record.strat_id not in strat_ids_running: + msg = f"strategy {record.strat_id} NOT RUNNING" + print(msg) + continue + else: + #do stop + id_to_stop = record.strat_id + #strat_id je pouzito v scheduleru vicekrat, musime pouzit runner_id + elif record.runner_id is not None and record.runner_id in runnerids_running: + #do stop + id_to_stop = record.runner_id + #no distinctive condition + else: + #dont do anything + print(f"strategy {record.strat_id} not RUNNING or not distinctive (manually launched or two strat_ids in scheduler)") + continue + + print(f"Requesting STOP {id_to_stop}") + res, msg = stop_strategy(id_to_stop) + if res < 0: + msg = f"ERROR while STOPPING runner_id/strat_id {id_to_stop} {msg}" + send_to_telegram(msg) + else: + if record.strat_id in strat_ids_running: + strat_ids_running.remove(record.strat_id) + if record.runner_id is not None and record.runner_id in runnerids_running: + runnerids_running.remove(record.runner_id) + record.runner_id = None + + record.last_processed = market_time_now + history_string = f"{market_time_now.isoformat()} strategy {record.strat_id}" + "STOPPED" if res == 0 else "ERROR:" + msg + if record.history is None: + record.history = 
history_string + else: + record.history += "\n" + history_string + + #update record (nejspis jeste upravit - last_run a history) + res, set = rm.update_run_manager_record(record.id, record) + if res == 0: + print(f"Record updated {set}") + else: + err_msg= f"Error updating {record.id} errir {set} with values {record}" + print(err_msg) + send_to_telegram(err_msg) + return -2, err_msg#toto stopne zpracovani dalsich zaznamu pri chybe, zvazit continue + + return 0, "DONE" + +##LIVE or PAPER +#tato verze využívate REST API, po predelani jobu na apscheduler uz muze vyuzivat prime volani cs.run_stratin +#TODO predelat +def run_scheduled_strategy(record: RunManagerRecord): + #get strat_json + sada : StrategyInstance = None + res, sada = fetch_stratin(record.strat_id) + if res == 0: + # #TODO toto overit jestli je stejny vystup jako JS + # print("Sada", sada) + # #strategy_instance = StrategyInstance(**sada) + strat_json = json.dumps(sada, default=json_serial) + # Replace escaped characters with their unescaped versions so it matches the JS output + #strat_json = strat_json.replace('\\r\\n', '\r\n') + #print(f"Strat_json fetched, {strat_json}") + else: + err_msg= f"Strategy {record.strat_id} not found. ERROR {sada}" + print(err_msg) + return -2, err_msg + + #TBD mozna customizovat NOTE + + #pokud neni batch_id pak vyhgeneruju a ulozim do db + # if record.batch_id is None: + # record.batch_id = str(uuid4())[:8] + + api_url = f"http://localhost:8000/stratins/{record.strat_id}/run" + + # Initialize RunRequest with record values + runReq = { + "id": str(record.strat_id), + "strat_json": strat_json, + "mode": record.mode, + "account": record.account, + "ilog_save": record.ilog_save, + "weekdays_filter": record.weekdays_filter, + "test_batch_id": record.testlist_id, + "batch_id": record.batch_id or str(uuid4())[:8], + "bt_from": record.bt_from.isoformat() if record.bt_from else None, + "bt_to": record.bt_to.isoformat() if record.bt_to else None, + "note": f"SCHED {record.start_time}-" + record.stop_time if record.stop_time else "" + record.note if record.note is not None else "" + } + + # Headers for the request + headers = { + "X-API-Key": WEB_API_KEY + } + + try: + # Make the PUT request to the API with the headers + response = requests.put(api_url, json=runReq, headers=headers) + + # Check if the request was successful + if response.status_code == 200: + print(f"Strategy {record.strat_id} started successfully.") + return 0, response.json() + else: + err_msg = f"Strategy {record.strat_id} NOT started. Status Code: {response.status_code}, Response: {response.text}" + print(err_msg) + return -2, err_msg + except requests.RequestException as e: + err_msg = f"Request failed: {str(e)}" + print(err_msg) + return -2, err_msg + + # #intiializae RunRequest with record values + # runReq = RunRequest(id=record.strat_id, + # strat_json=strat_json, + # mode=record.mode, + # account=record.account, + # ilog_save=record.ilog_save, + # weekdays_filter=record.weekdays_filter, + # test_batch_id=record.testlist_id, + # batch_id=record.batch_id, + # bt_from=record.bt_from, + # bt_to=record.bt_to, + # note=record.note) + # #call rest API to start strategy + + + # #start strategy + # res, sada = cs.run_stratin(id=record.strat_id, runReq=runReq, inter_batch_params=None) + # if res == 0: + # print(f"Strategy {sada} started") + # return 0, sada + # else: + # err_msg= f"Strategy {record.strat_id} NOT started. 
ERROR {sada}" + # print(err_msg) + # return -2, err_msg + + +if __name__ == "__main__": + #use naive datetoime + debug_date = None + debug_date = datetime(2024, 2, 16, 16, 37, 0, 0) + #debug_date = datetime(2024, 2, 16, 10, 30, 0, 0) + #debug_date = datetime(2024, 2, 16, 16, 1, 0, 0) + + if debug_date is not None: + # Localize the naive datetime object to the Eastern timezone + debug_date = zoneNY.localize(debug_date) + #debugdate formatted as string in format "23.12.2024 9:30" + formatted_date = debug_date.strftime("%d.%m.%Y %H:%M") + print("Scheduler.py NY time: ", formatted_date) + print("ISoformat", debug_date.isoformat()) + + res, msg = startstop_scheduled(debug_date=debug_date, market="US") + print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {msg}") \ No newline at end of file diff --git a/v2realbot/static/index.html b/v2realbot/static/index.html index 7f462f8..f8a1431 100644 --- a/v2realbot/static/index.html +++ b/v2realbot/static/index.html @@ -298,6 +298,251 @@ + + +
+            <!-- Run Manager (scheduler) table: markup omitted. Header columns:
+                 Id, Type, Strat_Id, Symbol, Account, Mode, Note, Log, BT_from, BT_to,
+                 days, batch_id, start, stop, status, last_processed, history,
+                 valid_from, valid_to, testlist_id, Running, RunnerId -->
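
For reference, below is a minimal sketch of exercising the new /run_manager_records/ endpoints added in main.py above. It is illustrative only and not part of the patch: the host/port and the X-API-Key header mirror the calls already made in scheduler.py, while the RunManagerRecord field names and enum spellings are assumptions inferred from how the model is used in ap_scheduler.py and scheduler.py and should be checked against common/model.py and enums/enums.py.

# Illustrative sketch only (not part of the patch): create, list and delete a
# schedule record through the new run-manager REST API. Field names and enum
# values are assumptions inferred from how RunManagerRecord is used above.
import requests

API = "http://localhost:8000"                  # same base URL scheduler.py uses
HEADERS = {"X-API-Key": "<WEB_API_KEY>"}       # value of WEB_API_KEY from config

# Hypothetical record: run a paper strategy 09:30-15:55 New York time, Mon-Fri.
new_record = {
    "strat_id": "00000000-0000-0000-0000-000000000000",  # an existing stratin id
    "moddus": "schedule",                # assumed serialization of Moddus.SCHEDULE
    "status": "active",                  # assumed serialization of SchedulerStatus.ACTIVE
    "account": "ACCOUNT1",               # assumed Account enum value
    "mode": "paper",                     # assumed Mode enum value
    "start_time": "09:30",               # HH:MM, interpreted in zoneNY
    "stop_time": "15:55",
    "weekdays_filter": [0, 1, 2, 3, 4],  # Monday=0 .. Friday=4; empty = every day
    "ilog_save": False,
    "note": "smoke test via REST",
}

resp = requests.post(f"{API}/run_manager_records/", json=new_record, headers=HEADERS)
resp.raise_for_status()
record_id = resp.json()["id"]

# List all records; per the comments in ap_scheduler.py, add/update/delete are
# expected to trigger a refresh of the scheduler_start_/scheduler_stop_ job pair.
print(requests.get(f"{API}/run_manager_records/", headers=HEADERS).json())

# Remove the smoke-test record again.
requests.delete(f"{API}/run_manager_records/{record_id}", headers=HEADERS).raise_for_status()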
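
A second sketch, assuming APScheduler's documented CronTrigger behaviour, sanity-checks the trigger window that initialize_jobs() builds for such a record: fire at 09:30 New York time on the filtered weekdays, only inside the valid_from/valid_to range. Note that APScheduler's day_of_week uses Monday=0, which matches the weekdays_filter convention above but differs from classic cron.

# Illustrative check (not part of the patch): the CronTrigger produced for a
# record with start_time "09:30" and weekdays_filter [0,1,2,3,4] fires only on
# weekdays at 09:30 America/New_York within the validity window.
from datetime import datetime
import pytz
from apscheduler.triggers.cron import CronTrigger

zoneNY = pytz.timezone("America/New_York")

trigger = CronTrigger(
    day_of_week="0,1,2,3,4",             # APScheduler convention: 0 = Monday
    hour=9, minute=30,
    start_date=zoneNY.localize(datetime(2024, 2, 1)),   # example validity window
    end_date=zoneNY.localize(datetime(2024, 3, 1)),
    timezone=zoneNY,
)

now = zoneNY.localize(datetime(2024, 2, 16, 8, 0))      # a Friday before the open
print(trigger.get_next_fire_time(None, now))            # 2024-02-16 09:30:00-05:00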