CANDLEGAPS¶
- gaps on second-based bars indicate a short-term up/down move (see the illustrative sketch below)
TODO:
- finish the short side
- add combinations with angle or a similar short-term momentum measure as a supplementary indicator
- try different timeframes (seconds and minutes) plus the gap size threshold and other conditions
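Purely as an illustration of the gap condition (the actual entry logic is built later in this notebook on 5-second bars), an up-gap can be expressed as the current bar opening above the previous bar's high after a bullish previous bar. The helper below is a hypothetical sketch, not code from the original cells; the `min_gap` value mirrors the 0.010 buffer used later.

# Hypothetical sketch of the gap-up condition used later in this notebook.
import pandas as pd

def gap_up_mask(bars: pd.DataFrame, min_gap: float = 0.01) -> pd.Series:
    """True where the bar opens at least `min_gap` above the previous bar's high
    after a bullish previous bar and keeps moving up. `bars` is assumed to have
    open/high/low/close columns indexed by bar time."""
    prev = bars.shift(1)
    bullish_prev = prev["close"] > prev["open"]          # previous bar closed up
    gapped_up = bars["open"] > prev["high"] + min_gap    # open gaps above previous high
    holds_up = (bars["close"] > prev["close"]) & (bars["close"] > bars["open"])
    return bullish_prev & gapped_up & holds_up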
In [19]:
from dotenv import load_dotenv

# as V2realbot is a client, load env variables here
env_file = "/Users/davidbrazda/Documents/Development/python/.env"
# Load the .env file
load_dotenv(env_file)

from v2realbot.utils.utils import zoneNY
import pandas as pd
import numpy as np
import vectorbtpro as vbt
# from itables import init_notebook_mode, show
import datetime
from itertools import product
from v2realbot.config import DATA_DIR
from lightweight_charts import JupyterChart, chart, Panel, PlotAccessor
from IPython.display import display

# init_notebook_mode(all_interactive=True)
vbt.settings.set_theme("dark")
vbt.settings['plotting']['layout']['width'] = 1280
vbt.settings.plotting.auto_rangebreaks = True

# Set the option to display with pagination
pd.set_option('display.notebook_repr_html', True)
pd.set_option('display.max_rows', 10)  # Number of rows per page
In [20]:
# Define the market open and close times
market_open = datetime.time(9, 30)
market_close = datetime.time(16, 0)
entry_window_opens = 1
entry_window_closes = 370
forced_exit_start = 380
forced_exit_end = 390

# LOAD FROM PARQUET
# list all files in the directory with parquet extension
dir = DATA_DIR + "/notebooks/"
import os
files = [f for f in os.listdir(dir) if f.endswith(".parquet")]
print('\n'.join(map(str, files)))

file_name = "ohlcv_df-BAC-2023-01-01T09_30_00-2024-05-25T15_30_00-47BCFOPUVWZ-100.parquet"
ohlcv_df = pd.read_parquet(dir + file_name, engine='pyarrow')

# filter ohlcv_df to a certain date range (assuming a datetime index)
ohlcv_df = ohlcv_df.loc["2024-02-12 9:30":"2024-02-19 16:00"]

# add vwap column to ohlcv_df
# ohlcv_df["hlcc4"] = (ohlcv_df["close"] + ohlcv_df["high"] + ohlcv_df["low"] + ohlcv_df["close"]) / 4

basic_data = vbt.Data.from_data(vbt.symbol_dict({"BAC": ohlcv_df}), tz_convert=zoneNY)
ohlcv_df = None
basic_data.wrapper.index.normalize().nunique()
trades_df-BAC-2024-01-01T09_30_00-2024-05-14T16_00_00-CO4B7VPWUZF-100.parquet
trades_df-BAC-2024-01-11T09:30:00-2024-01-12T16:00:00.parquet
trades_df-SPY-2024-01-01T09:30:00-2024-05-14T16:00:00.parquet
trades_df-BAC-2023-01-01T09_30_00-2024-05-25T16_00_00-47BCFOPUVWZ-100.parquet
ohlcv_df-BAC-2024-01-11T09:30:00-2024-01-12T16:00:00.parquet
trades_df-BAC-2024-05-15T09_30_00-2024-05-25T16_00_00-47BCFOPUVWZ-100.parquet
ohlcv_df-BAC-2024-01-01T09_30_00-2024-05-25T16_00_00-47BCFOPUVWZ-100.parquet
ohlcv_df-SPY-2024-01-01T09:30:00-2024-05-14T16:00:00.parquet
ohlcv_df-BAC-2024-01-01T09_30_00-2024-05-14T16_00_00-CO4B7VPWUZF-100.parquet
ohlcv_df-BAC-2023-01-01T09_30_00-2024-05-25T16_00_00-47BCFOPUVWZ-100.parquet
ohlcv_df-BAC-2023-01-01T09_30_00-2024-05-25T15_30_00-47BCFOPUVWZ-100.parquet
Out[20]:
5
In [21]:
basic_data.wrapper.index.normalize().nunique()
Out[21]:
5
In [22]:
basic_data.data["BAC"].info()
<class 'pandas.core.frame.DataFrame'>
DatetimeIndex: 57966 entries, 2024-02-12 09:30:00-05:00 to 2024-02-16 15:59:59-05:00
Data columns (total 10 columns):
 #   Column      Non-Null Count  Dtype
---  ------      --------------  -----
 0   open        57966 non-null  float64
 1   high        57966 non-null  float64
 2   low         57966 non-null  float64
 3   close       57966 non-null  float64
 4   volume      57966 non-null  float64
 5   trades      57966 non-null  float64
 6   updated     57966 non-null  datetime64[us, US/Eastern]
 7   vwap        57966 non-null  float64
 8   buyvolume   57966 non-null  float64
 9   sellvolume  57966 non-null  float64
dtypes: datetime64[us, US/Eastern](1), float64(9)
memory usage: 4.9 MB
Add a resample function for the custom columns (buyvolume, sellvolume, and trades are summed when resampling instead of carrying the last value forward)
In [17]:
from vectorbtpro.utils.config import merge_dicts, Config, HybridConfig
from vectorbtpro import _typing as tp
from vectorbtpro.generic import nb as generic_nb

_feature_config: tp.ClassVar[Config] = HybridConfig(
    {
        "buyvolume": dict(
            resample_func=lambda self, obj, resampler: obj.vbt.resample_apply(
                resampler,
                generic_nb.sum_reduce_nb,
            )
        ),
        "sellvolume": dict(
            resample_func=lambda self, obj, resampler: obj.vbt.resample_apply(
                resampler,
                generic_nb.sum_reduce_nb,
            )
        ),
        "trades": dict(
            resample_func=lambda self, obj, resampler: obj.vbt.resample_apply(
                resampler,
                generic_nb.sum_reduce_nb,
            )
        ),
    }
)

basic_data._feature_config = _feature_config
In [18]:
s1data = basic_data[['open', 'high', 'low', 'close', 'volume', 'vwap', 'buyvolume', 'trades', 'sellvolume']]

s2data = s1data.resample("2s")
s2data = s2data.transform(lambda df: df.between_time('09:30', '16:00').dropna())

s5data = s1data.resample("5s")
s5data = s5data.transform(lambda df: df.between_time('09:30', '16:00').dropna())

t1data = basic_data[['open', 'high', 'low', 'close', 'volume', 'vwap', 'buyvolume', 'trades', 'sellvolume']].resample("1T")
t1data = t1data.transform(lambda df: df.between_time('09:30', '16:00').dropna())
# t1data.data["BAC"].info()

t30data = basic_data[['open', 'high', 'low', 'close', 'volume', 'vwap', 'buyvolume', 'trades', 'sellvolume']].resample("30T")
t30data = t30data.transform(lambda df: df.between_time('09:30', '16:00').dropna())
# t30data.data["BAC"].info()

s2close = s2data.close
s1close = s1data.close
t1close = t1data.close
t30close = t30data.close
t30volume = t30data.volume

# resample on specific index
resampler = vbt.Resampler(t30data.index, s1data.index, source_freq="30T", target_freq="1s")
t30close_realigned = t30close.vbt.realign_closing(resampler)

# resample 1min to s
resampler_s = vbt.Resampler(t1data.index, s1data.index, source_freq="1T", target_freq="1s")
t1close_realigned = t1close.vbt.realign_closing(resampler_s)

# resample 2s to s
resampler_s = vbt.Resampler(s2data.index, s1data.index, source_freq="2s", target_freq="1s")
s2close_realigned = s2close.vbt.realign_closing(resampler_s)
---------------------------------------------------------------------------
TypingError                               Traceback (most recent call last)
Cell In[18], line 25
     23 #resample on specific index
     24 resampler = vbt.Resampler(t30data.index, s1data.index, source_freq="30T", target_freq="1s")
---> 25 t30close_realigned = t30close.vbt.realign_closing(resampler)

File ~/Documents/Development/python/strategy-lab1/.venv/lib/python3.10/site-packages/vectorbtpro/generic/accessors.py:2471, in GenericAccessor.realign_closing(self, *args, **kwargs)
-> 2471     return self.realign(*args, source_rbound=True, target_rbound=True, **kwargs)

File ~/Documents/Development/python/strategy-lab1/.venv/lib/python3.10/site-packages/vectorbtpro/generic/accessors.py:2443, in GenericAccessor.realign(self, index, freq, nan_value, ffill, source_rbound, target_rbound, jitted, chunked, wrap_kwargs, silence_warnings)
-> 2443     out = func(
                self.to_2d_array(),
                resampler.source_index.values,
                resampler.target_index.values,
                source_freq=source_freq,
                target_freq=target_freq,
                source_rbound=use_source_rbound,
                target_rbound=use_target_rbound,
                nan_value=nan_value,
                ffill=ffill,
            )

TypingError: Failed in nopython mode pipeline (step: nopython frontend)
No implementation of function Function(<function _realign_nb at 0x11ccb0940>) found for signature:
    _realign_nb(array(float64, 2d, C), array(datetime64[us], 1d, C), array(datetime64[us], 1d, C),
                timedelta64[ns], timedelta64[ns], bool, bool, float64, bool)
Both candidate overloads (_realign_nb and _realign_1d_nb, vectorbtpro/generic/nb/base.py, lines 1673 and 1509)
were rejected with the same root cause:
    Cannot unify datetime64[ns] and datetime64[us] for 'source_val.4'
    File "../../.venv/lib/python3.10/site-packages/vectorbtpro/generic/nb/base.py", line 1577:
        target_val = target_index[i] + target_freq
        if source_val > target_val:
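The failure comes from mixing index resolutions: the parquet data carries a datetime64[us] index while the inferred frequencies are timedelta64[ns], and Numba cannot unify the two. A possible workaround, shown only as an unverified sketch and assuming pandas >= 2.0 (DatetimeIndex.as_unit), is to cast both indexes to nanosecond resolution before building the resampler:

# Unverified workaround sketch (assumes pandas >= 2.0 for DatetimeIndex.as_unit):
# cast both indexes to datetime64[ns] so Numba sees a single datetime unit.
t30_index_ns = t30data.index.as_unit("ns")
s1_index_ns = s1data.index.as_unit("ns")

resampler_ns = vbt.Resampler(t30_index_ns, s1_index_ns, source_freq="30T", target_freq="1s")

t30close_ns = t30close.copy()
t30close_ns.index = t30_index_ns          # keep the series index consistent with the resampler
t30close_realigned = t30close_ns.vbt.realign_closing(resampler_ns)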
In [ ]:
vbt.IF.list_indicators("*vwap")
vbt.phelp(vbt.VWAP.run)
VWAP¶
In [ ]:
t1vwap_h = vbt.VWAP.run(t1data.high, t1data.low, t1data.close, t1data.volume, anchor="H")
t1vwap_d = vbt.VWAP.run(t1data.high, t1data.low, t1data.close, t1data.volume, anchor="D")
t1vwap_t = vbt.VWAP.run(t1data.high, t1data.low, t1data.close, t1data.volume, anchor="T")

t1vwap_h_real = t1vwap_h.vwap.vbt.realign_closing(resampler_s)
t1vwap_d_real = t1vwap_d.vwap.vbt.realign_closing(resampler_s)
t1vwap_t_real = t1vwap_t.vwap.vbt.realign_closing(resampler_s)
# t1vwap_5t.xloc["2024-01-3 09:30:00":"2024-01-03 16:00:00"].plot()

div_rel = (s1data.close.vbt - t1vwap_h_real) - 1
div_rel
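Note that div_rel above subtracts 1 from a price difference. If the intent is a relative divergence of the 1-second close from the hourly-anchored VWAP, the ratio form is probably what was meant; the line below is an assumed correction, not the original definition:

# Assumed intent: relative divergence as (close / vwap) - 1 rather than (close - vwap) - 1
div_rel = (s1data.close.vbt / t1vwap_h_real) - 1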
In [ ]:
#m30data.close.lw.plot() #quick few liner
pane1 = Panel(
    histogram=[
        # (s1data.volume, "volume", None, 0.8),
        # (m30volume, "m30volume", None, 1)
    ],  # [(series, name, "rgba(53, 94, 59, 0.6)", opacity)]
    right=[
        (s1data.close, "1s close"),
        (s2data.close, "2s close"),
        (t1data.close, "1min close"),
        (t1vwap_t, "1mvwap_t"),
        (t1vwap_h, "1mvwap_h"),
        (t1vwap_d, "1mvwap_d"),
        (t1vwap_t_real, "1mvwap_t_real"),
        (t1vwap_h_real, "1mvwap_h_real"),
        (t1vwap_d_real, "1mvwap_d_real"),
        # (t1close_realigned, "1min close realigned"),
        # (m30data.close, "30min-close"),
        # (m30close_realigned, "30min close realigned"),
    ],
    left=[
        (div_rel, "reldiv1s_1Hvwap",)
    ],
)
ch = chart([pane1], size="s", xloc=slice("2024-05-1 09:30:00", "2024-05-5 16:00:00"))
SUPERTREND¶
In [ ]:
supertrend_s1 = vbt.SUPERTREND.run(s1data.high, s1data.low, s1data.close, period=5, multiplier=3)
direction_series_s1 = supertrend_s1.direction

supertrend_t1 = vbt.SUPERTREND.run(t1data.high, t1data.low, t1data.close, period=14, multiplier=3)
direction_series_t1 = supertrend_t1.direction

supertrend_t30 = vbt.SUPERTREND.run(t30data.high, t30data.low, t30data.close, period=14, multiplier=3)
direction_series_t30 = supertrend_t30.direction

resampler_1t_sec = vbt.Resampler(direction_series_t1.index, direction_series_s1.index, source_freq="1T", target_freq="1s")
resampler_30t_sec = vbt.Resampler(direction_series_t30.index, direction_series_s1.index, source_freq="30T", target_freq="1s")

direction_series_t1_realigned = direction_series_t1.vbt.realign_closing(resampler_1t_sec)
direction_series_t30_realigned = direction_series_t30.vbt.realign_closing(resampler_30t_sec)
# supertrend_s1.xloc["2024-01-3 09:30:00":"2024-01-03 16:00:00"].plot()
In [ ]:
# aligned_ups = pd.Series(False, index=direction_real.index)
# aligned_downs = pd.Series(False, index=direction_real.index)
# aligned_ups = direction_real == 1 & supertrend.direction == 1
# aligned_ups
In [ ]:
s5close = s5data.data["BAC"].close
s5open = s5data.data["BAC"].open
s5high = s5data.data["BAC"].high
s5low = s5data.data["BAC"].low

s5close_prev = s5close.shift(1)
s5open_prev = s5open.shift(1)
s5high_prev = s5high.shift(1)
s5low_prev = s5low.shift(1)

# gap up from a bullish candle that does not fall back to the candle's level
entry_ups = (s5close_prev > s5open_prev) & (s5open > s5high_prev + 0.010) & (s5close > s5close_prev) & (s5close > s5open)
print(entry_ups.value_counts())

entry_downs = (s5close_prev < s5open_prev) & (s5open < s5low_prev - 0.012) & (s5close < s5close_prev)
print(entry_downs.value_counts())
# entry_ups.info()
Entry window¶
In [ ]:
market_open = datetime.time(9, 30)
market_close = datetime.time(16, 0)
entry_window_opens = 10
entry_window_closes = 370
forced_exit_start = 380
forced_exit_end = 390
In [ ]:
# entry_ups = pd.Series(False, index=s5data.index)
entry_window_open = pd.Series(False, index=entry_ups.index)

# Calculate the time difference in minutes from market open for each timestamp
elapsed_min_from_open = (entry_ups.index.hour - market_open.hour) * 60 + (entry_ups.index.minute - market_open.minute)
entry_window_open[(elapsed_min_from_open >= entry_window_opens) & (elapsed_min_from_open < entry_window_closes)] = True
entry_ups = entry_ups & entry_window_open
# entry_ups

entry_down_window_open = pd.Series(False, index=entry_downs.index)
entry_down_window_open[(elapsed_min_from_open >= entry_window_opens) & (elapsed_min_from_open < entry_window_closes)] = True
entry_downs = entry_downs & entry_down_window_open

forced_exits = pd.Series(False, index=s5data.index)
forced_exits[(elapsed_min_from_open >= forced_exit_start) & (elapsed_min_from_open < forced_exit_end)] = True
# forced_exits
In [ ]:
entry_ups.value_counts()
In [ ]:
s5vwap_h = vbt.VWAP.run(s5data.high, s5data.low, s5data.close, s5data.volume, anchor="H")
s5vwap_d = vbt.VWAP.run(s5data.high, s5data.low, s5data.close, s5data.volume, anchor="D")
# s5vwap_h_real = s5vwap_h.vwap.vbt.realign_closing(resampler_s)
# s5vwap_d_real = s5vwap_d.vwap.vbt.realign_closing(resampler_s)
In [ ]:
pane1 = Panel(
    ohlcv=(s5data.data["BAC"],),  # (series, entries, exits, other_markers)
    histogram=[],  # [(series, name, "rgba(53, 94, 59, 0.6)", opacity)]
    right=[  # [(series, name, entries, exits, other_markers)]
        (s5data.data["BAC"].close, "close", entry_ups, entry_downs),
        (s5data.data["BAC"].open, "open"),
        (s5vwap_h, "vwap5s_H",),
        (s5vwap_d, "vwap5s_D",)
        # (t1data.data["BAC"].vwap, "vwap"),
        # (t1data.close, "1min close"),
        # (supertrend_s1.trend, "STtrend"),
        # (supertrend_s1.long, "STlong"),
        # (supertrend_s1.short, "STshort")
    ],
    left=[
        # (direction_series_s1, "direction_s1"),
        # (direction_series_t1, "direction_t1"),
        # (direction_series_t30, "direction_t30")
    ],
    # right=[(bbands.upperband, "upperband",),
    #        (bbands.lowerband, "lowerband",),
    #        (bbands.middleband, "middleband",)
    #        ],  # [(series, name, entries, exits, other_markers)]
    middle1=[],
    middle2=[],
)

# pane2 = Panel(
#     ohlcv=(t1data.data["BAC"], uptrend_m30, downtrend_m30),  # (series, entries, exits, other_markers)
#     histogram=[],
#     left=[  # [(series, name, entries, exits, other_markers)]
#         (direction_real, "direction30min_real"),
#     ],
#     # left = [(supertrendm30.direction, "STdirection30")],
#     # right=[(bbands.upperband, "upperband",),
#     #        (bbands.lowerband, "lowerband",),
#     #        (bbands.middleband, "middleband",)
#     #        ],
#     middle1=[],
#     middle2=[],
#     title="1m")

ch = chart([pane1], sync=True, size="s", xloc=slice("2024-05-20 09:30:00", "2024-05-25 16:00:00"), precision=6)
In [ ]:
pd.set_option('display.max_rows', None)
# data = s5data.xloc["2024-01-03 09:30:00":"2024-03-10 16:00:00"]
# entry = entry_ups.vbt.xloc["2024-01-03 09:30:00":"2024-03-10 16:00:00"].obj
pf = vbt.Portfolio.from_signals(close=s5data, entries=entry_ups, exits=forced_exits, direction="longonly",
                                sl_stop=0.05/100, tp_stop=0.05/100, fees=0.0167/100, freq="5s")
pf.stats()
In [ ]:
pf.xloc["2024-02-20 09:30:00":"2024-05-25 16:00:00"].plot()
In [ ]:
vbt.pdir(pf)
In [ ]:
pf.xloc["2024-05-20 09:30:00":"2024-05-25 16:00:00"].asset_value
In [ ]:
hourly_returns = pf.returns.resample("h").get()
hourly_returns.plot()
In [ ]:
pf.returns
In [ ]:
pf.value
In [ ]:
pf.value.vbt.lineplot()
In [ ]:
pf5 = pf.xloc["2024-05-20 09:30:00":"2024-05-25 16:00:00"]
In [ ]:
monthly_returns = pf5.returns_acc.resample("5T").get()
monthly_returns = monthly_returns[monthly_returns != 0]
monthly_returns
# monthly_returns.vbt.heatmap()
# fig = monthly_returns.vbt.heatmap()
# fig = monthly_returns.vbt.ts_heatmap()
In [ ]:
pd.set_option('display.max_rows', None)
pf.stats()
In [ ]:
pf.plot().save_png()
In [ ]:
pf.trades.records_readable.sort_values(by="PnL")
In [ ]:
pf.xloc["2024-03-13 09:30:00":"2024-03-20 16:00:00"].plot()
In [ ]:
pf.xloc["2024-01-26 09:30:00":"2024-01-28 16:00:00"].plot()
In [ ]:
pd.set_option('display.max_rows', None)
pf.stats()
# pf.xloc["monday"].stats()
In [ ]:
buyvolume = t1data.data["BAC"].buyvolume
sellvolume = t1data.data["BAC"].sellvolume
totalvolume = buyvolume + sellvolume

# adjust to a minimal value to avoid division by zero
sellvolume_adjusted = sellvolume.replace(0, 1e-10)
oibratio = buyvolume / sellvolume

# cumulative order flow (net difference)
cof = buyvolume - sellvolume

# Calculate the order imbalance: normalize the net difference between buy and sell volumes by the total volume.
order_imbalance = cof / totalvolume
order_imbalance = order_imbalance.fillna(0)  # replace NaN with 0
order_imbalance_allvolume = cof / t1data.data["BAC"].volume

order_imbalance_sma = vbt.indicator("talib:EMA").run(order_imbalance, timeperiod=5)

short_signals = order_imbalance.vbt < -0.5
# short_entries = oibratio.vbt < 0.01
short_signals.value_counts()
short_signals.name = "short_entries"  # .fillna(False)
short_exits = short_signals.shift(-2).fillna(False).astype(bool)
In [ ]:
pane1 = Panel(
    ohlcv=(t1data.data["BAC"],),  # (series, entries, exits, other_markers)
    histogram=[(order_imbalance_allvolume, "oib_allvolume", "rgba(53, 94, 59, 0.6)", 0.5),
               (t1data.data["BAC"].trades, "trades", None, 0.4),
               ],  # [(series, name, "rgba(53, 94, 59, 0.6)", opacity)]
    # right=[
    #     (supertrend.trend, "STtrend"),
    #     (supertrend.long, "STlong"),
    #     (supertrend.short, "STshort")
    # ],
    # left = [(supertrend.direction, "STdirection")],
    # right=[(bbands.upperband, "upperband",),
    #        (bbands.lowerband, "lowerband",),
    #        (bbands.middleband, "middleband",)
    #        ],  # [(series, name, entries, exits, other_markers)]
    middle1=[],
    middle2=[],
)

pane2 = Panel(
    ohlcv=(basic_data.data["BAC"],),  # (series, entries, exits, other_markers)
    left=[(basic_data.data["BAC"].trades, "trades")],
    histogram=[(basic_data.data["BAC"].trades, "trades_hist", "white", 0.5)],  # "rgba(53, 94, 59, 0.6)"
    # right=[
    #     (supertrend.trend, "STtrend"),
    #     (supertrend.long, "STlong"),
    #     (supertrend.short, "STshort")
    # ],
    # left = [(supertrend.direction, "STdirection")],
    # right=[(bbands.upperband, "upperband",),
    #        (bbands.lowerband, "lowerband",),
    #        (bbands.middleband, "middleband",)
    #        ],  # [(series, name, entries, exits, other_markers)]
    middle1=[],
    middle2=[],
)

ch = chart([pane1, pane2], size="m")
In [ ]:
# short_signal = t1slope.real_below(t1_th) & t2slope.real_below(t2_th) & t3slope.real_below(t3_th) & t4slope.real_below(t4_th)
# long_signal = t1slope.real_above(t1_th) & t2slope.real_above(t2_th) & t3slope.real_above(t3_th) & t4slope.real_above(t4_th)

# test on daily data with a reversal when crossing 0
# note: the comparisons must be parenthesized, otherwise & binds tighter than < / >
short_signal = (t2slope.vbt < -0.01) & (t3slope.vbt < -0.01)  # minimum value of the threshold
long_signal = (t2slope.vbt > 0.01) & (t3slope.vbt > 0.01)     # minimum value of the threshold

# thirty_up_signal = t3slope.vbt.crossed_above(0.01)
# thirty_down_signal = t3slope.vbt.crossed_below(-0.01)

fig = plot_2y_close(priminds=[], secinds=[t3slope], close=t1data.close)
# short_signal.vbt.signals.plot_as_entries(basic_data.close, fig=fig)
short_signal.vbt.signals.plot_as_entries(t1data.close, fig=fig, trace_kwargs=dict(name="SHORTS",
                                         line=dict(color="#ffe476"),
                                         marker=dict(color="red", symbol="triangle-down"),
                                         fill=None,
                                         connectgaps=True,
                                         ))
long_signal.vbt.signals.plot_as_entries(t1data.close, fig=fig, trace_kwargs=dict(name="LONGS",
                                        line=dict(color="#ffe476"),
                                        marker=dict(color="limegreen"),
                                        fill=None,
                                        connectgaps=True,
                                        ))
# thirty_down_signal.vbt.signals.plot_as_entries(t1data.close, fig=fig, trace_kwargs=dict(name="DOWN30",
#                                                line=dict(color="#ffe476"),
#                                                marker=dict(color="yellow", symbol="triangle-down"),
#                                                fill=None,
#                                                connectgaps=True,
#                                                ))
# thirty_up_signal.vbt.signals.plot_as_entries(t1data.close, fig=fig, trace_kwargs=dict(name="UP30",
#                                              line=dict(color="#ffe476"),
#                                              marker=dict(color="grey"),
#                                              fill=None,
#                                              connectgaps=True,
#                                              ))
# thirtymin_slope_to_compare.vbt.plot(fig=fig, add_trace_kwargs=dict(secondary_y=True), trace_kwargs=dict(name="30min slope",
#                                     line=dict(color="yellow"),
#                                     fill=None,
#                                     connectgaps=True,
#                                     ))
fig.show()

# print("short signal")
# print(short_signal.value_counts())

# forced_exit = pd.Series(False, index=close.index)
forced_exit = basic_data.symbol_wrapper.fill(False)
# entry_window_open = pd.Series(False, index=close.index)
entry_window_open = basic_data.symbol_wrapper.fill(False)

# Calculate the time difference in minutes from market open for each timestamp
elapsed_min_from_open = (forced_exit.index.hour - market_open.hour) * 60 + (forced_exit.index.minute - market_open.minute)
entry_window_open[(elapsed_min_from_open >= entry_window_opens) & (elapsed_min_from_open < entry_window_closes)] = True
# print(entry_window_open.value_counts())
forced_exit[(elapsed_min_from_open >= forced_exit_start) & (elapsed_min_from_open < forced_exit_end)] = True

short_entries = (short_signal & entry_window_open)
short_exits = forced_exit
entries = (long_signal & entry_window_open)
exits = forced_exit

# long_entries.info()
# number of Trues and Falses in long_entries
# print(short_exits.value_counts())
# print(short_entries.value_counts())

# fig = plot_2y_close([], [momshort, rocp], close)
# short_signal.vbt.signals.plot_as_entries(close, fig=fig, add_trace_kwargs=dict(secondary_y=False))
# print(sl_stop)
# short_entries=short_entries, short_exits=short_exits,
# pf = vbt.Portfolio.from_signals(close=basic_data, entries=short_entries, exits=exits, tsl_stop=0.005, tp_stop=0.05, fees=0.0167/100, freq="1s")  # sl_stop=sl_stop, tp_stop=sl_stop,
# pf.stats()
In [ ]:
forced_exit = t1data.symbol_wrapper.fill(False)
# entry_window_open = pd.Series(False, index=close.index)
entry_window_open = t1data.symbol_wrapper.fill(False)

# Calculate the time difference in minutes from market open for each timestamp
elapsed_min_from_open = (forced_exit.index.hour - market_open.hour) * 60 + (forced_exit.index.minute - market_open.minute)
entry_window_open[(elapsed_min_from_open >= entry_window_opens) & (elapsed_min_from_open < entry_window_closes)] = True
# print(entry_window_open.value_counts())
forced_exit[(elapsed_min_from_open >= forced_exit_start) & (elapsed_min_from_open < forced_exit_end)] = True

short_entries = (short_signals & entry_window_open)
short_exits = forced_exit
entries = (long_signals & entry_window_open)
exits = forced_exit

pf = vbt.Portfolio.from_signals(close=t1data, entries=entries, exits=exits, short_entries=short_entries, short_exits=exits,
                                td_stop=2, time_delta_format="rows",
                                tsl_stop=0.005, tp_stop=0.005, fees=0.0167/100)  # , freq="1s"  # sl_stop=sl_stop, tp_stop=sl_stop,
pf.stats()
In [ ]:
pf.plot()
In [ ]:
pf.get_drawdowns().records_readable
In [ ]:
pf.orders.records_readable