Compare commits
13 Commits
feature/ma
...
feature/ve
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
dc46ab2b49 | ||
|
|
9e7d974ebd | ||
|
|
66a4cb5d7c | ||
|
|
0bf9aadb0c | ||
|
|
81ca678f55 | ||
|
|
96c7f7207f | ||
|
|
26b72763da | ||
|
|
adc7c3c1b6 | ||
|
|
a6343abe88 | ||
|
|
075984fcff | ||
|
|
5fce627fe3 | ||
|
|
8de1356aa8 | ||
|
|
7f47890cad |
104044
research/basic.ipynb
Normal file
104044
research/basic.ipynb
Normal file
File diff suppressed because it is too large
Load Diff
1526
research/indcross_parametrized.ipynb
Normal file
1526
research/indcross_parametrized.ipynb
Normal file
File diff suppressed because one or more lines are too long
316
research/loading_trades_aggregation.ipynb
Normal file
316
research/loading_trades_aggregation.ipynb
Normal file
@@ -0,0 +1,316 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Loading trades and vectorized aggregation"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"from numba import jit\n",
|
||||
"from alpaca.data.historical import StockHistoricalDataClient\n",
|
||||
"from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR\n",
|
||||
"from alpaca.data.requests import StockTradesRequest\n",
|
||||
"from v2realbot.enums.enums import BarType\n",
|
||||
"import time\n",
|
||||
"\n",
|
||||
"from datetime import datetime\n",
|
||||
"from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, zoneNY, send_to_telegram, fetch_calendar_data\n",
|
||||
"import pyarrow\n",
|
||||
"from v2realbot.loader.aggregator_vectorized import fetch_daily_stock_trades, fetch_trades_parallel, generate_time_bars_nb, aggregate_trades\n",
|
||||
"import vectorbtpro as vbt\n",
|
||||
"\n",
|
||||
"vbt.settings.set_theme(\"dark\")\n",
|
||||
"vbt.settings['plotting']['layout']['width'] = 1280\n",
|
||||
"vbt.settings.plotting.auto_rangebreaks = True\n",
|
||||
"# Set the option to display with pagination\n",
|
||||
"pd.set_option('display.notebook_repr_html', True)\n",
|
||||
"pd.set_option('display.max_rows', 10) # Number of rows per page"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"symbol = \"SPY\"\n",
|
||||
"#datetime in zoneNY \n",
|
||||
"day_start = datetime(2024, 5, 15, 9, 30, 0)\n",
|
||||
"day_stop = datetime(2024, 5, 16, 16, 00, 0)\n",
|
||||
"day_start = zoneNY.localize(day_start)\n",
|
||||
"day_stop = zoneNY.localize(day_stop)\n",
|
||||
"#neslo by zrychlit, kdyz se zobrazuje pomalu Searching cache - nejaky bottle neck?\n",
|
||||
"df = fetch_trades_parallel(symbol, day_start, day_stop, minsize=50) #exclude_conditions=['C','O','4','B','7','V','P','W','U','Z','F'])\n",
|
||||
"ohlcv_df = aggregate_trades(symbol=symbol, trades_df=df, resolution=1, type=BarType.TIME)\n",
|
||||
"#df.info()\n",
|
||||
"ohlcv_df\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"basic_data = vbt.Data.from_data(vbt.symbol_dict({symbol: ohlcv_df}), tz_convert=zoneNY)\n",
|
||||
"vbt.settings['plotting']['auto_rangebreaks'] = True\n",
|
||||
"basic_data.ohlcv.plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import pickle\n",
|
||||
"from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR\n",
|
||||
"import gzip\n",
|
||||
"\n",
|
||||
"file_path = f\"{DATA_DIR}/tradecache/BAC-1709044200-1709067600.cache.gz\"\n",
|
||||
"\n",
|
||||
"with gzip.open(file_path, 'rb') as fp:\n",
|
||||
" tradesResponse = pickle.load(fp)\n",
|
||||
"\n",
|
||||
"tradesResponse"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def convert_dict_to_multiindex_df(tradesResponse):\n",
|
||||
" # Create a DataFrame for each key and add the key as part of the MultiIndex\n",
|
||||
" dfs = []\n",
|
||||
" for key, values in tradesResponse.items():\n",
|
||||
" df = pd.DataFrame(values)\n",
|
||||
" # Rename columns\n",
|
||||
" # Select and order columns explicitly\n",
|
||||
" #print(df)\n",
|
||||
" df = df[['t', 'x', 'p', 's', 'i', 'c','z']]\n",
|
||||
" df.rename(columns={'t': 'timestamp', 'c': 'conditions', 'p': 'price', 's': 'size', 'x': 'exchange', 'z':'tape', 'i':'id'}, inplace=True)\n",
|
||||
" df['symbol'] = key # Add ticker as a column\n",
|
||||
" df['timestamp'] = pd.to_datetime(df['timestamp']) # Convert 't' from string to datetime before setting it as an index\n",
|
||||
" df.set_index(['symbol', 'timestamp'], inplace=True) # Set the multi-level index using both 'ticker' and 't'\n",
|
||||
" df = df.tz_convert(zoneNY, level='timestamp')\n",
|
||||
" dfs.append(df)\n",
|
||||
"\n",
|
||||
" # Concatenate all DataFrames into a single DataFrame with MultiIndex\n",
|
||||
" final_df = pd.concat(dfs)\n",
|
||||
"\n",
|
||||
" return final_df\n",
|
||||
"\n",
|
||||
"# Convert and print the DataFrame\n",
|
||||
"df = convert_dict_to_multiindex_df(tradesResponse)\n",
|
||||
"df\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ohlcv_df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ohlcv_df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ohlcv_df = aggregate_trades(symbol=symbol, trades_df=df, resolution=1000, type=\"dollar\")"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ohlcv_df.index.strftime('%Y-%m-%d %H').unique()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#ohlcv_df.groupby(ohlcv_df.index.date).size()\n",
|
||||
"ohlcv_df.head(100)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#access just BCA\n",
|
||||
"df_filtered = df.loc[\"BAC\"]\n",
|
||||
"\n",
|
||||
"df_filtered.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df_filtered= df_filtered.reset_index()\n",
|
||||
"ticks = df_filtered[['timestamp', 'price', 'size']].to_numpy()\n",
|
||||
"ticks\n",
|
||||
"timestamps = ticks[:, 0]\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df_filtered= df_filtered.reset_index()\n",
|
||||
"ticks = df_filtered[['timestamp', 'price', 'size']].to_numpy()\n",
|
||||
"\n",
|
||||
"#timestamp to integer\n",
|
||||
"# Extract the timestamps column (assuming it's the first column)\n",
|
||||
"timestamps = ticks[:, 0]\n",
|
||||
"\n",
|
||||
"# Convert the timestamps to Unix timestamps in seconds with microsecond precision\n",
|
||||
"unix_timestamps_s = np.array([ts.timestamp() for ts in timestamps], dtype='float64')\n",
|
||||
"\n",
|
||||
"# Replace the original timestamps in the NumPy array with the converted Unix timestamps\n",
|
||||
"ticks[:, 0] = unix_timestamps_s\n",
|
||||
"\n",
|
||||
"#ticks[:, 0] = pd.to_datetime(ticks[:, 0]).astype('int64') // 1_000_000_000 # Convert to Unix timestamp\n",
|
||||
"ticks\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ticks = ticks.astype(np.float64)\n",
|
||||
"ticks"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"\n",
|
||||
"resolution = 1 # Example resolution of 60 seconds\n",
|
||||
"ohlcv_bars = generate_time_bars_nb(ticks, resolution)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"ohlcv_bars"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# Convert the resulting array back to a DataFrame\n",
|
||||
"columns = ['time', 'open', 'high', 'low', 'close', 'volume', 'trades']\n",
|
||||
"ohlcv_df = pd.DataFrame(ohlcv_bars, columns=columns)\n",
|
||||
"ohlcv_df['time'] = pd.to_datetime(ohlcv_df['time'], unit='s')\n",
|
||||
"ohlcv_df.set_index('time', inplace=True)\n",
|
||||
"ohlcv_df.index = ohlcv_df.index.tz_localize('UTC').tz_convert(zoneNY)\n",
|
||||
"#ohlcv_df = ohlcv_df.loc[\"2024-03-1 15:50:00\":\"2024-03-28 13:40:00\"]\n",
|
||||
"#ohlcv_df.index.strftime('%Y-%m-%d %H').unique()\n",
|
||||
"\n",
|
||||
"ohlcv_df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": []
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": ".venv",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.11"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
26673
research/rsi_alpaca.ipynb
Normal file
26673
research/rsi_alpaca.ipynb
Normal file
File diff suppressed because one or more lines are too long
1639
research/strat1/strat1_v1_MULTI.ipynb
Normal file
1639
research/strat1/strat1_v1_MULTI.ipynb
Normal file
File diff suppressed because one or more lines are too long
1526
research/strat1/strat1_v1_SINGLE.ipynb
Normal file
1526
research/strat1/strat1_v1_SINGLE.ipynb
Normal file
File diff suppressed because one or more lines are too long
23637
research/test.ipynb
Normal file
23637
research/test.ipynb
Normal file
File diff suppressed because it is too large
Load Diff
421
research/test1sbars.ipynb
Normal file
421
research/test1sbars.ipynb
Normal file
@@ -0,0 +1,421 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from v2realbot.tools.loadbatch import load_batch\n",
|
||||
"from v2realbot.utils.utils import zoneNY\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"import vectorbtpro as vbt\n",
|
||||
"from itables import init_notebook_mode, show\n",
|
||||
"\n",
|
||||
"init_notebook_mode(all_interactive=True)\n",
|
||||
"\n",
|
||||
"vbt.settings.set_theme(\"dark\")\n",
|
||||
"vbt.settings['plotting']['layout']['width'] = 1280\n",
|
||||
"vbt.settings.plotting.auto_rangebreaks = True\n",
|
||||
"# Set the option to display with pagination\n",
|
||||
"pd.set_option('display.notebook_repr_html', True)\n",
|
||||
"pd.set_option('display.max_rows', 10) # Number of rows per page\n",
|
||||
"\n",
|
||||
"res, df = load_batch(batch_id=\"0fb5043a\", #46 days 1.3 - 6.5.\n",
|
||||
" space_resolution_evenly=False,\n",
|
||||
" indicators_columns=[\"Rsi14\"],\n",
|
||||
" main_session_only=True,\n",
|
||||
" verbose = False)\n",
|
||||
"if res < 0:\n",
|
||||
" print(\"Error\" + str(res) + str(df))\n",
|
||||
"df = df[\"bars\"]\n",
|
||||
"\n",
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# filter dates"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#filter na dny\n",
|
||||
"# dates_of_interest = pd.to_datetime(['2024-04-22', '2024-04-23']).tz_localize('US/Eastern')\n",
|
||||
"# filtered_df = df.loc[df.index.normalize().isin(dates_of_interest)]\n",
|
||||
"\n",
|
||||
"# df = filtered_df\n",
|
||||
"# df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import plotly.io as pio\n",
|
||||
"pio.renderers.default = 'notebook'\n",
|
||||
"\n",
|
||||
"#naloadujeme do vbt symbol as column\n",
|
||||
"basic_data = vbt.Data.from_data({\"BAC\": df}, tz_convert=zoneNY)\n",
|
||||
"start_date = pd.Timestamp('2024-03-12 09:30', tz=zoneNY)\n",
|
||||
"end_date = pd.Timestamp('2024-03-13 16:00', tz=zoneNY)\n",
|
||||
"\n",
|
||||
"#basic_data = basic_data.transform(lambda df: df[df.index.date == start_date.date()])\n",
|
||||
"#basic_data = basic_data.transform(lambda df: df[(df.index >= start_date) & (df.index <= end_date)])\n",
|
||||
"#basic_data.data[\"BAC\"].info()\n",
|
||||
"\n",
|
||||
"# fig = basic_data.plot(plot_volume=False)\n",
|
||||
"# pivot_info = basic_data.run(\"pivotinfo\", up_th=0.003, down_th=0.002)\n",
|
||||
"# #pivot_info.plot()\n",
|
||||
"# pivot_info.plot(fig=fig, conf_value_trace_kwargs=dict(visible=True))\n",
|
||||
"# fig.show()\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# rsi14 = basic_data.data[\"BAC\"][\"Rsi14\"].rename(\"Rsi14\")\n",
|
||||
"\n",
|
||||
"# rsi14.vbt.plot().show()\n",
|
||||
"#basic_data.xloc[\"09:30\":\"10:00\"].data[\"BAC\"].vbt.ohlcv.plot().show()\n",
|
||||
"\n",
|
||||
"vbt.settings.plotting.auto_rangebreaks = True\n",
|
||||
"#basic_data.data[\"BAC\"].vbt.ohlcv.plot()\n",
|
||||
"\n",
|
||||
"#basic_data.data[\"BAC\"]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"m1_data = basic_data[['Open', 'High', 'Low', 'Close', 'Volume']]\n",
|
||||
"\n",
|
||||
"m1_data.data[\"BAC\"]\n",
|
||||
"#m5_data = m1_data.resample(\"5T\")\n",
|
||||
"\n",
|
||||
"#m5_data.data[\"BAC\"].head(10)\n",
|
||||
"\n",
|
||||
"# m15_data = m1_data.resample(\"15T\")\n",
|
||||
"\n",
|
||||
"# m15 = m15_data.data[\"BAC\"]\n",
|
||||
"\n",
|
||||
"# m15.vbt.ohlcv.plot()\n",
|
||||
"\n",
|
||||
"# m1_data.wrapper.index\n",
|
||||
"\n",
|
||||
"# m1_resampler = m1_data.wrapper.get_resampler(\"1T\")\n",
|
||||
"# m1_resampler.index_difference(reverse=True)\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"# m5_resampler.prettify()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# defining ENTRY WINDOW and forced EXIT window"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#m1_data.data[\"BAC\"].info()\n",
|
||||
"import datetime\n",
|
||||
"# Define the market open and close times\n",
|
||||
"market_open = datetime.time(9, 30)\n",
|
||||
"market_close = datetime.time(16, 0)\n",
|
||||
"entry_window_opens = 1\n",
|
||||
"entry_window_closes = 350\n",
|
||||
"\n",
|
||||
"forced_exit_start = 380\n",
|
||||
"forced_exit_end = 390\n",
|
||||
"\n",
|
||||
"forced_exit = m1_data.symbol_wrapper.fill(False)\n",
|
||||
"entry_window_open= m1_data.symbol_wrapper.fill(False)\n",
|
||||
"\n",
|
||||
"# Calculate the time difference in minutes from market open for each timestamp\n",
|
||||
"elapsed_min_from_open = (forced_exit.index.hour - market_open.hour) * 60 + (forced_exit.index.minute - market_open.minute)\n",
|
||||
"\n",
|
||||
"entry_window_open[(elapsed_min_from_open >= entry_window_opens) & (elapsed_min_from_open < entry_window_closes)] = True\n",
|
||||
"forced_exit[(elapsed_min_from_open >= forced_exit_start) & (elapsed_min_from_open < forced_exit_end)] = True\n",
|
||||
"\n",
|
||||
"#entry_window_open.info()\n",
|
||||
"# forced_exit.tail(100)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"close = m1_data.close\n",
|
||||
"\n",
|
||||
"rsi = vbt.RSI.run(close, window=14)\n",
|
||||
"\n",
|
||||
"long_entries = (rsi.rsi.vbt.crossed_below(20) & entry_window_open)\n",
|
||||
"long_exits = (rsi.rsi.vbt.crossed_above(70) | forced_exit)\n",
|
||||
"#long_entries.info()\n",
|
||||
"#number of trues and falses in long_entries\n",
|
||||
"long_entries.value_counts()\n",
|
||||
"#long_exits.value_counts()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"def plot_rsi(rsi, close, entries, exits):\n",
|
||||
" fig = vbt.make_subplots(rows=1, cols=1, shared_xaxes=True, specs=[[{\"secondary_y\": True}]], vertical_spacing=0.02, subplot_titles=(\"RSI\", \"Price\" ))\n",
|
||||
" close.vbt.plot(fig=fig, add_trace_kwargs=dict(secondary_y=True))\n",
|
||||
" rsi.plot(fig=fig, add_trace_kwargs=dict(secondary_y=False))\n",
|
||||
" entries.vbt.signals.plot_as_entries(rsi.rsi, fig=fig, add_trace_kwargs=dict(secondary_y=False)) \n",
|
||||
" exits.vbt.signals.plot_as_exits(rsi.rsi, fig=fig, add_trace_kwargs=dict(secondary_y=False)) \n",
|
||||
" return fig\n",
|
||||
"\n",
|
||||
"plot_rsi(rsi, close, long_entries, long_exits)\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"vbt.phelp(vbt.Portfolio.from_signals)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"sl_stop = np.arange(0.03/100, 0.2/100, 0.02/100).tolist()\n",
|
||||
"# Using the round function\n",
|
||||
"sl_stop = [round(val, 4) for val in sl_stop]\n",
|
||||
"print(sl_stop)\n",
|
||||
"sl_stop = vbt.Param(sl_stop) #np.nan mean s no stoploss\n",
|
||||
"\n",
|
||||
"pf = vbt.Portfolio.from_signals(close=close, entries=long_entries, sl_stop=sl_stop, tp_stop = sl_stop, exits=long_exits,fees=0.0167/100, freq=\"1s\") #sl_stop=sl_stop, tp_stop = sl_stop, \n",
|
||||
"\n",
|
||||
"#pf.stats()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pf.plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pf[(0.0015,0.0013)].plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pf[0.03].plot_trade_signals()\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# pristup k pf jako multi index"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#pf[0.03].plot()\n",
|
||||
"#pf.order_records\n",
|
||||
"pf[(0.03)].stats()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"#zgrupovane statistiky\n",
|
||||
"stats_df = pf.stats([\n",
|
||||
" 'total_return',\n",
|
||||
" 'total_trades',\n",
|
||||
" 'win_rate',\n",
|
||||
" 'expectancy'\n",
|
||||
"], agg_func=None)\n",
|
||||
"stats_df\n",
|
||||
"\n",
|
||||
"\n",
|
||||
"stats_df.nlargest(50, 'Total Return [%]')\n",
|
||||
"#stats_df.info()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"pf[(0.0011,0.0013)].plot()\n",
|
||||
"\n",
|
||||
"#pf[(0.0011,0.0013000000000000002)].plot()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from pandas.tseries.offsets import DateOffset\n",
|
||||
"\n",
|
||||
"temp_data = basic_data['2024-4-22']\n",
|
||||
"temp_data\n",
|
||||
"res1m = temp_data[[\"Open\", \"High\", \"Low\", \"Close\", \"Volume\"]]\n",
|
||||
"\n",
|
||||
"# Define a custom date offset that starts at 9:30 AM and spans 4 hours\n",
|
||||
"custom_offset = DateOffset(hours=4, minutes=30)\n",
|
||||
"\n",
|
||||
"# res1m = res1m.get().resample(\"4H\").agg({ \n",
|
||||
"# \"Open\": \"first\",\n",
|
||||
"# \"High\": \"max\",\n",
|
||||
"# \"Low\": \"min\",\n",
|
||||
"# \"Close\": \"last\",\n",
|
||||
"# \"Volume\": \"sum\"\n",
|
||||
"# })\n",
|
||||
"\n",
|
||||
"res4h = res1m.resample(\"1h\", resample_kwargs=dict(origin=\"start\"))\n",
|
||||
"\n",
|
||||
"res4h.data\n",
|
||||
"\n",
|
||||
"res15m = res1m.resample(\"15T\", resample_kwargs=dict(origin=\"start\"))\n",
|
||||
"\n",
|
||||
"res15m.data[\"BAC\"]"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"@vbt.njit\n",
|
||||
"def long_entry_place_func_nb(c, low, close, time_in_ns, rsi14, window_open, window_close):\n",
|
||||
" market_open_minutes = 570 # 9 hours * 60 minutes + 30 minutes\n",
|
||||
"\n",
|
||||
" for out_i in range(len(c.out)):\n",
|
||||
" i = c.from_i + out_i\n",
|
||||
"\n",
|
||||
" current_minutes = vbt.dt_nb.hour_nb(time_in_ns[i]) * 60 + vbt.dt_nb.minute_nb(time_in_ns[i])\n",
|
||||
" #print(\"current_minutes\", current_minutes)\n",
|
||||
" # Calculate elapsed minutes since market open at 9:30 AM\n",
|
||||
" elapsed_from_open = current_minutes - market_open_minutes\n",
|
||||
" elapsed_from_open = elapsed_from_open if elapsed_from_open >= 0 else 0\n",
|
||||
" #print( \"elapsed_from_open\", elapsed_from_open)\n",
|
||||
"\n",
|
||||
" #elapsed_from_open = elapsed_minutes_from_open_nb(time_in_ns) \n",
|
||||
" in_window = elapsed_from_open > window_open and elapsed_from_open < window_close\n",
|
||||
" #print(\"in_window\", in_window)\n",
|
||||
" # if in_window:\n",
|
||||
" # print(\"in window\")\n",
|
||||
"\n",
|
||||
" if in_window and rsi14[i] > 60: # and low[i, c.col] <= hit_price: # and hour == 9: # (4)!\n",
|
||||
" return out_i\n",
|
||||
" return -1\n",
|
||||
"\n",
|
||||
"@vbt.njit\n",
|
||||
"def long_exit_place_func_nb(c, high, close, time_index, tp, sl): # (5)!\n",
|
||||
" entry_i = c.from_i - c.wait\n",
|
||||
" entry_price = close[entry_i, c.col]\n",
|
||||
" hit_price = entry_price * (1 + tp)\n",
|
||||
" stop_price = entry_price * (1 - sl)\n",
|
||||
" for out_i in range(len(c.out)):\n",
|
||||
" i = c.from_i + out_i\n",
|
||||
" last_bar_of_day = vbt.dt_nb.day_changed_nb(time_index[i], time_index[i + 1])\n",
|
||||
"\n",
|
||||
" #print(next_day)\n",
|
||||
" if last_bar_of_day: #pokud je dalsi next day, tak zavirame posledni\n",
|
||||
" print(\"ted\",out_i)\n",
|
||||
" return out_i\n",
|
||||
" if close[i, c.col] >= hit_price or close[i, c.col] <= stop_price :\n",
|
||||
" return out_i\n",
|
||||
" return -1\n",
|
||||
"\n",
|
||||
"\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df = pd.DataFrame(np.random.random(size=(5, 10)), columns=list('abcdefghij'))\n",
|
||||
"\n",
|
||||
"df"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": null,
|
||||
"metadata": {},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"df.sum()"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": ".venv",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.11"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
1639
research/test1sbars_roc.ipynb
Normal file
1639
research/test1sbars_roc.ipynb
Normal file
File diff suppressed because one or more lines are too long
@@ -1,7 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
# file: restart.sh
|
||||
|
||||
|
||||
# Usage: ./restart.sh [test|prod|all]
|
||||
|
||||
# Define server addresses
|
||||
|
||||
@@ -23,12 +23,12 @@ clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY,
|
||||
|
||||
#get previous days bar
|
||||
|
||||
datetime_object_from = datetime.datetime(2023, 10, 11, 4, 0, 00, tzinfo=datetime.timezone.utc)
|
||||
datetime_object_to = datetime.datetime(2023, 10, 16, 16, 1, 00, tzinfo=datetime.timezone.utc)
|
||||
calendar_request = GetCalendarRequest(start=datetime_object_from,end=datetime_object_to)
|
||||
cal_dates = clientTrading.get_calendar(calendar_request)
|
||||
print(cal_dates)
|
||||
bar_request = StockBarsRequest(symbol_or_symbols="BAC",timeframe=TimeFrame.Day, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)
|
||||
datetime_object_from = datetime.datetime(2024, 3, 9, 13, 29, 00, tzinfo=datetime.timezone.utc)
|
||||
datetime_object_to = datetime.datetime(2024, 3, 11, 20, 1, 00, tzinfo=datetime.timezone.utc)
|
||||
# calendar_request = GetCalendarRequest(start=datetime_object_from,end=datetime_object_to)
|
||||
# cal_dates = clientTrading.get_calendar(calendar_request)
|
||||
# print(cal_dates)
|
||||
bar_request = StockBarsRequest(symbol_or_symbols="BAC",timeframe=TimeFrame.Minute, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)
|
||||
|
||||
# bars = client.get_stock_bars(bar_request).df
|
||||
|
||||
|
||||
89
testy/getrunnerdetail.py
Normal file
89
testy/getrunnerdetail.py
Normal file
@@ -0,0 +1,89 @@
|
||||
|
||||
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
|
||||
import v2realbot.controller.services as cs
|
||||
from v2realbot.utils.utils import slice_dict_lists,zoneUTC,safe_get, AttributeDict
|
||||
id = "b11c66d9-a9b6-475a-9ac1-28b11e1b4edf"
|
||||
state = AttributeDict(vars={})
|
||||
|
||||
##základ pro init_attached_data in strategy.init
|
||||
|
||||
# def get_previous_runner(state):
|
||||
# runner : Runner
|
||||
# res, runner = cs.get_runner(state.runner_id)
|
||||
# if res < 0:
|
||||
# print(f"Not running {id}")
|
||||
# return 0, None
|
||||
|
||||
# return 0, runner.batch_id
|
||||
|
||||
def attach_previous_data(state):
|
||||
runner : Runner
|
||||
#get batch_id of current runer
|
||||
res, runner = cs.get_runner(state.runner_id)
|
||||
if res < 0 or runner.batch_id is None:
|
||||
print(f"Couldnt get previous runner {val}")
|
||||
return None
|
||||
|
||||
batch_id = runner.batch_id
|
||||
#batch_id = "6a6b0bcf"
|
||||
|
||||
res, runner_ids =cs.get_archived_runnerslist_byBatchID(batch_id, "desc")
|
||||
if res < 0:
|
||||
msg = f"error whne fetching runners of batch {batch_id} {runner_ids}"
|
||||
print(msg)
|
||||
return None
|
||||
|
||||
if runner_ids is None or len(runner_ids) == 0:
|
||||
print(f"no runners found for batch {batch_id} {runner_ids}")
|
||||
return None
|
||||
|
||||
last_runner = runner_ids[0]
|
||||
print("Previous runner identified:", last_runner)
|
||||
|
||||
#get details from the runner
|
||||
res, val = cs.get_archived_runner_details_byID(last_runner)
|
||||
if res < 0:
|
||||
print(f"no archived runner {last_runner}")
|
||||
|
||||
detail = RunArchiveDetail(**val)
|
||||
#print("toto jsme si dotahnuli", detail.bars)
|
||||
|
||||
# from stratvars directives
|
||||
attach_previous_bars_indicators = safe_get(state.vars, "attach_previous_bars_indicators", 50)
|
||||
attach_previous_cbar_indicators = safe_get(state.vars, "attach_previous_cbar_indicators", 50)
|
||||
# [stratvars]
|
||||
# attach_previous_bars_indicators = 50
|
||||
# attach_previous_cbar_indicators = 50
|
||||
|
||||
#indicators datetime utc
|
||||
indicators = slice_dict_lists(d=detail.indicators[0],last_item=attach_previous_bars_indicators, time_to_datetime=True)
|
||||
|
||||
#time -datetime utc, updated - timestamp float
|
||||
bars = slice_dict_lists(d=detail.bars, last_item=attach_previous_bars_indicators, time_to_datetime=True)
|
||||
|
||||
#cbar_indicatzors #float
|
||||
cbar_inds = slice_dict_lists(d=detail.indicators[1],last_item=attach_previous_cbar_indicators)
|
||||
|
||||
#USE these as INITs - TADY SI TO JESTE ZASTAVIT a POROVNAT
|
||||
print(f"{state.indicators=} NEW:{indicators=}")
|
||||
state.indicators = indicators
|
||||
print(f"{state.bars=} NEW:{bars=}")
|
||||
state.bars = bars
|
||||
print(f"{state.cbar_indicators=} NEW:{cbar_inds=}")
|
||||
state.cbar_indicators = cbar_inds
|
||||
|
||||
print("BARS and INDS INITIALIZED")
|
||||
#bars
|
||||
|
||||
|
||||
#tady budou pripadne dalsi inicializace, z ext_data
|
||||
print("EXT_DATA", detail.ext_data)
|
||||
#podle urciteho nastaveni napr.v konfiguraci se pouziji urcite promenne
|
||||
|
||||
#pridavame dailyBars z extData
|
||||
# if hasattr(detail, "ext_data") and "dailyBars" in detail.ext_data:
|
||||
# state.dailyBars = detail.ext_data["dailyBars"]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
attach_previous_data(state)
|
||||
@@ -16,6 +16,7 @@ from v2realbot.strategyblocks.newtrade.signals import signal_search
|
||||
from v2realbot.strategyblocks.activetrade.activetrade_hub import manage_active_trade
|
||||
from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
|
||||
from v2realbot.strategyblocks.inits.init_directives import intialize_directive_conditions
|
||||
from v2realbot.strategyblocks.inits.init_attached_data import attach_previous_data
|
||||
from alpaca.trading.client import TradingClient
|
||||
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
|
||||
from alpaca.trading.models import Calendar
|
||||
@@ -115,6 +116,10 @@ def init(state: StrategyState):
|
||||
#models
|
||||
state.vars.loaded_models = {}
|
||||
|
||||
#state attributes for martingale sizing mngmt
|
||||
state.vars["transferables"] = {}
|
||||
state.vars["transferables"]["martingale"] = dict(cont_loss_series_cnt=0)
|
||||
|
||||
#INITIALIZE CBAR INDICATORS - do vlastni funkce
|
||||
#state.cbar_indicators['ivwap'] = []
|
||||
state.vars.last_tick_price = 0
|
||||
@@ -128,6 +133,9 @@ def init(state: StrategyState):
|
||||
initialize_dynamic_indicators(state)
|
||||
intialize_directive_conditions(state)
|
||||
|
||||
#attach part of yesterdays data, bars, indicators, cbar_indicators
|
||||
attach_previous_data(state)
|
||||
|
||||
#intitialize indicator mapping (for use in operation) - mozna presunout do samostatne funkce prip dat do base kdyz se osvedci
|
||||
local_dict_cbar_inds = {key: state.cbar_indicators[key] for key in state.cbar_indicators.keys() if key != "time"}
|
||||
local_dict_inds = {key: state.indicators[key] for key in state.indicators.keys() if key != "time"}
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
from uuid import UUID, uuid4
|
||||
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent, OrderType
|
||||
from v2realbot.common.model import TradeUpdate, Order
|
||||
#from rich import print
|
||||
from rich import print as printanyway
|
||||
import threading
|
||||
import asyncio
|
||||
from v2realbot.config import DATA_DIR
|
||||
@@ -479,11 +479,11 @@ class Backtester:
|
||||
print("BT: submit order entry")
|
||||
|
||||
if not time or time < 0:
|
||||
print("time musi byt vyplneny")
|
||||
printanyway("time musi byt vyplneny")
|
||||
return -1
|
||||
|
||||
if not size or int(size) < 0:
|
||||
print("size musi byt vetsi nez 0")
|
||||
printanyway("size musi byt vetsi nez 0")
|
||||
return -1
|
||||
|
||||
if (order_type != OrderType.MARKET) and (order_type != OrderType.LIMIT):
|
||||
@@ -491,11 +491,11 @@ class Backtester:
|
||||
return -1
|
||||
|
||||
if not side == OrderSide.BUY and not side == OrderSide.SELL:
|
||||
print("side buy/sell required")
|
||||
printanyway("side buy/sell required")
|
||||
return -1
|
||||
|
||||
if order_type == OrderType.LIMIT and count_decimals(price) > 2:
|
||||
print("only 2 decimals supported", price)
|
||||
printanyway("only 2 decimals supported", price)
|
||||
return -1
|
||||
|
||||
#pokud neexistuje klic v accountu vytvorime si ho
|
||||
@@ -517,14 +517,14 @@ class Backtester:
|
||||
|
||||
actual_minus_reserved = int(self.account[symbol][0]) - reserved
|
||||
if actual_minus_reserved > 0 and actual_minus_reserved - int(size) < 0:
|
||||
print("not enough shares available to sell or shorting while long position",self.account[symbol][0],"reserved",reserved,"available",int(self.account[symbol][0]) - reserved,"selling",size)
|
||||
printanyway("not enough shares available to sell or shorting while long position",self.account[symbol][0],"reserved",reserved,"available",int(self.account[symbol][0]) - reserved,"selling",size)
|
||||
return -1
|
||||
|
||||
#if is shorting - check available cash to short
|
||||
if actual_minus_reserved <= 0:
|
||||
cena = price if price else self.get_last_price(time, self.symbol)
|
||||
if (self.cash - reserved_price - float(int(size)*float(cena))) < 0:
|
||||
print("not enough cash for shorting. cash",self.cash,"reserved",reserved,"available",self.cash-reserved,"needed",float(int(size)*float(cena)))
|
||||
printanyway("ERROR: not enough cash for shorting. cash",self.cash,"reserved",reserved,"available",self.cash-reserved,"needed",float(int(size)*float(cena)))
|
||||
return -1
|
||||
|
||||
#check for available cash
|
||||
@@ -543,14 +543,14 @@ class Backtester:
|
||||
|
||||
#jde o uzavreni shortu
|
||||
if actual_plus_reserved_qty < 0 and (actual_plus_reserved_qty + int(size)) > 0:
|
||||
print("nejprve je treba uzavrit short pozici pro buy res_qty, size", actual_plus_reserved_qty, size)
|
||||
printanyway("nejprve je treba uzavrit short pozici pro buy res_qty, size", actual_plus_reserved_qty, size)
|
||||
return -1
|
||||
|
||||
#jde o standardni long, kontroluju cash
|
||||
if actual_plus_reserved_qty >= 0:
|
||||
cena = price if price else self.get_last_price(time, self.symbol)
|
||||
if (self.cash - reserved_price - float(int(size)*float(cena))) < 0:
|
||||
print("not enough cash to buy long. cash",self.cash,"reserved_qty",reserved_qty,"reserved_price",reserved_price, "available",self.cash-reserved_price,"needed",float(int(size)*float(cena)))
|
||||
printanyway("ERROR: not enough cash to buy long. cash",self.cash,"reserved_qty",reserved_qty,"reserved_price",reserved_price, "available",self.cash-reserved_price,"needed",float(int(size)*float(cena)))
|
||||
return -1
|
||||
|
||||
id = str(uuid4())
|
||||
@@ -577,11 +577,11 @@ class Backtester:
|
||||
print("BT: replace order entry",id,size,price)
|
||||
|
||||
if not price and not size:
|
||||
print("size or price required")
|
||||
printanyway("size or price required")
|
||||
return -1
|
||||
|
||||
if len(self.open_orders) == 0:
|
||||
print("BT: order doesnt exist")
|
||||
printanyway("BT: order doesnt exist")
|
||||
return 0
|
||||
#with lock:
|
||||
for o in self.open_orders:
|
||||
@@ -609,7 +609,7 @@ class Backtester:
|
||||
"""
|
||||
print("BT: cancel order entry",id)
|
||||
if len(self.open_orders) == 0:
|
||||
print("BTC: order doesnt exist")
|
||||
printanyway("BTC: order doesnt exist")
|
||||
return 0
|
||||
#with lock:
|
||||
for o in self.open_orders:
|
||||
|
||||
@@ -5,7 +5,7 @@ from rich import print
|
||||
from typing import Any, Optional, List, Union
|
||||
from datetime import datetime, date
|
||||
from pydantic import BaseModel, Field
|
||||
from v2realbot.enums.enums import Mode, Account, SchedulerStatus, Moddus
|
||||
from v2realbot.enums.enums import Mode, Account, SchedulerStatus, Moddus, Market
|
||||
from alpaca.data.enums import Exchange
|
||||
|
||||
|
||||
@@ -94,12 +94,12 @@ class TestList(BaseModel):
|
||||
class Trade(BaseModel):
|
||||
symbol: str
|
||||
timestamp: datetime
|
||||
exchange: Optional[Union[Exchange, str]]
|
||||
exchange: Optional[Union[Exchange, str]] = None
|
||||
price: float
|
||||
size: float
|
||||
id: int
|
||||
conditions: Optional[List[str]]
|
||||
tape: Optional[str]
|
||||
conditions: Optional[List[str]] = None
|
||||
tape: Optional[str] = None
|
||||
|
||||
|
||||
#persisted object in pickle
|
||||
@@ -114,8 +114,20 @@ class StrategyInstance(BaseModel):
|
||||
close_rush: int = 0
|
||||
stratvars_conf: str
|
||||
add_data_conf: str
|
||||
note: Optional[str]
|
||||
history: Optional[str]
|
||||
note: Optional[str] = None
|
||||
history: Optional[str] = None
|
||||
|
||||
def __setstate__(self, state: dict[Any, Any]) -> None:
|
||||
"""
|
||||
Hack to allow unpickling models stored from pydantic V1
|
||||
"""
|
||||
state.setdefault("__pydantic_extra__", {})
|
||||
state.setdefault("__pydantic_private__", {})
|
||||
|
||||
if "__pydantic_fields_set__" not in state:
|
||||
state["__pydantic_fields_set__"] = state.get("__fields_set__")
|
||||
|
||||
super().__setstate__(state)
|
||||
|
||||
class RunRequest(BaseModel):
|
||||
id: UUID
|
||||
@@ -125,8 +137,8 @@ class RunRequest(BaseModel):
|
||||
debug: bool = False
|
||||
strat_json: Optional[str] = None
|
||||
ilog_save: bool = False
|
||||
bt_from: datetime = None
|
||||
bt_to: datetime = None
|
||||
bt_from: Optional[datetime] = None
|
||||
bt_to: Optional[datetime] = None
|
||||
#weekdays filter
|
||||
#pokud je uvedeny filtrujeme tyto dny
|
||||
weekdays_filter: Optional[list] = None
|
||||
@@ -147,8 +159,9 @@ class RunManagerRecord(BaseModel):
|
||||
mode: Mode
|
||||
note: Optional[str] = None
|
||||
ilog_save: bool = False
|
||||
bt_from: datetime = None
|
||||
bt_to: datetime = None
|
||||
market: Optional[Market] = Market.US
|
||||
bt_from: Optional[datetime] = None
|
||||
bt_to: Optional[datetime] = None
|
||||
#weekdays filter
|
||||
#pokud je uvedeny filtrujeme tyto dny
|
||||
weekdays_filter: Optional[list] = None #list of strings 0-6 representing days to run
|
||||
@@ -156,9 +169,9 @@ class RunManagerRecord(BaseModel):
|
||||
batch_id: Optional[str] = None
|
||||
testlist_id: Optional[str] = None
|
||||
start_time: str #time (HH:MM) that start function is called
|
||||
stop_time: Optional[str] #time (HH:MM) that stop function is called
|
||||
stop_time: Optional[str] = None #time (HH:MM) that stop function is called
|
||||
status: SchedulerStatus
|
||||
last_processed: Optional[datetime]
|
||||
last_processed: Optional[datetime] = None
|
||||
history: Optional[str] = None
|
||||
valid_from: Optional[datetime] = None # US East time zone daetime
|
||||
valid_to: Optional[datetime] = None # US East time zone daetime
|
||||
@@ -193,10 +206,10 @@ class Runner(BaseModel):
|
||||
run_name: Optional[str] = None
|
||||
run_note: Optional[str] = None
|
||||
run_ilog_save: Optional[bool] = False
|
||||
run_trade_count: Optional[int]
|
||||
run_profit: Optional[float]
|
||||
run_positions: Optional[int]
|
||||
run_avgp: Optional[float]
|
||||
run_trade_count: Optional[int] = None
|
||||
run_profit: Optional[float] = None
|
||||
run_positions: Optional[int] = None
|
||||
run_avgp: Optional[float] = None
|
||||
run_strat_json: Optional[str] = None
|
||||
run_stopped: Optional[datetime] = None
|
||||
run_paused: Optional[datetime] = None
|
||||
@@ -230,41 +243,41 @@ class Bar(BaseModel):
|
||||
low: float
|
||||
close: float
|
||||
volume: float
|
||||
trade_count: Optional[float]
|
||||
vwap: Optional[float]
|
||||
trade_count: Optional[float] = 0
|
||||
vwap: Optional[float] = 0
|
||||
|
||||
class Order(BaseModel):
|
||||
id: UUID
|
||||
submitted_at: datetime
|
||||
filled_at: Optional[datetime]
|
||||
canceled_at: Optional[datetime]
|
||||
filled_at: Optional[datetime] = None
|
||||
canceled_at: Optional[datetime] = None
|
||||
symbol: str
|
||||
qty: int
|
||||
status: OrderStatus
|
||||
order_type: OrderType
|
||||
filled_qty: Optional[int]
|
||||
filled_avg_price: Optional[float]
|
||||
filled_qty: Optional[int] = None
|
||||
filled_avg_price: Optional[float] = None
|
||||
side: OrderSide
|
||||
limit_price: Optional[float]
|
||||
limit_price: Optional[float] = None
|
||||
|
||||
#entita pro kazdy kompletni FILL, je navazana na prescribed_trade
|
||||
class TradeUpdate(BaseModel):
|
||||
event: Union[TradeEvent, str]
|
||||
execution_id: Optional[UUID]
|
||||
execution_id: Optional[UUID] = None
|
||||
order: Order
|
||||
timestamp: datetime
|
||||
position_qty: Optional[float]
|
||||
price: Optional[float]
|
||||
qty: Optional[float]
|
||||
value: Optional[float]
|
||||
cash: Optional[float]
|
||||
pos_avg_price: Optional[float]
|
||||
profit: Optional[float]
|
||||
profit_sum: Optional[float]
|
||||
rel_profit: Optional[float]
|
||||
rel_profit_cum: Optional[float]
|
||||
signal_name: Optional[str]
|
||||
prescribed_trade_id: Optional[str]
|
||||
position_qty: Optional[float] = None
|
||||
price: Optional[float] = None
|
||||
qty: Optional[float] = None
|
||||
value: Optional[float] = None
|
||||
cash: Optional[float] = None
|
||||
pos_avg_price: Optional[float] = None
|
||||
profit: Optional[float] = None
|
||||
profit_sum: Optional[float] = None
|
||||
rel_profit: Optional[float] = None
|
||||
rel_profit_cum: Optional[float] = None
|
||||
signal_name: Optional[str] = None
|
||||
prescribed_trade_id: Optional[str] = None
|
||||
|
||||
|
||||
class RunArchiveChange(BaseModel):
|
||||
@@ -289,8 +302,7 @@ class RunArchive(BaseModel):
|
||||
bt_from: Optional[datetime] = None
|
||||
bt_to: Optional[datetime] = None
|
||||
strat_json: Optional[str] = None
|
||||
##bude decomiss, misto toho stratvars_toml
|
||||
stratvars: Optional[dict] = None
|
||||
transferables: Optional[dict] = None #varaibles that are transferrable to next run
|
||||
settings: Optional[dict] = None
|
||||
ilog_save: Optional[bool] = False
|
||||
profit: float = 0
|
||||
@@ -332,7 +344,7 @@ class RunArchiveViewPagination(BaseModel):
|
||||
|
||||
#trida pro ukladani historie stoplossy do ext_data
|
||||
class SLHistory(BaseModel):
|
||||
id: Optional[UUID]
|
||||
id: Optional[UUID] = None
|
||||
time: datetime
|
||||
sl_val: float
|
||||
|
||||
@@ -345,7 +357,7 @@ class RunArchiveDetail(BaseModel):
|
||||
indicators: List[dict]
|
||||
statinds: dict
|
||||
trades: List[TradeUpdate]
|
||||
ext_data: Optional[dict]
|
||||
ext_data: Optional[dict] = None
|
||||
|
||||
|
||||
class InstantIndicator(BaseModel):
|
||||
|
||||
@@ -5,9 +5,7 @@ import v2realbot.controller.services as cs
|
||||
|
||||
#prevede dict radku zpatky na objekt vcetme retypizace
|
||||
def row_to_runmanager(row: dict) -> RunManagerRecord:
|
||||
|
||||
is_running = cs.is_runner_running(row['runner_id']) if row['runner_id'] else False
|
||||
|
||||
res = RunManagerRecord(
|
||||
moddus=row['moddus'],
|
||||
id=row['id'],
|
||||
@@ -17,6 +15,7 @@ def row_to_runmanager(row: dict) -> RunManagerRecord:
|
||||
account=row['account'],
|
||||
note=row['note'],
|
||||
ilog_save=bool(row['ilog_save']),
|
||||
market=row['market'] if row['market'] is not None else None,
|
||||
bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
|
||||
bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
|
||||
weekdays_filter=[int(x) for x in row['weekdays_filter'].split(',')] if row['weekdays_filter'] else [],
|
||||
@@ -83,5 +82,6 @@ def row_to_runarchive(row: dict) -> RunArchive:
|
||||
end_positions=int(row['end_positions']),
|
||||
end_positions_avgp=float(row['end_positions_avgp']),
|
||||
metrics=orjson.loads(row['metrics']),
|
||||
stratvars_toml=row['stratvars_toml']
|
||||
stratvars_toml=row['stratvars_toml'],
|
||||
transferables=orjson.loads(row['transferables']) if row['transferables'] else None
|
||||
)
|
||||
@@ -1,7 +1,6 @@
|
||||
|
||||
import v2realbot.common.db as db
|
||||
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
|
||||
import orjson
|
||||
from v2realbot.common.model import ConfigItem
|
||||
import v2realbot.utils.config_handler as ch
|
||||
|
||||
# region CONFIG db services
|
||||
|
||||
@@ -172,14 +172,14 @@ def add_run_manager_record(new_record: RunManagerRecord):
|
||||
# Construct a suitable INSERT query based on your RunManagerRecord fields
|
||||
insert_query = """
|
||||
INSERT INTO run_manager (moddus, id, strat_id, symbol,account, mode, note,ilog_save,
|
||||
bt_from, bt_to, weekdays_filter, batch_id,
|
||||
market, bt_from, bt_to, weekdays_filter, batch_id,
|
||||
start_time, stop_time, status, last_processed,
|
||||
history, valid_from, valid_to, testlist_id)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)
|
||||
"""
|
||||
values = [
|
||||
new_record.moddus, str(new_record.id), str(new_record.strat_id), new_record.symbol, new_record.account, new_record.mode, new_record.note,
|
||||
int(new_record.ilog_save),
|
||||
int(new_record.ilog_save), new_record.market,
|
||||
new_record.bt_from.isoformat() if new_record.bt_from is not None else None,
|
||||
new_record.bt_to.isoformat() if new_record.bt_to is not None else None,
|
||||
",".join(str(x) for x in new_record.weekdays_filter) if new_record.weekdays_filter else None,
|
||||
|
||||
@@ -3,7 +3,7 @@ from uuid import UUID, uuid4
|
||||
import pickle
|
||||
from alpaca.data.historical import StockHistoricalDataClient
|
||||
from alpaca.data.requests import StockTradesRequest, StockBarsRequest
|
||||
from alpaca.data.enums import DataFeed
|
||||
from alpaca.data.enums import DataFeed
|
||||
from alpaca.data.timeframe import TimeFrame
|
||||
from v2realbot.strategy.base import StrategyState
|
||||
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
|
||||
@@ -35,6 +35,7 @@ from sqlite3 import OperationalError, Row
|
||||
import v2realbot.strategyblocks.indicators.custom as ci
|
||||
from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
|
||||
from v2realbot.strategyblocks.indicators.indicators_hub import populate_dynamic_indicators
|
||||
from v2realbot.strategyblocks.inits.init_attached_data import attach_previous_data
|
||||
from v2realbot.interfaces.backtest_interface import BacktestInterface
|
||||
import os
|
||||
import v2realbot.reporting.metricstoolsimage as mt
|
||||
@@ -102,10 +103,10 @@ def create_stratin(si: StrategyInstance):
|
||||
#validate toml
|
||||
res, stp = parse_toml_string(si.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1,"stratvars invalid")
|
||||
return (-1,f"stratvars invalid: {stp}")
|
||||
res, adp = parse_toml_string(si.add_data_conf)
|
||||
if res < 0:
|
||||
return (-1, "None")
|
||||
return (-1, f"add data conf invalid {adp}")
|
||||
si.id = uuid4()
|
||||
#print(si)
|
||||
db.stratins.append(si)
|
||||
@@ -119,10 +120,10 @@ def modify_stratin(si: StrategyInstance, id: UUID):
|
||||
return (-1, "strat is running, use modify_stratin_running")
|
||||
res, stp = parse_toml_string(si.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "stratvars invalid")
|
||||
return (-1, f"stratvars invalid {stp}")
|
||||
res, adp = parse_toml_string(si.add_data_conf)
|
||||
if res < 0:
|
||||
return (-1, "add data conf invalid")
|
||||
return (-1, f"add data conf invalid {adp}")
|
||||
for i in db.stratins:
|
||||
if str(i.id) == str(id):
|
||||
#print("removing",i)
|
||||
@@ -180,14 +181,14 @@ def modify_stratin_running(si: StrategyInstance, id: UUID):
|
||||
#validate toml
|
||||
res,stp = parse_toml_string(si.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "new stratvars format invalid")
|
||||
return (-1, f"new stratvars format invalid {stp}")
|
||||
for i in db.stratins:
|
||||
if str(i.id) == str(id):
|
||||
if not is_stratin_running(id=str(id)):
|
||||
return (-1, "not running")
|
||||
res,stp_old = parse_toml_string(i.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "current stratin stratvars invalid")
|
||||
return (-1, f"current stratin stratvars invalid {stp_old}")
|
||||
#TODO reload running strat
|
||||
#print(stp)
|
||||
#print("starting injection", stp)
|
||||
@@ -412,7 +413,7 @@ def run_batch_stratin(id: UUID, runReq: RunRequest):
|
||||
def get_market_days_in_interval(datefrom, dateto, note = None, id = None):
|
||||
#getting dates from calendat
|
||||
clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False, paper=True)
|
||||
calendar_request = GetCalendarRequest(start=datefrom,end=dateto)
|
||||
calendar_request = GetCalendarRequest(start=datefrom.date(),end=dateto.date())
|
||||
cal_dates = clientTrading.get_calendar(calendar_request)
|
||||
#list(Calendar)
|
||||
# Calendar
|
||||
@@ -446,7 +447,7 @@ def run_batch_stratin(id: UUID, runReq: RunRequest):
|
||||
cal_list.append(RunDay(start = start_time, end = end_time, note = note, id = id))
|
||||
|
||||
print(f"Getting interval dates from - to - RESULT ({len(cal_list)}):")
|
||||
print(cal_list)
|
||||
#print(cal_list)
|
||||
return cal_list
|
||||
|
||||
#getting days to run into RunDays format
|
||||
@@ -618,10 +619,10 @@ def run_stratin(id: UUID, runReq: RunRequest, synchronous: bool = False, inter_b
|
||||
#validate toml
|
||||
res, stp = parse_toml_string(i.stratvars_conf)
|
||||
if res < 0:
|
||||
return (-1, "stratvars invalid")
|
||||
return (-1, f"stratvars invalid {stp}")
|
||||
res, adp = parse_toml_string(i.add_data_conf)
|
||||
if res < 0:
|
||||
return (-1, "add data conf invalid")
|
||||
return (-1, f"add data conf invalid {adp}")
|
||||
id = uuid4()
|
||||
print(f"RUN {id} INITIATED")
|
||||
name = i.name
|
||||
@@ -925,7 +926,8 @@ def archive_runner(runner: Runner, strat: StrategyInstance, inter_batch_params:
|
||||
end_positions=strat.state.positions,
|
||||
end_positions_avgp=round(float(strat.state.avgp),3),
|
||||
metrics=results_metrics,
|
||||
stratvars_toml=runner.run_stratvars_toml
|
||||
stratvars_toml=runner.run_stratvars_toml,
|
||||
transferables=strat.state.vars["transferables"]
|
||||
)
|
||||
|
||||
#flatten indicators from numpy array
|
||||
@@ -1112,7 +1114,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
|
||||
# Total count query
|
||||
total_count_query = """
|
||||
SELECT COUNT(*) FROM runner_header
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value OR symbol like :search_value OR name like :search_value)
|
||||
"""
|
||||
c.execute(total_count_query, {'search_value': f'%{search_value}%'})
|
||||
total_count = c.fetchone()[0]
|
||||
@@ -1127,7 +1129,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
|
||||
SUM(profit) OVER (PARTITION BY batch_id) AS batch_profit,
|
||||
COUNT(*) OVER (PARTITION BY batch_id) AS batch_count
|
||||
FROM runner_header
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value OR symbol like :search_value OR name like :search_value)
|
||||
),
|
||||
InterleavedGroups AS (
|
||||
SELECT *,
|
||||
@@ -1154,7 +1156,7 @@ def get_all_archived_runners_p(request: DataTablesRequest) -> Tuple[int, RunArch
|
||||
# Filtered count query
|
||||
filtered_count_query = """
|
||||
SELECT COUNT(*) FROM runner_header
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value)
|
||||
WHERE (:search_value = '' OR strat_id LIKE :search_value OR batch_id LIKE :search_value OR symbol like :search_value OR name like :search_value)
|
||||
"""
|
||||
c.execute(filtered_count_query, {'search_value': f'%{search_value}%'})
|
||||
filtered_count = c.fetchone()[0]
|
||||
@@ -1220,17 +1222,43 @@ def get_archived_runner_header_byID(id: UUID) -> RunArchive:
|
||||
# else:
|
||||
# return 0, res
|
||||
|
||||
#vrátí seznam runneru s danym batch_id
|
||||
def get_archived_runnerslist_byBatchID(batch_id: str):
|
||||
# #vrátí seznam runneru s danym batch_id
|
||||
# def get_archived_runnerslist_byBatchID(batch_id: str):
|
||||
# conn = pool.get_connection()
|
||||
# try:
|
||||
# cursor = conn.cursor()
|
||||
# cursor.execute(f"SELECT runner_id FROM runner_header WHERE batch_id='{str(batch_id)}'")
|
||||
# runner_list = [row[0] for row in cursor.fetchall()]
|
||||
# finally:
|
||||
# pool.release_connection(conn)
|
||||
# return 0, runner_list
|
||||
|
||||
#update that allows to sort
|
||||
def get_archived_runnerslist_byBatchID(batch_id: str, sort_order: str = "asc"):
|
||||
"""
|
||||
Fetches all runner records by batch_id, sorted by the 'started' column.
|
||||
|
||||
:param batch_id: The batch ID to filter runners by.
|
||||
:param sort_order: The sort order of the 'started' column. Defaults to 'asc'.
|
||||
Accepts 'asc' for ascending or 'desc' for descending order.
|
||||
:return: A tuple with the first element being a status code and the second being the list of runner_ids.
|
||||
"""
|
||||
# Validate sort_order
|
||||
if sort_order.lower() not in ['asc', 'desc']:
|
||||
return -1, [] # Returning an error code and an empty list in case of invalid sort_order
|
||||
|
||||
conn = pool.get_connection()
|
||||
try:
|
||||
cursor = conn.cursor()
|
||||
cursor.execute(f"SELECT runner_id FROM runner_header WHERE batch_id='{str(batch_id)}'")
|
||||
query = f"""SELECT runner_id FROM runner_header
|
||||
WHERE batch_id=?
|
||||
ORDER BY datetime(started) {sort_order.upper()}"""
|
||||
cursor.execute(query, (batch_id,))
|
||||
runner_list = [row[0] for row in cursor.fetchall()]
|
||||
finally:
|
||||
pool.release_connection(conn)
|
||||
return 0, runner_list
|
||||
|
||||
|
||||
def insert_archive_header(archeader: RunArchive):
|
||||
conn = pool.get_connection()
|
||||
try:
|
||||
@@ -1239,11 +1267,11 @@ def insert_archive_header(archeader: RunArchive):
|
||||
|
||||
res = c.execute("""
|
||||
INSERT INTO runner_header
|
||||
(runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, strat_json, settings, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics, stratvars_toml)
|
||||
(runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, strat_json, settings, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics, stratvars_toml, transferables)
|
||||
VALUES
|
||||
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml))
|
||||
(str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, orjson.dumps(archeader.strat_json).decode('utf-8'), orjson.dumps(archeader.settings).decode('utf-8'), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, orjson.dumps(archeader.metrics, default=json_serial, option=orjson.OPT_PASSTHROUGH_DATETIME).decode('utf-8'), archeader.stratvars_toml, orjson.dumps(archeader.transferables).decode('utf-8')))
|
||||
|
||||
#retry not yet supported for statement format above
|
||||
#res = execute_with_retry(c,statement)
|
||||
@@ -1567,7 +1595,7 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
||||
# print(row)
|
||||
res, toml_parsed = parse_toml_string(tomlino)
|
||||
if res < 0:
|
||||
return (-2, "toml invalid")
|
||||
return (-2, f"toml invalid: {toml_parsed}")
|
||||
|
||||
#print("parsed toml", toml_parsed)
|
||||
|
||||
@@ -1664,10 +1692,15 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
||||
|
||||
##intialize required vars from strat init
|
||||
state.vars["loaded_models"] = {}
|
||||
#state attributes for martingale sizing mngmt
|
||||
state.vars["transferables"] = {}
|
||||
state.vars["transferables"]["martingale"] = dict(cont_loss_series_cnt=0)
|
||||
|
||||
##intialize dynamic indicators
|
||||
initialize_dynamic_indicators(state)
|
||||
|
||||
#TODO vazit attached data (z toho potrebuji jen transferables, tzn. najit nejak predchozi runner a prelipnout transferables od zacatku)
|
||||
#nejspis upravit attach_previous_data a nebo udelat specialni verzi
|
||||
#attach_previous_data(state)
|
||||
|
||||
# print("subtype")
|
||||
# function = "ci."+subtype+"."+subtype
|
||||
@@ -1808,10 +1841,10 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
||||
|
||||
#vracime list, kde pozice 0 je bar indicators, pozice 1 je ticks indicators
|
||||
if output == "bar":
|
||||
return 0, [output_dict, []]
|
||||
return 0, [output_dict, {}]
|
||||
#return 0, [new_inds[indicator.name], []]
|
||||
else:
|
||||
return 0, [[], output_dict]
|
||||
return 0, [{}, output_dict]
|
||||
#return 0, [[], new_tick_inds[indicator.name]]
|
||||
|
||||
except Exception as e:
|
||||
@@ -1896,7 +1929,7 @@ def get_alpaca_history_bars(symbol: str, datetime_object_from: datetime, datetim
|
||||
# Workaround of error when no data foun d AttributeError and has the specific message
|
||||
if isinstance(e, AttributeError) and str(e) == "'NoneType' object has no attribute 'items'":
|
||||
print("Caught the specific AttributeError: 'NoneType' object has no attribute 'items' means NO DATA FOUND")
|
||||
#print(str(e) + format_exc())
|
||||
print(str(e) + format_exc())
|
||||
return 0, result
|
||||
else:
|
||||
print(str(e) + format_exc())
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
from enum import Enum
|
||||
from alpaca.trading.enums import OrderSide, OrderStatus, OrderType
|
||||
|
||||
class BarType(str, Enum):
|
||||
TIME = "time"
|
||||
VOLUME = "volume"
|
||||
DOLLAR = "dollar"
|
||||
|
||||
class Env(str, Enum):
|
||||
PROD = "prod"
|
||||
TEST = "test"
|
||||
@@ -103,4 +108,10 @@ class StartBarAlign(str, Enum):
|
||||
RANDOM = first bar starts when first trade occurs
|
||||
"""
|
||||
ROUND = "round"
|
||||
RANDOM = "random"
|
||||
RANDOM = "random"
|
||||
|
||||
class Market(str, Enum):
|
||||
US = "US"
|
||||
CRYPTO = "CRYPTO"
|
||||
|
||||
|
||||
@@ -40,7 +40,9 @@ class LiveInterface(GeneralInterface):
|
||||
|
||||
return market_order.id
|
||||
except Exception as e:
|
||||
print("Nepodarilo se odeslat buy", str(e))
|
||||
reason = "Nepodarilo se market buy:" + str(e) + format_exc()
|
||||
print(reason)
|
||||
send_to_telegram(reason)
|
||||
return -1
|
||||
|
||||
"""buy limit"""
|
||||
@@ -65,7 +67,9 @@ class LiveInterface(GeneralInterface):
|
||||
|
||||
return limit_order.id
|
||||
except Exception as e:
|
||||
print("Nepodarilo se odeslat limitku", str(e))
|
||||
reason = "Nepodarilo se odeslat buy limitku:" + str(e) + format_exc()
|
||||
print(reason)
|
||||
send_to_telegram(reason)
|
||||
return -1
|
||||
|
||||
"""sell market"""
|
||||
@@ -87,7 +91,9 @@ class LiveInterface(GeneralInterface):
|
||||
|
||||
return market_order.id
|
||||
except Exception as e:
|
||||
print("Nepodarilo se odeslat sell", str(e))
|
||||
reason = "Nepodarilo se odeslat sell:" + str(e) + format_exc()
|
||||
print(reason)
|
||||
send_to_telegram(reason)
|
||||
return -1
|
||||
|
||||
"""sell limit"""
|
||||
@@ -112,8 +118,9 @@ class LiveInterface(GeneralInterface):
|
||||
return limit_order.id
|
||||
|
||||
except Exception as e:
|
||||
print("Nepodarilo se odeslat sell_l", str(e))
|
||||
#raise Exception(e)
|
||||
reason = "Nepodarilo se odeslat sell limitku:" + str(e) + format_exc()
|
||||
print(reason)
|
||||
send_to_telegram(reason)
|
||||
return -1
|
||||
|
||||
"""order replace"""
|
||||
@@ -136,7 +143,9 @@ class LiveInterface(GeneralInterface):
|
||||
if e.code == 42210000: return orderid
|
||||
else:
|
||||
##mozna tady proste vracet vzdy ok
|
||||
print("Neslo nahradit profitku. Problem",str(e))
|
||||
reason = "Neslo nahradit profitku. Problem:" + str(e) + format_exc()
|
||||
print(reason)
|
||||
send_to_telegram(reason)
|
||||
return -1
|
||||
#raise Exception(e)
|
||||
|
||||
@@ -150,7 +159,9 @@ class LiveInterface(GeneralInterface):
|
||||
#order doesnt exist
|
||||
if e.code == 40410000: return 0
|
||||
else:
|
||||
print("nepovedlo se zrusit objednavku", str(e))
|
||||
reason = "Nepovedlo se zrusit objednavku:" + str(e) + format_exc()
|
||||
print(reason)
|
||||
send_to_telegram(reason)
|
||||
#raise Exception(e)
|
||||
return -1
|
||||
|
||||
@@ -178,7 +189,9 @@ class LiveInterface(GeneralInterface):
|
||||
#list of Orders (orderlist[0].id)
|
||||
return orderlist
|
||||
except Exception as e:
|
||||
print("Chyba pri dotazeni objednávek.", str(e))
|
||||
reason = "Chyba pri dotazeni objednávek:" + str(e) + format_exc()
|
||||
print(reason)
|
||||
send_to_telegram(reason)
|
||||
#raise Exception (e)
|
||||
return -1
|
||||
|
||||
|
||||
1411
v2realbot/loader/agg_vect.ipynb
Normal file
1411
v2realbot/loader/agg_vect.ipynb
Normal file
File diff suppressed because it is too large
Load Diff
535
v2realbot/loader/aggregator_vectorized.py
Normal file
535
v2realbot/loader/aggregator_vectorized.py
Normal file
@@ -0,0 +1,535 @@
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from numba import jit
|
||||
from alpaca.data.historical import StockHistoricalDataClient
|
||||
from sqlalchemy import column
|
||||
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR
|
||||
from alpaca.data.requests import StockTradesRequest
|
||||
import time as time_module
|
||||
from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, zoneNY, send_to_telegram, fetch_calendar_data
|
||||
import pyarrow
|
||||
from traceback import format_exc
|
||||
from datetime import timedelta, datetime, time
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
import os
|
||||
import gzip
|
||||
import pickle
|
||||
import random
|
||||
from alpaca.data.models import BarSet, QuoteSet, TradeSet
|
||||
import v2realbot.utils.config_handler as cfh
|
||||
from v2realbot.enums.enums import BarType
|
||||
""""
|
||||
Module used for vectorized aggregation of trades.
|
||||
|
||||
Includes fetch (remote/cached) methods and numba aggregator function for TIME BASED, VOLUME BASED and DOLLAR BARS
|
||||
|
||||
"""""
|
||||
|
||||
def aggregate_trades(symbol: str, trades_df: pd.DataFrame, resolution: int, type: BarType = BarType.TIME):
|
||||
""""
|
||||
Accepts dataframe with trades keyed by symbol. Preparess dataframe to
|
||||
numpy and call nNumba optimized aggregator for given bar type. (time/volume/dollar)
|
||||
"""""
|
||||
trades_df = trades_df.loc[symbol]
|
||||
trades_df= trades_df.reset_index()
|
||||
ticks = trades_df[['timestamp', 'price', 'size']].to_numpy()
|
||||
# Extract the timestamps column (assuming it's the first column)
|
||||
timestamps = ticks[:, 0]
|
||||
# Convert the timestamps to Unix timestamps in seconds with microsecond precision
|
||||
unix_timestamps_s = np.array([ts.timestamp() for ts in timestamps], dtype='float64')
|
||||
# Replace the original timestamps in the NumPy array with the converted Unix timestamps
|
||||
ticks[:, 0] = unix_timestamps_s
|
||||
ticks = ticks.astype(np.float64)
|
||||
#based on type, specific aggregator function is called
|
||||
match type:
|
||||
case BarType.TIME:
|
||||
ohlcv_bars = generate_time_bars_nb(ticks, resolution)
|
||||
case BarType.VOLUME:
|
||||
ohlcv_bars = generate_volume_bars_nb(ticks, resolution)
|
||||
case BarType.DOLLAR:
|
||||
ohlcv_bars = generate_dollar_bars_nb(ticks, resolution)
|
||||
case _:
|
||||
raise ValueError("Invalid bar type. Supported types are 'time', 'volume' and 'dollar'.")
|
||||
# Convert the resulting array back to a DataFrame
|
||||
columns = ['time', 'open', 'high', 'low', 'close', 'volume', 'trades']
|
||||
if type == BarType.DOLLAR:
|
||||
columns.append('amount')
|
||||
ohlcv_df = pd.DataFrame(ohlcv_bars, columns=columns)
|
||||
ohlcv_df['time'] = pd.to_datetime(ohlcv_df['time'], unit='s')
|
||||
ohlcv_df.set_index('time', inplace=True)
|
||||
ohlcv_df.index = ohlcv_df.index.tz_localize('UTC').tz_convert(zoneNY)
|
||||
return ohlcv_df
|
||||
|
||||
def convert_dict_to_multiindex_df(tradesResponse):
    """Convert a raw trades dictionary (from cache or remote) to a MultiIndex DataFrame.

    :param tradesResponse: mapping of symbol -> list of raw trade dicts with
        Alpaca short keys (t, x, p, s, i, c, z).
    :return: DataFrame indexed by (symbol, timestamp), timestamps converted
        to the NY trading zone.
    """
    # Build one DataFrame per symbol and stack them under a MultiIndex.
    dfs = []
    for key, values in tradesResponse.items():
        df = pd.DataFrame(values)
        # Select and order the raw columns explicitly, then rename them
        # from Alpaca's short keys to descriptive names.
        df = df[['t', 'x', 'p', 's', 'i', 'c', 'z']]
        df.rename(columns={'t': 'timestamp', 'c': 'conditions', 'p': 'price', 's': 'size', 'x': 'exchange', 'z': 'tape', 'i': 'id'}, inplace=True)
        df['symbol'] = key  # Add ticker as a column
        # Convert 't' from string to datetime before setting it as an index.
        df['timestamp'] = pd.to_datetime(df['timestamp'])
        # Set the multi-level index using both 'symbol' and 'timestamp'.
        df.set_index(['symbol', 'timestamp'], inplace=True)
        df = df.tz_convert(zoneNY, level='timestamp')
        dfs.append(df)

    # Concatenate all DataFrames into a single DataFrame with MultiIndex.
    final_df = pd.concat(dfs)

    return final_df
|
||||
|
||||
def dict_to_df(tradesResponse, start, end, exclude_conditions = None, minsize = None):
    """Transform a raw trades dict to a tz-aware MultiIndex DataFrame with optional filtering.

    Filters to the [start, end] window when the request is narrower than the
    main session (e.g. only 9:30 to 15:40 is required), optionally drops
    trades carrying excluded condition codes, and optionally drops trades
    below a minimum size.

    :param tradesResponse: raw trades dict (cached or remote), see
        ``convert_dict_to_multiindex_df``.
    :param start: tz-aware window start.
    :param end: tz-aware window end.
    :param exclude_conditions: list of condition codes; a trade is dropped
        when ANY of its conditions is in this list.
    :param minsize: minimum trade size to keep.
    :return: filtered MultiIndex DataFrame.

    NOTE: assumes ``tradesResponse`` is a dict of raw data (cached/remote).
    """
    df = convert_dict_to_multiindex_df(tradesResponse)

    # REQUIRED FILTERING
    # Trim only when the requested window is narrower than the main session.
    if (start.time() > time(9, 30) or end.time() < time(16, 0)):
        print(f"filtrujeme {start.time()} {end.time()}")
        # Mask rows whose timestamps fall within the requested window.
        mask = (df.index.get_level_values('timestamp') >= start) & \
               (df.index.get_level_values('timestamp') <= end)
        df = df[mask]

    if exclude_conditions is not None:
        print(f"excluding conditions {exclude_conditions}")
        # Mask rows carrying ANY of the excluded condition codes...
        mask = df['conditions'].apply(lambda x: any(cond in exclude_conditions for cond in x))
        # ...and keep only the rows without them.
        df = df[~mask]

    if minsize is not None:
        print(f"minsize {minsize}")
        # Drop trades smaller than the requested minimum size.
        df = df[df['size'] >= minsize]
    return df
|
||||
|
||||
#fetches daily stock tradess - currently only main session is supported
|
||||
#fetches daily stock trades - currently only main session is supported
def fetch_daily_stock_trades_old(symbol, start, end, exclude_conditions = None, minsize = None, force_remote = False, max_retries=5, backoff_factor=1):
    """DEPRECATED variant — prefer ``fetch_daily_stock_trades``.

    Attempts to fetch stock trades with exponential backoff. Raises an
    exception if all retries fail.

    :param symbol: The stock symbol to fetch trades for.
    :param start: The start time for the trade data.
    :param end: The end time for the trade data.
    :param exclude_conditions: list of condition codes to exclude.
    :param minsize: minimum trade size to keep.
    :param force_remote: always fetch remotely, bypassing the cache.
    :param max_retries: Maximum number of retries.
    :param backoff_factor: Factor to determine the next sleep time.
    :return: trades DataFrame (empty DataFrame when the response is empty).
    :raises ConnectionError: if all retries fail.

    The trade cache is used only for full main-session requests (9:30-16:00).

    NOTE(review): the client is created with ``raw_data=False`` here while
    ``dict_to_df`` expects raw dicts; the newer ``fetch_daily_stock_trades``
    uses ``raw_data=True`` — confirm before reusing this variant.
    """
    use_daily_tradecache = False
    if (start.time() >= time(9, 30) and end.time() <= time(16, 0)):
        use_daily_tradecache = True
        # Cache files are keyed by the full-session open/close timestamps.
        filename_start = zoneNY.localize(datetime.combine(start.date(), time(9, 30)))
        filename_end = zoneNY.localize(datetime.combine(end.date(), time(16, 0)))
        daily_file = "TS" + str(symbol) + '-' + str(int(filename_start.timestamp())) + '-' + str(int(filename_end.timestamp())) + '.cache.gz'
        file_path = DATA_DIR + "/tradecache/" + daily_file

    if use_daily_tradecache and not force_remote and os.path.exists(file_path):
        print("Searching cache: " + daily_file)
        with gzip.open(file_path, 'rb') as fp:
            tradesResponse = pickle.load(fp)
        print("FOUND in CACHE", daily_file)
        # Response is always stored raw (dict); convert to a filtered DataFrame.
        return dict_to_df(tradesResponse, start, end, exclude_conditions, minsize)

    # Daily cache file doesn't exist (or remote was forced).
    else:
        print("NOT FOUND. Fetching from remote")
        client = StockHistoricalDataClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)
        stockTradeRequest = StockTradesRequest(symbol_or_symbols=symbol, start=start, end=end)
        last_exception = None

        for attempt in range(max_retries):
            try:
                tradesResponse = client.get_stock_trades(stockTradeRequest)
                is_empty = not tradesResponse[symbol]
                print(f"Remote fetched: {is_empty=}", start, end)
                # Don't cache today's data while the market is still open.
                if use_daily_tradecache and not is_empty:
                    if (start < datetime.now().astimezone(zoneNY) < end):
                        print("not saving trade cache, market still open today")
                    else:
                        with gzip.open(file_path, 'wb') as fp:
                            pickle.dump(tradesResponse, fp)
                        print("Saving to Trade CACHE", file_path)
                # BUGFIX: apply the same condition/size filters as the cache path.
                return pd.DataFrame() if is_empty else dict_to_df(tradesResponse, start, end, exclude_conditions, minsize)
            except Exception as e:
                print(f"Attempt {attempt + 1} failed: {e}")
                last_exception = e
                time_module.sleep(backoff_factor * (2 ** attempt))

        print("All attempts to fetch data failed.")
        raise ConnectionError(f"Failed to fetch stock trades after {max_retries} retries. Last exception: {str(last_exception)} and {format_exc()}")
|
||||
|
||||
def fetch_daily_stock_trades(symbol, start, end, exclude_conditions=None, minsize=None, force_remote=False, max_retries=5, backoff_factor=1):
    """Fetch stock trades for one day, from the local cache or remotely with retries.

    Attempts to fetch stock trades either from cache or remote. When remote,
    it uses a retry mechanism with exponential backoff plus random jitter,
    and stores the data to cache if not already there. ``force_remote``
    always uses remote data, thus refreshing the cache for these dates.

    :param symbol: The stock symbol to fetch trades for.
    :param start: The start time for the trade data.
    :param end: The end time for the trade data.
    :param exclude_conditions: list of string condition codes to exclude.
    :param minsize: minimum size of trade to be included in the data.
    :param force_remote: always use remote data and refresh the cache.
    :param max_retries: Maximum number of retries.
    :param backoff_factor: Factor to determine the next sleep time.
    :return: trades DataFrame (empty DataFrame when the response is empty).
    :raises ConnectionError: if all retries fail.

    The trade cache is used only for main-session requests (9:30-16:00).
    TODO: store whole days (e.g. BAC-20240203.cache.gz) and filter the
    main/extended session from that; currently only the main session is
    stored as SYMBOL-<open_ts>-<close_ts>.cache.gz.
    """
    # Determine if the requested times fall within the main session.
    in_main_session = (time(9, 30) <= start.time() < time(16, 0)) and (time(9, 30) <= end.time() <= time(16, 0))
    file_path = ''

    if in_main_session:
        # Cache files are keyed by the full-session open/close timestamps.
        filename_start = zoneNY.localize(datetime.combine(start.date(), time(9, 30)))
        filename_end = zoneNY.localize(datetime.combine(end.date(), time(16, 0)))
        daily_file = f"{symbol}-{int(filename_start.timestamp())}-{int(filename_end.timestamp())}.cache.gz"
        file_path = f"{DATA_DIR}/tradecache/{daily_file}"
        if not force_remote and os.path.exists(file_path):
            print("Searching cache: " + daily_file)
            with gzip.open(file_path, 'rb') as fp:
                tradesResponse = pickle.load(fp)
            print("FOUND in CACHE", daily_file)
            return dict_to_df(tradesResponse, start, end, exclude_conditions, minsize)

    print("NOT FOUND. Fetching from remote")
    client = StockHistoricalDataClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=True)
    stockTradeRequest = StockTradesRequest(symbol_or_symbols=symbol, start=start, end=end)
    last_exception = None

    for attempt in range(max_retries):
        try:
            tradesResponse = client.get_stock_trades(stockTradeRequest)
            is_empty = not tradesResponse[symbol]
            print(f"Remote fetched: {is_empty=}", start, end)
            if in_main_session and not is_empty:
                current_time = datetime.now().astimezone(zoneNY)
                if not (start < current_time < end):
                    with gzip.open(file_path, 'wb') as fp:
                        pickle.dump(tradesResponse, fp)
                        print("Saving to Trade CACHE", file_path)
                else:  # Don't save the cache if the market is still open
                    print("Not saving trade cache, market still open today")
            # BUGFIX: apply the same condition/size filters as the cache path
            # (previously the filters were silently dropped on remote fetches).
            return pd.DataFrame() if is_empty else dict_to_df(tradesResponse, start, end, exclude_conditions, minsize)
        except Exception as e:
            print(f"Attempt {attempt + 1} failed: {e}")
            last_exception = e
            # Exponential backoff with random jitter to avoid thundering herd.
            time_module.sleep(backoff_factor * (2 ** attempt) + random.uniform(0, 1))

    print("All attempts to fetch data failed.")
    raise ConnectionError(f"Failed to fetch stock trades after {max_retries} retries. Last exception: {str(last_exception)} and {format_exc()}")
|
||||
|
||||
|
||||
# Sentinel so a caller's explicit ``exclude_conditions=None`` (meaning "no
# exclusions") stays distinguishable from "use the configured default".
_EXCLUDE_CONDITIONS_DEFAULT = object()

def fetch_trades_parallel(symbol, start_date, end_date, exclude_conditions = _EXCLUDE_CONDITIONS_DEFAULT, minsize = 100, force_remote = False):
    """Fetch trades for each market day in [start_date, end_date] in parallel.

    Fetches trades during market hours (9:30-16:00) for every market-open day
    in the window and concatenates them into a single DataFrame.

    :param symbol: Stock symbol.
    :param start_date: Start date as tz-aware datetime.
    :param end_date: End date as tz-aware datetime.
    :param exclude_conditions: condition codes to exclude; defaults to the
        AGG_EXCLUDED_TRADES config value, resolved at call time (the original
        resolved it once at import time — a Python default-argument pitfall).
    :param minsize: minimum trade size to keep.
    :param force_remote: bypass the trade cache.
    :return: DataFrame containing all trades from start_date to end_date
        (empty DataFrame when every per-day fetch failed or no market days).
    """
    if exclude_conditions is _EXCLUDE_CONDITIONS_DEFAULT:
        exclude_conditions = cfh.config_handler.get_val('AGG_EXCLUDED_TRADES')

    futures = []
    results = []

    market_open_days = fetch_calendar_data(start_date, end_date)
    day_count = len(market_open_days)
    print("Contains", day_count, " market days")
    # Heuristic: half the days to process, but at least 5 and no more than 10 workers.
    max_workers = min(10, max(5, day_count // 2))

    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        for market_day in market_open_days:
            interval_from = zoneNY.localize(market_day.open)
            interval_to = zoneNY.localize(market_day.close)

            # Clip to the requested window when it starts later / ends earlier
            # than the session boundaries.
            start = max(start_date, interval_from)
            end = min(end_date, interval_to)

            future = executor.submit(fetch_daily_stock_trades, symbol, start, end, exclude_conditions, minsize, force_remote)
            futures.append(future)

    for future in futures:
        try:
            result = future.result()
            results.append(result)
        except Exception as e:
            # Best-effort: a failed day is reported but doesn't abort the rest.
            print(f"Error fetching data for a day: {e}")

    # ROBUSTNESS: pd.concat raises on an empty list; return an empty frame instead.
    if not results:
        return pd.DataFrame()
    return pd.concat(results, ignore_index=False)
|
||||
|
||||
@jit(nopython=True)
def generate_dollar_bars_nb(ticks, amount_per_bar):
    """Generate dollar-value ("dollar") OHLCV bars from ticks.

    ticks: 2-D float64 array with columns [unix_seconds, price, size].
    amount_per_bar: target traded dollar amount per bar.

    Bars never aggregate trades from different days; the day boundary is
    computed as floor(ts / 86400), i.e. split at UTC midnight. That is fine
    for the main session, but extended hours would need preprocessing with a
    session column instead.

    When a trade is split into multiple bars it is counted as a trade in
    each of those bars. (Alternative — distributing the trade count
    proportionally by weight — is not implemented.)

    Returns an ndarray with rows
    [time, open, high, low, close, volume, trades, amount].
    """
    ohlcv_bars = []
    remaining_amount = amount_per_bar

    # Initialize bar values based on the first tick to avoid uninitialized values
    open_price = ticks[0, 1]
    high_price = ticks[0, 1]
    low_price = ticks[0, 1]
    close_price = ticks[0, 1]
    volume = 0
    trades_count = 0
    current_day = np.floor(ticks[0, 0] / 86400)  # Calculate the initial day from the first tick timestamp
    bar_time = ticks[0, 0]  # Initialize bar time with the time of the first tick

    for tick in ticks:
        tick_time = tick[0]
        price = tick[1]
        tick_volume = tick[2]
        tick_amount = price * tick_volume
        tick_day = np.floor(tick_time / 86400)  # Calculate the day of the current tick

        # Check if the new tick is from a different day, then close the current bar
        if tick_day != current_day:
            if trades_count > 0:
                ohlcv_bars.append([bar_time, open_price, high_price, low_price, close_price, volume, trades_count, amount_per_bar])
            # Reset for the new day using the current tick data
            open_price = price
            high_price = price
            low_price = price
            close_price = price
            volume = 0
            trades_count = 0
            remaining_amount = amount_per_bar
            current_day = tick_day
            bar_time = tick_time

        # Start new bar if needed because of the dollar value
        while tick_amount > 0:
            if tick_amount < remaining_amount:
                # Add the entire tick to the current bar
                high_price = max(high_price, price)
                low_price = min(low_price, price)
                close_price = price
                volume += tick_volume
                remaining_amount -= tick_amount
                trades_count += 1
                tick_amount = 0
            else:
                # Calculate the amount of volume that fits within the remaining dollar amount
                volume_to_add = remaining_amount / price
                volume += volume_to_add  # Update the volume here before appending and resetting

                # NOTE(review): the splitting tick's price is NOT folded into
                # high/low/close of the bar being closed here (only its volume
                # is) — confirm this is intended.
                # Append the partially filled bar to the list
                ohlcv_bars.append([bar_time, open_price, high_price, low_price, close_price, volume, trades_count + 1, amount_per_bar])

                # Fill the current bar and continue with a new bar
                tick_volume -= volume_to_add
                tick_amount -= remaining_amount

                # Reset bar values for the new bar using the current tick data
                open_price = price
                high_price = price
                low_price = price
                close_price = price
                volume = 0  # Reset volume for the new bar
                trades_count = 0
                remaining_amount = amount_per_bar

                # Increment bar time if splitting a trade: if a remainder of the
                # trade is left, bump the bar time by 1e-6 s (one microsecond,
                # despite the original "nanosecond" wording) so successive bars
                # get strictly increasing timestamps.
                if tick_volume > 0:
                    bar_time = tick_time + 1e-6
                else:
                    bar_time = tick_time  # otherwise use the tick time as-is
                #bar_time = tick_time

    # Add the last bar if it contains any trades
    if trades_count > 0:
        ohlcv_bars.append([bar_time, open_price, high_price, low_price, close_price, volume, trades_count, amount_per_bar])

    return np.array(ohlcv_bars)
|
||||
|
||||
|
||||
@jit(nopython=True)
def generate_volume_bars_nb(ticks, volume_per_bar):
    """Generate volume based OHLCV bars from ticks.

    ticks: 2-D float64 array with columns [unix_seconds, price, size].
    volume_per_bar: target traded share volume per bar.

    NOTE: day split is done here (doesn't aggregate trades from different
    days) but realized from UTC midnight (floor(ts / 86400)) — OK for the
    main session, needs rework for extended hours via preprocessing with a
    session column.

    When a trade is split into multiple bars it is counted as a trade in
    each of those bars. (Alternative — distributing the trade count
    proportionally by weight — is not implemented.)

    Returns an ndarray with rows [time, open, high, low, close, volume, trades].
    """
    ohlcv_bars = []
    remaining_volume = volume_per_bar

    # Initialize bar values based on the first tick to avoid uninitialized values
    open_price = ticks[0, 1]
    high_price = ticks[0, 1]
    low_price = ticks[0, 1]
    close_price = ticks[0, 1]
    volume = 0
    trades_count = 0
    current_day = np.floor(ticks[0, 0] / 86400)  # Calculate the initial day from the first tick timestamp
    bar_time = ticks[0, 0]  # Initialize bar time with the time of the first tick

    for tick in ticks:
        tick_time = tick[0]
        price = tick[1]
        tick_volume = tick[2]
        tick_day = np.floor(tick_time / 86400)  # Calculate the day of the current tick

        # Check if the new tick is from a different day, then close the current bar
        if tick_day != current_day:
            if trades_count > 0:
                ohlcv_bars.append([bar_time, open_price, high_price, low_price, close_price, volume, trades_count])
            # Reset for the new day using the current tick data
            open_price = price
            high_price = price
            low_price = price
            close_price = price
            volume = 0
            trades_count = 0
            remaining_volume = volume_per_bar
            current_day = tick_day
            bar_time = tick_time  # Update bar time to the current tick time

        # Start new bar if needed because of the volume
        while tick_volume > 0:
            if tick_volume < remaining_volume:
                # Add the entire tick to the current bar
                high_price = max(high_price, price)
                low_price = min(low_price, price)
                close_price = price
                volume += tick_volume
                remaining_volume -= tick_volume
                trades_count += 1
                tick_volume = 0
            else:
                # Fill the current bar and continue with a new bar
                volume_to_add = remaining_volume
                volume += volume_to_add
                tick_volume -= volume_to_add
                trades_count += 1
                # NOTE(review): the splitting tick's price is NOT folded into
                # high/low/close of the bar being closed here (only its volume
                # is) — confirm this is intended.
                # Append the completed bar to the list
                ohlcv_bars.append([bar_time, open_price, high_price, low_price, close_price, volume, trades_count])

                # Reset bar values for the new bar using the current tick data
                open_price = price
                high_price = price
                low_price = price
                close_price = price
                volume = 0
                trades_count = 0
                remaining_volume = volume_per_bar
                # Increment bar time if splitting a trade: if a remainder of the
                # trade is left, bump the bar time by 1e-6 s (one microsecond,
                # despite the original "nanosecond" wording) so successive bars
                # get strictly increasing timestamps.
                if tick_volume > 0:
                    bar_time = tick_time + 1e-6
                else:
                    bar_time = tick_time  # otherwise use the tick time as-is

    # Add the last bar if it contains any trades
    if trades_count > 0:
        ohlcv_bars.append([bar_time, open_price, high_price, low_price, close_price, volume, trades_count])

    return np.array(ohlcv_bars)
|
||||
|
||||
@jit(nopython=True)
def generate_time_bars_nb(ticks, resolution):
    """Generate time-based OHLCV bars from ticks.

    ticks: 2-D float64 array with columns [unix_seconds, price, size].
    resolution: bar length in seconds.

    Only bars that contain at least one trade are emitted (no empty/forward
    filled bars). Returns an ndarray with rows
    [time, open, high, low, close, volume, trades], where time is the
    bar's floored start timestamp in unix seconds.
    """
    # Initialize the start and end time (both floored to the bar grid).
    start_time = np.floor(ticks[0, 0] / resolution) * resolution
    end_time = np.floor(ticks[-1, 0] / resolution) * resolution
    # NOTE(review): end_time is currently unused (a pre-sized-array approach
    # was abandoned in favor of appending only bars that contain trades).

    # # Calculate number of bars
    # num_bars = int((end_time - start_time) // resolution + 1)

    # Using a list to append data only when trades exist
    ohlcv_bars = []

    # Variables to track the current bar; index -1 means "no bar open yet".
    current_bar_index = -1
    open_price = 0
    high_price = -np.inf
    low_price = np.inf
    close_price = 0
    volume = 0
    trades_count = 0

    for tick in ticks:
        # Floor the tick onto the bar grid.
        tick_time = np.floor(tick[0] / resolution) * resolution
        price = tick[1]
        tick_volume = tick[2]

        # Check if the tick belongs to a new bar
        if tick_time != start_time + current_bar_index * resolution:
            if current_bar_index >= 0 and trades_count > 0:  # Save the previous bar if trades happened
                ohlcv_bars.append([start_time + current_bar_index * resolution, open_price, high_price, low_price, close_price, volume, trades_count])

            # Reset bar values for the bar this tick falls into.
            current_bar_index = int((tick_time - start_time) / resolution)
            open_price = price
            high_price = price
            low_price = price
            volume = 0
            trades_count = 0

        # Update the OHLCV values for the current bar
        high_price = max(high_price, price)
        low_price = min(low_price, price)
        close_price = price
        volume += tick_volume
        trades_count += 1

    # Save the last processed bar
    if trades_count > 0:
        ohlcv_bars.append([start_time + current_bar_index * resolution, open_price, high_price, low_price, close_price, volume, trades_count])

    return np.array(ohlcv_bars)
|
||||
|
||||
# Example usage
if __name__ == '__main__':
    # No CLI entry point; usage examples live in the notebook below.
    pass
    #example in agg_vect.ipynb
|
||||
@@ -690,7 +690,8 @@ def _generate_analysis(analyzerInputs: AnalyzerInputs):
|
||||
|
||||
if res == 0: return StreamingResponse(stream, media_type="image/png")
|
||||
elif res < 0:
|
||||
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {res}:{id}")
|
||||
print("Error when generating analysis: ",str(stream))
|
||||
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {res}:{stream}")
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {str(e)}" + format_exc())
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@ from uuid import UUID
|
||||
from typing import Any, List, Tuple
|
||||
from uuid import UUID, uuid4
|
||||
from v2realbot.enums.enums import Moddus, SchedulerStatus, RecordType, StartBarAlign, Mode, Account, OrderSide
|
||||
from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest
|
||||
from v2realbot.common.model import RunManagerRecord, StrategyInstance, RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem, InstantIndicator, DataTablesRequest, Market
|
||||
from v2realbot.utils.utils import validate_and_format_time, AttributeDict, zoneNY, zonePRG, safe_get, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram, concatenate_weekdays, transform_data
|
||||
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
|
||||
from datetime import datetime
|
||||
@@ -116,7 +116,8 @@ def initialize_jobs(run_manager_records: RunManagerRecord = None):
|
||||
scheduler.add_job(start_runman_record, start_trigger, id=f"scheduler_start_{record.id}", args=[record.id])
|
||||
scheduler.add_job(stop_runman_record, stop_trigger, id=f"scheduler_stop_{record.id}", args=[record.id])
|
||||
|
||||
#scheduler.add_job(print_hello, 'interval', seconds=10, id=f"scheduler_testinterval")
|
||||
#scheduler.add_job(print_hello, 'interval', seconds=10, id=
|
||||
# f"scheduler_testinterval")
|
||||
scheduled_jobs = scheduler.get_jobs()
|
||||
print(f"APS jobs refreshed ({len(scheduled_jobs)})")
|
||||
current_jobs_dict = format_apscheduler_jobs(scheduled_jobs)
|
||||
@@ -124,9 +125,9 @@ def initialize_jobs(run_manager_records: RunManagerRecord = None):
|
||||
return 0, current_jobs_dict
|
||||
|
||||
#zastresovaci funkce resici error handling a printing
|
||||
def start_runman_record(id: UUID, market = "US", debug_date = None):
|
||||
def start_runman_record(id: UUID, debug_date = None):
|
||||
record = None
|
||||
res, record, msg = _start_runman_record(id=id, market=market, debug_date=debug_date)
|
||||
res, record, msg = _start_runman_record(id=id, debug_date=debug_date)
|
||||
|
||||
if record is not None:
|
||||
market_time_now = datetime.now().astimezone(zoneNY) if debug_date is None else debug_date
|
||||
@@ -165,8 +166,8 @@ def update_runman_record(record: RunManagerRecord):
|
||||
err_msg= f"STOP: Error updating {record.id} errir {set} with values {record}"
|
||||
return -2, err_msg#toto stopne zpracovani dalsich zaznamu pri chybe, zvazit continue
|
||||
|
||||
def stop_runman_record(id: UUID, market = "US", debug_date = None):
|
||||
res, record, msg = _stop_runman_record(id=id, market=market, debug_date=debug_date)
|
||||
def stop_runman_record(id: UUID, debug_date = None):
|
||||
res, record, msg = _stop_runman_record(id=id, debug_date=debug_date)
|
||||
#results : 0 - ok, -1 not running/already running/not specific, -2 error
|
||||
|
||||
#report vzdy zapiseme do history, pokud je record not None, pripadna chyba se stala po dotazeni recordu
|
||||
@@ -196,7 +197,7 @@ def stop_runman_record(id: UUID, market = "US", debug_date = None):
|
||||
print(f"STOP JOB: {id} FINISHED")
|
||||
|
||||
#start function that is called from the job
|
||||
def _start_runman_record(id: UUID, market = "US", debug_date = None):
|
||||
def _start_runman_record(id: UUID, debug_date = None):
|
||||
print(f"Start scheduled record {id}")
|
||||
|
||||
record : RunManagerRecord = None
|
||||
@@ -207,15 +208,16 @@ def _start_runman_record(id: UUID, market = "US", debug_date = None):
|
||||
|
||||
record = result
|
||||
|
||||
if market is not None and market == "US":
|
||||
res, sada = sch.get_todays_market_times(market=market, debug_date=debug_date)
|
||||
if record.market == Market.US or record.market == Market.CRYPTO:
|
||||
res, sada = sch.get_todays_market_times(market=record.market, debug_date=debug_date)
|
||||
if res == 0:
|
||||
market_time_now, market_open_datetime, market_close_datetime = sada
|
||||
print(f"OPEN:{market_open_datetime} CLOSE:{market_close_datetime}")
|
||||
else:
|
||||
sada = f"Market {market} Error getting market times (CLOSED): " + str(sada)
|
||||
sada = f"Market {record.market} Error getting market times (CLOSED): " + str(sada)
|
||||
return res, record, sada
|
||||
|
||||
else:
|
||||
print("Market type is unknown.")
|
||||
if cs.is_stratin_running(record.strat_id):
|
||||
return -1, record, f"Stratin {record.strat_id} is already running"
|
||||
|
||||
@@ -229,7 +231,7 @@ def _start_runman_record(id: UUID, market = "US", debug_date = None):
|
||||
return 0, record, record.runner_id
|
||||
|
||||
#stop function that is called from the job
|
||||
def _stop_runman_record(id: UUID, market = "US", debug_date = None):
|
||||
def _stop_runman_record(id: UUID, debug_date = None):
|
||||
record = None
|
||||
#get all records
|
||||
print(f"Stopping record {id}")
|
||||
@@ -304,5 +306,5 @@ if __name__ == "__main__":
|
||||
# print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {result}")
|
||||
|
||||
|
||||
res, result = stop_runman_record(id=id, market = "US", debug_date = debug_date)
|
||||
res, result = stop_runman_record(id=id, debug_date = debug_date)
|
||||
print(f"CALL FINISHED, with {debug_date} RESULT: {res}, {result}")
|
||||
@@ -2,10 +2,10 @@ import json
|
||||
import datetime
|
||||
import v2realbot.controller.services as cs
|
||||
import v2realbot.controller.run_manager as rm
|
||||
from v2realbot.common.model import RunnerView, RunManagerRecord, StrategyInstance, Runner, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs
|
||||
from v2realbot.common.model import RunnerView, RunManagerRecord, StrategyInstance, Runner, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveViewPagination, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator, DataTablesRequest, AnalyzerInputs, Market
|
||||
from uuid import uuid4, UUID
|
||||
from v2realbot.utils.utils import json_serial, send_to_telegram, zoneNY, zonePRG, fetch_calendar_data
|
||||
from datetime import datetime, timedelta
|
||||
from v2realbot.utils.utils import json_serial, send_to_telegram, zoneNY, zonePRG, zoneUTC, fetch_calendar_data
|
||||
from datetime import datetime, timedelta, time
|
||||
from traceback import format_exc
|
||||
from rich import print
|
||||
import requests
|
||||
@@ -18,9 +18,18 @@ from v2realbot.config import WEB_API_KEY
|
||||
#naplanovany jako samostatni job a triggerován pouze jednou v daný čas pro start a stop
|
||||
#novy kod v aps_scheduler.py
|
||||
|
||||
def get_todays_market_times(market = "US", debug_date = None):
|
||||
def is_US_market_day(date):
|
||||
cal_dates = fetch_calendar_data(date, date)
|
||||
if len(cal_dates) == 0:
|
||||
print("Today is not a market day.")
|
||||
return False, cal_dates
|
||||
else:
|
||||
print("Market is open")
|
||||
return True, cal_dates
|
||||
|
||||
def get_todays_market_times(market, debug_date = None):
|
||||
try:
|
||||
if market == "US":
|
||||
if market == Market.US:
|
||||
#zjistit vsechny podminky - mozna loopovat - podminky jsou vlevo
|
||||
if debug_date is not None:
|
||||
nowNY = debug_date
|
||||
@@ -28,17 +37,20 @@ def get_todays_market_times(market = "US", debug_date = None):
|
||||
nowNY = datetime.now().astimezone(zoneNY)
|
||||
nowNY_date = nowNY.date()
|
||||
#is market open - nyni pouze US
|
||||
cal_dates = fetch_calendar_data(nowNY_date, nowNY_date)
|
||||
|
||||
if len(cal_dates) == 0:
|
||||
print("No Market Day today")
|
||||
return -1, "Market Closed"
|
||||
stat, calendar_dates = is_US_market_day(nowNY_date)
|
||||
if stat:
|
||||
#zatim podpora pouze main session
|
||||
|
||||
#pouze main session
|
||||
market_open_datetime = zoneNY.localize(cal_dates[0].open)
|
||||
market_close_datetime = zoneNY.localize(cal_dates[0].close)
|
||||
return 0, (nowNY, market_open_datetime, market_close_datetime)
|
||||
market_open_datetime = zoneNY.localize(calendar_dates[0].open)
|
||||
market_close_datetime = zoneNY.localize(calendar_dates[0].close)
|
||||
return 0, (nowNY, market_open_datetime, market_close_datetime)
|
||||
else:
|
||||
return -1, "Market is closed."
|
||||
elif market == Market.CRYPTO:
|
||||
now_market_datetime = datetime.now().astimezone(zoneUTC)
|
||||
market_open_datetime = datetime.combine(datetime.now(), time.min)
|
||||
matket_close_datetime = datetime.combine(datetime.now(), time.max)
|
||||
return 0, (now_market_datetime, market_open_datetime, matket_close_datetime)
|
||||
else:
|
||||
return -1, "Market not supported"
|
||||
except Exception as e:
|
||||
|
||||
@@ -347,6 +347,7 @@
|
||||
<th>testlist_id</th>
|
||||
<th>Running</th>
|
||||
<th>RunnerId</th>
|
||||
<th>Market</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody></tbody>
|
||||
@@ -667,14 +668,14 @@
|
||||
</div>
|
||||
<div class="form-group mt-3">
|
||||
<label for="logHere" class="form-label">Log</label>
|
||||
<div id="log-container">
|
||||
<pre id="log-content"></pre>
|
||||
<div id="log-container"style="height:700px;border:1px solid black;">
|
||||
<!-- <pre id="log-content"></pre> -->
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-primary" id="logRefreshButton" value="Refresh">Refresh</button>
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
<button type="button" class="btn btn-secondary" id="closeLogModal" data-bs-dismiss="modal">Close</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -704,6 +705,10 @@
|
||||
<label for="stratvars" class="form-label">Stratvars</label>
|
||||
<textarea class="form-control" rows="8" id="editstratvars" name="stratvars"></textarea>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="stratvars" class="form-label">Transferables</label>
|
||||
<textarea class="form-control" rows="8" id="edittransferables" name="stratvars"></textarea>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="strat_json" class="form-label">Strat JSON</label>
|
||||
<textarea class="form-control" rows="6" id="editstratjson" name="stratjson"></textarea>
|
||||
@@ -1145,7 +1150,7 @@
|
||||
<script src="/static/js/config.js?v=1.04"></script>
|
||||
<!-- tady zacina polska docasna lokalizace -->
|
||||
<!-- <script type="text/javascript" src="https://unpkg.com/lightweight-charts/dist/lightweight-charts.standalone.production.js"></script> -->
|
||||
<script type="text/javascript" src="/static/js/libs/lightweightcharts/lightweight-charts.standalone.production410.js"></script>
|
||||
<script type="text/javascript" src="/static/js/libs/lightweightcharts/lightweight-charts.standalone.production413.js"></script>
|
||||
<script src="/static/js/dynamicbuttons.js?v=1.05"></script>
|
||||
|
||||
|
||||
@@ -1162,9 +1167,9 @@
|
||||
<!-- <script src="/static/js/archivetables.js?v=1.05"></script> -->
|
||||
<!-- archiveTables split into separate files -->
|
||||
<script src="/static/js/tables/archivetable/init.js?v=1.12"></script>
|
||||
<script src="/static/js/tables/archivetable/functions.js?v=1.10"></script>
|
||||
<script src="/static/js/tables/archivetable/functions.js?v=1.11"></script>
|
||||
<script src="/static/js/tables/archivetable/modals.js?v=1.07"></script>
|
||||
<script src="/static/js/tables/archivetable/handlers.js?v=1.08"></script>
|
||||
<script src="/static/js/tables/archivetable/handlers.js?v=1.11"></script>
|
||||
|
||||
<!-- Runmanager functionality -->
|
||||
<script src="/static/js/tables/runmanager/init.js?v=1.1"></script>
|
||||
@@ -1174,7 +1179,7 @@
|
||||
|
||||
<script src="/static/js/livewebsocket.js?v=1.02"></script>
|
||||
<script src="/static/js/realtimechart.js?v=1.02"></script>
|
||||
<script src="/static/js/mytables.js?v=1.02"></script>
|
||||
<script src="/static/js/mytables.js?v=1.03"></script>
|
||||
<script src="/static/js/testlist.js?v=1.01"></script>
|
||||
<script src="/static/js/ml.js?v=1.02"></script>
|
||||
<script src="/static/js/common.js?v=1.01"></script>
|
||||
|
||||
@@ -638,7 +638,7 @@ $(document).ready(function () {
|
||||
else{
|
||||
$('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
|
||||
}
|
||||
|
||||
$('#edittransferables').val(JSON.stringify(row.transferables,null,2));
|
||||
|
||||
$('#editstratjson').val(row.strat_json);
|
||||
}
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -90,9 +90,55 @@ $(document).ready(function () {
|
||||
|
||||
monaco.languages.register({ id: 'python' });
|
||||
monaco.languages.register({ id: 'json' });
|
||||
//Register mylogs language
|
||||
monaco.languages.register({ id: 'mylogs' });
|
||||
// Register the TOML language
|
||||
monaco.languages.setLanguageConfiguration('mylogs', {
|
||||
comments: {
|
||||
lineComment: '//', // Adjust if your logs use a different comment symbol
|
||||
},
|
||||
brackets: [['[', ']'], ['{', '}']], // Array and object brackets
|
||||
autoClosingPairs: [
|
||||
{ open: '{', close: '}', notIn: ['string'] },
|
||||
{ open: '"', close: '"', notIn: ['string', 'comment'] },
|
||||
{ open: "'", close: "'", notIn: ['string', 'comment'] },
|
||||
],
|
||||
});
|
||||
monaco.languages.setMonarchTokensProvider('mylogs', {
|
||||
tokenizer: {
|
||||
root: [
|
||||
[/#.*/, 'comment'], // Comments (if applicable)
|
||||
|
||||
// Timestamps
|
||||
[/\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d+/, 'timestamp'],
|
||||
|
||||
// Log Levels
|
||||
[/\b(INFO|DEBUG|WARNING|ERROR|CRITICAL)\b/, 'log-level'],
|
||||
|
||||
// Strings
|
||||
[/".*"/, 'string'],
|
||||
[/'.*'/, 'string'],
|
||||
|
||||
// Key-Value Pairs
|
||||
[/[A-Za-z_]+\s*:/, 'key'],
|
||||
[/-?\d+\.\d+/, 'number.float'], // Floating-point
|
||||
[/-?\d+/, 'number.integer'], // Integers
|
||||
[/\btrue\b/, 'boolean.true'],
|
||||
[/\bfalse\b/, 'boolean.false'],
|
||||
|
||||
// Other Words and Symbols
|
||||
[/[A-Za-z_]+/, 'identifier'],
|
||||
[/[ \t\r\n]+/, 'white'],
|
||||
[/[\[\]{}(),]/, 'delimiter'], // Expand if more delimiters exist
|
||||
]
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
monaco.languages.register({ id: 'toml' });
|
||||
|
||||
|
||||
|
||||
// Define the TOML language configuration
|
||||
monaco.languages.setLanguageConfiguration('toml', {
|
||||
comments: {
|
||||
|
||||
@@ -6,6 +6,7 @@ let editor_diff_arch1
|
||||
let editor_diff_arch2
|
||||
var archData = null
|
||||
var batchHeaders = []
|
||||
var editorLog = null
|
||||
|
||||
function refresh_arch_and_callback(row, callback) {
|
||||
//console.log("entering refresh")
|
||||
@@ -462,7 +463,7 @@ function display_batch_report(batch_id) {
|
||||
|
||||
function refresh_logfile() {
|
||||
logfile = $("#logFileSelect").val()
|
||||
lines = 700
|
||||
lines = 1200
|
||||
$.ajax({
|
||||
url:"/log?lines="+lines+"&logfile="+logfile,
|
||||
beforeSend: function (xhr) {
|
||||
@@ -472,13 +473,34 @@ function refresh_logfile() {
|
||||
contentType: "application/json",
|
||||
dataType: "json",
|
||||
success:function(response){
|
||||
if (editorLog) {
|
||||
editorLog.dispose();
|
||||
}
|
||||
if (response.lines.length == 0) {
|
||||
$('#log-content').html("no records");
|
||||
value = "no records";
|
||||
// $('#log-content').html("no records");
|
||||
}
|
||||
else {
|
||||
var escapedLines = response.lines.map(line => escapeHtml(line));
|
||||
$('#log-content').html(escapedLines.join('\n'));
|
||||
}
|
||||
//console.log(response.lines)
|
||||
//var escapedLines = response.lines.map(line => escapeHtml(line));
|
||||
value = response.lines.join('\n')
|
||||
// $('#log-content').html(escapedLines.join('\n'));
|
||||
}
|
||||
require(["vs/editor/editor.main"], () => {
|
||||
editorLog = monaco.editor.create(document.getElementById('log-container'), {
|
||||
value: value,
|
||||
language: 'mylogs',
|
||||
theme: 'tomlTheme-dark',
|
||||
automaticLayout: true,
|
||||
readOnly: true
|
||||
});
|
||||
});
|
||||
// Focus at the end of the file:
|
||||
const model = editorLog.getModel();
|
||||
const lastLineNumber = model.getLineCount();
|
||||
const lastLineColumn = model.getLineMaxColumn(lastLineNumber);
|
||||
editorLog.setPosition({ lineNumber: lastLineNumber, column: lastLineColumn });
|
||||
editorLog.revealPosition({ lineNumber: lastLineNumber, column: lastLineColumn });
|
||||
},
|
||||
error: function(xhr, status, error) {
|
||||
var err = eval("(" + xhr.responseText + ")");
|
||||
|
||||
@@ -265,8 +265,8 @@ $(document).ready(function () {
|
||||
|
||||
$('#diff_first').text(record1.name);
|
||||
$('#diff_second').text(record2.name);
|
||||
$('#diff_first_id').text(data1.id);
|
||||
$('#diff_second_id').text(data2.id);
|
||||
$('#diff_first_id').text(data1.id + ' Batch: ' + data1.batch_id);
|
||||
$('#diff_second_id').text(data2.id + ' Batch: ' + data2.batch_id);
|
||||
|
||||
//monaco
|
||||
require(["vs/editor/editor.main"], () => {
|
||||
@@ -358,8 +358,13 @@ $(document).ready(function () {
|
||||
})
|
||||
});
|
||||
|
||||
$('#closeLogModal').click(function () {
|
||||
editorLog.dispose()
|
||||
});
|
||||
|
||||
//button to query log
|
||||
$('#logRefreshButton').click(function () {
|
||||
editorLog.dispose()
|
||||
refresh_logfile()
|
||||
});
|
||||
|
||||
@@ -445,7 +450,7 @@ $(document).ready(function () {
|
||||
$('#editstratvars').val(JSON.stringify(row.stratvars,null,2));
|
||||
}
|
||||
|
||||
|
||||
$('#edittransferables').val(JSON.stringify(row.transferables,null,2));
|
||||
$('#editstratjson').val(row.strat_json);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -45,7 +45,8 @@ function initialize_runmanagerRecords() {
|
||||
{data: 'valid_to', visible: true},
|
||||
{data: 'testlist_id', visible: true},
|
||||
{data: 'strat_running', visible: true},
|
||||
{data: 'runner_id', visible: true},
|
||||
{data: 'runner_id', visible: true},
|
||||
{data: 'market', visible: true},
|
||||
],
|
||||
paging: true,
|
||||
processing: true,
|
||||
|
||||
@@ -371,9 +371,10 @@ function initialize_chart() {
|
||||
}
|
||||
|
||||
chart = LightweightCharts.createChart(document.getElementById('chart'), chartOptions);
|
||||
chart.applyOptions({ timeScale: { visible: true, timeVisible: true, secondsVisible: true }, crosshair: {
|
||||
chart.applyOptions({ timeScale: { visible: true, timeVisible: true, secondsVisible: true, minBarSpacing: 0.003}, crosshair: {
|
||||
mode: LightweightCharts.CrosshairMode.Normal, labelVisible: true
|
||||
}})
|
||||
console.log("chart intiialized")
|
||||
}
|
||||
|
||||
//mozna atributy last value visible
|
||||
|
||||
6
v2realbot/static/js/vbt/.html
Normal file
6
v2realbot/static/js/vbt/.html
Normal file
File diff suppressed because one or more lines are too long
157
v2realbot/static/js/vbt/api/_opt_deps/index.html
Normal file
157
v2realbot/static/js/vbt/api/_opt_deps/index.html
Normal file
File diff suppressed because one or more lines are too long
1280
v2realbot/static/js/vbt/api/_settings/index.html
Normal file
1280
v2realbot/static/js/vbt/api/_settings/index.html
Normal file
File diff suppressed because one or more lines are too long
82
v2realbot/static/js/vbt/api/accessors/index.html
Normal file
82
v2realbot/static/js/vbt/api/accessors/index.html
Normal file
File diff suppressed because one or more lines are too long
607
v2realbot/static/js/vbt/api/base/accessors/index.html
Normal file
607
v2realbot/static/js/vbt/api/base/accessors/index.html
Normal file
File diff suppressed because one or more lines are too long
68
v2realbot/static/js/vbt/api/base/chunking/index.html
Normal file
68
v2realbot/static/js/vbt/api/base/chunking/index.html
Normal file
File diff suppressed because one or more lines are too long
98
v2realbot/static/js/vbt/api/base/combining/index.html
Normal file
98
v2realbot/static/js/vbt/api/base/combining/index.html
Normal file
File diff suppressed because one or more lines are too long
13
v2realbot/static/js/vbt/api/base/decorators/index.html
Normal file
13
v2realbot/static/js/vbt/api/base/decorators/index.html
Normal file
File diff suppressed because one or more lines are too long
68
v2realbot/static/js/vbt/api/base/flex_indexing/index.html
Normal file
68
v2realbot/static/js/vbt/api/base/flex_indexing/index.html
Normal file
File diff suppressed because one or more lines are too long
103
v2realbot/static/js/vbt/api/base/grouping/base/index.html
Normal file
103
v2realbot/static/js/vbt/api/base/grouping/base/index.html
Normal file
File diff suppressed because one or more lines are too long
6
v2realbot/static/js/vbt/api/base/grouping/index.html
Normal file
6
v2realbot/static/js/vbt/api/base/grouping/index.html
Normal file
File diff suppressed because one or more lines are too long
33
v2realbot/static/js/vbt/api/base/grouping/nb/index.html
Normal file
33
v2realbot/static/js/vbt/api/base/grouping/nb/index.html
Normal file
File diff suppressed because one or more lines are too long
6
v2realbot/static/js/vbt/api/base/index.html
Normal file
6
v2realbot/static/js/vbt/api/base/index.html
Normal file
File diff suppressed because one or more lines are too long
115
v2realbot/static/js/vbt/api/base/indexes/index.html
Normal file
115
v2realbot/static/js/vbt/api/base/indexes/index.html
Normal file
File diff suppressed because one or more lines are too long
569
v2realbot/static/js/vbt/api/base/indexing/index.html
Normal file
569
v2realbot/static/js/vbt/api/base/indexing/index.html
Normal file
File diff suppressed because one or more lines are too long
75
v2realbot/static/js/vbt/api/base/merging/index.html
Normal file
75
v2realbot/static/js/vbt/api/base/merging/index.html
Normal file
File diff suppressed because one or more lines are too long
105
v2realbot/static/js/vbt/api/base/preparing/index.html
Normal file
105
v2realbot/static/js/vbt/api/base/preparing/index.html
Normal file
File diff suppressed because one or more lines are too long
81
v2realbot/static/js/vbt/api/base/resampling/base/index.html
Normal file
81
v2realbot/static/js/vbt/api/base/resampling/base/index.html
Normal file
File diff suppressed because one or more lines are too long
6
v2realbot/static/js/vbt/api/base/resampling/index.html
Normal file
6
v2realbot/static/js/vbt/api/base/resampling/index.html
Normal file
File diff suppressed because one or more lines are too long
51
v2realbot/static/js/vbt/api/base/resampling/nb/index.html
Normal file
51
v2realbot/static/js/vbt/api/base/resampling/nb/index.html
Normal file
File diff suppressed because one or more lines are too long
538
v2realbot/static/js/vbt/api/base/reshaping/index.html
Normal file
538
v2realbot/static/js/vbt/api/base/reshaping/index.html
Normal file
File diff suppressed because one or more lines are too long
530
v2realbot/static/js/vbt/api/base/wrapping/index.html
Normal file
530
v2realbot/static/js/vbt/api/base/wrapping/index.html
Normal file
File diff suppressed because one or more lines are too long
831
v2realbot/static/js/vbt/api/data/base/index.html
Normal file
831
v2realbot/static/js/vbt/api/data/base/index.html
Normal file
File diff suppressed because one or more lines are too long
70
v2realbot/static/js/vbt/api/data/custom/alpaca/index.html
Normal file
70
v2realbot/static/js/vbt/api/data/custom/alpaca/index.html
Normal file
File diff suppressed because one or more lines are too long
77
v2realbot/static/js/vbt/api/data/custom/av/index.html
Normal file
77
v2realbot/static/js/vbt/api/data/custom/av/index.html
Normal file
File diff suppressed because one or more lines are too long
71
v2realbot/static/js/vbt/api/data/custom/bento/index.html
Normal file
71
v2realbot/static/js/vbt/api/data/custom/bento/index.html
Normal file
File diff suppressed because one or more lines are too long
73
v2realbot/static/js/vbt/api/data/custom/binance/index.html
Normal file
73
v2realbot/static/js/vbt/api/data/custom/binance/index.html
Normal file
File diff suppressed because one or more lines are too long
102
v2realbot/static/js/vbt/api/data/custom/ccxt/index.html
Normal file
102
v2realbot/static/js/vbt/api/data/custom/ccxt/index.html
Normal file
File diff suppressed because one or more lines are too long
61
v2realbot/static/js/vbt/api/data/custom/csv/index.html
Normal file
61
v2realbot/static/js/vbt/api/data/custom/csv/index.html
Normal file
File diff suppressed because one or more lines are too long
51
v2realbot/static/js/vbt/api/data/custom/custom/index.html
Normal file
51
v2realbot/static/js/vbt/api/data/custom/custom/index.html
Normal file
File diff suppressed because one or more lines are too long
22
v2realbot/static/js/vbt/api/data/custom/db/index.html
Normal file
22
v2realbot/static/js/vbt/api/data/custom/db/index.html
Normal file
File diff suppressed because one or more lines are too long
156
v2realbot/static/js/vbt/api/data/custom/duckdb/index.html
Normal file
156
v2realbot/static/js/vbt/api/data/custom/duckdb/index.html
Normal file
File diff suppressed because one or more lines are too long
51
v2realbot/static/js/vbt/api/data/custom/feather/index.html
Normal file
51
v2realbot/static/js/vbt/api/data/custom/feather/index.html
Normal file
File diff suppressed because one or more lines are too long
58
v2realbot/static/js/vbt/api/data/custom/file/index.html
Normal file
58
v2realbot/static/js/vbt/api/data/custom/file/index.html
Normal file
File diff suppressed because one or more lines are too long
34
v2realbot/static/js/vbt/api/data/custom/gbm/index.html
Normal file
34
v2realbot/static/js/vbt/api/data/custom/gbm/index.html
Normal file
File diff suppressed because one or more lines are too long
35
v2realbot/static/js/vbt/api/data/custom/gbm_ohlc/index.html
Normal file
35
v2realbot/static/js/vbt/api/data/custom/gbm_ohlc/index.html
Normal file
File diff suppressed because one or more lines are too long
76
v2realbot/static/js/vbt/api/data/custom/hdf/index.html
Normal file
76
v2realbot/static/js/vbt/api/data/custom/hdf/index.html
Normal file
File diff suppressed because one or more lines are too long
6
v2realbot/static/js/vbt/api/data/custom/index.html
Normal file
6
v2realbot/static/js/vbt/api/data/custom/index.html
Normal file
File diff suppressed because one or more lines are too long
22
v2realbot/static/js/vbt/api/data/custom/local/index.html
Normal file
22
v2realbot/static/js/vbt/api/data/custom/local/index.html
Normal file
File diff suppressed because one or more lines are too long
48
v2realbot/static/js/vbt/api/data/custom/ndl/index.html
Normal file
48
v2realbot/static/js/vbt/api/data/custom/ndl/index.html
Normal file
File diff suppressed because one or more lines are too long
67
v2realbot/static/js/vbt/api/data/custom/parquet/index.html
Normal file
67
v2realbot/static/js/vbt/api/data/custom/parquet/index.html
Normal file
File diff suppressed because one or more lines are too long
69
v2realbot/static/js/vbt/api/data/custom/polygon/index.html
Normal file
69
v2realbot/static/js/vbt/api/data/custom/polygon/index.html
Normal file
File diff suppressed because one or more lines are too long
34
v2realbot/static/js/vbt/api/data/custom/random/index.html
Normal file
34
v2realbot/static/js/vbt/api/data/custom/random/index.html
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
22
v2realbot/static/js/vbt/api/data/custom/remote/index.html
Normal file
22
v2realbot/static/js/vbt/api/data/custom/remote/index.html
Normal file
File diff suppressed because one or more lines are too long
201
v2realbot/static/js/vbt/api/data/custom/sql/index.html
Normal file
201
v2realbot/static/js/vbt/api/data/custom/sql/index.html
Normal file
File diff suppressed because one or more lines are too long
71
v2realbot/static/js/vbt/api/data/custom/synthetic/index.html
Normal file
71
v2realbot/static/js/vbt/api/data/custom/synthetic/index.html
Normal file
File diff suppressed because one or more lines are too long
159
v2realbot/static/js/vbt/api/data/custom/tv/index.html
Normal file
159
v2realbot/static/js/vbt/api/data/custom/tv/index.html
Normal file
File diff suppressed because one or more lines are too long
50
v2realbot/static/js/vbt/api/data/custom/yf/index.html
Normal file
50
v2realbot/static/js/vbt/api/data/custom/yf/index.html
Normal file
File diff suppressed because one or more lines are too long
9
v2realbot/static/js/vbt/api/data/decorators/index.html
Normal file
9
v2realbot/static/js/vbt/api/data/decorators/index.html
Normal file
File diff suppressed because one or more lines are too long
6
v2realbot/static/js/vbt/api/data/index.html
Normal file
6
v2realbot/static/js/vbt/api/data/index.html
Normal file
File diff suppressed because one or more lines are too long
34
v2realbot/static/js/vbt/api/data/nb/index.html
Normal file
34
v2realbot/static/js/vbt/api/data/nb/index.html
Normal file
File diff suppressed because one or more lines are too long
77
v2realbot/static/js/vbt/api/data/saver/index.html
Normal file
77
v2realbot/static/js/vbt/api/data/saver/index.html
Normal file
File diff suppressed because one or more lines are too long
25
v2realbot/static/js/vbt/api/data/updater/index.html
Normal file
25
v2realbot/static/js/vbt/api/data/updater/index.html
Normal file
File diff suppressed because one or more lines are too long
2115
v2realbot/static/js/vbt/api/generic/accessors/index.html
Normal file
2115
v2realbot/static/js/vbt/api/generic/accessors/index.html
Normal file
File diff suppressed because one or more lines are too long
14
v2realbot/static/js/vbt/api/generic/analyzable/index.html
Normal file
14
v2realbot/static/js/vbt/api/generic/analyzable/index.html
Normal file
File diff suppressed because one or more lines are too long
12
v2realbot/static/js/vbt/api/generic/decorators/index.html
Normal file
12
v2realbot/static/js/vbt/api/generic/decorators/index.html
Normal file
File diff suppressed because one or more lines are too long
624
v2realbot/static/js/vbt/api/generic/drawdowns/index.html
Normal file
624
v2realbot/static/js/vbt/api/generic/drawdowns/index.html
Normal file
File diff suppressed because one or more lines are too long
309
v2realbot/static/js/vbt/api/generic/enums/index.html
Normal file
309
v2realbot/static/js/vbt/api/generic/enums/index.html
Normal file
File diff suppressed because one or more lines are too long
6
v2realbot/static/js/vbt/api/generic/index.html
Normal file
6
v2realbot/static/js/vbt/api/generic/index.html
Normal file
File diff suppressed because one or more lines are too long
374
v2realbot/static/js/vbt/api/generic/nb/apply_reduce/index.html
Normal file
374
v2realbot/static/js/vbt/api/generic/nb/apply_reduce/index.html
Normal file
File diff suppressed because one or more lines are too long
358
v2realbot/static/js/vbt/api/generic/nb/base/index.html
Normal file
358
v2realbot/static/js/vbt/api/generic/nb/base/index.html
Normal file
File diff suppressed because one or more lines are too long
6
v2realbot/static/js/vbt/api/generic/nb/index.html
Normal file
6
v2realbot/static/js/vbt/api/generic/nb/index.html
Normal file
File diff suppressed because one or more lines are too long
30
v2realbot/static/js/vbt/api/generic/nb/iter_/index.html
Normal file
30
v2realbot/static/js/vbt/api/generic/nb/iter_/index.html
Normal file
File diff suppressed because one or more lines are too long
77
v2realbot/static/js/vbt/api/generic/nb/patterns/index.html
Normal file
77
v2realbot/static/js/vbt/api/generic/nb/patterns/index.html
Normal file
File diff suppressed because one or more lines are too long
207
v2realbot/static/js/vbt/api/generic/nb/records/index.html
Normal file
207
v2realbot/static/js/vbt/api/generic/nb/records/index.html
Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user