@ -14,7 +14,7 @@ from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeSt
|
|||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer
|
from v2realbot.loader.trade_offline_streamer import Trade_Offline_Streamer
|
||||||
from threading import Thread, current_thread, Event, enumerate
|
from threading import Thread, current_thread, Event, enumerate
|
||||||
from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY
|
from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY, RUNNER_DETAIL_DIRECTORY, OFFLINE_MODE
|
||||||
import importlib
|
import importlib
|
||||||
from alpaca.trading.requests import GetCalendarRequest
|
from alpaca.trading.requests import GetCalendarRequest
|
||||||
from alpaca.trading.client import TradingClient
|
from alpaca.trading.client import TradingClient
|
||||||
@ -1609,6 +1609,11 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
|||||||
|
|
||||||
|
|
||||||
#print("Done", state.indicators[indicator.name])
|
#print("Done", state.indicators[indicator.name])
|
||||||
|
|
||||||
|
#print for multioutput
|
||||||
|
for ind_name in returns:
|
||||||
|
print("Done",ind_name, state.indicators[indicator.name])
|
||||||
|
|
||||||
output_dict = {}
|
output_dict = {}
|
||||||
new_inds[indicator.name] = state.indicators[indicator.name]
|
new_inds[indicator.name] = state.indicators[indicator.name]
|
||||||
|
|
||||||
@ -1664,7 +1669,7 @@ def preview_indicator_byTOML(id: UUID, indicator: InstantIndicator, save: bool =
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(str(e) + format_exc())
|
print(str(e) + format_exc())
|
||||||
|
|
||||||
#print("Done", state.indicators[indicator.name])
|
#print("Done", state.cbar_indicators[indicator.name])
|
||||||
output_dict = {}
|
output_dict = {}
|
||||||
new_tick_inds[indicator.name] = state.cbar_indicators[indicator.name]
|
new_tick_inds[indicator.name] = state.cbar_indicators[indicator.name]
|
||||||
|
|
||||||
@ -1884,8 +1889,10 @@ def get_alpaca_history_bars(symbol: str, datetime_object_from: datetime, datetim
|
|||||||
return 0, result
|
return 0, result
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(str(e) + format_exc())
|
print(str(e) + format_exc())
|
||||||
return -2, str(e)
|
if OFFLINE_MODE:
|
||||||
|
print("OFFLINE MODE ENABLED")
|
||||||
|
return 0, []
|
||||||
|
return -2, str(e)
|
||||||
# change_archived_runner
|
# change_archived_runner
|
||||||
# delete_archived_runner_details
|
# delete_archived_runner_details
|
||||||
|
|
||||||
|
|||||||
@ -3,7 +3,7 @@
|
|||||||
"""
|
"""
|
||||||
from v2realbot.enums.enums import RecordType, StartBarAlign
|
from v2realbot.enums.enums import RecordType, StartBarAlign
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, Queue,is_open_hours,zoneNY
|
from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, Queue,is_open_hours,zoneNY, zoneUTC
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
from rich import print
|
from rich import print
|
||||||
from v2realbot.enums.enums import Mode
|
from v2realbot.enums.enums import Mode
|
||||||
@ -14,6 +14,7 @@ import os
|
|||||||
from v2realbot.config import DATA_DIR, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, AGG_EXCLUDED_TRADES
|
from v2realbot.config import DATA_DIR, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, AGG_EXCLUDED_TRADES
|
||||||
import pickle
|
import pickle
|
||||||
import dill
|
import dill
|
||||||
|
import gzip
|
||||||
|
|
||||||
class TradeAggregator:
|
class TradeAggregator:
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
@ -149,7 +150,7 @@ class TradeAggregator:
|
|||||||
# else:
|
# else:
|
||||||
data['t'] = parse_alpaca_timestamp(data['t'])
|
data['t'] = parse_alpaca_timestamp(data['t'])
|
||||||
|
|
||||||
if not is_open_hours(datetime.fromtimestamp(data['t'])) and self.exthours is False:
|
if not is_open_hours(datetime.fromtimestamp(data['t'], tz=zoneUTC)) and self.exthours is False:
|
||||||
#print("AGG: trade not in open hours skipping", datetime.fromtimestamp(data['t']).astimezone(zoneNY))
|
#print("AGG: trade not in open hours skipping", datetime.fromtimestamp(data['t']).astimezone(zoneNY))
|
||||||
return []
|
return []
|
||||||
|
|
||||||
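A minimal sketch (not part of the commit) of why the tz=zoneUTC argument added above matters: a naive datetime.fromtimestamp() interprets the epoch value in the host's local timezone, while passing tz=pytz.utc always yields the same timezone-aware UTC datetime; the timestamp value below is illustrative only.

import pytz
from datetime import datetime

zoneUTC = pytz.utc
zoneNY = pytz.timezone('US/Eastern')

ts = 1700000000                                      # example epoch seconds, illustrative only
naive_local = datetime.fromtimestamp(ts)             # depends on the host timezone
aware_utc = datetime.fromtimestamp(ts, tz=zoneUTC)   # always UTC, matching the change above
print(aware_utc.astimezone(zoneNY))                  # rendered in market time only for display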
@ -442,7 +443,7 @@ class TradeAggregator:
|
|||||||
"trades": 1,
|
"trades": 1,
|
||||||
"hlcc4": data['p'],
|
"hlcc4": data['p'],
|
||||||
"confirmed": 0,
|
"confirmed": 0,
|
||||||
"time": datetime.fromtimestamp(data['t']),
|
"time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
|
||||||
"updated": data['t'],
|
"updated": data['t'],
|
||||||
"vwap": data['p'],
|
"vwap": data['p'],
|
||||||
"index": self.barindex,
|
"index": self.barindex,
|
||||||
@ -476,7 +477,7 @@ class TradeAggregator:
|
|||||||
"trades": 1,
|
"trades": 1,
|
||||||
"hlcc4":data['p'],
|
"hlcc4":data['p'],
|
||||||
"confirmed": 1,
|
"confirmed": 1,
|
||||||
"time": datetime.fromtimestamp(data['t']),
|
"time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
|
||||||
"updated": data['t'],
|
"updated": data['t'],
|
||||||
"vwap": data['p'],
|
"vwap": data['p'],
|
||||||
"index": self.barindex,
|
"index": self.barindex,
|
||||||
@ -608,7 +609,7 @@ class TradeAggregator:
|
|||||||
"trades": 1,
|
"trades": 1,
|
||||||
"hlcc4": data['p'],
|
"hlcc4": data['p'],
|
||||||
"confirmed": 0,
|
"confirmed": 0,
|
||||||
"time": datetime.fromtimestamp(data['t']),
|
"time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
|
||||||
"updated": data['t'],
|
"updated": data['t'],
|
||||||
"vwap": data['p'],
|
"vwap": data['p'],
|
||||||
"index": self.barindex,
|
"index": self.barindex,
|
||||||
@ -642,7 +643,7 @@ class TradeAggregator:
|
|||||||
"trades": 1,
|
"trades": 1,
|
||||||
"hlcc4":data['p'],
|
"hlcc4":data['p'],
|
||||||
"confirmed": 1,
|
"confirmed": 1,
|
||||||
"time": datetime.fromtimestamp(data['t']),
|
"time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
|
||||||
"updated": data['t'],
|
"updated": data['t'],
|
||||||
"vwap": data['p'],
|
"vwap": data['p'],
|
||||||
"index": self.barindex,
|
"index": self.barindex,
|
||||||
@ -787,7 +788,7 @@ class TradeAggregator:
|
|||||||
"trades": 1,
|
"trades": 1,
|
||||||
"hlcc4": data['p'],
|
"hlcc4": data['p'],
|
||||||
"confirmed": 0,
|
"confirmed": 0,
|
||||||
"time": datetime.fromtimestamp(data['t']),
|
"time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
|
||||||
"updated": data['t'],
|
"updated": data['t'],
|
||||||
"vwap": data['p'],
|
"vwap": data['p'],
|
||||||
"index": self.barindex,
|
"index": self.barindex,
|
||||||
@ -822,7 +823,7 @@ class TradeAggregator:
|
|||||||
"trades": 1,
|
"trades": 1,
|
||||||
"hlcc4":data['p'],
|
"hlcc4":data['p'],
|
||||||
"confirmed": 1,
|
"confirmed": 1,
|
||||||
"time": datetime.fromtimestamp(data['t']),
|
"time": datetime.fromtimestamp(data['t'], tz=zoneUTC),
|
||||||
"updated": data['t'],
|
"updated": data['t'],
|
||||||
"vwap": data['p'],
|
"vwap": data['p'],
|
||||||
"index": self.barindex,
|
"index": self.barindex,
|
||||||
@ -898,7 +899,7 @@ class TradeAggregator:
|
|||||||
#and also take the excludes: result = ''.join(self.excludes.sort())
|
#and also take the excludes: result = ''.join(self.excludes.sort())
|
||||||
self.excludes.sort() # Sorts the list in place
|
self.excludes.sort() # Sorts the list in place
|
||||||
excludes_str = ''.join(map(str, self.excludes)) # Joins the sorted elements after converting them to strings
|
excludes_str = ''.join(map(str, self.excludes)) # Joins the sorted elements after converting them to strings
|
||||||
cache_file = self.__class__.__name__ + '-' + self.symbol + '-' + str(int(date_from.timestamp())) + '-' + str(int(date_to.timestamp())) + '-' + str(self.rectype) + "-" + str(self.resolution) + "-" + str(self.minsize) + "-" + str(self.align) + '-' + str(self.mintick) + str(self.exthours) + excludes_str + '.cache'
|
cache_file = self.__class__.__name__ + '-' + self.symbol + '-' + str(int(date_from.timestamp())) + '-' + str(int(date_to.timestamp())) + '-' + str(self.rectype) + "-" + str(self.resolution) + "-" + str(self.minsize) + "-" + str(self.align) + '-' + str(self.mintick) + str(self.exthours) + excludes_str + '.cache.gz'
|
||||||
file_path = DATA_DIR + "/aggcache/" + cache_file
|
file_path = DATA_DIR + "/aggcache/" + cache_file
|
||||||
#print(file_path)
|
#print(file_path)
|
||||||
return file_path
|
return file_path
|
||||||
@ -908,7 +909,7 @@ class TradeAggregator:
|
|||||||
file_path = self.populate_file_name(date_from, date_to)
|
file_path = self.populate_file_name(date_from, date_to)
|
||||||
if self.skip_cache is False and os.path.exists(file_path):
|
if self.skip_cache is False and os.path.exists(file_path):
|
||||||
##daily aggregated file exists
|
##daily aggregated file exists
|
||||||
with open (file_path, 'rb') as fp:
|
with gzip.open (file_path, 'rb') as fp:
|
||||||
cachedobject = dill.load(fp)
|
cachedobject = dill.load(fp)
|
||||||
print("AGG CACHE loaded ", file_path)
|
print("AGG CACHE loaded ", file_path)
|
||||||
|
|
||||||
@ -941,7 +942,7 @@ class TradeAggregator:
|
|||||||
|
|
||||||
file_path = self.populate_file_name(self.cache_from, self.cache_to)
|
file_path = self.populate_file_name(self.cache_from, self.cache_to)
|
||||||
|
|
||||||
with open(file_path, 'wb') as fp:
|
with gzip.open(file_path, 'wb') as fp:
|
||||||
dill.dump(self.cached_object, fp)
|
dill.dump(self.cached_object, fp)
|
||||||
print(f"AGG CACHE stored ({num}) :{file_path}")
|
print(f"AGG CACHE stored ({num}) :{file_path}")
|
||||||
print(f"DATES from:{self.cache_from.strftime('%d.%m.%Y %H:%M')} to:{self.cache_to.strftime('%d.%m.%Y %H:%M')}")
|
print(f"DATES from:{self.cache_from.strftime('%d.%m.%Y %H:%M')} to:{self.cache_to.strftime('%d.%m.%Y %H:%M')}")
|
||||||
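A minimal sketch of the gzip+dill round trip this hunk switches to; the object and path below are placeholders, not the aggregator's real cache layout.

import gzip
import dill

cached_object = {"bars": [1, 2, 3]}        # stand-in for the aggregator's cached state
file_path = "/tmp/example.cache.gz"        # illustrative path only

with gzip.open(file_path, 'wb') as fp:     # write compressed, same call pattern as above
    dill.dump(cached_object, fp)

with gzip.open(file_path, 'rb') as fp:     # read back, transparently decompressed
    restored = dill.load(fp)

assert restored == cached_object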
|
|||||||
@ -16,6 +16,7 @@ import asyncio
|
|||||||
from msgpack.ext import Timestamp
|
from msgpack.ext import Timestamp
|
||||||
from msgpack import packb
|
from msgpack import packb
|
||||||
from pandas import to_datetime
|
from pandas import to_datetime
|
||||||
|
import gzip
|
||||||
import pickle
|
import pickle
|
||||||
import os
|
import os
|
||||||
from rich import print
|
from rich import print
|
||||||
@ -197,7 +198,7 @@ class Trade_Offline_Streamer(Thread):
|
|||||||
stream_main.enable_cache_output(day.open, day.close)
|
stream_main.enable_cache_output(day.open, day.close)
|
||||||
|
|
||||||
#trade daily file
|
#trade daily file
|
||||||
daily_file = str(symbpole[0]) + '-' + str(int(day.open.timestamp())) + '-' + str(int(day.close.timestamp())) + '.cache'
|
daily_file = str(symbpole[0]) + '-' + str(int(day.open.timestamp())) + '-' + str(int(day.close.timestamp())) + '.cache.gz'
|
||||||
print(daily_file)
|
print(daily_file)
|
||||||
file_path = DATA_DIR + "/tradecache/"+daily_file
|
file_path = DATA_DIR + "/tradecache/"+daily_file
|
||||||
|
|
||||||
@ -207,7 +208,7 @@ class Trade_Offline_Streamer(Thread):
|
|||||||
#if start_time < trade < end_time
|
#if start_time < trade < end_time
|
||||||
#send it to the queue
|
#send it to the queue
|
||||||
#otherwise pass
|
#otherwise pass
|
||||||
with open (file_path, 'rb') as fp:
|
with gzip.open (file_path, 'rb') as fp:
|
||||||
tradesResponse = pickle.load(fp)
|
tradesResponse = pickle.load(fp)
|
||||||
print("Loading from Trade CACHE", file_path)
|
print("Loading from Trade CACHE", file_path)
|
||||||
#daily file doesn't exist
|
#daily file doesn't exist
|
||||||
@ -223,7 +224,7 @@ class Trade_Offline_Streamer(Thread):
|
|||||||
#ic(datetime.now().astimezone(zoneNY))
|
#ic(datetime.now().astimezone(zoneNY))
|
||||||
#ic(day.open, day.close)
|
#ic(day.open, day.close)
|
||||||
else:
|
else:
|
||||||
with open(file_path, 'wb') as fp:
|
with gzip.open(file_path, 'wb') as fp:
|
||||||
pickle.dump(tradesResponse, fp)
|
pickle.dump(tradesResponse, fp)
|
||||||
|
|
||||||
#at this point we already have the daily data
|
#at this point we already have the daily data
|
||||||
|
|||||||
@ -387,6 +387,7 @@ function chart_indicators(data, visible, offset) {
|
|||||||
//indicatory
|
//indicatory
|
||||||
//console.log("indicatory TOML", stratvars_toml.stratvars.indicators)
|
//console.log("indicatory TOML", stratvars_toml.stratvars.indicators)
|
||||||
indId = 1
|
indId = 1
|
||||||
|
var multiOutsCnf = {}
|
||||||
indicatorList.forEach((indicators, index, array) => {
|
indicatorList.forEach((indicators, index, array) => {
|
||||||
|
|
||||||
//var indicators = data.indicators
|
//var indicators = data.indicators
|
||||||
@ -440,10 +441,28 @@ function chart_indicators(data, visible, offset) {
|
|||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
|
|
||||||
|
//for multioutput children, take the scale from the parent
|
||||||
|
if (multiOutsCnf.hasOwnProperty(key)) {
|
||||||
|
scale = multiOutsCnf[key];
|
||||||
|
}
|
||||||
|
|
||||||
//initialize indicator and store reference to array
|
//initialize indicator and store reference to array
|
||||||
var obj = {name: key, type: index, series: null, cnf:cnf, instant: instant, returns: returns, indId:indId++}
|
var obj = {name: key, type: index, series: null, cnf:cnf, instant: instant, returns: returns, indId:indId++}
|
||||||
|
|
||||||
//start
|
//if this is a multioutput parent, store the parent's scale for its children
|
||||||
|
//variants: scale is a single value - store it as the scale for all outputs
|
||||||
|
//         - scale is a list - for each output, use the scale at the same index in the list as that output
|
||||||
|
if (returns) {
|
||||||
|
returns.forEach((returned, index, array) => {
|
||||||
|
//
|
||||||
|
if (Array.isArray(scale)) {
|
||||||
|
multiOutsCnf[returned] = scale[index]
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
multiOutsCnf[returned] = scale
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} //start
|
||||||
//console.log(key)
|
//console.log(key)
|
||||||
//get configuration of indicator to display
|
//get configuration of indicator to display
|
||||||
conf = get_ind_config(key, index)
|
conf = get_ind_config(key, index)
|
||||||
@ -601,7 +620,12 @@ function chart_indicators(data, visible, offset) {
|
|||||||
//console.log("true",active?active:conf.display)
|
//console.log("true",active?active:conf.display)
|
||||||
active = true
|
active = true
|
||||||
}
|
}
|
||||||
else {active = false}
|
else {active = false}
|
||||||
|
|
||||||
|
//for a main indicator with multioutput we do not display it
|
||||||
|
if (returns) {
|
||||||
|
active = false
|
||||||
|
}
|
||||||
//add options
|
//add options
|
||||||
obj.series.applyOptions({
|
obj.series.applyOptions({
|
||||||
visible: active?active:visible,
|
visible: active?active:visible,
|
||||||
|
|||||||
@ -702,7 +702,8 @@ function create_multioutput_button(item, def, active) {
|
|||||||
multiOutEl.classList.add('multiOut');
|
multiOutEl.classList.add('multiOut');
|
||||||
multiOutEl.classList.add('switcher-item');
|
multiOutEl.classList.add('switcher-item');
|
||||||
//def only - for the main indicator we do not remember state and we want no action on left click
|
//def only - for the main indicator we do not remember state and we want no action on left click
|
||||||
itemEl = create_indicator_button(item, def, true);
|
//def||active - not even def
|
||||||
|
itemEl = create_indicator_button(item, false, true);
|
||||||
//the main button controls expand/collapse
|
//the main button controls expand/collapse
|
||||||
itemEl.setAttribute('data-bs-toggle', 'collapse');
|
itemEl.setAttribute('data-bs-toggle', 'collapse');
|
||||||
itemEl.setAttribute('data-bs-target', '.'+item.name);
|
itemEl.setAttribute('data-bs-target', '.'+item.name);
|
||||||
@ -744,7 +745,6 @@ function create_multioutput_button(item, def, active) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
return multiOutEl
|
return multiOutEl
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@ -0,0 +1,22 @@
|
|||||||
|
from v2realbot.strategyblocks.indicators.custom.classes.indicatorbase import IndicatorBase
|
||||||
|
from collections import deque
|
||||||
|
|
||||||
|
class SimpleMovingAverage(IndicatorBase):
|
||||||
|
"""
|
||||||
|
Calculates the Simple Moving Average (SMA) of a given data list.
|
||||||
|
The SMA is calculated over a specified window size.
|
||||||
|
If there are insufficient data points for the full window, the behavior
|
||||||
|
can be controlled by `return_last_if_insufficient`: if True, the last data point is returned,
|
||||||
|
otherwise, 0 is returned.
|
||||||
|
"""
|
||||||
|
def __init__(self, period, return_last_if_insufficient=False, state=None):
|
||||||
|
super().__init__(state)
|
||||||
|
self.window_size = period
|
||||||
|
self.return_last_if_insufficient = return_last_if_insufficient
|
||||||
|
self.data_points = deque(maxlen=period)
|
||||||
|
|
||||||
|
def next(self, data):
|
||||||
|
self.data_points.append(data[-1])
|
||||||
|
if len(self.data_points) < self.window_size:
|
||||||
|
return data[-1] if self.return_last_if_insufficient else 0
|
||||||
|
return sum(self.data_points) / len(self.data_points)
|
||||||
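A minimal usage sketch for the class above, assuming it can be instantiated directly with the default state=None (in the strategy it is driven by the custom indicator hub); the price values are made up.

sma = SimpleMovingAverage(period=3, return_last_if_insufficient=True)
closes = []
for price in [10.0, 11.0, 12.0, 13.0]:
    closes.append(price)
    print(sma.next(closes))   # 10.0, 11.0, then 11.0 and 12.0 once the window is full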
@ -0,0 +1,64 @@
|
|||||||
|
from v2realbot.strategyblocks.indicators.custom.classes.indicatorbase import IndicatorBase
|
||||||
|
from collections import deque
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
class SuperTrend(IndicatorBase):
|
||||||
|
"""
|
||||||
|
Advanced implementation of the SuperTrend indicator, dynamically calculating the trend direction.
|
||||||
|
This approach considers the previous trend when determining the new trend, making the indicator
|
||||||
|
more responsive to price changes. Returns [basic_up, basic_dn, final_up, final_dn, is_trend], with is_trend being 0 (no trend),
|
||||||
|
1 (uptrend), or -1 (downtrend). If there aren't enough values for ATR, it returns the last close
|
||||||
|
for all four band values and 0 for is_trend.
|
||||||
|
Generated with Indicator Plugin Builder.
|
||||||
|
Link: [https://chat.openai.com/g/g-aCKuSmbIe-indicator-plugin-builder/c/8cf9ec38-31e0-4577-8331-22919ae149ab]
|
||||||
|
|
||||||
|
An interesting advanced version - see the link above for details
|
||||||
|
"""
|
||||||
|
def __init__(self, atr_period=14, multiplier=3, state=None):
|
||||||
|
super().__init__(state)
|
||||||
|
self.atr_period = atr_period
|
||||||
|
self.multiplier = multiplier
|
||||||
|
self.tr_queue = deque(maxlen=atr_period)
|
||||||
|
self.final_upperband = None
|
||||||
|
self.final_lowerband = None
|
||||||
|
self.is_trend = 0
|
||||||
|
|
||||||
|
def next(self, high, low, close):
|
||||||
|
if len(close) < self.atr_period:
|
||||||
|
return [close[-1], close[-1],close[-1], close[-1], 0]
|
||||||
|
|
||||||
|
# True Range calculation
|
||||||
|
current_high = high[-1]
|
||||||
|
current_low = low[-1]
|
||||||
|
previous_close = close[-2] if len(close) > 1 else close[-1]
|
||||||
|
true_range = max(current_high - current_low, abs(current_high - previous_close), abs(current_low - previous_close))
|
||||||
|
|
||||||
|
# Updating the True Range queue
|
||||||
|
self.tr_queue.append(true_range)
|
||||||
|
if len(self.tr_queue) < self.atr_period:
|
||||||
|
return [close[-1], close[-1],close[-1], close[-1], 0]
|
||||||
|
|
||||||
|
# ATR calculation
|
||||||
|
atr = sum(self.tr_queue) / self.atr_period
|
||||||
|
|
||||||
|
# Basic upper and lower bands
|
||||||
|
basic_upperband = (current_high + current_low) / 2 + self.multiplier * atr
|
||||||
|
basic_lowerband = (current_high + current_low) / 2 - self.multiplier * atr
|
||||||
|
|
||||||
|
# Final upper and lower bands
|
||||||
|
if self.final_upperband is None or self.final_lowerband is None:
|
||||||
|
self.final_upperband = basic_upperband
|
||||||
|
self.final_lowerband = basic_lowerband
|
||||||
|
else:
|
||||||
|
if close[-1] <= self.final_upperband:
|
||||||
|
self.final_upperband = basic_upperband
|
||||||
|
if close[-1] >= self.final_lowerband:
|
||||||
|
self.final_lowerband = basic_lowerband
|
||||||
|
|
||||||
|
# Trend determination
|
||||||
|
if close[-1] > self.final_upperband:
|
||||||
|
self.is_trend = 1
|
||||||
|
elif close[-1] < self.final_lowerband:
|
||||||
|
self.is_trend = -1
|
||||||
|
|
||||||
|
return [basic_upperband,basic_lowerband,self.final_upperband, self.final_lowerband, self.is_trend]
|
||||||
@ -0,0 +1,57 @@
|
|||||||
|
from v2realbot.strategyblocks.indicators.custom.classes.indicatorbase import IndicatorBase
|
||||||
|
from collections import deque
|
||||||
|
|
||||||
|
class SuperTrend1(IndicatorBase):
|
||||||
|
"""
|
||||||
|
The SuperTrend indicator is a trend following indicator which uses ATR to calculate its values.
|
||||||
|
It returns a list with three elements: [up, dn, is_trend].
|
||||||
|
`is_trend` can be 1 (uptrend), -1 (downtrend), or 0 (no trend or not enough data).
|
||||||
|
If there are not enough values for ATR, it returns close[-1] for both `up` and `dn`, and 0 for `is_trend`.
|
||||||
|
|
||||||
|
Note: Code generated with Indicator Plugin Builder.
|
||||||
|
Link: [Indicator Plugin Builder Conversation](https://openai.com/chat/)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, multiplier=3, period=14, state=None):
|
||||||
|
super().__init__(state)
|
||||||
|
self.multiplier = multiplier
|
||||||
|
self.period = period
|
||||||
|
self.atr_values = deque(maxlen=period)
|
||||||
|
self.previous_supertrend = None
|
||||||
|
self.previous_close = None
|
||||||
|
self.previous_trend = 0
|
||||||
|
|
||||||
|
def next(self, high, low, close):
|
||||||
|
if len(high) < self.period or len(low) < self.period or len(close) < self.period:
|
||||||
|
return [close[-1], close[-1], 0]
|
||||||
|
|
||||||
|
# Calculate True Range
|
||||||
|
tr = max(high[-1] - low[-1], abs(high[-1] - close[-2]), abs(low[-1] - close[-2]))
|
||||||
|
self.atr_values.append(tr)
|
||||||
|
|
||||||
|
if len(self.atr_values) < self.period:
|
||||||
|
return [close[-1], close[-1], 0]
|
||||||
|
|
||||||
|
# Calculate ATR
|
||||||
|
atr = sum(self.atr_values) / self.period
|
||||||
|
|
||||||
|
# Calculate Supertrend
|
||||||
|
up = close[-1] - (self.multiplier * atr)
|
||||||
|
dn = close[-1] + (self.multiplier * atr)
|
||||||
|
|
||||||
|
if self.previous_supertrend is None:
|
||||||
|
self.previous_supertrend = up
|
||||||
|
|
||||||
|
trend = 0
|
||||||
|
if close[-1] > self.previous_supertrend:
|
||||||
|
trend = 1
|
||||||
|
up = max(up, self.previous_supertrend)
|
||||||
|
elif close[-1] < self.previous_supertrend:
|
||||||
|
trend = -1
|
||||||
|
dn = min(dn, self.previous_supertrend)
|
||||||
|
|
||||||
|
self.previous_supertrend = up if trend == 1 else dn
|
||||||
|
self.previous_close = close[-1]
|
||||||
|
self.previous_trend = trend
|
||||||
|
|
||||||
|
return [up, dn, trend]
|
||||||
@ -0,0 +1,61 @@
|
|||||||
|
from v2realbot.strategyblocks.indicators.custom.classes.indicatorbase import IndicatorBase
|
||||||
|
from collections import deque
|
||||||
|
|
||||||
|
class SuperTrendBard(IndicatorBase):
|
||||||
|
"""
|
||||||
|
Calculates Supertrend indicator values.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, period=10, multiplier=3, state=None):
|
||||||
|
super().__init__(state)
|
||||||
|
self.atr_period = period
|
||||||
|
self.multiplier = multiplier
|
||||||
|
self.highs = deque(maxlen=period)
|
||||||
|
self.lows = deque(maxlen=period)
|
||||||
|
self.atr = 0
|
||||||
|
self.upbound = None # Can set a default value if desired
|
||||||
|
self.downbound = None # Can set a default value if desired
|
||||||
|
self.is_trend = None
|
||||||
|
|
||||||
|
def next(self, high, low, close):
|
||||||
|
high = high[-1]
|
||||||
|
low = low[-1]
|
||||||
|
close = close[-1]
|
||||||
|
|
||||||
|
# Update ATR calculation
|
||||||
|
self.highs.append(high)
|
||||||
|
self.lows.append(low)
|
||||||
|
|
||||||
|
# Check for sufficient data
|
||||||
|
if len(self.highs) < self.atr_period or len(self.lows) < self.atr_period:
|
||||||
|
return [close, close, 0]
|
||||||
|
|
||||||
|
if len(self.highs) == self.atr_period:
|
||||||
|
true_range = max(high - low, abs(high - self.highs[0]), abs(low - self.lows[0]))
|
||||||
|
self.atr = (self.atr * (self.atr_period - 1) + true_range) / self.atr_period
|
||||||
|
|
||||||
|
# Calculate Supertrend
|
||||||
|
if self.upbound is None:
|
||||||
|
self.upbound = close - (self.multiplier * self.atr)
|
||||||
|
self.downbound = close + (self.multiplier * self.atr)
|
||||||
|
self.is_trend = None # Set initial trend state to unknown
|
||||||
|
else:
|
||||||
|
# Determine trend based on previous trend and current price
|
||||||
|
if self.is_trend == 1:
|
||||||
|
# Uptrend continuation
|
||||||
|
self.upbound = max(self.upbound, close - (self.multiplier * self.atr)) # Adjust upbound dynamically
|
||||||
|
self.is_trend = 1 if close > self.upbound else 0 # Recalculate trend if needed
|
||||||
|
elif self.is_trend == -1 and close < self.downbound:
|
||||||
|
# Downtrend continues
|
||||||
|
self.downbound = min(self.downbound, low + (self.multiplier * self.atr))
|
||||||
|
self.is_trend = -1
|
||||||
|
else:
|
||||||
|
# Recalculate trend based on current price
|
||||||
|
self.is_trend = 1 if close > self.upbound else -1 if close < self.downbound else 0
|
||||||
|
# Update Supertrend lines based on new trend
|
||||||
|
if self.is_trend == 1:
|
||||||
|
self.upbound = max(self.upbound, close - (self.multiplier * self.atr))
|
||||||
|
else:
|
||||||
|
self.downbound = min(self.downbound, low + (self.multiplier * self.atr))
|
||||||
|
|
||||||
|
return [self.upbound, self.downbound, self.is_trend]
|
||||||
@ -0,0 +1,50 @@
|
|||||||
|
from v2realbot.strategyblocks.indicators.custom.classes.indicatorbase import IndicatorBase
|
||||||
|
from collections import deque
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
class SuperTrendTV(IndicatorBase):
|
||||||
|
"""
|
||||||
|
The SuperTrend indicator is a trend following indicator that is used to identify the direction of the price relative to its historical volatility.
|
||||||
|
It combines the Average True Range (ATR) with the moving average to determine trend direction and reversal points.
|
||||||
|
This implementation was generated with Indicator Plugin Builder.
|
||||||
|
See conversation: [Indicator Plugin Builder](https://chat.openai.com/g/g-aCKuSmbIe-indicator-plugin-builder/c/1ad650dc-05f1-4cf6-b936-772c0ea86ffa)
|
||||||
|
inspirace https://www.tradingview.com/script/r6dAP7yi/
|
||||||
|
"""
|
||||||
|
def __init__(self, atr_period=10, atr_multiplier=3.0, state=None):
|
||||||
|
super().__init__(state)
|
||||||
|
self.atr_period = atr_period
|
||||||
|
self.atr_multiplier = atr_multiplier
|
||||||
|
self.highs = deque(maxlen=atr_period)
|
||||||
|
self.lows = deque(maxlen=atr_period)
|
||||||
|
self.closes = deque(maxlen=atr_period)
|
||||||
|
self.up = None
|
||||||
|
self.down = None
|
||||||
|
self.trend = 1
|
||||||
|
|
||||||
|
def next(self, high, low, close):
|
||||||
|
self.highs.append(high[-1])
|
||||||
|
self.lows.append(low[-1])
|
||||||
|
self.closes.append(close[-1])
|
||||||
|
|
||||||
|
if len(self.highs) < self.atr_period:
|
||||||
|
return [close[-1], close[-1], 0]
|
||||||
|
|
||||||
|
tr = [max(hi - lo, abs(hi - cl), abs(lo - cl))
|
||||||
|
for hi, lo, cl in zip(self.highs, self.lows, self.closes)]
|
||||||
|
atr = np.mean(tr[-self.atr_period:])
|
||||||
|
|
||||||
|
src = (high[-1] + low[-1]) / 2
|
||||||
|
up = src - (self.atr_multiplier * atr)
|
||||||
|
dn = src + (self.atr_multiplier * atr)
|
||||||
|
|
||||||
|
if self.up is None:
|
||||||
|
self.up = up
|
||||||
|
self.down = dn
|
||||||
|
else:
|
||||||
|
self.up = max(up, self.up) if close[-2] > self.up else up
|
||||||
|
self.down = min(dn, self.down) if close[-2] < self.down else dn
|
||||||
|
|
||||||
|
previous_trend = self.trend
|
||||||
|
self.trend = 1 if (self.trend == -1 and close[-1] > self.down) else -1 if (self.trend == 1 and close[-1] < self.up) else self.trend
|
||||||
|
|
||||||
|
return [self.up, self.down, self.trend]
|
||||||
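A minimal drive loop for SuperTrendTV above, assuming direct instantiation with the default state=None; in the strategy the high/low/close lists come from the bar series, the values here are made up.

st = SuperTrendTV(atr_period=3, atr_multiplier=2.0)
highs, lows, closes = [], [], []
for h, l, c in [(11.0, 9.0, 10.0), (12.0, 10.0, 11.0), (13.0, 11.0, 12.0), (14.0, 12.0, 13.0)]:
    highs.append(h); lows.append(l); closes.append(c)
    up, dn, trend = st.next(highs, lows, closes)   # [close, close, 0] until the ATR window fills
    print(round(up, 2), round(dn, 2), trend)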
@ -11,36 +11,30 @@ from v2realbot.strategyblocks.indicators.helpers import value_or_indicator
|
|||||||
import talib
|
import talib
|
||||||
|
|
||||||
|
|
||||||
# example TOML for the indicator ATR(high, low, close, timeperiod=14)
|
# example TOML for the indicator ATR(high, low, close, timeperiod=14) -
|
||||||
# POSITION MATTERS
|
|
||||||
# params.series.high = "high" //series key určuje, že jde o série
|
|
||||||
# params.series.low = "low"
|
|
||||||
# params.series.low = "close"
|
|
||||||
# params.val.timeperiod = 14 //val key určuje, že jde o konkrétní hodnotu, tzn. buď hodnotu nebo název série, ze které vezmu poslední hodnotu (v tom případě by to byl string)
|
|
||||||
|
|
||||||
|
|
||||||
# params.series = ["high","low","close"] #pozicni parametry
|
# params.series = ["high","low","close"] #pozicni parametry
|
||||||
# params.keys.timeperiod = 14 #keyword argumenty
|
# params.keys.timeperiod = 14 #keyword argumenty
|
||||||
|
|
||||||
#TA-lib takes positional arguments (especially the series), i.e. the series must be passed positionally
|
#TA-lib takes positional arguments (especially the series), i.e. the series must be passed positionally
|
||||||
|
# lookback is applied to all input series
|
||||||
# lookback is applied to all input series ?
|
|
||||||
|
|
||||||
|
|
||||||
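A minimal sketch of the call that talib_ind builds from such a config; the ATR example and the array values are illustrative and assume the TA-Lib Python package is installed.

import numpy as np
import talib

params = {"series": ["high", "low", "close"], "keys": {"timeperiod": 3}}

series_map = {
    "high":  np.array([11.0, 12.0, 13.0, 14.0]),
    "low":   np.array([ 9.0, 10.0, 11.0, 12.0]),
    "close": np.array([10.0, 11.0, 12.0, 13.0]),
}

series_list = [series_map[name] for name in params["series"]]   # positional series inputs
key_args = dict(params["keys"])                                  # keyword arguments
atr = talib.ATR(*series_list, **key_args)                        # returns an array; the last value is used
print(atr[-1])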
#IMPLEMENTS use of any indicator from the TA-lib library
|
#IMPLEMENTS use of any indicator from the TA-lib library
|
||||||
def talib_ind(state, params, name, returns):
|
def talib_ind(state, params, name, returns):
|
||||||
funcName = "ma"
|
funcName = "talib_ind"
|
||||||
type = safe_get(params, "type", "SMA")
|
type = safe_get(params, "type", None)
|
||||||
|
if type is None:
|
||||||
|
return -2, "type is required"
|
||||||
#ßsource = safe_get(params, "source", None)
|
#ßsource = safe_get(params, "source", None)
|
||||||
lookback = safe_get(params, "lookback",None) #overall lookback applied to all input series
|
lookback = safe_get(params, "lookback",None) #overall lookback applied to all input series
|
||||||
|
if lookback is not None:
|
||||||
|
#lookback can be a reference to an indicator, in which case we take its value
|
||||||
|
lookback = int(value_or_indicator(state, lookback))
|
||||||
|
|
||||||
start = safe_get(params, "start","linear") #linear/sharp
|
start = safe_get(params, "start","linear") #linear/sharp
|
||||||
defval = safe_get(params, "defval",0)
|
defval = safe_get(params, "defval",0)
|
||||||
|
|
||||||
params = safe_get(params, "params", dict(series=[], keys=[]))
|
params = safe_get(params, "params", dict(series=[], keys=[]))
|
||||||
#lookback muze byt odkaz na indikator, pak berem jeho hodnotu
|
defval = float(value_or_indicator(state, defval))
|
||||||
lookback = int(value_or_indicator(state, lookback))
|
|
||||||
defval = int(value_or_indicator(state, defval))
|
|
||||||
|
|
||||||
|
|
||||||
#TODO finish caching, i.e. initialize only once (the list is linked); however, on every iteration we still have to convert to numpy
|
#TODO finish caching, i.e. initialize only once (the list is linked); however, on every iteration we still have to convert to numpy
|
||||||
#NOTE when val is an indicator, make sure it keeps being read from the indicator even after initialization (solve only if it becomes necessary)
|
#NOTE when val is an indicator, make sure it keeps being read from the indicator even after initialization (solve only if it becomes necessary)
|
||||||
@ -55,7 +49,7 @@ def talib_ind(state, params, name, returns):
|
|||||||
if akt_pocet < lookback and start == "linear":
|
if akt_pocet < lookback and start == "linear":
|
||||||
lookback = akt_pocet
|
lookback = akt_pocet
|
||||||
|
|
||||||
series_list.append(np.array(source_series[-lookback:] if lookback is not None else source_series))
|
series_list.append(np.array(source_series[-lookback:] if lookback is not None else source_series, dtype=np.float64))
|
||||||
|
|
||||||
for key, val in params.get("keys",{}).items():
|
for key, val in params.get("keys",{}).items():
|
||||||
keyArgs[key] = int(value_or_indicator(state, val))
|
keyArgs[key] = int(value_or_indicator(state, val))
|
||||||
@ -65,13 +59,28 @@ def talib_ind(state, params, name, returns):
|
|||||||
|
|
||||||
ma_value = talib_function(*series_list, **keyArgs)
|
ma_value = talib_function(*series_list, **keyArgs)
|
||||||
|
|
||||||
if not np.isfinite(ma_value[-1]):
|
#multioutput: we get a tuple and convert it to a list (the order matches returns)
|
||||||
val = defval
|
#TODO incorporate defval and isfinite handling here
|
||||||
|
if isinstance(ma_value, tuple):
|
||||||
|
ma_value = list(ma_value)
|
||||||
|
for index, res in enumerate(ma_value):
|
||||||
|
if not np.isfinite(res[-1]):
|
||||||
|
ma_value[index] = defval
|
||||||
|
else:
|
||||||
|
ma_value[index] = round(res[-1],5)
|
||||||
|
|
||||||
|
if res[-1] == 0:
|
||||||
|
ma_value[index] = defval
|
||||||
|
val = ma_value
|
||||||
|
#single output
|
||||||
else:
|
else:
|
||||||
val = round(ma_value[-1],4)
|
if not np.isfinite(ma_value[-1]):
|
||||||
|
val = defval
|
||||||
|
else:
|
||||||
|
val = round(ma_value[-1],4)
|
||||||
|
|
||||||
if val == 0:
|
if val == 0:
|
||||||
val = defval
|
val = defval
|
||||||
|
|
||||||
state.ilog(lvl=1,e=f"INSIDE {name}:{funcName} {val} {type=} {lookback=}", **params)
|
state.ilog(lvl=1,e=f"INSIDE {name}:{funcName} {str(val)} {type=} {lookback=}", **params)
|
||||||
return 0, val
|
return 0, val
|
||||||
v2realbot/strategyblocks/indicators/custom/tulipy_ind.py (new file, 100 lines)
@ -0,0 +1,100 @@
|
|||||||
|
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists
|
||||||
|
from v2realbot.strategy.base import StrategyState
|
||||||
|
import v2realbot.indicators.moving_averages as mi
|
||||||
|
from v2realbot.strategyblocks.indicators.helpers import get_source_series
|
||||||
|
from rich import print as printanyway
|
||||||
|
from traceback import format_exc
|
||||||
|
import numpy as np
|
||||||
|
from collections import defaultdict
|
||||||
|
from v2realbot.strategyblocks.indicators.helpers import value_or_indicator
|
||||||
|
# from talib import BBANDS, MACD, RSI, MA_Type
|
||||||
|
import tulipy
|
||||||
|
|
||||||
|
#NOTE if an Exception is raised by a plugin, the system stores the previous value of the indicator
|
||||||
|
#this plugin instead returns a configurable default value when an exception happens
|
||||||
|
#this overrides the default behaviour, where custom_hub stores the indicator's previous value as the next value whenever the plugin raises an exception
|
||||||
|
|
||||||
|
#IMPLEMENTS use of any indicator from the tulipy library
|
||||||
|
def tulipy_ind(state, params, name, returns):
|
||||||
|
funcName = "tulipy_ind"
|
||||||
|
type = safe_get(params, "type", None)
|
||||||
|
if type is None:
|
||||||
|
return -2, "type is required"
|
||||||
|
#ßsource = safe_get(params, "source", None)
|
||||||
|
lookback = safe_get(params, "lookback",None) #overall lookback applied to all input series
|
||||||
|
if lookback is not None:
|
||||||
|
#lookback can be a reference to an indicator, in which case we take its value
|
||||||
|
lookback = int(value_or_indicator(state, lookback))
|
||||||
|
|
||||||
|
start = safe_get(params, "start","sharp") #linear/sharp
|
||||||
|
defval = safe_get(params, "defval",0)
|
||||||
|
|
||||||
|
params = safe_get(params, "params", dict(series=[], keys=[]))
|
||||||
|
defval = float(value_or_indicator(state, defval))
|
||||||
|
|
||||||
|
try:
|
||||||
|
#TODO finish caching, i.e. initialize only once (the list is linked); however, on every iteration we still have to convert to numpy
|
||||||
|
#NOTE when val is an indicator, make sure it keeps being read from the indicator even after initialization (solve only if it becomes necessary)
|
||||||
|
#NOTE clarify lookback: is it applied to all series before the function call, or when?
|
||||||
|
series_list = []
|
||||||
|
keyArgs = {}
|
||||||
|
for index, item in enumerate(params.get("series",[])):
|
||||||
|
source_series = get_source_series(state, item)
|
||||||
|
#adjust lookback if there are not enough values (checking the first series is enough - they are all the same)
|
||||||
|
#REMOVED: we no longer adjust the lookback here, only the options below
|
||||||
|
# if index == 0 and lookback is not None:
|
||||||
|
# akt_pocet = len(source_series)
|
||||||
|
# if akt_pocet < lookback and start == "linear":
|
||||||
|
# lookback = akt_pocet
|
||||||
|
|
||||||
|
#the same applies if we have some options
|
||||||
|
|
||||||
|
series_list.append(np.array(source_series[-lookback:] if lookback is not None else source_series, dtype=np.float64))
|
||||||
|
|
||||||
|
for key, val in params.get("keys",{}).items():
|
||||||
|
keyArgs[key] = int(value_or_indicator(state, val))
|
||||||
|
|
||||||
|
#if there are options with period or timeperiod and start is set to linear, we shorten the period for a smoother warm-up
|
||||||
|
#for now we compare against the first series - we assume they are all from the same group
|
||||||
|
#for now, with linear start we always use the current count - 1 (e.g. RSI needs an extra datapoint)
|
||||||
|
if key in ["period", "timeperiod"]:
|
||||||
|
akt_pocet = len(series_list[0])
|
||||||
|
if akt_pocet < keyArgs[key] and start == "linear" and akt_pocet != 1:
|
||||||
|
keyArgs[key] = akt_pocet - 1
|
||||||
|
printanyway(f"zkracujeme na rozjezd celkem v serii {akt_pocet} nastavujeme period na {keyArgs[key]}")
|
||||||
|
|
||||||
|
type = "tulipy."+type
|
||||||
|
talib_function = eval(type)
|
||||||
|
|
||||||
|
ma_value = talib_function(*series_list, **keyArgs)
|
||||||
|
|
||||||
|
#multioutput: we get a tuple and convert it to a list (the order matches returns)
|
||||||
|
#TODO incorporate defval and isfinite handling here
|
||||||
|
if isinstance(ma_value, tuple):
|
||||||
|
ma_value = list(ma_value)
|
||||||
|
for index, res in enumerate(ma_value):
|
||||||
|
if not np.isfinite(res[-1]):
|
||||||
|
ma_value[index] = defval
|
||||||
|
else:
|
||||||
|
ma_value[index] = round(res[-1],5)
|
||||||
|
|
||||||
|
if res[-1] == 0:
|
||||||
|
ma_value[index] = defval
|
||||||
|
val = ma_value
|
||||||
|
#single output
|
||||||
|
else:
|
||||||
|
if not np.isfinite(ma_value[-1]):
|
||||||
|
val = defval
|
||||||
|
else:
|
||||||
|
val = round(ma_value[-1],4)
|
||||||
|
|
||||||
|
if val == 0:
|
||||||
|
val = defval
|
||||||
|
|
||||||
|
state.ilog(lvl=1,e=f"INSIDE {name}:{funcName} {str(val)} {type=} {lookback=}", **params)
|
||||||
|
#on Exception we return the default value (if we re-raised the Exception up to custom_hub one level above, it would use the last value instead)
|
||||||
|
except Exception as e:
|
||||||
|
state.ilog(lvl=1,e=f"IND ERROR {name} {funcName} vracime default {defval}", message=str(e)+format_exc())
|
||||||
|
val = defval if defval is not None else 0
|
||||||
|
finally:
|
||||||
|
return 0, val
|
||||||
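A minimal sketch of what the eval("tulipy."+type) call above resolves to for type = "sma" with params.keys.period = 3; the input values are illustrative and assume the tulipy package is installed.

import numpy as np
import tulipy

close = np.array([10.0, 11.0, 12.0, 13.0], dtype=np.float64)
out = tulipy.sma(close, period=3)   # tulipy drops the warm-up values, so the array is shorter
print(out[-1])                      # last value, which the wrapper rounds and returns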
@ -7,6 +7,7 @@ from traceback import format_exc
|
|||||||
import importlib
|
import importlib
|
||||||
import v2realbot.strategyblocks.indicators.custom as ci
|
import v2realbot.strategyblocks.indicators.custom as ci
|
||||||
from v2realbot.strategyblocks.indicators.helpers import find_index_optimized
|
from v2realbot.strategyblocks.indicators.helpers import find_index_optimized
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
|
|
||||||
def populate_dynamic_custom_indicator(data, state: StrategyState, name):
|
def populate_dynamic_custom_indicator(data, state: StrategyState, name):
|
||||||
@ -172,6 +173,9 @@ def populate_dynamic_custom_indicator(data, state: StrategyState, name):
|
|||||||
ret_val = dict(zip(returns, ret_val))
|
ret_val = dict(zip(returns, ret_val))
|
||||||
#if it is something other than a dict (float, int, ...), it is the original single output; turn it into a dict with the main name as key
|
#if it is something other than a dict (float, int, ...), it is the original single output; turn it into a dict with the main name as key
|
||||||
elif not isinstance(ret_val, dict):
|
elif not isinstance(ret_val, dict):
|
||||||
|
#check whether it is a numpy type (e.g. the last element taken from a numpy array) and convert it to a basic Python type
|
||||||
|
if isinstance(ret_val, (np.ndarray, np.generic)):
|
||||||
|
ret_val = ret_val.item()
|
||||||
ret_val = {name: ret_val}
|
ret_val = {name: ret_val}
|
||||||
#in all other cases we assume it is already a dict
|
#in all other cases we assume it is already a dict
|
||||||
|
|
||||||
|
|||||||
@ -28,6 +28,7 @@ def find_index_optimized(time_list, seconds):
|
|||||||
#depending on how it proves itself with the basic indicators, do the same with state
|
#depending on how it proves itself with the basic indicators, do the same with state
|
||||||
#so far I am not too convinced by it
|
#so far I am not too convinced by it
|
||||||
|
|
||||||
|
#returns a scalar or the last value of an indicator
|
||||||
def value_or_indicator(state,value):
|
def value_or_indicator(state,value):
|
||||||
#interpret the directive by type: if it is an int or a float, it is the value itself
|
#interpret the directive by type: if it is an int or a float, it is the value itself
|
||||||
#if it is a str, it refers to an indicator and we fetch its last value
|
#if it is a str, it refers to an indicator and we fetch its last value
|
||||||
@ -45,7 +46,28 @@ def value_or_indicator(state,value):
|
|||||||
ret = 0
|
ret = 0
|
||||||
state.ilog(lvl=1,e=f"Neexistuje indikator s nazvem {value} vracime 0" + str(e) + format_exc())
|
state.ilog(lvl=1,e=f"Neexistuje indikator s nazvem {value} vracime 0" + str(e) + format_exc())
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
|
#returns a scalar, or, if it refers to an indicator, the whole list of indicator values
|
||||||
|
def value_or_indicator_list(state,value):
|
||||||
|
#interpret the directive by type: if it is an int or a float, it is the value itself
|
||||||
|
#if it is a str, it refers to an indicator and we fetch its last value
|
||||||
|
if isinstance(value, (float, int)):
|
||||||
|
return value
|
||||||
|
elif isinstance(value, str):
|
||||||
|
try:
|
||||||
|
#if an MA exists for the indicator we take the MA, otherwise the indicator itself; if neither exists we take the bar
|
||||||
|
ret = get_source_or_MA(state, indicator=value)
|
||||||
|
lvl = 0
|
||||||
|
#TODO remove this len after verification
|
||||||
|
delka = len(ret)
|
||||||
|
if delka == 0:
|
||||||
|
lvl = 1
|
||||||
|
state.ilog(lvl=1,e=f"Pro porovnani bereme cely list s delkou {delka} z indikatoru {value}")
|
||||||
|
except Exception as e :
|
||||||
|
ret = 0
|
||||||
|
state.ilog(lvl=1,e=f"Neexistuje indikator s nazvem {value} vracime 0" + str(e) + format_exc())
|
||||||
|
return ret
|
||||||
|
|
||||||
#OPTIMIZED BY CHATGPT
|
#OPTIMIZED BY CHATGPT
|
||||||
#the function builds the conditions (either for AND or OR) from the working dict
|
#the function builds the conditions (either for AND or OR) from the working dict
|
||||||
def evaluate_directive_conditions(state, work_dict, cond_type):
|
def evaluate_directive_conditions(state, work_dict, cond_type):
|
||||||
@ -67,9 +89,9 @@ def evaluate_directive_conditions(state, work_dict, cond_type):
|
|||||||
"risingc": lambda ind, val: isrisingc(get_source_or_MA(state, ind), val),
|
"risingc": lambda ind, val: isrisingc(get_source_or_MA(state, ind), val),
|
||||||
"falling": lambda ind, val: isfalling(get_source_or_MA(state, ind), val),
|
"falling": lambda ind, val: isfalling(get_source_or_MA(state, ind), val),
|
||||||
"rising": lambda ind, val: isrising(get_source_or_MA(state, ind), val),
|
"rising": lambda ind, val: isrising(get_source_or_MA(state, ind), val),
|
||||||
"crossed_down": lambda ind, val: buy_if_crossed_down(state, ind, value_or_indicator(state,val)),
|
"crossed_down": lambda ind, val: buy_if_crossed_down(state, ind, value_or_indicator_list(state,val)),
|
||||||
"crossed_up": lambda ind, val: buy_if_crossed_up(state, ind, value_or_indicator(state,val)),
|
"crossed_up": lambda ind, val: buy_if_crossed_up(state, ind, value_or_indicator_list(state,val)),
|
||||||
"crossed": lambda ind, val: buy_if_crossed_down(state, ind, value_or_indicator(state,val)) or buy_if_crossed_up(state, ind, value_or_indicator(state,val)),
|
"crossed": lambda ind, val: buy_if_crossed_down(state, ind, value_or_indicator_list(state,val)) or buy_if_crossed_up(state, ind, value_or_indicator_list(state,val)),
|
||||||
"pivot_a": lambda ind, val: is_pivot(source=get_source_or_MA(state, ind), leg_number=val, type="A"),
|
"pivot_a": lambda ind, val: is_pivot(source=get_source_or_MA(state, ind), leg_number=val, type="A"),
|
||||||
"pivot_v": lambda ind, val: is_pivot(source=get_source_or_MA(state, ind), leg_number=val, type="V"),
|
"pivot_v": lambda ind, val: is_pivot(source=get_source_or_MA(state, ind), leg_number=val, type="V"),
|
||||||
"still_for": lambda ind, val: is_still(get_source_or_MA(state, ind), val, 2),
|
"still_for": lambda ind, val: is_still(get_source_or_MA(state, ind), val, 2),
|
||||||
@ -129,13 +151,13 @@ def get_source_series(state, source: str, numpy: bool = False):
|
|||||||
#THESE should probably move to utils
|
#THESE should probably move to utils
|
||||||
#returns true if the given indicator crossed down through the threshold
|
#returns true if the given indicator crossed down through the threshold
|
||||||
def buy_if_crossed_down(state, indicator, value):
|
def buy_if_crossed_down(state, indicator, value):
|
||||||
res = crossed_down(threshold=value, list=get_source_or_MA(state, indicator))
|
res = crossed_down(value=value, primary_line=get_source_or_MA(state, indicator))
|
||||||
#state.ilog(lvl=0,e=f"signal_if_crossed_down {indicator} {value} {res}")
|
#state.ilog(lvl=0,e=f"signal_if_crossed_down {indicator} {value} {res}")
|
||||||
return res
|
return res
|
||||||
|
|
||||||
#returns true if the given indicator crossed up through the threshold
|
#returns true if the given indicator crossed up through the threshold
|
||||||
def buy_if_crossed_up(state, indicator, value):
|
def buy_if_crossed_up(state, indicator, value):
|
||||||
res = crossed_up(threshold=value, list=get_source_or_MA(state, indicator))
|
res = crossed_up(value=value, primary_line=get_source_or_MA(state, indicator))
|
||||||
#state.ilog(lvl=0,e=f"signal_if_crossed_up {indicator} {value} {res}")
|
#state.ilog(lvl=0,e=f"signal_if_crossed_up {indicator} {value} {res}")
|
||||||
return res
|
return res
|
||||||
|
|
||||||
@ -175,8 +175,162 @@ def is_pivot(source: list, leg_number: int, type: str = "A"):
|
|||||||
print("Unknown type")
|
print("Unknown type")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
#modified and extended with potentially more confirmation points
|
||||||
|
#the original version corresponds to confirm_points = 1
|
||||||
|
#https://chat.openai.com/c/0e614d96-6af4-40db-a6ec-a8c57ce481b8
|
||||||
|
# def crossed_up(threshold, list, confirm_points=2):
|
||||||
|
# """
|
||||||
|
# Check if the threshold has crossed up in the last few values in price_list.
|
||||||
|
# A crossover is confirmed if the threshold is below the earlier prices and then crosses above in the later prices.
|
||||||
|
# The number of confirmation points can be specified; the default is 2.
|
||||||
|
# """
|
||||||
|
# try:
|
||||||
|
# if len(list) < confirm_points * 2:
|
||||||
|
# # Not enough data to confirm crossover
|
||||||
|
# return False
|
||||||
|
|
||||||
def crossed_up(threshold, list):
|
# # Split the list into two parts for comparison
|
||||||
|
# earlier_prices = list[-confirm_points*2:-confirm_points]
|
||||||
|
# later_prices = list[-confirm_points:]
|
||||||
|
|
||||||
|
# # Check if threshold was below earlier prices and then crossed above
|
||||||
|
# was_below = all(threshold < price for price in earlier_prices)
|
||||||
|
# now_above = all(threshold >= price for price in later_prices)
|
||||||
|
|
||||||
|
# return was_below and now_above
|
||||||
|
|
||||||
|
# except IndexError:
|
||||||
|
# # In case of an IndexError, return False
|
||||||
|
# return False
|
||||||
|
|
||||||
|
#recent cross up of two arrays (price1 crossed up price2), fallback to standard
|
||||||
|
#inputs are numpy arrays
|
||||||
|
# def crossed_up_numpy(price1, price2):
|
||||||
|
# if price1.size < 2 or price2.size < 2:
|
||||||
|
# return False # Not enough data
|
||||||
|
|
||||||
|
# # Calculate slopes for the last two points
|
||||||
|
# x = np.array([price1.size - 2, price1.size - 1])
|
||||||
|
# slope1, intercept1 = np.polyfit(x, price1[-2:], 1)
|
||||||
|
# slope2, intercept2 = np.polyfit(x, price2[-2:], 1)
|
||||||
|
|
||||||
|
# # Check if lines are almost parallel
|
||||||
|
# if np.isclose(slope1, slope2):
|
||||||
|
# return False
|
||||||
|
|
||||||
|
# # Calculate intersection point
|
||||||
|
# x_intersect = (intercept2 - intercept1) / (slope1 - slope2)
|
||||||
|
# y_intersect = slope1 * x_intersect + intercept1
|
||||||
|
|
||||||
|
# # Check if the intersection occurred between the last two points
|
||||||
|
# if x[0] < x_intersect <= x[1]:
|
||||||
|
# # Check if line1 crossed up line2
|
||||||
|
# return price1[-1] > price2[-1] and price1[-2] <= price2[-2]
|
||||||
|
|
||||||
|
# return False
|
||||||
|
|
||||||
|
#same but more efficient approach
|
||||||
|
def crossed_up_numpy(price1, price2):
|
||||||
|
if price1.size < 2 or price2.size < 2:
|
||||||
|
return False # Not enough data
|
||||||
|
|
||||||
|
# Indices for the last two points
|
||||||
|
x1, x2 = price1.size - 2, price1.size - 1
|
||||||
|
|
||||||
|
# Direct calculation of slopes and intercepts
|
||||||
|
slope1 = (price1[-1] - price1[-2]) / (x2 - x1)
|
||||||
|
intercept1 = price1[-1] - slope1 * x2
|
||||||
|
slope2 = (price2[-1] - price2[-2]) / (x2 - x1)
|
||||||
|
intercept2 = price2[-1] - slope2 * x2
|
||||||
|
|
||||||
|
# Check if lines are almost parallel
|
||||||
|
if np.isclose(slope1, slope2):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Calculate intersection point (x-coordinate only)
|
||||||
|
if slope1 != slope2: # Avoid division by zero
|
||||||
|
x_intersect = (intercept2 - intercept1) / (slope1 - slope2)
|
||||||
|
|
||||||
|
# Check if the intersection occurred between the last two points
|
||||||
|
if x1 < x_intersect <= x2:
|
||||||
|
# Check if line1 crossed up line2
|
||||||
|
return price1[-1] > price2[-1] and price1[-2] <= price2[-2]
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
#recent cross up of two arrays (price1 crossed up price2), fallback to standard
|
||||||
|
#inputs are numpy arrays
|
||||||
|
# def crossed_down_numpy(price1, price2):
|
||||||
|
# if price1.size < 2 or price2.size < 2:
|
||||||
|
# return False # Not enough data
|
||||||
|
|
||||||
|
# # Calculate slopes for the last two points
|
||||||
|
# x = np.array([price1.size - 2, price1.size - 1])
|
||||||
|
# slope1, intercept1 = np.polyfit(x, price1[-2:], 1)
|
||||||
|
# slope2, intercept2 = np.polyfit(x, price2[-2:], 1)
|
||||||
|
|
||||||
|
# # Check if lines are almost parallel
|
||||||
|
# if np.isclose(slope1, slope2):
|
||||||
|
# return False
|
||||||
|
|
||||||
|
# # Calculate intersection point
|
||||||
|
# x_intersect = (intercept2 - intercept1) / (slope1 - slope2)
|
||||||
|
# y_intersect = slope1 * x_intersect + intercept1
|
||||||
|
|
||||||
|
# # Check if the intersection occurred between the last two points
|
||||||
|
# if x[0] < x_intersect <= x[1]:
|
||||||
|
# # Check if line1 crossed down line2
|
||||||
|
# return price1[-1] < price2[-1] and price1[-2] >= price2[-2]
|
||||||
|
|
||||||
|
# return False
|
||||||
|
|
||||||
|
#more efficient yet same, price1 - faster, price2 - slower
|
||||||
|
def crossed_down_numpy(price1, price2):
|
||||||
|
if price1.size < 2 or price2.size < 2:
|
||||||
|
return False # Not enough data
|
||||||
|
|
||||||
|
# Indices for the last two points
|
||||||
|
x1, x2 = price1.size - 2, price1.size - 1
|
||||||
|
|
||||||
|
# Direct calculation of slopes and intercepts
|
||||||
|
slope1 = (price1[-1] - price1[-2]) / (x2 - x1)
|
||||||
|
intercept1 = price1[-1] - slope1 * x2
|
||||||
|
slope2 = (price2[-1] - price2[-2]) / (x2 - x1)
|
||||||
|
intercept2 = price2[-1] - slope2 * x2
|
||||||
|
|
||||||
|
# Check if lines are almost parallel
|
||||||
|
if np.isclose(slope1, slope2):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Calculate intersection point (x-coordinate only)
|
||||||
|
if slope1 != slope2: # Avoid division by zero
|
||||||
|
x_intersect = (intercept2 - intercept1) / (slope1 - slope2)
|
||||||
|
|
||||||
|
# Check if the intersection occurred between the last two points
|
||||||
|
if x1 < x_intersect <= x2:
|
||||||
|
# Check if line1 crossed down line2
|
||||||
|
return price1[-1] < price2[-1] and price1[-2] >= price2[-2]
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
#wrapper for a cross-up of a list against a threshold, or of a list against a second list
|
||||||
|
#value - scalar or list, primary_line - usually the faster series
|
||||||
|
def crossed_up(value, primary_line):
|
||||||
|
if isinstance(value, list):
|
||||||
|
return crossed_up_numpy(np.array(primary_line), np.array(value))
|
||||||
|
else:
|
||||||
|
return crossed_up_threshold(threshold=value, list=primary_line)
|
||||||
|
|
||||||
|
#wrapper for a cross-down of a list against a threshold, or of a list against a second list
|
||||||
|
#value - scalar or list, primary_line - usually the faster series
|
||||||
|
def crossed_down(value, primary_line):
|
||||||
|
if isinstance(value, list):
|
||||||
|
return crossed_down_numpy(np.array(primary_line), np.array(value))
|
||||||
|
else:
|
||||||
|
return crossed_down_threshold(threshold=value, list=primary_line)
|
||||||
|
|
||||||
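A minimal sketch of the two wrapper paths above (scalar threshold vs. second series); the values are made up and assume crossed_up from this module is in scope.

fast = [1.0, 2.0, 3.0]     # primary_line, e.g. a fast moving average
slow = [2.5, 2.5, 2.5]     # a second line to compare against

print(crossed_up(value=2.5, primary_line=fast))    # scalar -> crossed_up_threshold, True here
print(crossed_up(value=slow, primary_line=fast))   # list -> crossed_up_numpy on the last two points, True here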
|
def crossed_up_threshold(threshold, list):
|
||||||
"""check if threshold has crossed up last thresholdue in list"""
|
"""check if threshold has crossed up last thresholdue in list"""
|
||||||
try:
|
try:
|
||||||
if threshold < list[-1] and threshold > list[-2]:
|
if threshold < list[-1] and threshold > list[-2]:
|
||||||
@ -190,7 +344,7 @@ def crossed_up(threshold, list):
|
|||||||
except IndexError:
|
except IndexError:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def crossed_down(threshold, list):
|
def crossed_down_threshold(threshold, list):
|
||||||
"""check if threshold has crossed down last thresholdue in list"""
|
"""check if threshold has crossed down last thresholdue in list"""
|
||||||
"""
|
"""
|
||||||
Checks if a threshold has just crossed down a line represented by a list.
|
Checks if a threshold has just crossed down a line represented by a list.
|
||||||
@ -383,6 +537,7 @@ def json_serial(obj):
|
|||||||
datetime: lambda obj: obj.timestamp(),
|
datetime: lambda obj: obj.timestamp(),
|
||||||
UUID: lambda obj: str(obj),
|
UUID: lambda obj: str(obj),
|
||||||
Enum: lambda obj: str(obj),
|
Enum: lambda obj: str(obj),
|
||||||
|
np.int32: lambda obj: int(obj),
|
||||||
np.int64: lambda obj: int(obj),
|
np.int64: lambda obj: int(obj),
|
||||||
np.float64: lambda obj: float(obj),
|
np.float64: lambda obj: float(obj),
|
||||||
Order: lambda obj: obj.__dict__,
|
Order: lambda obj: obj.__dict__,
|
||||||
@ -445,7 +600,7 @@ qu = Queue()
|
|||||||
|
|
||||||
#zoneNY = tz.gettz('America/New_York')
|
#zoneNY = tz.gettz('America/New_York')
|
||||||
zoneNY = pytz.timezone('US/Eastern')
|
zoneNY = pytz.timezone('US/Eastern')
|
||||||
|
zoneUTC = pytz.utc
|
||||||
zonePRG = pytz.timezone('Europe/Amsterdam')
|
zonePRG = pytz.timezone('Europe/Amsterdam')
|
||||||
|
|
||||||
def print(*args, **kwargs):
|
def print(*args, **kwargs):
|
||||||
|
|||||||