other changes
testy/domfreq.py (new file, 62 lines)
@@ -0,0 +1,62 @@
import numpy as np
import matplotlib.pyplot as plt
from scipy.fft import fft

# Define the sampling frequency and time vector
fs = 500  # Sampling frequency
t = np.arange(0, 1, 1/fs)  # Time vector

# Define the frequencies
f1 = 5   # Frequency that occurs most often but with lower amplitude
f2 = 20  # Frequency with the highest amplitude

# Creating the individual signals
signal_f1 = 0.5 * np.sin(2 * np.pi * f1 * t)  # Signal with frequency f1
signal_f2 = 2 * np.sin(2 * np.pi * f2 * t)    # Signal with frequency f2

# Composite signal
signal = signal_f1 + signal_f2

# Performing a Fourier Transform
freq = np.fft.fftfreq(len(t), 1/fs)
fft_values = fft(signal)

# Plotting all the signals and the frequency spectrum
plt.figure(figsize=(14, 10))

# Plot 1: Composite Signal
plt.subplot(4, 1, 1)
plt.plot(t, signal)
plt.title('Composite Signal (f1 + f2)')
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')

# Plot 2: Frequency f1 Signal
plt.subplot(4, 1, 2)
plt.plot(t, signal_f1)
plt.title('Individual Frequency f1 Signal')
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')

# Plot 3: Frequency f2 Signal
plt.subplot(4, 1, 3)
plt.plot(t, signal_f2)
plt.title('Individual Frequency f2 Signal')
plt.xlabel('Time [s]')
plt.ylabel('Amplitude')

# Plot 4: Frequency Spectrum
plt.subplot(4, 1, 4)
plt.plot(freq, np.abs(fft_values))
plt.title('Frequency Spectrum of Composite Signal')
plt.xlabel('Frequency [Hz]')
plt.ylabel('Amplitude')
plt.xlim([0, 30])

# Highlighting the dominant frequencies in the spectrum
plt.axvline(x=f1, color='green', linestyle='--', label='Frequency f1')
plt.axvline(x=f2, color='red', linestyle='--', label='Frequency f2')

plt.legend()
plt.tight_layout()
plt.show()
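The script only visualizes the spectrum; the dominant frequency can also be read off programmatically. A minimal sketch reusing the variables above (positive half of the spectrum only):

half = len(freq) // 2
magnitudes = np.abs(fft_values[:half])
dominant_freq = freq[:half][np.argmax(magnitudes)]
print(dominant_freq)  # ~20 Hz here, since signal_f2 has the larger amplitude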
testy/getlastcalendardate.py (new file, 28 lines)
@@ -0,0 +1,28 @@
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest, StockBarsRequest
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
import datetime
import time
from alpaca.data import Quote, Trade, Snapshot, Bar
from alpaca.data.models import BarSet, QuoteSet, TradeSet
from alpaca.data.timeframe import TimeFrame
# import mplfinance as mpf
import pandas as pd
from rich import print
from v2realbot.utils.utils import zoneNY
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY
from alpaca.trading.requests import GetCalendarRequest
from alpaca.trading.client import TradingClient

parametry = {}

clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)

#get previous days bar

datetime_object_from = datetime.datetime(2023, 10, 11, 4, 0, 00, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 10, 16, 16, 1, 00, tzinfo=datetime.timezone.utc)
calendar_request = GetCalendarRequest(start=datetime_object_from, end=datetime_object_to)
cal_dates = clientTrading.get_calendar(calendar_request)
print(cal_dates)
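Each item in cal_dates is a Calendar session exposing date, open and close (the same fields the strategy init below relies on). A minimal sketch of reading the most recent session in the requested range, assuming the list is non-empty:

last_session = cal_dates[-1]
print(last_session.date, last_session.open, last_session.close)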
testy/histogramnumpy.py (new file, 34 lines)
@@ -0,0 +1,34 @@
import numpy as np

data = np.array([1,2,3,4,3,2,4,7,8,4,3,0,0,0,0,9,9,9,11,23,2,3,4,29,23])

counts, bin_edges = np.histogram(data, bins=4)
# returns a tuple containing two arrays:
# counts: An array containing the number of data points in each bin.
# bin_edges: An array containing the edges of each bin.
#for this data: (array([17, 5, 0, 3]), array([ 0., 7.25, 14.5, 21.75, 29.]))
print(counts, bin_edges)

edge_from = bin_edges[3]
edge_to = bin_edges[4]
print(edge_from)
print(edge_to)
print("test where", data[np.where((edge_from<data) & (data<edge_to))])

ctvrty_bin = [datum for datum in data if edge_from <= datum <= edge_to]

print(np.mean(ctvrty_bin))

#print(histo[0][-2])

bins = 4
mean_of_4th_bin = np.mean(data[np.where(np.histogram(data, bins)[1][3] <= data)[0]])
# print(mean_of_4th_bin)
# print(mean_of_fourth_bucket)

# Print the data from the 3rd bin using a list comprehension
#print([datum for datum in data if bin_edges[2] <= datum < bin_edges[3]])
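An alternative to slicing by edges is np.digitize, which assigns each value a bin index directly. A small sketch using data and bin_edges from above (interior edges only, so the last bin stays right-inclusive like np.histogram):

bin_idx = np.digitize(data, bin_edges[1:-1])   # bin index 0..3 for every value
fourth_bin_mean = data[bin_idx == 3].mean() if np.any(bin_idx == 3) else 0
print(fourth_bin_mean)   # same value as np.mean(ctvrty_bin) above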
testy/numpylistmutability.py (new file, 14 lines)
@@ -0,0 +1,14 @@
import numpy as np
from array import array

# Original array.array
puvodni = array('i', [1, 2, 3, 4])

# Create a NumPy array from the original array - np.array copies the data by default
numpied = np.array(puvodni)

# Because numpied is an independent copy, appending to puvodni does NOT update it
puvodni.append(5)

# Check the numpied array - it still holds the original four values
print(numpied)
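For contrast, a buffer-sharing view can be created with np.frombuffer; a minimal sketch assuming the 'i' typecode is 4 bytes (int32) on this platform:

src = array('i', [1, 2, 3, 4])
view = np.frombuffer(src, dtype=np.int32)  # shares src's memory, no copy
src[0] = 99                                # in-place change (no resize)
print(view)                                # [99  2  3  4]
# Note: resizing src (e.g. append) is blocked while the buffer is exported.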
testy/picklequeue.py (new file, 25 lines)
@@ -0,0 +1,25 @@
import queue
from collections import deque

import msgpack

# Creating the original queue
original_queue = queue.Queue()
new_queue = queue.Queue()

# Adding elements to the original queue
original_queue.put(5)
original_queue.put(10)
original_queue.put(15)

# Serializing the queue with msgpack: a queue.Queue itself is not
# serializable (it holds locks), so we pack its underlying items as a list
packed_items = msgpack.packb(list(original_queue.queue))

# Deserializing the items
unpacked_items = msgpack.unpackb(packed_items)

# Rebuilding a queue from the deserialized items
new_queue.queue = deque(unpacked_items)

print(new_queue)

# Checking the contents of the new queue
while not new_queue.empty():
    print(new_queue.get())
@@ -16,6 +16,10 @@ from v2realbot.strategyblocks.newtrade.signals import signal_search
 from v2realbot.strategyblocks.activetrade.activetrade_hub import manage_active_trade
 from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
 from v2realbot.strategyblocks.inits.init_directives import intialize_directive_conditions
+from alpaca.trading.requests import GetCalendarRequest
+from alpaca.trading.client import TradingClient
+from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE
+from alpaca.trading.models import Calendar
 
 print(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 """"
@@ -141,11 +145,42 @@ def init(state: StrategyState):
 
     #TBD: also add hour data - for computing RSI on the hourly timeframe
-    #get 30 days (time_from must be at least -2 so that yesterday is included too)
-    time_from = time_to - timedelta(days=40)
-    time_to = time_to - timedelta(days=1)
-    state.dailyBars = get_historical_bars(state.symbol, time_from, time_to, TimeFrame.Day)
-    #printanyway("daily bars FILLED", state.dailyBars)
+    #get 30 days (history_datetime_from must be at least -2 so that yesterday is included too)
+    #history_datetime_from = time_to - timedelta(days=40)
+    #get previous market day
+    #time_to = time_to - timedelta(days=1)
+
+    #time_to = time_to.date()
+
+    #calculation of the last market day - move into a separate function get_previous_market_day(today)
+    #time_to = time_to.date()
+
+    today = time_to.date()
+    several_days_ago = today - timedelta(days=40)
+    printanyway(f"{today=}", f"{several_days_ago=}")
+    clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)
+    #get all market days from here to 40days ago
+    calendar_request = GetCalendarRequest(start=several_days_ago, end=today)
+    cal_dates = clientTrading.get_calendar(calendar_request)
+
+    #find the first market day - 40days ago
+    #history_datetime_from = zoneNY.localize(cal_dates[0].open)
+    history_datetime_from = cal_dates[0].open
+
+    # Find the previous market day
+    history_datetime_to = None
+    for session in reversed(cal_dates):
+        if session.date < today:
+            #history_datetime_to = zoneNY.localize(session.close)
+            history_datetime_to = session.close
+            break
+    printanyway("Previous Market Day Close:", history_datetime_to)
+    printanyway("Market day 40days ago Open:", history_datetime_from)
+
+    printanyway(history_datetime_from, history_datetime_to)
+    #up to the previous market day
+    state.dailyBars = get_historical_bars(state.symbol, history_datetime_from, history_datetime_to, TimeFrame.Day)
+    printanyway("daily bars FILLED", state.dailyBars)
     #for now stored in extData - for instant indicators and the gui
     state.extData["dailyBars"] = state.dailyBars
 
@@ -31,13 +31,14 @@ import threading
 class Cacher:
     def __init__(self,
-                 rectype: RecordType = RecordType.BAR,
+                 rectype: RecordType,
                  resolution: int = 5,
                  minsize: int = 100,
                  update_ltp: bool = False,
                  align: StartBarAlign = StartBarAlign.ROUND,
                  mintick: int = 0,
                  exthours: bool = False):
 
 #takes a list of aggregators as input - usually 1 for the queue, 1 for the backtest engine
 def get_cached_agg_data(agg_list, open, close):
     file_path = DATA_DIR + "/cache/"+populate_file_name(agg_list[0], open, close)
@@ -297,6 +297,8 @@
 <button id="button_compare_arch" class="refresh btn btn-outline-success btn-sm">Compare</button>
 <button id="button_runagain_arch" class="refresh btn btn-outline-success btn-sm">Run Again(r)</button>
 <button id="button_selpage" class="btn btn-outline-success btn-sm">Select all</button>
+<button id="button_export_xml" class="btn btn-outline-success btn-sm">Export xml</button>
+<button id="button_export_csv" class="btn btn-outline-success btn-sm">Export csv</button>
 <!-- <button id="button_stopall" class="btn btn-outline-success btn-sm">Stop All</button>
 <button id="button_refresh" class="btn btn-outline-success btn-sm">Refresh</button> -->
 </div>
@@ -40,9 +40,101 @@ function refresh_arch_and_callback(row, callback) {
     });
 }
 
+//type ("text/csv","application/xml"), filetype (csv), filename
+function downloadFile(type, filetype, filename, content) {
+    var blob = new Blob([content], { type: type });
+    var url = window.URL.createObjectURL(blob);
+    var link = document.createElement("a");
+    link.href = url;
+    link.download = filename + "." + filetype;
+    link.click();
+}
+
+// Function to convert a JavaScript object to XML
+function convertToXml(data) {
+    var xml = '<?xml version="1.0" encoding="UTF-8"?>\n<trades>\n';
+    data.forEach(function (item) {
+        xml += ' <trade>\n';
+        Object.keys(item).forEach(function (key) {
+            xml += ' <' + key + '>' + item[key] + '</' + key + '>\n';
+        });
+        xml += ' </trade>\n';
+    });
+    xml += '</trades>';
+    return xml;
+}
+
+// Function to convert a JavaScript object to CSV
+function convertToCsv(data) {
+    var csv = '';
+    // Get the headers
+    var headers = Object.keys(data[0]);
+    csv += headers.join(',') + '\n';
+
+    // Iterate over the data
+    data.forEach(function (item) {
+        var row = headers.map(function (header) {
+            return item[header];
+        });
+        csv += row.join(',') + '\n';
+    });
+
+    return csv;
+}
+
+function prepare_export() {
+    rows = archiveRecords.rows('.selected');
+    var trdList = []
+    if (rows.data().length > 0) {
+        //console.log(rows.data())
+        // Loop through the selected rows and display an alert with each row's ID
+        rows.every(function (rowIdx, tableLoop, rowLoop) {
+            var data = this.data()
+            data.metrics.prescr_trades.forEach((trade) => {
+                new_obj = {}
+                new_obj["entry_time"] = (trade.entry_time) ? new Date(trade.entry_time * 1000) : null
+                new_obj["entry_time"] = (new_obj["entry_time"]) ? new_obj["entry_time"].toLocaleString('cs-CZ', {
+                    timeZone: 'America/New_York',
+                }) : null
+                new_obj["exit_time"] = (trade.exit_time) ? new Date(trade.exit_time * 1000) : null
+                new_obj["exit_time"] = (new_obj["exit_time"]) ? new_obj["exit_time"].toLocaleString('cs-CZ', {
+                    timeZone: 'America/New_York',
+                }) : null
+                new_obj["direction"] = trade.direction
+                new_obj["profit"] = trade.profit
+                new_obj["rel_profit"] = trade.rel_profit
+                trdList.push(new_obj)
+            })
+        });
+    }
+    return trdList
+}
+
 $(document).ready(function () {
     archiveRecords.ajax.reload();
 
+    //button export
+    $('#button_export_xml').click(function () {
+        xmled = convertToXml(prepare_export())
+        console.log(xmled)
+        //csved = convertToCsv(trdList)
+        //console.log(csved)
+        //type ("text/csv","application/xml"), filetype (csv), filename, content
+        //downloadFile("text/csv","csv", "trades", csved)
+        downloadFile("application/xml","xml", "trades", xmled)
+        //console.log(jsonobj)
+        //downloadCsv(jsonobj);
+    });
+
+    //button export
+    $('#button_export_csv').click(function () {
+        csved = convertToCsv(prepare_export())
+        console.log(csved)
+        //type ("text/csv","application/xml"), filetype (csv), filename, content
+        downloadFile("text/csv","csv", "trades", csved)
+        //console.log(jsonobj)
+        //downloadCsv(jsonobj);
+    });
+
     //button select page
     $('#button_selpage').click(function () {
@@ -161,23 +161,32 @@ def get_profit_target_price(state, data, direction: TradeDirection):
 
     #the directive holds ticks
     if isinstance(def_profit, (float, int)):
-        normalized_def_profit = normalize_tick(state, data, float(def_profit))
-        state.ilog(lvl=0,e=f"PROFIT {def_profit=} {normalized_def_profit=}")
-        base_price = state.avgp if state.avgp != 0 else data["close"]
-        to_return = price2dec(float(base_price)+normalized_def_profit,3) if direction == TradeDirection.LONG else price2dec(float(base_price)-normalized_def_profit,3)
+        to_return = get_normalized_profitprice_from_tick(state, data, def_profit, direction)
     #the directive holds an indicator
     elif isinstance(def_profit, str):
         to_return = float(value_or_indicator(state, def_profit))
-        if direction == TradeDirection.LONG and to_return < data['close'] or direction == TradeDirection.SHORT and to_return > data['close']:
-            state.ilog(lvl=1,e=f"SPATNA HODOTA DOTAZENEHO PROFITU z ind {def_profit} {to_return=} {smer} {data['close']}")
-            raise Exception(f"SPATNA HODOTA DOTAZENEHO PROFITU z ind{def_profit} {to_return=} {smer} {data['close']}")
-        state.ilog(lvl=1,e=f"DOTAZENY PROFIT z indikatoru {def_profit} {to_return=}")
+        #min profit (protection against extreme indicator values)
+        directive_name = 'profit_min_ind_tick_value'
+        profit_min_ind_tick_value = get_override_for_active_trade(state, directive_name=directive_name, default_value=def_profit_both_directions)
+        profit_min_ind_price_value = get_normalized_profitprice_from_tick(state, data, profit_min_ind_tick_value, direction)
+
+        #protection when the profit is set too low
+        if direction == TradeDirection.LONG and to_return < profit_min_ind_price_value or direction == TradeDirection.SHORT and to_return > profit_min_ind_price_value:
+            state.ilog(lvl=1,e=f"SPATNA HODOTA DOTAZENEHO PROFITU z ind {def_profit} {to_return=} MINIMUM:{profit_min_ind_price_value} {smer} {data['close']}")
+            #fallback to profit_min_ind_price_value
+            to_return = profit_min_ind_price_value
+        state.ilog(lvl=1,e=f"PROFIT z indikatoru {def_profit} {to_return=}")
     return to_return
+
+##based on tick and direction, returns the normalized profit price (LONG = avgp (or currprice) + norm.tick, SHORT = avgp (or currprice) - norm.tick)
+def get_normalized_profitprice_from_tick(state, data, tick, direction: TradeDirection):
+    normalized_tick = normalize_tick(state, data, float(tick))
+    base_price = state.avgp if state.avgp != 0 else data["close"]
+    returned_price = price2dec(float(base_price)+normalized_tick,3) if direction == TradeDirection.LONG else price2dec(float(base_price)-normalized_tick,3)
+    state.ilog(lvl=0,e=f"NORMALIZED TICK {tick=} {normalized_tick=} NORM.PRICE {returned_price}")
+    return returned_price
+
 def get_max_profit_price(state, data, direction: TradeDirection):
     if direction == TradeDirection.LONG:
         smer = "long"
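For intuition, the new helper boils down to adding or subtracting the normalized tick from the base price; a rough illustration with hypothetical numbers (the real code goes through normalize_tick and price2dec):

base_price = 100.00       # state.avgp, or data["close"] when there is no position
normalized_tick = 0.05    # assumed output of normalize_tick(state, data, tick)
long_target = round(base_price + normalized_tick, 3)   # 100.05 for TradeDirection.LONG
short_target = round(base_price - normalized_tick, 3)  # 99.95 for TradeDirection.SHORT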
@@ -4,6 +4,7 @@ from v2realbot.indicators.indicators import ema, natr, roc
 from v2realbot.indicators.oscillators import rsi
 from traceback import format_exc
 from v2realbot.strategyblocks.indicators.helpers import get_source_series, value_or_indicator
+import numpy as np
 
 #RSI INDICATOR
 # type = RSI, source = [close, vwap, hlcc4], rsi_length = [14], MA_length = int (optional), on_confirmed_only = [true, false]
@@ -39,7 +40,8 @@ def populate_dynamic_RSI_indicator(data, state: StrategyState, name):
     rsi_length = delka
 
     rsi_res = rsi(source, rsi_length)
-    rsi_value = round(rsi_res[-1],4)
+    val = rsi_res[-1] if np.isfinite(rsi_res[-1]) else 0
+    rsi_value = round(val,4)
     state.indicators[name][-1]=rsi_value
     state.ilog(lvl=0,e=f"IND {name} RSI {rsi_value}")
@@ -3,7 +3,8 @@ from v2realbot.indicators.oscillators import rsi
 from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists
 from v2realbot.strategy.base import StrategyState
 from traceback import format_exc
+from v2realbot.strategyblocks.indicators.helpers import get_source_series
+from collections import defaultdict
 #TODO ATR INDICATOR - rework as CUSTOM and add addition/subtraction from close (atru, atrd)
 # type = ATR, length = [14], on_confirmed_only = [true, false]
 def populate_dynamic_atr_indicator(data, state: StrategyState, name):
@@ -16,17 +17,28 @@ def populate_dynamic_atr_indicator(data, state: StrategyState, name):
     #run on every tick or only on a confirmed bar (on_confirmed_only = true)
     on_confirmed_only = safe_get(options, 'on_confirmed_only', False)
     atr_length = int(safe_get(options, "length",5))
+
+    # priceline with high/low/close (bars/daily bars)
+
+    #TODO I stopped here - finish this
+    source = safe_get(options, "source", "bars")
+    source_dict = eval(f"state.{source}")
+
     if on_confirmed_only is False or (on_confirmed_only is True and data['confirmed']==1):
         try:
-            source_high = state.bars["high"][-atr_length:]
-            source_low = state.bars["low"][-atr_length:]
-            source_close = state.bars["close"][-atr_length:]
+            delka_close = len(source_dict["close"])
+            if atr_length > delka_close:
+                atr_length = delka_close
+
+            source_high = source_dict["high"][-atr_length:]
+            source_low = source_dict["low"][-atr_length:]
+            source_close = source_dict["close"][-atr_length:]
             #if len(source) > ema_length:
             atr_value = atr(source_high, source_low, source_close, atr_length)
             val = round(atr_value[-1],4)
             state.indicators[name][-1]= val
             #state.indicators[name][-1]= round2five(val)
-            state.ilog(lvl=0,e=f"IND {name} ATR {val} {atr_length=}")
+            state.ilog(lvl=0,e=f"IND {name} on {source} ATR {val} {atr_length=}")
             #else:
             # state.ilog(lvl=0,e=f"IND {name} EMA necháváme 0", message="not enough source data", source=source, ema_length=ema_length)
         except Exception as e:
@@ -111,6 +111,16 @@ def basestats(state, params, name):
         #return the most dominant one
         val = float(np.max(dominant_frequencies))
         return 0, val
 
+    elif func == "histogram":
+        #takes only first N - items
+        dt = np.array(source_array)
+        #creates 4 buckets
+        bins = 4
+        mean_of_4th_bin = np.mean(dt[np.where(np.histogram(dt, bins)[1][3] <= dt)[0]])
+        if not np.isfinite(mean_of_4th_bin):
+            mean_of_4th_bin = 0
+        return 0, float(mean_of_4th_bin)
+
     elif func == "maxima":
         if len(source_array) < 3:
@@ -3,6 +3,7 @@ import numpy as np
 from rich import print as printanyway
 from traceback import format_exc
 import v2realbot.utils.utils as utls
+from copy import deepcopy
 # from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists
 
 
@@ -18,19 +19,43 @@ def expression(state: StrategyState, params, name):
     funcName = "expression"
     #indicator name
     operation = utls.safe_get(params, "expression", None)
 
     if operation is None:
         return -2, "required param missing"
 
+    #list of indicators that should be converted beforehand
+    convertToNumpy = utls.safe_get(params, "convertToNumpy", [])
+
     state.ilog(lvl=0,e=f"BEFORE {name}:{funcName} {operation=}", **params)
 
+    #currently a new numpy array is created on every iteration
+    #as an optimization, array.array could be used, which can
+    #share memory with numpy so no copy of the array is made
+    #downside: it does not support comprehensions and more
+    #see https://chat.openai.com/c/03bb0c1d-450e-4f0e-8036-d338692c1082
+
+    #opt by chatGPT
+    temp_ind_mapping = {k: np.array(v) if k in convertToNumpy else v for k, v in state.ind_mapping.items()}
+
+    # temp_ind_mapping = {}
+    # if len(convertToNumpy) > 0:
+    #     #maybe msgpack ext in
+    #     temp_ind_mapping = deepcopy(state.ind_mapping)
+    #     for key in convertToNumpy:
+    #         try:
+    #             temp_ind_mapping[key] = np.array(state.ind_mapping[key])
+    #             print(f"numpyed {key}")
+    #         except Exception:
+    #             pass
+
+    # if len(temp_ind_mapping) == 0:
+    #     temp_ind_mapping = state.ind_mapping
+
     #start with eval for now
-    val = eval(operation, {'state': state, 'np': np, 'utls': utls}, state.ind_mapping)
+    val = eval(operation, {'state': state, 'np': np, 'utls': utls}, temp_ind_mapping)
 
     #printanyway(val)
-    if not np.isfinite(val):
-        val = 0
+    val = 0 if not np.isfinite(val) else val
     #val = ne.evaluate(operation, state.ind_mapping)
 
     state.ilog(lvl=1,e=f"IND {name}:{funcName} {operation=} res:{val}", **params)
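To illustrate the convertToNumpy handling in isolation, a standalone sketch with hypothetical indicator names (the real call also exposes state and utls to the expression):

import numpy as np

ind_mapping = {"ema_fast": [1.0, 2.0, 3.0], "ema_slow": [0.5, 1.0, 1.5]}
convertToNumpy = ["ema_fast", "ema_slow"]

# series listed in convertToNumpy become numpy arrays so vectorized math works
temp_ind_mapping = {k: np.array(v) if k in convertToNumpy else v for k, v in ind_mapping.items()}
val = eval("(ema_fast - ema_slow)[-1]", {"np": np}, temp_ind_mapping)
print(val)  # 1.5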
@@ -88,5 +88,5 @@ def get_historical_bars(symbol: str, time_from: datetime, time_to: datetime, tim
 
     bar_request = StockBarsRequest(symbol_or_symbols=symbol,timeframe=timeframe, start=time_from, end=time_to, feed=DataFeed.SIP)
     bars: BarSet = stock_client.get_stock_bars(bar_request)
-    ##print("puvodni bars", bars["BAC"])
+    #print("puvodni bars", bars["BAC"])
     return convert_daily_bars(bars[symbol])