first commit

This commit is contained in:
David Brazda
2023-04-12 21:00:03 +02:00
commit af9e944928
158 changed files with 19422 additions and 0 deletions

27
testy/archive/.gtignore Normal file
View File

@ -0,0 +1,27 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
cache/

Binary file not shown.

View File

@ -0,0 +1,52 @@
# Fetch hourly BAC bars from Alpaca and render a candlestick chart with mplfinance.
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest, StockBarsRequest
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
import datetime
import time
from alpaca.data import Quote, Trade, Snapshot, Bar
from alpaca.data.models import BarSet, QuoteSet, TradeSet
from alpaca.data.timeframe import TimeFrame
import mplfinance as mpf
import pandas as pd

parametry = {}

# no keys required for the crypto client
#client = CryptoHistoricalDataClient()
client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=False)

# requested window (UTC)
datetime_object_from = datetime.datetime(2023, 2, 27, 18, 51, 38, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 2, 27, 21, 51, 39, tzinfo=datetime.timezone.utc)

bar_request = StockBarsRequest(symbol_or_symbols="BAC", timeframe=TimeFrame.Hour, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)

# BUG FIX: `.df` already converts the BarSet into a pandas DataFrame, so `bars`
# is a DataFrame from here on; the original `bars.df...` accesses below raised
# AttributeError.
bars = client.get_stock_bars(bar_request).df

# alpaca-py returns a (symbol, timestamp) MultiIndex even for a single symbol;
# drop the symbol level so a plain DatetimeIndex remains for tz_convert/plotting.
# NOTE(review): confirm the level name is 'symbol' for the installed alpaca-py version.
bars = bars.droplevel('symbol')
bars = bars.tz_convert('America/New_York')
print(bars)
print(bars.columns)
# Index(['open', 'high', 'low', 'close', 'volume', 'trade_count', 'vwap'], dtype='object')

# the timestamp is already the index, so no set_index('timestamp') is needed
mpf.plot(bars,              # the dataframe containing the OHLC (Open, High, Low and Close) data
         type='candle',     # use candlesticks
         volume=True,       # also show the volume
         mav=(3, 6, 9),     # use three different moving averages
         figratio=(3, 1),   # set the ratio of the figure
         style='yahoo',     # choose the yahoo style
         title='Prvni chart')

View File

@ -0,0 +1,51 @@
# allegedly working example for using a minute
# timeframe in backtrader with alpaca api
import alpaca_backtrader_api
import backtrader as bt
import pandas as pd
from datetime import datetime
from strategies.tos_strategy import TOS
from dotenv import load_dotenv
import os

load_dotenv()
api_key = os.getenv('API_KEY_ID')
api_secret = os.getenv('API_SECRET')
# BUG FIX: os.getenv returns a *string* (or None). The original passed that
# string to AlpacaStore(paper=...) and tested `if not alpaca_paper:` -- any
# non-empty value (even "False") was truthy, and a MISSING variable selected
# the LIVE broker. Normalize to a real bool once, defaulting to paper trading.
alpaca_paper = (os.getenv('ALPACA_PAPER') or 'true').strip().lower() in ('1', 'true', 'yes')

cerebro = bt.Cerebro()
cerebro.addstrategy(TOS)
cerebro.broker.setcash(100000)
cerebro.broker.setcommission(commission=0.0)
cerebro.addsizer(bt.sizers.PercentSizer, percents=20)

store = alpaca_backtrader_api.AlpacaStore(
    key_id=api_key,
    secret_key=api_secret,
    paper=alpaca_paper
)

# Only route orders through the live Alpaca broker when not paper trading;
# otherwise backtrader's simulated broker (with the cash set above) is used.
if not alpaca_paper:
    broker = store.getbroker()  # or just alpaca_backtrader_api.AlpacaBroker()
    cerebro.setbroker(broker)

DataFactory = store.getdata  # or use alpaca_backtrader_api.AlpacaData
data0 = DataFactory(
    dataname='AAPL',
    timeframe=bt.TimeFrame.TFrame("Minutes"),
    fromdate=pd.Timestamp('2018-11-15'),
    todate=pd.Timestamp('2018-11-17'),
    historical=True)
cerebro.adddata(data0)
# resample the minute feed into 15-minute bars
cerebro.resampledata(data0, timeframe=bt.TimeFrame.Minutes, compression=15)

print('Starting Portfolio Value: %.2f' % cerebro.broker.getvalue())
cerebro.run()
print('Final Portfolio Value: %.2f' % cerebro.broker.getvalue())
cerebro.plot()

View File

@ -0,0 +1,172 @@
# použití websocket loaderu v samostatném threadu
# v dalsim threadu pak input a cteni globalniho dataframu
# a stopnutí websocket loopu
#import clients
from alpaca.data.live import StockDataStream, CryptoDataStream
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
from datetime import datetime
import pandas as pd
import threading
# pripadne parametry pro request
# parametry = {
# "brand": "Ford",
# "model": "Mustang",
# "year": 1964
# }
# column layouts for the two global dataframes
sloupce = ["timestamp", "price", "size", "condition"]
# BUG FIX: the quote handler appends data.bid_size as the last field of every
# record; the last column was misnamed "bid_ask" -- renamed to "bid_size".
sloupce_q = ["timestamp", "ask_size", "ask_price", "bid_price", "bid_size"]

# global dataframes with a datetime index, filled by the websocket handlers
gdf = pd.DataFrame(columns=sloupce, index=pd.to_datetime([]))
gdf_q = pd.DataFrame(columns=sloupce_q, index=pd.to_datetime([]))

# # globals are used for communication between threads;
# # a mutual-exclusion lock (mutex) would prevent race conditions:
# lock = threading.Lock()
# with lock:
#     # add to the variable
#     variable = variable + 10
#     # the lock is released automatically

prev_timestamp = "new"  # sentinel meaning "no quote seen yet"
batch = []              # trade records waiting to be flushed into gdf
batch_q = []            # quote records waiting to be flushed into gdf_q
seconds_list = []       # quotes belonging to the current second (per-second OHLC aggregation)
parametry = {}

now = datetime.now()  # current date and time, made timezone aware below
now = now.astimezone()

# the client must be global so the other thread can stop it
#client = StockDataStream(API_KEY, SECRET_KEY, raw_data=False, websocket_params=parametry, feed=DataFeed.SIP)
client = StockDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params=parametry, feed=DataFeed.SIP)
## thread for reading the websocket and filling the global dataframes
# later to be rewritten into a standalone WSReader class
def ws_reader():
    """Run the websocket client and aggregate incoming messages into the global dataframes.

    Defines two async handlers (trades and quotes) and then blocks in
    client.run() until client.stop() is called from another thread.
    """
    print("vstup do threadu ws reader")

    # handler for websocket trade data
    async def data_handler(data):
        global gdf
        global batch
        #record_list = (data.timestamp, data.open,data.high,data.low,data.close)
        #batch.append(record_list)
        print(data)
        # flush every full batch into the global dataframe
        # NOTE(review): nothing appends to `batch` here (the append above is
        # commented out), so this flush branch currently never fires for trades.
        if len(batch) == MAX_BATCH_SIZE:
            ## build a DataFrame from the current batch list
            new_df = pd.DataFrame.from_records(data=batch, columns = sloupce)
            ## append it to the global dataframe
            gdf = pd.concat([gdf,new_df], axis=0, ignore_index=True)
            batch = []
            #print(gdf)

    # handler for websocket quote data
    async def data_handler_q(data):
        global gdf_q
        global batch_q
        global prev_timestamp
        global seconds_list
        record_list = (data.timestamp, data.ask_size,data.ask_price,data.bid_price,data.bid_size)
        batch_q.append(record_list)
        #print(data.ask_size,data.ask_price,data.bid_price,data.bid_size)
        #print("sestaveni je",sestaveni, "\\n batch ma ", len(batch), "clenu")
        print(batch_q)

        ## max and min of the second element (ask_price) of each sublist
        def max_value(inputlist):
            return max([sublist[1] for sublist in inputlist])
        def min_value(inputlist):
            return min([sublist[1] for sublist in inputlist])
        def sum_value(inputlist):
            # sums the last element (ask_size) of each sublist
            for sublist in inputlist: print(sublist[-1])
            return sum([sublist[-1] for sublist in inputlist])

        # same second as the previous quote (or the very first record): accumulate
        if (prev_timestamp=="new") or (data.timestamp.second==prev_timestamp.second):
            print("stejna vterina")
            seconds_list.append([data.timestamp, data.ask_price, data.ask_size])
            #print("po appendu",seconds_list)
        else:
            print("nova vterina")
            # a new second started: print OHLC-style stats for the finished second
            print("max", max_value(seconds_list), "min ", min_value(seconds_list), "sum", sum_value(seconds_list), "open", seconds_list[0][1], "close", seconds_list[-1][1])
            print("-"*40)
            seconds_list = []
            seconds_list.append([data.timestamp, data.ask_price, data.ask_size])
            print(seconds_list)
        # log current vs. previous timestamp, then advance the marker
        print("akt.cas",data.timestamp,"minuly cas", prev_timestamp)
        prev_timestamp = data.timestamp

        # flush every full batch into the global dataframe
        if len(batch_q) == MAX_BATCH_SIZE:
            ## build a DataFrame from the current batch list
            new_df = pd.DataFrame.from_records(data=batch_q, columns = sloupce_q)
            ## append it to the global dataframe
            gdf_q = pd.concat([gdf_q,new_df], axis=0, ignore_index=True)
            batch_q = []
            #print(gdf)

    #client.subscribe_quotes(data_handler, "BAC")
    #client.subscribe_trades(data_handler, "BAC")
    #client.subscribe_updated_bars(data_handler, "BAC")
    # NOTE(review): all subscriptions above are commented out, so run()
    # currently connects but receives no data.
    ## this blocks and runs the websocket event loop
    print("spoustim run")
    client.run()
    print("run skoncil")
def user_prompt():
    """Interactive console loop: inspect the global dataframes or stop the websocket client."""
    print("Tady je druhy thread, kde muzu delat co chci, pripadne ovladat ws loader")
    while True:
        volba = input("Vypsat dataframe: [t-trades;q-quotes;e-exit]")
        if volba == "e":
            break
        if volba == "t":
            print(gdf)
        elif volba == "q":
            print(gdf_q.tail(20))
    print("bye")
    # stopping the stream lets the reader thread's client.run() return
    client.stop()
def main():
    """Start the websocket reader thread and block until it finishes."""
    reader_thread = threading.Thread(target=ws_reader)
    reader_thread.start()
    # wait for the reader thread to complete
    reader_thread.join()

if __name__ == "__main__":
    main()
# tbd jeste si vyzkouset, zda to bez threadu nepujde takto s asynciem
# if __name__ == '__main__':
# logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
# level=logging.INFO)
# logging.log(logging.INFO, 'Starting up...')
# try:
# loop = asyncio.get_event_loop()
# loop.run_until_complete(main())
# loop.close()
# except KeyboardInterrupt:
# pass

View File

@ -0,0 +1,39 @@
# Fetch all SIP trades for symbol "C" over a two-day window and print how many there were.
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
import datetime
import time

parametry = {}

# raw_data=True makes the client return plain dicts instead of model objects
client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=True)

# request window (UTC)
datetime_object_from = datetime.datetime(2023, 2, 26, 17, 51, 38, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 2, 28, 17, 51, 39, tzinfo=datetime.timezone.utc)

trades_request = StockTradesRequest(
    symbol_or_symbols="C",
    feed=DataFeed.SIP,
    start=datetime_object_from,
    end=datetime_object_to,
)

all_trades = client.get_stock_trades(trades_request)
# the response is keyed by symbol even for a single-symbol request
print(len(all_trades["C"]))

View File

@ -0,0 +1,66 @@
# Demonstrates the two historical data clients:
#   StockHistoricalDataClient (requires keys) and CryptoHistoricalDataClient.
# Their real-time counterparts are CryptoDataStream / StockDataStream.
from alpaca.data.historical import StockHistoricalDataClient, CryptoHistoricalDataClient
# historical (REST) calls take a request object
from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
# model types returned by the clients (part of the package above; imported for reference)
from alpaca.data import Quote, Trade, Snapshot, Bar
from alpaca.data.models import BarSet, QuoteSet, TradeSet
from config import API_KEY, SECRET_KEY
from datetime import datetime, timedelta
import pandas as pd
import rich

# create the clients
stock_client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=True)
crypto_client = CryptoHistoricalDataClient()

# one-second window; naive datetimes -- presumably treated as UTC by the API, verify
time_from = datetime(2023, 2, 17, 14, 30, 0, 0)
time_to = datetime(2023, 2, 17, 14, 30, 1, 0)

# build the request and fetch; the response maps symbol -> list of trades
stockTradeRequest = StockTradesRequest(
    symbol_or_symbols=["BAC","C","MSFT"],
    start=time_from,
    end=time_to,
)
tradesResponse = stock_client.get_stock_trades(stockTradeRequest)

print(tradesResponse)
for i in tradesResponse['BAC']:
    print(i)

View File

@ -0,0 +1,74 @@
from alpaca.data.live import StockDataStream, CryptoDataStream
from alpaca.trading.stream import TradingStream
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
from datetime import datetime
import mplfinance as mpf
import matplotlib.pyplot as plt
import threading
# optional request parameters, e.g.:
# parametry = {
#     "brand": "Ford",
#     "model": "Mustang",
#     "year": 1964
# }
parametry = {}
# per-handler call counters
# NOTE(review): handlers 2 and 3 both increment `u`; there is no third counter.
i = 0
u = 0
async def data_handler1(data):
    """Trade-update callback for client1; bumps the global call counter `i` and echoes the payload."""
    global i
    print("HANDLER1")
    i = i + 1
    print(data)
async def data_handler2(data):
    """Trade-update callback for client2; bumps the global call counter `u` and echoes the payload."""
    global u
    print("HANDLER2")
    u = u + 1
    print(data)
async def data_handler3(data):
    """Trade-update callback for client3.

    NOTE(review): like data_handler2, this increments the shared counter `u`
    (no third counter exists in the file) -- behavior kept as-is.
    """
    global u
    print("HANDLER3")
    u = u + 1
    print(data)
# plt.ion()
# def animate(ival):
#     # PREPARE DATAFRAME WITH OHLC AND "BUYS" AND "SELLS" HERE
#     apds = [mpf.make_addplot(buys, color='tab:green', ax=ax_buys),
#             mpf.make_addplot(sells, color='tab:red', ax=ax_sells)]
#     for ax in axes:
#         ax.clear()
#     mpf.plot(df_ohlc, type='candle', addplot=apds, ax=ax_main)
#     print('a')

#client = CryptoDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params=parametry)

# Three separate TradingStream clients for the same paper account, each with
# its blocking run() loop moved into its own thread.
# NOTE(review): whether Alpaca permits several concurrent trade-update streams
# per account is not established here -- verify before relying on all three
# handlers firing.
client1 = TradingStream(API_KEY, SECRET_KEY, paper=True)
client1.subscribe_trade_updates(data_handler1)
t1 = threading.Thread(target=client1.run)
t1.start()
print("started1")
client2 = TradingStream(API_KEY, SECRET_KEY, paper=True)
client2.subscribe_trade_updates(data_handler2)
t2 = threading.Thread(target=client2.run)
t2.start()
client3 = TradingStream(API_KEY, SECRET_KEY, paper=True)
client3.subscribe_trade_updates(data_handler3)
t3 = threading.Thread(target=client3.run)
t3.start()
print("started2")
print(threading.enumerate())
# run() does not return on its own, so these joins block until the streams stop
t2.join()
t1.join()
t3.join()

# client.subscribe_trades(data_handler, "BTC/USD")
# #client.subscribe_quotes(data_handler_ETH, "ETH/USD")
# print("pred spustenim runu")
# client.run()
# print("po spusteni runu - muzu neco delat?")

47
testy/archive/async.py Normal file
View File

@ -0,0 +1,47 @@
#Asyncio - umoznuje a řídí konkurentni kod (v ramci jednoho vlakna, vice vlaken nebo i procesu - skrz concurrent.futures)
# async si pri definici oznacim funkci, kterou chci asynchronne volat a ve které muze byt dalsi asynchronni volani(await)
# await - označuje mi, že na volanou funkci čekám - pokud je pouzito v tasku pak task vraci pokračování zpět o level výše
# create task(asynchr.funkce) - vtakto zavolám paralelni funkci bez cekani a blok pokracuje dal
# asyncio.wait - ceka na tas
# Here is a list of what you need in order to make your program async:
# Add async keyword in front of your function declarations to make them awaitable.
# Add await keyword when you call your async functions (without it they wont run).
# Create tasks from the async functions you wish to start asynchronously. Also wait for their finish.
# Call asyncio.run to start the asynchronous section of your program. Only one per thread.
# time.sleep(5) - blokuje, await asyncio.sleep(5) - neblokuje kod(asynchroni task)
import asyncio
#tbd pridat logger
async def makej(pracant, cekani):
    """Simulated worker: announce start, sleep *cekani* seconds without blocking the event loop, announce finish."""
    # typo "sekubd" in the original message is kept verbatim
    print("pracant ", pracant, "zacal", "bude cekat", cekani, "sekubd")
    await asyncio.sleep(cekani)
    print("pracant ", pracant, "docekal", cekani, "sekund")
async def main():
    """Spawn three workers concurrently, wait for all of them, then run two workers sequentially."""
    print("vstup do funkce main")
    # create_task schedules the coroutines immediately, without awaiting them
    tasks = [
        asyncio.create_task(makej(1, 5)),
        asyncio.create_task(makej(2, 3)),
        asyncio.create_task(makej(3, 1)),
    ]
    print("tasky jedou - ted jsme v kodu za jejich spustenim - ale pred await.wait")
    # block until every task has finished
    await asyncio.wait(tasks)
    print("dočekáno na vysledek po volani await.wait")
    print("a ted volani funkce standardním synchro způsobem, kdy cekame na vysledek ")
    # plain awaited calls run one after another
    await makej(1, 1)
    await makej(2, 1)
# main entry -- asyncio.run should be called once per thread
#asyncio.run(main())
#feature to convert async to sync
#asyncio.get_event_loop().run_until_complete() --or this decorator https://github.com/miyakogi/syncer
# NOTE(review): main() is never executed; only a single worker run happens here.
# makej returns None, so `newfeature` is always None.
newfeature = asyncio.run(makej(1,1))

View File

@ -0,0 +1,57 @@
# to test change iterable (adding items) while iterating
class Notif:
    """Minimal notification stub carrying only a time value."""

    def __init__(self, time):
        self.time = time

    def __repr__(self):
        # the script prints whole lists of Notif objects; without a __repr__
        # those prints showed opaque default object addresses
        return f"Notif(time={self.time!r})"
# seed the queue with notifications 1..9
open_orders = []
for i in range(1,10):
    open_orders.append(Notif(i))
print("cele pole objektu",open_orders)
# Here, 'reversed' returns a lazy iterator, so it's performant! reversed(l):
# both removing old items and adding new ones must work
# this list contains all not-yet-processed notifications that we try to process during this iteration
# if the time is not right we leave the message for the next iteration
# if the time is right we process the message (- note it can trigger additional open_orders, that are added to the queue)
def process_message(notif: Notif):
    """Process one notification; even times below 300 enqueue a follow-up notification 50 later."""
    global open_orders
    time_is_even = notif.time % 2 == 0
    if time_is_even and notif.time < 300:
        follow_up = Notif(notif.time + 50)
        open_orders.append(follow_up)
todel = []
# NOTE: open_orders grows while we iterate it; a Python list for-loop picks up
# elements appended during iteration, which is exactly what this script
# sets out to demonstrate.
for i in open_orders:
    print("*******start iterace polozky", i.time)
    process_message(i)
    print("removing element",i.time)
    # defer removal until after the loop -- removing while iterating would skip items
    todel.append(i)
    print("*****konec iterace", i.time)
    print()
print("to del", todel)
# removing processed items from the list
for i in todel:
    open_orders.remove(i)
print("cely list po skonceni vseho")
for i in open_orders: print(i.time)
""""
pred iteraci se zavola synchroné
EXECUTE open orders(time)
- pokusi se vytvorit vsechny otevrene ordery do daneho casu (casu dalsi iterace)
- podporuje i volani callbacku a to vcetne pokynu vytvoreneho z pokynu
- tento novy pokyn muze byt i take exekuovan pokud se vcetne roundtripu vejde do daneho casu
pripadne soucasne vytvoreni i exekuci pokynu
"""

View File

@ -0,0 +1,38 @@
from uuid import UUID, uuid4
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent, OrderClass, OrderType, TimeInForce
#from utils import AttributeDict
from rich import print
import threading
#import utils
import asyncio
from typing import Any, Optional, List, Union
from datetime import datetime, date
from pydantic import BaseModel
class Order(BaseModel):
    """Subset of an Alpaca order, validated by pydantic."""
    id: UUID
    submitted_at: datetime
    filled_at: Optional[datetime]
    symbol: str
    # quantities/prices are kept as strings -- presumably mirroring the API's
    # string-encoded numerics; confirm against the Alpaca order schema
    qty: Optional[str]
    filled_qty: Optional[str]
    filled_avg_price: Optional[str]
    side: OrderSide
    limit_price: Optional[str]
class TradeUpdate(BaseModel):
    """A trade-update event wrapping the affected order."""
    # Union allows event values outside the TradeEvent enum to pass validation as plain strings
    event: Union[TradeEvent, str]
    execution_id: Optional[UUID]
    order: Order
    timestamp: datetime
    position_qty: Optional[float]
    price: Optional[float]
    qty: Optional[float]
class User(BaseModel):
    """Toy model used to try out pydantic field defaults."""
    id: int
    # annotated so the default is treated as a str field (pydantic v2 requires annotations)
    name: str = "Jana"
# required fields (id, submitted_at, symbol, side) provided; Optional fields
# are omitted -- presumably defaulting to None under pydantic v1, verify
a = Order(id = uuid4(), submitted_at= datetime.now(), symbol = "BAC", side=OrderSide.BUY)
print(a)

3
testy/archive/config.py Normal file
View File

@ -0,0 +1,3 @@
# SECURITY(review): real-looking Alpaca API credentials are committed here in
# plain text. Treat them as compromised -- rotate the keys and load them from
# environment variables or an untracked .env file instead of source control.
API_KEY = 'PKGGEWIEYZOVQFDRY70L'
SECRET_KEY = 'O5Kt8X4RLceIOvM98i5LdbalItsX7hVZlbPYHy8Y'
# number of records the websocket handlers collect before flushing a batch into the global dataframe
MAX_BATCH_SIZE = 1

View File

@ -0,0 +1,5 @@
import panel as pn
# minimal Panel app: a single column containing one text pane
app = pn.Column("DDD")
# app.servable()   # would expose the app for `panel serve`
# app.append("Nice")
# show() opens the app in a browser and blocks until closed
app.show()

View File

@ -0,0 +1,17 @@
import scipy.interpolate as spi
import matplotlib.pyplot as plt
# sample points (y grows roughly quadratically with x)
x = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
y = [4, 7, 11, 16, 22, 29, 38, 49, 63, 80]
# piecewise-linear interpolator over the sample points
# NOTE(review): interp1d is legacy in recent SciPy releases; numpy.interp or
# scipy.interpolate.make_interp_spline are the suggested replacements --
# confirm the targeted SciPy version.
y_interp = spi.interp1d(x, y)
#find y-value associated with x-value of 13
#print(y_interp(13))
#create plot of x vs. y
#plt.plot(x, y, '-ob')

View File

@ -0,0 +1,31 @@
# Unlike asyncio and threading, processes carry a higher creation overhead.
# Suited for CPU- and GPU-heavy work, e.g. strategies where each strategy = 1 process.
import multiprocessing
import logging
# route multiprocessing's internal log messages to stderr at INFO level
logger = multiprocessing.log_to_stderr()
logger.setLevel(logging.INFO)
# sample message to verify the logger is wired up
logger.warning('doomed')
def do_first():
    """Print three numbered lines identifying this worker."""
    for msg in ("Running do_first line 1",
                "Running do_first line 2",
                "Running do_first line 3"):
        print(msg)
def do_second():
    """Print three numbered lines identifying this worker."""
    for msg in ("Running do_second line 1",
                "Running do_second line 2",
                "Running do_second line 3"):
        print(msg)
def main():
    """Run do_first and do_second in two parallel processes and wait for both to finish."""
    first_proc = multiprocessing.Process(target=do_first)
    second_proc = multiprocessing.Process(target=do_second)
    # start both processes
    first_proc.start()
    second_proc.start()
    # wait for both to complete
    first_proc.join()
    second_proc.join()

if __name__ == "__main__":
    main()

145
testy/archive/test.ipynb Normal file
View File

@ -0,0 +1,145 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 97,
"metadata": {},
"outputs": [
{
"ename": "ValidationError",
"evalue": "1 validation error for StockBarsRequest\ntimeframe\n instance of TimeFrame expected (type=type_error.arbitrary_type; expected_arbitrary_type=TimeFrame)",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValidationError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[97], line 35\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[39m#print(time_from)\u001b[39;00m\n\u001b[1;32m 31\u001b[0m \n\u001b[1;32m 32\u001b[0m \u001b[39m# vytvorim request objekt\u001b[39;00m\n\u001b[1;32m 33\u001b[0m \u001b[39m#latestQuoteRequest = StockLatestQuoteRequest(symbol_or_symbols=[\"SPY\", \"GLD\", \"TLT\"])\u001b[39;00m\n\u001b[1;32m 34\u001b[0m stockTradeRequest \u001b[39m=\u001b[39m StockTradesRequest(symbol_or_symbols\u001b[39m=\u001b[39m[\u001b[39m\"\u001b[39m\u001b[39mBAC\u001b[39m\u001b[39m\"\u001b[39m], start\u001b[39m=\u001b[39mtime_from,end\u001b[39m=\u001b[39mtime_to)\n\u001b[0;32m---> 35\u001b[0m stockBarRequest \u001b[39m=\u001b[39m StockBarsRequest(symbol_or_symbols\u001b[39m=\u001b[39;49m[\u001b[39m\"\u001b[39;49m\u001b[39mBAC\u001b[39;49m\u001b[39m\"\u001b[39;49m], start\u001b[39m=\u001b[39;49mtime_from,end\u001b[39m=\u001b[39;49mtime_to, timeframe\u001b[39m=\u001b[39;49m\u001b[39m\"\u001b[39;49m\u001b[39m15s\u001b[39;49m\u001b[39m\"\u001b[39;49m)\n\u001b[1;32m 37\u001b[0m \u001b[39m#zavolam na clientovi metodu s request objektem, vrací se mi Dict[str, Quote] - obj.Quote pro kazdy symbol\u001b[39;00m\n\u001b[1;32m 38\u001b[0m \u001b[39m#latestQuoteObject = stock_client.get_stock_latest_quote(latestQuoteRequest)\u001b[39;00m\n\u001b[1;32m 39\u001b[0m \u001b[39m#tradesResponse = stock_client.get_stock_trades(stockTradeRequest).df\u001b[39;00m\n\u001b[1;32m 40\u001b[0m stocksResponse \u001b[39m=\u001b[39m stock_client\u001b[39m.\u001b[39mget_stock_bars(stockBarRequest)\u001b[39m.\u001b[39mdf\n",
"File \u001b[0;32m~/Documents/Development/python/trading/.venv/lib/python3.11/site-packages/alpaca/data/requests.py:45\u001b[0m, in \u001b[0;36mBaseTimeseriesDataRequest.__init__\u001b[0;34m(self, **data)\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[39mif\u001b[39;00m (\n\u001b[1;32m 38\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m \u001b[39min\u001b[39;00m data\n\u001b[1;32m 39\u001b[0m \u001b[39mand\u001b[39;00m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 40\u001b[0m \u001b[39mand\u001b[39;00m \u001b[39misinstance\u001b[39m(data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m], datetime)\n\u001b[1;32m 41\u001b[0m \u001b[39mand\u001b[39;00m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mtzinfo \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 42\u001b[0m ):\n\u001b[1;32m 43\u001b[0m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39m=\u001b[39m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mastimezone(pytz\u001b[39m.\u001b[39mutc)\u001b[39m.\u001b[39mreplace(tzinfo\u001b[39m=\u001b[39m\u001b[39mNone\u001b[39;00m)\n\u001b[0;32m---> 45\u001b[0m \u001b[39msuper\u001b[39;49m()\u001b[39m.\u001b[39;49m\u001b[39m__init__\u001b[39;49m(\u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mdata)\n",
"File \u001b[0;32m~/Documents/Development/python/trading/.venv/lib/python3.11/site-packages/pydantic/main.py:342\u001b[0m, in \u001b[0;36mpydantic.main.BaseModel.__init__\u001b[0;34m()\u001b[0m\n",
"\u001b[0;31mValidationError\u001b[0m: 1 validation error for StockBarsRequest\ntimeframe\n instance of TimeFrame expected (type=type_error.arbitrary_type; expected_arbitrary_type=TimeFrame)"
]
}
],
"source": [
"# 2 clients for historical data StockHistoricalDataClient (needs keys), CryptoHistoricalDataClient\n",
"# 2 clients for real time data CryptoDataStream, StockDataStream\n",
"\n",
"\n",
"# naimportuju si daneho clienta\n",
"from alpaca.data.historical import StockHistoricalDataClient, CryptoHistoricalDataClient\n",
"\n",
"#pokdu pouzivam historicke data(tzn. REST) tak si naimportuju dany request object\n",
"from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest\n",
"\n",
"#objekty se kterymi pak pracuju (jsou soucasi package výše, tady jen informačně)\n",
"from alpaca.data import Quote, Trade, Snapshot, Bar\n",
"from alpaca.data.models import BarSet, QuoteSet, TradeSet\n",
"\n",
"\n",
"from config import API_KEY, SECRET_KEY\n",
"import datetime\n",
"import pandas as pd\n",
"\n",
"# vytvorim si clienta\n",
"stock_client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=False)\n",
"\n",
"sloupce=[\"symbol\",\"timestamp\",\"exchange\",\"price\",\"size\",\"id\",\"conditions\",\"tape\"]\n",
"\n",
"# deklarace globalniho df s timeindexem\n",
"#gdf = pd.DataFrame(columns=sloupce, index=pd.to_datetime([]))\n",
"\n",
"time_from = datetime.datetime(2023, 2, 17, 14, 50, 0, 0)\n",
"time_to = datetime.datetime(2023, 2, 17, 14, 55, 1, 0)\n",
"#print(time_from)\n",
"\n",
"# vytvorim request objekt\n",
"#latestQuoteRequest = StockLatestQuoteRequest(symbol_or_symbols=[\"SPY\", \"GLD\", \"TLT\"])\n",
"stockTradeRequest = StockTradesRequest(symbol_or_symbols=[\"BAC\"], start=time_from,end=time_to)\n",
"stockBarRequest = StockBarsRequest(symbol_or_symbols=[\"BAC\"], start=time_from,end=time_to, timeframe=\"15s\")\n",
"\n",
"#zavolam na clientovi metodu s request objektem, vrací se mi Dict[str, Quote] - obj.Quote pro kazdy symbol\n",
"#latestQuoteObject = stock_client.get_stock_latest_quote(latestQuoteRequest)\n",
"#tradesResponse = stock_client.get_stock_trades(stockTradeRequest).df\n",
"stocksResponse = stock_client.get_stock_bars(stockBarRequest).df\n",
"\n",
"#data = [{'t': '2023-02-17T14:50:00.582845696Z', 'x': 'D', 'p': 34.83, 's': 1, 'c': [' ', 'I'], 'i': 71675642337847, 'z': 'A'}, {'t': '2023-02-17T14:50:00.948229632Z', 'x': 'D', 'p': 34.8383, 's': 10, 'c': [' ', 'I'], 'i': 79371872323411, 'z': 'A'}]\n",
"# data = [{ 'conditions': [' ', 'I'],\n",
"# 'exchange': 'D',\n",
"# 'id': 71675642337847,\n",
"# 'price': 34.83,\n",
"# 'size': 1.0,\n",
"# 'symbol': 'BAC',\n",
"# 'tape': 'A',\n",
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 50, 0, 582845, tzinfo=datetime.timezone.utc)}, { 'conditions': [' ', 'I'],\n",
"# 'exchange': 'D',\n",
"# 'id': 79371872323411,\n",
"# 'price': 34.8383,\n",
"# 'size': 10.0,\n",
"# 'symbol': 'BAC',\n",
"# 'tape': 'A',\n",
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 50, 0, 948229, tzinfo=datetime.timezone.utc)}, { 'conditions': [' ', 'I'],\n",
"# 'exchange': 'D',\n",
"# 'id': 71675642400306,\n",
"# 'price': 34.835,\n",
"# 'size': 1.0,\n",
"# 'symbol': 'BAC',\n",
"# 'tape': 'A',\n",
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 50, 1, 870989, tzinfo=datetime.timezone.utc)}, { 'conditions': [' ', 'I'],\n",
"# 'exchange': 'D',\n",
"# 'id': 71675642400308,\n",
"# 'price': 34.84,\n",
"# 'size': 100.0,\n",
"# 'symbol': 'BAC',\n",
"# 'tape': 'A',\n",
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 55, 0, 88460, tzinfo=datetime.timezone.utc)}]\n",
"# datetime.datetime(2023, 2, 17, 14, 50, 0, 948229, tzinfo=datetime.timezone.utc)\n",
"#data = tradesResponse\n",
"\n",
"#gdf = pd.DataFrame.from_dict(data=data, orient='index')\n",
"#gdf = pd.DataFrame(data)\n",
"\n",
"#gdf = pd.DataFrame(tradesResponse.data[\"BAC\"])\n",
"\n",
"# works with raw data\n",
"#gdf = pd.DataFrame([t for t in tradesResponse[\"BAC\"]], columns=sloupce)\n",
"\n",
"#gdf = tradesResponse.df\n",
"print(stocksResponse)\n",
"# print(tradesResponse)\n",
"#print(tradesResponse[\"BAC\"])\n",
"# print(tradesResponse.data[\"BAC\"])\n",
"\n",
"# positions_df = pd.concat((pd.DataFrame(position).set_index(0) for position in positions),axis=1)\n",
"# positions_df = positions_df.T.apply(pd.to_numeric, errors='ignore').T # convert strings to numeric\n",
"# For orders:\n",
"# orders_df = pd.concat((pd.DataFrame(order).set_index(0) for order in orders),axis=1).T\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.10"
},
"orig_nbformat": 4,
"vscode": {
"interpreter": {
"hash": "9391835cf7167c62e8e53032533e4da7e63c83f818ef5f19912128bc45706236"
}
}
},
"nbformat": 4,
"nbformat_minor": 2
}

66
testy/archive/test.py Normal file
View File

@ -0,0 +1,66 @@
# Stream live BAC trades from Alpaca and append every raw message to Trades.txt.
from alpaca.data.live import StockDataStream
from alpaca.common.enums import BaseURL
import datetime
import pandas as pd
from alpaca.data.models import Bar, Quote, Trade
import csv
from config import API_KEY, SECRET_KEY

# SECURITY(review): the hard-coded inline `key`/`secret` pair was unused (the
# client below already uses API_KEY/SECRET_KEY from config) and has been
# removed; treat the leaked values as compromised and rotate them.

# keys required for the stock stream client
client = StockDataStream(api_key=API_KEY, secret_key=SECRET_KEY)

# global frame intended to accumulate trades (currently only printed at the end)
df_glob = pd.DataFrame(columns=['timestamp','symbol', 'exchange','size','price','id','conditions','tape'])

file = open('Trades.txt', 'w')

# async handler: called by the stream for every trade message
async def quote_data_handler(data):
    # trade data arrives here; echo it and append one line to the output file
    print(data)
    ne = str(data) + "\n"
    file.write(ne)
    print("-"*40)

#client.subscribe_updated_bars(quote_data_handler, "BAC")
#client.subscribe_quotes(quote_data_handler, "BAC")
client.subscribe_trades(quote_data_handler, "BAC")

print("pred spustenim run")
try:
    client.run()
except Exception as err:
    print(f"{type(err).__name__} was raised: {err}")
    print("globalni dataframe")
    print(df_glob)
finally:
    # BUG FIX: close the output file even when run() raises; the original only
    # closed it on the success path after the except block
    file.close()

print(df_glob)

# BUG FIX: the file originally ended with dead code referencing undefined
# names (`request`, `MarketOrderRequest`), which raised NameError as soon as
# run() returned; those lines were removed.

View File

@ -0,0 +1 @@

View File

@ -0,0 +1,35 @@
# A condition variable allows one or more threads to wait until they are
# notified by another thread.
import threading
import time
import logging
# include the thread name in every log line so the interleaving is visible
logging.basicConfig(level=logging.DEBUG,
                    format='(%(threadName)-9s) %(message)s',)
def consumer(cv):
    """Block on the condition variable *cv* until a producer calls notify."""
    logging.debug('Consumer thread started ...')
    # the with-block acquires the condition's underlying lock; wait() releases
    # it while blocking and re-acquires it before returning
    with cv:
        logging.debug('Consumer waiting ...')
        cv.wait()
        logging.debug('Consumer consumed the resource')
def producer(cv):
    """Announce resource availability and wake every waiting consumer."""
    logging.debug('Producer thread started ...')
    cv.acquire()
    try:
        logging.debug('Making resource available')
        logging.debug('Notifying to all consumers')
        cv.notify_all()
    finally:
        cv.release()
if __name__ == '__main__':
    condition = threading.Condition()
    first_consumer = threading.Thread(name='consumer1', target=consumer, args=(condition,))
    second_consumer = threading.Thread(name='consumer2', target=consumer, args=(condition,))
    notifier = threading.Thread(name='producer', target=producer, args=(condition,))
    # Stagger the consumers, then notify both at once via the producer.
    first_consumer.start()
    time.sleep(2)
    second_consumer.start()
    time.sleep(2)
    notifier.start()

54
testy/archive/thready.py Normal file
View File

@ -0,0 +1,54 @@
# pouziti threadu - narozdil od asyncio - nemame pod tim uplnou kontrolu a ridi to knihovna
# thready jsou výhodne pro naročné IO operace, např. loadery, requestory, scrapery, ukladače atp.
# how to share data between Threads
# 1.Sharing a boolean variable with a threading.Event.
# declare in unset or false state
# event = threading.Event()
# if event.is_set(): # check if set
# event.set() # set the event true
# event.clear() # or false
# 2.Protecting global shared data with a threading.Lock.
# lock = threading.Lock()
# with lock:
# variable = variable + 10
# 3.Sharing data with a queue.Queue. Queue can be shared between threads.
# create a queue
# queue = Queue() #create FIFO
# queue.put(i) #enque
# data = queue.get() #dequeue
# dale je tu condition - takova roura mezi consumerem a producerem
# cond = threading.Condition()
# cond.wait() #consumer waiting
# cond.notifyAll() #producer notifiying consumer, they can continue
# consumer threads wait for the Condition to be set before continuing.
# The producer thread is responsible for setting the condition and notifying the other threads
# that they can continue. Více v sam.test filu.
import threading
def do_first():
    """Print three marker lines identifying the first worker."""
    for line_no in (1, 2, 3):
        print(f"Running do_first line {line_no}")
def do_second():
    """Print three marker lines identifying the second worker."""
    for line_no in (1, 2, 3):
        print(f"Running do_second line {line_no}")
def main():
    """Run both workers concurrently and block until both finish."""
    workers = [threading.Thread(target=do_first), threading.Thread(target=do_second)]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,74 @@
from uuid import UUID, uuid4
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent, OrderClass, OrderType, TimeInForce
#from utils import AttributeDict
from rich import print
from typing import Any, Optional, List, Union
from datetime import datetime, date
from pydantic import BaseModel
from common.model import Order
# to test change iterable (adding items) while iterating
import asyncio
class Notif:
    """Minimal notification object carrying only an event time."""

    def __init__(self, time):
        # Timestamp (arbitrary units) consumed by the processing loop below.
        self.time = time
# Seed the queue with nine ACCEPTED market buy orders for BAC.
open_orders: list = []
for i in range(1,10):
    open_orders.append(Order(id=uuid4(),
                    submitted_at = datetime.utcnow(),
                    qty=1,
                    order_type=OrderType.MARKET,
                    symbol = "BAC",
                    status = OrderStatus.ACCEPTED,
                    side = OrderSide.BUY))
print("cele pole objektu",open_orders)
# Here, 'reversed' returns a lazy iterator, so it's performant! reversed(l):
#musi fungovat removing stare a pridavani novych
#this list contains all not processed notification, that we try to process during this iteration
#if time is not right we leave the message for next iter
#if time is right we process the message (- note it can trigger additional open_orders, that are added to queue)
async def apenduj():
    """Coroutine used by the iteration experiment: push one marker item onto the global queue."""
    global open_orders
    open_orders += ["cago"]
# if notif.time % 2 == 0 and notif.time < 300:
# open_orders.append(Notif(notif.time+50))
todel = []
# NOTE(review): apenduj() appends one "cago" per pass, so the list grows as
# fast as the iterator advances — this loop never terminates as written
# (deliberate "mutate while iterating" experiment per the file header).
for i in open_orders:
    #print("*******start iterace polozky", i.time)
    print(i)
    print("removing element",i)
    res = asyncio.run(apenduj())
    todel.append(i)
    print("*****konec iterace", i)
print()
print("to del", todel)
#removing processed from the list
for i in todel:
    open_orders.remove(i)
print("cely list po skonceni vseho")
# NOTE(review): appended "cago" strings have no .id attribute — this line
# would raise AttributeError if reached.
for i in open_orders: print(i.id)
""""
pred iteraci se zavola synchroné
EXECUTE open orders(time)
- pokusi se vytvorit vsechny otevrene ordery do daneho casu (casu dalsi iterace)
- podporuje i volani callbacku a to vcetne pokynu vytvoreneho z pokynu
- tento novy pokyn muze byt i take exekuovan pokud se vcetne roundtripu vejde do daneho casu
pripadne soucasne vytvoreni i exekuci pokynu
"""

70
testy/changeiterable.py Normal file
View File

@ -0,0 +1,70 @@
from uuid import UUID, uuid4
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent, OrderClass, OrderType, TimeInForce
#from utils import AttributeDict
from rich import print
from typing import Any, Optional, List, Union
from datetime import datetime, date
from pydantic import BaseModel
from common.model import Order
# to test change iterable (adding items) while iterating
class Notif:
    """Minimal notification object carrying only an event time."""

    def __init__(self, time):
        # Timestamp (arbitrary units) used by process_message().
        self.time = time
# BUGFIX: the annotation `list(Order)` is *evaluated* at module import and
# raises TypeError (a class is not iterable); use the typing generic instead.
open_orders: List[Order] = []
for i in range(1, 10):
    # BUGFIX: all keyword arguments belong to Order(...) — previously they
    # were passed to list.append(), which accepts no keyword arguments.
    open_orders.append(Order(id=uuid4(),
                             submitted_at=datetime.utcnow(),
                             symbol="BAC",
                             status=OrderStatus.ACCEPTED,
                             side=OrderSide.BUY))
print("cele pole objektu", open_orders)
# Here, 'reversed' returns a lazy iterator, so it's performant! reversed(l):
#musi fungovat removing stare a pridavani novych
#this list contains all not processed notification, that we try to process during this iteration
#if time is not right we leave the message for next iter
#if time is right we process the message (- note it can trigger additional open_orders, that are added to queue)
def process_message(notif: Notif):
    """Process one queued notification — intentionally a stub for this experiment."""
    global open_orders
    # Unimplemented on purpose; the sketch below would re-enqueue follow-ups.
    pass
# if notif.time % 2 == 0 and notif.time < 300:
#     open_orders.append(Notif(notif.time+50))
todel = []
for i in open_orders:
    #print("*******start iterace polozky", i.time)
    # NOTE(review): passes the order's UUID, although process_message is
    # annotated to take a Notif — annotations are not enforced at runtime.
    process_message(i.id)
    print("removing element",i.id)
    todel.append(i)
    print("*****konec iterace", i.id)
print()
print("to del", todel)
#removing processed from the list
for i in todel:
    open_orders.remove(i)
print("cely list po skonceni vseho")
for i in open_orders: print(i.id)
""""
pred iteraci se zavola synchroné
EXECUTE open orders(time)
- pokusi se vytvorit vsechny otevrene ordery do daneho casu (casu dalsi iterace)
- podporuje i volani callbacku a to vcetne pokynu vytvoreneho z pokynu
- tento novy pokyn muze byt i take exekuovan pokud se vcetne roundtripu vejde do daneho casu
pripadne soucasne vytvoreni i exekuci pokynu
"""

File diff suppressed because it is too large Load Diff

151
testy/dash_save_html.py Normal file
View File

@ -0,0 +1,151 @@
# -*- coding: utf-8 -*-
import os
from html.parser import HTMLParser
import dash
import pandas as pd
import plotly.express as px
import requests
from dash import html, dcc, dash_table, Input, Output
def patch_file(file_path: str, content: bytes, extra: dict = None) -> bytes:
    """Return *content*, patched for offline use when it is the Dash index page.

    For 'index.html' the fetched JSON endpoints in *extra* (path -> raw JSON
    bytes) are inlined into a <script> that monkey-patches window.fetch, and
    absolute href/src URLs are made relative. Any other file is returned as-is.
    """
    if file_path == 'index.html':
        # BUGFIX: guard against extra=None — .items() on None raised AttributeError.
        extra = extra or {}
        index_html_content = content.decode('utf8')
        extra_jsons = f'''
        var patched_jsons_content={{
        {','.join(["'/" + k + "':" + v.decode("utf8") + "" for k, v in extra.items()])}
        }};
        '''
        patched_content = index_html_content.replace(
            '<footer>',
            f'''
            <footer>
            <script>
            ''' + extra_jsons + '''
            const origFetch = window.fetch;
            window.fetch = function () {
                const e = arguments[0]
                if (patched_jsons_content.hasOwnProperty(e)) {
                    return Promise.resolve({
                        json: () => Promise.resolve(patched_jsons_content[e]),
                        headers: new Headers({'content-type': 'application/json'}),
                        status: 200,
                    });
                } else {
                    return origFetch.apply(this, arguments)
                }
            }
            </script>
            '''
        ).replace(
            'href="/',
            'href="'
        ).replace(
            'src="/',
            'src="'
        )
        return patched_content.encode('utf8')
    else:
        return content
def write_file(file_path: str, content: bytes, target_dir='target', ):
    """Persist *content* under *target_dir*, stripping any query string and creating parent dirs."""
    relative = file_path.lstrip('/').split('?')[0]
    destination = os.path.join(target_dir, relative)
    os.makedirs(os.path.dirname(destination), exist_ok=True)
    with open(destination, 'wb') as out:
        out.write(content)
class ExternalResourceParser(HTMLParser):
    """Collect the href/src URLs of <link> and <script> tags fed to the parser."""

    def __init__(self):
        super().__init__()
        self.resources = []

    def handle_starttag(self, tag, attrs):
        # Each tag type carries its resource URL in a different attribute.
        wanted = {'link': 'href', 'script': 'src'}.get(tag)
        if wanted is None:
            return
        for name, value in attrs:
            if name == wanted:
                self.resources.append(value)
def make_static(base_url, target_dir='target'):
    """Snapshot the running Dash app at *base_url* into static files under *target_dir*."""
    index_bytes = requests.get(base_url).content
    # Inline the two JSON endpoints the Dash front-end fetches at startup.
    extra_json = {}
    for endpoint in ('_dash-layout', '_dash-dependencies'):
        extra_json[endpoint] = requests.get(base_url + endpoint).content
    patched_index = patch_file('index.html', index_bytes, extra=extra_json)
    write_file('index.html', patched_index, target_dir)
    # Scrape the patched page for every referenced stylesheet/script.
    parser = ExternalResourceParser()
    parser.feed(patched_index.decode('utf8'))
    # Async chunks are loaded lazily, so the HTML never references them directly.
    lazy_chunks = [
        '_dash-component-suites/dash/dcc/async-graph.js',
        '_dash-component-suites/dash/dcc/async-plotlyjs.js',
        '_dash-component-suites/dash/dash_table/async-table.js',
        '_dash-component-suites/dash/dash_table/async-highlight.js',
    ]
    for resource in parser.resources + lazy_chunks:
        full_url = base_url + resource
        print(f'get {full_url}')
        payload = requests.get(full_url).content
        write_file(resource, patch_file(resource, payload), target_dir)
def main():
    """Build a small demo Dash app with a button that snapshots the app to static files."""
    port = 9050
    app = dash.Dash(__name__)
    # Demo data shared by the bar chart and the table.
    df = pd.DataFrame({
        "Fruit": ["Apples", "Oranges", "Bananas", "Apples", "Oranges", "Bananas"],
        "Amount": [4, 1, 2, 2, 4, 5],
        "City": ["SF", "SF", "SF", "Montreal", "Montreal", "Montreal"]
    })
    fig = px.bar(df, x="Fruit", y="Amount", color="City", barmode="group")
    app.layout = html.Div(children=[
        html.Button('save static', id='save', n_clicks=0),
        html.Span('', id='saved'),
        html.H1(children='Hello Dash'),
        html.Div(children='''
        Dash: A web application framework for your data.
        '''),
        dcc.Graph(
            id='example-graph',
            figure=fig
        ),
        dash_table.DataTable(
            id='table',
            columns=[{"name": i, "id": i} for i in df.columns],
            data=df.to_dict('records'),
        )
    ])
    @app.callback(
        Output('saved', 'children'),
        Input('save', 'n_clicks'),
    )
    def save_result(n_clicks):
        # n_clicks == 0 corresponds to the initial automatic callback invocation.
        if n_clicks == 0:
            return 'not saved'
        else:
            # Snapshot the live app (served on this port) into ./target.
            make_static(f'http://127.0.0.1:{port}/')
            return 'saved'
    app.run_server(debug=False, port=port)

38
testy/debugprints.py Normal file
View File

@ -0,0 +1,38 @@
import inspect
import re
import pprint
from rich import print
from datetime import datetime
def d(x, n=None):
    """Debug-print *x* alongside the literal call-site expression text.

    Reads the caller's source line via frame introspection, so it only works
    when source code is available (not for REPL/exec'd strings). *n* is an
    optional extra note appended to the output.
    """
    frame = inspect.currentframe().f_back
    s = inspect.getframeinfo(frame).code_context[0]
    print(s)
    # Extract the argument text between the parentheses of the call expression.
    r = re.search(r"\((.*)\)", s).group(1)
    print("{} = {}".format(r,x), n)
def prinfo(*args):
    """Print each argument as '<call-site name> = <value>'.

    Recovers the variable names from the caller's source line, so it assumes a
    simple call like prinfo(a, b) — names are split on ", " and paired with
    *args positionally.
    """
    frame = inspect.currentframe().f_back
    s = inspect.getframeinfo(frame).code_context[0]
    r = re.search(r"\((.*)\)", s).group(1)
    print(r)
    vnames = r.split(", ")
    print(vnames)
    for i,(var,val) in enumerate(zip(vnames, args)):
        print(f"{var} = {val}")
def p(var, n = None):
    """Print *var* with f-string debug formatting, optionally prefixed by note *n*."""
    rendered = f'{var = }'
    if n:
        print(n, rendered)
    else:
        print(rendered)
# Ad-hoc demo values for the debug helpers above.
a = 34
b= dict(a1=123,b2="cus")
c = "covece"
#p(a)
#d(b, "neco")
p(a)
p(a,"neco")
prinfo(b,c)

70
testy/decorator_test.py Normal file
View File

@ -0,0 +1,70 @@
import inspect
class LiveInterface:
    """Trading interface whose actions support optional pre_/post_ hook methods."""

    def prepost(f):
        """Decorator: invoke `pre_<name>` before and `post_<name>` after *f*.

        The pre-hook's return value gates execution — a non-positive result
        skips *f* (and the post-hook) and is returned to the caller instead.
        """
        def wrapped(self, *args, **kwargs):
            hook_base = f.__name__
            print(dir(self))
            print(self.__repr__)
            gate = 1
            pre_hook = getattr(self, 'pre_' + hook_base, None)
            if pre_hook is not None:
                gate = pre_hook(*args, **kwargs)
            if gate > 0:
                outcome = f(self, *args, **kwargs)
                post_hook = getattr(self, 'post_' + hook_base, None)
                if post_hook is not None:
                    post_hook(*args, **kwargs)
                return outcome
            print("plugin vratil zaporné. Skipping")
            return gate
        return wrapped

    def __init__(self) -> None:
        pass

    @prepost
    def buy(self):
        print("buy")
##
# class NewInterface(LiveInterface):
# def __init__(self) -> None:
# super().__init__()
class Strategy:
    """Toy strategy wired to an interface whose buy() is wrapped by pre/post hooks."""

    class StrategyInterface(LiveInterface):
        """Interface variant supplying the pre_buy/post_buy hooks for the decorator."""

        def __init__(self) -> None:
            super().__init__()

        def pre_buy(self):
            print("prebuy")
            return 3

        def post_buy(self):
            print("postbuy")
            return -2

    def __init__(self) -> None:
        # Here one would switch between live and backtest interfaces.
        self.interface = Strategy.StrategyInterface()
        self.neco = 1
        self.interface.buy()
def main():
    """Instantiate the strategy (its constructor triggers a decorated buy)."""
    Strategy()

if __name__ == "__main__":
    main()
##

34
testy/gethistorytrades.py Normal file
View File

@ -0,0 +1,34 @@
import os,sys
# Make the repo root importable when run directly from testy/.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
print(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
from datetime import datetime, timezone, time, timedelta, date
import pytz
from rich import print
# Date handling: request one full NYSE session in New York local time.
zone_NY = pytz.timezone('America/New_York')
parametry = {}
symbol = ["BAC"]
client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=True)
# BUGFIX: pytz zones must be attached via localize(); datetime(..., tzinfo=zone_NY)
# silently uses the zone's historical LMT offset (-04:56) instead of EST/EDT.
datetime_object_from = zone_NY.localize(datetime(2023, 3, 16, 9, 30, 0))
datetime_object_to = zone_NY.localize(datetime(2023, 3, 16, 16, 0, 0))
trades_request = StockTradesRequest(symbol_or_symbols=symbol, feed = DataFeed.SIP, start=datetime_object_from, end=datetime_object_to)
all_trades = client.get_stock_trades(trades_request)
#print(all_trades)
print(len(all_trades['BAC']))
# for i in all_trades:
#     print(all_trades[i])
if __name__ == "__main__":
    # bar will be invoked if this module is being run directly, but not via import!
    print("hello")

49
testy/loghandlers.py Normal file
View File

@ -0,0 +1,49 @@
from v2realbot.enums.enums import Mode, Account
from v2realbot.config import get_key
import structlog
from rich import print
from datetime import datetime
from v2realbot.utils.utils import zoneNY
def timestamper(_, __, event_dict):
    """structlog processor: stamp the event dict with the current local ISO time."""
    event_dict.update(time=datetime.now().isoformat())
    return event_dict
#structlog.configure(processors=[timestamper, structlog.processors.KeyValueRenderer()])
# Module-level structlog logger shared by the helpers below.
log = structlog.get_logger()
def neco(arg: int) -> int:
    """Log the call with *arg* bound and return arg + 2.

    BUGFIX: structlog's bind() returns a NEW bound logger; the previous code
    discarded that return value, so `arg` never appeared in the log output.
    """
    bound = log.bind(arg=arg)
    bound.info("neco funkce")
    return arg + 2
def neco2(kwargs):
    """Print a header followed by the counters 0..11.

    BUGFIX: the original iterated `for i in 12`, which raises TypeError
    (an int is not iterable); iterate over range(12) instead.
    *kwargs* is currently unused.
    """
    print("neco 2")
    for i in range(12):
        print(i)
ted = datetime.now().astimezone(zoneNY)
promena = [1,2]
# NOTE(review): structlog's bind() returns a new logger; this discarded call
# does not attach ted/promena to subsequent log lines — confirm intent.
log.bind(ted=ted, promena=promena)
d = dict(a=2,b="33",dalsiklic=4432, pole=[2,3,4])
log.info("beforeprint")
print(d)
d = neco(3)
log.info("udalost")
log.info("incoming",d=d)

View File

@ -0,0 +1,88 @@
from threading import Thread, current_thread
from alpaca.data.live import StockDataStream, CryptoDataStream
from v2realbot.config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE, PAPER
import queue
from alpaca.data.enums import DataFeed
from typing_extensions import Any
import time
from v2realbot.loader.aggregator import TradeAggregator
# class ws_agg() :
# def __init__(self, client, symbol) -> None:
# # Call the Thread class's init function
# Thread.__init__(self)
# self.client = client
# self.symbol = symbol
#object composition
# Shared websocket client (raw dict payloads) and the registry of requested streams.
ws_client = CryptoDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params={})
_streams = []
def add_stream(self, **data):
    """Register one stream request (e.g. symbol=..., resolution=...) on this instance."""
    # Object composition: requests accumulate on the per-instance list.
    self._streams += [data]
async def handler(self, data):
    """Per-message callback: log which worker thread runs it and echo the payload."""
    print("handler ve threadu:", current_thread().name)
    print(data)
    print("*" * 40)
def run(self) :
    """Thread body: subscribe every registered symbol, then start the shared client once.

    NOTE(review): relies on module-level `WS_Stream` (not defined in this
    snippet — presumably a Thread subclass) and on its private `_running` flag.
    """
    print(current_thread().name)
    print(self._streams)
    unique = set()
    ## for each symbol we subscribe
    for i in self._streams:
        print(i['symbol'])
        # Instantiate a TradeAggregator per symbol and keep it in dict[symbol]
        # here
        unique.add(i['symbol'])
    print(unique)
    #subscribe for unique symbols
    #
    ##TODO *PROBLEM* what if we subscribe a symbol already consumed by another strategy — conceptual problem.
    ##TODO on strategy shutdown, do teardown steps such as unsubscribe and possibly stop
    for i in unique:
        WS_Stream.client.subscribe_trades(self.handler, i)
        print("subscribed to",i)
    # Only the first thread starts the websocket loop; subsequent threads just
    # piggy-back their subscriptions onto the already-running client.
    if WS_Stream.client._running is False:
        print("it is not running, starting by calling RUN")
        WS_Stream.client.run()
    # this starts only the first thread; subscribe/unsubscribe still work afterwards
    else:
        print("it is running, not calling RUN")
# class SymbolStream():
# def __init__(self, symbol) -> None:
# self.symbol = symbol
# s
# class StreamRequest:
# symbol: str
# resolution: int
#clientDataStream = CryptoDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params={})
# novy ws stream - vždy jednom vláknu
# NOTE(review): WS_Stream is not defined in this snippet — presumably a Thread
# subclass built from the functions above; confirm against the full file.
obj= WS_Stream("jednicka")
obj.add_stream(symbol="BTC/USD",resolution=15)
# new ws stream — each always in its own thread
obj2= WS_Stream("dvojka")
obj2.add_stream(symbol="ETH/USD",resolution=5)
obj.start()
time.sleep(1)
obj2.start()
# clientDataStream.run()
# clientDataStream2.run()
obj2.join()
obj.join()
print("po startu")
75
testy/pandasinsert.py Normal file
View File

@ -0,0 +1,75 @@
from uuid import UUID, uuid4
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent, OrderType
from common.model import TradeUpdate, Order
from rich import print
import threading
import asyncio
from config import BT_DELAYS
from utils.utils import AttributeDict, ltp, zoneNY, trunc
from utils.tlog import tlog
from datetime import datetime
import pandas as pd
import mplfinance as mpf
# Two synthetic fills (a buy and a sell) used to exercise DataFrame building below.
# NOTE(review): if zoneNY is a pytz timezone, tzinfo=zoneNY applies the LMT
# offset rather than EST/EDT — confirm against utils.zoneNY's type.
trade1 = TradeUpdate(order =Order(id=uuid4(),
                submitted_at = datetime(2023, 3, 17, 9, 30, 0, 0, tzinfo=zoneNY),
                symbol = "BAC",
                qty = 1,
                status = OrderStatus.ACCEPTED,
                order_type = OrderType.LIMIT,
                side = OrderSide.BUY,
                limit_price=22.4),
        event = TradeEvent.FILL,
        execution_id = uuid4(),
        timestamp = datetime.now(),
        position_qty= 2,
        price=22.3,
        qty = 2,
        value = 44.6)
trade2 = TradeUpdate(order =Order(id=uuid4(),
                submitted_at = datetime(2023, 3, 17, 9, 34, 0, 0, tzinfo=zoneNY),
                symbol = "BAC",
                qty = 1,
                status = OrderStatus.ACCEPTED,
                order_type = OrderType.LIMIT,
                side = OrderSide.SELL,
                limit_price=22.4),
        event = TradeEvent.FILL,
        execution_id = uuid4(),
        timestamp = datetime.now(),
        position_qty= 2,
        price=24.3,
        qty = 2,
        value = 48.6)
trades= [trade1,trade2]
#print(trades)
# Column-oriented accumulator: one list per output DataFrame column.
trade_dict = AttributeDict(timestamp=[],symbol=[],qty=[],price=[],position_qty=[],value=[])
for t in trades:
    trade_dict.timestamp.append(t.timestamp)
    trade_dict.symbol.append(t.order.symbol)
    trade_dict.qty.append(t.qty)
    trade_dict.price.append(t.price)
    trade_dict.position_qty.append(t.position_qty)
    trade_dict.value.append(t.value)
print(trade_dict)
trade_df = pd.DataFrame(trade_dict)
trade_df = trade_df.set_index('timestamp')
# NOTE(review): mplfinance expects OHLC(+volume) columns; this frame holds
# trade fields instead, so this plot call likely fails — confirm intent.
mpf.plot(trade_df, # the dataframe containing the OHLC (Open, High, Low and Close) data
    type='candle', # use candlesticks
    volume=True, # also show the volume
    mav=(3,6,9), # use three different moving averages
    figratio=(3,1), # set the ratio of the figure
    style='yahoo', # choose the yahoo style
    title='Bitcoin on Wednesday morning');
print(trade_df)
#pd.DataFrame()
#self.trades.append(trade)

133
testy/pracesdatumem.py Normal file
View File

@ -0,0 +1,133 @@
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest
from alpaca.data.enums import DataFeed
from v2realbot.config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
#from v2realbot.utils.utils import zoneNY
from datetime import datetime, timezone, time, timedelta, date
import pytz
from rich import print
from pandas import to_datetime
#práce s datumy
# New York market timezone; zoneNY kept as an alias used by the helpers below.
zone_NY = pytz.timezone('America/New_York')
zoneNY=zone_NY
# parametry = {}
# symbol = ["C","BAC"]
# client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=True)
# datetime_object_from = datetime(2023, 3, 16, 17, 51, 38, tzinfo=timezone.utc)
# datetime_object_to = datetime(2023, 3, 16, 17, 52, 39, tzinfo=timezone.utc)
# trades_request = StockTradesRequest(symbol_or_symbols=symbol, feed = DataFeed.SIP, start=datetime_object_from, end=datetime_object_to)
# all_trades = client.get_stock_trades(trades_request)
# print(len(all_trades))
# for i in all_trades:
# print(all_trades[i])
# timeZ_Ny = pytz.timezone('America/New_York')
# MARKET_OPEN = time(hour=9, minute=30, second=0, tzinfo=timeZ_Ny)
# MARKET_CLOSE = time(hour=16, minute=30, second=0, tzinfo=timeZ_Ny)
# print(MARKET_OPEN)
# print(MARKET_CLOSE)
def is_open_rush(dt: datetime, mins: int = 30):
    """Return True when *dt* falls within the first *mins* minutes after market open (9:30 NY)."""
    local = dt.astimezone(zoneNY)
    session_open = time(hour=9, minute=30)
    # End of the opening rush: open + mins, computed via a throwaway datetime.
    rush_end = (datetime.combine(date.today(), session_open) + timedelta(minutes=mins)).time()
    return session_open <= local.time() < rush_end
def is_close_rush(dt: datetime, mins: int = 30):
    """
    Returns true if time is within the closing rush (close - mins up to close, 16:00 NY).
    """
    dt = dt.astimezone(zoneNY)
    business_hours = {
        "from": time(hour=9, minute=30),
        "to": time(hour=16, minute=0)
    }
    # Start of the closing rush: official close minus `mins` minutes.
    rushtime = (datetime.combine(date.today(), business_hours["to"]) - timedelta(minutes=mins)).time()
    return rushtime <= dt.time() <= business_hours["to"]
now = datetime.now(tz=zone_NY)
# BUGFIX: pytz zones must be attached with localize(); tzinfo=zone_NY yields
# the historical LMT offset (-04:56) instead of EST/EDT.
now = zone_NY.localize(datetime(2023, 3, 16, 15, 50, 0))
print(now)
print("is closing rush", is_close_rush(now, 0))
""""
TODO toto pridat do utils a pak bud do agregatoru
a nebo do spis offline_loaderu (tam je muzu filtrovat) - pripadne nejake flagy
pak pokracovat v BASE kde jsem skoncil vcera
returns if date is within market open times (no holidays implemented yet)
input is timezone aware datetime
"""
def is_open_hours(dt):
    """Return True when *dt* is a regular NYSE session time (Mon-Fri 9:30-16:30 NY, minus hard-coded holidays)."""
    local = dt.astimezone(pytz.timezone('America/New_York'))
    print("ameriko time", local)
    weekdays = [0, 1, 2, 3, 4]  # monday = 0 ... friday = 4, matching date.weekday()
    session_from = time(hour=9, minute=30)
    session_to = time(hour=16, minute=30)
    holidays = [date(2022, 12, 24), date(2022, 2, 24)]
    if local.weekday() not in weekdays:
        return False
    if local.date() in holidays:
        return False
    return session_from <= local.time() < session_to
now = datetime.now(tz=zone_NY)
# BUGFIX: use localize() for pytz zones (tzinfo= gives the LMT -04:56 offset).
now = zone_NY.localize(datetime(2023, 3, 16, 15, 51, 38))
# tzinfo= is fine here: stdlib timezone.utc is a fixed-offset tzinfo.
now = datetime(2023, 3, 16, 15, 51, 38, tzinfo=timezone.utc)
print(now)
print("is business hour", is_open_hours(now))
def parse_nanodate(s):
    """Parse an ISO-8601 UTC timestamp with up to nanosecond precision.

    Python's %f accepts at most six fractional digits, so the fraction is
    truncated (not rounded) to microseconds; finer digits are ignored.
    Sample input: 2020-12-31T16:20:00.000000123Z --> the 123 ns are dropped.

    BUGFIX: the previous fixed-slice implementation assumed exactly nine
    fractional digits and crashed on shorter fractions such as
    '2023-03-17T12:01:36.13168Z'.
    """
    body, _, frac = s.rstrip('Z').partition('.')
    micro = (frac + '000000')[:6]
    return datetime.strptime(f"{body}.{micro}+0000", '%Y-%m-%dT%H:%M:%S.%f%z')
a = "2023-03-17T12:56:37.588388864Z"
b = "2023-03-17T12:56:41.332702720Z"
c = "2023-03-17T12:56:41.3327027Z"
d = "2023-03-17T12:01:36.13168Z"
# pandas parses nanosecond-precision ISO strings natively.
print(to_datetime(d))
# Epoch seconds for the 2023-03-17 session open/close.
# BUGFIX: use localize() so the pytz zone applies the correct EDT offset
# (tzinfo=zone_NY would use the historical LMT -04:56 offset).
print(int(zone_NY.localize(datetime(2023, 3, 17, 9, 30, 0)).timestamp()))
print(int(zone_NY.localize(datetime(2023, 3, 17, 16, 0, 0)).timestamp()))
# print(a)
# print(parse_nanodate(a).astimezone(tz=zone_NY))
# print(d)
# print(parse_nanodate(d))

19
testy/printoverride.py Normal file
View File

@ -0,0 +1,19 @@
from rich import print
from icecream import ic
def p(*args, **kwargs):
    """Print *args*/*kwargs* only while icecream's global switch (ic.enabled) is on.

    BUGFIX: the original 'else' branch called p("nazdar") again, recursing
    without bound whenever output was disabled; disabled now means a silent
    no-op.
    """
    if ic.enabled:
        print(*args, **kwargs)
# Demo values exercising the gated print helper above.
a = "helo"
b = dict(a=123,b="CUS")
c = 123
p(a,b,c,"nazdar")
p("nazdar","covece",a,c)

View File

@ -0,0 +1,233 @@
"""
Zjistovani ceny z listu tradu pomocí bisect left
"""
from datetime import datetime
from bisect import bisect_left
btdata = [(1679081913.290388, 27.8634), (1679081913.68588, 27.865), (1679081913.986394, 27.86), (1679081914.095521, 27.865), (1679081914.396844, 27.8601), (1679081914.601457, 27.865), (1679081914.721968, 27.86), (1679081914.739287, 27.86), (1679081914.739305, 27.865), (1679081914.739314, 27.865), (1679081914.73941, 27.865), (1679081914.739554, 27.86), (1679081914.739569, 27.86), (1679081914.739572, 27.86), (1679081914.739635, 27.86), (1679081914.739644, 27.86), (1679081914.739771, 27.86), (1679081914.74, 27.865), (1679081914.74048, 27.865), (1679081914.740531, 27.865), (1679081914.740691, 27.865), (1679081914.746943, 27.865), (1679081914.779766, 27.86), (1679081914.779769, 27.86), (1679081914.779901, 27.86), (1679081914.779904, 27.865), (1679081914.77991, 27.865), (1679081914.780006, 27.865), (1679081914.780388, 27.865), (1679081914.780415, 27.865), (1679081914.79638, 27.86), (1679081914.79638, 27.86), (1679081914.796383, 27.865), (1679081914.796498, 27.865), (1679081914.796901, 27.865), (1679081914.816074, 27.865), (1679081914.942793, 27.865), (1679081915.424626, 27.8625), (1679081915.863117, 27.865), (1679081915.863255, 27.8675), (1679081915.870084, 27.865), (1679081915.877677, 27.865), (1679081916.015251, 27.865), (1679081916.018716, 27.865), (1679081916.494838, 27.8656), (1679081916.827929, 27.868), (1679081916.870675, 27.8636), (1679081917.140228, 27.87), (1679081917.140763, 27.87), (1679081917.150359, 27.865), (1679081917.753467, 27.865), (1679081917.853001, 27.865), (1679081918.012672, 27.865), (1679081918.736837, 27.865), (1679081918.737011, 27.865), (1679081918.737177, 27.87), (1679081918.742472, 27.87), (1679081918.743335, 27.87), (1679081918.868673, 27.8699), (1679081919.01883, 27.87), (1679081919.018832, 27.87), (1679081919.018835, 27.87), (1679081919.018839, 27.87), (1679081919.018839, 27.87), (1679081919.018857, 27.87), (1679081919.018905, 27.87), (1679081919.018911, 27.87), (1679081919.018911, 27.87), (1679081919.018914, 27.87), (1679081919.018914, 
27.87), (1679081919.01892, 27.87), (1679081919.01892, 27.87), (1679081919.018923, 27.87), (1679081919.018929, 27.87), (1679081919.018932, 27.87), (1679081919.018938, 27.87), (1679081919.018941, 27.87), (1679081919.018947, 27.87), (1679081919.01895, 27.87), (1679081919.018956, 27.87), (1679081919.018968, 27.87), (1679081919.018986, 27.87), (1679081919.019074, 27.87), (1679081919.019077, 27.87), (1679081919.019077, 27.87), (1679081919.019079, 27.87), (1679081919.019082, 27.87), (1679081919.019082, 27.87), (1679081919.019095, 27.87), (1679081919.019095, 27.87), (1679081919.0191, 27.87), (1679081919.019103, 27.87), (1679081919.019106, 27.87), (1679081919.019109, 27.87), (1679081919.019112, 27.87), (1679081919.019112, 27.87), (1679081919.019124, 27.87), (1679081919.019127, 27.87), (1679081919.019133, 27.87), (1679081919.019139, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019326, 27.87), (1679081919.019326, 27.87), (1679081919.019936, 27.87), (1679081919.019978, 27.87), (1679081919.020189, 27.87), (1679081919.020264, 27.87), (1679081919.020312, 27.87), (1679081919.020628, 27.87), (1679081919.025445, 27.87), (1679081919.02565, 27.87), (1679081919.066583, 27.87), (1679081919.066953, 27.87), (1679081919.067248, 27.87), (1679081919.067398, 27.875), (1679081919.067672, 27.875), (1679081919.067939, 27.875), (1679081919.067975, 27.875), (1679081919.071849, 27.875), (1679081919.157709, 27.875), (1679081919.184806, 27.875), (1679081919.301574, 27.87), (1679081919.381201, 27.88), (1679081919.381204, 27.88), (1679081919.381237, 27.88), (1679081919.381264, 27.875), (1679081919.381643, 27.88), (1679081919.381649, 27.88), (1679081919.381676, 27.88), (1679081919.381685, 27.88), (1679081919.381697, 27.88), (1679081919.381706, 27.88), (1679081919.381718, 27.88), (1679081919.395142, 27.875), (1679081919.469476, 27.88), (1679081919.570886, 27.88), (1679081919.690577, 27.875), (1679081920.168907, 27.878)]
###
# Clip btdata (ascending list of (epoch_seconds, price) tuples) to the window
# [range_start, range_end], then simulate limit/market order fills inside it.
index_start = None
index_end = None
range_start = 1679081914.73941
range_end = 1679081917.150359
print("range_start",range_start)
print("range_end",range_end)
a= datetime.now().timestamp()
print("start 1.varianta", a)
# BUGFIX: this scan was commented out, leaving index_start/index_end as None
# and crashing the slice below (None + 1). Restored; a bisect_left on the
# timestamps would be O(log n), but the naive linear variant is what this
# benchmark measures.
for idx in range(len(btdata)):
    if btdata[idx][0] <= range_start:
        index_start = idx
    if btdata[idx][0] >= range_end:
        index_end = idx
        break
print("index_start", index_start)
print("index_end", index_end)
print("oriznuto",btdata[index_start:index_end+1])
new_range = btdata[index_start:index_end+1]
#LIMIT FILL - BUY
submitted_at: float = 1679081914.739644
limit_price: float = 27.865
fill_time = None
# BUGFIX: removed the malformed one-argument bisect_left(...) call — the
# function requires (sorted_list, value) and the call crashed at runtime.
# First trade at least the 20 ms round-trip after submission with a price at
# or below the limit fills the BUY.
for tick in new_range:
    if float(tick[0]) > float(float(submitted_at) + float(0.020)) and tick[1] <= limit_price:
        print(tick)
        fill_time = tick[0]
        print("FILL LIMIT BUY at", fill_time, "at",tick[1])
        break
if not fill_time: print("NO FILL for ", limit_price)
#LIMIT FILL - SELL
# Same scan with the price condition inverted for a SELL limit.
for tick in new_range:
    if float(tick[0]) > float(float(submitted_at) + float(0.020)) and tick[1] >= limit_price:
        print(tick)
        fill_time = tick[0]
        print("FILL LIMIT SELL at", fill_time, "at",tick[1])
        break
if not fill_time: print("NO FILL for ", limit_price)
#MARKET FILL BUY/SELL:
# A market order fills at the first trade after the round-trip delay.
for tick in new_range:
    if tick[0] > submitted_at + 0.020:
        print(tick)
        print("FILL MARKET at", tick[0], "cena", tick[1])
        break
b= datetime.now().timestamp()
print("stop 1.varianta", b)
print("rozdil", b-a)
#0.0006699562072753906
#0.0007920265197753906
#0.0006699562072753906
#0.0007920265197753906
# (1679081913.290388, 27.8634)
# (1679081913.68588, 27.865)
# (1679081913.986394, 27.86)
# (1679081914.095521, 27.865)
# (1679081914.396844, 27.8601)
# (1679081914.601457, 27.865)
# (1679081914.721968, 27.86)
# (1679081914.739287, 27.86)
# (1679081914.739305, 27.865)
# (1679081914.739314, 27.865)
# (1679081914.73941, 27.865)*
# (1679081914.739554, 27.86)
# (1679081914.739569, 27.86)
# (1679081914.739572, 27.86)
# (1679081914.739635, 27.86)
# (1679081914.739644, 27.86)submit
# (1679081914.739771, 27.86)
# (1679081914.74, 27.865)
# (1679081914.74048, 27.865)
# (1679081914.740531, 27.865)
# (1679081914.740691, 27.865)
# (1679081914.746943, 27.865)
# (1679081914.779766, 27.86)
# (1679081914.779769, 27.86)
# (1679081914.779901, 27.86)
# (1679081914.779904, 27.865)
# (1679081914.77991, 27.865)
# (1679081914.780006, 27.865)
# (1679081914.780388, 27.865)
# (1679081914.780415, 27.865)
# (1679081914.79638, 27.86)
# (1679081914.79638, 27.86)
# (1679081914.796383, 27.865)
# (1679081914.796498, 27.865)
# (1679081914.796901, 27.865)
# (1679081914.816074, 27.865)
# (1679081914.942793, 27.865)
# (1679081915.424626, 27.8625)
# (1679081915.863117, 27.865)
# (1679081915.863255, 27.8675)
# (1679081915.870084, 27.865)
# (1679081915.877677, 27.865)
# (1679081916.015251, 27.865)
# (1679081916.018716, 27.865)
# (1679081916.494838, 27.8656)
# (1679081916.827929, 27.868)
# (1679081916.870675, 27.8636)
# (1679081917.140228, 27.87)
# (1679081917.140763, 27.87)
# (1679081917.150359, 27.865)end
# (1679081917.753467, 27.865)
# (1679081917.853001, 27.865)
# (1679081918.012672, 27.865)
# (1679081918.736837, 27.865)
# (1679081918.737011, 27.865)
# (1679081918.737177, 27.87)
# (1679081918.742472, 27.87)
# (1679081918.743335, 27.87)
# (1679081918.868673, 27.8699)
# (1679081919.01883, 27.87)
# (1679081919.018832, 27.87)
# (1679081919.018835, 27.87)
# (1679081919.018839, 27.87)
# (1679081919.018839, 27.87)
# (1679081919.018857, 27.87)
# (1679081919.018905, 27.87)
# (1679081919.018911, 27.87)
# (1679081919.018911, 27.87)
# (1679081919.018914, 27.87)
# (1679081919.018914, 27.87)
# (1679081919.01892, 27.87)
# (1679081919.01892, 27.87)
# (1679081919.018923, 27.87)
# (1679081919.018929, 27.87)
# (1679081919.018932, 27.87)
# (1679081919.018938, 27.87)
# (1679081919.018941, 27.87)
# (1679081919.018947, 27.87)
# (1679081919.01895, 27.87)
# (1679081919.018956, 27.87)
# (1679081919.018968, 27.87)
# (1679081919.018986, 27.87)
# (1679081919.019074, 27.87)
# (1679081919.019077, 27.87)
# (1679081919.019077, 27.87)
# (1679081919.019079, 27.87)
# (1679081919.019082, 27.87)
# (1679081919.019082, 27.87)
# (1679081919.019095, 27.87)
# (1679081919.019095, 27.87)
# (1679081919.0191, 27.87)
# (1679081919.019103, 27.87)
# (1679081919.019106, 27.87)
# (1679081919.019109, 27.87)
# (1679081919.019112, 27.87)
# (1679081919.019112, 27.87)
# (1679081919.019124, 27.87)
# (1679081919.019127, 27.87)
# (1679081919.019133, 27.87)
# (1679081919.019139, 27.87)
# (1679081919.019323, 27.87)
# (1679081919.019323, 27.87)
# (1679081919.019323, 27.87)
# (1679081919.019323, 27.87)
# (1679081919.019326, 27.87)
# (1679081919.019326, 27.87)
# (1679081919.019936, 27.87)
# (1679081919.019978, 27.87)
# (1679081919.020189, 27.87)
# (1679081919.020264, 27.87)
# (1679081919.020312, 27.87)
# (1679081919.020628, 27.87)
# (1679081919.025445, 27.87)
# (1679081919.02565, 27.87)
# (1679081919.066583, 27.87)
# (1679081919.066953, 27.87)
# (1679081919.067248, 27.87)
# (1679081919.067398, 27.875)
# (1679081919.067672, 27.875)
# (1679081919.067939, 27.875)
# (1679081919.067975, 27.875)
# (1679081919.071849, 27.875)
# (1679081919.157709, 27.875)
# (1679081919.184806, 27.875)
# (1679081919.301574, 27.87)
# (1679081919.381201, 27.88)
# (1679081919.381204, 27.88)
# (1679081919.381237, 27.88)
# (1679081919.381264, 27.875)
# (1679081919.381643, 27.88)
# (1679081919.381649, 27.88)
# (1679081919.381676, 27.88)
# (1679081919.381685, 27.88)
# (1679081919.381697, 27.88)
# (1679081919.381706, 27.88)
# (1679081919.381718, 27.88)
# (1679081919.395142, 27.875)
# (1679081919.469476, 27.88)
# (1679081919.570886, 27.88)
# (1679081919.690577, 27.875)
# (1679081920.168907, 27.878)

View File

@ -0,0 +1,354 @@
"""
standardni pristup zjistovani ceny z listu tradu
"""
from datetime import datetime
import timeit
btdata = [(1679081913.290388, 27.8634), (1679081913.68588, 27.865), (1679081913.986394, 27.86), (1679081914.095521, 27.865), (1679081914.396844, 27.8601), (1679081914.601457, 27.865), (1679081914.721968, 27.86), (1679081914.739287, 27.86), (1679081914.739305, 27.865), (1679081914.739314, 27.865), (1679081914.73941, 27.865), (1679081914.739554, 27.86), (1679081914.739569, 27.86), (1679081914.739572, 27.86), (1679081914.739635, 27.86), (1679081914.739644, 27.86), (1679081914.739771, 27.86), (1679081914.74, 27.865), (1679081914.74048, 27.865), (1679081914.740531, 27.865), (1679081914.740691, 27.865), (1679081914.746943, 27.865), (1679081914.779766, 27.86), (1679081914.779769, 27.86), (1679081914.779901, 27.86), (1679081914.779904, 27.865), (1679081914.77991, 27.865), (1679081914.780006, 27.865), (1679081914.780388, 27.865), (1679081914.780415, 27.865), (1679081914.79638, 27.86), (1679081914.79638, 27.86), (1679081914.796383, 27.865), (1679081914.796498, 27.865), (1679081914.796901, 27.865), (1679081914.816074, 27.865), (1679081914.942793, 27.865), (1679081915.424626, 27.8625), (1679081915.863117, 27.865), (1679081915.863255, 27.8675), (1679081915.870084, 27.865), (1679081915.877677, 27.865), (1679081916.015251, 27.865), (1679081916.018716, 27.865), (1679081916.494838, 27.8656), (1679081916.827929, 27.868), (1679081916.870675, 27.8636), (1679081917.140228, 27.87), (1679081917.140763, 27.87), (1679081917.150359, 27.865), (1679081917.753467, 27.865), (1679081917.853001, 27.865), (1679081918.012672, 27.865), (1679081918.736837, 27.865), (1679081918.737011, 27.865), (1679081918.737177, 27.87), (1679081918.742472, 27.87), (1679081918.743335, 27.87), (1679081918.868673, 27.8699), (1679081919.01883, 27.87), (1679081919.018832, 27.87), (1679081919.018835, 27.87), (1679081919.018839, 27.87), (1679081919.018839, 27.87), (1679081919.018857, 27.87), (1679081919.018905, 27.87), (1679081919.018911, 27.87), (1679081919.018911, 27.87), (1679081919.018914, 27.87), (1679081919.018914, 
27.87), (1679081919.01892, 27.87), (1679081919.01892, 27.87), (1679081919.018923, 27.87), (1679081919.018929, 27.87), (1679081919.018932, 27.87), (1679081919.018938, 27.87), (1679081919.018941, 27.87), (1679081919.018947, 27.87), (1679081919.01895, 27.87), (1679081919.018956, 27.87), (1679081919.018968, 27.87), (1679081919.018986, 27.87), (1679081919.019074, 27.87), (1679081919.019077, 27.87), (1679081919.019077, 27.87), (1679081919.019079, 27.87), (1679081919.019082, 27.87), (1679081919.019082, 27.87), (1679081919.019095, 27.87), (1679081919.019095, 27.87), (1679081919.0191, 27.87), (1679081919.019103, 27.87), (1679081919.019106, 27.87), (1679081919.019109, 27.87), (1679081919.019112, 27.87), (1679081919.019112, 27.87), (1679081919.019124, 27.87), (1679081919.019127, 27.87), (1679081919.019133, 27.87), (1679081919.019139, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019326, 27.87), (1679081919.019326, 27.87), (1679081919.019936, 27.87), (1679081919.019978, 27.87), (1679081919.020189, 27.87), (1679081919.020264, 27.87), (1679081919.020312, 27.87), (1679081919.020628, 27.87), (1679081919.025445, 27.87), (1679081919.02565, 27.87), (1679081919.066583, 27.87), (1679081919.066953, 27.87), (1679081919.067248, 27.87), (1679081919.067398, 27.875), (1679081919.067672, 27.875), (1679081919.067939, 27.875), (1679081919.067975, 27.875), (1679081919.071849, 27.875), (1679081919.157709, 27.875), (1679081919.184806, 27.875), (1679081919.301574, 27.87), (1679081919.381201, 27.88), (1679081919.381204, 27.88), (1679081919.381237, 27.88), (1679081919.381264, 27.875), (1679081919.381643, 27.88), (1679081919.381649, 27.88), (1679081919.381676, 27.88), (1679081919.381685, 27.88), (1679081919.381697, 27.88), (1679081919.381706, 27.88), (1679081919.381718, 27.88), (1679081919.395142, 27.875), (1679081919.469476, 27.88), (1679081919.570886, 27.88), (1679081919.690577, 27.875), (1679081920.168907, 27.878)]
from bisect import bisect_left
def get_last_price(time: float, symbol: str = None, data=None):
    """
    Return the (timestamp, price) trade that was effective at *time*.

    The result is the last trade recorded strictly before the first trade
    whose timestamp is >= *time* (i.e. the price a backtest would see at
    that moment). Lookup is O(log n) via bisect instead of the original
    O(n) scan (resolves the TODO: optimalize).

    Args:
        time: POSIX timestamp to look up.
        symbol: unused placeholder kept for interface compatibility.
        data: sorted list of (timestamp, price) tuples; defaults to the
            module-level ``btdata``.

    Returns:
        A (timestamp, price) tuple.
    """
    trades = btdata if data is None else data
    # First index whose timestamp is >= time; (time,) compares smaller
    # than (time, price), so equal timestamps are found as well.
    pos = bisect_left(trades, (time,))
    if pos == 0:
        # *time* precedes every trade. The original indexed [i-1] == [-1]
        # and wrapped around to the LAST trade; return the earliest one.
        return trades[0]
    if pos == len(trades):
        # *time* is after the final trade. The original loop finished
        # without break and returned the second-to-last element
        # (off-by-one); return the most recent trade instead.
        return trades[-1]
    return trades[pos - 1]
def take_closest(myList, myNumber):
    """
    Assumes myList is a sorted list of (timestamp, price) tuples.

    Returns the entry immediately preceding the first element with a
    timestamp >= myNumber; when myNumber precedes everything, the first
    entry is returned.
    """
    idx = bisect_left(myList, (myNumber,))
    if idx == 0:
        return myList[0]
    # if idx == len(myList):
    #     return myList[-1]
    stamp, price = myList[idx - 1]
    return stamp, price
# Sanity check: both lookup strategies should agree for the same timestamp.
print("bisect price")
print(take_closest(btdata, 1679081913.986395))
# typo fixed in the label: "stamdard" -> "standard"
print("standard price")
print(get_last_price(1679081913.986395))
#(1679081919.018929, 27.87), (1679081919.018932, 27.87), (1679081919.018938, 27.87),
# def looper(cislo, btdata):
# for i in range(len(btdata)):
# #print(btdata[i][0])
# #print(i)
# if btdata[i][0] >= cislo:
# index_end = i
# break
# return btdata[i]
# setup = '''
# btdata = [(1679081913.290388, 27.8634), (1679081913.68588, 27.865), (1679081913.986394, 27.86), (1679081914.095521, 27.865), (1679081914.396844, 27.8601), (1679081914.601457, 27.865), (1679081914.721968, 27.86), (1679081914.739287, 27.86), (1679081914.739305, 27.865), (1679081914.739314, 27.865), (1679081914.73941, 27.865), (1679081914.739554, 27.86), (1679081914.739569, 27.86), (1679081914.739572, 27.86), (1679081914.739635, 27.86), (1679081914.739644, 27.86), (1679081914.739771, 27.86), (1679081914.74, 27.865), (1679081914.74048, 27.865), (1679081914.740531, 27.865), (1679081914.740691, 27.865), (1679081914.746943, 27.865), (1679081914.779766, 27.86), (1679081914.779769, 27.86), (1679081914.779901, 27.86), (1679081914.779904, 27.865), (1679081914.77991, 27.865), (1679081914.780006, 27.865), (1679081914.780388, 27.865), (1679081914.780415, 27.865), (1679081914.79638, 27.86), (1679081914.79638, 27.86), (1679081914.796383, 27.865), (1679081914.796498, 27.865), (1679081914.796901, 27.865), (1679081914.816074, 27.865), (1679081914.942793, 27.865), (1679081915.424626, 27.8625), (1679081915.863117, 27.865), (1679081915.863255, 27.8675), (1679081915.870084, 27.865), (1679081915.877677, 27.865), (1679081916.015251, 27.865), (1679081916.018716, 27.865), (1679081916.494838, 27.8656), (1679081916.827929, 27.868), (1679081916.870675, 27.8636), (1679081917.140228, 27.87), (1679081917.140763, 27.87), (1679081917.150359, 27.865), (1679081917.753467, 27.865), (1679081917.853001, 27.865), (1679081918.012672, 27.865), (1679081918.736837, 27.865), (1679081918.737011, 27.865), (1679081918.737177, 27.87), (1679081918.742472, 27.87), (1679081918.743335, 27.87), (1679081918.868673, 27.8699), (1679081919.01883, 27.87), (1679081919.018832, 27.87), (1679081919.018835, 27.87), (1679081919.018839, 27.87), (1679081919.018839, 27.87), (1679081919.018857, 27.87), (1679081919.018905, 27.87), (1679081919.018911, 27.87), (1679081919.018911, 27.87), (1679081919.018914, 27.87), (1679081919.018914, 
27.87), (1679081919.01892, 27.87), (1679081919.01892, 27.87), (1679081919.018923, 27.87), (1679081919.018929, 27.87), (1679081919.018932, 27.87), (1679081919.018938, 27.87), (1679081919.018941, 27.87), (1679081919.018947, 27.87), (1679081919.01895, 27.87), (1679081919.018956, 27.87), (1679081919.018968, 27.87), (1679081919.018986, 27.87), (1679081919.019074, 27.87), (1679081919.019077, 27.87), (1679081919.019077, 27.87), (1679081919.019079, 27.87), (1679081919.019082, 27.87), (1679081919.019082, 27.87), (1679081919.019095, 27.87), (1679081919.019095, 27.87), (1679081919.0191, 27.87), (1679081919.019103, 27.87), (1679081919.019106, 27.87), (1679081919.019109, 27.87), (1679081919.019112, 27.87), (1679081919.019112, 27.87), (1679081919.019124, 27.87), (1679081919.019127, 27.87), (1679081919.019133, 27.87), (1679081919.019139, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019323, 27.87), (1679081919.019326, 27.87), (1679081919.019326, 27.87), (1679081919.019936, 27.87), (1679081919.019978, 27.87), (1679081919.020189, 27.87), (1679081919.020264, 27.87), (1679081919.020312, 27.87), (1679081919.020628, 27.87), (1679081919.025445, 27.87), (1679081919.02565, 27.87), (1679081919.066583, 27.87), (1679081919.066953, 27.87), (1679081919.067248, 27.87), (1679081919.067398, 27.875), (1679081919.067672, 27.875), (1679081919.067939, 27.875), (1679081919.067975, 27.875), (1679081919.071849, 27.875), (1679081919.157709, 27.875), (1679081919.184806, 27.875), (1679081919.301574, 27.87), (1679081919.381201, 27.88), (1679081919.381204, 27.88), (1679081919.381237, 27.88), (1679081919.381264, 27.875), (1679081919.381643, 27.88), (1679081919.381649, 27.88), (1679081919.381676, 27.88), (1679081919.381685, 27.88), (1679081919.381697, 27.88), (1679081919.381706, 27.88), (1679081919.381718, 27.88), (1679081919.395142, 27.875), (1679081919.469476, 27.88), (1679081919.570886, 27.88), (1679081919.690577, 27.875), (1679081920.168907, 27.878)]
# from bisect import bisect_left
# def take_closest(myList, myNumber):
# """
# Assumes myList is sorted. Returns closest value to myNumber.
# If two numbers are equally close, return the smallest number.
# """
# pos = bisect_left(myList, (myNumber,))
# if pos == 0:
# return myList[0]
# if pos == len(myList):
# return myList[-1]
# after, afterPrice = myList[pos]
# return after,afterPrice
# # before, beforePrice = myList[pos - 1]
# # if after - myNumber < myNumber - before:
# # return after,afterPrice
# # else:
# # return before,beforePrice
# #(1679081919.018929, 27.87), (1679081919.018932, 27.87), (1679081919.018938, 27.87),
# def looper(cislo, btdata):
# for i in range(len(btdata)):
# #print(btdata[i][0])
# #print(i)
# if btdata[i][0] >= cislo:
# index_end = i
# break
# return btdata[i]
# '''
# print("bisect")
# print(take_closest(btdata, 1679081919.018939))
# print("standard")
# print(looper(1679081919.018939, btdata))
# print(timeit.timeit('take_closest(btdata, 1679081919.018939)', setup=setup))
# #0.4
# print(timeit.timeit('looper(1679081919.018939, btdata)', setup=setup))
# #2.4
# ###
# # 1679081919.381264
# # 1679081919.381643
# # 1679081919.381649
# #orizneme pole
# """
# btdata obsahuje vsechny aktualni timestampy tradu a jejich cenu.
# 1) pracujeme vzdy na zacatku listu do indexu odpovidajici aktualnimu casu
# 2) zjistime si index a pak iterujeme nad nim
# 3) po skonceni pak tento pracovni kus umazeme
# """
# # def match(time: float):
# a= datetime.now().timestamp()
# print("start 1.varianta", a)
# def get_index_bisect(myList, time):
# """
# Assumes myList is sorted. Returns first biggeer value to the number.
# """
# pos = bisect_left(myList, (time,))
# if pos == 0:
# return myList[0]
# if pos == len(myList):
# return myList[-1]
# return pos
# #after, afterPrice = myList[pos]
# #return after,afterPrice
# def get_index(btdata, time: float):
# index_end = None #
# range_end = time
# print("range_end",range_end)
# for i in range(len(btdata)):
# #print(btdata[i][0])
# #print(i)
# if btdata[i][0] >= range_end:
# index_end = i
# break
# print("index_end", index_end)
# print("oriznuto",btdata[0:index_end+1])
# return index_end
# index_end = get_index(btdata, 1679081919.018939)
# print("get_index", index_end)
# index_end = get_index_bisect(btdata, 1679081919.018939)
# print("get_index_bisect", index_end)
# new_range = btdata[0:index_end+1]
# print("novy rozsah?", len(new_range))
# print("puvodni pole", len(btdata))
# #LIMIT FILL - BUY
# submitted_at: float = 1679081914.739644
# limit_price: float = 27.865
# fill_time = None
# for i in new_range:
# #print(i)
# ##najde prvni nejvetsi čas vetsi nez minfill a majici
# ## pro LIMITku uděláme nějaký spešl BT_DELAY.LIMIT_OFFSET, aby se nevyplnilo hned jako prvni s touto cenou
# ## tzn. o kolik se prumerne vyplni limitka pozdeji
# if float(i[0]) > float(float(submitted_at) + float(0.020)) and i[1] <= limit_price:
# #(1679081919.381649, 27.88)
# print(i)
# fill_time = i[0]
# print("FILL LIMIT BUY at", fill_time, "at",i[1])
# break
# if not fill_time: print("NO FILL for ", limit_price)
# #LIMIT FILL - SELL
# for i in new_range:
# #print(i)
# ##najde prvni nejvetsi čas vetsi nez minfill a majici
# ## pro LIMITku uděláme nějaký spešl BT_DELAY.LIMIT_OFFSET, aby se nevyplnilo hned jako prvni s touto cenou
# ## tzn. o kolik se prumerne vyplni limitka pozdeji
# if float(i[0]) > float(float(submitted_at) + float(0.020)) and i[1] >= limit_price:
# #(1679081919.381649, 27.88)
# print(i)
# fill_time = i[0]
# print("FILL LIMIT SELL at", fill_time, "at",i[1])
# break
# if not fill_time: print("NO FILL for ", limit_price)
# #MARKET FILL BUY/SELL:
# for i in new_range:
# #print(i)
# #najde prvni nejvetsi čas vetsi nez minfill
# if i[0] > submitted_at + 0.020:
# #(1679081919.381649, 27.88)
# print(i)
# print("FILL MARKET at", i[0], "cena", i[1])
# break
# del btdata[0:index_end]
# #0.0006699562072753906
# #0.0007920265197753906
# # (1679081913.290388, 27.8634)
# # (1679081913.68588, 27.865)
# # (1679081913.986394, 27.86)
# # (1679081914.095521, 27.865)
# # (1679081914.396844, 27.8601)
# # (1679081914.601457, 27.865)
# # (1679081914.721968, 27.86)
# # (1679081914.739287, 27.86)
# # (1679081914.739305, 27.865)
# # (1679081914.739314, 27.865)
# # (1679081914.73941, 27.865)*
# # (1679081914.739554, 27.86)
# # (1679081914.739569, 27.86)
# # (1679081914.739572, 27.86)
# # (1679081914.739635, 27.86)
# # (1679081914.739644, 27.86)submit
# # (1679081914.739771, 27.86)
# # (1679081914.74, 27.865)
# # (1679081914.74048, 27.865)
# # (1679081914.740531, 27.865)
# # (1679081914.740691, 27.865)
# # (1679081914.746943, 27.865)
# # (1679081914.779766, 27.86)
# # (1679081914.779769, 27.86)
# # (1679081914.779901, 27.86)
# # (1679081914.779904, 27.865)
# # (1679081914.77991, 27.865)
# # (1679081914.780006, 27.865)
# # (1679081914.780388, 27.865)
# # (1679081914.780415, 27.865)
# # (1679081914.79638, 27.86)
# # (1679081914.79638, 27.86)
# # (1679081914.796383, 27.865)
# # (1679081914.796498, 27.865)
# # (1679081914.796901, 27.865)
# # (1679081914.816074, 27.865)
# # (1679081914.942793, 27.865)
# # (1679081915.424626, 27.8625)
# # (1679081915.863117, 27.865)
# # (1679081915.863255, 27.8675)
# # (1679081915.870084, 27.865)
# # (1679081915.877677, 27.865)
# # (1679081916.015251, 27.865)
# # (1679081916.018716, 27.865)
# # (1679081916.494838, 27.8656)
# # (1679081916.827929, 27.868)
# # (1679081916.870675, 27.8636)
# # (1679081917.140228, 27.87)
# # (1679081917.140763, 27.87)
# # (1679081917.150359, 27.865)end
# # (1679081917.753467, 27.865)
# # (1679081917.853001, 27.865)
# # (1679081918.012672, 27.865)
# # (1679081918.736837, 27.865)
# # (1679081918.737011, 27.865)
# # (1679081918.737177, 27.87)
# # (1679081918.742472, 27.87)
# # (1679081918.743335, 27.87)
# # (1679081918.868673, 27.8699)
# # (1679081919.01883, 27.87)
# # (1679081919.018832, 27.87)
# # (1679081919.018835, 27.87)
# # (1679081919.018839, 27.87)
# # (1679081919.018839, 27.87)
# # (1679081919.018857, 27.87)
# # (1679081919.018905, 27.87)
# # (1679081919.018911, 27.87)
# # (1679081919.018911, 27.87)
# # (1679081919.018914, 27.87)
# # (1679081919.018914, 27.87)
# # (1679081919.01892, 27.87)
# # (1679081919.01892, 27.87)
# # (1679081919.018923, 27.87)
# # (1679081919.018929, 27.87)
# # (1679081919.018932, 27.87)
# # (1679081919.018938, 27.87)
# # (1679081919.018941, 27.87)
# # (1679081919.018947, 27.87)
# # (1679081919.01895, 27.87)
# # (1679081919.018956, 27.87)
# # (1679081919.018968, 27.87)
# # (1679081919.018986, 27.87)
# # (1679081919.019074, 27.87)
# # (1679081919.019077, 27.87)
# # (1679081919.019077, 27.87)
# # (1679081919.019079, 27.87)
# # (1679081919.019082, 27.87)
# # (1679081919.019082, 27.87)
# # (1679081919.019095, 27.87)
# # (1679081919.019095, 27.87)
# # (1679081919.0191, 27.87)
# # (1679081919.019103, 27.87)
# # (1679081919.019106, 27.87)
# # (1679081919.019109, 27.87)
# # (1679081919.019112, 27.87)
# # (1679081919.019112, 27.87)
# # (1679081919.019124, 27.87)
# # (1679081919.019127, 27.87)
# # (1679081919.019133, 27.87)
# # (1679081919.019139, 27.87)
# # (1679081919.019323, 27.87)
# # (1679081919.019323, 27.87)
# # (1679081919.019323, 27.87)
# # (1679081919.019323, 27.87)
# # (1679081919.019326, 27.87)
# # (1679081919.019326, 27.87)
# # (1679081919.019936, 27.87)
# # (1679081919.019978, 27.87)
# # (1679081919.020189, 27.87)
# # (1679081919.020264, 27.87)
# # (1679081919.020312, 27.87)
# # (1679081919.020628, 27.87)
# # (1679081919.025445, 27.87)
# # (1679081919.02565, 27.87)
# # (1679081919.066583, 27.87)
# # (1679081919.066953, 27.87)
# # (1679081919.067248, 27.87)
# # (1679081919.067398, 27.875)
# # (1679081919.067672, 27.875)
# # (1679081919.067939, 27.875)
# # (1679081919.067975, 27.875)
# # (1679081919.071849, 27.875)
# # (1679081919.157709, 27.875)
# # (1679081919.184806, 27.875)
# # (1679081919.301574, 27.87)
# # (1679081919.381201, 27.88)
# # (1679081919.381204, 27.88)
# # (1679081919.381237, 27.88)
# # (1679081919.381264, 27.875)
# # (1679081919.381643, 27.88)
# # (1679081919.381649, 27.88)
# # (1679081919.381676, 27.88)
# # (1679081919.381685, 27.88)
# # (1679081919.381697, 27.88)
# # (1679081919.381706, 27.88)
# # (1679081919.381718, 27.88)
# # (1679081919.395142, 27.875)
# # (1679081919.469476, 27.88)
# # (1679081919.570886, 27.88)
# # (1679081919.690577, 27.875)
# # (1679081920.168907, 27.878)

22
testy/testServices.py Normal file
View File

@ -0,0 +1,22 @@
from v2realbot.utils.utils import AttributeDict, zoneNY, dict_replace_value, Store, parse_toml_string
import os,sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from v2realbot.enums.enums import Mode, Account
from v2realbot.config import WEB_API_KEY
from datetime import datetime
from icecream import install, ic
import os
from rich import print
from threading import current_thread
from fastapi import FastAPI, Depends, HTTPException, status
from fastapi.security import APIKeyHeader
import uvicorn
from uuid import UUID
import controller.services as cs
from v2realbot.common.model import StrategyInstance, RunnerView
# NOTE(review): the single-line fixture below is immediately overwritten by
# the multi-line variant and is effectively dead; kept only as an alternative
# sample config — delete one of the two once the desired fixture is settled.
d = "[stratvars] maxpozic = 205 chunk = 114 MA = 2 Trend = 3 profit = 0.02 lastbuyindex=-6 pendingbuys={} limitka = 'None' jevylozeno=0 vykladka=5 curve = [0.01, 0.01, 0.01, 0.0, 0.02, 0.02, 0.01,0.01, 0.01,0.03, 0.01, 0.01, 0.01,0.04, 0.01,0.01, 0.01,0.05, 0.01,0.01, 0.01,0.01, 0.06,0.01, 0.01,0.01, 0.01] blockbuy = 0 ticks2reset = 0.04"
d='[stratvars]\r\n maxpozic = 200\r\n chunk = 111\r\n MA = 2\r\n Trend = 3\r\n profit = 0.02\r\n lastbuyindex=-6\r\n pendingbuys={}\r\n limitka = "None"\r\n jevylozeno=0\r\n vykladka=5\r\n curve = [0.01, 0.01, 0.01, 0.0, 0.02, 0.02, 0.01,0.01, 0.01,0.03, 0.01, 0.01, 0.01,0.04, 0.01,0.01, 0.01,0.05, 0.01,0.01, 0.01,0.01, 0.06,0.01, 0.01,0.01, 0.01]\r\n blockbuy = 0\r\n ticks2reset = 0.04'
print(d)
# parse the TOML fixture and show the (result, error) pair
a,b = parse_toml_string(d)
print(a,b)

57
testy/testStore Normal file
View File

@ -0,0 +1,57 @@
import os,sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import pickle
from v2realbot.common.model import StrategyInstance
from typing import List, Self
#class to persist
class Store:
    """Tiny pickle-backed persistence for a list of StrategyInstance objects.

    Loads the list from the cache file when it exists; otherwise starts
    with an empty list. Call :meth:`save` to persist the current list.
    """

    def __init__(self, db_file: str = "cache/strategyinstances.cache") -> None:
        """Load the cached list from *db_file* if present.

        Args:
            db_file: path of the pickle cache file (default keeps the
                original hard-coded location, so existing callers work).
        """
        # Holds List[StrategyInstance]. BUG FIX: the original left this as
        # None when no cache file existed, so the very first run crashed
        # with AttributeError on .append(); start with an empty list.
        self.silist = []
        self.db_file = db_file
        if os.path.exists(self.db_file):
            with open(self.db_file, 'rb') as fp:
                self.silist = pickle.load(fp)

    def save(self) -> None:
        """Persist the current list to the cache file."""
        with open(self.db_file, 'wb') as fp:
            pickle.dump(self.silist, fp)
# Smoke-test the Store: load, append one instance, reset, persist.
db = Store()
print(db.silist)
# BUG FIX: on a fresh environment the cache file does not exist and
# Store.__init__ leaves silist as None, so .append() below raised
# AttributeError - fall back to an empty list first.
if db.silist is None:
    db.silist = []
db.silist.append(StrategyInstance(
    id2=1,
    name="DD",
    symbol="DD",
    class_name="DD",
    script="DD",
    open_rush=1,
    close_rush=1,
    stratvars_conf="DD",
    add_data_conf="DD"))
print(db.silist)
# reset to an empty list and persist it
db.silist = []
print(len(db.silist))
db.save()
# class Neco:
# def __init__(self) -> None:
# pass
# a = 1
# b = 2
# def toJson(self):
# return json.dumps(self, default=lambda o: o.__dict__)
# db.append(Neco.a)
# db.append(Neco.b)
# db.append(Neco)
# print(Neco)

12
testy/testTIMIT Normal file
View File

@ -0,0 +1,12 @@
import timeit

# Benchmark three round-trip deep-copy strategies on a small nested dict.
# Historical results (seconds per 1M iterations) are noted inline.
setup = '''
import msgpack
import json
from copy import deepcopy
data = {'name':'John Doe','ranks':{'sports':13,'edu':34,'arts':45},'grade':5}'''

for stmt in (
    'deepcopy(data)',                          # 12.0860249996
    'json.loads(json.dumps(data))',            # 9.07182312012
    'msgpack.unpackb(msgpack.packb(data))',    # 1.42743492126
):
    print(timeit.timeit(stmt, setup=setup))

View File

@ -0,0 +1,86 @@
from datetime import date, timedelta
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest
from alpaca.data.enums import DataFeed
from alpaca.trading.requests import GetCalendarRequest
from alpaca.trading.client import TradingClient
from alpaca.trading.models import Calendar
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
from datetime import datetime, timezone, time, timedelta, date
import pytz
from rich import print
import os
from icecream import install, ic
install()
import os
# print('Get current working directory : ', os.getcwd())
# print('File name : ', os.path.basename(__file__))
# print('Directory Name: ', os.path.dirname(__file__))
#práce s datumy
zone_NY = pytz.timezone('America/New_York')
symbol = "BAC"
client = TradingClient(API_KEY, SECRET_KEY, raw_data=False)
datetime_object_from = datetime(2023, 3, 16, 17, 51, 38, tzinfo=timezone.utc)
datetime_object_to = datetime(2023, 3, 22, 17, 52, 39, tzinfo=timezone.utc)
calendar_request = GetCalendarRequest(start=datetime_object_from,end=datetime_object_to)
cal_dates = client.get_calendar(calendar_request)
#curr_dir = os.path.dirname(__file__)
#backtesting a obecne prace startegie s dnem
#zatim podporime pouze main session
#backtest
#- market open trade - Q
#- market close trade - M
#minimalni jednotka pro CACHE je 1 den - a to jen marketopen to marketclose (extended hours not supported yet)
# For every trading day in the requested range, either replay the day's
# trades from a local cache file or fetch them from Alpaca and cache them.
for day in cal_dates:
    print("Processing DAY", day.date)
    print(day.date)
    print(day.open)
    print(day.close)
    # per-day cache file name: SYMBOL-<open epoch>-<close epoch>.cache
    daily_file = str(symbol) + '-' + str(int(day.open.timestamp())) + '-' + str(int(day.close.timestamp())) + '.cache'
    print(daily_file)
    if os.path.exists(daily_file):
        pass
        ## the daily cache file exists:
        # load from the file; for trades with start_time < trade < end_time
        # send them to the queue, otherwise skip
    else:
        ic("cache not exists")
        # the daily file is missing - load the day from Alpaca
        # and store it in the cache under daily_file
        # if this is today's session and the market has not closed yet,
        # do not persist the cache (the day is still incomplete)
        # NOTE(review): datetime.now() is naive while day.close comes from
        # the Alpaca calendar; if day.close is tz-aware this comparison
        # raises TypeError - confirm and use an aware "now" if needed.
        if datetime.now() < day.close:
            print("not saving the cache, market still open today")
            ic(datetime.now())
            ic(day.close)
        else:
            pass
            # save to daily cache file curr_dir+'/'+daily_file
            # then, for trades with start_time < trade < end_time,
            # send them to the queue, otherwise skip
    print("Processing DAY END",day.date)
# start_date = date(2008, 8, 15)
# end_date = date(2008, 9, 15) # perhaps date.now()
# ##get number of days between days
# delta = end_date - start_date # returns timedelta
# for i in range(delta.days + 1):
# day = start_date + timedelta(days=i)
# print(day)
# #pro kazde datum volame get cala - jestli byl trader date

165
testy/threadclassestest.py Normal file
View File

@ -0,0 +1,165 @@
from threading import Thread, current_thread
import threading
from alpaca.data.live import StockDataStream, CryptoDataStream
from v2realbot.config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE, PAPER
import queue
from alpaca.data.enums import DataFeed
from typing_extensions import Any
import time
from v2realbot.loader.aggregator import TradeAggregator2Queue
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Order
# class ws_agg() :
# def __init__(self, client, symbol) -> None:
# # Call the Thread class's init function
# Thread.__init__(self)
# self.client = client
# self.symbol = symbol
#object composition
"""""
vlakno je zde pro asynchronni zapnuti klienta,
vlakno je vzdy pouze jedno, nicmene instancovani teto tridy je kvuli stejnemu chovani
s ostatnimi streamery (v budoucnu mozna predelat na dedicated streamer a shared streamer)
"""""
class WS_Stream(Thread):
    """Thread wrapper around a single shared crypto websocket client.

    Only one underlying websocket connection exists (class attribute
    ``client``, shared by all instances); instances exist so this behaves
    the same as the other streamers (may later be split into dedicated
    vs. shared streamers). Registered aggregator objects live in the
    class-level ``_streams`` list.
    """
    # one shared websocket client for all instances
    client = CryptoDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params={})
    # aggregator/stream objects registered across all instances
    _streams = []
    # NOTE(review): this lock is declared but never acquired - mutation of
    # _streams from multiple threads is not actually synchronized.
    lock = threading.Lock()

    def __init__(self, name) -> None:
        # Call the Thread class's init function
        Thread.__init__(self, name=name)
        # NOTE(review): no-op assignment; probably meant ``self.name = name``,
        # but Thread.__init__ already stores the name, so it is redundant.
        name = name

    def symbol_exists(self, symbol):
        # True when any registered stream still consumes this symbol.
        for i in WS_Stream._streams:
            if i.symbol == symbol:
                return True
        return False

    def add_stream(self, obj):
        """Register an aggregator; subscribe immediately when the shared
        websocket is already running, otherwise run() subscribes later."""
        WS_Stream._streams.append(obj)
        if WS_Stream.client._running is False:
            # "websocket not running yet, adding to the list"
            print("websocket zatim nebezi, pridavame do pole")
            # the aggregator is recorded in the class-level list
        else:
            # "websocket running - just subscribe"
            print("websokcet bezi - pouze subscribujeme")
            WS_Stream.client.subscribe_trades(self.handler, obj.symbol)
            # "may report already-subscribed, which is fine"
            print("muze se vratit uz subscribnuto, coz je ok")

    def remove_stream(self, obj):
        #delete added stream
        try:
            WS_Stream._streams.remove(obj)
        except ValueError:
            print("value not found in _streams")
            return
        #if it is the last item at all, stop the client from running
        if len( WS_Stream._streams) == 0:
            print("removed last item from WS, stopping the client")
            WS_Stream.client.stop()
            return
        # unsubscribe only when no other registered stream uses the symbol
        if not self.symbol_exists(obj.symbol):
            WS_Stream.client.unsubscribe_trades(obj.symbol)
            print("symbol no longer used, unsubscribed from ", obj.symbol)

    @classmethod
    async def handler(cls, data):
        """Shared trade callback for every subscribed symbol."""
        # "handler in thread:" - shows which thread services the callback
        print("handler ve threadu:",current_thread().name)
        # look up how many streams are instantiated for this symbol
        # (dict[symbol]) and invoke each of them
        print(data)
        print("*"*40)

    def run(self):
        """Subscribe all registered symbols; the first thread to get here
        also starts the shared client, later threads only piggyback."""
        # "current thread"
        print(self.name, "AKtualni vlakno")
        if(len(self._streams)==0):
            print(self.name, "no streams. no run")
            return
        #print(self._streams)
        unique = set()
        ## for each symbol we subscribe
        for i in self._streams:
            #print(self.name, i.symbol)
            # instantiate the trade aggregator and store it in dict[symbol]
            # here
            unique.add(i.symbol)
        #print(unique)
        #subscribe for unique symbols
        ##TODO *PROBLEM* what if I want to subscribe a symbol that another
        ## strategy already consumes - conceptual PROBLEM
        ##TODO when one strategy ends, perform teardown steps such as
        ## unsubscribe and possibly stop
        for i in unique:
            WS_Stream.client.subscribe_trades(self.handler, i)
            print(self.name, "subscribed to",i)
        # this starts the client only the first time, in one thread;
        # the others just reuse it
        if WS_Stream.client._running is False:
            print(self.name, "it is not running, starting by calling RUN")
            WS_Stream.client.run()
        # only the first thread starts it, but subscribe and any
        # unsubscribe still work for the others
        else:
            print(self.name, "it is running, not calling RUN")
# class SymbolStream():
# def __init__(self, symbol) -> None:
# self.symbol = symbol
# s
# class StreamRequest:
# symbol: str
# resolution: int
#clientDataStream = CryptoDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params={})
# novy ws stream - vždy jednom vláknu
# Smoke-test: three WS_Stream threads sharing one websocket client.
# new WS stream - always runs in a single thread
obj= WS_Stream(name="jednicka")
q1 = queue.Queue()
stream1 = TradeAggregator2Queue(symbol="BTC/USD",queue=q1,rectype=RecordType.BAR,timeframe=15,update_ltp=False,align=StartBarAlign.ROUND,mintick = 0, mode = Mode.LIVE)
obj.add_stream(stream1)
print("1", WS_Stream._streams)
# second WS stream - shares the same underlying websocket client
obj2= WS_Stream("dvojka")
stream2 = TradeAggregator2Queue(symbol="ETH/USD",queue=q1,rectype=RecordType.BAR,timeframe=5,update_ltp=False,align=StartBarAlign.ROUND,mintick = 0, mode = Mode.LIVE)
obj2.add_stream(stream2)
print("2", WS_Stream._streams)
obj.start()
print("po startu prvniho")  # "after starting the first one"
print(WS_Stream._streams)
time.sleep(1)
obj2.start()
print("po startu druheho")  # "after starting the second one"
time.sleep(2)
print("pridavame treti")  # "adding a third one"
obj3 = WS_Stream(name="trojka")
stream3 = TradeAggregator2Queue(symbol="BTC/USD",queue=q1,rectype=RecordType.BAR,timeframe=1,update_ltp=False,align=StartBarAlign.ROUND,mintick = 0, mode = Mode.LIVE)
obj3.add_stream(stream3)
obj3.start()
print(WS_Stream._streams)
print("po zapnuti trojky")  # "after switching on the third one"
time.sleep(5)
print("cekame na skonceni")  # "waiting for the end"
print("celkem enumerate", threading.enumerate())
time.sleep(2)
print("rusim jednicku")  # "removing the first one"
obj.remove_stream(stream1)
print("po ruseni")  # "after removal"
time.sleep(2)
print("rusim dvojku")  # "removing the second one"
obj2.remove_stream(stream2)
print("po ruseni")  # "after removal"
time.sleep(2)
print("rusim trojku")  # "removing the third one"
obj3.remove_stream(stream3)
# wait for the stream threads to finish
obj2.join()
obj.join()

111
testy/websocketFastApi.py Normal file
View File

@ -0,0 +1,111 @@
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.security import HTTPBasic, HTTPBasicCredentials
import secrets
from typing import Annotated
import os
import uvicorn
import json
from datetime import datetime
from v2realbot.utils.utils import zoneNY
app = FastAPI()
root = os.path.dirname(os.path.abspath(__file__))
app.mount("/static", StaticFiles(directory=os.path.join(root, 'static')), name="static")
html = """
<!DOCTYPE html>
<html>
<head>
<title>Chat</title>
<script type="text/javascript" src="https://unpkg.com/lightweight-charts/dist/lightweight-charts.standalone.production.js"
></script>
</head>
<body>
<h1>Realtime chart</h1>
<h2>Your ID: <span id="ws-id"></span></h2>
<h3>Status: <span id="status">Not connected</span></h3>
<form action="" onsubmit="sendMessage(event)">
<label>Runner ID: <input type="text" id="runnerId" autocomplete="off" value="foo"/></label>
<label>Token: <input type="text" id="token" autocomplete="off" value="some-key-token"/></label>
<button onclick="connect(event)" id="bt-conn">Connect</button>
<button onclick="disconnect(event)" id="bt-disc" style="display: None">Disconnect</button>
<hr>
<label>Message: <input type="text" id="messageText" autocomplete="off"/></label>
<button>Send</button>
</form>
<ul id='messages'>
</ul>
<div id="chart"></div>
<div id="conteiner"></div>
<script src="/static/js/mywebsocket.js"></script>
<script src="/static/js/mychart.js"></script>
</body>
</html>
"""
security = HTTPBasic()
async def get_cookie_or_token(
    websocket: WebSocket,
    session: Annotated[str | None, Cookie()] = None,
    token: Annotated[str | None, Query()] = None,
):
    """FastAPI dependency: require either a `session` cookie or a `token`
    query parameter on the websocket handshake.

    Raises WebSocketException with policy-violation close code 1008 when
    neither is supplied; otherwise returns the session (preferred) or token.
    """
    if session is None and token is None:
        raise WebSocketException(code=status.WS_1008_POLICY_VIOLATION)
    return session or token
def get_current_username(
    credentials: Annotated[HTTPBasicCredentials, Depends(security)]
):
    """Validate HTTP Basic credentials and return the authenticated username.

    Raises:
        HTTPException: 401 with a Basic challenge when the credentials
            do not match.
    """
    # secrets.compare_digest is constant-time, avoiding the timing leak of
    # a plain == comparison (the file already imports `secrets` for this).
    correct_user = secrets.compare_digest(credentials.username, "david")
    correct_pass = secrets.compare_digest(credentials.password, "david")
    if not (correct_user and correct_pass):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Incorrect email or password",
            headers={"WWW-Authenticate": "Basic"},
        )
    # BUG FIX: the original fell through and returned None, so the route
    # dependency `username` was always None; return the validated name.
    return credentials.username
@app.get("/")
async def get(username: Annotated[str, Depends(get_current_username)]):
    """Serve the realtime-chart test page; requires HTTP Basic auth."""
    return HTMLResponse(html)
@app.websocket("/runners/{runner_id}/ws")
async def websocket_endpoint(
    *,
    websocket: WebSocket,
    runner_id: str,
    q: int | None = None,
    cookie_or_token: Annotated[str, Depends(get_cookie_or_token)],
):
    """Echo-style websocket used to exercise the realtime chart client.

    For every text message received it replies with the auth token info,
    the optional `q` query parameter, and one hard-coded OHLC bar as JSON.
    """
    await websocket.accept()
    try:
        while True:
            # Block until the client sends something; each message
            # triggers one round of demo responses.
            data = await websocket.receive_text()
            await websocket.send_text(
                f"Session cookie or query token value is: {cookie_or_token}"
            )
            if q is not None:
                await websocket.send_text(f"Query parameter q is: {q}")
            # Static sample bar matching the chart's expected schema.
            data = {'high': 195,
                    'low': 180,
                    'volume': 123,
                    'close': 185,
                    'hlcc4': 123,
                    'open': 190,
                    'time': "2019-05-25",
                    'trades':123,
                    'resolution':123,
                    'confirmed': 123,
                    'vwap': 123,
                    'updated': 123,
                    'index': 123}
            await websocket.send_text(json.dumps(data))
    except WebSocketDisconnect:
        print("CLIENT DISCONNECTED for", runner_id)
if __name__ == "__main__":
uvicorn.run("__main__:app", host="0.0.0.0", port=8000, reload=False)

View File

@ -0,0 +1,111 @@
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status
from fastapi.responses import HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.security import HTTPBasic, HTTPBasicCredentials
import secrets
from typing import Annotated
import os
import uvicorn
import json
from datetime import datetime
from v2realbot.utils.utils import zoneNY
app = FastAPI()
# Serve bundled JS assets under /static; the path is resolved relative to this
# file so it works regardless of the current working directory.
root = os.path.dirname(os.path.abspath(__file__))
app.mount("/static", StaticFiles(directory=os.path.join(root, 'static')), name="static")
# Single-page client served at "/": loads lightweight-charts from a CDN plus
# the local websocket/chart scripts mounted under /static.
# NOTE(review): the div id "conteiner" looks like a typo for "container" —
# confirm against static/js/mychart.js before renaming (it is runtime markup).
html = """
<!DOCTYPE html>
<html>
<head>
<title>Chat</title>
<script type="text/javascript" src="https://unpkg.com/lightweight-charts/dist/lightweight-charts.standalone.production.js"
></script>
</head>
<body>
<h1>Realtime chart</h1>
<h2>Your ID: <span id="ws-id"></span></h2>
<form action="" onsubmit="sendMessage(event)">
<input type="text" id="messageText" autocomplete="off"/>
<button>Send</button>
</form>
<ul id='messages'>
</ul>
<div id="chart"></div>
<div id="conteiner"></div>
<script src="/static/js/mywebsocket.js"></script>
<script src="/static/js/mychart.js"></script>
</body>
</html>
"""
class ConnectionManager:
    """Tracks open websocket connections and fans messages out to them."""

    def __init__(self):
        # All currently-accepted sockets, in connection order.
        self.active_connections: list[WebSocket] = []

    async def connect(self, websocket: WebSocket) -> None:
        """Accept the handshake and start tracking the socket."""
        await websocket.accept()
        self.active_connections.append(websocket)

    def disconnect(self, websocket: WebSocket) -> None:
        """Stop tracking a socket.

        Tolerates a socket that is not (or no longer) registered — the bare
        ``list.remove`` used before raised ValueError on a double disconnect.
        """
        if websocket in self.active_connections:
            self.active_connections.remove(websocket)

    async def send_personal_message(self, message: str, websocket: WebSocket) -> None:
        """Send ``message`` to a single client."""
        await websocket.send_text(message)

    async def broadcast(self, message: str) -> None:
        """Send ``message`` to every tracked client."""
        # Iterate over a snapshot so a disconnect() triggered while awaiting a
        # send cannot mutate the list mid-iteration.
        for connection in list(self.active_connections):
            await connection.send_text(message)
# Process-wide connection registry and HTTP Basic auth scheme.
manager = ConnectionManager()
security = HTTPBasic()
def get_current_username(
    credentials: Annotated[HTTPBasicCredentials, Depends(security)]
) -> str:
    """Validate HTTP Basic credentials and return the authenticated username.

    Raises:
        HTTPException: 401 with a ``WWW-Authenticate: Basic`` challenge when
            the username/password pair does not match.
    """
    # compare_digest runs in constant time, avoiding a timing side channel
    # that plain == string comparison would leak (`secrets` is imported above).
    user_ok = secrets.compare_digest(credentials.username.encode("utf8"), b"david")
    pass_ok = secrets.compare_digest(credentials.password.encode("utf8"), b"david")
    if not (user_ok and pass_ok):
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            # Fixed message: this endpoint authenticates by username, not email.
            detail="Incorrect username or password",
            headers={"WWW-Authenticate": "Basic"},
        )
    # Bug fix: the "/" route declares `username: Annotated[str, Depends(...)]`,
    # so this dependency must return the name (the original returned None).
    return credentials.username
@app.get("/")
async def get(username: Annotated[str, Depends(get_current_username)]):
    """Serve the chat page to an authenticated user."""
    page = HTMLResponse(content=html)
    return page
@app.websocket("/ws/{client_id}")
async def websocket_endpoint(websocket: WebSocket, client_id: int):
    """Per-client chat websocket loop.

    Every text frame received from the client triggers a hard-coded demo OHLCV
    bar pushed back to that client as JSON; on disconnect the remaining
    clients are notified via broadcast.
    """
    await manager.connect(websocket)
    # Hoisted loop invariant: the demo payload never changes between messages,
    # so build it once instead of on every iteration.
    demo_bar = {
        'high': 195,
        'low': 180,
        'volume': 123,
        'close': 185,
        'hlcc4': 123,
        'open': 190,
        'time': "2019-05-25",
        'trades': 123,
        'resolution': 123,
        'confirmed': 123,
        'vwap': 123,
        'updated': 123,
        'index': 123,
    }
    try:
        while True:
            # The incoming text is only a "send the next update" trigger; its
            # content is deliberately ignored (the original shadowed it by
            # reassigning `data` to the demo dict right after receiving).
            await websocket.receive_text()
            await manager.send_personal_message(json.dumps(demo_bar), websocket)
    except WebSocketDisconnect:
        manager.disconnect(websocket)
        await manager.broadcast(f"Client #{client_id} left the chat")
# Run the ASGI app directly when executed as a script; "__main__:app" points
# uvicorn back at this module's `app` instance.
if __name__ == "__main__":
    uvicorn.run("__main__:app", host="0.0.0.0", port=8000, reload=False)