first commit
27 testy/archive/.gtignore Normal file
@@ -0,0 +1,27 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
cache/
BIN testy/archive/__pycache__/config.cpython-311.pyc Normal file
Binary file not shown.
52 testy/archive/alpacaGetHistoryBars.py Normal file
@@ -0,0 +1,52 @@
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest, StockBarsRequest
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
import datetime
import time
from alpaca.data import Quote, Trade, Snapshot, Bar
from alpaca.data.models import BarSet, QuoteSet, TradeSet
from alpaca.data.timeframe import TimeFrame
import mplfinance as mpf
import pandas as pd

parametry = {}

# no keys required
#client = CryptoHistoricalDataClient()
client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=False)
datetime_object_from = datetime.datetime(2023, 2, 27, 18, 51, 38, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 2, 27, 21, 51, 39, tzinfo=datetime.timezone.utc)
bar_request = StockBarsRequest(symbol_or_symbols="BAC", timeframe=TimeFrame.Hour, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)

bars = client.get_stock_bars(bar_request).df
#bars = bars.drop(['symbol'])

# the BarSet dataframe is indexed by (symbol, timestamp); keep only the timestamps so it can be plotted
bars = bars.droplevel('symbol')
#print(bars.close)
bars = bars.tz_convert('America/New_York')
print(bars)
print(bars.columns)
#Index(['open', 'high', 'low', 'close', 'volume', 'trade_count', 'vwap'], dtype='object')

mpf.plot(bars,               # the dataframe containing the OHLC (Open, High, Low and Close) data
         type='candle',      # use candlesticks
         volume=True,        # also show the volume
         mav=(3,6,9),        # use three different moving averages
         figratio=(3,1),     # set the ratio of the figure
         style='yahoo',      # choose the yahoo style
         title='First chart')

# # a list of dicts is returned
# print(bars["BAC"])

# # we can access it like this
# dict = bars["BAC"]
# print(type(dict))
# print(dict[2].timestamp)

# print(dict[2].close)

# print(dict[].close)
51 testy/archive/alpacabacktrader/test.py Normal file
@@ -0,0 +1,51 @@
# reportedly a working example of using a minute
# timeframe in backtrader with the alpaca api


import alpaca_backtrader_api
import backtrader as bt
import pandas as pd
from datetime import datetime
from strategies.tos_strategy import TOS

from dotenv import load_dotenv
import os
load_dotenv()

api_key = os.getenv('API_KEY_ID')
api_secret = os.getenv('API_SECRET')
alpaca_paper = os.getenv('ALPACA_PAPER')

cerebro = bt.Cerebro()
cerebro.addstrategy(TOS)

cerebro.broker.setcash(100000)
cerebro.broker.setcommission(commission=0.0)
cerebro.addsizer(bt.sizers.PercentSizer, percents=20)

store = alpaca_backtrader_api.AlpacaStore(
    key_id=api_key,
    secret_key=api_secret,
    paper=alpaca_paper
)

if not alpaca_paper:
    broker = store.getbroker()  # or just alpaca_backtrader_api.AlpacaBroker()
    cerebro.setbroker(broker)

DataFactory = store.getdata  # or use alpaca_backtrader_api.AlpacaData
data0 = DataFactory(
    dataname='AAPL',
    timeframe=bt.TimeFrame.TFrame("Minutes"),
    fromdate=pd.Timestamp('2018-11-15'),
    todate=pd.Timestamp('2018-11-17'),
    historical=True)
cerebro.adddata(data0)

# resampler for 15 minutes
cerebro.resampledata(data0, timeframe=bt.TimeFrame.Minutes, compression=15)

print('Starting Portfolio Value: %.2f' % cerebro.broker.getvalue())
cerebro.run()
print('Final Portfolio Value: %.2f' % cerebro.broker.getvalue())
cerebro.plot()
172 testy/archive/alpacaexampleslive.py Normal file
@@ -0,0 +1,172 @@
# using the websocket loader in a separate thread
# a second thread then handles user input and reads the global dataframe
# and stops the websocket loop

#import clients
from alpaca.data.live import StockDataStream, CryptoDataStream
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
from datetime import datetime
import pandas as pd
import threading


# optional parameters for the request
# parametry = {
#   "brand": "Ford",
#   "model": "Mustang",
#   "year": 1964
# }

sloupce=["timestamp","price","size","condition"]
sloupce_q=["timestamp","ask_size","ask_price","bid_price","bid_size"]

# declaration of the global dataframes with a time index
gdf = pd.DataFrame(columns=sloupce, index=pd.to_datetime([]))
gdf_q = pd.DataFrame(columns=sloupce_q, index=pd.to_datetime([]))


# # a global variable is used for communication between the threads
# # a mutual exclusion lock (mutex) would prevent race conditions (see the sketch below)
# create a lock
# lock = threading.Lock()
# with lock:
#     # add to the variable
#     variable = variable + 10
#     # release the lock automatically
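
# A minimal illustrative sketch of the lock idea above (batch_lock and append_safely are made-up
# names for this example and are not used by the handlers below): a threading.Lock would guard
# the shared batch list if more than one thread wrote to it.
batch_lock = threading.Lock()

def append_safely(record):
    # only one thread may mutate the shared list at a time; the lock is released automatically on exit
    with batch_lock:
        batch.append(record)  # 'batch' is the shared list defined just below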

prev_timestamp = "new"
batch = []
batch_q = []
seconds_list = []
parametry = {}
now = datetime.now() # current date and time aware of timezones
now = now.astimezone()

# the client must be global so that the second thread is able to stop it
#client = StockDataStream(API_KEY, SECRET_KEY, raw_data=False, websocket_params=parametry, feed=DataFeed.SIP)
client = StockDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params=parametry, feed=DataFeed.SIP)



## thread that reads the websocket and fills the global dataframe
# later rewrite this into a separate WSReader class
def ws_reader():
    print("entering the ws reader thread")

    # handler for ws trade data
    async def data_handler(data):
        global gdf
        global batch
        #record_list = (data.timestamp, data.open,data.high,data.low,data.close)
        #batch.append(record_list)
        print(data)

        # each batch is then written to the dataset
        if len(batch) == MAX_BATCH_SIZE:
            ## build a DataFrame from the current batch list
            new_df = pd.DataFrame.from_records(data=batch, columns = sloupce)

            ## append this dataframe to the global one
            gdf = pd.concat([gdf,new_df], axis=0, ignore_index=True)
            batch = []
            #print(gdf)

    # handler for ws quote data
    async def data_handler_q(data):
        global gdf_q
        global batch_q
        global prev_timestamp
        global seconds_list
        record_list = (data.timestamp, data.ask_size,data.ask_price,data.bid_price,data.bid_size)

        batch_q.append(record_list)
        #print(data.ask_size,data.ask_price,data.bid_price,data.bid_size)
        #print("build is",sestaveni, "\n batch has ", len(batch), "items")
        print(batch_q)

        ## max and min of the second values in the list
        def max_value(inputlist):
            return max([sublist[1] for sublist in inputlist])
        def min_value(inputlist):
            return min([sublist[1] for sublist in inputlist])
        def sum_value(inputlist):
            for sublist in inputlist: print(sublist[-1])
            return sum([sublist[-1] for sublist in inputlist])

        # if it is the same second or the very first record, append to the array
        if (prev_timestamp=="new") or (data.timestamp.second==prev_timestamp.second):
            print("same second")
            seconds_list.append([data.timestamp, data.ask_price, data.ask_size])
            #print("after append",seconds_list)
        else:
            print("new second")
            # compute the ohlc values
            print("max", max_value(seconds_list), "min ", min_value(seconds_list), "sum", sum_value(seconds_list), "open", seconds_list[0][1], "close", seconds_list[-1][1])
            print("-"*40)
            seconds_list = []
            seconds_list.append([data.timestamp, data.ask_price, data.ask_size])
            print(seconds_list)
        # printed from the list

        print("current time",data.timestamp,"previous time", prev_timestamp)

        prev_timestamp = data.timestamp

        # each batch is then written to the dataset
        if len(batch_q) == MAX_BATCH_SIZE:
            ## build a DataFrame from the current batch list
            new_df = pd.DataFrame.from_records(data=batch_q, columns = sloupce_q)

            ## append this dataframe to the global one
            gdf_q = pd.concat([gdf_q,new_df], axis=0, ignore_index=True)
            batch_q = []
            #print(gdf)

    #client.subscribe_quotes(data_handler, "BAC")
    #client.subscribe_trades(data_handler, "BAC")
    #client.subscribe_updated_bars(data_handler, "BAC")

    ## this waits and runs here
    print("starting run")
    client.run()
    print("run finished")


def user_prompt():
    print("This is the second thread; it can do anything, e.g. control the ws loader")
    while True:
        delej = input("Print dataframe: [t-trades;q-quotes;e-exit]")
        if delej == "t": print(gdf)
        elif delej =="q": print(gdf_q.tail(20))
        elif delej =="e": break
    print("bye")
    client.stop()

def main():
    # define the threads
    t1 = threading.Thread(target=ws_reader)
    #t2 = threading.Thread(target=user_prompt)

    # start the threads
    t1.start()#, t2.start()

    # wait for the threads to complete
    t1.join()#, t2.join()

if __name__ == "__main__":
    main()


# tbd: also try whether this could be done without threads, like this with asyncio
# if __name__ == '__main__':
#     logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
#                         level=logging.INFO)

#     logging.log(logging.INFO, 'Starting up...')
#     try:
#         loop = asyncio.get_event_loop()
#         loop.run_until_complete(main())
#         loop.close()
#     except KeyboardInterrupt:
#         pass
39 testy/archive/alpacagetlasttrades.py Normal file
@@ -0,0 +1,39 @@
from alpaca.data.historical import CryptoHistoricalDataClient, StockHistoricalDataClient
from alpaca.data.requests import CryptoLatestTradeRequest, StockLatestTradeRequest, StockLatestBarRequest, StockTradesRequest
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
import datetime
import time

parametry = {}

# no keys required
#client = CryptoHistoricalDataClient()
client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=True)

# single symbol request
#request_trade_params = StockTradesRequest(symbol_or_symbols="BAC", feed = DataFeed.SIP)
#request_last_bar_params = StockLatestBarRequest(symbol_or_symbols="BAC", feed=DataFeed.SIP)

#2023, 2, 27, 18, 51, 38

datetime_object_from = datetime.datetime(2023, 2, 26, 17, 51, 38, tzinfo=datetime.timezone.utc)

datetime_object_to = datetime.datetime(2023, 2, 28, 17, 51, 39, tzinfo=datetime.timezone.utc)

trades_request = StockTradesRequest(symbol_or_symbols="C", feed = DataFeed.SIP, start=datetime_object_from, end=datetime_object_to)
#latest_trade = client.get_stock_latest_trade(request_trade_params)
#latest_bar = client.get_stock_latest_bar(request_last_bar_params)

# for i in range(1,1000):
#     latest_bar = client.get_stock_latest_bar(request_last_bar_params)
#     data = latest_bar['BAC']
#     print(data.timestamp,data.trade_count, data.trade_count, data.high, data.low, data.close, data.volume, data.vwap)
#     time.sleep(1)

all_trades = client.get_stock_trades(trades_request)
# must use symbol to access even though it is single symbol
# print("last trade",latest_trade)
# print("latest bar",latest_bar)
# print("Trades Today", all_trades)
print(len(all_trades["C"]))
66 testy/archive/alpacapyexampleshistorical.py Normal file
@@ -0,0 +1,66 @@
# 2 clients for historical data StockHistoricalDataClient (needs keys), CryptoHistoricalDataClient
# 2 clients for real time data CryptoDataStream, StockDataStream


# import the required client
from alpaca.data.historical import StockHistoricalDataClient, CryptoHistoricalDataClient

# when using historical data (i.e. REST), import the matching request object
from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest

# objects to work with afterwards (they are part of the packages above, listed here just for information)
from alpaca.data import Quote, Trade, Snapshot, Bar
from alpaca.data.models import BarSet, QuoteSet, TradeSet


from config import API_KEY, SECRET_KEY
from datetime import datetime, timedelta
import pandas as pd
import rich

# create the client
stock_client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=True)
crypto_client = CryptoHistoricalDataClient()

time_from = datetime(2023, 2, 17, 14, 30, 0, 0)
time_to = datetime(2023, 2, 17, 14, 30, 1, 0)
#print(time_from)

# create the request object
#latestQuoteRequest = StockLatestQuoteRequest(symbol_or_symbols=["SPY", "GLD", "TLT"])
stockTradeRequest = StockTradesRequest(symbol_or_symbols=["BAC","C","MSFT"], start=time_from,end=time_to)

# call the method on the client with the request object; it returns Dict[str, Quote] - a Quote object per symbol
#latestQuoteObject = stock_client.get_stock_latest_quote(latestQuoteRequest)
tradesResponse = stock_client.get_stock_trades(stockTradeRequest)
print(tradesResponse)

for i in tradesResponse['BAC']:
    print(i)

# it returns a tradeset dict = Trades identified by symbol

#for

#access as a list
#print(tradesResponse["BAC"])

# The scope of these changes made to
# pandas settings are local to with statement.
# with pd.option_context('display.max_rows', None,
#                        'display.max_columns', None,
#                        'display.precision', 3,
#                        ):
#     #convert to dataframe
#     print(tradesResponse.df)

# this is the Quote object for
#bacquote=latestQuoteObject["SPY"]

# print(bacquote)
# an object of type LatestQuote is returned
# print(type(latestQuoteObject))

# print(latestQuoteObject)
#gld_latest_ask_price = latestQuoteObject["GLD"].ask_price
#print(gld_latest_ask_price, latestQuoteObject["GLD"].timestamp)
74 testy/archive/alpacawebsocketsimple.py Normal file
@@ -0,0 +1,74 @@

from alpaca.data.live import StockDataStream, CryptoDataStream
from alpaca.trading.stream import TradingStream
from alpaca.data.enums import DataFeed
from config import API_KEY, SECRET_KEY, MAX_BATCH_SIZE
from datetime import datetime
import mplfinance as mpf
import matplotlib.pyplot as plt
import threading
# optional parameters for the request
# parametry = {
#   "brand": "Ford",
#   "model": "Mustang",
#   "year": 1964
# }
parametry = {}
i = 0
u = 0
async def data_handler1(data):
    print("HANDLER1")
    global i
    i += 1
    print(data)

async def data_handler2(data):
    print("HANDLER2")
    global u
    u += 1
    print(data)

async def data_handler3(data):
    print("HANDLER3")
    global u
    u += 1
    print(data)

# plt.ion()

# def animate(ival):
#     # PREPARE DATAFRAME WITH OHLC AND "BUYS" AND "SELLS" HERE

#     apds = [mpf.make_addplot(buys, color='tab:green', ax=ax_buys),
#             mpf.make_addplot(sells, color='tab:red', ax=ax_sells)]
#     for ax in axes:
#         ax.clear()
#     mpf.plot(df_ohlc, type='candle', addplot=apds, ax=ax_main)
#     print('a')


#client = CryptoDataStream(API_KEY, SECRET_KEY, raw_data=True, websocket_params=parametry)
client1 = TradingStream(API_KEY, SECRET_KEY, paper=True)
client1.subscribe_trade_updates(data_handler1)
t1 = threading.Thread(target=client1.run)
t1.start()
print("started1")
client2 = TradingStream(API_KEY, SECRET_KEY, paper=True)
client2.subscribe_trade_updates(data_handler2)
t2 = threading.Thread(target=client2.run)
t2.start()
client3 = TradingStream(API_KEY, SECRET_KEY, paper=True)
client3.subscribe_trade_updates(data_handler3)
t3 = threading.Thread(target=client3.run)
t3.start()
print("started2")
print(threading.enumerate())
t2.join()
t1.join()
t3.join()

# client.subscribe_trades(data_handler, "BTC/USD")
# #client.subscribe_quotes(data_handler_ETH, "ETH/USD")
# print("before starting run")
# client.run()
# print("after starting run - can anything still be done here?")
47 testy/archive/async.py Normal file
@@ -0,0 +1,47 @@
# Asyncio - enables and manages concurrent code (within a single thread, across threads or even processes - via concurrent.futures)

# async marks a function definition that is meant to be called asynchronously and that may itself contain further asynchronous calls (await)
# await - marks that we wait for the called function - when used in a task, the task yields control back one level up
# create_task(async function) - starts a parallel function without waiting and the block continues
# asyncio.wait - waits for the tasks
# Here is a list of what you need in order to make your program async:

# Add async keyword in front of your function declarations to make them awaitable.
# Add await keyword when you call your async functions (without it they won't run).
# Create tasks from the async functions you wish to start asynchronously. Also wait for their finish.
# Call asyncio.run to start the asynchronous section of your program. Only one per thread.
# time.sleep(5) - blocks; await asyncio.sleep(5) - does not block the code (asynchronous task); see the sketch below
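
# A small illustrative sketch of the last point (blocking_demo/non_blocking_demo are made-up names,
# not part of the original notes): time.sleep() stalls the whole event loop, while
# await asyncio.sleep() suspends only the current task and lets the other tasks run.
import asyncio
import time

async def blocking_demo():
    time.sleep(1)           # blocks the event loop - no other task can run during this second
    print("blocking sleep finished")

async def non_blocking_demo():
    await asyncio.sleep(1)  # suspends only this task - other tasks keep running meanwhile
    print("non-blocking sleep finished")

# async def _demo():
#     await asyncio.gather(blocking_demo(), non_blocking_demo())
# asyncio.run(_demo())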

import asyncio

# tbd add a logger

async def makej(pracant, cekani):
    print("worker", pracant, "started and will wait", cekani, "seconds")
    await asyncio.sleep(cekani)
    print("worker", pracant, "finished waiting", cekani, "seconds")

async def main():
    print("entering the main function")
    # create the asynchronous tasks
    task1 = asyncio.create_task(makej(1,5))
    task2 = asyncio.create_task(makej(2,3))
    task3 = asyncio.create_task(makej(3,1))
    print("the tasks are running - we are now in the code after starting them, but before await.wait")
    # wait for the tasks to finish
    await asyncio.wait([task1,task2,task3])
    print("done waiting for the result after calling await.wait")
    print("and now a call done the standard synchronous way, where we wait for the result")
    # calling the function the standard synchronous way, waiting for the result
    await makej(1,1)
    await makej(2,1)

# main entry point - there should be one run per thread
#asyncio.run(main())


# feature to convert async to sync
#asyncio.get_event_loop().run_until_complete() -- or there is this decorator https://github.com/miyakogi/syncer

newfeature = asyncio.run(makej(1,1))
57 testy/archive/changeiterable.py Normal file
@@ -0,0 +1,57 @@
# to test changing an iterable (adding items) while iterating

class Notif:
    def __init__(self,time):
        self.time = time

open_orders = []

for i in range(1,10):
    open_orders.append(Notif(i))

print("the whole array of objects", open_orders)

# Here, 'reversed' returns a lazy iterator, so it's performant! reversed(l):

# removing old items and adding new ones must both work

# this list contains all not yet processed notifications that we try to process during this iteration
# if the time is not right we leave the message for the next iteration
# if the time is right we process the message (- note it can trigger additional open_orders, which are added to the queue)

def process_message(notif: Notif):
    global open_orders
    if notif.time % 2 == 0 and notif.time < 300:
        open_orders.append(Notif(notif.time+50))

todel = []
for i in open_orders:
    print("*******start of iteration for item", i.time)
    process_message(i)
    print("removing element",i.time)
    todel.append(i)
    print("*****end of iteration", i.time)
    print()

print("to del", todel)
# removing processed items from the list
for i in todel:
    open_orders.remove(i)


print("the whole list after everything has finished")
for i in open_orders: print(i.time)



"""
before the iteration, the following is called synchronously:
EXECUTE open orders(time)
- it tries to execute all open orders up to the given time (the time of the next iteration)
- it also supports callbacks, including an order created from another order
- such a new order can also be executed if, including the round trip, it fits within the given time
or possibly the simultaneous creation and execution of an order


"""
38 testy/archive/classscope.py Normal file
@@ -0,0 +1,38 @@
from uuid import UUID, uuid4
from alpaca.trading.enums import OrderSide, OrderStatus, TradeEvent, OrderClass, OrderType, TimeInForce
#from utils import AttributeDict
from rich import print
import threading
#import utils
import asyncio

from typing import Any, Optional, List, Union
from datetime import datetime, date
from pydantic import BaseModel

class Order(BaseModel):
    id: UUID
    submitted_at: datetime
    filled_at: Optional[datetime]
    symbol: str
    qty: Optional[str]
    filled_qty: Optional[str]
    filled_avg_price: Optional[str]
    side: OrderSide
    limit_price: Optional[str]

class TradeUpdate(BaseModel):
    event: Union[TradeEvent, str]
    execution_id: Optional[UUID]
    order: Order
    timestamp: datetime
    position_qty: Optional[float]
    price: Optional[float]
    qty: Optional[float]

class User(BaseModel):
    id: int
    name = "Jana"

a = Order(id = uuid4(), submitted_at= datetime.now(), symbol = "BAC", side=OrderSide.BUY)
print(a)
3 testy/archive/config.py Normal file
@@ -0,0 +1,3 @@
API_KEY = 'PKGGEWIEYZOVQFDRY70L'
SECRET_KEY = 'O5Kt8X4RLceIOvM98i5LdbalItsX7hVZlbPYHy8Y'
MAX_BATCH_SIZE = 1
5 testy/archive/hello_panel_world.py Normal file
@@ -0,0 +1,5 @@
import panel as pn
app = pn.Column("DDD")
# app.servable()
# app.append("Nice")
app.show()
17 testy/archive/interpolace.py Normal file
@@ -0,0 +1,17 @@
import scipy.interpolate as spi
import matplotlib.pyplot as plt


x = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
y = [4, 7, 11, 16, 22, 29, 38, 49, 63, 80]


y_interp = spi.interp1d(x, y)

#find y-value associated with x-value of 13
#print(y_interp(13))


#create plot of x vs. y
#plt.plot(x, y, '-ob')
31 testy/archive/multiprocess.py Normal file
@@ -0,0 +1,31 @@
# compared to asyncio and threading it has a higher creation overhead
# it is suited for CPU and GPU heavy workloads, e.g. strategies, where each strategy = 1 process
# (see the Pool sketch at the end of this file)
import multiprocessing
import logging

logger = multiprocessing.log_to_stderr()
logger.setLevel(logging.INFO)
logger.warning('doomed')

def do_first():
    print("Running do_first line 1")
    print("Running do_first line 2")
    print("Running do_first line 3")

def do_second():
    print("Running do_second line 1")
    print("Running do_second line 2")
    print("Running do_second line 3")

def main():
    t1 = multiprocessing.Process(target=do_first)
    t2 = multiprocessing.Process(target=do_second)

    # Start processes
    t1.start(), t2.start()

    # Wait for the processes to complete
    t1.join(), t2.join()

if __name__ == "__main__":
    main()
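
# A minimal illustrative sketch of the CPU-bound case mentioned at the top of this file
# (cpu_heavy and the 0..7 inputs are made-up examples, not part of the original script):
# a process pool spreads the work across worker processes, which is where multiprocessing
# pays off despite its creation overhead.
def cpu_heavy(n):
    # burn some CPU so separate processes actually help
    return sum(i * i for i in range(n * 100_000))

def pool_demo():
    with multiprocessing.Pool(processes=4) as pool:
        results = pool.map(cpu_heavy, range(8))  # each input is handled in a worker process
        print(results)

# if __name__ == "__main__":
#     pool_demo()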
145 testy/archive/test.ipynb Normal file
@@ -0,0 +1,145 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 97,
"metadata": {},
"outputs": [
{
"ename": "ValidationError",
"evalue": "1 validation error for StockBarsRequest\ntimeframe\n instance of TimeFrame expected (type=type_error.arbitrary_type; expected_arbitrary_type=TimeFrame)",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValidationError\u001b[0m Traceback (most recent call last)",
"Cell \u001b[0;32mIn[97], line 35\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[39m#print(time_from)\u001b[39;00m\n\u001b[1;32m 31\u001b[0m \n\u001b[1;32m 32\u001b[0m \u001b[39m# vytvorim request objekt\u001b[39;00m\n\u001b[1;32m 33\u001b[0m \u001b[39m#latestQuoteRequest = StockLatestQuoteRequest(symbol_or_symbols=[\"SPY\", \"GLD\", \"TLT\"])\u001b[39;00m\n\u001b[1;32m 34\u001b[0m stockTradeRequest \u001b[39m=\u001b[39m StockTradesRequest(symbol_or_symbols\u001b[39m=\u001b[39m[\u001b[39m\"\u001b[39m\u001b[39mBAC\u001b[39m\u001b[39m\"\u001b[39m], start\u001b[39m=\u001b[39mtime_from,end\u001b[39m=\u001b[39mtime_to)\n\u001b[0;32m---> 35\u001b[0m stockBarRequest \u001b[39m=\u001b[39m StockBarsRequest(symbol_or_symbols\u001b[39m=\u001b[39;49m[\u001b[39m\"\u001b[39;49m\u001b[39mBAC\u001b[39;49m\u001b[39m\"\u001b[39;49m], start\u001b[39m=\u001b[39;49mtime_from,end\u001b[39m=\u001b[39;49mtime_to, timeframe\u001b[39m=\u001b[39;49m\u001b[39m\"\u001b[39;49m\u001b[39m15s\u001b[39;49m\u001b[39m\"\u001b[39;49m)\n\u001b[1;32m 37\u001b[0m \u001b[39m#zavolam na clientovi metodu s request objektem, vrací se mi Dict[str, Quote] - obj.Quote pro kazdy symbol\u001b[39;00m\n\u001b[1;32m 38\u001b[0m \u001b[39m#latestQuoteObject = stock_client.get_stock_latest_quote(latestQuoteRequest)\u001b[39;00m\n\u001b[1;32m 39\u001b[0m \u001b[39m#tradesResponse = stock_client.get_stock_trades(stockTradeRequest).df\u001b[39;00m\n\u001b[1;32m 40\u001b[0m stocksResponse \u001b[39m=\u001b[39m stock_client\u001b[39m.\u001b[39mget_stock_bars(stockBarRequest)\u001b[39m.\u001b[39mdf\n",
"File \u001b[0;32m~/Documents/Development/python/trading/.venv/lib/python3.11/site-packages/alpaca/data/requests.py:45\u001b[0m, in \u001b[0;36mBaseTimeseriesDataRequest.__init__\u001b[0;34m(self, **data)\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[39mif\u001b[39;00m (\n\u001b[1;32m 38\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m \u001b[39min\u001b[39;00m data\n\u001b[1;32m 39\u001b[0m \u001b[39mand\u001b[39;00m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 40\u001b[0m \u001b[39mand\u001b[39;00m \u001b[39misinstance\u001b[39m(data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m], datetime)\n\u001b[1;32m 41\u001b[0m \u001b[39mand\u001b[39;00m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mtzinfo \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m\n\u001b[1;32m 42\u001b[0m ):\n\u001b[1;32m 43\u001b[0m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m] \u001b[39m=\u001b[39m data[\u001b[39m\"\u001b[39m\u001b[39mend\u001b[39m\u001b[39m\"\u001b[39m]\u001b[39m.\u001b[39mastimezone(pytz\u001b[39m.\u001b[39mutc)\u001b[39m.\u001b[39mreplace(tzinfo\u001b[39m=\u001b[39m\u001b[39mNone\u001b[39;00m)\n\u001b[0;32m---> 45\u001b[0m \u001b[39msuper\u001b[39;49m()\u001b[39m.\u001b[39;49m\u001b[39m__init__\u001b[39;49m(\u001b[39m*\u001b[39;49m\u001b[39m*\u001b[39;49mdata)\n",
"File \u001b[0;32m~/Documents/Development/python/trading/.venv/lib/python3.11/site-packages/pydantic/main.py:342\u001b[0m, in \u001b[0;36mpydantic.main.BaseModel.__init__\u001b[0;34m()\u001b[0m\n",
"\u001b[0;31mValidationError\u001b[0m: 1 validation error for StockBarsRequest\ntimeframe\n instance of TimeFrame expected (type=type_error.arbitrary_type; expected_arbitrary_type=TimeFrame)"
]
}
],
"source": [
"# 2 clients for historical data StockHistoricalDataClient (needs keys), CryptoHistoricalDataClient\n",
"# 2 clients for real time data CryptoDataStream, StockDataStream\n",
"\n",
"\n",
"# import the required client\n",
"from alpaca.data.historical import StockHistoricalDataClient, CryptoHistoricalDataClient\n",
"\n",
"# when using historical data (i.e. REST), import the matching request object\n",
"from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest\n",
"\n",
"# objects to work with afterwards (they are part of the packages above, listed here just for information)\n",
"from alpaca.data import Quote, Trade, Snapshot, Bar\n",
"from alpaca.data.models import BarSet, QuoteSet, TradeSet\n",
"\n",
"\n",
"from config import API_KEY, SECRET_KEY\n",
"import datetime\n",
"import pandas as pd\n",
"\n",
"# create the client\n",
"stock_client = StockHistoricalDataClient(API_KEY, SECRET_KEY, raw_data=False)\n",
"\n",
"sloupce=[\"symbol\",\"timestamp\",\"exchange\",\"price\",\"size\",\"id\",\"conditions\",\"tape\"]\n",
"\n",
"# declaration of the global df with a time index\n",
"#gdf = pd.DataFrame(columns=sloupce, index=pd.to_datetime([]))\n",
"\n",
"time_from = datetime.datetime(2023, 2, 17, 14, 50, 0, 0)\n",
"time_to = datetime.datetime(2023, 2, 17, 14, 55, 1, 0)\n",
"#print(time_from)\n",
"\n",
"# create the request object\n",
"#latestQuoteRequest = StockLatestQuoteRequest(symbol_or_symbols=[\"SPY\", \"GLD\", \"TLT\"])\n",
"stockTradeRequest = StockTradesRequest(symbol_or_symbols=[\"BAC\"], start=time_from,end=time_to)\n",
"stockBarRequest = StockBarsRequest(symbol_or_symbols=[\"BAC\"], start=time_from,end=time_to, timeframe=\"15s\")\n",
"\n",
"# call the method on the client with the request object; it returns Dict[str, Quote] - a Quote object per symbol\n",
"#latestQuoteObject = stock_client.get_stock_latest_quote(latestQuoteRequest)\n",
"#tradesResponse = stock_client.get_stock_trades(stockTradeRequest).df\n",
"stocksResponse = stock_client.get_stock_bars(stockBarRequest).df\n",
"\n",
"#data = [{'t': '2023-02-17T14:50:00.582845696Z', 'x': 'D', 'p': 34.83, 's': 1, 'c': [' ', 'I'], 'i': 71675642337847, 'z': 'A'}, {'t': '2023-02-17T14:50:00.948229632Z', 'x': 'D', 'p': 34.8383, 's': 10, 'c': [' ', 'I'], 'i': 79371872323411, 'z': 'A'}]\n",
"# data = [{ 'conditions': [' ', 'I'],\n",
|
||||
"# 'exchange': 'D',\n",
|
||||
"# 'id': 71675642337847,\n",
|
||||
"# 'price': 34.83,\n",
|
||||
"# 'size': 1.0,\n",
|
||||
"# 'symbol': 'BAC',\n",
|
||||
"# 'tape': 'A',\n",
|
||||
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 50, 0, 582845, tzinfo=datetime.timezone.utc)}, { 'conditions': [' ', 'I'],\n",
|
||||
"# 'exchange': 'D',\n",
|
||||
"# 'id': 79371872323411,\n",
|
||||
"# 'price': 34.8383,\n",
|
||||
"# 'size': 10.0,\n",
|
||||
"# 'symbol': 'BAC',\n",
|
||||
"# 'tape': 'A',\n",
|
||||
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 50, 0, 948229, tzinfo=datetime.timezone.utc)}, { 'conditions': [' ', 'I'],\n",
|
||||
"# 'exchange': 'D',\n",
|
||||
"# 'id': 71675642400306,\n",
|
||||
"# 'price': 34.835,\n",
|
||||
"# 'size': 1.0,\n",
|
||||
"# 'symbol': 'BAC',\n",
|
||||
"# 'tape': 'A',\n",
|
||||
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 50, 1, 870989, tzinfo=datetime.timezone.utc)}, { 'conditions': [' ', 'I'],\n",
|
||||
"# 'exchange': 'D',\n",
|
||||
"# 'id': 71675642400308,\n",
|
||||
"# 'price': 34.84,\n",
|
||||
"# 'size': 100.0,\n",
|
||||
"# 'symbol': 'BAC',\n",
|
||||
"# 'tape': 'A',\n",
|
||||
"# 'timestamp': datetime.datetime(2023, 2, 17, 14, 55, 0, 88460, tzinfo=datetime.timezone.utc)}]\n",
|
||||
"# datetime.datetime(2023, 2, 17, 14, 50, 0, 948229, tzinfo=datetime.timezone.utc)\n",
|
||||
"#data = tradesResponse\n",
|
||||
"\n",
|
||||
"#gdf = pd.DataFrame.from_dict(data=data, orient='index')\n",
|
||||
"#gdf = pd.DataFrame(data)\n",
|
||||
"\n",
|
||||
"#gdf = pd.DataFrame(tradesResponse.data[\"BAC\"])\n",
|
||||
"\n",
|
||||
"# works with raw data\n",
|
||||
"#gdf = pd.DataFrame([t for t in tradesResponse[\"BAC\"]], columns=sloupce)\n",
|
||||
"\n",
|
||||
"#gdf = tradesResponse.df\n",
|
||||
"print(stocksResponse)\n",
|
||||
"# print(tradesResponse)\n",
|
||||
"#print(tradesResponse[\"BAC\"])\n",
|
||||
"# print(tradesResponse.data[\"BAC\"])\n",
|
||||
"\n",
|
||||
"# positions_df = pd.concat((pd.DataFrame(position).set_index(0) for position in positions),axis=1)\n",
|
||||
"# positions_df = positions_df.T.apply(pd.to_numeric, errors='ignore').T # convert strings to numeric\n",
|
||||
"# For orders:\n",
|
||||
"# orders_df = pd.concat((pd.DataFrame(order).set_index(0) for order in orders),axis=1).T\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": ".venv",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.10.10"
|
||||
},
|
||||
"orig_nbformat": 4,
|
||||
"vscode": {
|
||||
"interpreter": {
|
||||
"hash": "9391835cf7167c62e8e53032533e4da7e63c83f818ef5f19912128bc45706236"
|
||||
}
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 2
|
||||
}
|
||||
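The ValidationError captured in the notebook output above comes from passing the plain string "15s" as the timeframe; StockBarsRequest expects a TimeFrame instance. A minimal sketch of a corrected request, assuming alpaca-py's TimeFrame/TimeFrameUnit API and a 15-minute frame (a made-up substitute here, since "15s" is not a valid TimeFrame):

from alpaca.data.requests import StockBarsRequest
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
import datetime

time_from = datetime.datetime(2023, 2, 17, 14, 50, 0)
time_to = datetime.datetime(2023, 2, 17, 14, 55, 1)

# pass a TimeFrame object instead of the string "15s"
stockBarRequest = StockBarsRequest(
    symbol_or_symbols=["BAC"],
    start=time_from,
    end=time_to,
    timeframe=TimeFrame(15, TimeFrameUnit.Minute),
)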
66 testy/archive/test.py Normal file
@@ -0,0 +1,66 @@
from alpaca.data.live import StockDataStream
from alpaca.common.enums import BaseURL
import datetime
import pandas as pd
from alpaca.data.models import Bar, Quote, Trade
import csv
from config import API_KEY, SECRET_KEY

key = 'PKHVMXQA09IVXALL92JR'
secret = 'FmPwQRFIl7jhLRrXee0Ui73zM9NmAf5O4VH2tyAf'

# keys required for stock historical data client
#client = StockHistoricalDataClient(key, secret)

# keys required
client = StockDataStream(api_key=API_KEY,secret_key=SECRET_KEY)

df_glob = pd.DataFrame(columns=['timestamp','symbol', 'exchange','size','price','id','conditions','tape'])

file = open('Trades.txt', 'w')

# async handler
async def quote_data_handler(data):
    #global df_glob
    #f_loc = pd.DataFrame(data)
    #df_glob = df_glob.append(df_loc, ignore_index=True)
    # quote data will arrive here
    print(data)
    ne = str(data) + "\n"
    file.write(ne)
    #print(data.timestamp,data.symbol, data.price, data.size, data.exchange, data.id, data.conditions, data.tape)
    print("-"*40)

#client.subscribe_updated_bars(quote_data_handler, "BAC")
#client.subscribe_quotes(quote_data_handler, "BAC")
client.subscribe_trades(quote_data_handler, "BAC")

print("before starting run")
try:
    client.run()
    #print(df)
except Exception as err:
    print(f"{type(err).__name__} was raised: {err}")
    print("global dataframe")
    print(df_glob)
    file.close()

print(df_glob)

# timestamp symbol exchange size price id conditions tape 0 1
# 0 NaN NaN NaN NaN NaN NaN NaN NaN symbol BAC
# 1 NaN NaN NaN NaN NaN NaN NaN NaN timestamp 2023-02-15 19:47:19.430511+00:00
# 2 NaN NaN NaN NaN NaN NaN NaN NaN exchange V
# 3 NaN NaN NaN NaN NaN NaN NaN NaN price 35.52
# 4 NaN NaN NaN NaN NaN NaN NaN NaN size 50.0
# .. ... ... ... ... ... ... ... ... ... ...
# 59 NaN NaN NaN NaN NaN NaN NaN NaN price 35.51
# 60 NaN NaN NaN NaN NaN NaN NaN NaN size 7.0
# 61 NaN NaN NaN NaN NaN NaN NaN NaN id 56493486924086
# 62 NaN NaN NaN NaN NaN NaN NaN NaN conditions [ , I]
# 63 NaN NaN NaN NaN NaN NaN NaN NaN tape A

# leftover snippet (request and MarketOrderRequest are not defined or imported in this file):
# order_data_json = request.get_json()
#
# # validate data
# MarketOrderRequest(**order_data_json)
1 testy/archive/testqueue.py Normal file
@@ -0,0 +1 @@

35 testy/archive/threadcondition.py Normal file
@@ -0,0 +1,35 @@
# A condition variable allows one or more threads to wait until they are
# notified by another thread.

import threading
import time
import logging

logging.basicConfig(level=logging.DEBUG,
                    format='(%(threadName)-9s) %(message)s',)

def consumer(cv):
    logging.debug('Consumer thread started ...')
    with cv:
        logging.debug('Consumer waiting ...')
        cv.wait()
        logging.debug('Consumer consumed the resource')

def producer(cv):
    logging.debug('Producer thread started ...')
    with cv:
        logging.debug('Making resource available')
        logging.debug('Notifying to all consumers')
        cv.notify_all()

if __name__ == '__main__':
    condition = threading.Condition()
    cs1 = threading.Thread(name='consumer1', target=consumer, args=(condition,))
    cs2 = threading.Thread(name='consumer2', target=consumer, args=(condition,))
    pd = threading.Thread(name='producer', target=producer, args=(condition,))

    cs1.start()
    time.sleep(2)
    cs2.start()
    time.sleep(2)
    pd.start()
54 testy/archive/thready.py Normal file
@@ -0,0 +1,54 @@
# using threads - unlike asyncio we do not have full control underneath; the library manages it
# threads are well suited for heavy IO operations, e.g. loaders, requestors, scrapers, writers etc.
# how to share data between Threads
# 1. Sharing a boolean variable with a threading.Event.
#    declare in unset or false state
#    event = threading.Event()
#    if event.is_set(): # check if set
#    event.set()   # set the event true
#    event.clear() # or false

# 2. Protecting global shared data with a threading.Lock.
#    lock = threading.Lock()
#    with lock:
#        variable = variable + 10

# 3. Sharing data with a queue.Queue. Queue can be shared between threads. (see the sketch at the end of this file)
#    create a queue
#    queue = Queue() #create FIFO
#    queue.put(i) #enqueue
#    data = queue.get() #dequeue

# there is also a Condition - a kind of pipe between a consumer and a producer
#    cond = threading.Condition()
#    cond.wait() #consumer waiting
#    cond.notifyAll() #producer notifying consumers that they can continue
#    consumer threads wait for the Condition to be set before continuing.
#    The producer thread is responsible for setting the condition and notifying the other threads
#    that they can continue. More in the separate test file.


import threading

def do_first():
    print("Running do_first line 1")
    print("Running do_first line 2")
    print("Running do_first line 3")

def do_second():
    print("Running do_second line 1")
    print("Running do_second line 2")
    print("Running do_second line 3")

def main():
    t1 = threading.Thread(target=do_first)
    t2 = threading.Thread(target=do_second)

    # Start threads
    t1.start(), t2.start()

    # Wait for the threads to complete
    t1.join(), t2.join()

if __name__ == "__main__":
    main()
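
# A small illustrative sketch of point 3 from the notes above (the producer/consumer functions and
# the 0..4 payload are made up for this example; nothing below calls it): a queue.Queue passes data
# between threads without an explicit lock, because the queue itself is thread-safe.
from queue import Queue

def queue_demo():
    q = Queue()

    def producer():
        for i in range(5):
            q.put(i)        # enqueue work items
        q.put(None)         # sentinel: tell the consumer to stop

    def consumer():
        while True:
            item = q.get()  # blocks until an item is available
            if item is None:
                break
            print("consumed", item)

    p = threading.Thread(target=producer)
    c = threading.Thread(target=consumer)
    p.start(), c.start()
    p.join(), c.join()

# queue_demo()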