refactoring archrunners+new ma inds
This commit is contained in:
@ -9,25 +9,37 @@ from alpaca.data.models import BarSet, QuoteSet, TradeSet
|
||||
from alpaca.data.timeframe import TimeFrame
|
||||
# import mplfinance as mpf
|
||||
import pandas as pd
|
||||
from rich import print
|
||||
from v2realbot.utils.utils import zoneNY
|
||||
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY
|
||||
from alpaca.trading.requests import GetCalendarRequest
|
||||
from alpaca.trading.client import TradingClient
|
||||
# Scratch parameters dict (populated elsewhere at runtime).
parametry = {}

# no keys required
#client = CryptoHistoricalDataClient()
client = StockHistoricalDataClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)

datetime_object_from = datetime.datetime(2023, 2, 27, 18, 51, 38, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 2, 27, 21, 51, 39, tzinfo=datetime.timezone.utc)
bar_request = StockBarsRequest(symbol_or_symbols="BAC", timeframe=TimeFrame.Minute, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)
clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)

# get previous days bar: first query the trading calendar for the window,
# then request daily bars for the same window.
datetime_object_from = datetime.datetime(2023, 10, 11, 4, 0, 00, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 10, 16, 16, 1, 00, tzinfo=datetime.timezone.utc)
calendar_request = GetCalendarRequest(start=datetime_object_from, end=datetime_object_to)
cal_dates = clientTrading.get_calendar(calendar_request)
print(cal_dates)
bar_request = StockBarsRequest(symbol_or_symbols="BAC", timeframe=TimeFrame.Day, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)

# bars = client.get_stock_bars(bar_request).df

# BUGFIX: the original issued the same get_stock_bars() request twice in a row
# (an untyped assignment immediately followed by an annotated one) -- one
# annotated request fetches the identical data with half the API calls.
bars: BarSet = client.get_stock_bars(bar_request)
#bars = bars.drop(['symbol'])

#print(bars.df.close)
#bars = bars.tz_convert('America/New_York')
print(bars.data["BAC"])
#print(len(bars))
print(bars)
#print(bars.data["BAC"][0])
#print(bars.df.columns)
#Index(['open', 'high', 'low', 'close', 'volume', 'trade_count', 'vwap'], dtype='object')
# bars.df.set_index('timestamp', inplace=True)
142
testy/migrace/migracerunnerheader.py
Normal file
142
testy/migrace/migracerunnerheader.py
Normal file
@ -0,0 +1,142 @@
|
||||
import sqlite3
|
||||
from v2realbot.config import DATA_DIR
|
||||
from v2realbot.utils.utils import json_serial
|
||||
from uuid import UUID, uuid4
|
||||
import json
|
||||
from datetime import datetime
|
||||
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
|
||||
from v2realbot.common.model import RunArchiveDetail, RunArchive, RunArchiveView
|
||||
from tinydb import TinyDB, Query, where
|
||||
from v2realbot.common.db import pool, execute_with_retry
|
||||
|
||||
|
||||
|
||||
# Helper function to transform a row to a RunArchive object
def row_to_object(row: dict) -> RunArchive:
    """Map one ``runner_header`` row (already decoded to a dict) onto a RunArchive.

    Missing keys simply become ``None`` via ``dict.get``.
    """
    return RunArchive(
        id=row.get('id'),
        strat_id=row.get('strat_id'),
        batch_id=row.get('batch_id'),
        symbol=row.get('symbol'),
        name=row.get('name'),
        note=row.get('note'),
        started=row.get('started'),
        stopped=row.get('stopped'),
        mode=row.get('mode'),
        account=row.get('account'),
        bt_from=row.get('bt_from'),
        bt_to=row.get('bt_to'),
        strat_json=row.get('strat_json'),
        stratvars=row.get('stratvars'),
        settings=row.get('settings'),
        ilog_save=row.get('ilog_save'),
        profit=row.get('profit'),
        trade_count=row.get('trade_count'),
        end_positions=row.get('end_positions'),
        end_positions_avgp=row.get('end_positions_avgp'),
        # BUGFIX: was row.get('open_orders') -- metrics must come from the
        # 'metrics' key (the commented-out original line below confirms the intent).
        metrics=row.get('metrics'),
        #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None,
        stratvars_toml=row.get('stratvars_toml')
    )
|
||||
|
||||
def get_all_archived_runners():
    """Fetch every archived runner header from ``runner_header``.

    Returns:
        tuple: ``(0, rows)`` where ``rows`` is a list of the JSON-decoded
        ``data`` column values (one dict per runner).
    """
    conn = pool.get_connection()
    try:
        # Decode the single selected column straight into a dict per row.
        conn.row_factory = lambda c, r: json.loads(r[0])
        c = conn.cursor()
        res = c.execute("SELECT data FROM runner_header")
        # BUGFIX: fetch while the connection is still ours -- the original
        # called fetchall() only after releasing the connection to the pool,
        # where another thread may already be reusing it.
        rows = res.fetchall()
    finally:
        conn.row_factory = None
        pool.release_connection(conn)
    return 0, rows
|
||||
|
||||
def insert_archive_header(archeader: RunArchive):
    """Insert one RunArchive header row; returns the affected row count."""
    conn = pool.get_connection()
    try:
        c = conn.cursor()
        json_string = json.dumps(archeader, default=json_serial)
        # BUGFIX: escape single quotes for the SQL string literal -- the JSON
        # payload can legally contain ' (e.g. inside a note), which previously
        # produced a malformed statement. execute_with_retry takes a plain
        # statement string, so ?-placeholders cannot be used here.
        json_sql = json_string.replace("'", "''")
        # NOTE(review): the column is named 'ra' here but the migration SQL
        # defines 'data' -- confirm which schema version this targets.
        if archeader.batch_id is not None:
            statement = f"INSERT INTO runner_header (runner_id, batch_id, ra) VALUES ('{str(archeader.id)}','{str(archeader.batch_id)}','{json_sql}')"
        else:
            statement = f"INSERT INTO runner_header (runner_id, ra) VALUES ('{str(archeader.id)}','{json_sql}')"

        res = execute_with_retry(c, statement)
        conn.commit()
    finally:
        pool.release_connection(conn)
    return res.rowcount
|
||||
|
||||
# NOTE(review): this shadows the builtin ``set`` and is immediately rebound by
# the tuple unpacking in the driver loop below, so the alias is never used --
# consider renaming or removing.
set = list[RunArchive]
|
||||
|
||||
def migrate_to_columns(ra: RunArchive):
    """Copy a RunArchive's fields from the JSON blob into dedicated columns.

    Updates the ``runner_header`` row keyed by ``ra.id`` in place.

    Returns:
        tuple: ``(0, cursor)`` -- the cursor of the executed UPDATE.
    """
    conn = pool.get_connection()
    try:
        c = conn.cursor()
        # Parameterized statement: every value is bound, never interpolated.
        # (A commented-out f-string version of this UPDATE was removed here.)
        # NOTE(review): assumes started/stopped/mode/account are sqlite-storable
        # primitives (str/int) or registered adapters exist -- confirm.
        res = c.execute('''
        UPDATE runner_header
        SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=?
        WHERE runner_id=?
        ''',
        (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))

        conn.commit()
    finally:
        pool.release_connection(conn)
    return 0, res
|
||||
|
||||
# Driver: fetch all archived runner headers and migrate each into columns.
# BUGFIX (idiom): local renamed from ``set`` to ``rows`` so the builtin
# ``set`` is no longer shadowed at module level.
res, rows = get_all_archived_runners()
print(f"fetched {len(rows)}")
for row in rows:
    ra: RunArchive = row_to_object(row)
    print(f"item {ra.id}")
    res, val = migrate_to_columns(ra)
    print(res,val)
    print("migrated", ra.id)
|
||||
|
||||
|
||||
#print(set)
|
||||
|
||||
# def migrate():
|
||||
# set = list[RunArchiveDetail]
|
||||
# #res, set = get_all_archived_runners_detail()
|
||||
# print(f"fetched {len(set)}")
|
||||
# for row in set:
|
||||
# #insert_archive_detail(row)
|
||||
# print(f"inserted {row['id']}")
|
||||
|
||||
|
||||
# idecko = uuid4()
|
||||
|
||||
# runArchiveDetail: RunArchiveDetail = RunArchiveDetail(id = idecko,
|
||||
# name="nazev runneru",
|
||||
# bars=bars,
|
||||
# indicators=[dict(time=[])],
|
||||
# statinds=dict(neco=233,zase=333),
|
||||
# trades=list(dict()))
|
||||
44
testy/migrace/migracni skript.sql
Normal file
44
testy/migrace/migracni skript.sql
Normal file
@ -0,0 +1,44 @@
|
||||
|
||||
-- Rebuild runner_header with a dedicated column per RunArchive field.
-- SQLite cannot restructure a table in place, hence the temp-table swap below.
CREATE TABLE "sqlb_temp_table_1" (
	"runner_id" varchar(32) NOT NULL,
	"strat_id" TEXT,
	"batch_id" TEXT,
	"symbol" TEXT,
	"name" TEXT,
	"note" TEXT,
	"started" TEXT,
	"stopped" TEXT,
	"mode" TEXT,
	"account" TEXT,
	"bt_from" TEXT,
	"bt_to" TEXT,
	"strat_json" TEXT,
	"settings" TEXT,
	"ilog_save" INTEGER,
	"profit" NUMERIC,
	"trade_count" INTEGER,
	"end_positions" INTEGER,
	"end_positions_avgp" NUMERIC,
	"metrics" TEXT,
	"stratvars_toml" TEXT,
	-- original JSON blob is kept alongside the new columns
	"data" json NOT NULL,
	PRIMARY KEY("runner_id")
);
|
||||
-- Copy existing rows, swap the temp table in for runner_header, then recreate indexes.
-- BUGFIX: the INSERT, DROP, ALTER and CREATE INDEX statements were missing
-- their terminating semicolons, so the script would not parse as a batch.
INSERT INTO "main"."sqlb_temp_table_1" ("batch_id","data","runner_id") SELECT "batch_id","data","runner_id" FROM "main"."runner_header";
PRAGMA defer_foreign_keys;
PRAGMA defer_foreign_keys = '1';
DROP TABLE "main"."runner_header";
ALTER TABLE "main"."sqlb_temp_table_1" RENAME TO "runner_header";
PRAGMA defer_foreign_keys = '0';

CREATE INDEX "index_runner_header_batch" ON "runner_header" (
	"batch_id"
);

-- NOTE(review): this duplicates the PRIMARY KEY("runner_id") index -- likely redundant.
CREATE INDEX "index_runner_header_pk" ON "runner_header" (
	"runner_id"
);

CREATE INDEX "index_runner_header_strat" ON "runner_header" (
	"strat_id"
);
|
||||
23
testy/valueremapping.py
Normal file
23
testy/valueremapping.py
Normal file
@ -0,0 +1,23 @@
|
||||
|
||||
|
||||
import numpy as np

# NOTE(review): scratch snippet -- ``values``, ``atr10``, ``cp`` and
# ``vwap``/``atr10r`` (inside the statement string) are not defined in this
# file; presumably supplied by the surrounding strategy context. Confirm.
arr = np.array(values)

# Find the current value and the minimum and maximum values
current_value = arr[-1]
min_value = np.min(arr)
max_value = np.max(arr)

# remapping to -1 and 1
# NOTE(review): divides by (max_value - min_value), which is zero when all
# samples are equal -- confirm the inputs always vary.
remapped_value = 2 * (current_value - min_value) / (max_value - min_value) - 1

# remap to range 0 and 1
# NOTE(review): overwrites the [-1, 1] value computed above.
remapped_value = (atr10[-1] - np.min(atr10)) / (np.max(atr10) - np.min(atr10))

# Expression string evaluated elsewhere by the strategy engine (presumably).
cp.statement = "np.mean(vwap[-(abs(int(50*atr10r[-1]))):])"
|
||||
|
||||
Reference in New Issue
Block a user