migrate arch_detail to sqlite

David Brazda
2023-06-26 17:30:17 +02:00
parent 3293ad847e
commit 0bef90175a
7 changed files with 337 additions and 43 deletions

testy/migrace/migracesql.py (new file, +171)

@ -0,0 +1,171 @@
import sqlite3
from v2realbot.config import DATA_DIR
from v2realbot.utils.utils import json_serial
from uuid import UUID, uuid4
import json
from datetime import datetime
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
from v2realbot.common.model import RunArchiveDetail
from tinydb import TinyDB, Query, where
sqlite_db_file = DATA_DIR + "/v2trading.db"
conn = sqlite3.connect(sqlite_db_file)
#by default returns a list of tuples, where each tuple member is a column
#conn.row_factory = lambda c, r: json.loads(r[0])
#conn.row_factory = lambda c, r: r[0]
#conn.row_factory = sqlite3.Row
#CREATE TABLE
# c = conn.cursor()
# createTable= "CREATE TABLE runner_logs (runner_id varchar(32) NOT NULL, time real NOT NULL, data json NOT NULL);"
# print(c.execute(createTable))
# sql = ("CREATE INDEX index_runner_logs ON runner_logs (runner_id, time);")
# print(c.execute(sql))
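#NOTE: the archive-detail functions below assume a runner_detail table with two
#columns (runner_id, data), mirroring runner_logs above; its DDL is not part of
#this commit, so the following statement is only a sketch of what it presumably looks like:
# createTable = "CREATE TABLE runner_detail (runner_id varchar(32) NOT NULL, data json NOT NULL);"
# print(c.execute(createTable))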
#test objects
insert = dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordType.BAR, id=uuid4())
insert_list = [
    dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordType.BAR, id=uuid4())
    for _ in range(4)
]
def insert_log(runner_id: UUID, time: float, logdict: dict):
    c = conn.cursor()
    json_string = json.dumps(logdict, default=json_serial)
    res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)", [str(runner_id), time, json_string])
    conn.commit()
    return res.rowcount
def insert_log_multiple(runner_id: UUID, loglist: list):
    c = conn.cursor()
    insert_data = []
    for i in loglist:
        row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
        insert_data.append(row)
    c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
    conn.commit()
    return c.rowcount
# c = conn.cursor()
# json_string = json.dumps(logdict, default=json_serial)
# res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
# print(res)
# conn.commit()
# return res
#returns list of ilog jsons
def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float):
    conn.row_factory = lambda c, r: json.loads(r[0])
    c = conn.cursor()
    res = c.execute("SELECT data FROM runner_logs WHERE runner_id=? AND time>=? AND time<=?",
                    [str(runner_id), timestamp_from, timestamp_to])
    return res.fetchall()
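#usage sketch (the runner id and the timestamp window are made-up values):
# logs = read_log_window(UUID("3340e257-d19a-4179-baf3-3b39190acde3"), 1683108821.0, 1683108822.0)
# print(len(logs), logs[:1])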
#returns number of deleted elements
def delete_logs(runner_id: UUID):
    c = conn.cursor()
    res = c.execute("DELETE FROM runner_logs WHERE runner_id=?", [str(runner_id)])
    print(res.rowcount)
    conn.commit()
    return res.rowcount
# print(insert_log(str(uuid4()), datetime.now().timestamp(), insert))
# c = conn.cursor()
# ts_from = 1683108821.08872
# ts_to = 1683108821.08874
# # res = c.execute(f"SELECT runner_id, time, data FROM runner_logs where time > {ts_from} and time <{ts_to}")
# # result = res.fetchall()
# # res= delete_logs("7f9866ac-c742-47f4-a329-1d2b6721e781")
# # print(res)
# # res = read_log_window(runner_id="33", timestamp_from=11 , timestamp_to=22)
# # print(res)
# res = insert_log_multiple(uuid4(), insert_list)
# print(res)
# res = read_log_window("3340e257-d19a-4179-baf3-3b39190acde3", ts_from, ts_to)
# print(res)
# for r in res.fetchall():
# print(dict(r))
#print(res.description)
#print(result)
def insert_archive_detail(archdetail: RunArchiveDetail):
    c = conn.cursor()
    json_string = json.dumps(archdetail, default=json_serial)
    res = c.execute("INSERT INTO runner_detail VALUES (?,?)", [str(archdetail["id"]), json_string])
    conn.commit()
    return res.rowcount
#returns list of details
def get_all_archive_detail():
    conn.row_factory = lambda c, r: json.loads(r[0])
    c = conn.cursor()
    res = c.execute("SELECT data FROM runner_detail")
    return res.fetchall()
#returns a specific detail
def get_archive_detail_byID(runner_id: UUID):
    conn.row_factory = lambda c, r: json.loads(r[0])
    c = conn.cursor()
    res = c.execute("SELECT data FROM runner_detail WHERE runner_id=?", [str(runner_id)])
    return res.fetchone()
#returns number of deleted elements
def delete_archive_detail(runner_id: UUID):
    c = conn.cursor()
    res = c.execute("DELETE FROM runner_detail WHERE runner_id=?", [str(runner_id)])
    print(res.rowcount)
    conn.commit()
    return res.rowcount
def get_all_archived_runners_detail():
    arch_detail_file = DATA_DIR + "/arch_detail.json"
    db_arch_d = TinyDB(arch_detail_file, default=json_serial)
    res = db_arch_d.all()
    return 0, res
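#each TinyDB document read here is expected to carry the RunArchiveDetail fields
#used by migrate() below, at minimum an 'id' key; a hypothetical row for
#illustration (shape assumed, not taken from real data):
# {"id": "3340e257-d19a-4179-baf3-3b39190acde3", "name": "...", "bars": {...},
#  "indicators": [...], "statinds": {...}, "trades": [...]}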
def migrate():
    res, rows = get_all_archived_runners_detail()
    print(f"fetched {len(rows)}")
    for row in rows:
        insert_archive_detail(row)
        print(f"inserted {row['id']}")
bars = {'high': [],
        'low': [],
        'volume': [],
        'close': [],
        'hlcc4': [],
        'open': [],
        'time': [],
        'trades': [],
        'resolution': [],
        'confirmed': [],
        'vwap': [],
        'updated': [],
        'index': []}
idecko = uuid4()
runArchiveDetail: RunArchiveDetail = RunArchiveDetail(id=idecko,
                                                      name="nazev runneru",
                                                      bars=bars,
                                                      indicators=[dict(time=[])],
                                                      statinds=dict(neco=233, zase=333),
                                                      trades=[])
# insert_archive_detail(runArchiveDetail)
migrate()
# res = get_archive_detail_byID(idecko)
# print("byID",res)
res = get_all_archive_detail()
print("finished: all",len(res))
# delete_archive_detail(idecko)
# res = get_archive_detail_byID(idecko)
# print("byID",res)
# res = get_all_archive_detail()
# print("all",res)

(modified file)

@ -5,6 +5,8 @@ from uuid import UUID, uuid4
 import json
 from datetime import datetime
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
+from v2realbot.common.model import RunArchiveDetail
+from tinydb import TinyDB, Query, where
 sqlite_db_file = DATA_DIR + "/v2trading.db"
 conn = sqlite3.connect(sqlite_db_file)
@ -63,22 +65,22 @@ def delete_logs(runner_id: UUID):
     conn.commit()
     return res.rowcount
-print(insert_log(str(uuid4()), datetime.now().timestamp(), insert))
-c = conn.cursor()
-ts_from = 1683108821.08872
-ts_to = 1683108821.08874
-# res = c.execute(f"SELECT runner_id, time, data FROM runner_logs where time > {ts_from} and time <{ts_to}")
-# result = res.fetchall()
-# res= delete_logs("7f9866ac-c742-47f4-a329-1d2b6721e781")
-# print(res)
-# res = read_log_window(runner_id="33", timestamp_from=11 , timestamp_to=22)
-# print(res)
-res = insert_log_multiple(uuid4(), insert_list)
-print(res)
+# print(insert_log(str(uuid4()), datetime.now().timestamp(), insert))
+# c = conn.cursor()
+# ts_from = 1683108821.08872
+# ts_to = 1683108821.08874
+# # res = c.execute(f"SELECT runner_id, time, data FROM runner_logs where time > {ts_from} and time <{ts_to}")
+# # result = res.fetchall()
+# # res= delete_logs("7f9866ac-c742-47f4-a329-1d2b6721e781")
+# # print(res)
+# # res = read_log_window(runner_id="33", timestamp_from=11 , timestamp_to=22)
+# # print(res)
+# res = insert_log_multiple(uuid4(), insert_list)
+# print(res)
 # res = read_log_window("3340e257-d19a-4179-baf3-3b39190acde3", ts_from, ts_to)
 # print(res)
@ -90,8 +92,80 @@ print(res)
 #print(res.description)
 #print(result)
+def insert_archive_detail(archdetail: RunArchiveDetail):
+    c = conn.cursor()
+    json_string = json.dumps(archdetail, default=json_serial)
+    res = c.execute("INSERT INTO runner_detail VALUES (?,?)", [str(archdetail["id"]), json_string])
+    conn.commit()
+    return res.rowcount
+#returns list of details
+def get_all_archive_detail():
+    conn.row_factory = lambda c, r: json.loads(r[0])
+    c = conn.cursor()
+    res = c.execute("SELECT data FROM runner_detail")
+    return res.fetchall()
+#returns a specific detail
+def get_archive_detail_byID(runner_id: UUID):
+    conn.row_factory = lambda c, r: json.loads(r[0])
+    c = conn.cursor()
+    res = c.execute("SELECT data FROM runner_detail WHERE runner_id=?", [str(runner_id)])
+    return res.fetchone()
+#returns number of deleted elements
+def delete_archive_detail(runner_id: UUID):
+    c = conn.cursor()
+    res = c.execute("DELETE FROM runner_detail WHERE runner_id=?", [str(runner_id)])
+    print(res.rowcount)
+    conn.commit()
+    return res.rowcount
+def get_all_archived_runners_detail():
+    arch_detail_file = DATA_DIR + "/arch_detail.json"
+    db_arch_d = TinyDB(arch_detail_file, default=json_serial)
+    res = db_arch_d.all()
+    return 0, res
+def migrate():
+    res, rows = get_all_archived_runners_detail()
+    print(f"fetched {len(rows)}")
+    for row in rows:
+        #insert_archive_detail(row)
+        print(f"inserted {row['id']}")
+bars = {'high': [],
+        'low': [],
+        'volume': [],
+        'close': [],
+        'hlcc4': [],
+        'open': [],
+        'time': [],
+        'trades': [],
+        'resolution': [],
+        'confirmed': [],
+        'vwap': [],
+        'updated': [],
+        'index': []}
+idecko = uuid4()
+runArchiveDetail: RunArchiveDetail = RunArchiveDetail(id=idecko,
+                                                      name="nazev runneru",
+                                                      bars=bars,
+                                                      indicators=[dict(time=[])],
+                                                      statinds=dict(neco=233, zase=333),
+                                                      trades=[])
+# insert_archive_detail(runArchiveDetail)
+migrate()
+# res = get_archive_detail_byID(idecko)
+# print("byID",res)
+res = get_all_archive_detail()
+print("finished: all", len(res))
+# delete_archive_detail(idecko)
+# res = get_archive_detail_byID(idecko)
+# print("byID",res)
+# res = get_all_archive_detail()
+# print("all",res)

v2realbot/common/db.py (new file, +5)

@ -0,0 +1,5 @@
from v2realbot.config import DATA_DIR
import sqlite3
sqlite_db_file = DATA_DIR + "/v2trading.db"
conn = sqlite3.connect(sqlite_db_file, check_same_thread=False, isolation_level=None)
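#NOTE: check_same_thread=False lets this single connection be shared across threads,
#and isolation_level=None puts sqlite3 into autocommit mode (consistent with the
#per-call conn.commit() being dropped elsewhere in this commit). sqlite3 does not
#serialize access by itself, so callers are expected to guard the connection; a
#minimal sketch of that pattern (the db_lock name is illustrative, not in this commit):
#
# from threading import Lock
# db_lock = Lock()
# def write_row(runner_id: str, payload: str):
#     with db_lock:
#         conn.execute("INSERT INTO runner_detail VALUES (?,?)", [runner_id, payload])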

(modified file)

@ -21,12 +21,16 @@ from numpy import ndarray
 import pandas as pd
 from traceback import format_exc
 from datetime import timedelta, time
+from threading import Lock
+from v2realbot.common.db import conn
+#adding lock to ensure thread safety of TinyDB (in future will be migrated to proper db)
+lock = Lock()
 arch_header_file = DATA_DIR + "/arch_header.json"
-arch_detail_file = DATA_DIR + "/arch_detail.json"
+#arch_detail_file = DATA_DIR + "/arch_detail.json"
 #db layer to store runner archive
 db_arch_h = TinyDB(arch_header_file, default=json_serial)
-db_arch_d = TinyDB(arch_detail_file, default=json_serial)
+#db_arch_d = TinyDB(arch_detail_file, default=json_serial)
 #db layer to store stratins, TBD migrate to TinyDB
 db = Store()
@ -222,6 +226,7 @@ def pause_runner(id: UUID):
 def stop_runner(id: UUID = None):
     chng = []
+    try:
         for i in db.runners:
             #print(i['id'])
             if id is None or str(i.id) == id:
@ -235,6 +240,8 @@ def stop_runner(id: UUID = None):
         # i.run_pause_ev = None
         # i.run_stop_ev = None
         # #stratins.remove(i)
+    except Exception as e:
+        return (-2, "Error Exception" + str(e) + format_exc())
     if len(chng) > 0:
         return (0, "Sent STOP signal to those" + str(chng))
     else:
@ -551,8 +558,10 @@ def archive_runner(runner: Runner, strat: StrategyInstance):
                                     indicators=flattened_indicators_list,
                                     statinds=strat.state.statinds,
                                     trades=strat.state.tradeList)
+        with lock:
             resh = db_arch_h.insert(runArchive.__dict__)
-        resd = db_arch_d.insert(runArchiveDetail.__dict__)
+        resd = insert_archive_detail(runArchiveDetail)
+        #resd = db_arch_d.insert(runArchiveDetail.__dict__)
         print("archive runner finished")
         return 0, str(resh) + " " + str(resd)
     except Exception as e:
@ -566,10 +575,15 @@ def get_all_archived_runners():
 #delete runner in archive and archive detail and runner logs
 def delete_archived_runners_byID(id: UUID):
     try:
-        resh = db_arch_h.remove(where('id') == id)
-        resd = db_arch_d.remove(where('id') == id)
+        with lock:
+            print("before header del")
+            resh = db_arch_h.remove(where('id') == id)
+            print("before detail del")
+            #resd = db_arch_d.remove(where('id') == id)
+            resd = delete_archive_detail_byID(id)
+            print("Arch header and detail removed. Log deletion will start.")
         reslogs = delete_logs(id)
-        if len(resh) == 0 or len(resd) == 0:
+        if len(resh) == 0 or resd == 0:
             return -1, "not found "+str(resh) + " " + str(resd) + " " + str(reslogs)
         return 0, str(resh) + " " + str(resd) + " " + str(reslogs)
     except Exception as e:
@ -578,6 +592,7 @@ def delete_archived_runners_byID(id: UUID):
 #edit archived runner note
 def edit_archived_runners(runner_id: UUID, archChange: RunArchiveChange):
     try:
+        with lock:
             res = db_arch_h.update(set('note', archChange.note), where('id') == str(runner_id))
         if len(res) == 0:
             return -1, "not found "+str(runner_id)
@ -585,17 +600,47 @@ def edit_archived_runners(runner_id: UUID, archChange: RunArchiveChange):
     except Exception as e:
         return -2, str(e)
-def get_all_archived_runners_detail():
-    res = db_arch_d.all()
-    return 0, res
+#returns number of deleted elements
+def delete_archive_detail_byID(id: UUID):
+    c = conn.cursor()
+    res = c.execute("DELETE FROM runner_detail WHERE runner_id=?", [str(id)])
+    print("deleted", res.rowcount)
+    return res.rowcount
+# def get_all_archived_runners_detail_old():
+#     res = db_arch_d.all()
+#     return 0, res
+def get_all_archived_runners_detail():
+    conn.row_factory = lambda c, r: json.loads(r[0])
+    c = conn.cursor()
+    res = c.execute("SELECT data FROM runner_detail")
+    return 0, res.fetchall()
+# def get_archived_runner_details_byID_old(id: UUID):
+#     res = db_arch_d.get(where('id') == str(id))
+#     if res==None:
+#         return -2, "not found"
+#     else:
+#         return 0, res
+#returns a specific one
 def get_archived_runner_details_byID(id: UUID):
-    res = db_arch_d.get(where('id') == str(id))
+    conn.row_factory = lambda c, r: json.loads(r[0])
+    c = conn.cursor()
+    result = c.execute("SELECT data FROM runner_detail WHERE runner_id=?", [str(id)])
+    res = result.fetchone()
     if res==None:
         return -2, "not found"
     else:
         return 0, res
+def insert_archive_detail(archdetail: RunArchiveDetail):
+    c = conn.cursor()
+    json_string = json.dumps(archdetail, default=json_serial)
+    res = c.execute("INSERT INTO runner_detail VALUES (?,?)", [str(archdetail.id), json_string])
+    return res.rowcount
 #returns b
 def get_alpaca_history_bars(symbol: str, datetime_object_from: datetime, datetime_object_to: datetime, timeframe: TimeFrame):
     """Returns Bar object

(modified file)

@ -1,7 +1,6 @@
 function populate_real_time_chart() {
     cleanup_chart()
     initialize_chart()
     intitialize_candles()
     initialize_vwap()

(modified file)

@ -1,13 +1,11 @@
-import sqlite3
 from v2realbot.config import DATA_DIR
 from v2realbot.utils.utils import json_serial
 from uuid import UUID, uuid4
 import json
 from datetime import datetime
 from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
+from v2realbot.common.db import conn
-sqlite_db_file = DATA_DIR + "/v2trading.db"
-conn = sqlite3.connect(sqlite_db_file, check_same_thread=False)
 #by default returns a list of tuples, where each tuple member is a column
 #conn.row_factory = lambda c, r: json.loads(r[0])
 #conn.row_factory = lambda c, r: r[0]
@ -40,7 +38,7 @@ def insert_log_multiple(runner_id: UUID, loglist: list):
         row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
         insert_data.append(row)
     c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
-    conn.commit()
+    #conn.commit()
     return c.rowcount
 #returns list of ilog jsons
@ -58,6 +56,8 @@ def delete_logs(runner_id: UUID):
     conn.commit()
     return res.rowcount
 # print(insert_log(str(uuid4()), datetime.now().timestamp(), insert))
 # c = conn.cursor()
 # ts_from = 1683108821.08872