gui js refactors + ilogs database

This commit is contained in:
David Brazda
2023-05-05 18:20:52 +02:00
parent 4691599276
commit d9abc19c1f
26 changed files with 1105 additions and 305 deletions

97
testy/testSqlite3.py Normal file

@@ -0,0 +1,97 @@
import sqlite3
from v2realbot.config import DATA_DIR
from v2realbot.utils.utils import json_serial
from uuid import UUID, uuid4
import json
from datetime import datetime
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
sqlite_db_file = DATA_DIR + "/v2trading.db"
conn = sqlite3.connect(sqlite_db_file)
#by default returns a list of tuples, where the tuple members are the columns
#conn.row_factory = lambda c, r: json.loads(r[0])
#conn.row_factory = lambda c, r: r[0]
#conn.row_factory = sqlite3.Row
#CREATE TABLE
# c = conn.cursor()
# createTable= "CREATE TABLE runner_logs (runner_id varchar(32) NOT NULL, time real NOT NULL, data json NOT NULL);"
# print(c.execute(createTable))
# sql = ("CREATE INDEX index_runner_logs ON runner_logs (runner_id, time);")
# print(c.execute(sql))
#test objects
insert = dict(time=datetime.now(), side="ddd", rectype=RecordType.BAR, id=uuid4())
insert_list = [
    dict(time=datetime.now().timestamp(), side="ddd", rectype=RecordType.BAR, id=uuid4())
    for _ in range(4)
]
def insert_log(runner_id: UUID, time: float, logdict: dict):
    c = conn.cursor()
    json_string = json.dumps(logdict, default=json_serial)
    res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)", [str(runner_id), time, json_string])
    conn.commit()
    return res.rowcount
def insert_log_multiple(runner_id: UUID, loglist: list):
    c = conn.cursor()
    insert_data = []
    for i in loglist:
        row = (str(runner_id), i["time"], json.dumps(i, default=json_serial))
        insert_data.append(row)
    c.executemany("INSERT INTO runner_logs VALUES (?,?,?)", insert_data)
    conn.commit()
    return c.rowcount
# c = conn.cursor()
# json_string = json.dumps(logdict, default=json_serial)
# res = c.execute("INSERT INTO runner_logs VALUES (?,?,?)",[str(runner_id), time, json_string])
# print(res)
# conn.commit()
# return res
#returns list of ilog jsons
def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float):
    conn.row_factory = lambda c, r: json.loads(r[0])
    c = conn.cursor()
    res = c.execute("SELECT data FROM runner_logs WHERE runner_id=? AND time>=? AND time<=?",
                    [str(runner_id), timestamp_from, timestamp_to])
    return res.fetchall()
#returns number of deleted elements
def delete_logs(runner_id: UUID):
    c = conn.cursor()
    res = c.execute("DELETE FROM runner_logs WHERE runner_id=?", [str(runner_id)])
    print(res.rowcount)
    conn.commit()
    return res.rowcount
print(insert_log(str(uuid4()), datetime.now().timestamp(), insert))
c = conn.cursor()
ts_from = 1683108821.08872
ts_to = 1683108821.08874
# res = c.execute(f"SELECT runner_id, time, data FROM runner_logs where time > {ts_from} and time <{ts_to}")
# result = res.fetchall()
# res= delete_logs("7f9866ac-c742-47f4-a329-1d2b6721e781")
# print(res)
# res = read_log_window(runner_id="33", timestamp_from=11 , timestamp_to=22)
# print(res)
res = insert_log_multiple(uuid4(), insert_list)
print(res)
# res = read_log_window("3340e257-d19a-4179-baf3-3b39190acde3", ts_from, ts_to)
# print(res)
# for r in res.fetchall():
# print(dict(r))
#print(res.description)
#print(result)
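
Side note, not part of this commit: since the data column stores serialized JSON, SQLite's JSON1 function json_extract can filter on a field inside the payload directly in SQL. A minimal sketch under that assumption, reusing conn and the runner_logs table from above; the helper name read_logs_by_side is hypothetical and the 'side' field comes from the test objects.

# Sketch only (not in this commit): filter logs by a field inside the JSON payload.
# json_extract is part of SQLite's JSON1 extension, bundled with modern SQLite builds.
def read_logs_by_side(runner_id: UUID, side: str):
    conn.row_factory = None  # plain tuples, in case read_log_window changed it
    c = conn.cursor()
    res = c.execute(
        "SELECT data FROM runner_logs WHERE runner_id=? AND json_extract(data, '$.side')=?",
        [str(runner_id), side])
    return [json.loads(row[0]) for row in res.fetchall()]

# print(read_logs_by_side(uuid4(), "ddd"))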


@@ -19,14 +19,58 @@ from tinydb.operations import set
import json
from rich import print
arch_header_file = DATA_DIR + "/arch_header.json"
arch_detail_file = DATA_DIR + "/arch_detail.json"
#db layer to store runner archive
db_arch_h = TinyDB(arch_header_file, default=json_serial)
db_arch_d = TinyDB(arch_detail_file, default=json_serial)
#try out https://github.com/MrPigss/BetterJSONStorage
insert = {'datum': datetime.now(), 'side': "dd", 'name': 'david','id': uuid4(), 'order': "neco"}
class RunnerLogger:
    def __init__(self, runner_id: UUID) -> None:
        self.runner_id = runner_id
runner_log_file = DATA_DIR + "/runner_log.json"
db_runner_log = TinyDB(runner_log_file, default=json_serial)
def insert_log_multiple(runner_id: UUID, logList: list):
    runner_table = db_runner_log.table(str(runner_id))
    res = runner_table.insert_multiple(logList)
    return res
def insert_log(runner_id: UUID, logdict: dict):
    runner_table = db_runner_log.table(str(runner_id))
    res = runner_table.insert(logdict)
    return res
def read_log_window(runner_id: UUID, timestamp_from: float, timestamp_to: float):
    runner_table = db_runner_log.table(str(runner_id))
    res = runner_table.search((where('datum') >= timestamp_from) & (where('datum') <= timestamp_to))
    if len(res) == 0:
        return -1, "not found"
    return 0, res
def delete_log(runner_id: UUID):
    res = db_runner_log.drop_table(str(runner_id))
    if res is None:
        return -1, "not found"
    return 0, runner_id
# runner_id = uuid4()
# for i in range(0,10):
# print(insert_log(runner_id, insert))
print(delete_log(runner_id="2459a6ff-a350-44dc-9c14-11cfae07f7e9"))
print(read_log_window("ae9cdf8f-5cd0-4a49-8cfe-c486e21cb4fa",1,99999999999999))
#2459a6ff-a350-44dc-9c14-11cfae07f7e9
#ae9cdf8f-5cd0-4a49-8cfe-c486e21cb4fa
#db_runner_log.drop_tables()
print(db_runner_log.tables())
# res = db_arch_h.update(set('note', "ahoj"), where('id') == "74aa524e-3ed4-41fb-8166-f20946520344")
# print(res)
res = db_arch_d.all()
print(res)
#res = db_runner_log.all()
#print(res)
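
Side note, not part of this commit: before trying BetterJSONStorage (see the comment above), TinyDB's built-in CachingMiddleware is another option for cutting down disk writes on append-heavy logging. A sketch under that assumption; the runner_log_cached.json file name is made up, while DATA_DIR and json_serial are the ones already imported in this file.

# Sketch only (not in this commit): batch writes in memory and flush on close.
from tinydb import TinyDB
from tinydb.storages import JSONStorage
from tinydb.middlewares import CachingMiddleware

cached_log = TinyDB(DATA_DIR + "/runner_log_cached.json",
                    storage=CachingMiddleware(JSONStorage),
                    default=json_serial)  # extra kwargs are forwarded to json.dump
cached_log.table("some-runner-id").insert({"datum": 1683108821.0, "side": "buy"})
cached_log.close()  # flushes the in-memory cache to disk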

44
testy/tinyFLUXtest.py Normal file

@@ -0,0 +1,44 @@
from typing import Any, List
from uuid import UUID, uuid4
import pickle
from alpaca.data.historical import StockHistoricalDataClient
from alpaca.data.requests import StockTradesRequest, StockBarsRequest
from alpaca.data.enums import DataFeed
from alpaca.data.timeframe import TimeFrame
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
from v2realbot.common.model import StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveDetail, RunArchiveChange
from v2realbot.utils.utils import AttributeDict, zoneNY, dict_replace_value, Store, parse_toml_string, json_serial
from datetime import datetime
from threading import Thread, current_thread, Event, enumerate
from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR
import importlib
from queue import Queue
#from tinydb import TinyDB, Query, where
#from tinydb.operations import set
import json
from rich import print
from tinyflux import Point, TinyFlux
runner_log_file = DATA_DIR + "/runner_flux__log.json"
#db layer to store runner archive
db_runner_log = TinyFlux(runner_log_file)
insert_dict = {'datum': datetime.now(), 'side': "dd", 'name': 'david','id': uuid4(), 'order': "neco"}
#json.dumps(insert_dict, default=json_serial)
p1 = Point(time=datetime.now(), tags=insert_dict)
db_runner_log.insert(p1)
res=db_runner_log.all()
print(res)
# #db_runner_log.drop_table('hash')
# res = runner_table.get(where('side') == "dd")
# print(res)
# # res = db_arch_h.update(set('note', "ahoj"), where('id') == "74aa524e-3ed4-41fb-8166-f20946520344")
# # print(res)
# res = runner_table.all()
# print(res)
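
Side note, not part of this commit: a sketch of what the time-window read could look like on TinyFlux, using its TimeQuery. TinyFlux expects tag values to be strings, so the dict with datetime/UUID values above would most likely need to be stringified before insert; read_flux_window is a hypothetical helper name.

# Sketch only (not in this commit): a time-window read against the db_runner_log
# TinyFlux instance above. TinyFlux stores point times as UTC datetimes.
from datetime import timedelta, timezone
from tinyflux import TimeQuery

def read_flux_window(start: datetime, end: datetime):
    tq = TimeQuery()
    return db_runner_log.search((tq >= start) & (tq <= end))

# end = datetime.now(timezone.utc)
# print(read_flux_window(end - timedelta(minutes=5), end))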


@@ -114,6 +114,10 @@ db = TinyDB(db_file, default=json_serial)
db.truncate()
insert = {'datum': datetime.now(), 'side': OrderSide.BUY, 'name': 'david','id': uuid4(), 'order': orderList}
#insert record
db.insert(a.__dict__)
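
Side note, not part of this diff: reading a record back out of the same TinyDB file. Assuming json_serial stringifies UUIDs on write, lookups by id need the string form; the record below is a hypothetical example, not the `a` object from the hunk.

# Sketch only: query a record back out of the same TinyDB file.
# UUIDs are assumed to be serialized to strings by json_serial, hence str(rec_id).
from tinydb import where

rec_id = uuid4()
db.insert({'datum': datetime.now(), 'name': 'david', 'id': rec_id})
found = db.search((where('name') == 'david') & (where('id') == str(rec_id)))
print(found)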