refactoring archrunners+new ma inds
@@ -9,25 +9,37 @@ from alpaca.data.models import BarSet, QuoteSet, TradeSet
from alpaca.data.timeframe import TimeFrame
# import mplfinance as mpf
import pandas as pd
from rich import print
from v2realbot.utils.utils import zoneNY
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY
from alpaca.trading.requests import GetCalendarRequest
from alpaca.trading.client import TradingClient

parametry = {}

# no keys required
#client = CryptoHistoricalDataClient()
client = StockHistoricalDataClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)
datetime_object_from = datetime.datetime(2023, 2, 27, 18, 51, 38, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 2, 27, 21, 51, 39, tzinfo=datetime.timezone.utc)
bar_request = StockBarsRequest(symbol_or_symbols="BAC", timeframe=TimeFrame.Minute, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)
clientTrading = TradingClient(ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, raw_data=False)

# get previous day's bars

datetime_object_from = datetime.datetime(2023, 10, 11, 4, 0, 00, tzinfo=datetime.timezone.utc)
datetime_object_to = datetime.datetime(2023, 10, 16, 16, 1, 00, tzinfo=datetime.timezone.utc)
calendar_request = GetCalendarRequest(start=datetime_object_from, end=datetime_object_to)
cal_dates = clientTrading.get_calendar(calendar_request)
print(cal_dates)
bar_request = StockBarsRequest(symbol_or_symbols="BAC", timeframe=TimeFrame.Day, start=datetime_object_from, end=datetime_object_to, feed=DataFeed.SIP)

# bars = client.get_stock_bars(bar_request).df

bars = client.get_stock_bars(bar_request)
bars: BarSet = client.get_stock_bars(bar_request)
#bars = bars.drop(['symbol'])

#print(bars.df.close)
#bars = bars.tz_convert('America/New_York')
print(bars.data["BAC"])
#print(len(bars))
print(bars)
#print(bars.data["BAC"][0])
#print(bars.df.columns)
#Index(['open', 'high', 'low', 'close', 'volume', 'trade_count', 'vwap'], dtype='object')
# bars.df.set_index('timestamp', inplace=True)
142
testy/migrace/migracerunnerheader.py
Normal file
@@ -0,0 +1,142 @@
import sqlite3
from v2realbot.config import DATA_DIR
from v2realbot.utils.utils import json_serial
from uuid import UUID, uuid4
import json
from datetime import datetime
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account
from v2realbot.common.model import RunArchiveDetail, RunArchive, RunArchiveView
from tinydb import TinyDB, Query, where
from v2realbot.common.db import pool, execute_with_retry


# Helper function to transform a row to a RunArchive object
def row_to_object(row: dict) -> RunArchive:
    return RunArchive(
        id=row.get('id'),
        strat_id=row.get('strat_id'),
        batch_id=row.get('batch_id'),
        symbol=row.get('symbol'),
        name=row.get('name'),
        note=row.get('note'),
        started=row.get('started'),
        stopped=row.get('stopped'),
        mode=row.get('mode'),
        account=row.get('account'),
        bt_from=row.get('bt_from'),
        bt_to=row.get('bt_to'),
        strat_json=row.get('strat_json'),
        stratvars=row.get('stratvars'),
        settings=row.get('settings'),
        ilog_save=row.get('ilog_save'),
        profit=row.get('profit'),
        trade_count=row.get('trade_count'),
        end_positions=row.get('end_positions'),
        end_positions_avgp=row.get('end_positions_avgp'),
        metrics=row.get('open_orders'),  # the old JSON blob stored metrics under 'open_orders'
        #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None,
        stratvars_toml=row.get('stratvars_toml')
    )

def get_all_archived_runners():
    conn = pool.get_connection()
    try:
        conn.row_factory = lambda c, r: json.loads(r[0])
        c = conn.cursor()
        res = c.execute(f"SELECT data FROM runner_header")
    finally:
        conn.row_factory = None
        pool.release_connection(conn)
    return 0, res.fetchall()

def insert_archive_header(archeader: RunArchive):
    conn = pool.get_connection()
    try:
        c = conn.cursor()
        json_string = json.dumps(archeader, default=json_serial)
        if archeader.batch_id is not None:
            statement = f"INSERT INTO runner_header (runner_id, batch_id, ra) VALUES ('{str(archeader.id)}','{str(archeader.batch_id)}','{json_string}')"
        else:
            statement = f"INSERT INTO runner_header (runner_id, ra) VALUES ('{str(archeader.id)}','{json_string}')"

        res = execute_with_retry(c, statement)
        conn.commit()
    finally:
        pool.release_connection(conn)
    return res.rowcount

set = list[RunArchive]  # placeholder annotation; note this shadows the built-in set

def migrate_to_columns(ra: RunArchive):
    conn = pool.get_connection()
    try:
        c = conn.cursor()
        # statement = f"""UPDATE runner_header SET
        # strat_id='{str(ra.strat_id)}',
        # batch_id='{ra.batch_id}',
        # symbol='{ra.symbol}',
        # name='{ra.name}',
        # note='{ra.note}',
        # started='{ra.started}',
        # stopped='{ra.stopped}',
        # mode='{ra.mode}',
        # account='{ra.account}',
        # bt_from='{ra.bt_from}',
        # bt_to='{ra.bt_to}',
        # strat_json='ra.strat_json)',
        # settings='{ra.settings}',
        # ilog_save='{ra.ilog_save}',
        # profit='{ra.profit}',
        # trade_count='{ra.trade_count}',
        # end_positions='{ra.end_positions}',
        # end_positions_avgp='{ra.end_positions_avgp}',
        # metrics='{ra.metrics}',
        # stratvars_toml="{ra.stratvars_toml}"
        # WHERE runner_id='{str(ra.strat_id)}'
        # """
        # print(statement)

        res = c.execute('''
            UPDATE runner_header
            SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=?
            WHERE runner_id=?
            ''',
            (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))

        conn.commit()
    finally:
        pool.release_connection(conn)
    return 0, res

res, set = get_all_archived_runners()
print(f"fetched {len(set)}")
for row in set:
    ra: RunArchive = row_to_object(row)
    print(f"item {ra.id}")
    res, val = migrate_to_columns(ra)
    print(res, val)
    print("migrated", ra.id)


#print(set)

# def migrate():
#     set = list[RunArchiveDetail]
#     #res, set = get_all_archived_runners_detail()
#     print(f"fetched {len(set)}")
#     for row in set:
#         #insert_archive_detail(row)
#         print(f"inserted {row['id']}")


# idecko = uuid4()

# runArchiveDetail: RunArchiveDetail = RunArchiveDetail(id = idecko,
#                     name="nazev runneru",
#                     bars=bars,
#                     indicators=[dict(time=[])],
#                     statinds=dict(neco=233,zase=333),
#                     trades=list(dict()))
44
testy/migrace/migracni skript.sql
Normal file
@@ -0,0 +1,44 @@

CREATE TABLE "sqlb_temp_table_1" (
    "runner_id" varchar(32) NOT NULL,
    "strat_id" TEXT,
    "batch_id" TEXT,
    "symbol" TEXT,
    "name" TEXT,
    "note" TEXT,
    "started" TEXT,
    "stopped" TEXT,
    "mode" TEXT,
    "account" TEXT,
    "bt_from" TEXT,
    "bt_to" TEXT,
    "strat_json" TEXT,
    "settings" TEXT,
    "ilog_save" INTEGER,
    "profit" NUMERIC,
    "trade_count" INTEGER,
    "end_positions" INTEGER,
    "end_positions_avgp" NUMERIC,
    "metrics" TEXT,
    "stratvars_toml" TEXT,
    "data" json NOT NULL,
    PRIMARY KEY("runner_id")
);
INSERT INTO "main"."sqlb_temp_table_1" ("batch_id","data","runner_id") SELECT "batch_id","data","runner_id" FROM "main"."runner_header";
PRAGMA defer_foreign_keys;
PRAGMA defer_foreign_keys = '1';
DROP TABLE "main"."runner_header";
ALTER TABLE "main"."sqlb_temp_table_1" RENAME TO "runner_header";
PRAGMA defer_foreign_keys = '0';

CREATE INDEX "index_runner_header_batch" ON "runner_header" (
    "batch_id"
);

CREATE INDEX "index_runner_header_pk" ON "runner_header" (
    "runner_id"
);

CREATE INDEX "index_runner_header_strat" ON "runner_header" (
    "strat_id"
);
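The script follows SQLite's rebuild-and-rename migration pattern: create the widened table, copy the surviving columns, drop the old table, rename, then recreate the indexes. A minimal sketch of applying it from Python (file paths are illustrative, not part of the commit):

import sqlite3

DB_FILE = "v2trading.db"                       # assumed database path
SCRIPT = "testy/migrace/migracni skript.sql"   # the migration script above

conn = sqlite3.connect(DB_FILE)
try:
    with open(SCRIPT, encoding="utf-8") as f:
        # executescript runs all statements in order (it commits any open transaction first)
        conn.executescript(f.read())
    conn.commit()
finally:
    conn.close()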
23
testy/valueremapping.py
Normal file
@@ -0,0 +1,23 @@


import numpy as np


values = [1.0, 1.4, 0.9, 1.2]  # sample input; 'values' was undefined in the original scratch file
arr = np.array(values)

# Find the current value and the minimum and maximum values
current_value = arr[-1]
min_value = np.min(arr)
max_value = np.max(arr)

# remap to the range [-1, 1]
remapped_value = 2 * (current_value - min_value) / (max_value - min_value) - 1

# remap to the range [0, 1]
atr10 = arr  # stand-in series; 'atr10' was undefined in the original scratch file
remapped_value = (atr10[-1] - np.min(atr10)) / (np.max(atr10) - np.min(atr10))

cp = type("Cfg", (), {})()  # stand-in object; 'cp' was undefined in the original scratch file
# stored expression later evaluated by the 'statement' indicator
cp.statement = "np.mean(vwap[-(abs(int(50*atr10r[-1]))):])"
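Both formulas above are instances of min-max rescaling. A hedged sketch of a reusable helper covering both target ranges (names are illustrative, not part of the codebase):

import numpy as np

def remap(series, lo: float = 0.0, hi: float = 1.0) -> float:
    """Min-max rescale the last value of series into [lo, hi]."""
    arr = np.asarray(series, dtype=float)
    span = arr.max() - arr.min()
    if span == 0:
        return lo  # degenerate case: all values equal
    unit = (arr[-1] - arr.min()) / span          # position in [0, 1]
    return lo + unit * (hi - lo)

print(remap([1.0, 1.4, 0.9, 1.2]))              # 0.6 in [0, 1]
print(remap([1.0, 1.4, 0.9, 1.2], -1.0, 1.0))   # 0.2 in [-1, 1]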
@@ -3,6 +3,9 @@ import sqlite3
import queue
import threading
import time
from v2realbot.common.model import RunArchive, RunArchiveView
from datetime import datetime
import json

sqlite_db_file = DATA_DIR + "/v2trading.db"
# Define the connection pool
@@ -50,9 +53,57 @@ def execute_with_retry(cursor: sqlite3.Cursor, statement: str, retry_interval: i
        else:
            raise e


# pool of connections, if necessary
pool = ConnectionPool(10)
# one shared connection (used for writes only, in WAL mode)
insert_conn = sqlite3.connect(sqlite_db_file, check_same_thread=False)
insert_queue = queue.Queue()

# converts a row dict back into an object, including re-typing
def row_to_runarchiveview(row: dict) -> RunArchiveView:
    return RunArchiveView(
        id=row['runner_id'],
        strat_id=row['strat_id'],
        batch_id=row['batch_id'],
        symbol=row['symbol'],
        name=row['name'],
        note=row['note'],
        started=datetime.fromisoformat(row['started']) if row['started'] else None,
        stopped=datetime.fromisoformat(row['stopped']) if row['stopped'] else None,
        mode=row['mode'],
        account=row['account'],
        bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
        bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
        ilog_save=bool(row['ilog_save']),
        profit=float(row['profit']),
        trade_count=int(row['trade_count']),
        end_positions=int(row['end_positions']),
        end_positions_avgp=float(row['end_positions_avgp']),
        metrics=json.loads(row['metrics']) if row['metrics'] else None,
    )

# converts a row dict back into an object, including re-typing
def row_to_runarchive(row: dict) -> RunArchive:
    return RunArchive(
        id=row['runner_id'],
        strat_id=row['strat_id'],
        batch_id=row['batch_id'],
        symbol=row['symbol'],
        name=row['name'],
        note=row['note'],
        started=datetime.fromisoformat(row['started']) if row['started'] else None,
        stopped=datetime.fromisoformat(row['stopped']) if row['stopped'] else None,
        mode=row['mode'],
        account=row['account'],
        bt_from=datetime.fromisoformat(row['bt_from']) if row['bt_from'] else None,
        bt_to=datetime.fromisoformat(row['bt_to']) if row['bt_to'] else None,
        strat_json=json.loads(row['strat_json']),
        settings=json.loads(row['settings']),
        ilog_save=bool(row['ilog_save']),
        profit=float(row['profit']),
        trade_count=int(row['trade_count']),
        end_positions=int(row['end_positions']),
        end_positions_avgp=float(row['end_positions_avgp']),
        metrics=json.loads(row['metrics']) if row['metrics'] else None,
        stratvars_toml=row['stratvars_toml']
    )
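A minimal usage sketch pairing these converters with sqlite3.Row (illustrative; assumes the column schema created by the migration script above):

import sqlite3
from v2realbot.common.db import row_to_runarchive

conn = sqlite3.connect("v2trading.db")
conn.row_factory = sqlite3.Row          # rows become dict-like, so row['runner_id'] works
row = conn.execute("SELECT * FROM runner_header LIMIT 1").fetchone()
if row:
    ra = row_to_runarchive(row)         # re-typed pydantic RunArchive
    print(ra.id, ra.profit)
conn.close()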
@@ -198,6 +198,7 @@ class RunArchiveChange(BaseModel):
    id: UUID
    note: str

# in the future, switch to SQLAlchemy
# Contains the archive of running strategies (runners) - master record
class RunArchive(BaseModel):
    # unique id of the algo run
@@ -215,6 +216,7 @@ class RunArchive(BaseModel):
    bt_from: Optional[datetime] = None
    bt_to: Optional[datetime] = None
    strat_json: Optional[str] = None
    ## to be decommissioned; replaced by stratvars_toml
    stratvars: Optional[dict] = None
    settings: Optional[dict] = None
    ilog_save: Optional[bool] = False
@@ -222,9 +224,30 @@ class RunArchive(BaseModel):
    trade_count: int = 0
    end_positions: int = 0
    end_positions_avgp: float = 0
    open_orders: Optional[Union[dict, str]] = None
    metrics: Optional[Union[dict, str]] = None
    stratvars_toml: Optional[str] = None

# For the GUI master-table view
class RunArchiveView(BaseModel):
    id: UUID
    strat_id: UUID
    batch_id: Optional[str] = None
    symbol: str
    name: str
    note: Optional[str] = None
    started: datetime
    stopped: Optional[datetime] = None
    mode: Mode
    account: Account
    bt_from: Optional[datetime] = None
    bt_to: Optional[datetime] = None
    ilog_save: Optional[bool] = False
    profit: float = 0
    trade_count: int = 0
    end_positions: int = 0
    end_positions_avgp: float = 0
    metrics: Optional[Union[dict, str]] = None

# class for storing stop-loss history in ext_data
class SLHistory(BaseModel):
    id: Optional[UUID]
@@ -6,7 +6,7 @@ from alpaca.data.requests import StockTradesRequest, StockBarsRequest
from alpaca.data.enums import DataFeed
from alpaca.data.timeframe import TimeFrame
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem
from v2realbot.common.model import RunDay, StrategyInstance, Runner, RunRequest, RunArchive, RunArchiveView, RunArchiveDetail, RunArchiveChange, Bar, TradeEvent, TestList, Intervals, ConfigItem
from v2realbot.utils.utils import AttributeDict, zoneNY, dict_replace_value, Store, parse_toml_string, json_serial, is_open_hours, send_to_telegram
from v2realbot.utils.ilog import delete_logs
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
@@ -27,8 +27,8 @@ import pandas as pd
from traceback import format_exc
from datetime import timedelta, time
from threading import Lock
from v2realbot.common.db import pool, execute_with_retry
from sqlite3 import OperationalError
from v2realbot.common.db import pool, execute_with_retry, row_to_runarchive, row_to_runarchiveview
from sqlite3 import OperationalError, Row
#from pyinstrument import Profiler
# adding a lock to ensure thread safety of TinyDB (will be migrated to a proper db in the future)
lock = Lock()
@@ -680,14 +680,14 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param
    res["profit"]["short_cnt"] = short_cnt
    res["profit"]["long_profit"] = round(long_profit,2)
    res["profit"]["short_profit"] = round(short_profit,2)
    res["profit"]["long_wins"] = round(long_wins,2)
    res["profit"]["long_losses"] = round(long_losses,2)
    res["profit"]["short_wins"] = round(short_wins,2)
    res["profit"]["short_losses"] = round(short_losses,2)
    res["profit"]["max_profit"] = round(max_profit,2)
    res["profit"]["max_profit_time"] = str(max_profit_time)
    res["profit"]["max_loss"] = round(max_loss,2)
    res["profit"]["max_loss_time"] = str(max_loss_time)
    res["profit"]["long_wins"] = round(long_wins,2)
    res["profit"]["long_losses"] = round(long_losses,2)
    res["profit"]["short_wins"] = round(short_wins,2)
    res["profit"]["short_losses"] = round(short_losses,2)
    # insert the whole list
    res["prescr_trades"]=json.loads(json.dumps(strat.state.vars.prescribedTrades, default=json_serial))

@@ -747,13 +747,12 @@ def archive_runner(runner: Runner, strat: StrategyInstance, inter_batch_params:
        bt_from=bp_from,
        bt_to = bp_to,
        strat_json = runner.run_strat_json,
        stratvars = strat.state.vars,
        settings = settings,
        profit=round(float(strat.state.profit),2),
        trade_count=len(strat.state.tradeList),
        end_positions=strat.state.positions,
        end_positions_avgp=round(float(strat.state.avgp),3),
        open_orders=json.dumps(results_metrics, default=json_serial),
        metrics=results_metrics,
        stratvars_toml=runner.run_stratvars_toml
    )

@@ -825,32 +824,69 @@ def migrate_archived_runners() -> list[RunArchive]:
        print("Exception in migration: " + str(e) + format_exc())
        return -2, str(e) + format_exc()

def get_all_archived_runners():
    conn = pool.get_connection()
    try:
        conn.row_factory = lambda c, r: json.loads(r[0])
        c = conn.cursor()
        res = c.execute(f"SELECT data FROM runner_header")
    finally:
        conn.row_factory = None
        pool.release_connection(conn)
    return 0, res.fetchall()

# returns a specific record
def get_archived_runner_header_byID(id: UUID):
def get_all_archived_runners() -> list[RunArchiveView]:
    conn = pool.get_connection()
    try:
        conn.row_factory = lambda c, r: json.loads(r[0])
        conn.row_factory = Row
        c = conn.cursor()
        result = c.execute(f"SELECT data FROM runner_header WHERE runner_id='{str(id)}'")
        res = result.fetchone()
        c.execute(f"SELECT runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics FROM runner_header")
        rows = c.fetchall()
        results = []
        for row in rows:
            results.append(row_to_runarchiveview(row))
    finally:
        conn.row_factory = None
        pool.release_connection(conn)
    if res==None:
        return -2, "not found"
    return 0, results

#DECOMMS
# def get_all_archived_runners():
#     conn = pool.get_connection()
#     try:
#         conn.row_factory = lambda c, r: json.loads(r[0])
#         c = conn.cursor()
#         res = c.execute(f"SELECT data FROM runner_header")
#     finally:
#         conn.row_factory = None
#         pool.release_connection(conn)
#     return 0, res.fetchall()

# returns the full, complete RunArchive record
def get_archived_runner_header_byID(id: UUID) -> RunArchive:
    conn = pool.get_connection()
    try:
        conn.row_factory = Row
        c = conn.cursor()
        c.execute(f"SELECT * FROM runner_header WHERE runner_id='{str(id)}'")
        row = c.fetchone()

        if row:
            return 0, row_to_runarchive(row)
        else:
            return 0, res
            return -2, "not found"

    finally:
        conn.row_factory = None
        pool.release_connection(conn)


#DECOMM
# # returns the data for a specific record
# def get_archived_runner_header_byID(id: UUID):
#     conn = pool.get_connection()
#     try:
#         conn.row_factory = lambda c, r: json.loads(r[0])
#         c = conn.cursor()
#         result = c.execute(f"SELECT data FROM runner_header WHERE runner_id='{str(id)}'")
#         res= result.fetchone()
#     finally:
#         conn.row_factory = None
#         pool.release_connection(conn)
#     if res==None:
#         return -2, "not found"
#     else:
#         return 0, res

# returns the list of runners with a given batch_id
def get_archived_runnerslist_byBatchID(batch_id: str):
@@ -867,13 +903,18 @@ def insert_archive_header(archeader: RunArchive):
    conn = pool.get_connection()
    try:
        c = conn.cursor()
        json_string = json.dumps(archeader, default=json_serial)
        if archeader.batch_id is not None:
            statement = f"INSERT INTO runner_header (runner_id, batch_id, data) VALUES ('{str(archeader.id)}','{str(archeader.batch_id)}','{json_string}')"
        else:
            statement = f"INSERT INTO runner_header (runner_id, data) VALUES ('{str(archeader.id)}','{json_string}')"
        #json_string = json.dumps(archeader, default=json_serial)

        res = execute_with_retry(c,statement)
        res = c.execute("""
            INSERT INTO runner_header
            (runner_id, strat_id, batch_id, symbol, name, note, started, stopped, mode, account, bt_from, bt_to, strat_json, settings, ilog_save, profit, trade_count, end_positions, end_positions_avgp, metrics, stratvars_toml)
            VALUES
            (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (str(archeader.id), str(archeader.strat_id), archeader.batch_id, archeader.symbol, archeader.name, archeader.note, archeader.started, archeader.stopped, archeader.mode, archeader.account, archeader.bt_from, archeader.bt_to, json.dumps(archeader.strat_json), json.dumps(archeader.settings), archeader.ilog_save, archeader.profit, archeader.trade_count, archeader.end_positions, archeader.end_positions_avgp, json.dumps(archeader.metrics, default=json_serial), archeader.stratvars_toml))

        #retry not yet supported for statement format above
        #res = execute_with_retry(c,statement)
        conn.commit()
    finally:
        pool.release_connection(conn)
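The switch from f-string interpolation to ? placeholders matters beyond style: interpolated values break on embedded quotes and open the door to SQL injection, while placeholders let sqlite3 handle quoting. A standalone illustration (table and values are illustrative):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE runner_header (runner_id TEXT PRIMARY KEY, data TEXT)")

payload = '{"note": "O\'Brien"}'  # a JSON value containing a single quote

# f-string interpolation breaks (or worse, injects) on embedded quotes:
# conn.execute(f"INSERT INTO runner_header VALUES ('id1', '{payload}')")  # sqlite3.OperationalError

# parameterized placeholders handle quoting safely:
conn.execute("INSERT INTO runner_header VALUES (?, ?)", ("id1", payload))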
@@ -884,14 +925,21 @@ def edit_archived_runners(runner_id: UUID, archChange: RunArchiveChange):
    try:
        res, sada = get_archived_runner_header_byID(id=runner_id)
        if res == 0:
            archOriginal = RunArchive(**sada)
            archOriginal.note = archChange.note

            # we update only the note
            try:
                conn = pool.get_connection()
                c = conn.cursor()
                json_string = json.dumps(archOriginal, default=json_serial)
                statement = f"UPDATE runner_header SET data = '{json_string}' WHERE runner_id='{str(runner_id)}'"
                res = execute_with_retry(c,statement)

                res = c.execute('''
                    UPDATE runner_header
                    SET note=?
                    WHERE runner_id=?
                    ''',
                    (archChange.note, str(runner_id)))

                #retry not yet supported here
                #res = execute_with_retry(c,statement)
                #print(res)
                conn.commit()
            finally:
@@ -901,7 +949,9 @@ def edit_archived_runners(runner_id: UUID, archChange: RunArchiveChange):
            return -1, f"Could not find arch runner {runner_id} {res} {sada}"

    except Exception as e:
        return -2, str(e)
        errmsg = str(e) + format_exc()
        print(errmsg)
        return -2, errmsg

#delete runner in archive and archive detail and runner logs
# reworked into a SINGLE TRANSACTION
@@ -96,6 +96,13 @@ def trima(data: Any, period: int = 50, use_series=False) -> Any:
    trima = ti.trima(data, period)
    return pd.Series(trima) if use_series else trima

def tema(data: Any, period: int = 50, use_series=False) -> Any:
    if check_series(data):
        use_series = True
    data = convert_to_numpy(data)
    tema = ti.tema(data, period)
    return pd.Series(tema) if use_series else tema


def macd(data: Any, short_period: int = 12, long_period: int = 26, signal_period: int = 9, use_series=False) -> Any:
    if check_series(data):
@@ -106,3 +113,23 @@ def macd(data: Any, short_period: int = 12, long_period: int = 26, signal_period
        df = pd.DataFrame({'macd': macd, 'macd_signal': macd_signal, 'macd_histogram': macd_histogram})
        return df
    return macd, macd_signal, macd_histogram

def ema(data, period: int = 50, use_series=False):
    if check_series(data):
        use_series = True
    data = convert_to_numpy(data)
    ema = ti.ema(data, period=period)
    return pd.Series(ema) if use_series else ema

def sma(data, period: int = 50, use_series=False):
    """
    Finds the moving average of a dataset.
    Args:
        data: (list) the data you want the moving average of
        period: (int) window length of each averaged set
    """
    if check_series(data):
        use_series = True
    data = convert_to_numpy(data)
    sma = ti.sma(data, period=period)
    return pd.Series(sma) if use_series else sma
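A minimal usage sketch of these wrappers (assuming this hunk belongs to v2realbot.indicators.moving_averages, as the imports elsewhere in the commit suggest, and that the package and tulipy are installed):

import pandas as pd
from v2realbot.indicators.moving_averages import ema, sma

closes = [101.2, 101.5, 101.1, 101.8, 102.0, 101.7, 101.9, 102.3]

print(sma(closes, period=3))             # list in -> numpy array out
print(ema(pd.Series(closes), period=3))  # pd.Series in -> pd.Series out (use_series auto-detected)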
@@ -11,7 +11,7 @@ import uvicorn
from uuid import UUID
import v2realbot.controller.services as cs
from v2realbot.utils.ilog import get_log_window
from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem
from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query
from fastapi.responses import FileResponse
from fastapi.staticfiles import StaticFiles
@@ -285,19 +285,105 @@ def migrate():
    if not os.path.exists(lock_file):

        #migration code
        print("migration code done")
        print("migration code STARTED")
        try:
            # Helper function to transform a row to a RunArchive object
            def row_to_object(row: dict) -> RunArchive:
                return RunArchive(
                    id=row.get('id'),
                    strat_id=row.get('strat_id'),
                    batch_id=row.get('batch_id'),
                    symbol=row.get('symbol'),
                    name=row.get('name'),
                    note=row.get('note'),
                    started=row.get('started'),
                    stopped=row.get('stopped'),
                    mode=row.get('mode'),
                    account=row.get('account'),
                    bt_from=row.get('bt_from'),
                    bt_to=row.get('bt_to'),
                    strat_json=row.get('strat_json'),
                    stratvars=row.get('stratvars'),
                    settings=row.get('settings'),
                    ilog_save=row.get('ilog_save'),
                    profit=row.get('profit'),
                    trade_count=row.get('trade_count'),
                    end_positions=row.get('end_positions'),
                    end_positions_avgp=row.get('end_positions_avgp'),
                    metrics=row.get('open_orders'),  # the old JSON blob stored metrics under 'open_orders'
                    #metrics=json.loads(row.get('metrics')) if row.get('metrics') else None,
                    stratvars_toml=row.get('stratvars_toml')
                )

            def get_all_archived_runners():
                conn = pool.get_connection()
                try:
                    conn.row_factory = lambda c, r: json.loads(r[0])
                    c = conn.cursor()
                    statement = f'ALTER TABLE "runner_header" ADD COLUMN "batch_id" TEXT'
                    res = c.execute(statement)
                    print(res)
                    print("table created")
                    conn.commit()
                    res = c.execute(f"SELECT data FROM runner_header")
                finally:
                    conn.row_factory = None
                    pool.release_connection(conn)
                return 0, res.fetchall()

            set = list[RunArchive]  # placeholder annotation; note this shadows the built-in set

            def migrate_to_columns(ra: RunArchive):
                conn = pool.get_connection()
                try:
                    c = conn.cursor()
                    # statement = f"""UPDATE runner_header SET
                    # strat_id='{str(ra.strat_id)}',
                    # batch_id='{ra.batch_id}',
                    # symbol='{ra.symbol}',
                    # name='{ra.name}',
                    # note='{ra.note}',
                    # started='{ra.started}',
                    # stopped='{ra.stopped}',
                    # mode='{ra.mode}',
                    # account='{ra.account}',
                    # bt_from='{ra.bt_from}',
                    # bt_to='{ra.bt_to}',
                    # strat_json='ra.strat_json)',
                    # settings='{ra.settings}',
                    # ilog_save='{ra.ilog_save}',
                    # profit='{ra.profit}',
                    # trade_count='{ra.trade_count}',
                    # end_positions='{ra.end_positions}',
                    # end_positions_avgp='{ra.end_positions_avgp}',
                    # metrics='{ra.metrics}',
                    # stratvars_toml="{ra.stratvars_toml}"
                    # WHERE runner_id='{str(ra.strat_id)}'
                    # """
                    # print(statement)

                    res = c.execute('''
                        UPDATE runner_header
                        SET strat_id=?, batch_id=?, symbol=?, name=?, note=?, started=?, stopped=?, mode=?, account=?, bt_from=?, bt_to=?, strat_json=?, settings=?, ilog_save=?, profit=?, trade_count=?, end_positions=?, end_positions_avgp=?, metrics=?, stratvars_toml=?
                        WHERE runner_id=?
                        ''',
                        (str(ra.strat_id), ra.batch_id, ra.symbol, ra.name, ra.note, ra.started, ra.stopped, ra.mode, ra.account, ra.bt_from, ra.bt_to, json.dumps(ra.strat_json), json.dumps(ra.settings), ra.ilog_save, ra.profit, ra.trade_count, ra.end_positions, ra.end_positions_avgp, json.dumps(ra.metrics), ra.stratvars_toml, str(ra.id)))

                    conn.commit()
                finally:
                    pool.release_connection(conn)
                return 0, res

            res, set = get_all_archived_runners()
            print(f"fetched {len(set)}")
            for row in set:
                ra: RunArchive = row_to_object(row)
                print(f"item {ra.id}")
                res, val = migrate_to_columns(ra)
                print(res, val)
                print("migrated", ra.id)

        finally:
            open(lock_file, 'w').close()

    return 0
@@ -317,15 +403,24 @@ def migrate():
#ARCHIVE RUNNERS SECTION
# region Archive runners

#get all archived runners header
#get all archived runners headers - just RunArchiveView
@app.get("/archived_runners/", dependencies=[Depends(api_key_auth)])
def _get_all_archived_runners() -> list[RunArchive]:
def _get_all_archived_runners() -> list[RunArchiveView]:
    res, set = cs.get_all_archived_runners()
    if res == 0:
        return set
    else:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")

#get complete header data for a specific archived runner = RunArchive
@app.get("/archived_runners/{runner_id}", dependencies=[Depends(api_key_auth)])
def _get_archived_runner_header_byID(runner_id: UUID) -> RunArchive:
    res, set = cs.get_archived_runner_header_byID(runner_id)
    if res == 0:
        return set
    else:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found")

#delete archive runner from header and detail
@app.delete("/archived_runners/", dependencies=[Depends(api_key_auth)], status_code=status.HTTP_200_OK)
def _delete_archived_runners_byIDs(runner_ids: list[UUID]):
@@ -342,7 +437,7 @@ def _edit_archived_runners(archChange: RunArchiveChange, runner_id: UUID):
    elif res == -1:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Error not found: {res}:{runner_id}")
    else:
        raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not changed: {res}:{runner_id}")
        raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not changed: {res}:{runner_id}:{id}")

#get all archived runners detail
@app.get("/archived_runners_detail/", dependencies=[Depends(api_key_auth)])
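The endpoints above now split list and detail retrieval: the collection route returns lightweight RunArchiveView records, and the per-id route returns the full RunArchive. A minimal client sketch (assumes a local server on port 8000 and the requests package; the key value is a placeholder):

import requests

API_KEY = "your-api-key"  # placeholder
headers = {"X-API-Key": API_KEY}

# list view (lightweight RunArchiveView records)
runners = requests.get("http://localhost:8000/archived_runners/", headers=headers).json()

# full RunArchive record for one runner
detail = requests.get(f"http://localhost:8000/archived_runners/{runners[0]['id']}", headers=headers).json()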
@@ -251,12 +251,10 @@
<th>bt_from</th>
<th>bt_to</th>
<th>ilog</th>
<th>stratvars</th>
<th>profit</th>
<th>trade</th>
<th>pos</th>
<th>pos_avgp</th>
<th>json</th>
<th>metrics</th>
</tr>
</thead>

@@ -906,7 +906,7 @@ function chart_archived_run(archRecord, data, oneMinuteBars) {
    $("#statusAccount").text(archRecord.account)
    $("#statusIlog").text("Logged:" + archRecord.ilog_save)
    $("#statusStratvars").text(((archRecord.strat_json)?archRecord.strat_json:archRecord.stratvars),null,2)
    $("#statusSettings").text(JSON.stringify(archRecord.open_orders,null,2) + " " + JSON.stringify(archRecord.settings,null,2))
    $("#statusSettings").text(JSON.stringify(archRecord.metrics,null,2) + " " + JSON.stringify(archRecord.settings,null,2))

    //TBD other dynamically created indicators

@@ -1,4 +1,42 @@
//ARCHIVE TABLES

function refresh_arch_and_callback(row, callback) {
    console.log("entering refresh")
    var request = $.ajax({
        url: "/archived_runners/"+row.id,
        beforeSend: function (xhr) {
            xhr.setRequestHeader('X-API-Key', API_KEY); },
        method:"GET",
        contentType: "application/json",
        dataType: "json",
        success:function(data){
            console.log("fetched data ok")
            //console.log(JSON.stringify(data,null,2));
        },
        error: function(xhr, status, error) {
            var err = eval("(" + xhr.responseText + ")");
            window.alert(JSON.stringify(xhr));
            console.log(JSON.stringify(xhr));
        }
    });

    // Handling the response of the request
    $.when(request).then(function(response) {
        // The request has completed successfully
        //console.log("Result from request:", response);
        console.log("Response received, calling callback")
        //call callback function
        callback(response)

    }, function(error) {
        // Handle errors from the request here
        console.error("Error from request:", error);
        console.log("requesting id error")
    });
}

$(document).ready(function () {
    archiveRecords.ajax.reload();

@@ -34,15 +72,79 @@ $(document).ready(function () {
        }
    });


    //button compare arch
    $('#button_compare_arch').click(function () {
        window.$('#diffModal').modal('show');
        rows = archiveRecords.rows('.selected').data();

        id1 = rows[0].id
        id2 = rows[1].id

        var request1 = $.ajax({
            url: "/archived_runners/"+id1,
            beforeSend: function (xhr) {
                xhr.setRequestHeader('X-API-Key', API_KEY); },
            method:"GET",
            contentType: "application/json",
            dataType: "json",
            success:function(data){
                console.log("first request ok")
                console.log(JSON.stringify(data,null,2));
            },
            error: function(xhr, status, error) {
                var err = eval("(" + xhr.responseText + ")");
                window.alert(JSON.stringify(xhr));
                console.log(JSON.stringify(xhr));
                console.log("first request error")
            }
        });
        var request2 = $.ajax({
            url: "/archived_runners/"+id2,
            beforeSend: function (xhr) {
                xhr.setRequestHeader('X-API-Key', API_KEY); },
            method:"GET",
            contentType: "application/json",
            dataType: "json",
            success:function(data){
                console.log("second request ok")
                console.log(JSON.stringify(data,null,2));
            },
            error: function(xhr, status, error) {
                var err = eval("(" + xhr.responseText + ")");
                window.alert(JSON.stringify(xhr));
                console.log(JSON.stringify(xhr));
                console.log("second request error")
            }
        });

        // Handling the responses of both requests
        $.when(request1, request2).then(function(response1, response2) {
            // Both requests have completed successfully
            var result1 = response1[0];
            var result2 = response2[0];
            console.log("Result from first request:", result1);
            console.log("Result from second request:", result2);
            console.log("calling compare")
            perform_compare(result1, result2)
            // Perform your action with the results from both requests

        }, function(error1, error2) {
            // Handle errors from either request here
            console.error("Error from first request:", error1);
            console.error("Error from second request:", error2);
        });

        // two freshly fetched objects come in here
        function perform_compare(data1, data2) {

            var record1 = new Object()
            //console.log(JSON.stringify(rows))

            record1 = JSON.parse(rows[0].strat_json)
            record1 = JSON.parse(data1.strat_json)
            //record1.json = rows[0].json
            //record1.id = rows[0].id;
            // record1.id2 = parseInt(rows[0].id2);
@@ -57,10 +159,10 @@ $(document).ready(function () {
            //ELEMENTS TO COMPARE

            // profit section
            console.log(rows[0].open_orders)
            console.log(data1.metrics)

            try {
                record1["profit"] = JSON.parse(rows[0].open_orders).profit
                record1["profit"] = JSON.parse(data1.metrics).profit
            }
            catch (e) {
                console.log(e.message)
@@ -73,7 +175,7 @@ $(document).ready(function () {
            //jsonString1 = JSON.stringify(record1, null, 2);

            var record2 = new Object()
            record2 = JSON.parse(rows[1].strat_json)
            record2 = JSON.parse(data2.strat_json)

            // record2.id = rows[1].id;
            // record2.id2 = parseInt(rows[1].id2);
@@ -85,10 +187,10 @@ $(document).ready(function () {
            // record2.close_rush = rows[1].close_rush;

            //ELEMENTS TO COMPARE
            console.log(rows[1].open_orders)
            console.log(data2.metrics)

            try {
                record2["profit"] = JSON.parse(rows[1].open_orders).profit
                record2["profit"] = JSON.parse(data2.metrics).profit
            }
            catch (e) {
                console.log(e.message)
@@ -101,8 +203,8 @@ $(document).ready(function () {

            $('#diff_first').text(record1.name);
            $('#diff_second').text(record2.name);
            $('#diff_first_id').text(rows[0].id);
            $('#diff_second_id').text(rows[1].id);
            $('#diff_first_id').text(data1.id);
            $('#diff_second_id').text(data2.id);

            var delta = compareObjects(record1, record2)
            const htmlMarkup2 = `<pre>{\n${generateHTML(record2, delta)}}\n</pre>`;
@@ -113,6 +215,7 @@ $(document).ready(function () {

        event.preventDefault();
        //$('#button_compare').attr('disabled','disabled');
        }
    });


@@ -144,19 +247,23 @@ $(document).ready(function () {
        if (row == undefined) {
            return
        }

        refresh_arch_and_callback(row, display_edit_modal)

        function display_edit_modal(row) {
            window.$('#editModalArchive').modal('show');
            $('#editidarchive').val(row.id);
            $('#editnote').val(row.note);


            try {
                metrics = JSON.parse(row.open_orders)
                metrics = JSON.parse(row.metrics)
            }
            catch (e) {
                metrics = row.open_orders
                metrics = row.metrics
            }
            $('#metrics').val(JSON.stringify(metrics,null,2));
            //$('#metrics').val(TOML.parse(row.open_orders));
            //$('#metrics').val(TOML.parse(row.metrics));
            if (row.stratvars_toml) {
                $('#editstratvars').val(row.stratvars_toml);
            }
@@ -166,15 +273,20 @@ $(document).ready(function () {


            $('#editstratjson').val(row.strat_json);
        }
    });

    //show button
    $('#button_show_arch').click(function () {

        row = archiveRecords.row('.selected').data();
        if (row == undefined) {
            return
        }
        $('#button_show_arch').attr('disabled',true);

        refresh_arch_and_callback(row, get_detail_and_show)

        function get_detail_and_show(row) {
            $.ajax({
                url:"/archived_runners_detail/"+row.id,
                beforeSend: function (xhr) {
@@ -201,6 +313,7 @@ $(document).ready(function () {
                    $('#button_show_arch').attr('disabled',false);
                }
            })
        }
    });
})

@@ -216,7 +329,72 @@ $(document).ready(function () {
        //record1.json = rows[0].json

        // TBD: maybe copy only selected fields?
        record1 = row

        //getting required data (detail of the archived runner + stratin to be run)
        var request1 = $.ajax({
            url: "/archived_runners/"+row.id,
            beforeSend: function (xhr) {
                xhr.setRequestHeader('X-API-Key', API_KEY); },
            method:"GET",
            contentType: "application/json",
            dataType: "json",
            success:function(data){
                console.log("fetched data ok")
                console.log(JSON.stringify(data,null,2));
            },
            error: function(xhr, status, error) {
                var err = eval("(" + xhr.responseText + ")");
                window.alert(JSON.stringify(xhr));
                console.log(JSON.stringify(xhr));
            }
        });

        // load the data for the strategy
        var request2 = $.ajax({
            url: "/stratins/"+row.strat_id,
            beforeSend: function (xhr) {
                xhr.setRequestHeader('X-API-Key', API_KEY); },
            method:"GET",
            contentType: "application/json",
            dataType: "json",
            success:function(data){
                console.log("fetched data ok")
                console.log(JSON.stringify(data,null,2));
            },
            error: function(xhr, status, error) {
                var err = eval("(" + xhr.responseText + ")");
                window.alert(JSON.stringify(xhr));
                console.log(JSON.stringify(xhr));
            }
        });


        // Handling the responses of both requests
        $.when(request1, request2).then(function(response1, response2) {
            // Both requests have completed successfully
            var result1 = response1[0];
            var result2 = response2[0];

            console.log("Result from first request:", result1);
            console.log("Result from second request:", result2);

            console.log("calling rerun")
            rerun_strategy(result1, result2)
            // Perform your action with the results from both requests

        }, function(error1, error2) {
            // Handle errors from either request here
            console.error("Error from first request:", error1);
            console.error("Error from second request:", error2);
        });


        function rerun_strategy(archRunner, stratData) {
            record1 = archRunner
            console.log(record1)

            // remove what's not needed and add what is
@@ -229,7 +407,7 @@ $(document).ready(function () {
            delete record1["stratvars_toml"];
            delete record1["started"];
            delete record1["stopped"];
            delete record1["open_orders"];
            delete record1["metrics"];
            delete record1["settings"];
            delete record1["stratvars"];

@@ -241,24 +419,6 @@ $(document).ready(function () {
            // deleting; we only rerun a single run
            record1["test_batch_id"];

            // find the stratin row with the given ID and build strat_json from it
            var idToFind = record1.strat_id; // Replace with the specific ID you want to find

            var foundRow = stratinRecords.rows().eq(0).filter(function (rowIdx) {
                return stratinRecords.row(rowIdx).data().id === idToFind;
            });

            if (foundRow.length > 0) {
                // Get the data of the first matching row
                var stratData = stratinRecords.row(foundRow[0]).data();
                console.log(stratData);
            } else {
                // Handle the case where no matching row is found
                console.log("No strategy with ID " + idToFind + " found.");
                window.alert("No strategy with ID " + idToFind + " found.")
                return
            }

            const rec = new Object()
            rec.id2 = parseInt(stratData.id2);
            rec.name = stratData.name;
@@ -303,6 +463,7 @@ $(document).ready(function () {
                    $('#button_runagain_arch').attr('disabled',false);
                }
            })
        }

    })

@@ -412,14 +573,11 @@ var archiveRecords =
    {data: 'bt_from', visible: true},
    {data: 'bt_to', visible: true},
    {data: 'ilog_save', visible: true},
    {data: 'stratvars', visible: false},
    {data: 'profit'},
    {data: 'trade_count', visible: true},
    {data: 'end_positions', visible: true},
    {data: 'end_positions_avgp', visible: true},
    {data: 'strat_json', visible: false},
    {data: 'open_orders', visible: true},
    {data: 'stratvars_toml', visible: false},
    {data: 'metrics', visible: true},
    ],
    paging: false,
    processing: false,
@@ -470,7 +628,7 @@ var archiveRecords =
    },
    },
    {
        targets: [18],
        targets: [16],
        render: function ( data, type, row ) {
            try {
                data = JSON.parse(data)

@@ -38,6 +38,44 @@ function is_stratin_running(id) {
    return running
}

function refresh_stratin_and_callback(row, callback) {
    var request = $.ajax({
        url: "/stratin/"+row.id,
        beforeSend: function (xhr) {
            xhr.setRequestHeader('X-API-Key', API_KEY); },
        method:"GET",
        contentType: "application/json",
        dataType: "json",
        success:function(data){
            console.log("fetched data ok")
            console.log(JSON.stringify(data,null,2));
        },
        error: function(xhr, status, error) {
            var err = eval("(" + xhr.responseText + ")");
            window.alert(JSON.stringify(xhr));
            console.log(JSON.stringify(xhr));
        }
    });

    // Handling the response of the request
    $.when(request).then(function(response) {
        // The request has completed successfully
        var result = response[0];

        console.log("Result from request:", result);
        console.log("calling callback")
        //call callback function
        callback(result)

    }, function(error) {
        // Handle errors from the request here
        console.error("Error from request:", error);
        console.log("requesting id error")
    });
}

let editor;

//STRATIN and RUNNERS TABLES
@@ -341,33 +379,98 @@ $(document).ready(function () {
    $('#button_compare').click(function () {
        window.$('#diffModal').modal('show');
        rows = stratinRecords.rows('.selected').data();
        const rec1 = new Object()
        rec1.id = rows[0].id;
        rec1.id2 = parseInt(rows[0].id2);
        rec1.name = rows[0].name;
        rec1.symbol = rows[0].symbol;
        rec1.class_name = rows[0].class_name;
        rec1.script = rows[0].script;
        rec1.open_rush = rows[0].open_rush;
        rec1.close_rush = rows[0].close_rush;
        rec1.stratvars_conf = TOML.parse(rows[0].stratvars_conf);
        rec1.add_data_conf = TOML.parse(rows[0].add_data_conf);
        rec1.note = rows[0].note;

        id1 = rows[0].id
        id2 = rows[1].id

        //get up-to-date data
        var request1 = $.ajax({
            url: "/stratins/"+id1,
            beforeSend: function (xhr) {
                xhr.setRequestHeader('X-API-Key', API_KEY); },
            method:"GET",
            contentType: "application/json",
            dataType: "json",
            success:function(data){
                console.log("first request ok")
                console.log(JSON.stringify(data,null,2));
            },
            error: function(xhr, status, error) {
                var err = eval("(" + xhr.responseText + ")");
                window.alert(JSON.stringify(xhr));
                console.log(JSON.stringify(xhr));
                console.log("first request error")
            }
        });
        var request2 = $.ajax({
            url: "/stratins/"+id2,
            beforeSend: function (xhr) {
                xhr.setRequestHeader('X-API-Key', API_KEY); },
            method:"GET",
            contentType: "application/json",
            dataType: "json",
            success:function(data){
                console.log("second request ok")
                console.log(JSON.stringify(data,null,2));
            },
            error: function(xhr, status, error) {
                var err = eval("(" + xhr.responseText + ")");
                window.alert(JSON.stringify(xhr));
                console.log(JSON.stringify(xhr));
                console.log("second request error")
            }
        });

        // Handling the responses of both requests
        $.when(request1, request2).then(function(response1, response2) {
            // Both requests have completed successfully
            var result1 = response1[0];
            var result2 = response2[0];
            console.log("Result from first request:", result1);
            console.log("Result from second request:", result2);
            console.log("calling compare")
            perform_compare(result1, result2)
            // Perform your action with the results from both requests

        }, function(error1, error2) {
            // Handle errors from either request here
            console.error("Error from first request:", error1);
            console.error("Error from second request:", error2);
        });

        function perform_compare(rec1, rec2) {

            // const rec1 = new Object()
            // rec1.id = rows[0].id;
            // rec1.id2 = parseInt(rows[0].id2);
            // rec1.name = rows[0].name;
            // rec1.symbol = rows[0].symbol;
            // rec1.class_name = rows[0].class_name;
            // rec1.script = rows[0].script;
            // rec1.open_rush = rows[0].open_rush;
            // rec1.close_rush = rows[0].close_rush;
            rec1.stratvars_conf = TOML.parse(rec1.stratvars_conf);
            rec1.add_data_conf = TOML.parse(rec1.add_data_conf);
            // rec1.note = rows[0].note;
            rec1.history = "";
            //jsonString1 = JSON.stringify(rec1, null, 2);

            const rec2 = new Object()
            rec2.id = rows[1].id;
            rec2.id2 = parseInt(rows[1].id2);
            rec2.name = rows[1].name;
            rec2.symbol = rows[1].symbol;
            rec2.class_name = rows[1].class_name;
            rec2.script = rows[1].script;
            rec2.open_rush = rows[1].open_rush;
            rec2.close_rush = rows[1].close_rush;
            rec2.stratvars_conf = TOML.parse(rows[1].stratvars_conf);
            rec2.add_data_conf = TOML.parse(rows[1].add_data_conf);
            rec2.note = rows[1].note;
            // const rec2 = new Object()
            // rec2.id = rows[1].id;
            // rec2.id2 = parseInt(rows[1].id2);
            // rec2.name = rows[1].name;
            // rec2.symbol = rows[1].symbol;
            // rec2.class_name = rows[1].class_name;
            // rec2.script = rows[1].script;
            // rec2.open_rush = rows[1].open_rush;
            // rec2.close_rush = rows[1].close_rush;
            rec2.stratvars_conf = TOML.parse(rec2.stratvars_conf);
            rec2.add_data_conf = TOML.parse(rec2.add_data_conf);
            // rec2.note = rows[1].note;
            rec2.history = "";
            //jsonString2 = JSON.stringify(rec2, null, 2);

@@ -386,6 +489,7 @@ $(document).ready(function () {

        event.preventDefault();
        //$('#button_compare').attr('disabled','disabled');
        }
    });

    //button connect
@@ -512,6 +616,11 @@ $(document).ready(function () {
        if (row== undefined) {
            return
        }

        refresh_stratin_and_callback(row, show_edit_modal)

        function show_edit_modal(row) {

            window.$('#recordModal').modal('show');
            $('#id').val(row.id);
            $('#id2').val(row.id2);
@@ -528,6 +637,7 @@ $(document).ready(function () {
            $('.modal-title').html(" Edit Records");
            $('#action').val('updateRecord');
            $('#save').val('Save');
        }

    });
    //delete button
@@ -549,6 +659,12 @@ $(document).ready(function () {
        if (row== undefined) {
            return
        }

        //refresh item and then call methods
        refresh_stratin_and_callback(row, show_stratvars_edit_modal)

        function show_stratvars_edit_modal(row) {

            $('#stratvar_id').val(row.id);
            require(["vs/editor/editor.main"], () => {
                editor = monaco.editor.create(document.getElementById('stratvars_editor'), {
@@ -560,6 +676,7 @@ $(document).ready(function () {
            });
            window.$('#stratvarsModal').modal('show');
            //$('#stratvars_editor_val').val(row.stratvars_conf);
        }
    });
} );


@@ -3,8 +3,8 @@ API_KEY = localStorage.getItem("api-key")
var chart = null
// var colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957"]
// var reset_colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957"]
var colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#7B0E60","#9B2888","#BD38A0","#A30F68","#6E0B50","#CA2183","#E6319B","#A04C54","#643848","#CA7474","#E68D8D","#4F9C34","#3B7128","#73DF4D","#95EF65","#A857A4","#824690","#D087CC","#E2A1DF","#79711B","#635D17","#99912B","#B1A73D","#3779C9","#2B68B3","#5599ED","#77A9F7","#004C67","#00687D","#A1C6B5","#8CC6A5","#C9E6D5","#E4F6EA","#D2144A","#A60F3B","#FA2463","#FF3775"];
var reset_colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#00425A","#B5D5C5","#e61957","#7B0E60","#9B2888","#BD38A0","#A30F68","#6E0B50","#CA2183","#E6319B","#A04C54","#643848","#CA7474","#E68D8D","#4F9C34","#3B7128","#73DF4D","#95EF65","#A857A4","#824690","#D087CC","#E2A1DF","#79711B","#635D17","#99912B","#B1A73D","#3779C9","#2B68B3","#5599ED","#77A9F7","#004C67","#00687D","#A1C6B5","#8CC6A5","#C9E6D5","#E4F6EA","#D2144A","#A60F3B","#FA2463","#FF3775"];
var colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#B5D5C5","#e61957","#7B0E60","#9B2888","#BD38A0","#A30F68","#6E0B50","#CA2183","#E6319B","#A04C54","#643848","#CA7474","#E68D8D","#4F9C34","#3B7128","#73DF4D","#95EF65","#A857A4","#824690","#D087CC","#E2A1DF","#79711B","#635D17","#99912B","#B1A73D","#3779C9","#2B68B3","#5599ED","#77A9F7","#004C67","#00687D","#A1C6B5","#8CC6A5","#C9E6D5","#E4F6EA","#D2144A","#A60F3B","#FA2463","#FF3775"];
var reset_colors = ["#8B1874","#B71375","#B46060","#61c740","#BE6DB7","#898121","#4389d9","#B5D5C5","#e61957","#7B0E60","#9B2888","#BD38A0","#A30F68","#6E0B50","#CA2183","#E6319B","#A04C54","#643848","#CA7474","#E68D8D","#4F9C34","#3B7128","#73DF4D","#95EF65","#A857A4","#824690","#D087CC","#E2A1DF","#79711B","#635D17","#99912B","#B1A73D","#3779C9","#2B68B3","#5599ED","#77A9F7","#004C67","#00687D","#A1C6B5","#8CC6A5","#C9E6D5","#E4F6EA","#D2144A","#A60F3B","#FA2463","#FF3775"];


var indList = []

@@ -252,7 +252,7 @@ html {
    display: inline-block;
    /* overflow: auto; */
    height: 596px;
    width: 32%;
    /* width: 32%; */
}

.clearbutton {

36
v2realbot/strategyblocks/indicators/custom/ma.py
Normal file
@@ -0,0 +1,36 @@
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists
from v2realbot.strategy.base import StrategyState
import v2realbot.indicators.moving_averages as mi
from v2realbot.strategyblocks.indicators.helpers import get_source_series
from rich import print as printanyway
from traceback import format_exc
from v2realbot.ml.ml import ModelML
import numpy as np
from collections import defaultdict
from v2realbot.strategyblocks.indicators.helpers import value_or_indicator


# IMPLEMENTS different types of moving averages
def ma(state, params):
    funcName = "ma"
    type = safe_get(params, "type", "ema")
    source = safe_get(params, "source", None)
    lookback = safe_get(params, "lookback", 14)

    # lookback can be a reference to an indicator; in that case we take its value
    lookback = int(value_or_indicator(state, lookback))

    source_series = get_source_series(state, source)

    # if there are fewer elements, we work with what we have
    if len(source_series) > lookback:
        source_series = source_series[-lookback:]

    type = "mi."+type
    ma_function = eval(type)

    ma_value = ma_function(source_series, lookback)
    val = round(ma_value[-1],4)

    state.ilog(lvl=1,e=f"INSIDE {funcName} {val} {type=} {source=} {lookback=}", **params)
    return 0, val
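eval("mi." + type) resolves the MA function by name at runtime; it works, but it will execute whatever string arrives in the type parameter. A hedged sketch of an equivalent lookup via getattr with a whitelist (illustrative alternative, not the committed implementation):

import v2realbot.indicators.moving_averages as mi

ALLOWED_MA = {"ema", "sma", "tema", "trima", "vwma"}  # assumed set of wrappers in the module above

def resolve_ma(name: str):
    if name not in ALLOWED_MA:
        raise ValueError(f"unknown moving-average type: {name!r}")
    # same lookup as eval("mi." + name), without evaluating arbitrary code
    return getattr(mi, name)

ma_function = resolve_ma("ema")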
@@ -17,12 +17,17 @@ def statement(state: StrategyState, params):
    if operation is None:
        return -2, "required param missing"

    state.ilog(lvl=1,e=f"BEFORE {funcName} {operation=}", **params)

    # for starters, plain eval
    val = eval(operation, None, state.ind_mapping)


    if not np.isfinite(val):
        val = 0
    #val = ne.evaluate(operation, state.ind_mapping)

    state.ilog(lvl=1,e=f"INSIDE {funcName} {operation=} res:{val}", **params)
    state.ilog(lvl=1,e=f"AFTER {funcName} {operation=} res:{val}", **params)
    return 0, val



38
v2realbot/strategyblocks/indicators/custom/vwma.py
Normal file
@@ -0,0 +1,38 @@
# volume-weighted exponential average
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, print, safe_get, is_still, is_window_open, eval_cond_dict, crossed_down, crossed_up, crossed, is_pivot, json_serial, pct_diff, create_new_bars, slice_dict_lists
from v2realbot.strategy.base import StrategyState
from v2realbot.indicators.moving_averages import vwma as ext_vwma
from v2realbot.strategyblocks.indicators.helpers import get_source_series
from rich import print as printanyway
from traceback import format_exc
from v2realbot.ml.ml import ModelML
import numpy as np
from collections import defaultdict
from v2realbot.strategyblocks.indicators.helpers import value_or_indicator

# Volume (or reference_source) Weighted Moving Average
def vwma(state, params):
    funcName = "vwma"
    source = safe_get(params, "source", None)
    ref_source = safe_get(params, "ref_source", "volume")
    lookback = safe_get(params, "lookback", 14)

    # lookback can be a reference to an indicator; in that case we take its value
    lookback = int(value_or_indicator(state, lookback))

    source_series = get_source_series(state, source)
    ref_source_series = get_source_series(state, ref_source)

    pocet_clenu = len(source_series)
    # if there are fewer elements, we work with what we have
    if pocet_clenu < lookback:
        lookback = pocet_clenu

    source_series = source_series[-lookback:]
    ref_source_series = ref_source_series[-lookback:]

    vwma_value = ext_vwma(source_series, ref_source_series, lookback)
    val = round(vwma_value[-1],4)

    state.ilog(lvl=1,e=f"INSIDE {funcName} {val} {source=} {ref_source=} {lookback=}", **params)
    return 0, val
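A VWMA weights each price by the paired reference series (volume by default): vwma = sum(price * volume) / sum(volume) over the lookback window. A self-contained sketch of that computation (illustrative; the committed code delegates to v2realbot.indicators.moving_averages.vwma):

import numpy as np

def vwma_last(prices, volumes, lookback: int) -> float:
    """Volume-weighted moving average over the last `lookback` values."""
    p = np.asarray(prices, dtype=float)[-lookback:]
    v = np.asarray(volumes, dtype=float)[-lookback:]
    return float((p * v).sum() / v.sum())  # assumes v.sum() != 0

print(round(vwma_last([10.0, 11.0, 12.0], [100, 200, 700], 3), 4))  # 11.6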