sizing, progress bar, reporting basics

This commit is contained in:
David Brazda
2023-11-20 17:55:55 +01:00
parent 520b1a9a8f
commit 67d34481c6
19 changed files with 1032 additions and 54 deletions

media/basic/.gitignore vendored Normal file
View File

@ -0,0 +1,5 @@
# Ignore everything in this directory
*
# Except this file
!.gitignore

View File

@ -30,6 +30,8 @@ class Trade(BaseModel):
entry_price: Optional[float] = None
goal_price: Optional[float] = None
size: Optional[int] = None
# size_multiplier is a helper variable for computing the relative daily profit
size_multiplier: Optional[float] = None
# stoploss_type: TradeStoplossType
stoploss_value: Optional[float] = None
profit: Optional[float] = 0

View File

@ -1,6 +1,10 @@
from alpaca.data.enums import DataFeed
from v2realbot.enums.enums import Mode, Account, FillCondition
from appdirs import user_data_dir
from pathlib import Path
#directory for generated images and basic reports
MEDIA_DIRECTORY = Path(__file__).parent.parent / "media"
#'0.0.0.0',
#currently only the prod server has access to LIVE
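For orientation, a minimal sketch (the basic_report_path helper is illustrative only, not part of this commit) of how the reporting changes below derive image paths from MEDIA_DIRECTORY:

def basic_report_path(report_id: str):
    # a runner's or batch's "basic" report image is stored as <id>.png
    # under media/basic (kept in git only via the .gitignore above)
    return MEDIA_DIRECTORY / "basic" / f"{report_id}.png"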

View File

@ -13,7 +13,7 @@ from v2realbot.utils.ilog import delete_logs
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus, TradeStoplossType
from datetime import datetime
from threading import Thread, current_thread, Event, enumerate
from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN
from v2realbot.config import STRATVARS_UNCHANGEABLES, ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, ACCOUNT1_LIVE_API_KEY, ACCOUNT1_LIVE_SECRET_KEY, DATA_DIR,BT_FILL_CONS_TRADES_REQUIRED,BT_FILL_LOG_SURROUNDING_TRADES,BT_FILL_CONDITION_BUY_LIMIT,BT_FILL_CONDITION_SELL_LIMIT, GROUP_TRADES_WITH_TIMESTAMP_LESS_THAN, MEDIA_DIRECTORY
import importlib
from alpaca.trading.requests import GetCalendarRequest
from alpaca.trading.client import TradingClient
@ -35,6 +35,8 @@ import v2realbot.strategyblocks.indicators.custom as ci
from v2realbot.strategyblocks.inits.init_indicators import initialize_dynamic_indicators
from v2realbot.strategyblocks.indicators.indicators_hub import populate_dynamic_indicators
from v2realbot.interfaces.backtest_interface import BacktestInterface
import os
from v2realbot.reporting.metricstoolsimage import generate_trading_report_image
#from pyinstrument import Profiler
#adding lock to ensure thread safety of TinyDB (in future will be migrated to proper db)
@ -332,6 +334,12 @@ def capsule(target: object, db: object, inter_batch_params: dict = None):
archive_runner(runner=i, strat=target, inter_batch_params=inter_batch_params)
#remove the runner after the instance finishes
db.runners.remove(i)
#create the report image for the RUNNER
try:
generate_trading_report_image(runner_ids=[str(i.id)])
print("DAILY REPORT IMAGE CREATED")
except Exception as e:
print("Nepodarilo se vytvorit report image", str(e)+format_exc())
print("Runner STOPPED")
@ -500,6 +508,13 @@ def batch_run_manager(id: UUID, runReq: RunRequest, rundays: list[RunDay]):
#i.history += str(runner.__dict__)+"<BR>"
db.save()
#create the report image for the batch
try:
generate_trading_report_image(batch_id=batch_id)
print("BATCH REPORT IMAGE CREATED")
except Exception as e:
print("Nepodarilo se vytvorit report image", str(e)+format_exc())
#stratin run
def run_stratin(id: UUID, runReq: RunRequest, synchronous: bool = False, inter_batch_params: dict = None):
if runReq.mode == Mode.BT:
@ -676,12 +691,6 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param
res["profit"]["batch_sum_profit"] = int(inter_batch_params["batch_profit"])
res["profit"]["batch_sum_rel_profit"] = inter_batch_params["batch_rel_profit"]
#rel_profit averaged
res["profit"]["daily_rel_profit_avg"] = float(np.sum(strat.state.rel_profit_cum)) if len(strat.state.rel_profit_cum) > 0 else 0
#rel_profit itemized per day
res["profit"]["daily_rel_profit_list"] = strat.state.rel_profit_cum
#metrics from prescribedTrades, if they exist
try:
long_profit = 0
@ -696,39 +705,53 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param
max_loss_time = None
long_cnt = 0
short_cnt = 0
sum_wins_profit= 0
sum_loss = 0
if "prescribedTrades" in strat.state.vars:
for trade in strat.state.vars.prescribedTrades:
if trade.profit_sum < max_loss:
max_loss = trade.profit_sum
max_loss_time = trade.last_update
if trade.profit_sum > max_profit:
max_profit = trade.profit_sum
max_profit_time = trade.last_update
if trade.status == TradeStatus.ACTIVATED and trade.direction == TradeDirection.LONG:
long_cnt += 1
if trade.profit is not None:
long_profit += trade.profit
if trade.profit < 0:
long_losses += trade.profit
if trade.profit > 0:
long_wins += trade.profit
if trade.status == TradeStatus.ACTIVATED and trade.direction == TradeDirection.SHORT:
short_cnt +=1
if trade.profit is not None:
short_profit += trade.profit
if trade.profit < 0:
short_losses += trade.profit
if trade.profit > 0:
short_wins += trade.profit
if trade.status == TradeStatus.CLOSED:
if trade.profit_sum < max_loss:
max_loss = trade.profit_sum
max_loss_time = trade.last_update
if trade.profit_sum > max_profit:
max_profit = trade.profit_sum
max_profit_time = trade.last_update
if trade.direction == TradeDirection.LONG:
long_cnt += 1
if trade.profit is not None:
long_profit += trade.profit
if trade.profit < 0:
long_losses += trade.profit
if trade.profit > 0:
long_wins += trade.profit
if trade.direction == TradeDirection.SHORT:
short_cnt +=1
if trade.profit is not None:
short_profit += trade.profit
if trade.profit < 0:
short_losses += trade.profit
if trade.profit > 0:
short_wins += trade.profit
sum_wins = long_wins + short_wins
sum_losses = long_losses + short_losses
#straighten this out somehow, maybe discuss with Martin or Verca
#for now it is not shown very clearly - note: a loss should just be negative profit, not a new quantity
#the only exception is the max cumulative loss (drawdown)
res["profit"]["sum_wins"] = sum_wins
res["profit"]["sum_losses"] = sum_losses
res["profit"]["long_cnt"] = long_cnt
res["profit"]["short_cnt"] = short_cnt
res["profit"]["short_cnt"] = short_cnt
#total profit for long/short
res["profit"]["long_profit"] = round(long_profit,2)
res["profit"]["short_profit"] = round(short_profit,2)
res["profit"]["max_profit"] = round(max_profit,2)
res["profit"]["max_profit_time"] = str(max_profit_time)
res["profit"]["max_loss"] = round(max_loss,2)
res["profit"]["max_loss_time"] = str(max_loss_time)
#maximum cumulative profit (i.e. profit peaks)
res["profit"]["max_profit_cum"] = round(max_profit,2)
res["profit"]["max_profit_cum_time"] = str(max_profit_time)
#maximum cumulative loss (i.e. peaks in loss)
res["profit"]["max_loss_cum"] = round(max_loss,2)
res["profit"]["max_loss_time_cum"] = str(max_loss_time)
res["profit"]["long_wins"] = round(long_wins,2)
res["profit"]["long_losses"] = round(long_losses,2)
res["profit"]["short_wins"] = round(short_wins,2)
@ -739,7 +762,13 @@ def populate_metrics_output_directory(strat: StrategyInstance, inter_batch_param
rp_string = "RP" + str(float(np.sum(strat.state.rel_profit_cum))) if len(strat.state.rel_profit_cum) >0 else "noRP"
##summary for quick display, e.g. P333L-222 PT9:30 PL10:30
res["profit"]["sum"]="P"+str(int(max_profit))+"L"+str(int(max_loss))+" "+ mpt_string+" " + mlt_string + rp_string + " "+str(strat.state.rel_profit_cum)
res["profit"]["sum"]="P"+str(int(sum_wins))+"L"+str(int(sum_losses))+" "+"MCP"+str(int(max_profit))+"MCL(DD)"+str(int(max_loss))+" "+ mpt_string+" " + mlt_string + rp_string + " "+str(strat.state.rel_profit_cum)
#rel_profit summed over days
res["profit"]["daily_rel_profit_sum"] = float(np.sum(strat.state.rel_profit_cum)) if len(strat.state.rel_profit_cum) > 0 else 0
#rel_profit itemized per day
res["profit"]["daily_rel_profit_list"] = strat.state.rel_profit_cum
#insert the whole list
res["prescr_trades"]=json.loads(json.dumps(strat.state.vars.prescribedTrades, default=json_serial))
@ -1007,6 +1036,25 @@ def edit_archived_runners(runner_id: UUID, archChange: RunArchiveChange):
print(errmsg)
return -2, errmsg
def delete_report_files(id):
#FOR NOW ONLY THE BASIC REPORT EXISTS
#delete report images
image_file_name = f"{id}.png"
image_path = str(MEDIA_DIRECTORY / "basic" / image_file_name)
try:
if os.path.exists(image_path):
os.remove(image_path)
print(f"File {image_path} has been deleted.")
return (0, "deleted")
else:
print(f"No File {image_path} found to delte.")
return (1, "not found")
except Exception as e:
print(f"An error occurred while deleting the file: {e}")
return (-1, str(e))
#delete runner in archive and archive detail and runner logs
#reworked into a SINGLE TRANSACTION
def delete_archived_runners_byIDs(ids: list[UUID]):
@ -1016,6 +1064,19 @@ def delete_archived_runners_byIDs(ids: list[UUID]):
for id in ids:
c = conn.cursor()
print(str(id))
# Get batch_id for the current runner_id
c.execute("SELECT batch_id FROM runner_header WHERE runner_id = ?", (str(id),))
batch_id = c.fetchone()
if batch_id:
batch_id = batch_id[0]
# Check if this is the last record with the given batch_id
c.execute("SELECT COUNT(*) FROM runner_header WHERE batch_id = ?", (batch_id,))
count = c.fetchone()[0]
if count == 1:
# If it's the last record, call delete_report_files
delete_report_files(batch_id)
resh = c.execute(f"DELETE from runner_header WHERE runner_id='{str(id)}';")
print("header deleted",resh.rowcount)
resd = c.execute(f"DELETE from runner_detail WHERE runner_id='{str(id)}';")
@ -1025,6 +1086,9 @@ def delete_archived_runners_byIDs(ids: list[UUID]):
out.append(str(id) + ": " + str(resh.rowcount) + " " + str(resd.rowcount) + " " + str(resl.rowcount))
conn.commit()
print("commit")
delete_report_files(id)
# if resh.rowcount == 0 or resd.rowcount == 0:
# return -1, "not found "+str(resh.rowcount) + " " + str(resd.rowcount) + " " + str(resl.rowcount)
return 0, out
@ -1044,6 +1108,15 @@ def delete_archive_header_byID(id: UUID):
res = execute_with_retry(c,statement)
conn.commit()
print("deleted", res.rowcount)
#delete report images
image_file_name = f"report_{id}.png"
image_path = str(MEDIA_DIRECTORY / image_file_name)
try:
if os.path.exists(image_path):
os.remove(image_path)
print(f"File {image_path} has been deleted.")
except Exception as e:
print(f"An error occurred while deleting the file: {e}")
finally:
pool.release_connection(conn)
return res.rowcount

View File

@ -8,7 +8,7 @@ from alpaca.data.enums import DataFeed
from alpaca.data.historical import StockHistoricalDataClient
from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
from threading import Thread, current_thread
from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, zoneNY, print
from v2realbot.utils.utils import parse_alpaca_timestamp, ltp, zoneNY
from v2realbot.utils.tlog import tlog
from datetime import datetime, timedelta, date
from threading import Thread
@ -21,6 +21,8 @@ import os
from rich import print
import queue
from alpaca.trading.models import Calendar
from tqdm import tqdm
"""
Trade offline data streamer, based on Alpaca historical data.
"""
@ -212,7 +214,7 @@ class Trade_Offline_Streamer(Thread):
cnt = 1
for t in tradesResponse[symbol]:
for t in tqdm(tradesResponse[symbol]):
#since the whole day is here, only the relevant trades are passed on
#i.e. when start_time < trade < end_time

View File

@ -1,6 +1,6 @@
import os,sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from v2realbot.config import WEB_API_KEY, DATA_DIR
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from datetime import datetime
import os
@ -13,7 +13,7 @@ import v2realbot.controller.services as cs
from v2realbot.utils.ilog import get_log_window
from v2realbot.common.model import StrategyInstance, RunnerView, RunRequest, Trade, RunArchive, RunArchiveView, RunArchiveDetail, Bar, RunArchiveChange, TestList, ConfigItem, InstantIndicator
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, WebSocketException, Cookie, Query
from fastapi.responses import FileResponse
from fastapi.responses import FileResponse, StreamingResponse
from fastapi.staticfiles import StaticFiles
from fastapi.security import HTTPBasic, HTTPBasicCredentials
from v2realbot.enums.enums import Env, Mode
@ -30,6 +30,9 @@ from v2realbot.utils.sysutils import get_environment
from uuid import uuid4
from sqlite3 import OperationalError
from time import sleep
import v2realbot.reporting.metricstools as mt
from v2realbot.reporting.metricstoolsimage import generate_trading_report_image
from traceback import format_exc
#from asyncio import Queue, QueueEmpty
# install()
@ -64,6 +67,7 @@ def api_key_auth(api_key: str = Depends(X_API_KEY)):
app = FastAPI()
root = os.path.dirname(os.path.abspath(__file__))
app.mount("/static", StaticFiles(html=True, directory=os.path.join(root, 'static')), name="static")
app.mount("/media", StaticFiles(directory=str(MEDIA_DIRECTORY)), name="media")
#app.mount("/", StaticFiles(html=True, directory=os.path.join(root, 'static')), name="www")
security = HTTPBasic()
@ -459,7 +463,6 @@ def _delete_indicator_byName(runner_id: UUID, indicator: InstantIndicator):
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not changed: {res}:{runner_id}:{vals}")
#edit archived runner ("note",..)
@app.patch("/archived_runners/{runner_id}", dependencies=[Depends(api_key_auth)])
def _edit_archived_runners(archChange: RunArchiveChange, runner_id: UUID):
@ -509,6 +512,31 @@ def _get_alpaca_history_bars(symbol: str, datetime_object_from: datetime, dateti
else:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"No data found {res} {set}")
#get pdf report - WIP
@app.put("/archived_runners/{runner_id}/generatepdf", dependencies=[Depends(api_key_auth)], responses={200: {"content": {"application/pdf": {}}}})
def _generate_pdf(runner_id: UUID):
#also allow a list of runners as input - the report is then created from all of them
#alternatively take a batch as input and generate it for the whole batch
res, vals = mt.create_trading_report_pdf(id=runner_id)
if res == 0:
# Return the PDF data as a streaming response
return StreamingResponse(vals, media_type="application/pdf", headers={"Content-Disposition": "attachment; filename=report.pdf"})
elif res == -1:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Error no runner: {runner_id} {res}:{vals}")
else:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error not changed: {res}:{runner_id}:{vals}")
#generate an image based on a list of ids
@app.post("/archived_runners/generatereportimage", dependencies=[Depends(api_key_auth)], responses={200: {"content": {"image/png": {}}}})
def _generate_report_image(runner_ids: list[UUID]):
try:
res, stream = generate_trading_report_image(runner_ids=runner_ids,stream=True)
if res == 0: return StreamingResponse(stream, media_type="image/png",headers={"Content-Disposition": "attachment; filename=report.png"})
elif res < 0:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {res}:{runner_ids}")
except Exception as e:
raise HTTPException(status_code=status.HTTP_406_NOT_ACCEPTABLE, detail=f"Error: {str(e)}" + format_exc())
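A hedged client sketch of the new report-image endpoint; the base URL and API key are placeholders, while the X-API-Key header and the JSON array of runner UUIDs follow the endpoint above and the frontend call added in this commit:

import requests

BASE_URL = "http://localhost:8000"   # placeholder host/port
API_KEY = "your-api-key"             # placeholder value, sent as X-API-Key

runner_ids = ["c3e31cb5-ddf9-467e-a932-2118f6844355"]  # example id reused from the reporting module below
resp = requests.post(
    f"{BASE_URL}/archived_runners/generatereportimage",
    headers={"X-API-Key": API_KEY},
    json=runner_ids,
)
resp.raise_for_status()
with open("report.png", "wb") as f:
    f.write(resp.content)  # PNG returned via StreamingResponse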
#TestList APIs - in the future move the SQL into separate functions
@app.post('/testlists/', dependencies=[Depends(api_key_auth)])
def create_record(testlist: TestList):

View File

View File

@ -0,0 +1,145 @@
import json
import numpy as np
import matplotlib
matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from fpdf import FPDF, XPos, YPos
from datetime import datetime
from io import BytesIO
import v2realbot.controller.services as cs
from rich import print
def create_trading_report_pdf(id, direct = True, output_file='trading_report.pdf'):
#get runner
res, set =cs.get_archived_runner_header_byID(id)
if res != 0:
return -1, f"no runner {id} found"
print("archrunner")
print(set)
# Parse JSON data
data = set.metrics
profit_data = data["profit"]
pos_cnt_data = data["pos_cnt"]
prescr_trades_data = data["prescr_trades"]
# PDF setup
pdf = FPDF()
pdf.set_auto_page_break(auto=True, margin=15)
pdf.set_font("Helvetica", size=10)
# Start the first page for plots
pdf.add_page()
# Create a combined figure for all plots (adjusting the layout to 3x3)
fig, axs = plt.subplots(3, 3, figsize=(15, 15))
# Plot 1: Overall Profit Summary Chart
sns.barplot(x=['Total Wins', 'Total Losses', 'Net Profit'],
y=[profit_data["sum_wins"], profit_data["sum_losses"],
profit_data["sum_wins"] - profit_data["sum_losses"]],
ax=axs[0, 0])
axs[0, 0].set_title('Overall Profit Summary')
# Plot 2: Profit Distribution by Trade Type
axs[0, 1].pie([profit_data["long_profit"], profit_data["short_profit"]],
labels=['Long Profit', 'Short Profit'], autopct='%1.1f%%')
axs[0, 1].set_title('Profit Distribution by Trade Type')
# Plot 3: Cumulative Profit Over Time Line Chart
exit_times = [datetime.fromtimestamp(trade["exit_time"]) for trade in prescr_trades_data]
cumulative_profits = [trade["profit_sum"] for trade in prescr_trades_data]
sns.lineplot(x=exit_times, y=cumulative_profits, ax=axs[0, 2])
axs[0, 2].set_title('Cumulative Profit Over Time')
axs[0, 2].tick_params(axis='x', rotation=45)
# Plot 4: Cumulative Profit Over Time with Max Profit Point
sns.lineplot(x=exit_times, y=cumulative_profits, label='Cumulative Profit', ax=axs[1, 0])
max_profit_time = datetime.fromisoformat(profit_data["max_profit_cum_time"])
max_profit = profit_data["max_profit_cum"]
axs[1, 0].scatter(max_profit_time, max_profit, color='green', label='Max Profit')
axs[1, 0].set_title('Cumulative Profit Over Time with Max Profit Point')
axs[1, 0].tick_params(axis='x', rotation=45)
axs[1, 0].legend()
# Plot 5: Trade Counts Bar Chart
sns.barplot(x=['Long Trades', 'Short Trades'],
y=[profit_data["long_cnt"], profit_data["short_cnt"]],
ax=axs[1, 1])
axs[1, 1].set_title('Trade Counts')
# Plot 6: Position Size Distribution
sns.barplot(x=list(pos_cnt_data.keys()), y=list(pos_cnt_data.values()), ax=axs[1, 2])
axs[1, 2].set_title('Position Size Distribution')
# Plot 7: Daily Relative Profit Chart
sns.lineplot(x=range(len(profit_data["daily_rel_profit_list"])), y=profit_data["daily_rel_profit_list"], ax=axs[2, 0])
axs[2, 0].set_title('Daily Relative Profit')
axs[2, 0].set_xlabel('Trade Number')
axs[2, 0].set_ylabel('Relative Profit')
# Adjust layout, save the combined plot, and add it to the PDF
# plt.tight_layout()
# plt.savefig("combined_plot.png", format="png", bbox_inches="tight")
# plt.close()
# pdf.image("combined_plot.png", x=10, y=20, w=180)
plt.tight_layout()
plot_buffer = BytesIO()
plt.savefig(plot_buffer, format="png")
plt.close()
plot_buffer.seek(0)
pdf.image(plot_buffer, x=10, y=20, w=180)
plot_buffer.close()
# Start a new page for the table and additional information
pdf.add_page()
# 8. Individual Trade Details Table
pdf.set_font("Helvetica", size=8)
trade_fields = ['id', 'direction', 'entry_time', 'exit_time', 'profit', 'profit_sum', 'rel_profit']
trades_table_data = [{field: trade[field] for field in trade_fields} for trade in prescr_trades_data]
trades_table = pd.DataFrame(trades_table_data)
for row in trades_table.values:
for cell in row:
pdf.cell(40, 10, str(cell), border=1)
pdf.ln()
# Profit/Loss Ratio and Relative Profit Metrics
profit_loss_ratio = "N/A" if profit_data["sum_losses"] == 0 else str(profit_data["sum_wins"] / profit_data["sum_losses"])
relative_profit = profit_data["daily_rel_profit_sum"]
pdf.cell(0, 10, f"Profit/Loss Ratio: {profit_loss_ratio}", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
pdf.cell(0, 10, f"Total Relative Profit: {relative_profit}", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
# Summary of Key Metrics
pdf.cell(0, 10, "\nSummary of Key Metrics:", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
pdf.cell(0, 10, f"Total Number of Trades: {profit_data['long_cnt'] + profit_data['short_cnt']}", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
pdf.cell(0, 10, f"Total Profit: {profit_data['sum_wins']}", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
pdf.cell(0, 10, f"Total Loss: {profit_data['sum_losses']}", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
best_trade_profit = max(profit_data["long_wins"], profit_data["short_wins"])
pdf.cell(0, 10, f"Best Trade Profit: {best_trade_profit}", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
worst_trade_profit = min(trade["profit"] for trade in prescr_trades_data)
pdf.cell(0, 10, f"Worst Trade Profit: {worst_trade_profit}", new_x=XPos.LMARGIN, new_y=YPos.NEXT)
if direct is False:
# Save PDF
pdf.output(output_file)
else:
# Instead of saving to a file, write to a BytesIO buffer
pdf_buffer = BytesIO()
pdf.output(pdf_buffer)
pdf_buffer.seek(0) # Move to the beginning of the BytesIO buffer
return 0, pdf_buffer
# Example usage:
if __name__ == '__main__':
id = "c3e31cb5-ddf9-467e-a932-2118f6844355"
res, val = create_trading_report_pdf(id, True)
print(res,val)

View File

@ -0,0 +1,382 @@
import matplotlib
import matplotlib.dates as mdates
matplotlib.use('Agg') # Set the Matplotlib backend to 'Agg'
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
from datetime import datetime
from typing import List
from enum import Enum
import numpy as np
import v2realbot.controller.services as cs
from rich import print
from v2realbot.common.PrescribedTradeModel import TradeDirection, TradeStatus, Trade, TradeStoplossType
from v2realbot.utils.utils import isrising, isfalling,zoneNY, price2dec, safe_get, print
from pathlib import Path
from v2realbot.config import WEB_API_KEY, DATA_DIR, MEDIA_DIRECTORY
from v2realbot.enums.enums import RecordType, StartBarAlign, Mode, Account, OrderSide
from io import BytesIO
# Assuming Trade, TradeStatus, TradeDirection, TradeStoplossType classes are defined elsewhere
def generate_trading_report_image(runner_ids: list = None, batch_id: str = None, stream: bool = False):
#TODO finish drawdown and non-cumulative minimum/maximum profits, think it through
#TODO list of runner_ids
#TODO hook into runner and batch creation, a separate REST API + into archived-runner removal
if runner_ids is None and batch_id is None:
return -2, f"runner_id or batch_id must be present"
if batch_id is not None:
res, runner_ids =cs.get_archived_runnerslist_byBatchID(batch_id)
if res != 0:
print(f"no batch {batch_id} found")
return -1, f"no batch {batch_id} found"
trades = []
for id in runner_ids:
#get runner
res, sada =cs.get_archived_runner_header_byID(id)
if res != 0:
print(f"no runner {id} found")
return -1, f"no runner {id} found"
print("archrunner")
print(sada)
# Parse trades
#trades = [Trade(**trade_dict) for trade_dict in set.metrics["prescr_trades"]]
trades_dicts = sada.metrics["prescr_trades"]
for trade_dict in trades_dicts:
trade_dict['last_update'] = datetime.fromtimestamp(trade_dict.get('last_update')).astimezone(zoneNY)
trade_dict['entry_time'] = datetime.fromtimestamp(trade_dict.get('entry_time')).astimezone(zoneNY)
trade_dict['exit_time'] = datetime.fromtimestamp(trade_dict.get('exit_time')).astimezone(zoneNY)
trades.append(Trade(**trade_dict))
print(trades)
# Filter to only use trades with status 'CLOSED'
closed_trades = [trade for trade in trades if trade.status == TradeStatus.CLOSED]
# Data extraction for the plots
exit_times = [trade.exit_time for trade in closed_trades if trade.exit_time is not None]
cumulative_profits = [trade.profit_sum for trade in closed_trades if trade.profit_sum is not None]
profits = [trade.profit for trade in closed_trades if trade.profit is not None]
wins = [trade.profit for trade in closed_trades if trade.profit > 0]
losses = [trade.profit for trade in closed_trades if trade.profit < 0]
wins_long = [trade.profit for trade in closed_trades if trade.profit > 0 and trade.direction == TradeDirection.LONG]
losses_long = [trade.profit for trade in closed_trades if trade.profit < 0 and trade.direction == TradeDirection.LONG]
wins_short = [trade.profit for trade in closed_trades if trade.profit > 0 and trade.direction == TradeDirection.SHORT]
losses_short = [trade.profit for trade in closed_trades if trade.profit < 0 and trade.direction == TradeDirection.SHORT]
directions = [trade.direction for trade in closed_trades]
long_profits = [trade.profit for trade in closed_trades if trade.direction == TradeDirection.LONG and trade.profit is not None]
short_profits = [trade.profit for trade in closed_trades if trade.direction == TradeDirection.SHORT and trade.profit is not None]
# Setting up dark mode for the plots
plt.style.use('dark_background')
# Optionally, you can further customize colors, labels, and axes
params = {
'axes.titlesize': 9,
'axes.labelsize': 8,
'xtick.labelsize': 9,
'ytick.labelsize': 9,
'axes.labelcolor': '#a9a9a9', #a1a3aa',
'axes.facecolor': '#121722', #'#0e0e0e', #202020', # Dark background for plot area
'axes.grid': False, # Turn off the grid globally
'grid.color': 'gray', # If the grid is on, set grid line color
'grid.linestyle': '--', # Grid line style
'grid.linewidth': 1,
'xtick.color': '#a9a9a9',
'ytick.color': '#a9a9a9',
'axes.edgecolor': '#a9a9a9'
}
plt.rcParams.update(params)
# Create a combined figure for all plots
fig, axs = plt.subplots(3, 4, figsize=(11, 7))
#TITLE
title = ""
cnt_ids = len(runner_ids)
if batch_id is not None:
title = "Batch: "+str(batch_id)+ " "
title += "Days: " + str(cnt_ids)
if cnt_ids == 1:
title += " ("+str(runner_ids[0])[0:14]+") "
if sada.mode == Mode.BT:
datum = sada.bt_from
else:
datum = sada.started
title += datum.strftime("%d.%m.%Y %H:%M")
# Add a title to the figure
fig.suptitle(title, fontsize=15, color='white')
# Plot 1: Overall Profit Summary Chart
total_wins = int(sum(wins))
total_losses = int(sum(losses))
net_profit = int(sum(profits))
sns.barplot(x=['Total', 'Wins','Losses'],
y=[net_profit, total_wins, total_losses],
ax=axs[0, 0])
axs[0, 0].set_title('Overall Profit Summary')
# Define the offset for placing text inside the bars
offset = max(total_wins, abs(total_losses), net_profit) * 0.05 # 5% of the highest (or lowest) bar value
# Function to place text annotation
def place_annotation(ax, x, value, offset):
va = 'top' if value >= 0 else 'bottom'
y = value - offset if value >= 0 else value + offset
ax.text(x, y, f'{value}', ha='center', va=va, color='black', fontsize=12)
# Annotate the Total Wins, Losses, and Net Profit bars
place_annotation(axs[0, 0], 0, net_profit, offset)
place_annotation(axs[0, 0], 1, total_wins, offset)
place_annotation(axs[0, 0], 2, total_losses, offset)
# Plot 2: LONG - profit summary
total_wins_long = int(sum(wins_long))
total_losses_long = int(sum(losses_long))
total_long = total_wins_long + total_losses_long
sns.barplot(x=['Total', 'Wins','Losses'],
y=[total_long, total_wins_long, total_losses_long],
ax=axs[0, 1])
axs[0, 1].set_title('LONG Profit Summary')
# Define the offset for placing text inside the bars
offset = max(total_wins_long, abs(total_losses_long)) * 0.05 # 5% of the highest (or lowest) bar value
place_annotation(axs[0, 1], 0, total_long, offset)
place_annotation(axs[0, 1], 1, total_wins_long, offset)
place_annotation(axs[0, 1], 2, total_losses_long, offset)
# Plot 3: SHORT - profit summary
total_wins_short =int(sum(wins_short))
total_losses_short = int(sum(losses_short))
total_short = total_wins_short + total_losses_short
sns.barplot(x=['Total', 'Wins', 'Losses'],
y=[total_short, total_wins_short,
total_losses_short],
ax=axs[0, 2])
axs[0, 2].set_title('SHORT Profit Summary')
# Define the offset for placing text inside the bars
offset = max(total_wins_short, abs(total_losses_short)) * 0.05 # 5% of the highest (or lowest) bar value
place_annotation(axs[0, 2], 0, total_short, offset)
place_annotation(axs[0, 2], 1, total_wins_short, offset)
place_annotation(axs[0, 2], 2, total_losses_short, offset)
# Plot 4: Trade Counts Bar Chart
long_count = len([trade for trade in closed_trades if trade.direction == TradeDirection.LONG])
short_count = len([trade for trade in closed_trades if trade.direction == TradeDirection.SHORT])
sns.barplot(x=['Long Trades', 'Short Trades'], y=[long_count, short_count], ax=axs[0, 3])
axs[0, 3].set_title('Trade Counts')
offset = max(long_count, short_count) * 0.05 # 5% of the highest (or lowest) bar value
place_annotation(axs[0, 3], 0, long_count, offset)
place_annotation(axs[0, 3], 1, short_count, offset)
#Cumulative profit - either a single day or multiple days
if len(runner_ids)== 1:
# Plot 3: Cumulative Profit Over Time with Max Profit Point
max_profit_time = exit_times[np.argmax(cumulative_profits)]
max_profit = max(cumulative_profits)
min_profit_time = exit_times[np.argmin(cumulative_profits)]
min_profit = min(cumulative_profits)
sns.lineplot(x=exit_times, y=cumulative_profits, label='Cumulative Profit', ax=axs[1, 3])
axs[1, 3].scatter(max_profit_time, max_profit, color='green', label='Max Profit')
axs[1, 3].scatter(min_profit_time, min_profit, color='red', label='Min Profit')
# Format dates on the x-axis
axs[1, 3].xaxis.set_major_formatter(mdates.DateFormatter('%H', tz=zoneNY))
axs[1, 3].set_title('Cumulative Profit Over Time')
axs[1, 3].legend()
else:
# Calculate cumulative profit
# Additional Plot: Cumulative Profit Over Time
# Sort trades by exit time
sorted_trades = sorted([trade for trade in trades if trade.status == TradeStatus.CLOSED],
key=lambda x: x.exit_time)
cumulative_profits = np.cumsum([trade.profit for trade in sorted_trades])
exit_times_sorted = [trade.exit_time for trade in sorted_trades]
axs[1, 3].plot(exit_times_sorted, cumulative_profits, color='blue')
axs[1, 3].set_title('Cumulative Profit Over Time')
axs[1, 3].set_xlabel('Time')
axs[1, 3].set_ylabel('Cumulative Profit')
axs[1, 3].xaxis.set_major_formatter(mdates.DateFormatter('%d', tz=zoneNY))
# Creating a DataFrame for the heatmap
heatmap_data_list = []
for trade in trades:
if trade.status == TradeStatus.CLOSED:
day = trade.exit_time.strftime('%m-%d') # Format date as 'MM-DD'
#day = trade.exit_time.date()
hour = trade.exit_time.hour
profit = trade.profit
heatmap_data_list.append({'Day': day, 'Hour': hour, 'Profit': profit})
heatmap_data = pd.DataFrame(heatmap_data_list)
heatmap_data = heatmap_data.groupby(['Day', 'Hour']).sum().reset_index()
heatmap_pivot = heatmap_data.pivot(index='Day', columns='Hour', values='Profit')
# Plot 3: Heatmap of Profits
sns.heatmap(heatmap_pivot, cmap='viridis', ax=axs[1, 0])
axs[1, 0].set_title('Heatmap of Profits (based on Exit time)')
axs[1, 0].set_xlabel('Hour of Day')
axs[1, 0].set_ylabel('Day')
# Plot 9: Profit/Loss Distribution Histogram
sns.histplot(profits, bins=30, ax=axs[1, 1], kde=True, color='skyblue')
axs[1, 1].set_title('Profit/Loss Distribution')
axs[1, 1].set_xlabel('Profit/Loss')
axs[1, 1].set_ylabel('Frequency')
# Plot 5
# - for a single day: Position Size Distribution
# - for multiple days: Trade Duration vs. Profit/Loss
if len(runner_ids) == 1:
sizes = [trade.size for trade in closed_trades if trade.size is not None]
size_counts = {size: sizes.count(size) for size in set(sizes)}
sns.barplot(x=list(size_counts.keys()), y=list(size_counts.values()), ax=axs[1, 2])
axs[1, 2].set_title('Position Size Distribution')
else:
trade_durations = []
trade_profits = []
#trade_volumes = [] # Assuming you have a way to measure the size/volume of each trade
trade_types = [] # 'Long' or 'Short'
for trade in trades:
if trade.status == TradeStatus.CLOSED:
duration = (trade.exit_time - trade.entry_time).total_seconds() / 60 # Duration in minutes (3600 for hours)
trade_durations.append(duration)
trade_profits.append(trade.profit)
##trade_volumes.append(trade.size) # or any other measure of trade size
trade_types.append('Long' if trade.direction == TradeDirection.LONG else 'Short')
# Plot 8: Trade Duration vs. Profit/Loss
scatter_data = pd.DataFrame({
'Duration': trade_durations,
'Profit': trade_profits,
#'Volume': trade_volumes,
'Type': trade_types
})
#sns.scatterplot(data=scatter_data, x='Duration', y='Profit', size='Volume', hue='Type', ax=axs[1, 2])
sns.scatterplot(data=scatter_data, x='Duration', y='Profit', hue='Type', ax=axs[1, 2])
axs[1, 2].set_title('Trade Duration vs. Profit/Loss')
axs[1, 2].set_xlabel('Duration (Minutes)')
axs[1, 2].set_ylabel('Profit/Loss')
# Plot 6: Daily Relative Profit Chart
if len(runner_ids) == 1:
daily_rel_profits = [trade.rel_profit for trade in closed_trades if trade.rel_profit is not None]
sns.lineplot(x=range(len(daily_rel_profits)), y=daily_rel_profits, ax=axs[2, 0])
axs[2, 0].set_title('Daily Relative Profit')
else:
# Creating a DataFrame for the heatmap
heatmap_data_list = []
for trade in trades:
if trade.status == TradeStatus.CLOSED:
day = trade.entry_time.strftime('%m-%d') # Format date as 'MM-DD'
#day = trade.entry_time.date()
hour = trade.entry_time.hour
profit = trade.profit
heatmap_data_list.append({'Day': day, 'Hour': hour, 'Profit': profit})
heatmap_data = pd.DataFrame(heatmap_data_list)
heatmap_data = heatmap_data.groupby(['Day', 'Hour']).sum().reset_index()
heatmap_pivot = heatmap_data.pivot(index='Day', columns='Hour', values='Profit')
# Plot 3: Heatmap of Profits
sns.heatmap(heatmap_pivot, cmap='viridis', ax=axs[2, 0])
axs[2, 0].set_title('Heatmap of Profits (based on Entry time)')
axs[2, 0].set_xlabel('Hour of Day')
axs[2, 0].set_ylabel('Day')
# Plot 8: Profits Based on Hour of the Day (Entry)
entry_hours = [trade.entry_time.hour for trade in closed_trades if trade.entry_time is not None]
profits_by_hour = {}
for hour, trade in zip(entry_hours, closed_trades):
if hour not in profits_by_hour:
profits_by_hour[hour] = 0
profits_by_hour[hour] += trade.profit
# Sorting by hour for plotting
sorted_hours = sorted(profits_by_hour.keys())
sorted_profits = [profits_by_hour[hour] for hour in sorted_hours]
sns.barplot(x=sorted_hours, y=sorted_profits, ax=axs[2, 1])
axs[2, 1].set_title('Profits by Hour of Day (Entry)')
axs[2, 1].set_xlabel('Hour of Day')
axs[2, 1].set_ylabel('Profit')
# Plot 9: Profits Based on Hour of the Day - based on Exit
exit_hours = [trade.exit_time.hour for trade in closed_trades if trade.exit_time is not None]
profits_by_hour = {}
for hour, trade in zip(exit_hours, closed_trades):
if hour not in profits_by_hour:
profits_by_hour[hour] = 0
profits_by_hour[hour] += trade.profit
# Sorting by hour for plotting
sorted_hours = sorted(profits_by_hour.keys())
sorted_profits = [profits_by_hour[hour] for hour in sorted_hours]
sns.barplot(x=sorted_hours, y=sorted_profits, ax=axs[2, 2])
axs[2, 2].set_title('Profits by Hour of Day (Exit)')
axs[2, 2].set_xlabel('Hour of Day')
axs[2, 2].set_ylabel('Profit')
# Calculate profits by day of the week
day_of_week_profits = {i: 0 for i in range(7)} # Dictionary to store profits for each day of the week
for trade in trades:
if trade.status == TradeStatus.CLOSED:
day_of_week = trade.exit_time.weekday() # Monday is 0 and Sunday is 6
day_of_week_profits[day_of_week] += trade.profit
days = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri']
# Additional Plot: Strategy Performance by Day of the Week
axs[2, 3].bar(days, [day_of_week_profits[i] for i in range(5)])
axs[2, 3].set_title('Profit by Day of the Week')
axs[2, 3].set_xlabel('Day of the Week')
axs[2, 3].set_ylabel('Cumulative Profit')
#filename
file = batch_id if batch_id is not None else runner_ids[0]
image_file_name = f"{file}.png"
image_path = str(MEDIA_DIRECTORY / "basic" / image_file_name)
# Adjust layout and save the combined plot as an image
plt.tight_layout()
if stream is False:
plt.savefig(image_path)
plt.close()
else:
# Return the image as a BytesIO stream
img_stream = BytesIO()
plt.savefig(img_stream, format='png')
plt.close()
img_stream.seek(0) # Rewind the stream to the beginning
return 0, img_stream
# Example usage
# trades = [list of Trade objects]
if __name__ == '__main__':
id_list = ["c3e31cb5-ddf9-467e-a932-2118f6844355"]
generate_trading_report_image(runner_ids=id_list)
# batch_id = "90973e57"
# generate_trading_report_image(batch_id=batch_id)
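For context, a minimal sketch (assuming the /media StaticFiles mount added in main.py in this commit) of how the saved file maps to the URL the frontend requests:

def basic_report_url(report_id: str) -> str:
    # media/basic/<id>.png on disk is served as /media/basic/<id>.png
    # by app.mount("/media", StaticFiles(directory=str(MEDIA_DIRECTORY)), name="media")
    return f"/media/basic/{report_id}.png"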

View File

@ -279,7 +279,9 @@
</div>
</div>
</div>
<div id="imagePreview" onclick="hideImage()">
<img src="" id="previewImg" style="width: auto; height: auto;" />
</div>
<div id="archive-table" class="flex-items">
<label data-bs-toggle="collapse" data-bs-target="#archive-table-inner">
<h4>Past Runs</h4>
@ -299,6 +301,7 @@
<button id="button_selpage" class="btn btn-outline-success btn-sm">Select all</button>
<button id="button_export_xml" class="btn btn-outline-success btn-sm">Export xml</button>
<button id="button_export_csv" class="btn btn-outline-success btn-sm">Export csv</button>
<button id="button_report" class="btn btn-outline-success btn-sm">Report</button>
<!-- <button id="button_stopall" class="btn btn-outline-success btn-sm">Stop All</button>
<button id="button_refresh" class="btn btn-outline-success btn-sm">Refresh</button> -->
</div>

View File

@ -110,9 +110,61 @@ function prepare_export() {
return trdList
}
function display_image(imageUrl) {
// Attempt to load the image
var img = new Image();
img.src = imageUrl;
img.onload = function() {
// If the image loads successfully, display it
$('#previewImg').attr('src', imageUrl);
$('#imagePreview').show();
};
img.onerror = function() {
console.log("no image available")
// If the image fails to load, do nothing
};
}
$(document).ready(function () {
archiveRecords.ajax.reload();
// Use 'td:nth-child(2)' to target the second column
$('#archiveTable tbody').on('click', 'td:nth-child(2)', function () {
var data = archiveRecords.row(this).data();
//var imageUrl = '/media/report_'+data.id+".png"; // Replace with your logic to get image URL
var imageUrl = '/media/basic/'+data.id+'.png'; // Replace with your logic to get image URL
console.log(imageUrl)
display_image(imageUrl)
});
// Use 'td:nth-child(18)' to target the batch column
$('#archiveTable tbody').on('click', 'td:nth-child(18)', function () {
var data = archiveRecords.row(this).data();
if (data.batch_id) {
//var imageUrl = '/media/report_'+data.id+".png"; // Replace with your logic to get image URL
var imageUrl = '/media/basic/'+data.batch_id+'.png'; // Replace with your logic to get image URL
console.log(imageUrl)
display_image(imageUrl)
}
});
// $('#archiveTable tbody').on('mouseleave', 'td:nth-child(2)', function () {
// $('#imagePreview').hide();
// });
// Hide image on click anywhere in the document
$(document).on('click', function() {
$('#imagePreview').hide();
});
function hideImage() {
$('#imagePreview').hide();
}
// $('#archiveTable tbody').on('mousemove', 'td:nth-child(2)', function(e) {
// $('#imagePreview').css({'top': e.pageY + 10, 'left': e.pageX + 10});
// });
//button export
$('#button_export_xml').click(function () {
xmled = convertToXml(prepare_export())
@ -360,6 +412,46 @@ $(document).ready(function () {
}
});
//generate report button
$('#button_report').click(function () {
rows = archiveRecords.rows('.selected');
if (rows == undefined) {
return
}
runnerIds = []
if(rows.data().length > 0 ) {
// Loop through the selected rows and display an alert with each row's ID
rows.every(function (rowIdx, tableLoop, rowLoop ) {
var data = this.data()
runnerIds.push(data.id);
});
}
$.ajax({
url:"/archived_runners/generatereportimage",
beforeSend: function (xhr) {
xhr.setRequestHeader('X-API-Key',
API_KEY); },
method:"POST",
xhrFields: {
responseType: 'blob'
},
contentType: "application/json",
processData: false,
data: JSON.stringify(runnerIds),
success:function(blob){
var url = window.URL || window.webkitURL;
console.log("vraceny obraz", blob)
console.log("url",url.createObjectURL(blob))
display_image(url.createObjectURL(blob))
},
error: function(xhr, status, error) {
console.log("proc to skace do erroru?")
//window.alert(JSON.stringify(xhr));
console.log(JSON.stringify(xhr));
}
})
});
//delete button
$('#button_delete_arch').click(function () {

View File

@ -164,7 +164,14 @@ table.dataTable thead>tr>th.sorting_asc:before, table.dataTable thead>tr>th.sort
--bs-gradient: none;
}
#imagePreview {
display: none;
position: fixed;
z-index: 100;
left: 50%;
top: 50%;
transform: translate(-50%, -50%);
}
/* .btn-outline-success {
--bs-btn-color: #316164;

View File

@ -129,14 +129,27 @@ class StrategyClassicSL(Strategy):
#if this is the final FILL - append this total relative profit to the list (the cumulative relative profit is computed from it)
rel_profit_cum_calculated = 0
partial_exit = False
partial_last = False
if data.event == TradeEvent.FILL:
#TODO with a partial exit the relative profit is computed correctly, but
# only for the smaller quantity, so deriving cum_calculate from it is wrong - FIX
self.state.rel_profit_cum.append(rel_profit)
rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
#this is a partial EXIT - we stash rel.profit in a temporary variable; after the last exit the real rel.profit is computed from them
if data.position_qty != 0:
self.state.docasny_rel_profit.append(rel_profit)
partial_exit = True
else:
#this is the last of the PARTIAL EXITs, i.e. data.position_qty == 0
if len(self.state.docasny_rel_profit) > 0:
#add the current rel profit
self.state.docasny_rel_profit.append(rel_profit)
#and compute the mean of this trade's rel profits, which is what actually gets stored
rel_profit = round(np.mean(self.state.docasny_rel_profit),5)
self.state.docasny_rel_profit = []
partial_last = True
self.state.ilog(e=f"BUY notif - SHORT PROFIT:{round(float(trade_profit),3)} celkem:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum=str(self.state.rel_profit_cum), bought_amount=bought_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
self.state.rel_profit_cum.append(rel_profit)
rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
self.state.ilog(e=f"BUY notif - SHORT PROFIT: {partial_exit=} {partial_last=} {round(float(trade_profit),3)} celkem:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum=str(self.state.rel_profit_cum), bought_amount=bought_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
#write the profit into prescr.trades
for trade in self.state.vars.prescribedTrades:
@ -260,12 +273,27 @@ class StrategyClassicSL(Strategy):
rel_profit = round((trade_profit / (vstup_cena * float(data.order.qty))) * 100,5)
rel_profit_cum_calculated = 0
#if this is the final FILL - append the relative profit to the list (the cumulative relative profit is computed from it)
partial_exit = False
partial_last = False
if data.event == TradeEvent.FILL:
self.state.rel_profit_cum.append(rel_profit)
rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
#this is a partial EXIT - we stash rel.profit in a temporary variable; after the last exit the real rel.profit is computed from them
if data.position_qty != 0:
self.state.docasny_rel_profit.append(rel_profit)
partial_exit = True
else:
#this is the last of the PARTIAL EXITs, i.e. data.position_qty == 0
if len(self.state.docasny_rel_profit) > 0:
#add the current rel profit
self.state.docasny_rel_profit.append(rel_profit)
#and compute the mean of this trade's rel profits, which is what actually gets stored
rel_profit = round(np.mean(self.state.docasny_rel_profit),5)
self.state.docasny_rel_profit = []
partial_last = True
self.state.ilog(e=f"SELL notif - PROFIT:{round(float(trade_profit),3)} celkem:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum = str(self.state.rel_profit_cum), sold_amount=sold_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
self.state.rel_profit_cum.append(rel_profit)
rel_profit_cum_calculated = round(np.sum(self.state.rel_profit_cum),5)
self.state.ilog(e=f"SELL notif - LONG PROFIT {partial_exit=} {partial_last=}:{round(float(trade_profit),3)} celkem:{round(float(self.state.profit),3)} rel:{float(rel_profit)} rel_cum:{round(rel_profit_cum_calculated,7)}", msg=str(data.event), rel_profit_cum = str(self.state.rel_profit_cum), sold_amount=sold_amount, avg_costs=avg_costs, trade_qty=data.qty, trade_price=data.price, orderid=str(data.order.id))
#write the profit into prescr.trades
for trade in self.state.vars.prescribedTrades:
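A small worked sketch of the partial-exit handling introduced above, with illustrative values: rel profits of partial fills are parked in docasny_rel_profit and only their mean enters rel_profit_cum on the final fill.

import numpy as np

docasny_rel_profit = []   # stands in for state.docasny_rel_profit
rel_profit_cum = []       # stands in for state.rel_profit_cum

# first partial exit, position_qty still != 0 -> just park the value
docasny_rel_profit.append(0.4)

# final exit, position_qty == 0 -> append the current value, average, reset
docasny_rel_profit.append(0.2)
rel_profit = round(np.mean(docasny_rel_profit), 5)   # 0.3
docasny_rel_profit = []
rel_profit_cum.append(rel_profit)                    # only the mean is accumulated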

View File

@ -707,6 +707,7 @@ class StrategyState:
self.runner_id = runner_id
self.bt = bt
self.dont_exit_already_activated = False
self.docasny_rel_profit = []
self.ilog_save = ilog_save
self.sl_optimizer_short = optimsl.SLOptimizer(ptm.TradeDirection.SHORT)
self.sl_optimizer_long = optimsl.SLOptimizer(ptm.TradeDirection.LONG)

View File

@ -82,7 +82,10 @@ def get_source_series(state, source: str):
try:
return state.bars[source]
except KeyError:
return state.indicators[source]
try:
return state.indicators[source]
except KeyError:
return None
else:
dict_name = source[:split_index]
key = source[split_index + 1:]

View File

@ -7,6 +7,7 @@ from datetime import datetime
from rich import print as printanyway
from traceback import format_exc
from v2realbot.strategyblocks.newtrade.conditions import go_conditions_met, common_go_preconditions_check
from v2realbot.strategyblocks.newtrade.sizing import get_size, get_multiplier
def signal_search(state: StrategyState, data):
# The SIGNAL section in stratvars contains the signals: they consist of general parameters and a subsection of conditions.
@ -42,7 +43,7 @@ def execute_signal_generator(state, data, name):
options = safe_get(state.vars.signals, name, None)
if options is None:
state.ilog(lvl=1,e="No options for {name} in stratvars")
state.ilog(lvl=1,e=f"No options for {name} in stratvars")
return
if common_go_preconditions_check(state, data, signalname=name, options=options) is False:
@ -71,20 +72,26 @@ def execute_signal_generator(state, data, name):
if long_enabled is False:
state.ilog(lvl=1,e=f"{name} LONG DISABLED")
if long_enabled and go_conditions_met(state, data,signalname=name, direction=TradeDirection.LONG):
multiplier = get_multiplier(state, data, options, TradeDirection.LONG)
state.vars.prescribedTrades.append(Trade(
id=uuid4(),
last_update=datetime.fromtimestamp(state.time).astimezone(zoneNY),
status=TradeStatus.READY,
generated_by=name,
size=multiplier*state.vars.chunk,
size_multiplier = multiplier,
direction=TradeDirection.LONG,
entry_price=None,
stoploss_value = None))
elif short_enabled and go_conditions_met(state, data, signalname=name, direction=TradeDirection.SHORT):
multiplier = get_multiplier(state, data, options, TradeDirection.SHORT)
state.vars.prescribedTrades.append(Trade(
id=uuid4(),
last_update=datetime.fromtimestamp(state.time).astimezone(zoneNY),
status=TradeStatus.READY,
generated_by=name,
size=multiplier*state.vars.chunk,
size_multiplier = multiplier,
direction=TradeDirection.SHORT,
entry_price=None,
stoploss_value = None))

View File

@ -0,0 +1,143 @@
from v2realbot.strategy.base import StrategyState
from v2realbot.common.PrescribedTradeModel import Trade, TradeDirection, TradeStatus
import v2realbot.utils.utils as utls
from v2realbot.config import KW
from uuid import uuid4
from datetime import datetime
from rich import print as printanyway
from traceback import format_exc
from v2realbot.strategyblocks.newtrade.conditions import go_conditions_met, common_go_preconditions_check
from v2realbot.strategyblocks.indicators.helpers import get_source_series
import numpy as np
def get_size(state: StrategyState, data, signaloptions: dict, direction: TradeDirection):
return state.vars.chunk * get_multiplier(state, data, signaloptions, direction)
def get_multiplier(state: StrategyState, data, signaloptions: dict, direction: TradeDirection):
""""
Function return dynamic sizing multiplier according to directive and current trades.
Default: state.vars.chunk
Additional sizing logic is layered on top of each other according to directives.
Currently supporting:
1) pattern sizing U-shape, hat-shape (x - bars, minutes, y - sizing multiplier 0 to 1 )
2) probes
Future ideas:
- ML sizing model
DIRECTIVES:
#sondy na zacatku market
probe_enabled = true # sonda zapnuta
number = 1 # pocet sond
probe_size = 0.01 #velikost sondy, nasobek def size
#pattern - dynamicka uprava na zaklade casu
pattern_enabled = true
pattern_source = "minutes" #or any series - indicators, bars or state etc. (index[-1], np.sum(state.rel_profit_cum)...)
pattern_source_vals = [0,30,90, 200, 300, 390]
pattern_sizing_vals = [0.1,0.5, 0.8, 1, 0.6, 0.1]
#np.interp(atr10, [0.001, 0.06], [1, 5])
#size_multiplier = np.interp(pattern_source, [SIZING_pattern_source_vals], [SIZING_pattern_sizing_vals])
#TODO
- pomocna graf pro vizualizaci interpolace - v tools/sizingpatternvisual.py
- dopsat do dokumentace direktiv - do tabulky
- ukládat sizing coeff do prescrTrades
- upravit výpočet denního relativniho profitu u tradu na základě vstupního sizing koeficientu
- vyresit zda max_oss_to_quit_rel aplikovat bud per alokovana pozice nebo trade
(pokud mam na trade, pak mi zafunguje i na minimalni sodnu) Zatim bude
realizován takto
- nejprve ověří rel profit tradu a pokud přesáhne, strategie se suspendne
- poté se rel profit tradu vynásobí multiplikátorem a započte se do denního rel profitu, jehož
výše se následně také ověří
NOTE: zatim neupraveno, a do denniho rel profitu se zapocitava plnym pomerem, diky tomu
si muzu dat i na sondu -0.5 suspend strategie. Nevyhoda: rel.profit presne neodpovida
[stratvars.signals.morning1.sizing] #specificke pro dany signal
probe_enabled = true
probe_size = 0.01
pattern_enabled = true
# pattern_source = "minutes" #or any series - indicators, bars or state etc. (index[-1], np.sum(state.rel_profit_cum)...)
pattern_source_axis = [0,30,90, 200, 300, 390]
pattern_size_axis = [0.1,0.5, 0.8, 1, 0.6, 0.1]
[stratvars.sizing] #obecne jako fallback pro vsechny signaly
probe_enabled = true
probe_size = 0.01
pattern_enabled = true
# pattern_source = "minutes" #or any series - indicators, bars or state etc. (index[-1], np.sum(state.rel_profit_cum)...)
pattern_source_axis = [0,30,90, 200, 300, 390]
pattern_size_axis = [0.1,0.5, 0.8, 1, 0.6, 0.1]
"""""
multiplier = 1
#fallback to the common sizing section
fallback_options = utls.safe_get(state.vars, 'sizing', None)
#signal-specific section
options = utls.safe_get(signaloptions, 'sizing', fallback_options)
if options is None:
state.ilog(lvl=1,e="No sizing options common or signal specific in stratvars")
return multiplier
#PROBE ENABLED
# probe_enabled = true # probe enabled
# probe_number = 1 # number of probes
# probe_size = 0.01 #probe size, multiple of the default size
probe_enabled = utls.safe_get(options, "probe_enabled", False)
if probe_enabled:
#for now only probe number 1, hard-coded, i.e. there must be no trade yet for activation
if state.vars.last_in_index is None:
#probe_number = utls.safe_get(options, "probe_number",1)
probe_size = float(utls.safe_get(options, "probe_size", 0.1))
state.ilog(lvl=1,e=f"SIZER - PROBE - setting multiplier to {probe_size}", options=options)
return probe_size
#SIZING PATTERN
# pattern_enabled = true
# pattern_source = "minutes" #or any series - indicators, bars or state etc. (index[-1], np.sum(state.rel_profit_cum)...)
# pattern_source_axis = [0,30,90, 200, 300, 390]
# pattern_size_axis = [0.1,0.5, 0.8, 1, 0.6, 0.1]
pattern_enabled = utls.safe_get(options, "pattern_enabled", False)
if pattern_enabled:
input_value = None
pattern_source = utls.safe_get(options, "pattern_source", "minutes")
#TODO in the future maybe allow an arbitrary series here, e.g. index, time, profit, rel_profit?
if pattern_source != "minutes":
input_value = eval(pattern_source, {'state': state, 'np': np, 'utls': utls}, state.ind_mapping)
if input_value is None:
state.ilog(lvl=1,e=f"SIZER - ERROR Pattern source is None, after evaluation of expression", options=str(options))
return multiplier
else:
input_value = utls.minutes_since_market_open(datetime.fromtimestamp(data['updated']).astimezone(utls.zoneNY))
pattern_source_axis = utls.safe_get(options, "pattern_source_axis", None)
pattern_size_axis = utls.safe_get(options, "pattern_size_axis", None)
if pattern_source_axis is None or pattern_size_axis is None:
state.ilog(lvl=1,e=f"SIZER - Pattern source and size axis must be set", options=str(options))
return multiplier
state.ilog(lvl=1,e=f"SIZER - Input value of {pattern_source} value {input_value}", options=options, time=state.time)
multiplier = np.interp(input_value, pattern_source_axis, pattern_size_axis)
state.ilog(lvl=1,e=f"SIZER - Interpolated value {multiplier}", input_value=input_value, pattern_source_axis=pattern_source_axis, pattern_size_axis=pattern_size_axis, options=options, time=state.time)
if multiplier > 1 or multiplier <= 0:
state.ilog(lvl=1,e=f"SIZER - Mame nekde problem MULTIPLIER mimo RANGE ERROR {multiplier}", options=options, time=state.time)
multiplier = 1
return multiplier
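A small worked example of the pattern interpolation documented above, using the axes from the directives; the chunk size of 10 is an illustrative assumption.

import numpy as np

pattern_source_axis = [0, 30, 90, 200, 300, 390]   # minutes since market open
pattern_size_axis = [0.1, 0.5, 0.8, 1, 0.6, 0.1]   # sizing multiplier

# 60 minutes after the open lies halfway between 30 and 90,
# so the multiplier interpolates halfway between 0.5 and 0.8
multiplier = np.interp(60, pattern_source_axis, pattern_size_axis)   # 0.65
size = multiplier * 10                                               # assumed chunk of 10 -> 6.5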

View File

@ -0,0 +1,29 @@
import numpy as np
import matplotlib.pyplot as plt
# Enter the pattern X and Y values here for VISUALIZATION
#minutes
pattern_x = [0, 30, 90, 200, 300, 390]
pattern_y = [0.1, 0.5, 0.8, 1, 0.6, 0.1]
#bar index - usable with time-scale bars
pattern_x = [0, 30, 90, 200, 300, 390]
pattern_y = [0.1, 0.5, 0.8, 1, 0.6, 0.1]
#total profit
# Generating a range of input values for interpolation
input_values = np.linspace(min(pattern_x), max(pattern_x), 500)
multipliers = np.interp(input_values, pattern_x, pattern_y)
# Plotting
plt.figure(figsize=(10, 6))
plt.plot(pattern_x, pattern_y, 'o', label='Original Points')
plt.plot(input_values, multipliers, label='Interpolated Values')
plt.xlabel('X values')
plt.ylabel('Interpolated Multipliers')
plt.title('Interpolation Chart')
plt.legend()
plt.grid(True)
plt.show()

View File

@ -464,6 +464,30 @@ def is_open_rush(dt: datetime, mins: int = 30):
rushtime = (datetime.combine(date.today(), business_hours["from"]) + timedelta(minutes=mins)).time()
return business_hours["from"] <= dt.time() < rushtime
#TODO rework in the future - load the market open time once in init and use it everywhere
#it is needed (9:30 is hard-coded here for now)
def minutes_since_market_open(datetime_aware: datetime):
"""
Calculate the number of minutes elapsed from 9:30 AM to the given timezone-aware datetime of the same day.
This version is optimized for speed and should be used when calling in a loop.
:param datetime_aware: A timezone-aware datetime object representing the time to compare.
:return: The number of minutes since today's 9:30 AM.
"""
# Ensure the input datetime is timezone-aware
if datetime_aware.tzinfo is None or datetime_aware.tzinfo.utcoffset(datetime_aware) is None:
raise ValueError("The input datetime must be timezone-aware.")
# Calculate minutes since midnight for both times
minutes_since_midnight = datetime_aware.hour * 60 + datetime_aware.minute
morning_minutes = 9 * 60 + 30
# Calculate the difference
delta_minutes = minutes_since_midnight - morning_minutes
return delta_minutes if delta_minutes >= 0 else 0
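A quick usage sketch of minutes_since_market_open; ZoneInfo("America/New_York") stands in for the module's zoneNY and the timestamp is illustrative.

from datetime import datetime
from zoneinfo import ZoneInfo

ny = ZoneInfo("America/New_York")              # stand-in for zoneNY
dt = datetime(2023, 11, 20, 10, 15, tzinfo=ny)
minutes_since_market_open(dt)                  # 615 - 570 = 45 minutes after the 9:30 open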
#optimized by BARD
def is_window_open(dt: datetime, start: int = 0, end: int = 390):
""""