David Brazda
2024-02-24 20:32:01 +07:00
parent 73fef65309
commit ca3565132d
5 changed files with 68 additions and 20 deletions

View File

@@ -218,6 +218,8 @@ except ValueError:
print(f"Invalid data feed type: {data_feed_type_str} in LIVE_DATA_FEED defaulting to 'iex'")
LIVE_DATA_FEED = DataFeed.IEX
# LIVE_DATA_FEED = DataFeed.IEX
class KW:
activate: str = "activate"
dont_go: str = "dont_go"

View File

@@ -2,7 +2,7 @@ from v2realbot.loader.aggregator import TradeAggregator, TradeAggregator2List, T
#from v2realbot.loader.cacher import get_cached_agg_data
from alpaca.trading.requests import GetCalendarRequest
from alpaca.data.live import StockDataStream
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE
from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE, LIVE_DATA_FEED
from alpaca.data.enums import DataFeed
from alpaca.data.historical import StockHistoricalDataClient
from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
@@ -191,9 +191,10 @@ class Trade_Offline_Streamer(Thread):
# stream.send_cache_to_output(cache)
# to_rem.append(stream)
#we handle the cache only when backtesting a whole day
#we handle the cache only when backtesting a whole day and have a SIP data point (we do not cache IEX)
#if not, we neither read from nor write to the cache
if self.time_to >= day.close and self.time_from <= day.open:
if (self.time_to >= day.close and self.time_from <= day.open) and LIVE_DATA_FEED == DataFeed.SIP:
#this block is skipped when "dont_use_cache" is set
stream_btdata = self.to_run[symbpole[0]][0]
cache_btdata, file_btdata = stream_btdata.get_cache(day.open, day.close)
@@ -249,9 +250,9 @@
# tradesResponse = self.client.get_stock_trades(stockTradeRequest)
print("Remote Fetch DAY DATA Complete", day.open, day.close)
#if it is today's date and the market has not closed yet, we do not save the cache
if day.open < datetime.now().astimezone(zoneNY) < day.close:
print("not saving trade cache, market still open today")
#if it is today's date and the market has not closed yet, we do not save the cache; likewise we do not cache for the IEX data point
if (day.open < datetime.now().astimezone(zoneNY) < day.close) or LIVE_DATA_FEED == DataFeed.IEX:
print("not saving trade cache, market still open today or IEX datapoint")
#ic(datetime.now().astimezone(zoneNY))
#ic(day.open, day.close)
else:
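Taken together, the two condition changes in this file gate the per-day trade cache on the configured feed: the cache is read only for full-day backtests on SIP data, and it is not written while today's session is still open or when running on IEX. A hedged sketch of that intent (the helper names below are illustrative, not part of the commit):

from alpaca.data.enums import DataFeed

def should_read_trade_cache(time_from, time_to, day_open, day_close, live_feed):
    # read the cached day only when the backtest spans the whole session and the feed is SIP
    return time_to >= day_close and time_from <= day_open and live_feed == DataFeed.SIP

def should_write_trade_cache(now, day_open, day_close, live_feed):
    # skip writing while today's session is still open, or when the data point is IEX
    market_still_open = day_open < now < day_close
    return not market_still_open and live_feed != DataFeed.IEX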

View File

@@ -42,6 +42,7 @@ import mlroom.utils.mlutils as ml
from typing import List
import v2realbot.controller.run_manager as rm
import v2realbot.scheduler.ap_scheduler as aps
import re
#from asyncio import Queue, QueueEmpty
#
# install()
@@ -557,30 +558,65 @@ def _get_archived_runner_log_byID(runner_id: UUID, timestamp_from: float, timest
else:
raise HTTPException(status_code=404, detail=f"No logs found with id: {runner_id} and between {timestamp_from} and {timestamp_to}")
def remove_ansi_codes(text):
ansi_escape = re.compile(r'\x1B[@-_][0-?]*[ -/]*[@-~]')
return ansi_escape.sub('', text)
# endregion
# A simple function to read the last lines of a file
def tail(file_path, n=10, buffer_size=1024):
# def tail(file_path, n=10, buffer_size=1024):
# try:
# with open(file_path, 'rb') as f:
# f.seek(0, 2) # Move to the end of the file
# file_size = f.tell()
# lines = []
# buffer = bytearray()
# for i in range(file_size // buffer_size + 1):
# read_start = max(-buffer_size * (i + 1), -file_size)
# f.seek(read_start, 2)
# read_size = min(buffer_size, file_size - buffer_size * i)
# buffer[0:0] = f.read(read_size) # Prepend to buffer
# if buffer.count(b'\n') >= n + 1:
# break
# lines = buffer.decode(errors='ignore').splitlines()[-n:]
# lines = [remove_ansi_codes(line) for line in lines]
# return lines
# except Exception as e:
# return [str(e) + format_exc()]
#updated version that reads lines line by line
def tail(file_path, n=10):
try:
with open(file_path, 'rb') as f:
f.seek(0, 2) # Move to the end of the file
file_size = f.tell()
lines = []
buffer = bytearray()
line = b''
for i in range(file_size // buffer_size + 1):
read_start = max(-buffer_size * (i + 1), -file_size)
f.seek(read_start, 2)
read_size = min(buffer_size, file_size - buffer_size * i)
buffer[0:0] = f.read(read_size) # Prepend to buffer
f.seek(-1, 2) # Start at the last byte
while len(lines) < n and f.tell() != 0:
byte = f.read(1)
if byte == b'\n':
# Decode, remove ANSI codes, and append the line
lines.append(remove_ansi_codes(line.decode(errors='ignore')))
line = b''
else:
line = byte + line
f.seek(-2, 1) # Move backwards by two bytes
if buffer.count(b'\n') >= n + 1:
break
if line:
# Append any remaining line after removing ANSI codes
lines.append(remove_ansi_codes(line.decode(errors='ignore')))
lines = buffer.decode(errors='ignore').splitlines()[-n:]
return lines
return lines[::-1] # Reverse the list to get the lines in correct order
except Exception as e:
return [str(e) + format_exc()]
return [str(e)]
@app.get("/log", dependencies=[Depends(api_key_auth)])
def read_log(lines: int = 700, logfile: str = "strat.log"):
log_path = LOG_PATH / logfile
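For comparison only (not part of this commit), the same last-n-lines behaviour can be sketched with collections.deque; it is simpler, but reads the whole file forwards instead of seeking backwards:

import re
from collections import deque

ANSI_ESCAPE = re.compile(r'\x1B[@-_][0-?]*[ -/]*[@-~]')  # same pattern as remove_ansi_codes above

def tail_simple(file_path, n=10):
    # keep only the last n lines while iterating the file once, then strip ANSI codes
    with open(file_path, 'r', errors='ignore') as f:
        last_lines = deque(f, maxlen=n)
    return [ANSI_ESCAPE.sub('', line.rstrip('\n')) for line in last_lines]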

View File

@@ -1159,7 +1159,7 @@
<!-- <script src="/static/js/archivetables.js?v=1.05"></script> -->
<!-- archiveTables split into separate files -->
<script src="/static/js/tables/archivetable/init.js?v=1.09"></script>
<script src="/static/js/tables/archivetable/functions.js?v=1.08"></script>
<script src="/static/js/tables/archivetable/functions.js?v=1.09"></script>
<script src="/static/js/tables/archivetable/modals.js?v=1.07"></script>
<script src="/static/js/tables/archivetable/handlers.js?v=1.07"></script>

View File

@@ -468,7 +468,8 @@ function refresh_logfile() {
$('#log-content').html("no records");
}
else {
$('#log-content').html(response.lines.join('\n'));
var escapedLines = response.lines.map(line => escapeHtml(line));
$('#log-content').html(escapedLines.join('\n'));
}
},
error: function(xhr, status, error) {
@@ -478,6 +479,14 @@ function refresh_logfile() {
})
}
function escapeHtml(text) {
return text
.replace(/&/g, "&amp;")
.replace(/</g, "&lt;")
.replace(/>/g, "&gt;")
.replace(/"/g, "&quot;")
.replace(/'/g, "&#039;");
}
function delete_arch_rows(ids) {
$.ajax({
url:"/archived_runners/",