@@ -218,6 +218,8 @@ except ValueError:
     print(f"Invalid data feed type: {data_feed_type_str} in LIVE_DATA_FEED defaulting to 'iex'")
     LIVE_DATA_FEED = DataFeed.IEX
 
+# LIVE_DATA_FEED = DataFeed.IEX
+
 class KW:
     activate: str = "activate"
     dont_go: str = "dont_go"
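The `except ValueError:` context above implies that LIVE_DATA_FEED is built by passing a configuration string to alpaca-py's `DataFeed` enum and falling back to IEX when the string is not a valid member value. A minimal sketch of that pattern, assuming a hypothetical `data_feed_type_str` read from the project's config (the variable here is illustrative, not the actual loader):

```python
# Sketch (assumption): turn a config string into alpaca-py's DataFeed enum,
# defaulting to IEX when the string is not a valid member value.
from alpaca.data.enums import DataFeed

data_feed_type_str = "sip"  # hypothetical value; must be lowercase ("iex", "sip", "otc")

try:
    LIVE_DATA_FEED = DataFeed(data_feed_type_str)   # DataFeed("sip") -> DataFeed.SIP
except ValueError:
    print(f"Invalid data feed type: {data_feed_type_str} in LIVE_DATA_FEED defaulting to 'iex'")
    LIVE_DATA_FEED = DataFeed.IEX
```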
@@ -2,7 +2,7 @@ from v2realbot.loader.aggregator import TradeAggregator, TradeAggregator2List, T
 #from v2realbot.loader.cacher import get_cached_agg_data
 from alpaca.trading.requests import GetCalendarRequest
 from alpaca.data.live import StockDataStream
-from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE
+from v2realbot.config import ACCOUNT1_PAPER_API_KEY, ACCOUNT1_PAPER_SECRET_KEY, DATA_DIR, OFFLINE_MODE, LIVE_DATA_FEED
 from alpaca.data.enums import DataFeed
 from alpaca.data.historical import StockHistoricalDataClient
 from alpaca.data.requests import StockLatestQuoteRequest, StockBarsRequest, StockTradesRequest
@@ -191,9 +191,10 @@ class Trade_Offline_Streamer(Thread):
 # stream.send_cache_to_output(cache)
 # to_rem.append(stream)
 
-#cache resime jen kdyz backtestujeme cely den
+#cache resime jen kdyz backtestujeme cely den a mame sip datapoint (iex necachujeme)
 #pokud ne tak ani necteme, ani nezapisujeme do cache
-if self.time_to >= day.close and self.time_from <= day.open:
+
+if (self.time_to >= day.close and self.time_from <= day.open) and LIVE_DATA_FEED == DataFeed.SIP:
     #tento odstavec obchazime pokud je nastaveno "dont_use_cache"
     stream_btdata = self.to_run[symbpole[0]][0]
     cache_btdata, file_btdata = stream_btdata.get_cache(day.open, day.close)
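The Czech comments above say, roughly: the cache is only used when a whole trading day is being backtested and the configured data point is SIP (IEX data is not cached); otherwise the cache is neither read nor written. A self-contained sketch of that read-side gate, with `day_open`/`day_close` standing in for the Alpaca calendar entry's session bounds (names are illustrative, not the streamer's actual attributes):

```python
# Sketch (assumption): decide whether the cached whole-day trade file may be used.
from datetime import datetime
from alpaca.data.enums import DataFeed

def can_use_trade_cache(time_from: datetime, time_to: datetime,
                        day_open: datetime, day_close: datetime,
                        live_data_feed: DataFeed) -> bool:
    # The requested window must cover the full session (open..close) and the
    # feed must be SIP; partial days and IEX snapshots bypass the cache.
    covers_full_session = time_to >= day_close and time_from <= day_open
    return covers_full_session and live_data_feed == DataFeed.SIP
```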
@@ -249,9 +250,9 @@ class Trade_Offline_Streamer(Thread):
 # tradesResponse = self.client.get_stock_trades(stockTradeRequest)
 print("Remote Fetch DAY DATA Complete", day.open, day.close)
 
-#pokud jde o dnešní den a nebyl konec trhu tak cache neukládáme
-if day.open < datetime.now().astimezone(zoneNY) < day.close:
-    print("not saving trade cache, market still open today")
+#pokud jde o dnešní den a nebyl konec trhu tak cache neukládáme, pripadne pri iex datapointu necachujeme
+if (day.open < datetime.now().astimezone(zoneNY) < day.close) or LIVE_DATA_FEED == DataFeed.IEX:
+    print("not saving trade cache, market still open today or IEX datapoint")
     #ic(datetime.now().astimezone(zoneNY))
     #ic(day.open, day.close)
 else:
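Symmetrically, the changed guard above skips saving the cache when the session is still in progress (now falls between today's open and close in New York time) or when the feed is IEX, since that data would be incomplete or deliberately uncached. A sketch of the same write-side guard, assuming `zoneNY` is the America/New_York zone the module uses elsewhere:

```python
# Sketch (assumption): write-side guard mirroring the changed condition above.
from datetime import datetime
from zoneinfo import ZoneInfo
from alpaca.data.enums import DataFeed

zoneNY = ZoneInfo("America/New_York")  # assumed equivalent of the project's zoneNY

def should_skip_cache_save(day_open: datetime, day_close: datetime,
                           live_data_feed: DataFeed) -> bool:
    now_ny = datetime.now().astimezone(zoneNY)
    # Today's trades are incomplete while the market is still open, and IEX
    # data is never cached, so either condition skips the save.
    return (day_open < now_ny < day_close) or live_data_feed == DataFeed.IEX
```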
@@ -42,6 +42,7 @@ import mlroom.utils.mlutils as ml
 from typing import List
 import v2realbot.controller.run_manager as rm
 import v2realbot.scheduler.ap_scheduler as aps
+import re
 #from async io import Queue, QueueEmpty
 #
 # install()
@@ -557,30 +558,65 @@ def _get_archived_runner_log_byID(runner_id: UUID, timestamp_from: float, timest
     else:
         raise HTTPException(status_code=404, detail=f"No logs found with id: {runner_id} and between {timestamp_from} and {timestamp_to}")
 
+def remove_ansi_codes(text):
+    ansi_escape = re.compile(r'\x1B[@-_][0-?]*[ -/]*[@-~]')
+    return ansi_escape.sub('', text)
+
 # endregion
 # A simple function to read the last lines of a file
-def tail(file_path, n=10, buffer_size=1024):
+# def tail(file_path, n=10, buffer_size=1024):
+#     try:
+#         with open(file_path, 'rb') as f:
+#             f.seek(0, 2) # Move to the end of the file
+#             file_size = f.tell()
+#             lines = []
+#             buffer = bytearray()
+
+#             for i in range(file_size // buffer_size + 1):
+#                 read_start = max(-buffer_size * (i + 1), -file_size)
+#                 f.seek(read_start, 2)
+#                 read_size = min(buffer_size, file_size - buffer_size * i)
+#                 buffer[0:0] = f.read(read_size) # Prepend to buffer
+
+#                 if buffer.count(b'\n') >= n + 1:
+#                     break
+
+#             lines = buffer.decode(errors='ignore').splitlines()[-n:]
+#             lines = [remove_ansi_codes(line) for line in lines]
+#             return lines
+#     except Exception as e:
+#         return [str(e) + format_exc()]
+
+#updated version that reads lines line by line
+def tail(file_path, n=10):
     try:
         with open(file_path, 'rb') as f:
             f.seek(0, 2) # Move to the end of the file
             file_size = f.tell()
             lines = []
-            buffer = bytearray()
+            line = b''
 
-            for i in range(file_size // buffer_size + 1):
-                read_start = max(-buffer_size * (i + 1), -file_size)
-                f.seek(read_start, 2)
-                read_size = min(buffer_size, file_size - buffer_size * i)
-                buffer[0:0] = f.read(read_size) # Prepend to buffer
+            f.seek(-1, 2) # Start at the last byte
+            while len(lines) < n and f.tell() != 0:
+                byte = f.read(1)
+                if byte == b'\n':
+                    # Decode, remove ANSI codes, and append the line
+                    lines.append(remove_ansi_codes(line.decode(errors='ignore')))
+                    line = b''
+                else:
+                    line = byte + line
+                f.seek(-2, 1) # Move backwards by two bytes
 
-                if buffer.count(b'\n') >= n + 1:
-                    break
+            if line:
+                # Append any remaining line after removing ANSI codes
+                lines.append(remove_ansi_codes(line.decode(errors='ignore')))
 
-            lines = buffer.decode(errors='ignore').splitlines()[-n:]
-            return lines
+            return lines[::-1] # Reverse the list to get the lines in correct order
     except Exception as e:
-        return [str(e) + format_exc()]
+        return [str(e)]
 
+
+
 @app.get("/log", dependencies=[Depends(api_key_auth)])
 def read_log(lines: int = 700, logfile: str = "strat.log"):
     log_path = LOG_PATH / logfile
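The new `remove_ansi_codes` helper strips terminal colour sequences, and the rewritten `tail` walks the file backwards one byte at a time, cutting on newlines and reversing the collected list so the last n lines come back in file order. A self-contained sketch of the same ANSI-stripping regex in action (the coloured sample line is made up):

```python
import re

# Same pattern as the remove_ansi_codes helper above: ESC, an introducer such
# as '[', optional parameter and intermediate bytes, then a final byte.
ansi_escape = re.compile(r'\x1B[@-_][0-?]*[ -/]*[@-~]')

colored = "\x1b[32mINFO\x1b[0m strategy started"  # hypothetical log line
print(ansi_escape.sub('', colored))               # -> INFO strategy started
```

Reading one byte at a time avoids loading the whole log into memory, at the cost of one read() call per byte; the buffered variant it replaces is kept above as a comment.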
@@ -1159,7 +1159,7 @@
 <!-- <script src="/static/js/archivetables.js?v=1.05"></script> -->
 <!-- archiveTables split into separate files -->
 <script src="/static/js/tables/archivetable/init.js?v=1.09"></script>
-<script src="/static/js/tables/archivetable/functions.js?v=1.08"></script>
+<script src="/static/js/tables/archivetable/functions.js?v=1.09"></script>
 <script src="/static/js/tables/archivetable/modals.js?v=1.07"></script>
 <script src="/static/js/tables/archivetable/handlers.js?v=1.07"></script>
 
@@ -468,7 +468,8 @@ function refresh_logfile() {
             $('#log-content').html("no records");
         }
         else {
-            $('#log-content').html(response.lines.join('\n'));
+            var escapedLines = response.lines.map(line => escapeHtml(line));
+            $('#log-content').html(escapedLines.join('\n'));
         }
     },
     error: function(xhr, status, error) {
@@ -478,6 +479,14 @@ function refresh_logfile() {
     })
 }
 
+function escapeHtml(text) {
+    return text
+        .replace(/&/g, "&amp;")
+        .replace(/</g, "&lt;")
+        .replace(/>/g, "&gt;")
+        .replace(/"/g, "&quot;")
+        .replace(/'/g, "&#039;");
+}
 function delete_arch_rows(ids) {
     $.ajax({
         url:"/archived_runners/",
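With this change (and the matching edit in refresh_logfile above), each tail line is HTML-escaped in the browser before being written into #log-content, so log text containing <, >, & or quotes is shown literally instead of being parsed as markup. For comparison, the same escaping is available server-side in Python's standard library; a minimal sketch of that alternative (not what this commit does, which escapes on the client):

```python
import html

line = 'GET /log?lines=5 -> <Response [200]> & "ok"'  # hypothetical log line
print(html.escape(line, quote=True))
# GET /log?lines=5 -&gt; &lt;Response [200]&gt; &amp; &quot;ok&quot;
```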