first commit
This commit is contained in:
0
v2realbot/utils/__init__.py
Normal file
0
v2realbot/utils/__init__.py
Normal file
BIN
v2realbot/utils/__pycache__/__init__.cpython-310.pyc
Normal file
BIN
v2realbot/utils/__pycache__/__init__.cpython-310.pyc
Normal file
Binary file not shown.
BIN
v2realbot/utils/__pycache__/__init__.cpython-311.pyc
Normal file
BIN
v2realbot/utils/__pycache__/__init__.cpython-311.pyc
Normal file
Binary file not shown.
BIN
v2realbot/utils/__pycache__/dash_save_html.cpython-310.pyc
Normal file
BIN
v2realbot/utils/__pycache__/dash_save_html.cpython-310.pyc
Normal file
Binary file not shown.
BIN
v2realbot/utils/__pycache__/dash_save_html.cpython-311.pyc
Normal file
BIN
v2realbot/utils/__pycache__/dash_save_html.cpython-311.pyc
Normal file
Binary file not shown.
BIN
v2realbot/utils/__pycache__/tlog.cpython-310.pyc
Normal file
BIN
v2realbot/utils/__pycache__/tlog.cpython-310.pyc
Normal file
Binary file not shown.
BIN
v2realbot/utils/__pycache__/tlog.cpython-311.pyc
Normal file
BIN
v2realbot/utils/__pycache__/tlog.cpython-311.pyc
Normal file
Binary file not shown.
BIN
v2realbot/utils/__pycache__/utils.cpython-310.pyc
Normal file
BIN
v2realbot/utils/__pycache__/utils.cpython-310.pyc
Normal file
Binary file not shown.
BIN
v2realbot/utils/__pycache__/utils.cpython-311.pyc
Normal file
BIN
v2realbot/utils/__pycache__/utils.cpython-311.pyc
Normal file
Binary file not shown.
97
v2realbot/utils/dash_save_html.py
Normal file
97
v2realbot/utils/dash_save_html.py
Normal file
@ -0,0 +1,97 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import os
|
||||
import requests
|
||||
from html.parser import HTMLParser
|
||||
import requests
|
||||
from config import DATA_DIR
|
||||
|
||||
def patch_file(file_path: str, content: bytes, extra: dict = None) -> bytes:
    """Patch a saved Dash resource so it works as a static page.

    Only 'index.html' is modified: the pre-fetched JSON payloads in
    *extra* are embedded into the page, window.fetch is overridden to
    serve them, and absolute href/src URLs are rewritten to relative
    ones.  Every other file is returned unchanged.

    :param file_path: name of the file being written
    :param content: raw file bytes
    :param extra: mapping of URL path -> JSON bytes to embed (optional)
    :return: patched (or original) bytes
    """
    if file_path != 'index.html':
        return content

    # Guard against a missing mapping: the declared default is None, but the
    # original crashed on extra.items() in that case.
    extra = extra or {}

    index_html_content = content.decode('utf8')
    extra_jsons = f'''
    var patched_jsons_content={{
    {','.join(["'/" + k + "':" + v.decode("utf8") + "" for k, v in extra.items()])}
    }};
    '''
    # Inject the JSON payloads plus a fetch() shim just before </footer>,
    # then strip the leading slash from resource URLs so the page can be
    # opened from the filesystem.
    patched_content = index_html_content.replace(
        '<footer>',
        f'''
        <footer>
        <script>
        ''' + extra_jsons + '''
        const origFetch = window.fetch;
        window.fetch = function () {
            const e = arguments[0]
            if (patched_jsons_content.hasOwnProperty(e)) {
                return Promise.resolve({
                    json: () => Promise.resolve(patched_jsons_content[e]),
                    headers: new Headers({'content-type': 'application/json'}),
                    status: 200,
                });
            } else {
                return origFetch.apply(this, arguments)
            }
        }
        </script>
        '''
    ).replace(
        'href="/',
        'href="'
    ).replace(
        'src="/',
        'src="'
    )
    return patched_content.encode('utf8')
|
||||
|
||||
|
||||
def write_file(file_path: str, content: bytes, target_dir='target', ):
    """Write *content* to *file_path* under *target_dir*.

    Leading slashes and any query string ('?...') are stripped from the
    path, and intermediate directories are created as needed.

    :param file_path: relative (URL-style) path of the file
    :param content: raw bytes to write
    :param target_dir: root directory of the static snapshot
    """
    target_file_path = os.path.join(target_dir, file_path.lstrip('/').split('?')[0])
    # Make sure the leaf directory exists before opening the file.
    os.makedirs(os.path.dirname(target_file_path), exist_ok=True)
    with open(target_file_path, 'wb') as f:
        f.write(content)
|
||||
|
||||
|
||||
class ExternalResourceParser(HTMLParser):
    """Collect external resources referenced by an HTML document.

    Records every <link href=...> and <script src=...> URL, in document
    order, into `self.resources`.
    """

    def __init__(self):
        super().__init__()
        self.resources = []  # collected href/src values, in document order

    def handle_starttag(self, tag, attrs):
        # Only two tag/attribute pairs are of interest.
        wanted_attr = {'link': 'href', 'script': 'src'}.get(tag)
        if wanted_attr is None:
            return
        for name, value in attrs:
            if name == wanted_attr:
                self.resources.append(value)
|
||||
|
||||
|
||||
def make_static(base_url, target_dir=DATA_DIR):
    """Snapshot a running Dash app into static files under *target_dir*.

    Downloads index.html and the Dash JSON endpoints, patches the page so
    it works without a server, then fetches every referenced resource
    (plus the async chunks that never appear in the HTML) and writes the
    whole tree to disk.
    """
    index_html_bytes = requests.get(base_url).content

    # Pre-fetch the JSON endpoints the Dash front-end normally loads live.
    extra_json = {
        path: requests.get(base_url + path).content
        for path in ('_dash-layout', '_dash-dependencies')
    }

    patched_bytes = patch_file('index.html', index_html_bytes, extra=extra_json)
    write_file('index.html', patched_bytes, target_dir)

    parser = ExternalResourceParser()
    parser.feed(patched_bytes.decode('utf8'))

    # These chunks are loaded lazily at runtime, so they are not visible
    # to the HTML parser and must be listed explicitly.
    extra_js = [
        '_dash-component-suites/dash/dcc/async-graph.js',
        '_dash-component-suites/dash/dcc/async-plotlyjs.js',
        '_dash-component-suites/dash/dash_table/async-table.js',
        '_dash-component-suites/dash/dash_table/async-highlight.js'
    ]

    for resource_url in parser.resources + extra_js:
        resource_url_full = base_url + resource_url
        print(f'get {resource_url_full}')
        body = requests.get(resource_url_full).content
        write_file(resource_url, patch_file(resource_url, body), target_dir)
|
||||
40
v2realbot/utils/tlog.py
Normal file
40
v2realbot/utils/tlog.py
Normal file
@ -0,0 +1,40 @@
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
from rich import print
|
||||
|
||||
# Optional Google Cloud (Stackdriver) logging: enabled only when the project
# config sets `gcp_logger` truthy.  Any failure here (missing config module,
# missing google-cloud-logging package, bad credentials) silently disables
# cloud logging — tlog() then prints to stdout only.
try:
    from config import gcp_logger

    if gcp_logger:
        from google.cloud import logging

        logger = logging.Client().logger("realbotv2")
    else:
        logger = None
except Exception:
    logger = None
|
||||
|
||||
|
||||
def tlog(msg: str, origin: str = None) -> None:
    """Log a timestamped line to stdout, mirrored to GCP when enabled.

    When *origin* is not given, the caller's function name is used as the
    prefix (best effort — blank if frame introspection fails).
    """
    if origin:
        calling_fn = origin
    else:
        try:
            calling_fn = f"[{sys._getframe(1).f_code.co_name}()]"
        except Exception:
            calling_fn = ""

    if logger:
        # Cloud logging is best-effort: never let it break the caller.
        try:
            logger.log_text(f"{calling_fn}[{os.getpid()}] {msg}")
        except Exception as e:
            print(f"[Error] exception when trying to log to Stackdriver {e}")
    print(f"{calling_fn}[{os.getpid()}]{datetime.now()}:{msg}", flush=True)
|
||||
|
||||
|
||||
def tlog_exception(origin: str):
    """Print the current exception's traceback and mirror every line via tlog()."""
    traceback.print_exc()
    exc_info = sys.exc_info()
    for line in traceback.format_exception(*exc_info):
        tlog(f"{line}", origin)
    # Drop the reference explicitly to break the traceback frame cycle.
    del exc_info
|
||||
188
v2realbot/utils/utils.py
Normal file
188
v2realbot/utils/utils.py
Normal file
@ -0,0 +1,188 @@
|
||||
from msgpack.ext import Timestamp
|
||||
import math
|
||||
from queue import Queue
|
||||
from datetime import datetime, timezone, time, timedelta, date
|
||||
import pytz
|
||||
from dateutil import tz
|
||||
from rich import print
|
||||
import decimal
|
||||
from icecream import ic
|
||||
from v2realbot.enums.enums import RecordType, Mode, StartBarAlign
|
||||
import pickle
|
||||
import os
|
||||
from v2realbot.common.model import StrategyInstance, Runner
|
||||
from typing import List
|
||||
import tomli
|
||||
from config import DATA_DIR
|
||||
|
||||
#datetime to timestamp
|
||||
def json_serial(obj):
    """JSON serializer for objects not serializable by default json code.

    datetime -> POSIX timestamp (float); date -> timestamp of midnight on
    that day.  Raises TypeError for anything else, mirroring json's own
    behaviour.

    https://stackoverflow.com/questions/11875770/how-to-overcome-datetime-datetime-not-json-serializable
    """
    # Check datetime first: datetime is a subclass of date.
    if isinstance(obj, datetime):
        return obj.timestamp()
    if isinstance(obj, date):
        # date has no .timestamp(); the original combined isinstance check
        # matched date and then crashed with AttributeError.
        return datetime.combine(obj, time.min).timestamp()
    raise TypeError("Type %s not serializable" % type(obj))
|
||||
|
||||
def parse_toml_string(tomlst: str):
    """Parse a TOML string.

    Returns (0, parsed_dict) on success, with every "None" string value
    replaced by Python None (via dict_replace_value), or (-1, None) when
    the input is not valid TOML.
    """
    try:
        parsed = tomli.loads(tomlst)
    except tomli.TOMLDecodeError as e:
        print("Not valid TOML.", str(e))
        return (-1, None)
    return (0, dict_replace_value(parsed, "None", None))
|
||||
|
||||
#class to persist
|
||||
#class to persist
class Store:
    """Pickle-backed store of strategy instances and runners.

    NOTE(review): `stratins` and `runners` are class attributes, so the
    empty-list defaults are shared by all instances; loading the cache in
    __init__ rebinds `stratins` as an instance attribute over the class
    default.
    """
    # All known strategy instances (restored from the pickle cache if present).
    stratins : List[StrategyInstance] = []
    # Currently running strategies.
    runners: List[Runner] = []
    def __init__(self) -> None:
        # Cache file lives inside the configured data directory.
        self.db_file = DATA_DIR + "/strategyinstances.cache"
        if os.path.exists(self.db_file):
            with open (self.db_file, 'rb') as fp:
                self.stratins = pickle.load(fp)

    def save(self):
        # Persist the in-memory strategy list (plain overwrite of the cache).
        with open(self.db_file, 'wb') as fp:
            pickle.dump(self.stratins, fp)
|
||||
|
||||
# Global FIFO queue shared across the application (producer/consumer hand-off).
qu = Queue()

# New York exchange timezone; all market-hours helpers below convert to it.
zoneNY = tz.gettz('America/New_York')

# Deliberately shadows builtins.print for this module: all prints are routed
# through icecream's ic() for richer debug output.
def print(*args, **kwargs):
    ic(*args, **kwargs)
|
||||
|
||||
def price2dec(price: float) -> float:
    """Cap a price at two decimal places.

    Prices with three or more decimals are rounded to two; shorter ones
    are returned untouched.
    """
    if count_decimals(price) > 2:
        return round(price, 2)
    return price
|
||||
|
||||
def count_decimals(number: float) -> int:
    """Count the number of decimals in a given float: 1.4335 -> 4 or 3 -> 0."""
    exponent = decimal.Decimal(str(number)).as_tuple().exponent
    return abs(exponent)
|
||||
|
||||
def p(var, n = None):
    """Debug-print *var* together with its source expression, optionally prefixed by *n*."""
    if n:
        print(n, f'{var = }')
    else:
        print(f'{var = }')
|
||||
|
||||
def is_open_rush(dt: datetime, mins: int = 30):
    """Return True if *dt* falls within the morning rush (market open + *mins* minutes)."""
    dt = dt.astimezone(zoneNY)
    open_time = time(hour=9, minute=30)
    # End of the rush window, computed as a plain time-of-day.
    rush_end = (datetime.combine(date.today(), open_time) + timedelta(minutes=mins)).time()
    return open_time <= dt.time() < rush_end
|
||||
|
||||
def is_close_rush(dt: datetime, mins: int = 30):
    """
    Returns true if time is within the closing rush (close - mins up to close).
    """
    # Docstring fixed: it previously described the *morning* rush (copy/paste
    # from is_open_rush) although the logic checks the window before close.
    dt = dt.astimezone(zoneNY)
    business_hours = {
        "from": time(hour=9, minute=30),
        "to": time(hour=16, minute=0)
    }
    rushtime = (datetime.combine(date.today(), business_hours["to"]) - timedelta(minutes=mins)).time()
    return rushtime <= dt.time() <= business_hours["to"]
|
||||
|
||||
def is_open_hours(dt):
    """Return True when *dt* falls inside regular market hours.

    Regular hours: Monday-Friday, 9:30-16:00 New York time.  Only two
    hard-coded holiday dates are excluded; a full holiday calendar is not
    implemented yet.
    """
    dt = dt.astimezone(zoneNY)

    open_time = time(hour=9, minute=30)
    close_time = time(hour=16, minute=0)
    # monday = 0, tuesday = 1, ... same pattern as date.weekday()
    trading_weekdays = (0, 1, 2, 3, 4)

    holidays = [date(2022, 12, 24), date(2022, 2, 24)]

    if dt.weekday() not in trading_weekdays:
        return False
    if dt.date() in holidays:
        return False
    return open_time <= dt.time() < close_time
|
||||
|
||||
def isfalling(pole: list, pocet: int):
    """Return True when the last *pocet* items of *pole* are strictly decreasing.

    Returns False when the list holds fewer than *pocet* items.
    """
    if len(pole) < pocet:
        return False
    tail = pole[-pocet:]
    return all(earlier > later for earlier, later in zip(tail, tail[1:]))
|
||||
|
||||
def isrising(pole: list, pocet: int):
    """Return True when the last *pocet* items of *pole* are strictly increasing.

    Returns False when the list holds fewer than *pocet* items.
    """
    if len(pole) < pocet:
        return False
    tail = pole[-pocet:]
    return all(earlier < later for earlier, later in zip(tail, tail[1:]))
|
||||
|
||||
def parse_alpaca_timestamp(value: Timestamp):
    """Convert a msgpack Timestamp (seconds + nanoseconds) to a float epoch time."""
    return value.seconds + value.nanoseconds * 1e-9
|
||||
|
||||
# Last-traded-price cache.
# NOTE(review): `price` and `time` are class attributes, so these dicts are
# shared by every instance — this acts as module-wide mutable state.
# Presumably keyed by symbol; confirm against callers.
class ltp:
    price={}
    time={}
|
||||
|
||||
def trunc(f, n):
    """Floor *f* to *n* decimal places.

    Uses math.floor, so negative values are floored toward -inf rather
    than truncated toward zero.
    """
    scale = 10 ** n
    return math.floor(f * scale) / scale
|
||||
|
||||
class AttributeDict(dict):
    """A dict whose keys are also readable and writable as attributes.

    print(account.BTC) -> {'available': 1, 'hold': 0}
    account.BTC = "cool"
    print(account.BTC) -> cool

    Basically you can get and set attributes with a dot instead of [] -
    like dict.available rather than dict['available'].  No other
    modifications to dict behaviour.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Point the attribute namespace at the mapping itself so attribute
        # access and item access share the same storage.
        self.__dict__ = self
|
||||
|
||||
"""""
|
||||
Helper to replace value in nested dictionaries. Used for TOML to replace "None" string to None type
|
||||
Also used to type enums.
|
||||
# See input and output below
|
||||
output = dict_replace_value(input, 'string', 'something')
|
||||
"""""
|
||||
def dict_replace_value(d: AttributeDict, old: str, new) -> AttributeDict:
    """Return a copy of *d* with every string value equal to *old* replaced by *new*.

    Recurses into nested dicts and lists.  Additionally coerces the values
    of the keys "rectype", "align" and "mode" into their enum types
    (RecordType, StartBarAlign, Mode) — used when loading TOML configs.
    """
    result = {}
    for key, value in d.items():
        if isinstance(value, dict):
            value = dict_replace_value(value, old, new)
        elif isinstance(value, list):
            value = list_replace_value(value, old, new)
        elif isinstance(value, str):
            if value == old:
                value = new
            # Enum typing happens after the replacement, as in the original.
            if key == "rectype":
                value = RecordType(value)
            elif key == "align":
                value = StartBarAlign(value)
            elif key == "mode":
                value = Mode(value)
        result[key] = value
    return result
|
||||
|
||||
|
||||
def list_replace_value(l: list, old: str, new) -> list:
    """Return a copy of *l* with every string element equal to *old* replaced by *new*.

    Recurses into nested lists and (via dict_replace_value) nested dicts.
    """
    out = []
    for item in l:
        if isinstance(item, list):
            item = list_replace_value(item, old, new)
        elif isinstance(item, dict):
            item = dict_replace_value(item, old, new)
        elif isinstance(item, str):
            item = new if item == old else item
        out.append(item)
    return out
|
||||
|
||||
Reference in New Issue
Block a user