# coding=utf-8
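"""ETH valuation-indicator pipeline.

Loads historical ETH prices (from a local CSV and the Messari time-series API),
computes the arh99 / arh99x index (an AHR999-style valuation ratio), cumulative
N-day rise sums (jzr7 / jzr30 / jzr60 / jzr90) and long-horizon moving averages
(ma200 / ma365 / ma730), writes the results to MySQL, and then keeps them
updated in real time from Binance's ethusdt@miniTicker websocket stream.
"""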
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
from loguru import logger
import datetime
import pymysql
import math
import csv


# Shared state between the one-off history sync and the websocket callback.
g_prices = {}   # per-day unix timestamp (stored as str) -> price
g_dbif = None   # Arh99DbIf instance, created in arh99()
g_lastts = 0    # timestamp (seconds) of the last ticker update that was processed

def get_day60_rise(day, prices):
    # Fixed 60-day variant of get_days_rise(); kept for reference.
    total = 0
    cnt = 0
    for i in range(60):
        if str(day) in prices:
            cur_price = prices[str(day)]
            day = str(day - 3600 * 24)
            if day in prices:
                prev_price = prices[day]
                try:
                    #print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
                    total += (cur_price - prev_price) / prev_price
                    cnt += 1
                except ZeroDivisionError:
                    pass
        # print(day, total, cnt)
        day = int(day)
    return total

def get_days_rise(day, maxdays, prices):
    # Sum of day-over-day percentage changes over the last `maxdays` days
    # ending at `day` (a per-day unix timestamp).
    total = 0
    cnt = 0
    for i in range(maxdays):
        if str(day) in prices:
            cur_price = prices[str(day)]
            day = str(day - 3600 * 24)
            if day in prices:
                prev_price = prices[day]
                try:
                    #print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
                    total += (cur_price - prev_price) / prev_price
                    cnt += 1
                except ZeroDivisionError:
                    pass
        # print(day, total, cnt)
        day = int(day)
    return total

def append_jzr_day60(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
    dbif.append_jzr60(day, price, day60_rise, day7_rise, day30_rise, day90_rise)


def sync_jzr_day60(dbif, prices):
    # Backfill the jzr60v3a table from the full price history.
    for day in prices:
        print(day, prices[day])
        day60_rise = get_days_rise(int(day), 60, prices)
        day7_rise = get_days_rise(int(day), 7, prices)
        day30_rise = get_days_rise(int(day), 30, prices)
        day90_rise = get_days_rise(int(day), 90, prices)
        print(day, day60_rise)
        append_jzr_day60(dbif, day, prices[day], day60_rise, day7_rise, day30_rise, day90_rise)


def check_jzr60_sync(dbif):
    return dbif.check_jzr60_sync()


def append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
    dbif.append_jzr60_day(day, price, day60_rise, day7_rise, day30_rise, day90_rise)


def append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise):
    dbif.append_jzr60(dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)


def clean_jzr60day(dbif, clean_day):
    dbif.clean_jzr60_day(clean_day)


def handle_jzr_day60(dbif, day, dayutc, price, prices):
    # Live update: recompute the rise aggregates for the current day,
    # write them, and drop per-tick rows older than two days.
    day60_rise = get_days_rise(dayutc, 60, prices)
    day7_rise = get_days_rise(dayutc, 7, prices)
    day30_rise = get_days_rise(dayutc, 30, prices)
    day90_rise = get_days_rise(dayutc, 90, prices)
    print(dayutc, price, day, day60_rise)
    append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise)
    append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
    clean_day = dayutc - 3600 * 24 * 2
    clean_jzr60day(dbif, clean_day)

class Arh99DbIf:
    # Thin MySQL accessor for the arh99*, jzr60* and ma730* tables.

    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
        print("init arh99 db success!")

    def check_sync(self):
        synced = False
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `arh99v3a`"
            cursor.execute(sql_query)
            result = cursor.fetchone()
            print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        synced = True
        self.conn.commit()
        #print("synced", synced)
        return synced

    def append(self, day, price, arh99, arh99x):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `arh99v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (int(day),))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        sql_update = 'UPDATE arh99v3a SET `arh99`=%s, `arh99x`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
                        print(sql_update)
                        cursor.execute(sql_update, (arh99, arh99x, price, int(day), int(day)))
                    else:
                        sql_insert = "INSERT INTO `arh99v3a` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
                        print(sql_insert)
                        cursor.execute(sql_insert, (day, price, arh99, arh99x))
        self.conn.commit()

    def append_day(self, day, price, arh99, arh99x):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `arh99v3aday` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            print(sql_insert)
            cursor.execute(sql_insert, (day, price, arh99, arh99x))
        self.conn.commit()

    def clean_day(self, day):
        with self.conn.cursor() as cursor:
            sql_clean = "DELETE from arh99v3aday where unixdt<FROM_UNIXTIME(%s)"
            print(sql_clean)
            cursor.execute(sql_clean, (int(day),))
        self.conn.commit()

    def check_jzr60_sync(self):
        synced = False
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `jzr60v3a`"
            cursor.execute(sql_query)
            result = cursor.fetchone()
            print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        synced = True
        self.conn.commit()
        #print("synced", synced)
        return synced

    def append_jzr60(self, day, price, jzr60, jzr7, jzr30, jzr90):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `jzr60v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (int(day),))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        sql_update = 'UPDATE jzr60v3a SET `jzr60`=%s,`jzr7`=%s,`jzr30`=%s,`jzr90`=%s,`price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
                        print(sql_update)
                        cursor.execute(sql_update, (jzr60, jzr7, jzr30, jzr90, price, int(day), int(day)))
                    else:
                        sql_insert = "INSERT INTO `jzr60v3a` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
                        print(sql_insert)
                        cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
        self.conn.commit()

    def append_jzr60_day(self, day, price, jzr60, jzr7, jzr30, jzr90):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `jzr60v3aday` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
            print(sql_insert)
            cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
        self.conn.commit()

    def clean_jzr60_day(self, day):
        with self.conn.cursor() as cursor:
            sql_clean = "DELETE from jzr60v3aday where unixdt<FROM_UNIXTIME(%s)"
            print(sql_clean)
            cursor.execute(sql_clean, (int(day),))
        self.conn.commit()

    def check_ma730_sync(self):
        synced = False
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `ma730v3a`"
            cursor.execute(sql_query)
            result = cursor.fetchone()
            print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        synced = True
        self.conn.commit()
        #print("synced", synced)
        return synced

    def append_ma730(self, day, price, ma730, ma365, ma200):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `ma730v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (int(day),))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    ma730x5 = ma730*5
                    if result["COUNT(id)"] > 0:
                        sql_update = 'UPDATE ma730v3a SET `ma730`=%s, `ma730x5`=%s, `ma365`=%s, `ma200`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
                        print(sql_update)
                        cursor.execute(sql_update, (ma730, ma730x5, ma365, ma200, price, int(day), int(day)))
                    else:
                        sql_insert = "INSERT INTO `ma730v3a` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
                        print(sql_insert)
                        cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
        self.conn.commit()

    def append_ma730_day(self, day, price, ma730, ma365, ma200):
        with self.conn.cursor() as cursor:
            ma730x5 = ma730*5
            sql_insert = "INSERT INTO `ma730v3aday` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
            print(sql_insert)
            cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
        self.conn.commit()

    def clean_ma730_day(self, day):
        with self.conn.cursor() as cursor:
            sql_clean = "DELETE from ma730v3aday where unixdt<FROM_UNIXTIME(%s)"
            print(sql_clean)
            cursor.execute(sql_clean, (int(day),))
        self.conn.commit()

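# The queries above assume six tables shaped roughly as sketched below. Only
# the table and column names are taken from the SQL in this class; the column
# types are an illustrative guess, not the actual schema:
#
#   CREATE TABLE arh99v3a (id INT AUTO_INCREMENT PRIMARY KEY, unixdt DATETIME,
#                          price DOUBLE, arh99 DOUBLE, arh99x DOUBLE);
#   CREATE TABLE jzr60v3a (id ..., unixdt ..., price ..., jzr60 DOUBLE,
#                          jzr7 DOUBLE, jzr30 DOUBLE, jzr90 DOUBLE);
#   CREATE TABLE ma730v3a (id ..., unixdt ..., price ..., ma730 DOUBLE,
#                          ma730x5 DOUBLE, ma365 DOUBLE, ma200 DOUBLE);
#   -- plus arh99v3aday / jzr60v3aday / ma730v3aday with the same columns,
#   -- holding only the most recent two days of per-tick rows.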
def get_history_price(dbif):
    # Load daily prices from the bundled CSV into g_prices,
    # keyed by the per-day unix timestamp as a string.
    global g_prices
    with open("eth_history_price.csv", newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            print(row)
            daystr = row["Date"]
            p = row["Price"]
            dayutc = time.mktime(time.strptime(daystr, "%m/%d/%Y"))
            g_prices[str(int(dayutc))] = float(p)
    '''response_price = requests.get(
        'https://data.nasdaq.com/api/v3/datasets/BCHAIN/MKPRU.json?api_key=FZqXog4sR-b7cYnXcRVV')
    if response_price.status_code == 200:
        #print(response_price.content)
        priceweb = ujson.loads(response_price.content)
        if "dataset" in priceweb:
            priceset = priceweb["dataset"]
            if "data" in priceset:
                pricedata = priceset["data"]
                for price in pricedata:
                    daystr = price[0]
                    p = price[1]
                    dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
                    g_prices[str(int(dayutc))] = float(p)
                    #print(price, int(dayutc), g_prices[str(int(dayutc))])
    '''
    return g_prices

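# get_history_price() expects eth_history_price.csv to be a headed CSV with at
# least "Date" (MM/DD/YYYY) and "Price" columns, e.g. (values illustrative):
#
#   Date,Price
#   01/15/2023,1551.23
#   01/14/2023,1523.87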
def get_history_price2(dbif):
    # Walk backwards year by year through the Messari price time-series API
    # until a year returns no data, then top up with the local CSV.
    global g_prices
    #pricedict = {}
    dayt = time.gmtime()
    daystr = time.strftime("%Y", dayt)
    year = int(daystr)
    end_year = year
    while True:
        url = ""
        if end_year != year:
            start_year = end_year
            url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?start="
        else:
            url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?after=" + str(
                year) + "-01-01&order=descending&interval=1d"

        if end_year != year:
            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
        header_set = {}
        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
        # header_set["Content-Type"] = "application/json"
        print(header_set, url)
        response_price = requests.get(url, headers=header_set)
        # print(response_price)
        if response_price.status_code == 200:
            # print(response_price.content)
            priceweb = ujson.loads(response_price.content)
            if "data" in priceweb:
                priceset = priceweb["data"]
                if "values" in priceset:
                    valueset = priceset["values"]
                    if valueset is not None:
                        for supply in valueset:
                            # supply[0] is a millisecond timestamp; supply[1] is
                            # taken as that day's price. Snap the timestamp to
                            # the start of its day before storing.
                            dayutc = int(supply[0] / 1000)
                            s = supply[1]
                            ret_time = time.gmtime(dayutc)
                            ret_daystr = time.strftime("%d %b %Y", ret_time)
                            ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
                            #self.pricedict[str(ret_dayutc)] = float(s)
                            g_prices[str(ret_dayutc)] = float(s)
                            # print(s, dayutc, pricedict[str(dayutc)])
                            # break
                    else:
                        break
                else:
                    break
        end_year -= 1
        time.sleep(2)
    #print(self.pricedict)
    #return self.pricedict

    get_history_price(dbif)
    return g_prices

def get_coin_days(day):
    # Age in days since 2009-01-03 (Bitcoin's genesis date, which the
    # exponential fit in get_coin_exp() is anchored to).
    birthday = time.mktime(time.strptime("2009-01-03", "%Y-%m-%d"))
    days = (int(day) - birthday)/3600/24
    #print(day, birthday, days)
    return days

def get_coin_exp(days):
    # Exponential growth-curve estimate: 10 ** (5.84 * log10(days) - 17.01).
    try:
        temp = 5.84*math.log10(days)-17.01
        #print("temp", temp, math.log10(days), days)
        exp = math.pow(10, temp)
        return exp
    except (ValueError, OverflowError):
        # log10 of a non-positive age, or an overflowing power -> no estimate.
        return 0

def cal_day200_price(prices, day):
    # Simple average of up to the last 200 daily prices ending at `day`
    # (missing days are skipped). g_prices is keyed by strings, so normalise
    # the key first; otherwise the current day would never match.
    day = str(int(day))
    total = 0
    cnt = 0
    for i in range(200):
        if day in prices:
            total += prices[day]
            cnt += 1
            #print(day, total, cnt)
        day = str(int(day) - 3600 * 24)

    if cnt > 0:
        return total/cnt
    return 0

def cal_arh99(prices, day, price):
    # arh99  = (price / 200-day average) * (price / exponential growth estimate)
    # arh99x = the reciprocal ratio, scaled by 3.
    day200 = cal_day200_price(prices, day)
    #print("day200", day200)
    days = get_coin_days(day)
    #print("days", days)
    exp = get_coin_exp(days)
    #print("exp", exp, price)
    try:
        arh99 = (float(price)/day200)*(float(price)/exp)
        arh99x = (day200/float(price))*(exp/float(price))*3
    except ZeroDivisionError:
        arh99 = 0
        arh99x = 0
    #print("arh99", arh99)

    return arh99, arh99x

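# Illustrative check of cal_arh99 (made-up numbers): with day200 = 2000,
# exp = 1500 and price = 1800, arh99 = (1800/2000) * (1800/1500) = 1.08 and
# arh99x = (2000/1800) * (1500/1800) * 3 ≈ 2.78.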
def check_sync(dbif):
    return dbif.check_sync()


def append_arh99(dbif, day, price, arh99, arh99x):
    dbif.append(day, price, arh99, arh99x)


def sync_arh99(dbif, prices):
    # Backfill the arh99v3a table from the full price history.
    for day in prices:
        print(day, prices[day])
        arh99, arh99x = cal_arh99(prices, int(day), prices[day])
        print(day, arh99, arh99x)
        append_arh99(dbif, day, prices[day], arh99, arh99x)


def append_arh99day(dbif, day, price, arh99, arh99x):
    dbif.append_day(day, price, arh99, arh99x)


def clean_arh99day(dbif, day):
    dbif.clean_day(day)

def arh99_handler(message):
    # Binance ethusdt@miniTicker callback: "E" is the event time in ms and
    # "c" is the latest close price. Only ticks that land on second 0 of a
    # minute are processed, and at most once every 15 s.
    global g_prices
    global g_dbif
    global g_lastts
    coin_data = message["data"]
    #coin_symbol = coin_data["s"]
    coin_ts = int(coin_data["E"])
    coin_price = float(coin_data["c"])
    #print((coin_ts/1000), int((coin_ts/1000)%60))
    if int((coin_ts/1000)%60) == 0:
        #if coin_ts/1000/60 != g_lastts:
        if coin_ts/1000 - g_lastts >= 15:
            #print(coin_ts, coin_price)
            coin_ts2 = time.gmtime(coin_ts/1000)
            daystr = time.strftime("%d %b %Y", coin_ts2)
            print(daystr)
            dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
            g_prices[str(dayutc)] = coin_price
            arh99, arh99x = cal_arh99(g_prices, dayutc, coin_price)
            print(dayutc, coin_price, arh99, arh99x)

            append_arh99day(g_dbif, coin_ts/1000, coin_price, arh99, arh99x)
            append_arh99(g_dbif, dayutc, coin_price, arh99, arh99x)
            clean_day = dayutc - 3600*24*2
            clean_arh99day(g_dbif, clean_day)

            handle_jzr_day60(g_dbif, coin_ts/1000, dayutc, coin_price, g_prices)
            handle_ma_day730(g_dbif, coin_ts / 1000, dayutc, coin_price, g_prices)
            g_lastts = coin_ts/1000

def start_arh99(dbif, prices):
    ws_client = WebsocketClient()
    ws_client.start()
    ws_client.instant_subscribe(
        stream=['ethusdt@miniTicker'],
        callback=arh99_handler,
    )

def arh99():
    # Entry point: connect to MySQL, backfill every indicator from history,
    # then follow the live ticker. The check_*_sync() guards are currently
    # bypassed, so every start re-syncs the full history.
    global g_dbif
    g_dbif = Arh99DbIf()
    prices = get_history_price2(g_dbif)
    #if not check_sync(g_dbif):
    if True:
        sync_arh99(g_dbif, prices)
    #if not check_jzr60_sync(g_dbif):
    if True:
        sync_jzr_day60(g_dbif, prices)
    #if not check_ma730_sync(g_dbif):
    if True:
        sync_ma_day730(g_dbif, prices)
    start_arh99(g_dbif, prices)

# 2-year MA multiplier section. Despite the *_rise names, these helpers
# return moving averages of the price, not percentage rises.
def get_day730_rise(day, prices):
    total = 0
    cnt = 0
    for i in range(730):
        if str(day) in prices:
            cur_price = prices[str(day)]
            total += cur_price
            cnt += 1
            day = str(day - 3600 * 24)
        day = int(day)
    if cnt > 0:
        return total/cnt
    return 0

def get_day365_rise(day, maxdays, prices):
    # Average price over the last `maxdays` days (used for the 365- and 200-day MAs).
    total = 0
    cnt = 0
    for i in range(maxdays):
        if str(day) in prices:
            cur_price = prices[str(day)]
            total += cur_price
            cnt += 1
            day = str(day - 3600 * 24)
        day = int(day)
    if cnt > 0:
        return total/cnt
    return 0

def append_ma_day730(dbif, day, price, day730_rise, day365_rise, day200_rise):
    dbif.append_ma730(day, price, day730_rise, day365_rise, day200_rise)


def sync_ma_day730(dbif, prices):
    # Backfill the ma730v3a table from the full price history.
    for day in prices:
        print(day, prices[day])
        day730_rise = get_day730_rise(int(day), prices)
        day365_rise = get_day365_rise(int(day), 365, prices)
        day200_rise = get_day365_rise(int(day), 200, prices)
        print(day, day730_rise)
        append_ma_day730(dbif, day, prices[day], day730_rise, day365_rise, day200_rise)


def check_ma730_sync(dbif):
    return dbif.check_ma730_sync()


def append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise):
    dbif.append_ma730_day(day, price, day730_rise, day365_rise, day200_rise)


def append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise):
    dbif.append_ma730(dayutc, price, day730_rise, day365_rise, day200_rise)


def clean_ma730day(dbif, clean_day):
    dbif.clean_ma730_day(clean_day)


def handle_ma_day730(dbif, day, dayutc, price, prices):
    # Live update: recompute the moving averages for the current day,
    # write them, and drop per-tick rows older than two days.
    day730_rise = get_day730_rise(dayutc, prices)
    day365_rise = get_day365_rise(dayutc, 365, prices)
    day200_rise = get_day365_rise(dayutc, 200, prices)
    print(dayutc, price, day, day730_rise)
    append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise)
    append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise)
    clean_day = dayutc - 3600 * 24 * 2
    clean_ma730day(dbif, clean_day)

if __name__ == "__main__":
    arh99()