From 2ba7422c9c71c6e121935d488af6eb521701c4ae Mon Sep 17 00:00:00 2001 From: fengche <1158629543@qq.com> Date: Fri, 25 Jul 2025 17:29:12 +0800 Subject: [PATCH] =?UTF-8?q?coinbus=E4=BB=A3=E7=A0=81=E6=9B=B4=E6=96=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- coinbus/README.md | 8 + coinbus/btc24h_db_if.py | 600 +++++++++++++++++ coinbus/btc24h_redis_if.py | 613 +++++++++++++++++ coinbus/btc24h_stats.py | 1110 +++++++++++++++++++++++++++++++ coinbus/btc_price_fetcher.py | 85 +++ coinbus/btc_prices.py | 141 ++++ coinbus/btc_stats_qt.py | 1219 ++++++++++++++++++++++++++++++++++ coinbus/db_if_qt.py | 562 ++++++++++++++++ coinbus/redis_if_qt.py | 610 +++++++++++++++++ 9 files changed, 4948 insertions(+) create mode 100644 coinbus/README.md create mode 100644 coinbus/btc24h_db_if.py create mode 100644 coinbus/btc24h_redis_if.py create mode 100644 coinbus/btc24h_stats.py create mode 100644 coinbus/btc_price_fetcher.py create mode 100644 coinbus/btc_prices.py create mode 100644 coinbus/btc_stats_qt.py create mode 100644 coinbus/db_if_qt.py create mode 100644 coinbus/redis_if_qt.py diff --git a/coinbus/README.md b/coinbus/README.md new file mode 100644 index 0000000..577e464 --- /dev/null +++ b/coinbus/README.md @@ -0,0 +1,8 @@ +btc24h_stats.py --btc实时更新数据代码 +btc24h_redis_if.py --btc实时更新数据代码redis数据库 +btc24h_db_if.py --btc实时更新数据代码mysql数据库 +btc_stats_qt.py --btc每天更新数据代码 +redis_if_qt.py --btc每天更新数据代码redis数据库 +db_if_qt.py --btc每天更新数据代码mysql数据库 +btc_prices.py --获取历史币价代码 +btc_prices_fetcher.py --获取实时币价代码 \ No newline at end of file diff --git a/coinbus/btc24h_db_if.py b/coinbus/btc24h_db_if.py new file mode 100644 index 0000000..7b31e32 --- /dev/null +++ b/coinbus/btc24h_db_if.py @@ -0,0 +1,600 @@ +# coding=utf-8 +import datetime +import json +import requests +import pymysql +from loguru import logger +import time + + +class DbIf: + def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", 
dbname="btcdb"): + self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, + cursorclass=pymysql.cursors.DictCursor) + + def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, + transferred_price, balanced_price, nvt_ratio, velocity): + with self.conn.cursor() as cursor: + print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, + balanced_price, nvt_ratio, velocity) + # 调用消息订阅的api:向topic中储存rt_dailyindsv2e2的实时数据 + # url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1" + # headers = {"accept": "application/json"} + # data = {"unixdt":dt_utc,"height_begin":height_begin,"height_end":height_end,"lth_volume":lth_volume,"frm":frm,"cvdd":cvdd,"realized_price":realized_price,"transferred_price":transferred_price,"balanced_price":balanced_price,"nvt_ratio":nvt_ratio,"velocity":velocity} + # response = requests.post(url=url, data=json.dumps(data), headers=headers) + sql_insert = "REPLACE INTO rt_dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, + balanced_price, nvt_ratio, velocity)) + + ''' + def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)" + cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees)) + ''' + + def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, + new_address_volume, active_address, + send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, + asol, 
eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, + day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, + liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, + nupl): + with self.conn.cursor() as cursor: + # 调用消息订阅的api:向topic中储存rt_dailyindsv2e1的实时数据 + # 数据结构{dt_utc:'dt_utc'} + try: + url="https://coinbus.cc/api/v1/marketall/push/realtime/btc/dailyv2e1" + headers = {"accept": "application/json"} + data = {"unixdt":dt_utc,"height_begin":height_begin,"height_end":height_end,"profitrate":profitrate, + "fees":fees,"txs":txs,"new_address":new_address,"total_address":total_address, + "new_address_volume":new_address_volume,"active_address":active_address,"send_address":send_address, + "receive_address":receive_address,"volume":volume,"eavolume":eavolume,"sopr":sopr,"asopr":asopr,"easopr":easopr, + "lthsopr":lthsopr,"sthsopr":sthsopr,"asol":asol,"eaasol":eaasol,"dormancy":dormancy, + "adormancy":adormancy,"eadormancy":eadormancy,"cdd":cdd,"sacdd":sacdd,"eacdd":eacdd,"day1":day1,"day7":day7, + "day30": day30,"day60":day60,"day90":day90,"day180":day180,"day365":day365,"day730":day730, + "csupply":csupply,"mintusd":mintusd,"sumcsupply":sumcsupply,"sumcdd":sumcdd,"sumeacdd":sumeacdd,"liveliness":liveliness, + "ealiveliness":ealiveliness,"rprofit":rprofit,"rloss":rloss,"rplrate":rplrate, + "price":price,"marketcap":marketcap,"rcap":rcap,"earcap":earcap,"mvrv":mvrv,"nupl":nupl} + response = requests.post(url=url, data=json.dumps(data), headers=headers) + except: + print("api调用失败") + sql_insert = "REPLACE INTO rt_dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr," + sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, 
day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, nupl" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, + active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, + nupl)) + self.conn.commit() + + ''' + def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, + send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, + asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, + day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, + liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, + lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr," + sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, 
day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, + nupl)) + self.conn.commit() + ''' + ''' + # daily daily on-chain volume + def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyvolume`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE 
UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + # newaddrs + ''' + def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt, + last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, + last_vol): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dayutc, }) + result = cursor.fetchone() + # print(dt_utc) + # print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, `volume=%s`,`txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, ( + last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, + last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc)) + else: + print("insert") + sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees, + last_profit_rate)) + self.conn.commit() + ''' + ''' + def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height): + with self.conn.cursor() as cursor: + sql_insert = 
"INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + #print(sql_insert) + #print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height)) + self.conn.commit() + ''' + ''' + def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, + days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid, + block_height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, ( + dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest, + days_largest, days_current, tx_buy_address, txid, block_height)) + self.conn.commit() + ''' + ''' + def update_to_dailycdd(self, dt_utc, cdd): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, cdd)) + self.conn.commit() + ''' + ''' + def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, + day60, day90, day180, day365, day730): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, dormancy, adormancy, 
eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, + day365, + day730)) + self.conn.commit() + ''' + ''' + def update_to_dailysopr(self, dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr)) + self.conn.commit() + ''' + ''' + def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, + rloss, rplrate, price, marketcap, rcap, earcap, mvrv): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, + rplrate, + price, marketcap, rcap, earcap, mvrv)) + self.conn.commit() + ''' + # daily volume + ''' + def update_to_dailyvolume(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + ''' + '''with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE 
unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (volume, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit()''' + ''' + def update_to_dailyfees(self, dt_utc, fees): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, fees)) + self.conn.commit() + ''' + ''' + def import_to_dailyvolume2(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + + def delete_dailyvolume_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyvolume`" + cursor.execute(sql_query) + self.conn.commit() + + + # daily market cap + def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyprice`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by 
`unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + #daily price + def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply)) + self.conn.commit() + + def update_to_dailyprice(self, dt_utc, price, volume, change): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change)) + self.conn.commit() + + def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply)) + self.conn.commit() + + def update_dailypricechange(self): + with self.conn.cursor() as cursor: + sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt" + cursor.execute(sql_query) + results = 
cursor.fetchall() + prevprice = -1 + for result in results: + if prevprice < 0: + prevprice = result["price"] + else: + #print(result["unixdt"], result["price"], result["marketcap"]) + try: + change = (result["price"]/prevprice - 1)*100 + except: + change = 0 + #print(csupply) + datestr = result["unixdt"] + logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change)) + sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s' + cursor.execute(sql_update, (str(change), result["unixdt"])) + prevprice = result["price"] + self.conn.commit() + + def delete_dailyprice_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyprice`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_failed_blockvolume(self, height): + with self.conn.cursor() as cursor: + sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `blockamount` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + self.conn.commit() + + #block check --- big amount for vout + def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamountvout`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " 
WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout, + profit): + with self.conn.cursor() as cursor: + # url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1" + # headers = {"accept": "application/json"} + # data = {"unixdt":datetime,"vout":vout,"voutn":voutn,"vouttype":vouttype, + # "amount":amount,"height":height,"txid":txid,"days":days,"buyin":buyin, + # "sellout":sellout,"profit":profit} + # response = requests.post(url=url, data=json.dumps(data), headers=headers) + sql_insert = "INSERT INTO `rt_bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, + (datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit)) + self.conn.commit() + + ''' + # block check --- big amount tx + def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamounttx`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " 
WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_bigamounttx(self, datetime, txid, amount, height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + #print(datetime, txid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, txid)) + self.conn.commit() + + # block check --- per block amount + def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `blockamount`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if amount > 0: + sql_query = sql_query + "WHERE amount > " + str(amount) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if len(end_time) > 0: + sql_query = sql_query 
+ " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_blockamount(self, datetime, blockid, amount, height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, blockid)) + self.conn.commit() + + def delete_node_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `blockamount`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamountvout`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamounttx`" + cursor.execute(sql_query) + self.conn.commit() + + def update_realize_cap(self, dayutc, last_rv): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_rv)) + self.conn.commit() + + # daily realize cap + def query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyrcap`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + 
else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_daily_addr(self, dayutc, last_add_cnt): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_add_cnt)) + self.conn.commit() + + def delete_daily_addr(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyaddradd`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_daily_rv(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyrcap`" + cursor.execute(sql_query) + self.conn.commit() + ''' + + def __del__(self): + self.conn.close() diff --git a/coinbus/btc24h_redis_if.py b/coinbus/btc24h_redis_if.py new file mode 100644 index 0000000..d5d3236 --- /dev/null +++ b/coinbus/btc24h_redis_if.py @@ -0,0 +1,613 @@ +import time + +from walrus import * +from loguru import logger + +class RedisIf: + def __init__(self, host="127.0.0.1", port=6379, password="", db=0): + self.db = Database(host=host, port=port, db=db) + + self.zbalance = self.db.ZSet("rt_balancev2e") + ''' + #realize cap progress + self.rv = self.db.Hash("rv") + #address and balance progress + self.addr = self.db.Hash("addr") + #block volume progress + self.bv = self.db.Hash("bv") + #daily volume progress + self.dv = self.db.Hash("dv") + ''' + ''' + #stat tx progress + self.tx = self.db.Hash("tx") + + #ETH daily contract progress + self.eth_dc = 
self.db.Hash("ethdc") + + #btc stats fee + self.btc_stats = self.db.Hash("btcstats") + + #btc stats volume + self.btc_volume = self.db.Hash("btcvolume") + + # btc stats cdd + self.btc_cdd = self.db.Hash("btccdd") + + # btc stats cdd days + self.btc_cdd_days = self.db.Hash("btccdddays") + ''' + self.btc_block_time = self.db.Hash("rt_btcblocktimev2e") + ''' + self.btc_sopr = self.db.Hash("btcsopr") + ''' + self.btc_data = self.db.Hash("rt_btc_datav2e") + + self.active_address = self.db.Set("rt_active_addressv2e") + self.send_address = self.db.Set("rt_send_addressv2e") + self.receive_address = self.db.Set("rt_receive_addressv2e") + + def get_btc_data(self, key): + value = None + if self.btc_data[key] is not None: + value = self.btc_data[key] + return value + + def set_btc_data(self, key, value): + self.btc_data[key] = value + + def reset_btc_data(self): + self.btc_data.clear() + self.zbalance.clear() + # self.btc_block_time.clear() + + ''' + def get_last_btc_sopr(self): + last_sopr_buy = None + last_asopr_buy = None + last_easopr_buy = None + last_lth_sopr_buy = None + last_sth_sopr_buy = None + last_asol = None + last_eaasol = None + + if self.btc_sopr["last_asol"] is not None: + last_asol = self.btc_sopr["last_asol"] + #last_asol = float(self.btc_sopr["last_asol"].decode("utf-8")) + if self.btc_sopr["last_eaasol"] is not None: + last_eaasol = self.btc_sopr["last_eaasol"] + #last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8")) + + + if self.btc_sopr["last_sopr_buy"] is not None: + last_sopr_buy = self.btc_sopr["last_sopr_buy"] + #last_sopr_buy = float(self.btc_sopr["last_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_asopr_buy"] is not None: + last_asopr_buy = self.btc_sopr["last_asopr_buy"] + #last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8")) + if self.btc_sopr["last_easopr_buy"] is not None: + last_easopr_buy = self.btc_sopr["last_easopr_buy"] + #last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8")) + if 
self.btc_sopr["last_lth_sopr_buy"] is not None: + last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"] + #last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_buy"] is not None: + last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"] + #last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8")) + + last_sopr_sell = None + last_asopr_sell = None + last_easopr_sell = None + last_lth_sopr_sell = None + last_sth_sopr_sell = None + if self.btc_sopr["last_sopr_sell"] is not None: + last_sopr_sell = self.btc_sopr["last_sopr_sell"] + # last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_asopr_sell"] is not None: + last_asopr_sell = self.btc_sopr["last_asopr_sell"] + # last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8")) + if self.btc_sopr["last_easopr_sell"] is not None: + last_easopr_sell = self.btc_sopr["last_easopr_sell"] + # last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8")) + if self.btc_sopr["last_lth_sopr_sell"] is not None: + last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"] + # last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_sell"] is not None: + last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"] + # last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8")) + + return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell + + def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell): + self.btc_sopr["last_asol"] = last_asol + self.btc_sopr["last_eaasol"] = last_eaasol + + 
self.btc_sopr["last_sopr_buy"] = last_sopr_buy + self.btc_sopr["last_asopr_buy"] = last_asopr_buy + self.btc_sopr["last_easopr_buy"] = last_easopr_buy + self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy + self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy + self.btc_sopr["last_sopr_sell"] = last_sopr_sell + self.btc_sopr["last_asopr_sell"] = last_asopr_sell + self.btc_sopr["last_easopr_sell"] = last_easopr_sell + self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell + self.btc_sopr["last_sth_sopr_sell"] = last_sth_sopr_sell + ''' + + def get_block_time(self, height): + block_time = None + height_str = str(height) + if self.btc_block_time[height_str] is not None: + block_time = int(self.btc_block_time[height_str].decode("utf-8")) + # block_time = int(self.btc_block_time[height_str].decode("utf-8")) + + return block_time + + def set_block_time(self, height, ts): + height_str = str(height) + self.btc_block_time[height_str] = ts + + ''' + def get_last_btc_cdd_days(self): + last_cdd = None + last_acdd = None + last_eacdd = None + last_cdd_day1= None + last_cdd_day7 = None + last_cdd_day30 = None + last_cdd_day60 = None + last_cdd_day90 = None + last_cdd_day180 = None + last_cdd_day365 = None + last_cdd_day730 = None + + last_date = None + last_height = None + last_date_str = None + + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_acdd"] is not None: + last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8")) + if self.btc_cdd["last_eacdd"] is not None: + last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day1"] is not None: + last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day7"] is not None: + last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day30"] is not None: + last_cdd_day30 = 
float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day60"] is not None: + last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day90"] is not None: + last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day180"] is not None: + last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day365"] is not None: + last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day730"] is not None: + last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8")) + if self.btc_cdd_days["last_date"] is not None: + last_date = int(self.btc_cdd_days["last_date"].decode("utf-8")) + if self.btc_cdd_days["last_height"] is not None: + last_height = int(self.btc_cdd_days["last_height"].decode("utf-8")) + if self.btc_cdd_days["last_date_str"] is not None: + last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8") + return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str + + def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + self.btc_cdd["last_acdd"] = acdd + self.btc_cdd["last_eacdd"] = eacdd + self.btc_cdd_days["last_cdd_day1"] = day1 + self.btc_cdd_days["last_cdd_day7"] = day7 + self.btc_cdd_days["last_cdd_day30"] = day30 + self.btc_cdd_days["last_cdd_day60"] = day60 + self.btc_cdd_days["last_cdd_day90"] = day90 + self.btc_cdd_days["last_cdd_day180"] = day180 + self.btc_cdd_days["last_cdd_day365"] = day365 + self.btc_cdd_days["last_cdd_day730"] = day730 + self.btc_cdd_days["last_date"] = dt + self.btc_cdd_days["last_height"] = height + 
self.btc_cdd_days["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_cdd(self): + last_cdd = None + last_date = None + last_height = None + last_date_str = None + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_date"] is not None: + last_date = int(self.btc_cdd["last_date"].decode("utf-8")) + if self.btc_cdd["last_height"] is not None: + last_height = int(self.btc_cdd["last_height"].decode("utf-8")) + if self.btc_cdd["last_date_str"] is not None: + last_date_str = self.btc_cdd["last_date_str"].decode("utf-8") + return last_cdd, last_date, last_height, last_date_str + + def set_last_btc_cdd(self, cdd, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + self.btc_cdd["last_date"] = dt + self.btc_cdd["last_height"] = height + self.btc_cdd["last_date_str"] = dtstr + + def get_last_btc_volume(self): + last_volume = None + last_date = None + last_height = None + last_date_str = None + if self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_volume["last_date"] is not None: + last_date = int(self.btc_volume["last_date"].decode("utf-8")) + if self.btc_volume["last_height"] is not None: + last_height = int(self.btc_volume["last_height"].decode("utf-8")) + if self.btc_volume["last_date_str"] is not None: + last_date_str = self.btc_volume["last_date_str"].decode("utf-8") + return last_volume, last_date, last_height, last_date_str + + def set_last_btc_volume(self, volume, dt, height, dtstr): + self.btc_volume["last_volume"] = volume + self.btc_volume["last_date"] = dt + self.btc_volume["last_height"] = height + self.btc_volume["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_stats(self): + last_fees = None + last_date = None + last_height = None + last_date_str = None + last_volume = None + if self.btc_stats["last_fees"] is not None: + last_fees = float(self.btc_stats["last_fees"].decode("utf-8")) + if 
self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_stats["last_date"] is not None: + last_date = int(self.btc_stats["last_date"].decode("utf-8")) + if self.btc_stats["last_height"] is not None: + last_height = int(self.btc_stats["last_height"].decode("utf-8")) + if self.btc_stats["last_date_str"] is not None: + last_date_str = self.btc_stats["last_date_str"].decode("utf-8") + return last_fees, last_volume, last_date, last_height, last_date_str + + def set_last_btc_stats(self, fees, volume, dt, height, dtstr): + self.btc_stats["last_fees"] = fees + self.btc_volume["last_volume"] = volume + self.btc_stats["last_date"] = dt + self.btc_stats["last_height"] = height + self.btc_stats["last_date_str"] = dtstr + + + def get_last_eth_dc(self): + last_date = None + last_height = None + last_date_str = None + if self.eth_dc["last_date"] is not None: + last_date = int(self.eth_dc["last_date"].decode("utf-8")) + if self.eth_dc["last_height"] is not None: + last_height = int(self.eth_dc["last_height"].decode("utf-8")) + if self.eth_dc["last_date_str"] is not None: + last_date_str = self.eth_dc["last_date_str"].decode("utf-8") + return last_date, last_height, last_date_str + + def set_last_eth_dc(self, dt, height, dtstr): + self.eth_dc["last_date"] = dt + self.eth_dc["last_height"] = height + self.eth_dc["last_date_str"] = dtstr + ''' + ''' + def get_last_dv(self): + last_dv = None + last_date = None + last_height = None + last_date_str = None + if self.dv["last_dv"] is not None: + last_dv = float(self.dv["last_dv"].decode("utf-8")) + if self.dv["last_date"] is not None: + last_date = int(self.dv["last_date"].decode("utf-8")) + if self.dv["last_height"] is not None: + last_height = int(self.dv["last_height"].decode("utf-8")) + if self.dv["last_date_str"] is not None: + last_date_str = self.dv["last_date_str"].decode("utf-8") + return last_dv, last_date, last_height, last_date_str + + def 
set_last_dv(self, dv, dt, height, dtstr): + self.dv["last_dv"] = dv + self.dv["last_date"] = dt + self.dv["last_height"] = height + self.dv["last_date_str"] = dtstr + + def get_last_bv(self): + last_height = None + if self.bv["last_height"] is not None: + last_height = int(self.bv["last_height"].decode("utf-8")) + return last_height + + def set_last_bv(self, height): + self.bv["last_height"] = height + ''' + ''' + def get_last_ind(self): + last_csupply = None + last_mintusd = None + last_sumcsupply = None + last_sumcdd = None + last_sumeacdd = None + last_rprofit = None + last_rloss = None + last_marketcap = None + last_rcap = None + last_mvrv = None + + last_earcap = None + if self.tx["last_csupply"] is not None: + last_csupply = float(self.tx["last_csupply"].decode("utf-8")) + if self.tx["last_mintusd"] is not None: + last_mintusd = float(self.tx["last_mintusd"].decode("utf-8")) + if self.tx["last_sumcsupply"] is not None: + last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8")) + if self.tx["last_sumcdd"] is not None: + last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8")) + if self.tx["last_sumeacdd"] is not None: + last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8")) + if self.tx["last_rprofit"] is not None: + last_rprofit = float(self.tx["last_rprofit"].decode("utf-8")) + if self.tx["last_rloss"] is not None: + last_rloss = float(self.tx["last_rloss"].decode("utf-8")) + if self.tx["last_marketcap"] is not None: + last_marketcap = float(self.tx["last_marketcap"].decode("utf-8")) + if self.tx["last_rcap"] is not None: + last_rcap = float(self.tx["last_rcap"].decode("utf-8")) + if self.tx["last_earcap"] is not None: + last_earcap = float(self.tx["last_earcap"].decode("utf-8")) + if self.tx["last_mvrv"] is not None: + last_mvrv = float(self.tx["last_mvrv"].decode("utf-8")) + + + return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv + 
+ def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv): + self.tx["last_csupply"] = last_csupply + self.tx["last_mintusd"] = last_mintusd + self.tx["last_sumcsupply"] = last_sumcsupply + self.tx["last_sumcdd"] = last_sumcdd + self.tx["last_sumeacdd"] = last_sumeacdd + self.tx["last_rprofit"] = last_rprofit + self.tx["last_rloss"] = last_rloss + self.tx["last_marketcap"] = last_marketcap + self.tx["last_rcap"] = last_rcap + self.tx["last_earcap"] = last_earcap + self.tx["last_mvrv"] = last_mvrv + + + def get_last_tx(self): + last_profit = None + last_fees = None + last_newaddr_cnt = None + last_newaddr_vol = None + last_active_addr_cnt = None + last_tx_addr_cnt = None + last_rx_addr_cnt = None + last_vol_change = None + last_vol = None + last_avol = None + last_date = None + last_height = None + last_date_str = None + last_txs = None + last_eatxs = None + if self.tx["last_profit_rate"] is not None: + last_profit = int(self.tx["last_profit"].decode("utf-8")) + if self.tx["last_fees"] is not None: + last_fees = int(self.tx["last_fees"].decode("utf-8")) + if self.tx["last_txs"] is not None: + last_txs = int(self.tx["last_txs"].decode("utf-8")) + if self.tx["last_eatxs"] is not None: + last_eatxs = int(self.tx["last_eatxs"].decode("utf-8")) + if self.tx["last_newaddr_cnt"] is not None: + last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8")) + if self.tx["last_newaddr_vol"] is not None: + last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8")) + if self.tx["last_active_addr_cnt"] is not None: + last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8")) + if self.tx["last_tx_addr_cnt"] is not None: + last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8")) + if self.tx["last_rx_addr_cnt"] is not None: + last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8")) + if 
self.tx["last_vol_change"] is not None: + last_vol_change = float(self.tx["last_vol_change"].decode("utf-8")) + if self.tx["last_vol"] is not None: + last_vol = float(self.tx["last_vol"].decode("utf-8")) + if self.tx["last_avol"] is not None: + last_avol = float(self.tx["last_avol"].decode("utf-8")) + if self.tx["last_date"] is not None: + last_date = int(self.tx["last_date"].decode("utf-8")) + if self.tx["last_height"] is not None: + last_height = int(self.tx["last_height"].decode("utf-8")) + if self.tx["last_date_str"] is not None: + last_date_str = self.tx["last_date_str"].decode("utf-8") + return last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, last_vol, last_avol, last_date, last_height, last_date_str + + def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt, newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt, vol_change, vol, avol, dt, height, dtstr): + self.tx["last_profit"] = last_profit + self.tx["last_fees"] = last_fees + self.tx["last_txs"] = last_txs + self.tx["last_eatxs"] = last_eatxs + self.tx["last_newaddr_cnt"] = newaddr_cnt + self.tx["last_newaddr_vol"] = newaddr_vol + self.tx["last_active_addr_cnt"] = active_addr_cnt + self.tx["last_tx_addr_cnt"] = tx_addr_cnt + self.tx["last_rx_addr_cnt"] = rx_addr_cnt + self.tx["last_vol_change"] = vol_change + self.tx["last_vol"] = vol + self.tx["last_avol"] = avol + self.tx["last_date"] = dt + self.tx["last_height"] = height + self.tx["last_date_str"] = dtstr + ''' + ''' + def get_last_addr(self): + last_daily_cnt = None + last_date = None + last_height = None + last_date_str = None + if self.addr["last_daily_cnt"] is not None: + last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8")) + if self.addr["last_date"] is not None: + last_date = int(self.addr["last_date"].decode("utf-8")) + if self.addr["last_height"] is not None: + last_height = 
int(self.addr["last_height"].decode("utf-8")) + if self.addr["last_date_str"] is not None: + last_date_str = self.addr["last_date_str"].decode("utf-8") + return last_daily_cnt, last_date, last_height, last_date_str + + def set_last_addr(self, daily_cnt, dt, height, dtstr): + self.addr["last_daily_cnt"] = daily_cnt + self.addr["last_date"] = dt + self.addr["last_height"] = height + self.addr["last_date_str"] = dtstr + ''' + + def is_active_address(self, address): + result = address in self.active_address + if not result: + self.active_address.add(address) + return result + + def reset_active_address(self): + self.active_address.clear() + + def get_active_address_cnt(self): + return len(self.active_address) + + def is_send_address(self, address): + result = address in self.send_address + if not result: + self.send_address.add(address) + return result + + def reset_send_address(self): + self.send_address.clear() + + def get_send_address_cnt(self): + return len(self.send_address) + + def is_receive_address(self, address): + result = address in self.receive_address + if not result: + self.receive_address.add(address) + return result + + def reset_receive_address(self): + self.receive_address.clear() + + def get_receive_address_cnt(self): + return len(self.receive_address) + + def save_addr(self, address, balance): + new_balance = balance + if address in self.zbalance: + new_balance = self.zbalance.score(address) + balance + # print("update", self.zbalance.score(address), balance, new_balance) + # time.sleep(10) + if new_balance < 0.01: + del self.zbalance[address] + # print("check exist", address, address in self.zbalance) + # time.sleep(10) + return + self.zbalance.add({address: new_balance}) + + ''' + def delete_addr(self, config): + self.addr.clear() + self.zbalance.clear() + ''' + + def is_in_addr(self, address): + return address in self.zbalance + + def get_addr_cnt(self): + return len(self.zbalance) + + ''' + def delete_rv(self, config): + self.rv.clear() + + def 
get_last_rv(self): + last_rv = None + last_date = None + last_height = None + last_date_str = None + if self.rv["last_rv"] is not None: + last_rv = float(self.rv["last_rv"].decode("utf-8")) + if self.rv["last_date"] is not None: + last_date = int(self.rv["last_date"].decode("utf-8")) + if self.rv["last_height"] is not None: + last_height = int(self.rv["last_height"].decode("utf-8")) + if self.rv["last_date_str"] is not None: + last_date_str = self.rv["last_date_str"].decode("utf-8") + return last_rv, last_date, last_height, last_date_str + + def set_last_rv(self, rv, dt, height, dtstr): + self.rv["last_rv"] = rv + self.rv["last_date"] = dt + self.rv["last_height"] = height + self.rv["last_date_str"] = dtstr + ''' + + def get_all_address(self): + return self.zbalance.keys() + + def delete_address_data(self, config): + self.zbalance.clear() + + ''' + def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0): + if len(address) > 0: + results = [] + result = {} + result["address"] = address + balance = self.zbalance.score(address) + print(balance) + if balance is not None: + result["balance"] = balance + results.append(result) + return results + + match_result = None + if start_balance > 0: + if end_balance > 0: + match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False) + else: + match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False) + else: + if end_balance > 0: + match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False) + + results = [] + if match_result is not None: + #print(match_result) + for addr, balance2 in match_result: + address = addr.decode('utf-8') + result = {} + result["address"] = address + result["balance"] = balance2 + results.append(result) + if limit > 0 and len(results) >= limit: + break + return results + ''' + + + + + + + + + + + diff --git a/coinbus/btc24h_stats.py b/coinbus/btc24h_stats.py new file mode 100644 index 
# coding=utf-8
"""Realtime (24-hour) BTC statistics collector.

Pulls blocks from a bitcoind node over RPC and BTC price data from the
btcdb MySQL database, feeding the btc24h_db_if / btc24h_redis_if storage
interfaces.
"""
import sys
import time

import pymysql
import requests
import ujson
from requests import Session
from requests.exceptions import ConnectionError, Timeout, TooManyRedirects
from easybitcoinrpc import RPC
from bitcoinutils.setup import setup
from bitcoinutils.script import Script
from bitcoinutils.keys import P2wpkhAddress, P2wshAddress, P2shAddress, PrivateKey, PublicKey, SegwitAddress, \
    P2pkhAddress

import btc24h_db_if
import btc24h_redis_if

DEF_CONFIG_RULES = "rules"

# NOTE(review): a live API key and database credentials are hard-coded in
# this module — they should be moved to configuration / environment variables.
oklinkheaders = {'Ok-Access-Key': "6b7bb4fb-70d5-44a4-bc6f-0a43a8e39896"}

# Single shared definition of the btcdb MySQL endpoint used by the price
# helpers below (previously duplicated inline in three methods).
BTC_PRICE_DB_CONFIG = {
    "host": "192.168.194.216",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "btcdb",
    "port": 4423,
    "connect_timeout": 60,
    "read_timeout": 60,
    "write_timeout": 60,
    "charset": "utf8mb4",
}


class StatIf:
    """Statistics interface around a bitcoind RPC node plus MySQL price lookups."""

    def __init__(self, ip="127.0.0.1", port="8332", user="user", password="password"):
        """Remember the bitcoind connection parameters; the RPC client itself
        is created lazily by rpc_cmd()."""
        self.host = ip
        self.port = port
        self.user = user
        self.pwd = password

        self.rpc = None       # lazily-created easybitcoinrpc.RPC client
        self.height = 0       # block height currently being processed
        self.pricedict = {}   # str(unix midnight UTC) -> USD price
        setup('mainnet')      # select bitcoinutils mainnet parameters

    def get_vin_address(self, prev_scriptpubkey, prev_height, txid):
        """Best-effort extraction of the address behind a spent output.

        Resolution order: an explicit "address" field, then decoding a raw
        "pubkey" script (0x04 = uncompressed 65-byte key, 0x02/0x03 =
        compressed 33-byte key), finally falling back to the script hex so
        the caller always gets a usable key.  *prev_height* and *txid* are
        only used in the failure log message.  Returns None for nulldata.
        """
        prev_type = prev_scriptpubkey["type"]
        prev_address = None
        if prev_type != "nulldata":
            if isinstance(prev_scriptpubkey, dict):
                if "address" in prev_scriptpubkey:
                    prev_address = prev_scriptpubkey["address"]
            else:
                if prev_scriptpubkey.is_address():
                    prev_address = prev_scriptpubkey["address"]
            if prev_address is None:
                if prev_type == "pubkey":
                    temphex = prev_scriptpubkey["hex"]
                    try:
                        if temphex[2:4] == "04":
                            prev_address = PublicKey(temphex[2:132]).get_address(False).to_string()
                        elif temphex[2:4] == "02" or temphex[2:4] == "03":
                            prev_address = PublicKey(temphex[2:68]).get_address().to_string()
                    except Exception:  # narrowed from a bare except
                        print("decode address failed", str(prev_height), "txid", txid, "hex", temphex)
                if prev_address is None:
                    prev_address = prev_scriptpubkey["hex"]

        return prev_address

    def get_vout_address(self, scriptpubkey, height, txid):
        """Outputs are decoded exactly like inputs; delegate to get_vin_address."""
        return self.get_vin_address(scriptpubkey, height, txid)

    def _load_price_history(self, source, batch_size):
        """Rebuild self.pricedict from btc_prices rows for *source*.

        Pages through the table with LIMIT/OFFSET so huge histories are never
        loaded in a single query; one connection serves all pages.  Returns
        the dict mapping str(unix timestamp) -> float USD price.
        """
        sql = ("SELECT timestamp, price FROM btc_prices "
               "WHERE source = %s ORDER BY timestamp LIMIT %s OFFSET %s")
        self.pricedict = {}
        offset = 0
        connection = pymysql.connect(**BTC_PRICE_DB_CONFIG)
        try:
            with connection.cursor() as cursor:
                while True:
                    cursor.execute(sql, (source, batch_size, offset))
                    rows = cursor.fetchall()
                    for timestamp, price in rows:
                        self.pricedict[str(int(timestamp))] = float(price)
                    if len(rows) < batch_size:
                        break  # last page read
                    offset += batch_size
        finally:
            connection.close()
        return self.pricedict

    def get_history_price(self, batch_size=5000):
        """Load the daily Nasdaq-sourced BTC price history into self.pricedict."""
        return self._load_price_history('Nasdaq', batch_size)

    def get_history_price2(self, batch_size=5000):
        """Load the daily CryptoCompare-sourced BTC price history into self.pricedict."""
        return self._load_price_history('CryptoCompare', batch_size)

    def get_day_utc(self, utc_time):
        """Truncate *utc_time* (unix seconds) to 00:00:00 of its UTC day.

        NOTE(review): time.mktime() interprets the struct in *local* time, so
        the result is only a true UTC midnight on a UTC-configured host.
        Preserved as-is because previously stored keys depend on this value.
        """
        t = time.gmtime(utc_time)
        daystr = time.strftime("%d %b %Y", t)
        return int(time.mktime(time.strptime(daystr, "%d %b %Y")))

    def get_current_utc(self):
        """Start of the current UTC day as a unix timestamp."""
        return self.get_day_utc(time.time())

    def get_current_price(self):
        """Latest realtime BTC price from btc_realtime_prices.

        Prefers the most recent binance row, falls back to coinbase, and
        returns 0 when neither source has data.
        """
        price = 0
        connection = pymysql.connect(**BTC_PRICE_DB_CONFIG)
        try:
            with connection.cursor() as cursor:
                for source in ("binance", "coinbase"):
                    cursor.execute(
                        "SELECT price FROM btc_realtime_prices "
                        "WHERE source=%s ORDER BY timestamp DESC LIMIT 1",
                        (source,))
                    row = cursor.fetchone()
                    if row:
                        price = float(row[0])
                        break
        finally:
            connection.close()
        return price

    def rpc_cmd(self, cmd):
        """Execute one named bitcoind RPC command, retrying forever on failure.

        Supported commands: "getblockstats" and "getblock" (for the current
        self.height), "getmempoolinfo", and "getblockcount" — which either
        sleeps 30s when the tip has been reached or advances self.height by
        one, and always returns None.  Unknown commands return None.  Any RPC
        error causes a 1-second pause and a reconnect, then a retry.
        """
        if self.rpc is None:
            self.rpc = RPC(self.host, self.port, self.user, self.pwd)

        while True:
            try:
                if cmd == "getblockstats":
                    return self.rpc.blockchain.get_block_stats(self.height)
                if cmd == "getblock":
                    # verbosity 3: full block with full transaction details
                    return self.rpc.blockchain.get_block(self.height, 3)
                if cmd == "getblockcount":
                    if self.height == self.rpc.blockchain.get_block_count():
                        print("time sleep start")
                        time.sleep(30)
                        print("time sleep end")
                    else:
                        self.height += 1
                        print("next height", self.height)
                    return None
                if cmd == "getmempoolinfo":
                    return self.rpc.blockchain.get_mempool_info()
                return None
            except Exception:  # narrowed from a bare except; reconnect and retry
                time.sleep(1)
                self.rpc = RPC(self.host, self.port, self.user, self.pwd)
在循环内部,它根据'cmd'的值具有不同的分支。根据命令的不同,它使用初始化的RPC客户端 ('self.rpc') 执行相应的RPC调用。 + # - 如果命令是 “getblockstats”,它会调用具有当前高度的'self.rpc.blockchain.get_block_stats()'并返回结果。 + # - 如果命令是 “getblock”,它会调用'self.rpc.blockchain.get_block()',当前高度和详细程度级别为3(完整块详细信息)并返回结果。 + # - 如果命令是 “getblockcount”,则调用'self.rpc.blockchain.get_block_count()'来获取当前块计数。如果当前高度与块计数匹配,则等待30秒,然后再次检查。否则,它将递增高度并返回“无”。 + # - 如果命令是 “getmempoolinfo”,它将调用'self.rpc.blockchain.get_mempool_info()'并返回结果。 + # - 如果该命令与任何指定的命令不匹配,则返回“None”。 + # - 如果在执行RPC命令期间发生任何异常,它会捕获它们,等待1秒钟,然后重新初始化RPC客户端以从潜在的连接问题中恢复。 + # 这种方法提供了一种灵活的方式,可以使用各种RPC命令与比特币节点进行交互,同时处理潜在的错误和重试。 + def stat_load(self, redisif, config): + self.rules = config[DEF_CONFIG_RULES] + self.get_history_price() + self.history_prices = self.get_history_price2() + self.current_price = self.get_current_price() + self.current_utc = self.get_current_utc() + self.history_prices[str(self.current_utc)] = self.current_price + self.daily_date = redisif.get_btc_data("daily_date") + if self.daily_date is None: + self.stat_reset() + return + + self.daily_date = int(redisif.get_btc_data("daily_date")) + self.daily_height = int(redisif.get_btc_data("daily_height").decode("utf-8")) + self.height = self.daily_height + self.daily_height_begin = int(redisif.get_btc_data("daily_height_begin").decode("utf-8")) + self.daily_height_end = int(redisif.get_btc_data("daily_height_end").decode("utf-8")) + self.daily_date_string = redisif.get_btc_data("daily_date_string").decode("utf-8") + self.daily_profit = float(redisif.get_btc_data("daily_profit").decode("utf-8")) + self.daily_fees = float(redisif.get_btc_data("daily_fees").decode("utf-8")) + self.daily_txs = int(redisif.get_btc_data("daily_txs").decode("utf-8")) + #self.daily_new_address = int(redisif.get_btc_data("daily_new_address").decode("utf-8")) + #self.daily_total_address = int(redisif.get_btc_data("daily_total_address").decode("utf-8")) + #self.daily_new_address_volume = float(redisif.get_btc_data("daily_new_address_volume").decode("utf-8")) 
+ self.daily_active_address = int(redisif.get_btc_data("daily_active_address").decode("utf-8")) + self.daily_send_address = int(redisif.get_btc_data("daily_send_address").decode("utf-8")) + self.daily_receive_address = int(redisif.get_btc_data("daily_receive_address").decode("utf-8")) + self.daily_volume = float(redisif.get_btc_data("daily_volume").decode("utf-8")) + self.daily_eavolume = float(redisif.get_btc_data("daily_eavolume").decode("utf-8")) + self.daily_asol = float(redisif.get_btc_data("daily_asol").decode("utf-8")) + self.daily_eaasol = float(redisif.get_btc_data("daily_eaasol").decode("utf-8")) + self.daily_atxs = float(redisif.get_btc_data("daily_atxs").decode("utf-8")) + self.daily_sopr_buy = float(redisif.get_btc_data("daily_sopr_buy").decode("utf-8")) + self.daily_asopr_buy = float(redisif.get_btc_data("daily_asopr_buy").decode("utf-8")) + self.daily_easopr_buy = float(redisif.get_btc_data("daily_easopr_buy").decode("utf-8")) + self.daily_lthsopr_buy = float(redisif.get_btc_data("daily_lthsopr_buy").decode("utf-8")) + self.daily_sthsopr_buy = float(redisif.get_btc_data("daily_sthsopr_buy").decode("utf-8")) + self.daily_sopr_sell = float(redisif.get_btc_data("daily_sopr_sell").decode("utf-8")) + self.daily_asopr_sell = float(redisif.get_btc_data("daily_asopr_sell").decode("utf-8")) + self.daily_easopr_sell = float(redisif.get_btc_data("daily_easopr_sell").decode("utf-8")) + self.daily_lthsopr_sell = float(redisif.get_btc_data("daily_lthsopr_sell").decode("utf-8")) + self.daily_sthsopr_sell = float(redisif.get_btc_data("daily_sthsopr_sell").decode("utf-8")) + self.daily_cdd = float(redisif.get_btc_data("daily_cdd").decode("utf-8")) + self.daily_sacdd = float(redisif.get_btc_data("daily_sacdd").decode("utf-8")) + self.daily_eacdd = float(redisif.get_btc_data("daily_eacdd").decode("utf-8")) + self.daily_cdd_days1 = float(redisif.get_btc_data("daily_cdd_days1").decode("utf-8")) + self.daily_cdd_days7 = 
float(redisif.get_btc_data("daily_cdd_days7").decode("utf-8")) + self.daily_cdd_days30 = float(redisif.get_btc_data("daily_cdd_days30").decode("utf-8")) + self.daily_cdd_days60 = float(redisif.get_btc_data("daily_cdd_days60").decode("utf-8")) + self.daily_cdd_days90 = float(redisif.get_btc_data("daily_cdd_days90").decode("utf-8")) + self.daily_cdd_days180 = float(redisif.get_btc_data("daily_cdd_days180").decode("utf-8")) + self.daily_cdd_days365 = float(redisif.get_btc_data("daily_cdd_days365").decode("utf-8")) + self.daily_cdd_days730 = float(redisif.get_btc_data("daily_cdd_days730").decode("utf-8")) + #self.daily_csupply = float(redisif.get_btc_data("daily_csupply").decode("utf-8")) + #self.daily_mintusd = float(redisif.get_btc_data("daily_mintusd").decode("utf-8")) + #self.daily_sumcsupply = float(redisif.get_btc_data("daily_sumcsupply").decode("utf-8")) + #self.daily_sumcdd = float(redisif.get_btc_data("daily_sumcdd").decode("utf-8")) + #self.daily_sumeacdd = float(redisif.get_btc_data("daily_sumeacdd").decode("utf-8")) + #self.daily_rprofit = float(redisif.get_btc_data("daily_rprofit").decode("utf-8")) + #self.daily_rloss = float(redisif.get_btc_data("daily_rloss").decode("utf-8")) + #self.daily_marketcap = float(redisif.get_btc_data("daily_marketcap").decode("utf-8")) + #self.daily_rcap = float(redisif.get_btc_data("daily_rcap").decode("utf-8")) + #self.daily_earcap = float(redisif.get_btc_data("daily_earcap").decode("utf-8")) + #self.daily_mvrv = float(redisif.get_btc_data("daily_mvrv").decode("utf-8")) + '''self.daily_lth_marketcap = float(redisif.get_btc_data("daily_lth_marketcap").decode("utf-8")) + self.daily_lth_rcap = float(redisif.get_btc_data("daily_lth_rcap").decode("utf-8")) + self.daily_lth_mvrv = float(redisif.get_btc_data("daily_lth_mvrv").decode("utf-8")) + self.daily_sth_marketcap = float(redisif.get_btc_data("daily_sth_marketcap").decode("utf-8")) + self.daily_sth_rcap = float(redisif.get_btc_data("daily_sth_rcap").decode("utf-8")) + 
self.daily_sth_mvrv = float(redisif.get_btc_data("daily_sth_mvrv").decode("utf-8"))''' + #self.daily_nupl = float(redisif.get_btc_data("daily_nupl").decode("utf-8")) + #self.daily_uprofit = float(redisif.get_btc_data("daily_uprofit").decode("utf-8")) + #self.daily_uloss = float(redisif.get_btc_data("daily_uloss").decode("utf-8")) + #self.daily_lthnupl = float(redisif.get_btc_data("daily_lthnupl").decode("utf-8")) + #self.daily_sthnupl = float(redisif.get_btc_data("daily_sthnupl").decode("utf-8")) + self.daily_price = self.get_price(self.height, self.daily_date) + #v2 + self.daily_mint = float(redisif.get_btc_data("daily_mint").decode("utf-8")) + self.daily_lth_volume = float(redisif.get_btc_data("daily_lth_volume").decode("utf-8")) + self.daily_frm = float(redisif.get_btc_data("daily_frm").decode("utf-8")) + #self.daily_cvdd = float(redisif.get_btc_data("daily_cvdd").decode("utf-8")) + #self.daily_nvt_ratio = float(redisif.get_btc_data("daily_nvt_ratio").decode("utf-8")) + #self.daily_balanced_price = float(redisif.get_btc_data("daily_balanced_price").decode("utf-8")) + #self.daily_velocity = float(redisif.get_btc_data("daily_velocity").decode("utf-8")) + #self.daily_mempool_volume = float(redisif.get_btc_data("daily_mempool_volume").decode("utf-8")) + #self.daily_realized_price = float(redisif.get_btc_data("daily_realized_price").decode("utf-8")) + #self.daily_transferred_price = float(redisif.get_btc_data("daily_transferred_price").decode("utf-8")) + #v2 + #self.daily_sumvdd = float(redisif.get_btc_data("daily_sumvdd").decode("utf-8")) + #self.daily_sumdays = float(redisif.get_btc_data("daily_sumdays").decode("utf-8")) + # 似乎“stat_load”方法负责从Redis加载各种统计数据并相应地更新对象的状态。 + # 以下是此方法的作用的细分: + # - 它使用提供的“redisif”对象从Redis检索数据,并相应地更新对象的属性。 + # - 它从“config”字典中检索配置数据。 + # - 它使用键“DEF_CONFIG_RULES”从配置中检索规则,并将其分配给“self.rules”。 + # - 它使用“get_history_price”和“get_history_price2”方法加载历史价格数据,并将其分配给“self.history_prices”。 + # - 它使用“get_current_price”方法检索当前价格并将其分配给“self.current_price”。 
def stat_save(self, redisif):
    """Persist the current daily-statistics snapshot into Redis.

    `daily_height` is refreshed from the working `self.height` first; every
    other field is a plain attribute -> Redis-key mirror written in bulk.
    """
    redisif.set_btc_data("daily_date", self.daily_date)
    # The working height becomes the persisted daily height.
    self.daily_height = self.height
    redisif.set_btc_data("daily_height", self.daily_height)
    # Attribute name doubles as the Redis key; order matches the legacy writer.
    for key in (
        "daily_height_begin", "daily_height_end", "daily_date_string",
        "daily_profit", "daily_fees", "daily_txs",
        "daily_active_address", "daily_send_address", "daily_receive_address",
        "daily_volume", "daily_eavolume",
        "daily_asol", "daily_eaasol", "daily_atxs",
        "daily_sopr_buy", "daily_asopr_buy", "daily_easopr_buy",
        "daily_lthsopr_buy", "daily_sthsopr_buy",
        "daily_sopr_sell", "daily_asopr_sell", "daily_easopr_sell",
        "daily_lthsopr_sell", "daily_sthsopr_sell",
        "daily_cdd", "daily_sacdd", "daily_eacdd",
        "daily_cdd_days1", "daily_cdd_days7", "daily_cdd_days30",
        "daily_cdd_days60", "daily_cdd_days90", "daily_cdd_days180",
        "daily_cdd_days365", "daily_cdd_days730",
        # v2 metrics
        "daily_mint", "daily_lth_volume", "daily_frm",
    ):
        redisif.set_btc_data(key, getattr(self, key))


def stat_reset(self):
    """Reset the whole statistics state to its initial values.

    Sets the working date/height markers, then delegates the per-day
    accumulators to stat_daily_reset().
    """
    self.daily_date = 0          # working date (day UTC)
    self.daily_height = 878280   # working height, ref. 747376
    self.daily_date_string = ""  # working date string
    self.stat_daily_reset()
def stat_daily_reset(self):
    """Zero every per-day accumulator and clear the per-day address sets in Redis."""
    for name in (
        "daily_profit",            # number of UTXOs in profit
        "daily_fees",              # block fees for the day
        "daily_txs",               # txs excluding coinbase
        "daily_active_address", "daily_send_address", "daily_receive_address",
        "daily_volume", "daily_eavolume",
        "daily_asol", "daily_eaasol",      # Average Spent Output Lifespan (plain / entity-adjusted)
        "daily_atxs",                      # txs with lifespan >= 1 hour
        "daily_sopr_buy", "daily_asopr_buy", "daily_easopr_buy",
        "daily_lthsopr_buy", "daily_sthsopr_buy",
        "daily_sopr_sell", "daily_asopr_sell", "daily_easopr_sell",
        "daily_lthsopr_sell", "daily_sthsopr_sell",
        "daily_cdd", "daily_sacdd", "daily_eacdd",
        "daily_cdd_days1", "daily_cdd_days7", "daily_cdd_days30",
        "daily_cdd_days60", "daily_cdd_days90", "daily_cdd_days180",
        "daily_cdd_days365", "daily_cdd_days730",
        "daily_lth_mvrv", "daily_sth_mvrv",
        "daily_height_begin", "daily_height_end",
        "daily_price",
        # v2 metrics
        "daily_mint", "daily_lth_volume", "daily_frm",
    ):
        setattr(self, name, 0)
    # The per-day active/send/receive address sets live in Redis; clear them too.
    self.redis.reset_active_address()
    self.redis.reset_send_address()
    self.redis.reset_receive_address()


def stat_cdd(self, prev_value, days):
    """Accumulate Coin Days Destroyed (value * age-in-days) into the total and age buckets.

    NOTE(review): the last bucket (daily_cdd_days730) catches *everything*
    older than 365 days, not just <= 730 — confirm the naming is intentional.
    """
    cdd = prev_value * days
    self.daily_cdd += cdd
    for limit, attr in (
        (1, "daily_cdd_days1"),
        (7, "daily_cdd_days7"),
        (30, "daily_cdd_days30"),
        (60, "daily_cdd_days60"),
        (90, "daily_cdd_days90"),
        (180, "daily_cdd_days180"),
        (365, "daily_cdd_days365"),
    ):
        if days <= limit:
            setattr(self, attr, getattr(self, attr) + cdd)
            break
    else:
        self.daily_cdd_days730 += cdd
def get_price(self, height, dayutc):
    """Return the BTC/USD price for the UTC day `dayutc` (block at `height`).

    Lookup order: cached history -> live price if `dayutc` is the current
    day -> full cache refresh and one more lookup. Returns 0 when no price
    can be resolved.

    NOTE(review): every branch ends in `break`, so the `while cnt < 3` retry
    loop executes at most one iteration — confirm whether real retries were
    intended.
    """
    price = 0
    dayutcstr = str(dayutc)  # history_prices is keyed by stringified day-UTC
    cnt = 0
    while cnt < 3:
        cnt += 1
        if dayutcstr in self.history_prices:
            price = self.history_prices[dayutcstr]
            break
        elif dayutcstr == str(self.current_utc):
            # Today's close is not in history yet: fetch live and cache it.
            price = self.get_current_price()
            self.current_price = price
            self.history_prices[dayutcstr] = self.current_price
            break
        else:
            # Cache miss: rebuild the whole price cache, then retry the lookup once.
            print("failed get price", height, dayutcstr)
            self.get_history_price()
            self.history_prices = self.get_history_price2()
            self.current_price = self.get_current_price()
            self.current_utc = self.get_current_utc()
            self.history_prices[str(self.current_utc)] = self.current_price
            if dayutcstr in self.history_prices:
                price = self.history_prices[dayutcstr]
            else:
                price = 0
            break

    return price

def save_db(self, dayutc, blocktime):
    """Finalize the day's ratio metrics and write them to MySQL.

    Writes one row each to rt_dailyinds and rt_dailyindsv2; zero-valued
    positional arguments correspond to metrics that are currently disabled.

    NOTE(review): `dbif` below is the module-level handle created in
    init_config(), not a parameter — confirm it matches the `dbif` passed
    to stat_block(). Also mutates daily_asol / daily_eaasol /
    daily_eavolume / daily_frm in place, so calling this twice for the
    same day would skew those values.
    """
    print("cmp", dayutc, self.daily_date)
    start = time.time()
    # Ratio metrics; each guards against a zero denominator.
    daily_profit_rate = self.daily_profit / self.daily_txs if self.daily_txs != 0 else 0
    daily_sopr = self.daily_sopr_sell / self.daily_sopr_buy if self.daily_sopr_buy != 0 else 0
    daily_sasopr = self.daily_asopr_sell / self.daily_asopr_buy if self.daily_asopr_buy != 0 else 0
    daily_easopr = self.daily_easopr_sell / self.daily_easopr_buy if self.daily_easopr_buy != 0 else 0
    daily_lthsopr = self.daily_lthsopr_sell / self.daily_lthsopr_buy if self.daily_lthsopr_buy != 0 else 0
    daily_sthsopr = self.daily_sthsopr_sell / self.daily_sthsopr_buy if self.daily_sthsopr_buy != 0 else 0
    # The ASOL accumulators become averages over the adjusted tx count.
    self.daily_asol = self.daily_asol / self.daily_atxs if self.daily_atxs != 0 else 0
    self.daily_eaasol = self.daily_eaasol / self.daily_atxs if self.daily_atxs != 0 else 0
    dormancy = self.daily_cdd / self.daily_volume if self.daily_volume != 0 else 0
    # Fees are excluded from the entity-adjusted volume before computing dormancy.
    self.daily_eavolume -= (self.daily_fees)
    eadormancy = self.daily_eacdd / self.daily_eavolume if self.daily_eavolume != 0 else 0

    self.daily_height_end = self.height - 1 if self.height > self.daily_height_begin else self.daily_height_begin
    dbif.update_to_dailyinds(blocktime, self.daily_height_begin, self.daily_height_end, daily_profit_rate,
                             self.daily_fees, self.daily_txs, 0, 0,
                             0, self.daily_active_address,
                             self.daily_send_address, self.daily_receive_address, self.daily_volume,
                             self.daily_eavolume, daily_sopr, daily_sasopr, daily_easopr, daily_lthsopr,
                             daily_sthsopr,
                             self.daily_asol, self.daily_eaasol, dormancy, 0, eadormancy,
                             self.daily_cdd, 0, self.daily_eacdd, self.daily_cdd_days1,
                             self.daily_cdd_days7, self.daily_cdd_days30, self.daily_cdd_days60,
                             self.daily_cdd_days90, self.daily_cdd_days180, self.daily_cdd_days365,
                             self.daily_cdd_days730, 0, 0,
                             0, 0, 0,
                             0, 0, 0, 0, 0,
                             self.daily_price, 0, 0, 0,
                             0, 0)

    # v2: Fee Ratio Multiple = (fees + newly minted) / fees.
    if self.daily_fees > 0:
        self.daily_frm = (self.daily_fees + self.daily_mint) / self.daily_fees

    dbif.update_to_dailyindsv2(blocktime, self.daily_height_begin, self.daily_height_end, self.daily_lth_volume, self.daily_frm, 0, 0, 0, 0, 0, 0)

    print("save_db", f'coast:{time.time() - start:.4f}s')

def stat_height_time(self, redisif):
    """Walk the chain from height 1 and cache each block's timestamp in Redis.

    Lazily creates the RPC client on first use; stops once the tip height
    reported by getblockcount is reached.
    """
    tmp_height = 1
    if self.rpc is None:
        self.rpc = RPC(self.host, self.port, self.user, self.pwd)
    while True:
        blockstats = self.rpc.blockchain.get_block_stats(tmp_height)
        blocktime = blockstats["time"]
        redisif.set_block_time(tmp_height, blocktime)
        print("cache block height, time", tmp_height, blocktime)
        getblockcount = self.rpc.blockchain.get_block_count()
        if tmp_height == getblockcount:
            break
        else:
            tmp_height += 1

def stat_block(self, dbif, redisif, config):
    """Main loop: process blocks one by one and maintain all daily metrics.

    Loads saved state from Redis, then for each block: fetches stats and
    transactions over RPC, updates per-day counters (fees, volume, CDD,
    SOPR variants, address activity, big-vout flags), persists the day's
    row to MySQL (save_db) and the running state to Redis (stat_save).
    Never returns.
    """
    self.redis = redisif
    self.stat_load(redisif, config)
    if self.daily_date is None:
        self.stat_reset()

    # Resume one block past the last persisted height.
    self.height = self.daily_height;
    self.height += 1
    self.daily_height_begin = self.height
    print("start")

    while True:
        start = time.time()
        blockstats = self.rpc_cmd("getblockstats")
        print("getblockstats", f'coast:{time.time()-start:.4f}s')
        start = time.time()
        blockdetail = self.rpc_cmd("getblock")
        print("getblock", f'coast:{time.time() - start:.4f}s')
        block_start = time.time()
        # Truncate the block time to its UTC calendar day.
        self.blocktime = blockdetail.get_time()
        block_time2 = time.gmtime(self.blocktime)
        daystr = time.strftime("%d %b %Y", block_time2)
        dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
        dayutcstr = str(dayutc)

        if self.daily_date == 0:
            self.daily_date = dayutc

        # Day rollover: reset the per-day accumulators.
        if dayutc != self.daily_date:
            self.stat_daily_reset()
            self.daily_date = dayutc
            self.daily_height_begin = self.height

        blocktxs = blockdetail.get_transactions()
        self.height = blockdetail.get_height()
        # Height -> time table for later lookups of spent-output ages.
        redisif.set_block_time(self.height, self.blocktime)

        mint = blockstats["subsidy"] / 100000000  # satoshi -> BTC
        self.daily_mint += (mint)

        block_fees = (blockstats["totalfee"] / 100000000)
        self.daily_fees += block_fees
        self.daily_volume += (blockstats["total_out"] / 100000000)

        self.daily_txs += (blockstats["txs"] - 1)  # exclude coinbase tx

        block_price = self.get_price(self.height, dayutc)
        self.daily_price = block_price

        for tx in blocktxs:
            txid = tx.get_txid()
            vins = tx.get_vins()
            vouts = tx.get_vouts()
            vin_hexs = []
            vin_addrs = []
            vin_values = []
            vin_dts = []

            vin_volume = 0
            vin_volume_change = 0
            vin_days_change = 0
            vin_cdd = 0
            vin_cdd_change = 0
            vin_rcap_change = 0
            vin_sopr = 0
            vin_asopr_diff = 0
            vout_change_value = 0

            if not tx.is_coinbase():
                for vin in vins:
                    if vin.is_prevout():
                        prevout = vin["prevout"]
                        prev_height = prevout["height"]
                        prev_value = float(prevout["value"])
                        prev_scriptpubkey = prevout["scriptPubKey"]
                        prev_hex = prev_scriptpubkey["hex"]
                        prev_address = self.get_vin_address(prev_scriptpubkey, prev_height, txid)
                        prev_blocktime = redisif.get_block_time(prev_height)

                        # First time this address sends / is active today.
                        if not redisif.is_send_address(prev_address):
                            self.daily_send_address += 1
                        if not redisif.is_active_address(prev_address):
                            self.daily_active_address += 1

                        days = (self.blocktime - prev_blocktime) / 3600 / 24
                        vin_cdd += (prev_value * days)
                        self.stat_cdd(prev_value, days)

                        # 155 days is the long-term-holder threshold.
                        if days >= 155:
                            self.daily_lth_volume += prev_value

                        vin_addrs.append(prev_address)
                        vin_values.append(prev_value)
                        vin_dts.append(prev_blocktime)
                        vin_hexs.append(prev_hex)

                        vin_volume += prev_value
                        # Age-weighted value, used below to filter < 1 hour txs.
                        vin_asopr_diff += ((self.blocktime - prev_blocktime) * prev_value)

                        prevutc = self.get_day_utc(prev_blocktime)
                        prev_price = self.get_price(prev_height, prevutc)
                        vin_sopr += (prev_price * prev_value)

                        # Detect change: a vout paying back to the same script.
                        have_change = False
                        for vout in vouts:
                            scriptpubkey = vout.get_script_pubkey()
                            if scriptpubkey["hex"] == prev_scriptpubkey["hex"]:
                                vin_rcap_change += (prev_value * prev_price)
                                vin_volume_change += prev_value
                                vout_change_value = float(vout.get_value())

                                days = (self.blocktime - prev_blocktime) / 3600 / 24
                                vin_days_change += days
                                vin_cdd_change += (prev_value * days)

                                have_change = True
                                break
                        if not have_change:
                            # Entity-adjusted CDD only counts non-change spends.
                            self.daily_eacdd += (prev_value * days)

                self.daily_eavolume += (vin_volume - vout_change_value)

                # Remove the change portion from the entity-adjusted SOPR base.
                vin_sopr_change = vin_sopr
                if vin_rcap_change != 0:
                    if vin_volume_change != 0:
                        vin_change_price = vin_rcap_change / vin_volume_change
                        vin_sopr_change -= (vin_change_price * vout_change_value)

                if vin_cdd_change != 0:
                    if vin_volume_change != 0:
                        vin_change_days = vin_cdd_change / vin_volume_change
                        vin_cdd_change -= (vin_change_days * vout_change_value)

                self.daily_sopr_buy += vin_sopr
                self.daily_easopr_buy += vin_sopr_change

                # Adjusted SOPR: only count outputs older than one hour
                # (value-weighted: sum(age*value) >= 1h * sum(value)).
                if vin_asopr_diff >= 3600 * vin_volume:
                    self.daily_asopr_buy += vin_sopr
                    if vin_volume > 0:
                        self.daily_asol += (vin_cdd/vin_volume)
                        self.daily_eaasol += (vin_cdd / vin_volume)
                        if vin_volume_change > 0:
                            self.daily_eaasol -= (vin_cdd_change/vin_volume_change)
                        self.daily_atxs += 1

                # LTH/STH split at 155 days (value-weighted).
                if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume:
                    self.daily_lthsopr_buy += vin_sopr
                else:
                    self.daily_sthsopr_buy += vin_sopr

                vout_price = block_price
                vout_volume = 0
                vout_volume_change = 0
                vout_sopr = 0
                vout_sopr_change = 0

                for vout in vouts:
                    vout_value = float(vout.get_value())
                    vout_volume += vout_value
                    scriptpubkey = vout.get_script_pubkey()
                    vout_type = scriptpubkey["type"]
                    vout_address = self.get_vout_address(scriptpubkey, self.height, txid)
                    vout_hex = scriptpubkey["hex"]

                    if not redisif.is_receive_address(vout_address):
                        self.daily_receive_address += 1
                    if not redisif.is_active_address(vout_address):
                        self.daily_active_address += 1

                    vout_sopr += (vout_price * vout_value)

                    # Outputs returning to a vin script are change, not a sell.
                    have_change = False
                    for cmp in vin_hexs:
                        if cmp == vout_hex:
                            vout_volume_change += vout_value
                            have_change = True
                            break
                    if not have_change:
                        vout_sopr_change += (vout_price * vout_value)

                    # Flag large-value outputs (USD threshold from config rules);
                    # 787556 is the activation height for this feature.
                    if self.height > 787556:
                        if (vout_price * vout_value) >= self.rules["flag_big_vout"]:
                            if vin_volume != 0:
                                days = vin_cdd / vin_volume
                                buyin = vin_sopr / vin_volume
                                sellout = vout_price
                                if buyin > 0:
                                    profit = (sellout - buyin) / buyin
                                    dbif.update_to_bigamountvout(self.blocktime, txid, \
                                        vout_address, vout.get_n(), vout_type, \
                                        vout_value, self.height, days, buyin, sellout, profit)

                self.daily_easopr_sell += vout_sopr_change
                self.daily_sopr_sell += vout_sopr
                if vin_asopr_diff > 3600 * vin_volume:
                    self.daily_asopr_sell += vout_sopr

                if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume:
                    self.daily_lthsopr_sell += vout_sopr
                else:
                    self.daily_sthsopr_sell += vout_sopr

                if vin_volume != 0:
                    # Count this tx as "in profit" when the value-weighted
                    # sell price exceeds the value-weighted buy price.
                    buyin = vin_sopr / vin_volume
                    sellout = vout_sopr / vout_volume if vout_volume != 0 else 0
                    if sellout > buyin:
                        self.daily_profit += 1
            else:
                # Coinbase tx: only address activity is tracked.
                for vout in vouts:
                    vout_value = float(vout.get_value())
                    scriptpubkey = vout.get_script_pubkey()
                    vout_address = self.get_vout_address(scriptpubkey, self.height, txid)

                    vout_price = block_price

                    if not redisif.is_receive_address(vout_address):
                        self.daily_receive_address += 1
                    if not redisif.is_active_address(vout_address):
                        self.daily_active_address += 1

        self.save_db(dayutc, self.blocktime)
        self.stat_save(redisif)
        print("statblock", f'coast:{time.time() - block_start:.4f}s')
        start = time.time()
        self.rpc_cmd("getblockcount")
        print("getblockcount", f'coast:{time.time() - start:.4f}s')
def init_config(filename):
    """Load the JSON config file and build the DB and Redis interfaces.

    Args:
        filename: path to the JSON configuration file.

    Returns:
        (dbif, redisif, config) tuple.

    NOTE(review): the connection parameters below are hard-coded (including
    credentials) and the loaded `config` is only passed through — confirm
    whether they should come from the config file instead.
    """
    # Context manager guarantees the handle is closed even if parsing fails
    # (the previous open/close pair leaked the handle on a ujson error).
    with open(filename) as fconfig:
        config = ujson.load(fconfig)
    dbif = btc24h_db_if.DbIf(host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb")
    redisif = btc24h_redis_if.RedisIf(host="127.0.0.1", port=6379, password="", db=0)
    return dbif, redisif, config


if __name__ == '__main__':
    # Entry point: build the interfaces, clear the realtime Redis snapshot,
    # then start the endless block-statistics loop.
    dbif, redisif, config = init_config("btcstat.conf")
    redisif.reset_btc_data()
    statif = StatIf()
    statif.stat_block(dbif, redisif, config)
# MySQL connection settings.
# NOTE(review): credentials are hard-coded in source control.
DB_CONFIG = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "btcdb",
    "port": 4423
}


def get_current_timestamp():
    """Return the current Unix timestamp in whole seconds."""
    return int(time.time())


def get_binance_price():
    """Fetch the spot BTC/USDT price from Binance; raises on HTTP errors."""
    resp = requests.get(
        "https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT",
        timeout=5,
    )
    resp.raise_for_status()
    return float(resp.json()["price"])


def get_coinbase_price():
    """Fetch the spot BTC/USD price from Coinbase; raises on HTTP errors."""
    resp = requests.get(
        "https://api.coinbase.com/v2/prices/spot?currency=USD",
        timeout=5,
    )
    resp.raise_for_status()
    return float(resp.json()["data"]["amount"])


def upsert_price(source, price, timestamp):
    """Insert or update the single btc_realtime_prices row for `source`."""
    connection = pymysql.connect(**DB_CONFIG)
    try:
        with connection.cursor() as cursor:
            # One row per source: update when it exists, insert otherwise.
            cursor.execute(
                "SELECT id FROM btc_realtime_prices WHERE source = %s",
                (source,),
            )
            if cursor.fetchone():
                sql = """
                UPDATE btc_realtime_prices
                SET price = %s, timestamp = %s
                WHERE source = %s
                """
                cursor.execute(sql, (price, timestamp, source))
            else:
                sql = """
                INSERT INTO btc_realtime_prices (timestamp, source, price)
                VALUES (%s, %s, %s)
                """
                cursor.execute(sql, (timestamp, source, price))
        connection.commit()
    finally:
        connection.close()


def main():
    """Poll both exchanges once per minute and upsert the latest prices."""
    sources = (("Binance", get_binance_price), ("Coinbase", get_coinbase_price))
    while True:
        now_ts = get_current_timestamp()
        for label, fetch in sources:
            try:
                price = fetch()
                print(f"{label} BTC Price: {price}")
                upsert_price(label.lower(), price, now_ts)
            except Exception as e:
                # A failure on one exchange must not stop the other.
                print(f"获取{label}价格失败: {e}")
        time.sleep(60)  # run once per minute
# Utility: snap any Unix timestamp to 08:00 Beijing time of its Beijing-calendar
# day, expressed as a UTC Unix timestamp (i.e. 00:00 UTC of that day).
def adjust_to_beijing_08am(timestamp):
    """Return the UTC timestamp of 08:00 Beijing time (UTC+8) on the
    Beijing-calendar day that contains `timestamp`.

    08:00 Beijing equals 00:00 UTC of the same Beijing-calendar day, so the
    result is simply the start of the UTC day after shifting by +8 hours.

    Bug fix: the previous implementation called ``.timestamp()`` on a *naive*
    ``datetime``, which Python interprets in the host's local timezone — the
    stored key silently depended on the server's TZ setting. Pure integer
    arithmetic is timezone-independent and matches the intended behaviour on
    a UTC host.
    """
    return ((int(timestamp) + 8 * 3600) // 86400) * 86400
# Persist a fetched {timestamp: price} mapping, skipping rows already stored.
def save_to_database(data, source):
    """Insert the given price mapping into btc_prices for `source`.

    Pairs whose (timestamp, source) combination already exists in the
    table are skipped; individual insert errors are printed and do not
    abort the rest of the batch.
    """
    existing_timestamps = get_existing_timestamps()
    conn = pymysql.connect(**DB_CONFIG)
    inserted = 0
    sql = """
    INSERT INTO btc_prices (timestamp, price, source)
    VALUES (%s, %s, %s)
    """
    try:
        with conn.cursor() as cursor:
            for ts, price in data.items():
                if (ts, source) in existing_timestamps:
                    continue  # already stored for this source
                try:
                    cursor.execute(sql, (ts, price, source))
                except pymysql.MySQLError as e:
                    print(f"插入错误: {e}")
                else:
                    inserted += 1
        conn.commit()
        print(f"成功存入 {inserted} 条新数据({source})")
    finally:
        conn.close()
"Nasdaq") + + # CryptoCompare + cc_prices = get_cryptocompare_price() + save_to_database(cc_prices, "CryptoCompare") + + print("========== 数据存储完成 ==========") + +if __name__ == "__main__": + while True: + fetch_and_store_data() + time.sleep(14400) # 每 4 小时执行一次 \ No newline at end of file diff --git a/coinbus/btc_stats_qt.py b/coinbus/btc_stats_qt.py new file mode 100644 index 0000000..e44635d --- /dev/null +++ b/coinbus/btc_stats_qt.py @@ -0,0 +1,1219 @@ +# coding=utf-8 +import sys +import time +from easybitcoinrpc import RPC +from bitcoinutils.setup import setup +from bitcoinutils.script import Script +from bitcoinutils.keys import P2wpkhAddress, P2wshAddress, P2shAddress, PrivateKey, PublicKey, SegwitAddress, \ + P2pkhAddress +import requests +import ujson +from requests import Session +from requests.exceptions import ConnectionError, Timeout, TooManyRedirects +import db_if_qt +import redis_if_qt +import pymysql +DEF_CONFIG_RULES = "rules" +# 此脚本似乎是一个包含导入和一些默认配置的 Python 模块。以下是其内容的细分: +# +# 1. **导入语句**: +# - 'sys':提供对 Python 解释器使用或维护的某些变量以及与解释器交互的函数的访问。 +# - 'time':提供各种与时间相关的功能。 +# - 'easybitcoinrpc':它似乎是一个自定义模块或库,用于通过 RPC(远程过程调用)与 Bitcoin Core 进行交互。 +# - 'bitcoinutils':另一个用于与比特币交互的库,提供用于处理密钥、地址、脚本等的实用程序。 +# - 'requests':一个流行的 HTTP 库,用于发出请求。 +# - 'ujson':一个快速的JSON编码器和解码器,兼容Python的内置'json'模块。 +# - 'Session', 'ConnectionError', 'Timeout', 'TooManyRedirects':这些是“requests”库中用于处理 HTTP 请求和错误的特定组件。 +# +# 2. 
**默认配置**: +# - “DEF_CONFIG_RULES”:默认配置规则,设置为“规则”。这可能表示与规则相关的内容的默认值或配置。 +class StatIf: + def __init__(self, ip="127.0.0.1", port="8332", user="user", password="password"): + self.host = ip + self.port = port + self.user = user + self.pwd = password + + self.rpc = None + self.height = 0 + + self.pricedict = {} + setup('mainnet') + # 该类似乎是用于处理与比特币节点或区块链相关的统计数据的更大系统的一部分。以下是其属性和构造函数的细分:StatIf + # 特性: + # host:表示Bitcoin + # RPC服务器IP地址的字符串。默认值为 。"127.0.0.1" + # port:表示Bitcoin + # RPC服务器端口号的字符串。默认值为 。"8332" + # user:一个字符串,表示用于向Bitcoin + # RPC服务器进行身份验证的用户名。默认值为 。"user" + # pwd:一个字符串,表示用于使用BitcoinRPC服务器进行身份验证的密码。默认值为 。"password" + # rpc:初始化为 ,连接后将保存RPC客户端的实例。None + # height:初始化为 ,它将存储区块链的当前高度。0 + # 构造函数: + # 构造函数使用默认值或作为参数传递的值初始化属性。 + # 它还初始化属性,该属性似乎用于存储与价格相关的数据。pricedict + # 最后,它调用 ,这可能会设置与比特币主网络交互的环境。setup('mainnet') + # 此类作为处理统计信息和通过RPC与比特币节点交互的基础。通常会添加其他方法和功能来执行特定任务,例如检索区块信息、查询交易数据等。 + def get_vin_address(self, prev_scriptpubkey, prev_height, txid): + prev_type = prev_scriptpubkey["type"] + prev_address = None + if prev_type != "nulldata": + if isinstance(prev_scriptpubkey, dict): + if "address" in prev_scriptpubkey: + prev_address = prev_scriptpubkey["address"] + else: + if prev_scriptpubkey.is_address(): + prev_address = prev_scriptpubkey["address"] + if prev_address is None: + if prev_type == "pubkey": + temphex = prev_scriptpubkey["hex"] + try: + if temphex[2:4] == "04": + prev_address = PublicKey(temphex[2:132]).get_address(False).to_string() + elif temphex[2:4] == "02" or temphex[2:4] == "03": + prev_address = PublicKey(temphex[2:68]).get_address().to_string() + except: + print("decode address failed", str(prev_height), "txid", txid, "hex", temphex) + if prev_address is None: + prev_address = prev_scriptpubkey["hex"] + + return prev_address + # “StatIf”类中的“get_vin_address”方法似乎旨在检索与事务输入 (vin) 关联的地址。以下是其工作原理的细分: + # - ** 参数 **: + # - 'prev_scriptpubkey':输入正在花费的上一个事务输出(prevout)的scriptPubKey。 + # - 'prev_height':包含前一笔交易输出的区块的高度。 + # - 'txid':包含正在分析的输入的交易的交易ID。 + # - ** 
功能性 **: + # - 它首先确定scriptPubKey ('prev_type') 的类型。 + # - 如果scriptPubKey类型不是“nulldata”(表示它不是非标准或不可花费的输出): + # - 它检查“prev_scriptpubkey”是否是字典,以及它是否包含“address”键。如果是这样,它会从那里检索地址。 + # - 如果“prev_scriptpubkey”不是字典或不包含“address”键,则会直接检查它是否为有效地址。 + # - 如果地址仍为“None”,则会将scriptPubKey类型视为“pubkey”,并尝试从公钥的十六进制表示形式派生地址。 + # - 如果所有检索地址的尝试都失败,则默认返回scriptPubKey的十六进制表示形式。 + # 此方法旨在用于识别交易记录输入的支出地址。它处理不同类型的 + # scriptPubKey,包括标准地址和公钥。如果遇到任何解码错误,它会打印一条指示失败的消息,以及相关信息,例如区块高度和事务ID。 + def get_vout_address(self, scriptpubkey, height, txid): + return self.get_vin_address(scriptpubkey, height, txid) + # “StatIf”类中的“get_vout_address”方法似乎是“get_vin_address”方法的简单包装器。它实质上将检索与事务输出 (vout) 关联的地址的任务委托给“get_vin_address”方法。 + # 以下是它的作用: + # - ** 参数 **: + # - 'scriptpubkey':此参数表示当前正在使用的输出的scriptPubKey。 + # - 'height':包含输出的块的高度。 + # - 'txid':当前输出的事务ID。 + # - ** 功能性 **: + # - 它只是使用相同的参数('scriptpubkey', 'height', 'txid')调用'get_vin_address'方法。 + # - ** 返回 **: + # - 它返回“get_vin_address”方法返回的任何内容。 + # 此方法在检索事务输入和输出关联的添加时提供处理事务输入和输出的一致性 + def get_history_price(self, batch_size=5000): + #pricedict = {} + # response_price = requests.get( + # 'https://data.nasdaq.com/api/v3/datasets/BCHAIN/MKPRU.json?api_key=FZqXog4sR-b7cYnXcRVV') + # if response_price.status_code == 200: + # priceweb = ujson.loads(response_price.content) + # if "dataset" in priceweb: + # priceset = priceweb["dataset"] + # if "data" in priceset: + # pricedata = priceset["data"] + # for price in pricedata: + # daystr = price[0] + # p = price[1] + # dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d")) + # self.pricedict[str(int(dayutc))] = float(p) + # if len(self.pricedict) > 0: + # return self.pricedict + # + # response_price.close() + #return self.pricedict + """获取数据库中的 Nasdaq 数据,存入字典""" + db_config = { + "host": "192.168.194.216", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict 
= {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = "SELECT timestamp, price FROM btc_prices WHERE source = 'Nasdaq' ORDER BY timestamp LIMIT %s OFFSET %s" + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 最后一页读取完成 + + return self.pricedict + # “get_history_price”方法似乎从特定数据源中检索历史比特币价格,并将其存储在类实例 (“self.pricedict”) 的“pricedict”属性中。以下是该方法的细分: + # - ** 功能性 **: + # - 它向特定URL (''https: // data.nasdaq.com / api / v3 / datasets / BCHAIN / MKPRU.json?api_key = FZqXog4sR - b7cYnXcRVV'') 发送GET请求以获取历史比特币价格数据。 + # - 如果响应状态代码为“200”(表示成功): + # - 它使用“ujson.loads”方法解析JSON响应。 + # - 它从解析的JSON响应中检索相关数据并对其进行迭代。 + # - 对于数据中的每个条目,它提取日期和价格,将日期转换为Unix时间戳,并将价格存储在'pricedict'字典属性中,以Unix时间戳为键。 + # - 最后,如果'pricedict'在处理数据后不为空,则返回字典。 + # - ** 返回 **: + # - 如果成功,它将返回“pricedict”字典,其中包含由Unix时间戳索引的历史比特币价格。 + # - ** 注意 **: + # - 方法末尾注释掉的代码 ('#return self.pricedict') 似乎是返回'pricedict'的替代方法,但目前已被禁用。您可以选择取消注释并使用它,而不是显式返回。 + def get_history_price2(self, batch_size=5000): + # #pricedict = {} + # dayt = time.gmtime() + # daystr = time.strftime("%Y", dayt) + # year = int(daystr) + # end_year = year + # while True: + # url = "" + # if end_year != year: + # start_year = end_year + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + # else: + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?after=" + str( + # year) + "-01-01&order=descending" + # + # if end_year != year: + # url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending" + # header_set = {} + # header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY" + # # header_set["Content-Type"] = "application/json" + # print(header_set, url) + # 
response_price = requests.get(url, headers=header_set) + # # print(response_price) + # if response_price.status_code == 200: + # # print(response_price.content) + # priceweb = ujson.loads(response_price.content) + # if "data" in priceweb: + # priceset = priceweb["data"] + # if "values" in priceset: + # valueset = priceset["values"] + # if valueset is not None: + # for supply in valueset: + # dayutc = int(supply[0] / 1000) + # s = supply[1] + # ret_time = time.gmtime(dayutc) + # ret_daystr = time.strftime("%d %b %Y", ret_time) + # ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y"))) + # self.pricedict[str(ret_dayutc)] = float(s) + # # print(s, dayutc, pricedict[str(dayutc)]) + # # break + # else: + # break + # else: + # break + # end_year -= 1 + # time.sleep(2) + # self.pricedict[str(1308528000)]=float(15.5) + # self.pricedict[str(1308614400)] = float(15.05) + # self.pricedict[str(1308700800)] = float(15.39) + # self.pricedict[str(1308787200)] = float(16.7501) + # self.pricedict[str(1308873600)] = float(17.6) + # self.pricedict[str(1308960000)] = float(16.95) + # print(self.pricedict) + # return self.pricedict + + """获取数据库中的 Messari 数据,存入字典""" + db_config = { + "host": "192.168.194.216", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict = {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = """ + SELECT timestamp, price + FROM btc_prices + WHERE source = 'CryptoCompare' + ORDER BY timestamp + LIMIT %s OFFSET %s + """ + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 数据已全部读取 + + return self.pricedict + 
+ # “get_history_price2”方法似乎从另一个数据源检索历史比特币价格,并将它们存储在类实例 ('self.pricedict') 的“pricedict”属性中。以下是该方法的说明: + # - ** 功能性 **: + # - 它初始化与当前日期相关的变量('dayt'、'daystr'、'year'、'end_year')。 + # - 它进入一个循环,从当前年份开始,回到过去,在几年内迭代。 + # - 它根据当前年份构建一个URL,并发送一个GET请求,以从MessariAPI检索历史比特币价格数据。 + # - 如果响应状态码为“200”(表示成功),则解析JSON响应。 + # - 它从JSON响应中提取相关数据(时间戳和价格),并将时间戳转换为Unix时间戳。 + # - 它将价格存储在“pricedict”字典属性中,并以Unix时间戳为键。 + # - 循环继续,直到检索到所有所需年份的数据或遇到错误。 + # - 它打印“pricedict”用于调试目的并返回它。 + # - ** 返回 **: + # - 该方法返回包含由Unix时间戳索引的历史比特币价格的“pricedict”字典。 + # - ** 注意 **: + # - 此方法从MessariAPI获取数据,其结构类似于从Nasdaq API获取数据的“get_history_price”。这两种方法都用于检索历史比特币价格,但来源不同。 + def get_current_utc(self): + curtime = time.gmtime(time.time()) + daystr = time.strftime("%d %b %Y", curtime) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + return dayutc + # “get_current_utc”方法检索当前UTC时间戳。以下是其功能的细分: + # - ** 功能性 **: + # - 它使用'time'模块使用'time.gmtime(time.time())'获取UTC中的当前时间。 + # - 它将当前时间格式化为一个字符串,以“DDMonYYYY”的格式表示日期(例如,“2024年3月15日”)。 + # - 它使用“time.mktime”将格式化的字符串转换回Unix时间戳。 + # - 最后,它返回以UTC表示当前日期的Unix时间戳。 + # - ** 返回 **: + # - 该方法返回一个整数,以UTC表示当天的时间戳。 + # 此方法可用于获取UTC中的当前时间戳,以用于各种目的,例如日志记录、时间戳事件或跨不同系统同步操作。 + def get_current_price(self): + price = 0 + DB_CONFIG = { + "host": "192.168.194.216", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423 + } + connection = pymysql.connect(**DB_CONFIG) + try: + with connection.cursor() as cursor: + for source in ("binance", "coinbase"): + cursor.execute(""" + SELECT price FROM btc_realtime_prices + WHERE source=%s + ORDER BY timestamp DESC + LIMIT 1 + """, (source,)) + row = cursor.fetchone() + if row: + price = float(row[0]) + break + finally: + connection.close() + return price + # price = 0 + # try: + # response_price = requests.get( + # 'https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT') + # prices = ujson.loads(response_price.text) + # price = float(prices["price"]) + # print(response_price.text, price) + # 
response_price.close() + # # print("price", price) + # return price + # except: + # response_price = requests.get("https://api.coinpaprika.com/v1/tickers/btc-bitcoin") + # prices = ujson.loads(response_price.text) + # price = float(prices["quotes"]["USD"]["price"]) + # + # response_price.close() + # return price + # “get_current_price”方法旨在从特定的API端点获取以美元为单位的比特币的当前价格。以下是其功能的说明: + # - ** 功能性 **: + # - 它向指定的URL发送HTTPGET请求:“https: // bitcoinexplorer.org / api / price”。 + # - 如果响应状态代码为200(表示成功),则使用'ujson.loads(response_price.text)'从响应中提取JSON数据。 + # - 然后,它从JSON数据中检索以美元为单位的比特币价格,并将其转换为浮点数。 + # - 最后,它返回提取的价格。 + # - ** 返回 **: + # - 该方法返回一个浮点数,表示比特币的当前美元价格。 + # 此方法可用于获取各种应用程序的实时比特币价格数据,例如财务分析、加密货币交易或向用户显示价格信息。 + def get_day_utc(self, utc_time): + t = time.gmtime(utc_time) + daystr = time.strftime("%d %b %Y", t) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + return dayutc + # “get_day_utc”函数在00:00:00时将给定的UTC时间戳转换为相应的UTC日期(自Unix纪元以来的秒数)。以下是其功能的细分: + # - ** 参数 **: + # - 'utc_time':表示要转换的UTC时间戳的整数。 + # - ** 功能性 **: + # - 它使用“time.gmtime(utc_time)”将提供的UTC时间戳转换为UTC时区的时间元组。 + # - 然后,它使用'time.strftime(“%d %b %Y”, t)'将UTC时间元组格式化为表示日期的字符串,格式为“DDMonYYYY”(例如,“01Jan2024”)。 + # - 最后,它使用'time.mktime(time.strptime(daystr, “%d %b %Y”))'将格式化的日期字符串转换回UTC时间戳。 + # - ** 返回 **: + # - 该函数返回一个整数,表示所提供日期在00:00:00小时的UTC时间戳。 + # 此函数可用于将时间戳转换为相应的UTC日期,这对于各种应用程序(例如数据分析、时间序列操作或基于UTC时间戳生成每日报告)非常有用。 + def rpc_cmd(self, cmd): + if self.rpc is None: + self.rpc = RPC(self.host, self.port, self.user, self.pwd) + + while True: + try: + if cmd == "getblockstats": + getblockstats = self.rpc.blockchain.get_block_stats(self.height) + return getblockstats + elif cmd == "getblock": + getblock = self.rpc.blockchain.get_block(self.height, 3) + return getblock + elif cmd == "getblockcount": + getblockcount = self.rpc.blockchain.get_block_count() + if self.height == getblockcount: + time.sleep(30) + else: + self.height += 1 + print("next height", self.height) + return None + elif cmd == "getmempoolinfo": 
+ getmempoolinfo = self.rpc.blockchain.get_mempool_info() + return getmempoolinfo + else: + return None + except: + time.sleep(1) + self.rpc = RPC(self.host, self.port, self.user, self.pwd) + # “rpc_cmd”方法旨在对比特币核心节点执行各种RPC命令。以下是其功能的细分: + # - ** 参数 **: + # - 'cmd':表示要执行的RPC命令的字符串。 + # - ** 功能性 **: + # - 它首先检查RPC连接是否已建立 ('self.rpc')。如果没有,它将使用提供的主机、端口、用户名和密码初始化RPC连接。 + # - 然后,它进入一个循环以处理潜在的连接错误,并重试执行RPC命令。 + # - 根据'cmd'的值,它执行不同的RPC命令: + # - 如果'cmd'是 “getblockstats”,它会使用'rpc.blockchain.get_block_stats(self.height)'检索当前块高度 ('self.height') 的块统计信息。 + # - 如果'cmd'为 “getblock”,则使用'rpc.blockchain.get_block(self.height, 3)'以详细格式 ('3') 检索当前块高度的块信息。 + # - 如果'cmd'是 “getblockcount”,它会使用'rpc.blockchain.get_block_count()'检索当前块计数。如果当前高度与块计数匹配,则等待30秒,然后再次检查;否则,它会递增高度并返回“None”。 + # - 如果'cmd'是 “getmempoolinfo”,它会使用'rpc.blockchain.get_mempool_info()'检索有关内存池的信息。 + # - 如果无法识别“cmd”,则返回“None”。 + # - ** 返回 **: + # - 该方法返回RPC命令执行的结果,如果发生错误或无法识别命令,则返回“None”。 + # 这种方法提供了一种通过RPC命令与比特币核心节点进行交互的灵活方式,并通过在短时间延迟后重试命令来优雅地处理潜在的连接问题。 + def stat_load(self, redisif, config): + self.rules = config[DEF_CONFIG_RULES] + self.get_history_price() + self.history_prices = self.get_history_price2() + self.current_price = self.get_current_price() + self.current_utc = self.get_current_utc() + self.history_prices[str(self.current_utc)] = self.current_price + self.daily_date = redisif.get_btc_data("daily_date") + if self.daily_date is None: + self.stat_reset() + return + + self.daily_date = int(redisif.get_btc_data("daily_date")) + self.daily_height = int(redisif.get_btc_data("daily_height").decode("utf-8")) + self.height = self.daily_height + self.daily_height_begin = int(redisif.get_btc_data("daily_height_begin").decode("utf-8")) + self.daily_height_end = int(redisif.get_btc_data("daily_height_end").decode("utf-8")) + self.daily_date_string = redisif.get_btc_data("daily_date_string").decode("utf-8") + self.daily_profit = float(redisif.get_btc_data("daily_profit").decode("utf-8")) + self.daily_fees = 
float(redisif.get_btc_data("daily_fees").decode("utf-8")) + self.daily_txs = int(redisif.get_btc_data("daily_txs").decode("utf-8")) + self.daily_new_address = int(redisif.get_btc_data("daily_new_address").decode("utf-8")) + self.daily_total_address = int(redisif.get_btc_data("daily_total_address").decode("utf-8")) + self.daily_new_address_volume = float(redisif.get_btc_data("daily_new_address_volume").decode("utf-8")) + self.daily_active_address = int(redisif.get_btc_data("daily_active_address").decode("utf-8")) + self.daily_send_address = int(redisif.get_btc_data("daily_send_address").decode("utf-8")) + self.daily_receive_address = int(redisif.get_btc_data("daily_receive_address").decode("utf-8")) + self.daily_volume = float(redisif.get_btc_data("daily_volume").decode("utf-8")) + self.daily_eavolume = float(redisif.get_btc_data("daily_eavolume").decode("utf-8")) + self.daily_asol = float(redisif.get_btc_data("daily_asol").decode("utf-8")) + self.daily_eaasol = float(redisif.get_btc_data("daily_eaasol").decode("utf-8")) + self.daily_atxs = float(redisif.get_btc_data("daily_atxs").decode("utf-8")) + self.daily_sopr_buy = float(redisif.get_btc_data("daily_sopr_buy").decode("utf-8")) + self.daily_asopr_buy = float(redisif.get_btc_data("daily_asopr_buy").decode("utf-8")) + self.daily_easopr_buy = float(redisif.get_btc_data("daily_easopr_buy").decode("utf-8")) + self.daily_lthsopr_buy = float(redisif.get_btc_data("daily_lthsopr_buy").decode("utf-8")) + self.daily_sthsopr_buy = float(redisif.get_btc_data("daily_sthsopr_buy").decode("utf-8")) + self.daily_sopr_sell = float(redisif.get_btc_data("daily_sopr_sell").decode("utf-8")) + self.daily_asopr_sell = float(redisif.get_btc_data("daily_asopr_sell").decode("utf-8")) + self.daily_easopr_sell = float(redisif.get_btc_data("daily_easopr_sell").decode("utf-8")) + self.daily_lthsopr_sell = float(redisif.get_btc_data("daily_lthsopr_sell").decode("utf-8")) + self.daily_sthsopr_sell = 
float(redisif.get_btc_data("daily_sthsopr_sell").decode("utf-8")) + self.daily_cdd = float(redisif.get_btc_data("daily_cdd").decode("utf-8")) + self.daily_sacdd = float(redisif.get_btc_data("daily_sacdd").decode("utf-8")) + self.daily_eacdd = float(redisif.get_btc_data("daily_eacdd").decode("utf-8")) + self.daily_cdd_days1 = float(redisif.get_btc_data("daily_cdd_days1").decode("utf-8")) + self.daily_cdd_days7 = float(redisif.get_btc_data("daily_cdd_days7").decode("utf-8")) + self.daily_cdd_days30 = float(redisif.get_btc_data("daily_cdd_days30").decode("utf-8")) + self.daily_cdd_days60 = float(redisif.get_btc_data("daily_cdd_days60").decode("utf-8")) + self.daily_cdd_days90 = float(redisif.get_btc_data("daily_cdd_days90").decode("utf-8")) + self.daily_cdd_days180 = float(redisif.get_btc_data("daily_cdd_days180").decode("utf-8")) + self.daily_cdd_days365 = float(redisif.get_btc_data("daily_cdd_days365").decode("utf-8")) + self.daily_cdd_days730 = float(redisif.get_btc_data("daily_cdd_days730").decode("utf-8")) + self.daily_csupply = float(redisif.get_btc_data("daily_csupply").decode("utf-8")) + self.daily_mintusd = float(redisif.get_btc_data("daily_mintusd").decode("utf-8")) + self.daily_sumcsupply = float(redisif.get_btc_data("daily_sumcsupply").decode("utf-8")) + self.daily_sumcdd = float(redisif.get_btc_data("daily_sumcdd").decode("utf-8")) + self.daily_sumeacdd = float(redisif.get_btc_data("daily_sumeacdd").decode("utf-8")) + self.daily_rprofit = float(redisif.get_btc_data("daily_rprofit").decode("utf-8")) + self.daily_rloss = float(redisif.get_btc_data("daily_rloss").decode("utf-8")) + self.daily_marketcap = float(redisif.get_btc_data("daily_marketcap").decode("utf-8")) + self.daily_rcap = float(redisif.get_btc_data("daily_rcap").decode("utf-8")) + self.daily_earcap = float(redisif.get_btc_data("daily_earcap").decode("utf-8")) + self.daily_mvrv = float(redisif.get_btc_data("daily_mvrv").decode("utf-8")) + '''self.daily_lth_marketcap = 
float(redisif.get_btc_data("daily_lth_marketcap").decode("utf-8")) + self.daily_lth_rcap = float(redisif.get_btc_data("daily_lth_rcap").decode("utf-8")) + self.daily_lth_mvrv = float(redisif.get_btc_data("daily_lth_mvrv").decode("utf-8")) + self.daily_sth_marketcap = float(redisif.get_btc_data("daily_sth_marketcap").decode("utf-8")) + self.daily_sth_rcap = float(redisif.get_btc_data("daily_sth_rcap").decode("utf-8")) + self.daily_sth_mvrv = float(redisif.get_btc_data("daily_sth_mvrv").decode("utf-8"))''' + self.daily_nupl = float(redisif.get_btc_data("daily_nupl").decode("utf-8")) + #self.daily_uprofit = float(redisif.get_btc_data("daily_uprofit").decode("utf-8")) + #self.daily_uloss = float(redisif.get_btc_data("daily_uloss").decode("utf-8")) + #self.daily_lthnupl = float(redisif.get_btc_data("daily_lthnupl").decode("utf-8")) + #self.daily_sthnupl = float(redisif.get_btc_data("daily_sthnupl").decode("utf-8")) + self.daily_price = self.get_price(self.height, self.daily_date) + + #v2 + self.daily_mint = float(redisif.get_btc_data("daily_mint").decode("utf-8")) + self.daily_lth_volume = float(redisif.get_btc_data("daily_lth_volume").decode("utf-8")) + self.daily_frm = float(redisif.get_btc_data("daily_frm").decode("utf-8")) + self.daily_cvdd = float(redisif.get_btc_data("daily_cvdd").decode("utf-8")) + self.daily_nvt_ratio = float(redisif.get_btc_data("daily_nvt_ratio").decode("utf-8")) + self.daily_balanced_price = float(redisif.get_btc_data("daily_balanced_price").decode("utf-8")) + self.daily_velocity = float(redisif.get_btc_data("daily_velocity").decode("utf-8")) + self.daily_mempool_volume = float(redisif.get_btc_data("daily_mempool_volume").decode("utf-8")) + self.daily_realized_price = float(redisif.get_btc_data("daily_realized_price").decode("utf-8")) + self.daily_transferred_price = float(redisif.get_btc_data("daily_transferred_price").decode("utf-8")) + #v2 + self.daily_sumvdd = float(redisif.get_btc_data("daily_sumvdd").decode("utf-8")) + 
self.daily_sumdays = float(redisif.get_btc_data("daily_sumdays").decode("utf-8")) + # “stat_load”方法似乎负责从Redis数据库加载各种统计信息和历史数据。以下是它的作用的细分: + # - ** 参数 **: + # - 'redisif':提供Redis数据库接口功能的类的实例。 + # - 'config':包含配置参数(包括规则)的字典。 + # - ** 功能性 **: + # - 从提供的“config”字典中加载配置规则。 + # - 使用“get_history_price”和“get_history_price2”方法从外部来源检索历史价格数据。 + # - 分别使用“get_current_price”和“get_current_utc”方法检索当前价格和当前UTC时间。 + # - 使用当前价格和UTC时间更新历史价格数据。 + # - 使用特定键从Redis数据库中检索各种每日统计数据和值,并将它们存储在相应的实例属性中。 + # - 根据需要对检索到的数据执行类型转换。 + # - 此方法似乎可以同时处理第1版和第2版的统计数据,如注释掉的部分所示。 + # - ** 返回 **: + # - 此方法没有显式返回值。 + # 总体而言,“stat_load”是一种关键的初始化方法,用于加载在应用程序上下文中进一步分析或处理所需的相关数据和统计数据 + def stat_save(self, redisif): + redisif.set_btc_data("daily_date", self.daily_date) + self.daily_height = self.height + redisif.set_btc_data("daily_height", self.daily_height) + redisif.set_btc_data("daily_height_begin", self.daily_height_begin) + redisif.set_btc_data("daily_height_end", self.daily_height_end) + redisif.set_btc_data("daily_date_string", self.daily_date_string) + redisif.set_btc_data("daily_profit", self.daily_profit) + redisif.set_btc_data("daily_fees", self.daily_fees) + redisif.set_btc_data("daily_txs", self.daily_txs) + redisif.set_btc_data("daily_new_address", self.daily_new_address) + redisif.set_btc_data("daily_total_address", self.daily_total_address) + redisif.set_btc_data("daily_new_address_volume", self.daily_new_address_volume) + redisif.set_btc_data("daily_active_address", self.daily_active_address) + redisif.set_btc_data("daily_send_address", self.daily_send_address) + redisif.set_btc_data("daily_receive_address", self.daily_receive_address) + redisif.set_btc_data("daily_volume", self.daily_volume) + redisif.set_btc_data("daily_eavolume", self.daily_eavolume) + redisif.set_btc_data("daily_asol", self.daily_asol) + redisif.set_btc_data("daily_eaasol", self.daily_eaasol) + redisif.set_btc_data("daily_atxs", self.daily_atxs) + redisif.set_btc_data("daily_sopr_buy", self.daily_sopr_buy) + 
redisif.set_btc_data("daily_asopr_buy", self.daily_asopr_buy) + redisif.set_btc_data("daily_easopr_buy", self.daily_easopr_buy) + redisif.set_btc_data("daily_lthsopr_buy", self.daily_lthsopr_buy) + redisif.set_btc_data("daily_sthsopr_buy", self.daily_sthsopr_buy) + redisif.set_btc_data("daily_sopr_sell", self.daily_sopr_sell) + redisif.set_btc_data("daily_asopr_sell", self.daily_asopr_sell) + redisif.set_btc_data("daily_easopr_sell", self.daily_easopr_sell) + redisif.set_btc_data("daily_lthsopr_sell", self.daily_lthsopr_sell) + redisif.set_btc_data("daily_sthsopr_sell", self.daily_sthsopr_sell) + redisif.set_btc_data("daily_cdd", self.daily_cdd) + redisif.set_btc_data("daily_sacdd", self.daily_sacdd) + redisif.set_btc_data("daily_eacdd", self.daily_eacdd) + redisif.set_btc_data("daily_cdd_days1", self.daily_cdd_days1) + redisif.set_btc_data("daily_cdd_days7", self.daily_cdd_days7) + redisif.set_btc_data("daily_cdd_days30", self.daily_cdd_days30) + redisif.set_btc_data("daily_cdd_days60", self.daily_cdd_days60) + redisif.set_btc_data("daily_cdd_days90", self.daily_cdd_days90) + redisif.set_btc_data("daily_cdd_days180", self.daily_cdd_days180) + redisif.set_btc_data("daily_cdd_days365", self.daily_cdd_days365) + redisif.set_btc_data("daily_cdd_days730", self.daily_cdd_days730) + redisif.set_btc_data("daily_csupply", self.daily_csupply) + redisif.set_btc_data("daily_mintusd", self.daily_mintusd) + redisif.set_btc_data("daily_sumcsupply", self.daily_sumcsupply) + redisif.set_btc_data("daily_sumcdd", self.daily_sumcdd) + redisif.set_btc_data("daily_sumeacdd", self.daily_sumeacdd) + redisif.set_btc_data("daily_rprofit", self.daily_rprofit) + redisif.set_btc_data("daily_rloss", self.daily_rloss) + redisif.set_btc_data("daily_marketcap", self.daily_marketcap) + redisif.set_btc_data("daily_rcap", self.daily_rcap) + redisif.set_btc_data("daily_earcap", self.daily_earcap) + redisif.set_btc_data("daily_mvrv", self.daily_mvrv) + '''redisif.set_btc_data("daily_lth_marketcap", 
        self.daily_lth_marketcap)
        redisif.set_btc_data("daily_lth_rcap", self.daily_lth_rcap)
        redisif.set_btc_data("daily_lth_mvrv", self.daily_lth_mvrv)
        redisif.set_btc_data("daily_sth_marketcap", self.daily_sth_marketcap)
        redisif.set_btc_data("daily_sth_rcap", self.daily_sth_rcap)
        redisif.set_btc_data("daily_sth_mvrv", self.daily_sth_mvrv)'''
        redisif.set_btc_data("daily_nupl", self.daily_nupl)
        #redisif.set_btc_data("daily_uprofit", self.daily_uprofit)
        #redisif.set_btc_data("daily_uloss", self.daily_uloss)
        #redisif.set_btc_data("daily_lthnupl", self.daily_lthnupl)
        #redisif.set_btc_data("daily_sthnupl", self.daily_sthnupl)

        # v2 indicators
        redisif.set_btc_data("daily_mint", self.daily_mint)
        redisif.set_btc_data("daily_lth_volume", self.daily_lth_volume)
        redisif.set_btc_data("daily_frm", self.daily_frm)
        redisif.set_btc_data("daily_cvdd", self.daily_cvdd)
        redisif.set_btc_data("daily_nvt_ratio", self.daily_nvt_ratio)
        redisif.set_btc_data("daily_balanced_price", self.daily_balanced_price)
        redisif.set_btc_data("daily_velocity", self.daily_velocity)
        redisif.set_btc_data("daily_mempool_volume", self.daily_mempool_volume)
        redisif.set_btc_data("daily_realized_price", self.daily_realized_price)
        redisif.set_btc_data("daily_transferred_price", self.daily_transferred_price)
        redisif.set_btc_data("daily_sumvdd", self.daily_sumvdd)
        redisif.set_btc_data("daily_sumdays", self.daily_sumdays)
    # NOTE: stat_save persists every daily_* counter to Redis via redisif.set_btc_data;
    # it is the write-side counterpart of stat_load. Commented-out keys were written by
    # earlier versions and are kept for reference; no explicit return value.

    def stat_reset(self):
        """Reset all cumulative (cross-day) statistics to their initial state.

        Called when no persisted state is found (stat_load returned no daily_date),
        giving every counter a clean starting point. Also triggers stat_daily_reset()
        for the per-day counters. No return value.
        """
        self.daily_date = 0  # working date (UTC day, unix timestamp)
        self.daily_height = 1  # working height, ref. 747376
        self.daily_date_string = ""  # working date string

        self.daily_csupply = 0  # circulating supply
        self.daily_sumcsupply = 0  # cumulative circulating supply, for liveliness
        self.daily_sumcdd = 0  # cumulative coin days destoryed
        self.daily_sumeacdd = 0  # cumulative coin days destoryed (Entity-Adjusted)
        self.daily_marketcap = 0  # market capitalization
        self.daily_rcap = 0  # Realized capitalization
        self.daily_earcap = 0  # Realized capitalization (Entity-Adjusted)
        '''
        self.daily_lth_marketcap = 0  # Long Term Holder market capitalization
        self.daily_lth_rcap = 0  # Long Term Holder Realized capitalization
        self.daily_sth_marketcap = 0  # Short Term Holder market capitalization
        self.daily_sth_rcap = 0  # Short Term Holder Realized capitalization
        '''
        #self.daily_uprofit = 0  # Unrealized Profit
        #self.daily_uloss = 0  # Unrealized Loss
        #self.daily_lthnupl = 0  # Long Term Holder NUPL
        #self.daily_sthnupl = 0  # Short Term Holder NUPL

        self.stat_daily_reset()

        self.daily_rprofit = 0  # realized profit
        self.daily_rloss = 0  # realized loss

        # v2 cumulative metrics
        self.daily_sumvdd = 0   # cumulative value-days destroyed
        self.daily_sumdays = 0  # cumulative day count (recomputed in save_db)

    def stat_daily_reset(self):
        """Zero every per-day counter and clear the per-day Redis address sets.

        Called at start-up (via stat_reset) and at each UTC-day rollover (save_db).
        NOTE(review): reads the module-level global `redisif` for the address count
        — confirm it is always initialized before this runs. No return value.
        """
        self.daily_profit = 0  # Number of UTXOs in Profit
        self.daily_fees = 0  # block fees each day
        self.daily_txs = 0  # block txs exclude coinbase transaction
        self.daily_new_address = 0  # number of new address
        self.daily_total_address = redisif.get_addr_cnt()  # number of address
        self.daily_new_address_volume = 0  # volume of new address
        self.daily_active_address = 0  # number of active address
        self.daily_send_address = 0  # number of send address
        self.daily_receive_address = 0  # number of receive address
        self.daily_volume = 0  # volume for each day
        self.daily_eavolume = 0  # volume for each day (Entity-Adjusted)
        self.daily_asol = 0  # Average Spent Output Lifespan
        self.daily_eaasol = 0  # Average Spent Output Lifespan (Entity-Adjusted)
        self.daily_atxs = 0  # exclude transaction < 1 hour
        self.daily_sopr_buy = 0  # Spent Output Profit Ratio for buyin
        self.daily_asopr_buy = 0  # Spent Output Profit Ratio (exclude < 1 hour) for buyin
        self.daily_easopr_buy = 0  # Spent Output Profit Ratio (Entity-Adjusted) for buyin
        self.daily_lthsopr_buy = 0  # Long-Term Holder SOPR for buyin
        self.daily_sthsopr_buy = 0  # Short-Term Holder SOPR for buyin
        self.daily_sopr_sell = 0  # Spent Output Profit Ratio for sellout
        self.daily_asopr_sell = 0  # Spent Output Profit Ratio (exclude < 1 hour) for sellout
        self.daily_easopr_sell = 0  # Spent Output Profit Ratio (Entity-Adjusted) for sellout
        self.daily_lthsopr_sell = 0  # Long-Term Holder SOPR for sellout
        self.daily_sthsopr_sell = 0  # Short-Term Holder SOPR for sellout
        self.daily_cdd = 0  # Coin Days Destroyed
        self.daily_sacdd = 0  # Supply-Adjusted CDD
        self.daily_eacdd = 0  # Coin Days Destroyed (Entity-Adjusted)
        self.daily_cdd_days1 = 0  # cdd < 1 days
        self.daily_cdd_days7 = 0  # cdd 1..7 days
        self.daily_cdd_days30 = 0  # cdd 7..30 days
        self.daily_cdd_days60 = 0  # cdd 30..60 days
        self.daily_cdd_days90 = 0  # cdd 60..90 days
        self.daily_cdd_days180 = 0  # cdd 90..180 days
        self.daily_cdd_days365 = 0  # cdd 180..365 days
        self.daily_cdd_days730 = 0  # cdd > 365 days (bucket label says 730)

        self.daily_mintusd = 0  # daily coin issuance (in USD), for Puell Multiple

        self.daily_mvrv = 0  # market-value-to-realized-value ratio
        self.daily_lth_mvrv = 0  # Long Term Holder MVRV
        self.daily_sth_mvrv = 0  # Short Term Holder MVRV

        self.daily_nupl = 0  # Net Unrealized Profit/Loss

        self.daily_height_begin = 0
        self.daily_height_end = 0

        self.daily_price = 0
        # clear per-day address activity sets held in Redis
        self.redis.reset_active_address()
        self.redis.reset_send_address()
        self.redis.reset_receive_address()

        # v2 per-day metrics
        self.daily_mint = 0
        self.daily_lth_volume = 0
        self.daily_frm = 0
        self.daily_cvdd = 0
        self.daily_nvt_ratio = 0
        self.daily_balanced_price = 0
        self.daily_realized_price = 0
        self.daily_transferred_price = 0
        self.daily_velocity = 0
        self.daily_mempool_volume = 0

    def stat_cdd(self, prev_value, days):
        """Accumulate Coin Days Destroyed for one spent output.

        cdd = prev_value * days is added to the daily total, the cumulative total,
        and exactly one age bucket (<=1, <=7, <=30, <=60, <=90, <=180, <=365, else).
        """
        cdd = prev_value * days
        self.daily_cdd += cdd
        self.daily_sumcdd += cdd
        if days <= 1:
            self.daily_cdd_days1 += cdd
        elif days <= 7:
            self.daily_cdd_days7 += cdd
        elif days <= 30:
            self.daily_cdd_days30 += cdd
        elif days <= 60:
            self.daily_cdd_days60 += cdd
        elif days <= 90:
            self.daily_cdd_days90 += cdd
        elif days <= 180:
            self.daily_cdd_days180 += cdd
        elif days <= 365:
            self.daily_cdd_days365 += cdd
        else:
            self.daily_cdd_days730 += cdd

    def get_price(self, height, dayutc):
        """Return the USD price for UTC day `dayutc` (cache, then live fetch).

        Tries the in-memory history cache first, falls back to the current price
        for today, and refreshes the whole history on a miss (up to 3 attempts).
        Returns 0 if no price could be resolved.
        """
        price = 0
        dayutcstr = str(dayutc)
        cnt = 0
        while cnt
< 3:
            cnt += 1
            if dayutcstr in self.history_prices:
                # cache hit: price for that UTC day already known
                price = self.history_prices[dayutcstr]
                break
            elif dayutcstr == str(self.current_utc):
                # today's price is fetched live and memoized
                price = self.get_current_price()
                self.current_price = price
                self.history_prices[dayutcstr] = self.current_price
                break
            else:
                # cache miss: refresh full history and current price, then retry
                print("failed get price", height, dayutcstr)
                self.get_history_price()
                self.history_prices = self.get_history_price2()
                self.current_price = self.get_current_price()
                self.current_utc = self.get_current_utc()
                self.history_prices[str(self.current_utc)] = self.current_price
                price = self.history_prices[dayutcstr]
                break

        return price

    def save_db(self, dayutc):
        """Finalize and persist the previous UTC day once the day rolls over.

        Does nothing while `dayutc` still equals the working day. On rollover it
        derives the day's ratio metrics (SOPR variants, dormancy, MVRV, NUPL,
        liveliness, v2 metrics), writes them via the module-global `dbif`
        (update_to_dailyinds / update_to_dailyindsv2), then resets the per-day
        counters and advances the working day/height. No return value.
        """
        if dayutc != self.daily_date:
            print("cmp", dayutc, self.daily_date)
            start = time.time()
            self.daily_sumcsupply += (self.daily_csupply)
            # ratio metrics: each guards its own denominator against zero
            daily_profit_rate = self.daily_profit / self.daily_txs if self.daily_txs != 0 else 0
            daily_sopr = self.daily_sopr_sell / self.daily_sopr_buy if self.daily_sopr_buy != 0 else 0
            daily_sasopr = self.daily_asopr_sell / self.daily_asopr_buy if self.daily_asopr_buy != 0 else 0
            daily_easopr = self.daily_easopr_sell / self.daily_easopr_buy if self.daily_easopr_buy != 0 else 0
            daily_lthsopr = self.daily_lthsopr_sell / self.daily_lthsopr_buy if self.daily_lthsopr_buy != 0 else 0
            daily_sthsopr = self.daily_sthsopr_sell / self.daily_sthsopr_buy if self.daily_sthsopr_buy != 0 else 0
            self.daily_asol = self.daily_asol / self.daily_atxs if self.daily_atxs != 0 else 0
            self.daily_eaasol = self.daily_eaasol / self.daily_atxs if self.daily_atxs != 0 else 0
            self.daily_sacdd = self.daily_cdd / self.daily_csupply if self.daily_csupply != 0 else 0
            self.daily_mvrv = self.daily_marketcap / self.daily_rcap if self.daily_rcap != 0 else 0
            liveliness = self.daily_sumcdd / self.daily_sumcsupply if self.daily_sumcsupply != 0 else 0
            ealiveliness = self.daily_sumeacdd / self.daily_sumcsupply if self.daily_sumcsupply != 0 else 0
            rplrate = self.daily_rprofit - self.daily_rloss
            dormancy = self.daily_cdd / self.daily_volume if self.daily_volume != 0 else 0
            adormancy = dormancy / self.daily_csupply if self.daily_csupply != 0 else 0
            # entity-adjusted volume excludes fees before the dormancy ratio
            self.daily_eavolume -= (self.daily_fees)
            eadormancy = self.daily_eacdd / self.daily_eavolume if self.daily_eavolume != 0 else 0
            nupl = (self.daily_marketcap - self.daily_rcap) / self.daily_marketcap if self.daily_marketcap != 0 else 0

            self.daily_total_address = redisif.get_addr_cnt()  # number of address

            self.daily_height_end = self.height - 1 if self.height > self.daily_height_begin else self.daily_height_begin
            dbif.update_to_dailyinds(self.daily_date, self.daily_height_begin, self.daily_height_end, daily_profit_rate,
                                     self.daily_fees, self.daily_txs, self.daily_new_address, self.daily_total_address,
                                     self.daily_new_address_volume, self.daily_active_address,
                                     self.daily_send_address, self.daily_receive_address, self.daily_volume,
                                     self.daily_eavolume, daily_sopr, daily_sasopr, daily_easopr, daily_lthsopr,
                                     daily_sthsopr,
                                     self.daily_asol, self.daily_eaasol, dormancy, adormancy, eadormancy,
                                     self.daily_cdd, self.daily_sacdd, self.daily_eacdd, self.daily_cdd_days1,
                                     self.daily_cdd_days7, self.daily_cdd_days30, self.daily_cdd_days60,
                                     self.daily_cdd_days90, self.daily_cdd_days180, self.daily_cdd_days365,
                                     self.daily_cdd_days730, self.daily_csupply, self.daily_mintusd,
                                     self.daily_sumcsupply, self.daily_sumcdd, self.daily_sumeacdd,
                                     liveliness, ealiveliness, self.daily_rprofit, self.daily_rloss, rplrate,
                                     self.daily_price, self.daily_marketcap, self.daily_rcap, self.daily_earcap,
                                     self.daily_mvrv, nupl, self.daily_cdd*self.daily_price)

            # v2 metrics
            #self.daily_sumdays = (dayutc - 1231469665)/3600/24
            # NOTE(review): unguarded division — daily_csupply == 0 would raise
            # ZeroDivisionError here, unlike the guarded ratios above. Confirm
            # csupply can never be 0 at rollover.
            self.daily_sumdays = self.daily_sumcdd/self.daily_csupply
            if self.daily_csupply > 0:
                self.daily_realized_price = self.daily_rcap/self.daily_csupply
            if self.daily_sumdays > 0:
                self.daily_transferred_price = self.daily_sumvdd/(self.daily_sumdays*self.daily_csupply)
                self.daily_balanced_price = self.daily_realized_price - self.daily_transferred_price
            if self.daily_fees > 0:
                self.daily_frm = (self.daily_fees + self.daily_mint)/self.daily_fees
            if self.daily_sumdays > 0:
                # 6000000 appears to be a fixed CVDD scaling constant — TODO confirm
                self.daily_cvdd = self.daily_sumvdd/(self.daily_sumdays*6000000)

            #daily_vp = self.daily_volume*self.daily_price
            #if daily_vp > 0:
            if self.daily_volume > 0 and self.daily_price > 0:
                self.daily_nvt_ratio = self.daily_marketcap/self.daily_volume/self.daily_price

            if self.daily_marketcap > 0:
                self.daily_velocity = self.daily_volume*self.daily_price/self.daily_marketcap

            dbif.update_to_dailyindsv2(dayutc, self.daily_height_begin, self.daily_height_end, self.daily_lth_volume, self.daily_frm, self.daily_cvdd, self.daily_realized_price, self.daily_transferred_price, self.daily_balanced_price, self.daily_nvt_ratio, self.daily_velocity)

            # start a fresh day
            self.stat_daily_reset()
            self.daily_date = dayutc
            self.daily_height_begin = self.height
            print("save_db", f'coast:{time.time() - start:.4f}s')

    def stat_block(self, dbif, redisif, config):
        """Main loop: fetch blocks over RPC and accumulate all daily statistics.

        Loads persisted state, then iterates forever: getblockstats/getblock per
        block, rolls the day over via save_db, walks every tx/vin/vout updating
        address, volume, SOPR, CDD and capitalization counters, and saves state
        back to Redis via stat_save. Never returns under normal operation.
        """
        self.redis = redisif
        self.stat_load(redisif, config)
        if self.daily_date is None:
            self.stat_reset()
            return
        print("start height", self.height)
        # return

        self.height += 1

        print("start")

        while True:
            start = time.time()
            blockstats = self.rpc_cmd("getblockstats")
            print("getblockstats", f'coast:{time.time()-start:.4f}s')
            start = time.time()
            #mempoolinfo = self.rpc_cmd("getmempoolinfo")
            blockdetail = self.rpc_cmd("getblock")
            print("getblock", f'coast:{time.time() - start:.4f}s')
            block_start = time.time()
            self.blocktime = blockdetail.get_time()
            # derive the UTC day bucket from the block timestamp
            block_time2 = time.gmtime(self.blocktime)
            daystr = time.strftime("%d %b %Y", block_time2)
            dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
            dayutcstr = str(dayutc)

            if self.daily_date == 0:
                self.daily_date = dayutc

            #print("mempoolinfo", mempoolinfo, mempoolinfo["size"], float(mempoolinfo["total_fee"]))
            #time.sleep(10)
            #dbif.update_to_realtimeindsv2(self.blocktime, int(mempoolinfo["size"]), float(mempoolinfo["total_fee"]))
            #break

            self.save_db(dayutc)

            blocktxs = blockdetail.get_transactions()
            self.height = blockdetail.get_height()
            redisif.set_block_time(self.height, self.blocktime)  # table for block height and time for later query

            mint = blockstats["subsidy"] / 100000000  # satoshi -> BTC
            self.daily_csupply += (mint)
            #self.daily_sumcsupply += (self.daily_csupply)
            self.daily_mint += (mint)

            block_fees = (blockstats["totalfee"] / 100000000)
            self.daily_fees += block_fees
            self.daily_volume += (blockstats["total_out"] / 100000000)

            self.daily_txs += (blockstats["txs"] - 1)  # exclude coinbase tx

            block_price = self.get_price(self.height, dayutc)
            self.daily_mintusd += (block_price * (mint+block_fees))

            self.daily_price = block_price
            self.daily_marketcap = (self.daily_csupply * block_price)
            # genisis_time = redisif.get_block_time(1)
            '''genisis_time = 1231006505
            days = (self.blocktime - genisis_time) / 3600 / 24
            if days >= 155:
                self.daily_lth_marketcap += (self.daily_csupply * block_price)
            else:
                self.daily_sth_marketcap += (self.daily_csupply * block_price)
            '''
            # walk every transaction in the block
            for tx in blocktxs:
                txid = tx.get_txid()
                vins = tx.get_vins()
                vouts = tx.get_vouts()
                vin_hexs = []
                vin_addrs = []
                vin_values = []
                vin_dts = []

                vin_volume = 0
                vin_volume_change = 0

                vin_days_change = 0

                vin_cdd = 0
                vin_cdd_change = 0

                vin_rcap_change = 0

                vin_sopr = 0

                vin_asopr_diff = 0

                vout_change_value = 0

                if not tx.is_coinbase():
                    # ---- inputs: spent outputs ----
                    for vin in vins:
                        # print(self.height, "vin", vin, type(vin))
                        if vin.is_prevout():
                            prevout = vin["prevout"]
                            prev_height = prevout["height"]
                            prev_value = float(prevout["value"])
                            prev_scriptpubkey = prevout["scriptPubKey"]
                            #prev_type = prev_scriptpubkey["type"]
                            prev_hex = prev_scriptpubkey["hex"]
                            prev_address = self.get_vin_address(prev_scriptpubkey, prev_height, txid)
                            prev_blocktime = redisif.get_block_time(prev_height)

                            # debit the sending address balance
                            redisif.save_addr(prev_address, -prev_value)
                            if not redisif.is_send_address(prev_address):
                                self.daily_send_address += 1
                            if not redisif.is_active_address(prev_address):
                                self.daily_active_address += 1

                            days = (self.blocktime - prev_blocktime) / 3600 / 24
                            vin_cdd += (prev_value * days)
                            self.stat_cdd(prev_value, days)

                            # coins older than 155 days count as long-term-holder volume
                            if days >= 155:
                                self.daily_lth_volume += prev_value

                            vin_addrs.append(prev_address)
                            vin_values.append(prev_value)
                            vin_dts.append(prev_blocktime)
                            vin_hexs.append(prev_hex)

                            vin_volume += prev_value
                            # value-weighted age, used for the "< 1 hour" aSOPR filter
                            vin_asopr_diff += ((self.blocktime - prev_blocktime) * prev_value)

                            prevutc = self.get_day_utc(prev_blocktime)
                            prev_price = self.get_price(prev_height, prevutc)
                            vin_sopr += (prev_price * prev_value)

                            self.daily_sumvdd += (prev_value * days * prev_price)

                            self.daily_rcap -= (prev_price * prev_value)

                            # detect a change output paying back to the same script
                            have_change = False
                            for vout in vouts:
                                scriptpubkey = vout.get_script_pubkey()
                                # vout address is same with vin address
                                if scriptpubkey["hex"] == prev_scriptpubkey["hex"]:
                                    vin_rcap_change += (prev_value * prev_price)
                                    vin_volume_change += prev_value
                                    vout_change_value = float(vout.get_value())

                                    days = (self.blocktime - prev_blocktime) / 3600 / 24
                                    vin_days_change += days
                                    vin_cdd_change += (prev_value * days)

                                    have_change = True
                                    break
                            if not have_change:
                                self.daily_earcap -= (prev_price * prev_value)
                                self.daily_eacdd += (prev_value * days)

                    # entity-adjusted volume: input total minus the change output
                    self.daily_eavolume += (vin_volume - vout_change_value)

                    vin_sopr_change = vin_sopr
                    #vin_change_price = 0
                    if vin_rcap_change != 0:
                        if vin_volume_change != 0:
                            vin_change_price = vin_rcap_change / vin_volume_change
                            self.daily_earcap -= (vin_rcap_change - (vin_change_price * vout_change_value))
                            vin_sopr_change -= (vin_change_price * vout_change_value)

                    if vin_cdd_change != 0:
                        if vin_volume_change != 0:
                            vin_change_days = vin_cdd_change / vin_volume_change
                            vin_cdd_change -= (vin_change_days * vout_change_value)

                    self.daily_sumeacdd += (vin_cdd - vin_cdd_change)

                    self.daily_sopr_buy += vin_sopr

                    self.daily_easopr_buy += vin_sopr_change

                    # aSOPR filter: only count inputs older than 1 hour (value-weighted)
                    if vin_asopr_diff >= 3600 * vin_volume:
                        self.daily_asopr_buy += vin_sopr
                        if vin_volume > 0:
                            self.daily_asol += (vin_cdd/vin_volume)
                            self.daily_eaasol += (vin_cdd / vin_volume)
                            if vin_volume_change > 0:
                                self.daily_eaasol -= (vin_cdd_change/vin_volume_change)
                        self.daily_atxs += 1

                    # LTH/STH split at 155 days (value-weighted)
                    if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume:
                        self.daily_lthsopr_buy += vin_sopr
                    else:
                        self.daily_sthsopr_buy += vin_sopr

                    # ---- outputs ----
                    vout_price = block_price
                    vout_volume = 0
                    vout_volume_change = 0
                    vout_sopr = 0
                    vout_sopr_change = 0

                    for vout in vouts:
                        vout_value = float(vout.get_value())
                        vout_volume += vout_value
                        scriptpubkey = vout.get_script_pubkey()
                        vout_type = scriptpubkey["type"]
                        vout_address = self.get_vout_address(scriptpubkey, self.height, txid)
                        vout_hex = scriptpubkey["hex"]

                        if not redisif.is_in_addr(vout_address):
                            self.daily_new_address_volume += vout_value
                            self.daily_new_address += 1
                        redisif.save_addr(vout_address, vout_value)

                        if not redisif.is_receive_address(vout_address):
                            self.daily_receive_address += 1
                        if not redisif.is_active_address(vout_address):
                            self.daily_active_address += 1

                        self.daily_rcap += (vout_price * vout_value)

                        vout_sopr += (vout_price * vout_value)

                        # skip change outputs (same script as an input) for EA metrics
                        have_change = False
                        for cmp in vin_hexs:
                            if cmp == vout_hex:
                                vout_volume_change += vout_value
                                have_change = True
                                break
                        if not have_change:
                            self.daily_earcap += (vout_price * vout_value)
                            vout_sopr_change += (vout_price * vout_value)

                        # flag large outputs (USD threshold from rules) after height 787556
                        if self.height > 787556:
                            if (vout_price * vout_value) >= self.rules["flag_big_vout"]:
                                if vin_volume != 0:
                                    days = vin_cdd / vin_volume
                                    buyin = vin_sopr / vin_volume
                                    sellout = vout_price
                                    profit = 0
                                    if buyin != 0:
                                        profit = (sellout - buyin) / buyin
                                    dbif.update_to_bigamountvout(self.blocktime, txid, \
                                        vout_address, vout.get_n(), vout_type, \
                                        vout_value, self.height, days, buyin, sellout, profit)

                    self.daily_easopr_sell += vout_sopr_change

                    self.daily_sopr_sell += vout_sopr
                    if vin_asopr_diff > 3600 * vin_volume:
                        self.daily_asopr_sell += vout_sopr

                    if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume:
                        self.daily_lthsopr_sell += vout_sopr
                    else:
                        self.daily_sthsopr_sell += vout_sopr

                    # realized profit/loss vs. the value-weighted cost basis
                    if vin_volume != 0:
                        if block_price > (vin_sopr / vin_volume):
                            self.daily_rprofit += (vout_sopr - vin_sopr)
                        if block_price < (vin_sopr / vin_volume):
                            self.daily_rloss += (vin_sopr - vout_sopr)

                        buyin = vin_sopr / vin_volume
                        sellout = vout_sopr / vout_volume if vout_volume != 0 else 0
                        if sellout > buyin:
                            self.daily_profit += 1
                else:
                    # coinbase transaction: outputs only (newly minted coins)
                    for vout in vouts:
                        vout_value = float(vout.get_value())
                        scriptpubkey = vout.get_script_pubkey()
                        vout_address = self.get_vout_address(scriptpubkey, self.height, txid)

                        vout_price = block_price
                        self.daily_rcap += (vout_price * vout_value)
                        self.daily_earcap += (vout_price * vout_value)

                        if not redisif.is_in_addr(vout_address):
                            self.daily_new_address_volume += vout_value
                            self.daily_new_address += 1
                        redisif.save_addr(vout_address, vout_value)

                        if not redisif.is_receive_address(vout_address):
                            self.daily_receive_address += 1
                        if not redisif.is_active_address(vout_address):
                            self.daily_active_address += 1

            # persist working state to Redis after each block
            self.stat_save(redisif)
            print("statblock", f'coast:{time.time() - block_start:.4f}s')
            start = time.time()
            self.rpc_cmd("getblockcount")
            print("getblockcount", f'coast:{time.time() - start:.4f}s')


def init_config(filename):
    """Load the JSON config file and build the DB and Redis interfaces.

    Returns (dbif, redisif, config).
    NOTE(review): the config is loaded but the DB/Redis connection parameters
    below are hard-coded rather than read from it — confirm this is intended.
    NOTE(review): the file is opened without a `with` block; `fconfig.close()`
    is skipped if ujson.load raises.
    """
    fconfig = open(filename)
    config = ujson.load(fconfig)
    fconfig.close()
    dbif = db_if_qt.DbIf(host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb")
    redisif = redis_if_qt.RedisIf(host="127.0.0.1", port=6379, password="", db=0)
    return dbif, redisif, config
# init_config:
**打开JSON配置文件**: +# - 在读取模式下打开指定的 JSON 配置文件(“filename”)。 +# - 使用“ujson.load()”函数从文件加载 JSON 数据。 +# - 关闭文件。 +# 2. **创建数据库和Redis接口**: +# - 使用 'db_if_qt.DbIf“,其中包含从加载的配置中获取的主机、端口、用户、密码和数据库名称等参数。 +# - 使用“redis_if_qt”初始化 Redis 接口 ('redisif')。RedisIf“,其中包含从加载的配置中获取的主机、端口、密码和数据库等参数。 +# 3. **返回**: +# - 返回初始化的数据库接口('dbif')、Redis 接口('redisif')和加载的配置。 +# - **返回值**: +# - 'dbif':初始化的数据库接口对象。 +# - 'redisif':初始化的 Redis 接口对象。 +# - 'config':将配置加载为 Python 字典。 +# 该函数封装了加载配置设置以及初始化数据库和 Redis 接口的过程,为应用程序提供了一种方便的环境设置方式。 +if __name__ == '__main__': + dbif, redisif, config = init_config("btcstat_qt.conf") + #print("init_config") + #redisif.reset_btc_data() + statif = StatIf() + #print("StatIf") + statif.stat_block(dbif, redisif, config) +# 此代码块是脚本的入口点。让我一步一步地解释一下: +# - 'if __name__ == '__main__':':此行确保仅当脚本直接运行时才执行以下代码块,而不是将其作为模块导入到另一个脚本中。 +# - 'dbif, redisif, config = init_config(“btcstat_qt.conf”)':调用 'init_config' 函数初始化数据库接口 ('dbif')、Redis 接口 ('redisif'),并将配置设置加载到 'config' 变量中。这些对象是进一步操作所必需的。 +# - 'statif = StatIf()':它创建“StatIf”类的实例。这表明在代码的其他地方定义了一个名为“StatIf”的类。 +# - 'statif.stat_block(dbif, redisif, config)':调用 'StatIf' 实例的 'stat_block' 方法,传递数据库接口 ('dbif')、Redis 接口 ('redisif') 和加载的配置 ('config')。此方法可能使用提供的接口和配置执行一些与区块链数据相关的统计操作。 +# 总体而言,此脚本初始化必要的组件,例如数据库和 Redis 接口,加载配置设置,然后使用“StatIf”类执行统计操作。 diff --git a/coinbus/db_if_qt.py b/coinbus/db_if_qt.py new file mode 100644 index 0000000..238043e --- /dev/null +++ b/coinbus/db_if_qt.py @@ -0,0 +1,562 @@ +# coding=utf-8 +import datetime + +import pymysql +from loguru import logger +import time + + +class DbIf: + def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb"): + self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname,cursorclass=pymysql.cursors.DictCursor) + + def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity): + with self.conn.cursor() as 
cursor: + print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity) + sql_insert = "REPLACE INTO dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity)) + + ''' + def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)" + cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees)) + ''' + def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, + send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, + asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, + day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, + liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, + nupl,vdd): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr," + sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, 
nupl,vdd" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, + nupl,vdd)) + self.conn.commit() + ''' + def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, + send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, + asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, + day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, + liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, + lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr," + sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, 
earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, + nupl)) + self.conn.commit() + ''' + ''' + # daily daily on-chain volume + def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyvolume`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit 
> 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + # newaddrs + ''' + def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt, + last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, + last_vol): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dayutc, }) + result = cursor.fetchone() + # print(dt_utc) + # print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, `volume=%s`,`txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, ( + last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, + last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc)) + else: + print("insert") + sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees, + last_profit_rate)) + self.conn.commit() + ''' + ''' + def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + 
#print(sql_insert) + #print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height)) + self.conn.commit() + ''' + ''' + def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, + days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid, + block_height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, ( + dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest, + days_largest, days_current, tx_buy_address, txid, block_height)) + self.conn.commit() + ''' + ''' + def update_to_dailycdd(self, dt_utc, cdd): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, cdd)) + self.conn.commit() + ''' + ''' + def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, + day60, day90, day180, day365, day730): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, + day365, + day730)) + self.conn.commit() + ''' + ''' + def update_to_dailysopr(self, 
dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr)) + self.conn.commit() + ''' + ''' + def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, + rloss, rplrate, price, marketcap, rcap, earcap, mvrv): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, + rplrate, + price, marketcap, rcap, earcap, mvrv)) + self.conn.commit() + ''' + # daily volume + ''' + def update_to_dailyvolume(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + ''' + '''with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (volume, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES 
(FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit()''' + ''' + def update_to_dailyfees(self, dt_utc, fees): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, fees)) + self.conn.commit() + ''' + ''' + def import_to_dailyvolume2(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + + def delete_dailyvolume_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyvolume`" + cursor.execute(sql_query) + self.conn.commit() + + + # daily market cap + def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyprice`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + #daily price + 
def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply)) + self.conn.commit() + + def update_to_dailyprice(self, dt_utc, price, volume, change): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change)) + self.conn.commit() + + def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply)) + self.conn.commit() + + def update_dailypricechange(self): + with self.conn.cursor() as cursor: + sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt" + cursor.execute(sql_query) + results = cursor.fetchall() + prevprice = -1 + for result in results: + if prevprice < 0: + prevprice = result["price"] + else: + #print(result["unixdt"], result["price"], result["marketcap"]) + 
try: + change = (result["price"]/prevprice - 1)*100 + except: + change = 0 + #print(csupply) + datestr = result["unixdt"] + logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change)) + sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s' + cursor.execute(sql_update, (str(change), result["unixdt"])) + prevprice = result["price"] + self.conn.commit() + + def delete_dailyprice_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyprice`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_failed_blockvolume(self, height): + with self.conn.cursor() as cursor: + sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `blockamount` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + self.conn.commit() + + #block check --- big amount for vout + def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamountvout`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query 
= sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout, + profit): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, + (datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit)) + self.conn.commit() + + ''' + # block check --- big amount tx + def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamounttx`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_bigamounttx(self, datetime, txid, amount, height): + with self.conn.cursor() as cursor: + sql_insert 
= "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + #print(datetime, txid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, txid)) + self.conn.commit() + + # block check --- per block amount + def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `blockamount`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if amount > 0: + sql_query = sql_query + "WHERE amount > " + str(amount) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_blockamount(self, datetime, blockid, amount, height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, 
%s, %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, blockid)) + self.conn.commit() + + def delete_node_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `blockamount`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamountvout`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamounttx`" + cursor.execute(sql_query) + self.conn.commit() + + def update_realize_cap(self, dayutc, last_rv): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_rv)) + self.conn.commit() + + # daily realize cap + def query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyrcap`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_daily_addr(self, dayutc, last_add_cnt): + with self.conn.cursor() as 
cursor: + sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_add_cnt)) + self.conn.commit() + + def delete_daily_addr(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyaddradd`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_daily_rv(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyrcap`" + cursor.execute(sql_query) + self.conn.commit() + ''' + + def __del__(self): + self.conn.close() diff --git a/coinbus/redis_if_qt.py b/coinbus/redis_if_qt.py new file mode 100644 index 0000000..2c88abf --- /dev/null +++ b/coinbus/redis_if_qt.py @@ -0,0 +1,610 @@ +import time + +from walrus import * +from loguru import logger +class RedisIf: + def __init__(self, host="127.0.0.1", port=6379, password="", db=0): + self.db = Database(host=host, port=port, db=db) + + self.zbalance = self.db.ZSet("balancev2d") + ''' + #realize cap progress + self.rv = self.db.Hash("rv") + #address and balance progress + self.addr = self.db.Hash("addr") + #block volume progress + self.bv = self.db.Hash("bv") + #daily volume progress + self.dv = self.db.Hash("dv") + ''' + ''' + #stat tx progress + self.tx = self.db.Hash("tx") + + #ETH daily contract progress + self.eth_dc = self.db.Hash("ethdc") + + #btc stats fee + self.btc_stats = self.db.Hash("btcstats") + + #btc stats volume + self.btc_volume = self.db.Hash("btcvolume") + + # btc stats cdd + self.btc_cdd = self.db.Hash("btccdd") + + # btc stats cdd days + self.btc_cdd_days = self.db.Hash("btccdddays") + ''' + self.btc_block_time = self.db.Hash("btcblocktimev2d") + ''' + self.btc_sopr = self.db.Hash("btcsopr") + ''' + self.btc_data = self.db.Hash("btc_datav2d") + + self.active_address = self.db.Set("active_addressv2d") + self.send_address = self.db.Set("send_addressv2d") + self.receive_address = 
self.db.Set("receive_addressv2d") + + def get_btc_data(self, key): + value = None + if self.btc_data[key] is not None: + value = self.btc_data[key] + return value + + def set_btc_data(self, key, value): + self.btc_data[key] = value + + def reset_btc_data(self): + self.btc_data.clear() + self.zbalance.clear() + self.btc_block_time.clear() + + ''' + def get_last_btc_sopr(self): + last_sopr_buy = None + last_asopr_buy = None + last_easopr_buy = None + last_lth_sopr_buy = None + last_sth_sopr_buy = None + last_asol = None + last_eaasol = None + + if self.btc_sopr["last_asol"] is not None: + last_asol = self.btc_sopr["last_asol"] + #last_asol = float(self.btc_sopr["last_asol"].decode("utf-8")) + if self.btc_sopr["last_eaasol"] is not None: + last_eaasol = self.btc_sopr["last_eaasol"] + #last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8")) + + + if self.btc_sopr["last_sopr_buy"] is not None: + last_sopr_buy = self.btc_sopr["last_sopr_buy"] + #last_sopr_buy = float(self.btc_sopr["last_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_asopr_buy"] is not None: + last_asopr_buy = self.btc_sopr["last_asopr_buy"] + #last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8")) + if self.btc_sopr["last_easopr_buy"] is not None: + last_easopr_buy = self.btc_sopr["last_easopr_buy"] + #last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8")) + if self.btc_sopr["last_lth_sopr_buy"] is not None: + last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"] + #last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_buy"] is not None: + last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"] + #last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8")) + + last_sopr_sell = None + last_asopr_sell = None + last_easopr_sell = None + last_lth_sopr_sell = None + last_sth_sopr_sell = None + if self.btc_sopr["last_sopr_sell"] is not None: + last_sopr_sell = 
self.btc_sopr["last_sopr_sell"] + # last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_asopr_sell"] is not None: + last_asopr_sell = self.btc_sopr["last_asopr_sell"] + # last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8")) + if self.btc_sopr["last_easopr_sell"] is not None: + last_easopr_sell = self.btc_sopr["last_easopr_sell"] + # last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8")) + if self.btc_sopr["last_lth_sopr_sell"] is not None: + last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"] + # last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_sell"] is not None: + last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"] + # last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8")) + + return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell + + def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell): + self.btc_sopr["last_asol"] = last_asol + self.btc_sopr["last_eaasol"] = last_eaasol + + self.btc_sopr["last_sopr_buy"] = last_sopr_buy + self.btc_sopr["last_asopr_buy"] = last_asopr_buy + self.btc_sopr["last_easopr_buy"] = last_easopr_buy + self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy + self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy + self.btc_sopr["last_sopr_sell"] = last_sopr_sell + self.btc_sopr["last_asopr_sell"] = last_asopr_sell + self.btc_sopr["last_easopr_sell"] = last_easopr_sell + self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell + self.btc_sopr["last_sth_sopr_sell"] = last_sth_sopr_sell + ''' + def get_block_time(self, height): + block_time = None 
+ height_str = str(height) + if self.btc_block_time[height_str] is not None: + block_time = int(self.btc_block_time[height_str].decode("utf-8")) + #block_time = int(self.btc_block_time[height_str].decode("utf-8")) + + return block_time + + def set_block_time(self, height, ts): + height_str = str(height) + self.btc_block_time[height_str] = ts + + ''' + def get_last_btc_cdd_days(self): + last_cdd = None + last_acdd = None + last_eacdd = None + last_cdd_day1= None + last_cdd_day7 = None + last_cdd_day30 = None + last_cdd_day60 = None + last_cdd_day90 = None + last_cdd_day180 = None + last_cdd_day365 = None + last_cdd_day730 = None + + last_date = None + last_height = None + last_date_str = None + + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_acdd"] is not None: + last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8")) + if self.btc_cdd["last_eacdd"] is not None: + last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day1"] is not None: + last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day7"] is not None: + last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day30"] is not None: + last_cdd_day30 = float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day60"] is not None: + last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day90"] is not None: + last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day180"] is not None: + last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day365"] is not None: + last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day730"] is not 
None: + last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8")) + if self.btc_cdd_days["last_date"] is not None: + last_date = int(self.btc_cdd_days["last_date"].decode("utf-8")) + if self.btc_cdd_days["last_height"] is not None: + last_height = int(self.btc_cdd_days["last_height"].decode("utf-8")) + if self.btc_cdd_days["last_date_str"] is not None: + last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8") + return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str + + def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + self.btc_cdd["last_acdd"] = acdd + self.btc_cdd["last_eacdd"] = eacdd + self.btc_cdd_days["last_cdd_day1"] = day1 + self.btc_cdd_days["last_cdd_day7"] = day7 + self.btc_cdd_days["last_cdd_day30"] = day30 + self.btc_cdd_days["last_cdd_day60"] = day60 + self.btc_cdd_days["last_cdd_day90"] = day90 + self.btc_cdd_days["last_cdd_day180"] = day180 + self.btc_cdd_days["last_cdd_day365"] = day365 + self.btc_cdd_days["last_cdd_day730"] = day730 + self.btc_cdd_days["last_date"] = dt + self.btc_cdd_days["last_height"] = height + self.btc_cdd_days["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_cdd(self): + last_cdd = None + last_date = None + last_height = None + last_date_str = None + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_date"] is not None: + last_date = int(self.btc_cdd["last_date"].decode("utf-8")) + if self.btc_cdd["last_height"] is not None: + last_height = int(self.btc_cdd["last_height"].decode("utf-8")) + if self.btc_cdd["last_date_str"] is not None: + last_date_str = self.btc_cdd["last_date_str"].decode("utf-8") + return last_cdd, last_date, last_height, last_date_str + 
+ def set_last_btc_cdd(self, cdd, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + self.btc_cdd["last_date"] = dt + self.btc_cdd["last_height"] = height + self.btc_cdd["last_date_str"] = dtstr + + def get_last_btc_volume(self): + last_volume = None + last_date = None + last_height = None + last_date_str = None + if self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_volume["last_date"] is not None: + last_date = int(self.btc_volume["last_date"].decode("utf-8")) + if self.btc_volume["last_height"] is not None: + last_height = int(self.btc_volume["last_height"].decode("utf-8")) + if self.btc_volume["last_date_str"] is not None: + last_date_str = self.btc_volume["last_date_str"].decode("utf-8") + return last_volume, last_date, last_height, last_date_str + + def set_last_btc_volume(self, volume, dt, height, dtstr): + self.btc_volume["last_volume"] = volume + self.btc_volume["last_date"] = dt + self.btc_volume["last_height"] = height + self.btc_volume["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_stats(self): + last_fees = None + last_date = None + last_height = None + last_date_str = None + last_volume = None + if self.btc_stats["last_fees"] is not None: + last_fees = float(self.btc_stats["last_fees"].decode("utf-8")) + if self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_stats["last_date"] is not None: + last_date = int(self.btc_stats["last_date"].decode("utf-8")) + if self.btc_stats["last_height"] is not None: + last_height = int(self.btc_stats["last_height"].decode("utf-8")) + if self.btc_stats["last_date_str"] is not None: + last_date_str = self.btc_stats["last_date_str"].decode("utf-8") + return last_fees, last_volume, last_date, last_height, last_date_str + + def set_last_btc_stats(self, fees, volume, dt, height, dtstr): + self.btc_stats["last_fees"] = fees + self.btc_volume["last_volume"] = 
volume + self.btc_stats["last_date"] = dt + self.btc_stats["last_height"] = height + self.btc_stats["last_date_str"] = dtstr + + + def get_last_eth_dc(self): + last_date = None + last_height = None + last_date_str = None + if self.eth_dc["last_date"] is not None: + last_date = int(self.eth_dc["last_date"].decode("utf-8")) + if self.eth_dc["last_height"] is not None: + last_height = int(self.eth_dc["last_height"].decode("utf-8")) + if self.eth_dc["last_date_str"] is not None: + last_date_str = self.eth_dc["last_date_str"].decode("utf-8") + return last_date, last_height, last_date_str + + def set_last_eth_dc(self, dt, height, dtstr): + self.eth_dc["last_date"] = dt + self.eth_dc["last_height"] = height + self.eth_dc["last_date_str"] = dtstr + ''' + ''' + def get_last_dv(self): + last_dv = None + last_date = None + last_height = None + last_date_str = None + if self.dv["last_dv"] is not None: + last_dv = float(self.dv["last_dv"].decode("utf-8")) + if self.dv["last_date"] is not None: + last_date = int(self.dv["last_date"].decode("utf-8")) + if self.dv["last_height"] is not None: + last_height = int(self.dv["last_height"].decode("utf-8")) + if self.dv["last_date_str"] is not None: + last_date_str = self.dv["last_date_str"].decode("utf-8") + return last_dv, last_date, last_height, last_date_str + + def set_last_dv(self, dv, dt, height, dtstr): + self.dv["last_dv"] = dv + self.dv["last_date"] = dt + self.dv["last_height"] = height + self.dv["last_date_str"] = dtstr + + def get_last_bv(self): + last_height = None + if self.bv["last_height"] is not None: + last_height = int(self.bv["last_height"].decode("utf-8")) + return last_height + + def set_last_bv(self, height): + self.bv["last_height"] = height + ''' + ''' + def get_last_ind(self): + last_csupply = None + last_mintusd = None + last_sumcsupply = None + last_sumcdd = None + last_sumeacdd = None + last_rprofit = None + last_rloss = None + last_marketcap = None + last_rcap = None + last_mvrv = None + + last_earcap = 
None + if self.tx["last_csupply"] is not None: + last_csupply = float(self.tx["last_csupply"].decode("utf-8")) + if self.tx["last_mintusd"] is not None: + last_mintusd = float(self.tx["last_mintusd"].decode("utf-8")) + if self.tx["last_sumcsupply"] is not None: + last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8")) + if self.tx["last_sumcdd"] is not None: + last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8")) + if self.tx["last_sumeacdd"] is not None: + last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8")) + if self.tx["last_rprofit"] is not None: + last_rprofit = float(self.tx["last_rprofit"].decode("utf-8")) + if self.tx["last_rloss"] is not None: + last_rloss = float(self.tx["last_rloss"].decode("utf-8")) + if self.tx["last_marketcap"] is not None: + last_marketcap = float(self.tx["last_marketcap"].decode("utf-8")) + if self.tx["last_rcap"] is not None: + last_rcap = float(self.tx["last_rcap"].decode("utf-8")) + if self.tx["last_earcap"] is not None: + last_earcap = float(self.tx["last_earcap"].decode("utf-8")) + if self.tx["last_mvrv"] is not None: + last_mvrv = float(self.tx["last_mvrv"].decode("utf-8")) + + + return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv + + def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv): + self.tx["last_csupply"] = last_csupply + self.tx["last_mintusd"] = last_mintusd + self.tx["last_sumcsupply"] = last_sumcsupply + self.tx["last_sumcdd"] = last_sumcdd + self.tx["last_sumeacdd"] = last_sumeacdd + self.tx["last_rprofit"] = last_rprofit + self.tx["last_rloss"] = last_rloss + self.tx["last_marketcap"] = last_marketcap + self.tx["last_rcap"] = last_rcap + self.tx["last_earcap"] = last_earcap + self.tx["last_mvrv"] = last_mvrv + + + def get_last_tx(self): + last_profit = None + 
last_fees = None + last_newaddr_cnt = None + last_newaddr_vol = None + last_active_addr_cnt = None + last_tx_addr_cnt = None + last_rx_addr_cnt = None + last_vol_change = None + last_vol = None + last_avol = None + last_date = None + last_height = None + last_date_str = None + last_txs = None + last_eatxs = None + if self.tx["last_profit_rate"] is not None: + last_profit = int(self.tx["last_profit"].decode("utf-8")) + if self.tx["last_fees"] is not None: + last_fees = int(self.tx["last_fees"].decode("utf-8")) + if self.tx["last_txs"] is not None: + last_txs = int(self.tx["last_txs"].decode("utf-8")) + if self.tx["last_eatxs"] is not None: + last_eatxs = int(self.tx["last_eatxs"].decode("utf-8")) + if self.tx["last_newaddr_cnt"] is not None: + last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8")) + if self.tx["last_newaddr_vol"] is not None: + last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8")) + if self.tx["last_active_addr_cnt"] is not None: + last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8")) + if self.tx["last_tx_addr_cnt"] is not None: + last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8")) + if self.tx["last_rx_addr_cnt"] is not None: + last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8")) + if self.tx["last_vol_change"] is not None: + last_vol_change = float(self.tx["last_vol_change"].decode("utf-8")) + if self.tx["last_vol"] is not None: + last_vol = float(self.tx["last_vol"].decode("utf-8")) + if self.tx["last_avol"] is not None: + last_avol = float(self.tx["last_avol"].decode("utf-8")) + if self.tx["last_date"] is not None: + last_date = int(self.tx["last_date"].decode("utf-8")) + if self.tx["last_height"] is not None: + last_height = int(self.tx["last_height"].decode("utf-8")) + if self.tx["last_date_str"] is not None: + last_date_str = self.tx["last_date_str"].decode("utf-8") + return last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt, last_newaddr_vol, 
last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, last_vol, last_avol, last_date, last_height, last_date_str + + def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt, newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt, vol_change, vol, avol, dt, height, dtstr): + self.tx["last_profit"] = last_profit + self.tx["last_fees"] = last_fees + self.tx["last_txs"] = last_txs + self.tx["last_eatxs"] = last_eatxs + self.tx["last_newaddr_cnt"] = newaddr_cnt + self.tx["last_newaddr_vol"] = newaddr_vol + self.tx["last_active_addr_cnt"] = active_addr_cnt + self.tx["last_tx_addr_cnt"] = tx_addr_cnt + self.tx["last_rx_addr_cnt"] = rx_addr_cnt + self.tx["last_vol_change"] = vol_change + self.tx["last_vol"] = vol + self.tx["last_avol"] = avol + self.tx["last_date"] = dt + self.tx["last_height"] = height + self.tx["last_date_str"] = dtstr + ''' + ''' + def get_last_addr(self): + last_daily_cnt = None + last_date = None + last_height = None + last_date_str = None + if self.addr["last_daily_cnt"] is not None: + last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8")) + if self.addr["last_date"] is not None: + last_date = int(self.addr["last_date"].decode("utf-8")) + if self.addr["last_height"] is not None: + last_height = int(self.addr["last_height"].decode("utf-8")) + if self.addr["last_date_str"] is not None: + last_date_str = self.addr["last_date_str"].decode("utf-8") + return last_daily_cnt, last_date, last_height, last_date_str + + def set_last_addr(self, daily_cnt, dt, height, dtstr): + self.addr["last_daily_cnt"] = daily_cnt + self.addr["last_date"] = dt + self.addr["last_height"] = height + self.addr["last_date_str"] = dtstr + ''' + + def is_active_address(self, address): + result = address in self.active_address + if not result: + self.active_address.add(address) + return result + + def reset_active_address(self): + self.active_address.clear() + + def get_active_address_cnt(self): + return len(self.active_address) 
+ + def is_send_address(self, address): + result = address in self.send_address + if not result: + self.send_address.add(address) + return result + + def reset_send_address(self): + self.send_address.clear() + + def get_send_address_cnt(self): + return len(self.send_address) + + def is_receive_address(self, address): + result = address in self.receive_address + if not result: + self.receive_address.add(address) + return result + + def reset_receive_address(self): + self.receive_address.clear() + + def get_receive_address_cnt(self): + return len(self.receive_address) + + def save_addr(self, address, balance): + new_balance = balance + if address in self.zbalance: + new_balance = self.zbalance.score(address) + balance + #print("update", self.zbalance.score(address), balance, new_balance) + #time.sleep(10) + if new_balance < 0.01: + del self.zbalance[address] + #print("check exist", address, address in self.zbalance) + #time.sleep(10) + return + self.zbalance.add({address: new_balance}) + + ''' + def delete_addr(self, config): + self.addr.clear() + self.zbalance.clear() + ''' + def is_in_addr(self, address): + return address in self.zbalance + + def get_addr_cnt(self): + return len(self.zbalance) + + ''' + def delete_rv(self, config): + self.rv.clear() + + def get_last_rv(self): + last_rv = None + last_date = None + last_height = None + last_date_str = None + if self.rv["last_rv"] is not None: + last_rv = float(self.rv["last_rv"].decode("utf-8")) + if self.rv["last_date"] is not None: + last_date = int(self.rv["last_date"].decode("utf-8")) + if self.rv["last_height"] is not None: + last_height = int(self.rv["last_height"].decode("utf-8")) + if self.rv["last_date_str"] is not None: + last_date_str = self.rv["last_date_str"].decode("utf-8") + return last_rv, last_date, last_height, last_date_str + + def set_last_rv(self, rv, dt, height, dtstr): + self.rv["last_rv"] = rv + self.rv["last_date"] = dt + self.rv["last_height"] = height + self.rv["last_date_str"] = dtstr + 
''' + + def get_all_address(self): + return self.zbalance.keys() + + def delete_address_data(self, config): + self.zbalance.clear() + + ''' + def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0): + if len(address) > 0: + results = [] + result = {} + result["address"] = address + balance = self.zbalance.score(address) + print(balance) + if balance is not None: + result["balance"] = balance + results.append(result) + return results + + match_result = None + if start_balance > 0: + if end_balance > 0: + match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False) + else: + match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False) + else: + if end_balance > 0: + match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False) + + results = [] + if match_result is not None: + #print(match_result) + for addr, balance2 in match_result: + address = addr.decode('utf-8') + result = {} + result["address"] = address + result["balance"] = balance2 + results.append(result) + if limit > 0 and len(results) >= limit: + break + return results + ''' + + + + + + + + + + +