From 676ad724b193d5be4bb04daede5cace6695c8b55 Mon Sep 17 00:00:00 2001
From: fengche <1158629543@qq.com>
Date: Fri, 16 Jan 2026 11:14:55 +0800
Subject: [PATCH] feat: coinbus-related data-collection code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 lyq/Binance_fapi.py                        |  119 ++
 lyq/CoinmarketCap.py                       |  115 ++
 lyq/Macroeconomic_COVITGDP_v2.py           |   83 +
 lyq/Macroeconomic_CPI_NSA_v2.py            |  101 ++
 lyq/Macroeconomic_FARBODI.py               |  293 ++++
 lyq/Macroeconomic_FBI_v2.py                |   98 ++
 lyq/Macroeconomic_FER.py                   |   89 +
 lyq/Macroeconomic_InterestRate.py          |  252 +++
 lyq/Macroeconomic_Loan.py                  |  102 ++
 lyq/Macroeconomic_MoneyStockMeasures.py    |  216 +++
 lyq/Macroeconomic_PCE_v3.py                |  117 ++
 lyq/Macroeconomic_SAALOCBITUSS_ASSET.py    | 1536 ++++++++++++++++
 lyq/Macroeconomic_USTreasuriesSize.py      |   85 +
 lyq/Macroeconomic_USTreasuriesYields_v2.py |   96 +
 lyq/Macroeconomic_WEI.py                   |  105 ++
 lyq/README.md                              |   40 +-
 lyq/arh999_lyq.py                          | 1005 +++++++++++
 lyq/arh999eth_lyq.py                       |  504 ++++++
 lyq/btc24h_db_if.py                        |  600 +++++++
 lyq/btc24h_redis_if.py                     |  613 +++++++
 lyq/btc24h_stats.py                        | 1110 ++++++++++++
 lyq/btc_price_fetcher.py                   |   85 +
 lyq/btc_prices.py                          |  141 ++
 lyq/btc_stats_qt.py                        | 1219 +++++++++++++
 lyq/btc_update.py                          |  125 ++
 lyq/btc_utxos_lyq2.py                      | 1838 ++++++++++++++++++++
 lyq/btc_utxos_lyq3.py                      | 1838 ++++++++++++++++++++
 lyq/btc_utxos_update_lyq3.py               |   51 +
 lyq/check_order_lyq.py                     |  184 ++
 lyq/check_zone_lyq.py                      |  146 ++
 lyq/db_if_qt.py                            |  562 ++++++
 lyq/exchangeRate_lyq.py                    |  117 ++
 lyq/nochain_eth_lyq.py                     |  191 ++
 lyq/nochain_lyq_utc08.py                   |   27 +
 lyq/nochain_lyq_v2.py                      |  736 ++++++++
 lyq/nochain_update_lyq.py                  |   87 +
 lyq/redis_if_qt.py                         |  610 +++++++
 37 files changed, 15231 insertions(+), 5 deletions(-)
 create mode 100644 lyq/Binance_fapi.py
 create mode 100644 lyq/CoinmarketCap.py
 create mode 100644 lyq/Macroeconomic_COVITGDP_v2.py
 create mode 100644 lyq/Macroeconomic_CPI_NSA_v2.py
 create mode 100644 lyq/Macroeconomic_FARBODI.py
 create mode 100644 lyq/Macroeconomic_FBI_v2.py
 create mode 100644 lyq/Macroeconomic_FER.py
 create mode 100644 lyq/Macroeconomic_InterestRate.py
 create mode 100644 lyq/Macroeconomic_Loan.py
 create mode 100644 lyq/Macroeconomic_MoneyStockMeasures.py
 create mode 100644 lyq/Macroeconomic_PCE_v3.py
 create mode 100644 lyq/Macroeconomic_SAALOCBITUSS_ASSET.py
 create mode 100644 lyq/Macroeconomic_USTreasuriesSize.py
 create mode 100644 lyq/Macroeconomic_USTreasuriesYields_v2.py
 create mode 100644 lyq/Macroeconomic_WEI.py
 create mode 100644 lyq/arh999_lyq.py
 create mode 100644 lyq/arh999eth_lyq.py
 create mode 100644 lyq/btc24h_db_if.py
 create mode 100644 lyq/btc24h_redis_if.py
 create mode 100644 lyq/btc24h_stats.py
 create mode 100644 lyq/btc_price_fetcher.py
 create mode 100644 lyq/btc_prices.py
 create mode 100644 lyq/btc_stats_qt.py
 create mode 100644 lyq/btc_update.py
 create mode 100644 lyq/btc_utxos_lyq2.py
 create mode 100644 lyq/btc_utxos_lyq3.py
 create mode 100644 lyq/btc_utxos_update_lyq3.py
 create mode 100644 lyq/check_order_lyq.py
 create mode 100644 lyq/check_zone_lyq.py
 create mode 100644 lyq/db_if_qt.py
 create mode 100644 lyq/exchangeRate_lyq.py
 create mode 100644 lyq/nochain_eth_lyq.py
 create mode 100644 lyq/nochain_lyq_utc08.py
 create mode 100644 lyq/nochain_lyq_v2.py
 create mode 100644 lyq/nochain_update_lyq.py
 create mode 100644 lyq/redis_if_qt.py

diff --git a/lyq/Binance_fapi.py b/lyq/Binance_fapi.py
new file mode 100644
index 0000000..f947594
--- /dev/null
+++ b/lyq/Binance_fapi.py
@@ -0,0 +1,119 @@
+import pymysql
+import requests
+import time
+import schedule
+from datetime import datetime
+
+# MySQL connection helper
+def connect_to_db():
+    return pymysql.connect(
+        host="127.0.0.1",              # database host
+        user="root",                   # database user
+        password="2GS@bPYcgiMyL14A",   # database password
+        database="binance_api",        # database name
+        port=4423                      # database port
+    )
+
+# Execute a SQL statement and commit it
+def execute_query(query, params=None):
+    conn = connect_to_db()  # open a connection
+    with conn.cursor() as cursor:
+        cursor.execute(query, params)  # run the SQL statement
+    conn.commit()  # commit the transaction
+    conn.close()   # close the connection
+
+# Convert a millisecond timestamp to Beijing time
+def bj_time(timestamp):
+    # Beijing time is UTC+8, so shift the epoch by 8 hours before formatting
+    return datetime.utcfromtimestamp(timestamp / 1000 + 8 * 3600).strftime('%Y-%m-%d %H:%M:%S')
+
+# Binance API client
+class BinanceAPI:
+    base_url = "https://fapi.binance.com"  # Binance futures base URL
+
+    @staticmethod
+    def get(endpoint, params=None):
+        # send a GET request to the Binance API
+        response = requests.get(f"{BinanceAPI.base_url}{endpoint}", params=params)
+        return response.json()  # return the decoded JSON response
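# --- Editor's sketch (not part of the original patch) ---
# BinanceAPI.get above sends requests without a timeout or retry, so one
# transient network error raises inside a scheduled job and kills the loop.
# A minimal hedged wrapper; the name, retry count, and backoff are
# illustrative assumptions:
import time
import requests

def get_with_retry(url, params=None, retries=3, backoff=5, timeout=10):
    for attempt in range(retries):
        try:
            resp = requests.get(url, params=params, timeout=timeout)
            resp.raise_for_status()  # surface HTTP errors as exceptions
            return resp.json()
        except requests.RequestException:
            if attempt == retries - 1:
                raise  # give up after the last attempt
            time.sleep(backoff * (attempt + 1))  # linear backoff between attempts
# --- end sketch ---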
+# Task 1: fetch funding rates and insert them into the database
+def funding_rate():
+    # funding-rate history for BTC and ETH
+    btc_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "BTCUSDT"})
+    eth_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "ETHUSDT"})
+
+    # prepare the INSERT statements
+    btc_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
+                 VALUES ("BTCUSDT", %s, %s)"""
+    eth_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
+                 VALUES ("ETHUSDT", %s, %s)"""
+
+    # insert the most recent observation for each symbol
+    execute_query(btc_sql, (btc_data[-1]['fundingTime'], btc_data[-1]['fundingRate']))
+    execute_query(eth_sql, (eth_data[-1]['fundingTime'], eth_data[-1]['fundingRate']))
+
+# Task 2: fetch open interest and insert it into the database
+def open_interest():
+    # open-interest figures for BTC and ETH
+    btc_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "BTCUSDT"})
+    eth_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "ETHUSDT"})
+
+    # prepare the INSERT statements
+    btc_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
+                 VALUES ("BTCUSDT", %s, %s)"""
+    eth_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
+                 VALUES ("ETHUSDT", %s, %s)"""
+
+    # insert the current values
+    execute_query(btc_sql, (btc_data['time'], btc_data['openInterest']))
+    execute_query(eth_sql, (eth_data['time'], eth_data['openInterest']))
+
+# Task 3: fetch the taker long/short ratio and insert it into the database
+def long_short_ratio(interval):
+    # long/short ratio for BTC and ETH at the given period
+    btc_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
+        "symbol": "BTCUSDT", "period": interval
+    })
+    eth_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
+        "symbol": "ETHUSDT", "period": interval
+    })
+
+    # prepare the INSERT statements (one table per period)
+    btc_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
+                  VALUES ("BTCUSDT", %s, %s, %s, %s)"""
+    eth_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
+                  VALUES ("ETHUSDT", %s, %s, %s, %s)"""
+
+    # insert the most recent observation for each symbol
+    execute_query(btc_sql, (btc_data[-1]['timestamp'], btc_data[-1]['buyVol'], btc_data[-1]['sellVol'], btc_data[-1]['buySellRatio']))
+    execute_query(eth_sql, (eth_data[-1]['timestamp'], eth_data[-1]['buyVol'], eth_data[-1]['sellVol'], eth_data[-1]['buySellRatio']))
+
+# Register the scheduled jobs
+def schedule_jobs():
+    # funding-rate task at 00:01, 08:01, and 16:01 every day
+    schedule.every().day.at("00:01").do(funding_rate)
+    schedule.every().day.at("08:01").do(funding_rate)
+    schedule.every().day.at("16:01").do(funding_rate)
+
+    # open-interest task at 15/25/35/45/55 seconds past each minute
+    schedule.every().minute.at(":15").do(open_interest)
+    schedule.every().minute.at(":25").do(open_interest)
+    schedule.every().minute.at(":35").do(open_interest)
+    schedule.every().minute.at(":45").do(open_interest)
+    schedule.every().minute.at(":55").do(open_interest)
+
+    # long/short-ratio task at 15 seconds past each minute, for periods 5m, 15m, 30m, etc.
+    intervals = ["5m", "15m", "30m", "1h", "2h", "4h", "6h", "12h", "1d"]
+    for interval in intervals:
+        schedule.every().minute.at(":15").do(long_short_ratio, interval=interval)
+
+# Run the scheduler
+def run():
+    schedule_jobs()  # register the jobs
+    while True:
+        schedule.run_pending()  # run any jobs that are due
+        time.sleep(1)           # poll once per second
+
+if __name__ == "__main__":
+    run()  # start the scheduler
\ No newline at end of file
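The three task functions above differ only in the symbol and target table, so each new symbol means another duplicated branch. A hedged sketch of a parameterized variant, reusing the BinanceAPI and execute_query helpers defined in Binance_fapi.py above (the function name is illustrative, not part of the original patch):

def record_funding_rate(symbol):
    # fetch the funding-rate history and store the newest entry
    data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": symbol})
    latest = data[-1]
    sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
             VALUES (%s, %s, %s)"""
    execute_query(sql, (symbol, latest['fundingTime'], latest['fundingRate']))

def funding_rate():
    for symbol in ("BTCUSDT", "ETHUSDT"):
        record_funding_rate(symbol)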
diff --git a/lyq/CoinmarketCap.py b/lyq/CoinmarketCap.py
new file mode 100644
index 0000000..4c22afd
--- /dev/null
+++ b/lyq/CoinmarketCap.py
@@ -0,0 +1,115 @@
+import requests
+import pymysql
+import time
+from apscheduler.schedulers.blocking import BlockingScheduler
+from datetime import datetime, timedelta
+
+# API key and request headers
+API_KEY = "83bf85c1-1bd8-426a-a043-6b67dad8bda5"
+headers = { "X-CMC_PRO_API_KEY": API_KEY }
+base_url = "https://pro-api.coinmarketcap.com"
+url = f"{base_url}/v1/cryptocurrency/listings/latest"
+
+# MySQL connection settings
+db_config = {
+    'host': '127.0.0.1',             # database host
+    'user': 'root',                  # database user
+    'password': '2GS@bPYcgiMyL14A',  # database password
+    'database': 'coinmarketcap',     # database name
+    'port': 4423                     # database port
+}
+
+# Create the table if it does not exist
+def create_table():
+    connection = pymysql.connect(**db_config)  # connect to the database
+    cursor = connection.cursor()               # open a cursor
+
+    # table definition
+    create_table_query = """
+    CREATE TABLE IF NOT EXISTS marketInfo (
+        id INT NOT NULL PRIMARY KEY AUTO_INCREMENT,  # auto-increment id
+        update_time DATETIME NOT NULL,               # update time
+        symbol CHAR(15) NOT NULL,                    # ticker symbol
+        ranks INT NOT NULL,                          # CMC rank
+        price DOUBLE NOT NULL,                       # current price
+        market_cap DOUBLE NOT NULL,                  # market cap
+        volume_24h DOUBLE NOT NULL,                  # 24h volume
+        volume_change_24h DOUBLE NOT NULL,           # 24h volume change
+        percent_change_1h DOUBLE NOT NULL,           # 1h price change
+        percent_change_24h DOUBLE NOT NULL,          # 24h price change
+        percent_change_7d DOUBLE NOT NULL,           # 7d price change
+        percent_change_30d DOUBLE NOT NULL,          # 30d price change
+        percent_change_60d DOUBLE NOT NULL,          # 60d price change
+        percent_change_90d DOUBLE NOT NULL           # 90d price change
+    );
+    """
+    cursor.execute(create_table_query)  # create the table
+    connection.commit()                 # commit the transaction
+    cursor.close()                      # close the cursor
+    connection.close()                  # close the connection
+
+# Convert a UTC timestamp string to Beijing time
+def bj_time(utc_time):
+    """Convert a UTC timestamp string to Beijing time."""
+    utc_time = datetime.strptime(utc_time, '%Y-%m-%dT%H:%M:%S.%fZ')  # parse the UTC string
+    beijing_time = utc_time + timedelta(hours=8)                     # Beijing is UTC+8
+    return beijing_time.strftime('%Y-%m-%d %H:%M:%S')                # format as a string
+
+# Fetch market data and insert it into the database
+def marketcap():
+    try:
+        # request the latest cryptocurrency listings from the CoinMarketCap API
+        response = requests.get(url, headers=headers, params={"limit": 200})
+        response.raise_for_status()  # raise on HTTP errors
+    except requests.RequestException:
+        time.sleep(60)  # wait one minute, then retry once
+        response = requests.get(url, headers=headers, params={"limit": 200})
+
+    data = response.json()      # decode the JSON payload
+    for item in data['data']:   # iterate over the returned listings
+        quote = item['quote']['USD']                  # USD quote block
+        update_time = bj_time(quote['last_updated'])  # convert update time to Beijing time
+        symbol = item['symbol']                       # ticker symbol
+        ranks = item['cmc_rank']                      # CMC rank
+        price = quote['price']                        # price
+        market_cap = quote['market_cap']              # market cap
+        volume_24h = quote['volume_24h']              # 24h volume
+        volume_change_24h = quote['volume_change_24h']    # 24h volume change
+        percent_change_1h = quote['percent_change_1h']    # 1h price change
+        percent_change_24h = quote['percent_change_24h']  # 24h price change
+        percent_change_7d = quote['percent_change_7d']    # 7d price change
+        percent_change_30d = quote['percent_change_30d']  # 30d price change
+        percent_change_60d = quote['percent_change_60d']  # 60d price change
+        percent_change_90d = quote['percent_change_90d']  # 90d price change
+
+        # insert the row into MySQL
+        connection = pymysql.connect(**db_config)  # connect to the database
+        cursor = connection.cursor()               # open a cursor
+        insert_query = """
+        INSERT INTO marketInfo (
+            update_time, symbol, ranks, price, market_cap, volume_24h,
+            volume_change_24h, percent_change_1h, percent_change_24h,
+            percent_change_7d, percent_change_30d, percent_change_60d,
+            percent_change_90d
+        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
+        """
+        # execute the INSERT
+        cursor.execute(insert_query, (
+            update_time, symbol, ranks, price, market_cap, volume_24h,
+            volume_change_24h, percent_change_1h, percent_change_24h,
+            percent_change_7d, percent_change_30d, percent_change_60d,
+            percent_change_90d
+        ))
+        connection.commit()  # commit the transaction
+        cursor.close()       # close the cursor
+        connection.close()   # close the connection
+
+# Scheduled job: run marketcap every 5 minutes
+def schedule_job():
+    scheduler = BlockingScheduler()  # blocking scheduler
+    scheduler.add_job(marketcap, 'cron', minute='0,5,10,15,20,25,30,35,40,45,50,55')  # every 5 minutes
+    scheduler.start()  # start the scheduler
+
+if __name__ == "__main__":
+    create_table()  # make sure the table exists before the first run
+    schedule_job()  # start scraping on a schedule
\ No newline at end of file
diff --git a/lyq/Macroeconomic_COVITGDP_v2.py b/lyq/Macroeconomic_COVITGDP_v2.py
new file mode 100644
index 0000000..344f3a3
--- /dev/null
+++ b/lyq/Macroeconomic_COVITGDP_v2.py
@@ -0,0 +1,83 @@
+import requests
+import pymysql
+from datetime import datetime
+import time
+
+def get_bea_data(year):
+    url = ("https://apps.bea.gov/api/data?&UserID=146B5757-D9E3-442C-B6AC-ADE9E6B71114&method=GetData&DataSetName=GDPbyIndustry&Year=%s&Industry=ALL&tableID=15&Frequency=Q&ResultFormat=JSON" % year)
+    response = requests.get(url)
+    return response.json()['BEAAPI']['Results'][0]['Data']
+
+def update_database(cursor, data):
+    industry_map = {
+        'Agriculture, forestry, fishing, and hunting': 'VAPGDPAFH',
+        'Mining': 'VAPGDPM',
+        'Construction': 'VAPGDPC',
+        'Manufacturing': 'VAPGDPMA',
+        'Retail trade': 'VAPGDPR',
+        'Wholesale trade': 'VAPGDPW',
+        'Utilities': 'VAPGDPU',
+        'Transportation and warehousing': 'VAPGDPT',
+        'Information': 'VAPGDPI',
+        'Finance, insurance, real estate, rental, and leasing': 'VAPGDPFIRL',
+        'Professional and business services': 'VAPGDPPBS',
+        'Educational services, health care, and social assistance': 'VAPGDPHCSA',
+        'Arts, entertainment, recreation, accommodation, and food services': 'VAPGDPAF',
+        'Other services, except government': 'CPGDPOSEG',
+        'Government': 'Federation',
+        'State and local': 'State_local'
+    }
+
+    for entry in data:
+        year = entry["Year"]
+        quarter = entry["Quarter"]
+        new_time = f"{year}Q{quarter}"
+        industry = entry["IndustrYDescription"]
+        value = entry["DataValue"]
+
+        if industry in industry_map:
+            column = industry_map[industry]
+
+            cursor.execute("SELECT quarterly FROM COVITGDP WHERE quarterly = %s", (new_time,))
+            result = cursor.fetchone()
+            if result:
+                cursor.execute(f"SELECT {column} FROM COVITGDP WHERE quarterly = %s", (new_time,))
+                old_value = cursor.fetchone()[0]
+
+                if old_value != value:
+                    cursor.execute(f"UPDATE COVITGDP SET {column} = %s WHERE quarterly = %s", (value, new_time))
+                else:
+                    print(f"No update needed for {column} for {new_time}")
+
+            else:
+                if column == 'VAPGDPAFH':
+                    cursor.execute("INSERT INTO COVITGDP (quarterly, VAPGDPAFH) VALUES (%s, %s)", (new_time, value))
+                else:
+                    cursor.execute(f"INSERT INTO COVITGDP (quarterly, {column}) VALUES (%s, %s) ON DUPLICATE KEY UPDATE {column} = VALUES({column})", (new_time, value))
+
+def main():
+    years = 2025
+
+ while True: + try: + db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423) + cursor = db.cursor() + + data = get_bea_data(years) + update_database(cursor, data) + db.commit() + except pymysql.MySQLError as e: + print(f"Database connection error: {e}") + break + except Exception as e: + print(f"An error occurred: {e}") + finally: + if 'cursor' in locals(): + cursor.close() + if 'db' in locals(): + db.close() + + time.sleep(86400) + +if __name__ == "__main__": + main() diff --git a/lyq/Macroeconomic_CPI_NSA_v2.py b/lyq/Macroeconomic_CPI_NSA_v2.py new file mode 100644 index 0000000..5ebb17a --- /dev/null +++ b/lyq/Macroeconomic_CPI_NSA_v2.py @@ -0,0 +1,101 @@ +import time +import requests +import json +import pymysql +from datetime import datetime + +# Function to fetch data from BLS API +def fetch_data(series_ids): + headers = {'Content-type': 'application/json'} + data = json.dumps({"seriesid": series_ids, "startyear": "2024", "endyear": "2024"}) + try: + response = requests.post('https://api.bls.gov/publicAPI/v2/timeseries/data/', data=data, headers=headers) + response.raise_for_status() # Raise exception for HTTP errors + return json.loads(response.text) + except requests.exceptions.RequestException as e: + return None + +# Function to convert BLS period format to datetime +def convert_date(year, period): + date_string = f"{year}/{period.replace('M', '')}/01" + return datetime.strptime(date_string, '%Y/%m/%d') + +# Function to insert data into MySQL database +def insert_data(cursor, table_name, date, name, value): + cursor.execute( + f"INSERT INTO {table_name}(date, name, value) VALUES (%s, %s, %s)", + (date, name, value) + ) + +# Function to process series data and insert into the database +def process_series_data(json_data, table_name, names): + db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423) + + cursor = db.cursor() + + for i, series in enumerate(json_data['Results']['series']): + for data_point in sorted(series['data'], key=lambda x: (x['year'], x['period'])): + year = data_point['year'] + period = data_point['period'] + value = data_point['value'] + date = convert_date(year, period) + name = names[i] if i < len(names) else f"Unknown {i}" + + cursor.execute(f"SELECT COUNT(*) FROM {table_name} WHERE date = %s AND name = %s", (date, name)) + if cursor.fetchone()[0] == 0: + insert_data(cursor, table_name, date, name, value) + db.commit() + + db.close() + +# Function to merge JSON data +def merge_json_data(json_data_list): + merged_series = [] + for json_data in json_data_list: + if json_data and 'Results' in json_data and 'series' in json_data['Results']: + merged_series.extend(json_data['Results']['series']) + return {'Results': {'series': merged_series}} + +# Main script logic +while True: + series_ids1 = [ + 'CUUR0000SA0', 'CUUR0000SAF1', 'CUUR0000SAF11', 'CUUR0000SAF111', 'CUUR0000SAF112', 'CUUR0000SEFJ', + 'CUUR0000SAF113', 'CUUR0000SAF114', 'CUUR0000SEFV', 'CUUR0000SA0E', 'CUUR0000SACE', 'CUUR0000SEHE01', + 'CUUR0000SETB', 'CUUR0000SETB01', 'CUUR0000SEHF', 'CUUR0000SEHF01', 'CUUR0000SEHF02' + ] + series_ids2 = [ + 'CUUR0000SA0L1E', 'CUUR0000SACL1E', 'CUUR0000SAA', 'CUUR0000SETA01', 'CUUR0000SETA02', 'CUUR0000SAM1', + 'CUUR0000SAF116', 'CUUR0000SEGA', 'CUUR0000SASLE', 'CUUR0000SAH1', 'CUUR0000SEHA', 'CUUR0000SEHC', + 'CUUR0000SAM2', 'CUUR0000SEMC01', 'CUUR0000SEMD01', 'CUUR0000SAS4', 'CUUR0000SETD', 'CUUR0000SETE', + 'CUUR0000SETG01' + ] + series_ids3 = 
[s.replace('CUUR', 'CUSR') for s in series_ids1]
+    series_ids4 = [s.replace('CUUR', 'CUSR') for s in series_ids2]
+
+    json_data1 = fetch_data(series_ids1)
+    json_data2 = fetch_data(series_ids2)
+    json_data3 = fetch_data(series_ids3)
+    json_data4 = fetch_data(series_ids4)
+
+    combined_json_data_NSA = merge_json_data([json_data1, json_data2])
+    combined_json_data_SA = merge_json_data([json_data3, json_data4])
+
+    names = [
+        'All items', 'Food', 'Food at home', 'Cereals and bakery products', 'Meats, poultry, fish, and eggs',
+        'Dairy and related products', 'Fruits and vegetables', 'Nonalcoholic beverages and beverage materials',
+        'Food away from home', 'Energy', 'Energy commodities', 'Fuel oil', 'Motor fuel', 'Gasoline (all types)',
+        'Energy services', 'Electricity', 'Utility (piped) gas service', 'All items less food and energy',
+        'Commodities less food and energy commodities', 'Apparel', 'New vehicles', 'Used cars and trucks',
+        'Medical care commodities', 'Alcoholic beverages', 'Tobacco and smoking products',
+        'Services less energy services', 'Shelter', 'Rent of primary residence', "Owners' equivalent rent of residences",
+        'Medical care services', "Physicians' services", 'Hospital services', 'Transportation services',
+        'Motor vehicle maintenance and repair', 'Motor vehicle insurance', 'Airline fares'
+    ]
+
+    if combined_json_data_NSA and 'Results' in combined_json_data_NSA and 'series' in combined_json_data_NSA['Results']:
+        process_series_data(combined_json_data_NSA, 'CPI_NSA', names)
+
+    if combined_json_data_SA and 'Results' in combined_json_data_SA and 'series' in combined_json_data_SA['Results']:
+        process_series_data(combined_json_data_SA, 'CPI_SA', names)
+
+    time.sleep(86400)
\ No newline at end of file
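A design note on process_series_data above: it issues a SELECT COUNT(*) per data point to avoid duplicate rows. If the CPI tables carry a unique key over (date, name) — an assumption, not something this patch creates — the database can enforce the same guarantee directly. A hedged sketch (the helper name is illustrative):

def insert_data_ignore(cursor, table_name, date, name, value):
    # INSERT IGNORE silently skips rows that would violate the unique key,
    # replacing the per-row SELECT COUNT(*) existence check
    cursor.execute(
        f"INSERT IGNORE INTO {table_name}(date, name, value) VALUES (%s, %s, %s)",
        (date, name, value)
    )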
date.replace("November", "/11/") + date = date.replace("December", "/12/") + date = date1+date[0:4]+date2 + date_string = date + format = '%Y/%m/%d' + from datetime import datetime + date = datetime.strptime(date_string, format) + sql = "select time from CHAFRNFRAA order by id desc limit 1" + cursor.execute(sql) + db.commit() + old_time = cursor.fetchall() + ole_time=old_time[0][0] + except: + time.sleep(30) + continue + # 判断时间 + if date != ole_time: + page = page.find_all('span',style="font-family:'Courier New'; font-weight:bold") + page = remove_tags(str(page)) + page = page.replace(",", "") + page = page.replace("[", "") + page = page.replace("]", "") + page=page.split() + symbol= '' + list=[] + # 数据拼接 + for i in page: + if i =='-': + symbol= '-' + continue + if i =='+': + continue + if i =='(0)': + continue + if i =='...': + i='NULL' + value= symbol + i + symbol= '' + list+=[value] + sql = "insert into FARBODI(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE)values(%s,%s,%s,%s,%s,%s)" + data_list=[('Reserve Bank credit',date,list[0],list[1],list[2],list[3]), + ('Securities held outright', date, list[4], list[5], list[6], list[7]), + ('U.S. Treasury securities', date, list[8], list[9], list[10], list[11]), + ('Bills', date, list[12], list[13], list[14], list[15]), + ('Notes and bonds, nominal', date, list[16], list[17], list[18], list[19]), + ('Notes and bonds, inflation-indexed', date, list[20], list[21], list[22], list[23]), + ('Inflation compensation', date, list[24], list[25], list[26], list[27]), + ('Federal agency debt securities', date, list[28], list[29], list[30], list[31]), + ('Mortgage-backed securities', date, list[32], list[33], list[34], list[35]), + ('Uposho', date, list[36], list[37], list[38], list[39]), + ('Udosho', date, list[40], list[41], list[42], list[43]), + ('Repurchase agreements', date, list[44], list[45], list[46], list[47]), + ('Foreign official', date, list[48], list[49], list[50], list[51]), + ('Others', date, list[52], list[53], list[54], list[55]), + ('Loans', date, list[56], list[57], list[58], list[59]), + ('Primary credit', date, list[60], list[61], list[62], list[63]), + ('Secondary credit', date, list[64], list[65], list[66], list[67]), + ('Seasonal credit', date, list[68], list[69], list[70], list[71]), + ('PPPLF', date, list[72], list[73], list[74], list[75]), + ('Bank Term Funding Program', date, list[76], list[77], list[78], list[79]), + ('Other credit extensions', date, list[80], list[81], list[82], list[83]), + ('NphoMFLLC(MSLP)', date, list[84], list[85], list[86], list[87]), + ('Net portfolio holdings of MLF LLC', date, list[88], list[89], list[90], list[91]), + ('Net portfolio holdings of TALF II LLC',date, list[92], list[93], list[94], list[95]), + ('Float',date, list[96], list[97], list[98], list[99]), + ('Central bank liquidity swaps',date, list[100], list[101], list[102], list[103]), + ('Other Federal Reserve assets',date, list[104], list[105], list[106], list[107]), + ('Foreign currency denominated assets',date, list[108], list[109], list[110], list[111]), + ('Gold stock',date, list[112], list[113], list[114], list[115]), + ('Special drawing rights certificate account',date, list[116], list[117], list[118], list[119]), + ('Treasury currency outstanding',date, list[120], list[121], list[122], list[123]), + ('Total factors supplying reserve funds',date, list[124], list[125], list[126], list[127])] + cursor.executemany(sql,data_list) + + + + + sql2 = "insert into 
FARBODIC(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE)values(%s,%s,%s,%s,%s,%s)" + data_list2=[('Currency in circulation',date, list[128], list[129], list[130], list[131]), + ('Reverse repurchase agreements',date, list[132], list[133], list[134], list[135]), + ('Foreign official and international accounts',date, list[136], list[137], list[138], list[139]), + ('Others',date, list[140], list[141], list[142], list[143]), + ('Treasury cash holdings',date, list[144], list[145], list[146], list[147]), + ('DwFRBotrb',date, list[148], list[149], list[150], list[151]), + ('Tdhbdi',date, list[152], list[153], list[154], list[155]), + ('U.S. Treasury, General Account',date, list[156], list[157], list[158], list[159]), + ('Foreign official',date, list[160], list[161], list[162], list[163]), + ('Other',date, list[164], list[165], list[166], list[167]), + ('Treasury contributions to credit facilities',date, list[168], list[169], list[170], list[171]), + ('Other liabilities and capital',date, list[172], list[173], list[174], list[175]), + ('Tfotrbarf',date, list[176], list[177], list[178], list[179]), + ('RbwFRB',date, list[180], list[181], list[182], list[183])] + cursor.executemany(sql2,data_list2) + + + + sql3 = "insert into MI(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE)values(%s,%s,%s,%s,%s,%s)" + data_list3=[('Shicffoaia',date, list[184], list[185], list[186], list[187]), + ('Marketable U.S. Treasury securities',date, list[188], list[189], list[190], list[191]), + ('Fadambs',date, list[192], list[193], list[194], list[195]), + ('Other securities',date, list[196], list[197], list[198], list[199]), + ('Securities lent to dealers',date, list[200], list[201], list[202], list[203]), + ('Overnight facility',date, list[204], list[205], list[206], list[207]), + ('U.S. 
Treasury securities',date, list[208], list[209], list[210], list[211]), + ('Federal agency debt securities',date,list[212], list[213], list[214], list[215])] + cursor.executemany(sql3,data_list3) + + + + sql4 = "insert into MDOSLASOAAL(name,time,D15,D16_D90,D91_Y1,Y1_Y5,Y5_Y10,Y10_,TOTAL)values(%s,%s,%s,%s,%s,%s,%s,%s,%s)" + data_list4=[('Loans', date, list[216], list[217], list[218], list[219], list[220], list[221], list[222]), + ('USTsH', date, list[223], list[224], list[225], list[226], list[227], list[228], list[229]), + ('USTsWc', date, list[230], list[231], list[232], list[233], list[234], list[235], list[236]), + ('FadsH', date, list[237], list[238], list[239], list[240], list[241], list[242], list[243]), + ('FadsWc', date, list[244], list[245], list[246], list[247], list[248], list[249], list[250]), + ('MbsH', date, list[251], list[252], list[253], list[254], list[255], list[256], list[257]), + ('MbsWc', date, list[258], list[259], list[260], list[261], list[262], list[263], list[264]), + ('LphbMFLLC(MSLP)', date, list[265], list[266], list[267], list[268], list[269], list[270], list[271]), + ('Repurchase agreements', date, list[272], list[273], list[274], list[275], list[276], list[277], list[278]), + ('Central bank liquidity swaps', date, list[279], list[280], list[281], list[282], list[283], list[284], list[285]), + ('Reverse repurchase agreements', date, list[286], list[287], list[288], list[289], list[290], list[291], list[292]), + ('Term deposits', date, list[293], list[294], list[295], list[296], list[297], list[298], list[299])] + cursor.executemany(sql4,data_list4) + + + sql5 = "insert into SIOMS(name,time,value)values(%s,%s,%s)" + data_list5 = [('Mortgage-backed securities held outright', date, list[300]), + ('Residential mortgage-backed securities', date, list[301]), + ('Commercial mortgage-backed securities', date, list[302]), + ('Commitments to buy mortgage-backed securities', date, list[303]), + ('Commitments to sell mortgage-backed securities', date, list[304]), + ('Cash and cash equivalents', date, list[305])] + cursor.executemany(sql5, data_list5) + + + + sql6 = "insert into IOPAOCFL(name,time,OPAPTLLC,UPFAPT,FAAOA,TOTAL)values(%s,%s,%s,%s,%s,%s)" + data_list6 =[('MS Facilities LLC (Main Street Lending Program)',date, list[306], list[307], list[308], list[309])] + cursor.executemany(sql6, data_list6) + + + sql7 = "insert into CSOCOAFRB(name,time,EFC,THIS_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR)values(%s,%s,%s,%s,%s,%s)" + data_list7 = [('Gold certificate account', date, 'NULL', list[310], list[311], list[312]), + ('Special drawing rights certificate account', date, 'NULL', list[313], list[314], list[315]), + ('Coin', date, 'NULL', list[316], list[317], list[318]), + ('Supadraal', date, 'NULL', list[319], list[320], list[321]), + ('Securities held outright', date, 'NULL', list[322], list[323], list[324]), + ('U.S. 
Treasury securities', date, 'NULL', list[325], list[326], list[327]), + ('Bills', date, 'NULL', list[328], list[329], list[330]), + ('Notes and bonds, nominal', date, 'NULL', list[331], list[332], list[333]), + ('Notes and bonds, inflation-indexed', date, 'NULL', list[334], list[335], list[336]), + ('Inflation compensation', date, 'NULL', list[337], list[338], list[339]), + ('Federal agency debt securities', date, 'NULL', list[340], list[341], list[342]), + ('Mortgage-backed securities', date, 'NULL', list[343], list[344], list[345]), + ('Uposho', date, 'NULL', list[346], list[347], list[348]), + ('Udosho', date, 'NULL', list[349], list[350], list[351]), + ('Repurchase agreements', date, 'NULL', list[352], list[353], list[354]), + ('Loans', date, 'NULL', list[355], list[356], list[357]), + ('NphoMFLLC(MSLP)', date, 'NULL', list[358], list[359], list[360]), + ('NphoMLFLLC', date, 'NULL', list[361], list[362], list[363]), + ('Net portfolio holdings of TALF II LLC', date, 'NULL', list[364], list[365], list[366]), + ('Items in process of collection', date, 'NULL', list[367], list[368], list[369]), + ('Bank premises', date, 'NULL', list[370], list[371], list[372]), + ('Central bank liquidity swaps', date, 'NULL', list[373], list[374], list[375]), + ('Foreign currency denominated assets', date, 'NULL', list[376], list[377], list[378]), + ('Other assets', date, 'NULL', list[379], list[380], list[381]), + ('Total assets', date, 'NULL', list[382], list[383], list[384])] + cursor.executemany(sql7, data_list7) + + + + sql8 = "insert into CSOCOAFRBC(name,time,EFC,THIS_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR)values(%s,%s,%s,%s,%s,%s)" + data_list8 = [('FRnnoFBh', date, 'NULL', list[385], list[386], list[387]), + ('Reverse repurchase agreements', date, 'NULL', list[388], list[389], list[390]), + ('Deposits', date, 'NULL', list[391], list[392], list[393]), + ('Term deposits held by depository institutions', date, 'NULL', list[394], list[395], list[396]), + ('Other deposits held by depository institutions', date, 'NULL', list[397], list[398], list[399]), + ('U.S. 
Treasury, General Account', date, 'NULL', list[400], list[401], list[402]), + ('Foreign official', date, 'NULL', list[403], list[404], list[405]), + ('Other', date, 'NULL', list[406], list[407], list[408]), + ('Deferred availability cash items', date, 'NULL', list[409], list[410], list[411]), + ('Treasury contributions to credit facilities', date, 'NULL', list[412], list[413], list[414]), + ('Other liabilities and accrued dividends', date, 'NULL', list[415], list[416], list[417]), + ('Total liabilities', date, 'NULL', list[418], list[419], list[420]), + ('Capital paid in', date, 'NULL', list[421], list[422], list[423]), + ('Surplus', date, 'NULL', list[424], list[425], list[426]), + ('Other capital accounts', date, 'NULL', list[427], list[428], list[429]), + ('Total capital', date, 'NULL', list[430], list[431], list[432])] + cursor.executemany(sql8, data_list8) + + + + + + sql9 = "insert into SOCOEFRB(name,time,TOTAL,Boston,NewYork,Philadelphia,Cleveland,Richmond,Atlanta,Chicago,St_Louis,Minneapolis,Kansas_City,Dallas,San_Francisco)values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)" + data_list9=[('Gcasdrc', date, list[433], list[434], list[435], list[436], list[437], list[438], list[439], list[440], list[441], list[442], list[443], list[444], list[445]), + ('Coin', date, list[446], list[447], list[448], list[449], list[450], list[451], list[452], list[453], list[454], list[455], list[456], list[457], list[458]), + ('Supadraal', date, list[459], list[460], list[461], list[462], list[463], list[464], list[465], list[466], list[467], list[468], list[469], list[470], list[471]), + ('NphoMFLLC(MSLP)', date, list[472], list[473], list[474], list[475], list[476], list[477], list[478], list[479], list[480], list[481], list[482], list[483], list[484]), + ('Central bank liquidity swaps', date, list[485], list[486], list[487], list[488], list[489], list[490], list[491], list[492], list[493], list[494], list[495], list[496], list[497]), + ('Foreign currency denominated assets', date, list[498], list[499], list[500], list[501], list[502], list[503], list[504], list[505], list[506], list[507], list[508], list[509], list[510]), + ('Other assets', date, list[511], list[512], list[513], list[514], list[515], list[516], list[517], list[518], list[519], list[520], list[521], list[522], list[523]), + ('Interdistrict settlement account', date, list[524], list[525], list[526], list[527], list[528], list[529], list[530], list[531], list[532], list[533], list[534], list[535], list[536]), + ('Total assets', date, list[537], list[538], list[539], list[540], list[541], list[542], list[543], list[544], list[545], list[546], list[547], list[548], list[549])] + cursor.executemany(sql9, data_list9) + + + + + sql10 = "insert into SOCOEFRBC(name,time,TOTAL,Boston,NewYork,Philadelphia,Cleveland,Richmond,Atlanta,Chicago,St_Louis,Minneapolis,Kansas_City,Dallas,San_Francisco)values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)" + data_list10 = [('Federal Reserve notes, net', date, list[550], list[551], list[552], list[553], list[554], list[555], list[556], list[557], list[558], list[559], list[560], list[561], list[562]), + ('Reverse repurchase agreements', date, list[563], list[564], list[565], list[566], list[567], list[568], list[569], list[570], list[571], list[572], list[573], list[574], list[575]), + ('Deposits', date, list[576], list[577], list[578], list[579], list[580], list[581], list[582], list[583], list[584], list[585], list[586], list[587], list[588]), + ('Depository institutions', date, list[589], list[590], 
list[591], list[592], list[593], list[594], list[595], list[596], list[597], list[598], list[599], list[600], list[601]),
+                    ('U.S. Treasury, General Account', date, list[602], list[603], list[604], list[605], list[606], list[607], list[608], list[609], list[610], list[611], list[612], list[613], list[614]),
+                    ('Foreign official', date, list[615], list[616], list[617], list[618], list[619], list[620], list[621], list[622], list[623], list[624], list[625], list[626], list[627]),
+                    ('Other', date, list[628], list[629], list[630], list[631], list[632], list[633], list[634], list[635], list[636], list[637], list[638], list[639], list[640]),
+                    ('Earnings remittances due to the U.S. Treasury', date, list[641], list[642], list[643], list[644], list[645], list[646], list[647], list[648], list[649], list[650], list[651], list[652], list[653]),
+                    ('Treasury contributions to credit facilities', date, list[654], list[655], list[656], list[657], list[658], list[659], list[660], list[661], list[662], list[663], list[664], list[665], list[666]),
+                    ('Other liabilities and accrued dividends', date, list[667], list[668], list[669], list[670], list[671], list[672], list[673], list[674], list[675], list[676], list[677], list[678], list[679]),
+                    ('Total liabilities', date, list[680], list[681], list[682], list[683], list[684], list[685], list[686], list[687], list[688], list[689], list[690], list[691], list[692]),
+                    ('Capital paid in', date, list[693], list[694], list[695], list[696], list[697], list[698], list[699], list[700], list[701], list[702], list[703], list[704], list[705]),
+                    ('Surplus', date, list[706], list[707], list[708], list[709], list[710], list[711], list[712], list[713], list[714], list[715], list[716], list[717], list[718]),
+                    ('Other capital', date, list[719], list[720], list[721], list[722], list[723], list[724], list[725], list[726], list[727], list[728], list[729], list[730], list[731]),
+                    ('Total liabilities and capital', date, list[732], list[733], list[734], list[735], list[736], list[737], list[738], list[739], list[740], list[741], list[742], list[743], list[744])]
+        cursor.executemany(sql10, data_list10)
+
+        sql11 = "insert into CHAFRNFRAA(name,time,value)values(%s,%s,%s)"
+        data_list11 = [('Federal Reserve notes outstanding', date, list[745]),
+                       ('LNhbFBnstc', date, list[746]),
+                       ('Federal Reserve notes to be collateralized', date, list[747]),
+                       ('Collateral held against Federal Reserve notes', date, list[748]),
+                       ('Gold certificate account', date, list[749]),
+                       ('Special drawing rights certificate account', date, list[750]),
+                       ('UTadambsp', date, list[751]),
+                       ('Other assets pledged', date, list[752]),
+                       ('TUTadambs', date, list[753]),
+                       ('LFvosurra', date, list[754]),
+                       ('UTadambsetbp', date, list[755])]
+        cursor.executemany(sql11, data_list11)
+        db.commit()
+    else:
+        time.sleep(21600)
+    # time.sleep(timer_start_time)
+
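The release-date handling in Macroeconomic_FARBODI.py rewrites month names by hand before calling strptime, but datetime can parse the string in one step. A hedged sketch, assuming the scraped text reduces to the form "Release Date: January 16, 2026" (the function name is illustrative):

from datetime import datetime

def parse_release_date(text):
    # "Release Date: January 16, 2026" -> datetime(2026, 1, 16)
    cleaned = text.replace("Release Date:", "").strip()
    return datetime.strptime(cleaned, "%B %d, %Y")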
diff --git a/lyq/Macroeconomic_FBI_v2.py b/lyq/Macroeconomic_FBI_v2.py
new file mode 100644
index 0000000..8481e77
--- /dev/null
+++ b/lyq/Macroeconomic_FBI_v2.py
@@ -0,0 +1,98 @@
+import requests
+import pymysql
+from datetime import datetime
+from w3lib.html import remove_tags
+import pandas as pd
+import time
+
+def parse_treasury_data(data):
+    # locate the "Country" header row
+    header_index = data.index("Country")
+    columns = data[header_index:header_index+14]  # "Country" plus 13 month columns
+    rows = data[header_index+14:]
+
+    result = []
+    i = 0
+    while i < len(rows):
+        # assemble multi-word country names
+        country_parts = []
+        while i < len(rows) and not rows[i].replace('.', '', 1).isdigit():
+            country_parts.append(rows[i])
+            i += 1
+        country = " ".join(country_parts).replace(",", "")
+
+        # take the 13 monthly values
+        values = rows[i:i+13]
+        i += 13
+
+        if len(values) == 13:
+            result.append([country] + values)
+
+    # build a DataFrame
+    df = pd.DataFrame(result, columns=columns)
+
+    # =================== name cleanup ===================
+    rename_map = {
+        "Of Which: Foreign Official": "Foreign Official",
+        "Of Which: Foreign Official Treasury Bills": "Treasury Bills",
+        "Of Which: Foreign Official T-Bonds & Notes": "T-Bonds & Notes"
+    }
+    df["Country"] = df["Country"].replace(rename_map)
+
+    return df
+
+
+def run_job():
+    print("=== starting scrape and database update ===")
+
+    # =================== fetch the page =====================
+    page = requests.get("https://ticdata.treasury.gov/resource-center/data-chart-center/tic/Documents/slt_table5.html")
+    page = remove_tags(str(page.text))
+    page = page.split()
+
+    df = parse_treasury_data(page)
+
+    # =================== connect to the database =====================
+    db = pymysql.connect(
+        host="127.0.0.1",
+        user="root",
+        password="2GS@bPYcgiMyL14A",
+        database="Macroeconomics",
+        port=4423
+    )
+    cursor = db.cursor()
+
+    # latest date already stored in the database
+    cursor.execute("SELECT date FROM FBI ORDER BY date DESC LIMIT 1")
+    result = cursor.fetchone()
+    latest_date_in_db = result[0] if result else None  # datetime or None
+
+    # =================== backfill logic =====================
+    for col in df.columns[1:]:  # iterate over the month columns
+        col_date = datetime.strptime(col, "%Y-%m")
+
+        # skip dates the database already has
+        if latest_date_in_db and col_date <= latest_date_in_db:
+            continue
+
+        print(f"inserting data for {col}...")
+        insert_sql = "INSERT INTO FBI (date, name, value) VALUES (%s, %s, %s)"
+        for _, row in df.iterrows():
+            country = row["Country"]
+            value = row[col]
+            cursor.execute(insert_sql, (col_date.strftime("%Y-%m-01"), country, value))
+
+        db.commit()
+        print(f"{col} inserted")
+
+    cursor.close()
+    db.close()
+    print("=== run complete ===\n")
+
+
+# =================== run forever =====================
+if __name__ == "__main__":
+    while True:
+        run_job()
+        print("sleeping 21600 seconds (6 hours)...\n")
+        time.sleep(21600)  # 6 hours
\ No newline at end of file
diff --git a/lyq/Macroeconomic_FER.py b/lyq/Macroeconomic_FER.py
new file mode 100644
index 0000000..046d628
--- /dev/null
+++ b/lyq/Macroeconomic_FER.py
@@ -0,0 +1,89 @@
+import time
+import requests
+import pymysql
+from bs4 import BeautifulSoup
+from w3lib.html import remove_tags
+import datetime
+while True:
+    try:
+        # now_time = datetime.datetime.now()
+        # next_time = now_time + datetime.timedelta(days=+1)
+        # next_year = next_time.date().year
+        # next_month = next_time.date().month
+        # next_day = next_time.date().day
+        # next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 20:30:01","%Y-%m-%d %H:%M:%S")
+        # timer_start_time = (next_time - now_time).total_seconds()
+        page = requests.get("https://www.federalreserve.gov/data/intlsumm/current.htm")
+        page = page.text
+        page = BeautifulSoup(page, 'html.parser')
+        page1 = page.find_all('th', class_="colorrev")
+        page = page.find_all('td', class_="shadedata1")
+        value1 = remove_tags(str(page[-1]))
+        value1 = value1.replace(",", "")
+        value1 = value1.replace(" ", "")
+        date1 = remove_tags(str(page1[-1]))
+        date1 = date1.replace(" ", "")
+        date1 = date1.replace("/r", "")
+        date1 = date1.replace("/p", "")
+        date1 = date1[-4:] + date1[0:3]
+        date1 = date1.replace("Jan", "/1/01")
+        date1 = date1.replace("Feb", "/2/01")
+        date1 = date1.replace("Mar", "/3/01")
+        date1 = date1.replace("Apr", "/4/01")
+        date1 = date1.replace("May", "/5/01")
+        date1 = 
date1.replace("Jun", "/6/01") + date1 = date1.replace("Jul", "/7/01") + date1 = date1.replace("Aug", "/8/01") + date1 = date1.replace("Sep", "/9/01") + date1 = date1.replace("Oct", "/10/01") + date1 = date1.replace("Nov", "/11/01") + date1 = date1.replace("Dec", "/12/01") + format1 = '%Y/%m/%d' + + value2 = remove_tags(str(page[-2])) + value2 = value2.replace(",", "") + value2 = value2.replace(" ", "") + + date2 = remove_tags(str(page1[-2])) + date2 = date2.replace(" ", "") + date2 = date2.replace("/r", "") + date2 = date2.replace("/p", "") + date2 = date2[-4:] + date2[0:3] + date2 = date2.replace("Jan", "/1/01") + date2 = date2.replace("Feb", "/2/01") + date2 = date2.replace("Mar", "/3/01") + date2 = date2.replace("Apr", "/4/01") + date2 = date2.replace("May", "/5/01") + date2 = date2.replace("Jun", "/6/01") + date2 = date2.replace("Jul", "/7/01") + date2 = date2.replace("Aug", "/8/01") + date2 = date2.replace("Sep", "/9/01") + date2 = date2.replace("Oct", "/10/01") + date2 = date2.replace("Nov", "/11/01") + date2 = date2.replace("Dec", "/12/01") + format2 = '%Y/%m/%d' + from datetime import datetime + date1 = datetime.strptime(date1, format1) + date2 = datetime.strptime(date2, format2) + db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423) + cursor = db.cursor() + sql = "select date from FER order by date desc limit 1" + cursor.execute(sql) + db.commit() + ole_time = cursor.fetchall() + ole_time = ole_time[0][0] + date2= "'" + str(date2) + "'" + sql = "update FER set FER= %s where date=%s" % (value2, date2) + cursor.execute(sql) + db.commit() + if date1 != ole_time: + sql = "insert into FER(date,FER)values('%s','%s')" % (date1, value1 + '*') + cursor.execute(sql) + db.commit() + db.close() + # time.sleep(timer_start_time) + time.sleep(21600) + except: + time.sleep(30) + continue + diff --git a/lyq/Macroeconomic_InterestRate.py b/lyq/Macroeconomic_InterestRate.py new file mode 100644 index 0000000..8ebbcd3 --- /dev/null +++ b/lyq/Macroeconomic_InterestRate.py @@ -0,0 +1,252 @@ +import time +from full_fred.fred import Fred +import pymysql +import requests +from datetime import datetime +from bs4 import BeautifulSoup +from w3lib.html import remove_tags +while True: + fred=Fred('example_key.txt') + fred.set_api_key_file('example_key.txt') + DFEDTARU = fred.get_series_df('DFEDTARU') + DFEDTARL = fred.get_series_df('DFEDTARL') + FEDFUNDS = fred.get_series_df('FEDFUNDS') + IORB = fred.get_series_df('IORB') + RRPONTSYAWARD = fred.get_series_df('RRPONTSYAWARD') + SOFR = fred.get_series_df('SOFR') + + list_date1 = DFEDTARU['date'] + list_value1 = DFEDTARU['value'] + list_date2 = DFEDTARL['date'] + list_value2 = DFEDTARL['value'] + list_date3 = FEDFUNDS['date'] + list_value3 = FEDFUNDS['value'] + list_date4 = IORB['date'] + list_value4 = IORB['value'] + list_date5 = RRPONTSYAWARD['date'] + list_value5 = RRPONTSYAWARD['value'] + list_date6 = SOFR['date'] + list_value6 = SOFR['value'] + + date1 = [] + value1 = [] + date2 = [] + value2 = [] + date3 = [] + value3 = [] + date4 = [] + value4 = [] + date5 = [] + value5 = [] + date6 = [] + value6 = [] + for i in list_date1: + date1 += [i] + for i in list_value1: + value1 += [i] + for i in list_date2: + date2 += [i] + for i in list_value2: + value2 += [i] + for i in list_date3: + date3 += [i] + for i in list_value3: + value3 += [i] + for i in list_date4: + date4 += [i] + for i in list_value4: + value4 += [i] + for i in list_date5: + date5 += [i] + for i in list_value5: + value5 += [i] + for i in 
list_date6: + date6 += [i] + for i in list_value6: + value6 += [i] + date1 = date1[-1] + value1 = value1[-1] + date2 = date2[-1] + value2 = value2[-1] + date3 = date3[-1] + value3 = value3[-1] + date4 = date4[-1] + value4 = value4[-1] + date5 = date5[-1] + value5 = value5[-1] + date6 = date6[-1] + value6 = value6[-1] + + date1 = date1.replace('-', '/') + date_string = date1 + format = '%Y/%m/%d' + date1 = datetime.strptime(date_string, format) + + date2 = date2.replace('-', '/') + date_string = date2 + format = '%Y/%m/%d' + date2 = datetime.strptime(date_string, format) + + date3 = date3.replace('-', '/') + date_string = date3 + format = '%Y/%m/%d' + date3 = datetime.strptime(date_string, format) + + date4 = date4.replace('-', '/') + date_string = date4 + format = '%Y/%m/%d' + date4 = datetime.strptime(date_string, format) + + date5 = date5.replace('-', '/') + date_string = date5 + format = '%Y/%m/%d' + date5 = datetime.strptime(date_string, format) + + date6 = date6.replace('-', '/') + date_string = date6 + format = '%Y/%m/%d' + date6 = datetime.strptime(date_string, format) + db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423) + cursor = db.cursor() + sql = "select date from InterestRate where name='DFEDTARU'" + cursor.execute(sql) + db.commit() + DFEDTARU_old_time = cursor.fetchall() + DFEDTARU_old_time=DFEDTARU_old_time[-1][0] + if DFEDTARU_old_time != date1 : + sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date1, 'DFEDTARU', value1) + cursor.execute(sql) + db.commit() + sql2 = "select date from InterestRate where name='DFEDTARL'" + cursor.execute(sql2) + db.commit() + DFEDTARL_old_time = cursor.fetchall() + DFEDTARL_old_time=DFEDTARL_old_time[-1][0] + if DFEDTARL_old_time != date2 : + sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date2, 'DFEDTARL', value2) + cursor.execute(sql) + db.commit() + sql3 = "select date from InterestRate where name='FEDFUNDS'" + cursor.execute(sql3) + db.commit() + FEDFUNDS_old_time = cursor.fetchall() + FEDFUNDS_old_time=FEDFUNDS_old_time[-1][0] + if FEDFUNDS_old_time != date3 : + sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date3, 'FEDFUNDS', value3) + cursor.execute(sql) + db.commit() + sql4 = "select date from InterestRate where name='IORB'" + cursor.execute(sql4) + db.commit() + IORB_old_time = cursor.fetchall() + IORB_old_time=IORB_old_time[-1][0] + if IORB_old_time != date4 : + sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date4, 'IORB', value4) + cursor.execute(sql) + db.commit() + sql5 = "select date from InterestRate where name='RRPONTSYAWARD'" + cursor.execute(sql5) + db.commit() + RRPONTSYAWARD_old_time = cursor.fetchall() + RRPONTSYAWARD_old_time=RRPONTSYAWARD_old_time[-1][0] + if RRPONTSYAWARD_old_time != date5 : + sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date5, 'RRPONTSYAWARD', value5) + cursor.execute(sql) + db.commit() + sql6 = "select date from InterestRate where name='SOFR'" + cursor.execute(sql6) + db.commit() + SOFR_old_time = cursor.fetchall() + SOFR_old_time=SOFR_old_time[-1][0] + if SOFR_old_time != date6 : + sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date6, 'SOFR', value6) + cursor.execute(sql) + db.commit() + + pagee = requests.get("https://www.frbsf.org/wp-content/uploads/sites/4/proxy-funds-rate-chart1-data.csv") + pagee = pagee.text + pagee = pagee.split() + number = 0 + for i in 
pagee:
+        number += 1
+        if number <= 5:
+            continue
+        else:
+            pagee = i.split()[-1]
+            pagee = pagee.replace(',', ' , ')
+            PFR_new_time = pagee[0:10]
+            PFR_new_time = PFR_new_time.replace('-', '/')
+            PFR_value = pagee[-8:]
+            PFR_value = PFR_value.replace(' ', '')
+            date_string = PFR_new_time
+            format = '%Y/%m/%d'
+            PFR_new_time = datetime.strptime(date_string, format)
+            sql = "select * from InterestRate where name='PFR' and date='%s'" % (PFR_new_time)
+            cursor.execute(sql)
+            outcome = cursor.fetchall()
+            if outcome == () or outcome == 0 or outcome == None:
+                sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (PFR_new_time, 'PFR', PFR_value)
+                cursor.execute(sql)
+                db.commit()
+            else:
+                sql = "update InterestRate set _value='%s' where name='PFR' and date='%s'" % (PFR_value, PFR_new_time)
+                cursor.execute(sql)
+                db.commit()
+    number = 0
+    pagee = requests.get("https://markets.newyorkfed.org/api/rp/repo/multiple/results/last/1.json")
+    pagee = pagee.json()
+    page = pagee['repo']['operations'][0]
+    page2 = page['details'][0]
+    if 'minimumBidRate' in page2:
+        RR_value = page2['minimumBidRate']
+        RR_new_time = page['operationDate']
+        RR_new_time = RR_new_time.replace('-', '/')
+        date_string = RR_new_time
+        format = '%Y/%m/%d'
+        RR_new_time = datetime.strptime(date_string, format)
+        sql = "select date from InterestRate where name='RR'"
+        cursor.execute(sql)
+        db.commit()
+        RR_old_time = cursor.fetchall()
+        RR_old_time = RR_old_time[-1][0]
+        if RR_old_time != RR_new_time:
+            sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (RR_new_time, 'RR', RR_value)
+            cursor.execute(sql)
+            db.commit()
+
+    page = requests.get("https://www.global-rates.com/en/interest-rates/libor/american-dollar/american-dollar.aspx")
+    page = page.text
+    page = BeautifulSoup(page, 'html.parser')
+    data = page.find_all('div', class_="table-normal text-end")
+    LIBOR_new_time = data[0]
+    LIBOR1M_value = data[5]
+    LIBOR3M_value = data[10]
+    LIBOR6M_value = data[15]
+    LIBOR_new_time = remove_tags(str(LIBOR_new_time))
+    LIBOR1M_value = remove_tags(str(LIBOR1M_value))
+    LIBOR3M_value = remove_tags(str(LIBOR3M_value))
+    LIBOR6M_value = remove_tags(str(LIBOR6M_value))
+    LIBOR_new_time = LIBOR_new_time[6:10]+'-'+LIBOR_new_time[0:5]
+    LIBOR_new_time = LIBOR_new_time.replace("-", "/")
+    LIBOR1M_value = LIBOR1M_value.replace(' ', '')
+    LIBOR3M_value = LIBOR3M_value.replace(' ', '')
+    LIBOR6M_value = LIBOR6M_value.replace(' ', '')
+    format = '%Y/%m/%d'
+    LIBOR_new_time = datetime.strptime(LIBOR_new_time, format)
+    sql = "select date from InterestRate where name='LIBOR1M'"
+    cursor.execute(sql)
+    db.commit()
+    LIBOR_old_time = cursor.fetchall()
+    LIBOR_old_time = LIBOR_old_time[-1][0]
+    if LIBOR_new_time != LIBOR_old_time:
+        sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR1M', LIBOR1M_value)
+        sql1 = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR3M', LIBOR3M_value)
+        sql2 = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR6M', LIBOR6M_value)
+        cursor.execute(sql)
+        cursor.execute(sql1)
+        cursor.execute(sql2)
+        db.commit()
+    db.close()
+    time.sleep(7200)
+
+
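The six FRED blocks at the top of Macroeconomic_InterestRate.py repeat one fetch/compare/insert pattern per series, so a table-driven loop would keep them from drifting apart. A minimal sketch under the same assumptions the code above already makes (full_fred returns a DataFrame with string 'date' and 'value' columns in ISO format; the helper name is illustrative):

from datetime import datetime
from full_fred.fred import Fred

SERIES = ['DFEDTARU', 'DFEDTARL', 'FEDFUNDS', 'IORB', 'RRPONTSYAWARD', 'SOFR']

def latest_observation(fred, series_id):
    df = fred.get_series_df(series_id)
    row = df.iloc[-1]  # newest observation
    return datetime.strptime(row['date'], '%Y-%m-%d'), row['value']

fred = Fred('example_key.txt')
for series_id in SERIES:
    date, value = latest_observation(fred, series_id)
    # compare `date` against the last stored row for this series and insert
    # only when it is newer, exactly as the per-series blocks above do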
diff --git a/lyq/Macroeconomic_Loan.py b/lyq/Macroeconomic_Loan.py
new file mode 100644
index 0000000..c6b0018
--- /dev/null
+++ b/lyq/Macroeconomic_Loan.py
@@ -0,0 +1,102 @@
+import time
+from full_fred.fred import Fred
+import pymysql
+from datetime import datetime
+fred = Fred('example_key.txt')
+fred.set_api_key_file('example_key.txt')
+while True:
+    BUSLOANS = fred.get_series_df('BUSLOANS')
+    HBPIGDQ188S = fred.get_series_df('HBPIGDQ188S')
+    date1_all = BUSLOANS['date']
+    value1_all = BUSLOANS['value']
+    date2_all = HBPIGDQ188S['date']
+    value2_all = HBPIGDQ188S['value']
+
+    list_date1 = []
+    list_value1 = []
+    list_date2 = []
+    list_value2 = []
+
+    for i in date1_all:
+        list_date1 += [i]
+    for i in value1_all:
+        list_value1 += [i]
+    for i in date2_all:
+        list_date2 += [i]
+    for i in value2_all:
+        list_value2 += [i]
+
+    date1 = list_date1[-2]
+    value1 = list_value1[-2]
+    date2 = list_date1[-3]
+    value2 = list_value1[-3]
+    date3 = list_date1[-4]
+    value3 = list_value1[-4]
+    date4 = list_date1[-5]
+    value4 = list_value1[-5]
+
+    list_date1 = list_date1[-1]
+    list_value1 = list_value1[-1]
+    list_date2 = list_date2[-1]
+    list_value2 = list_value2[-1]
+
+    list_date1 = list_date1.replace('-', '/')
+    date_string = list_date1
+    format = '%Y/%m/%d'
+    list_date1 = datetime.strptime(date_string, format)
+    list_date2 = list_date2.replace('-', '/')
+    date_string2 = list_date2
+    format = '%Y/%m/%d'
+    list_date2 = datetime.strptime(date_string2, format)
+    db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
+    cursor = db.cursor()
+    sql = "select date from Loan order by date desc limit 1"
+    cursor.execute(sql)
+    db.commit()
+    old_time = cursor.fetchall()
+    old_time = old_time[0][0]
+    date1 = date1.replace('-', '/')
+    date_string = date1
+    format = '%Y/%m/%d'
+    date1 = datetime.strptime(date_string, format)
+
+    date2 = date2.replace('-', '/')
+    date_string = date2
+    format = '%Y/%m/%d'
+    date2 = datetime.strptime(date_string, format)
+
+    date3 = date3.replace('-', '/')
+    date_string = date3
+    format = '%Y/%m/%d'
+    date3 = datetime.strptime(date_string, format)
+
+    date4 = date4.replace('-', '/')
+    date_string = date4
+    format = '%Y/%m/%d'
+    date4 = datetime.strptime(date_string, format)
+    sql = "update Loan set PSI=%s where date='%s'" % (value1, date1)
+    cursor.execute(sql)
+    sql = "update Loan set PSI=%s where date='%s'" % (value2, date2)
+    cursor.execute(sql)
+    sql = "update Loan set PSI=%s where date='%s'" % (value3, date3)
+    cursor.execute(sql)
+    sql = "update Loan set PSI=%s where date='%s'" % (value4, date4)
+    cursor.execute(sql)
+    db.commit()
+    if list_date1 == old_time:
+        db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
+        cursor = db.cursor()
+        sql = "update Loan set PSI= %s where date='%s'" % (list_value1, list_date1)
+        cursor.execute(sql)
+        sql1 = "update Loan set FDHBPI_GDP=%s where date='%s'" % (list_value2, list_date2)
+        cursor.execute(sql1)
+        db.commit()
+        time.sleep(21600)
+    else:
+        db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
+        cursor = db.cursor()
+        sql = "insert into Loan(date,PSI)values('%s','%s')" % (list_date1, list_value1)
+        cursor.execute(sql)
+        sql1 = "update Loan set FDHBPI_GDP=%s where date='%s'" % (list_value2, list_date2)
+        cursor.execute(sql1)
+        db.commit()
+        time.sleep(21600)  # wait before polling FRED again
\ No newline at end of file
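Macroeconomic_Loan.py builds its SQL with % string formatting; pymysql can bind the values itself, which also takes care of quoting dates. A hedged equivalent of one of the updates above (the helper name is illustrative):

import pymysql

def update_psi(cursor, value, date):
    # placeholder binding: pymysql quotes the date itself, so no manual '%s'
    # wrapping of datetime values is needed
    cursor.execute("UPDATE Loan SET PSI=%s WHERE date=%s", (value, date))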
diff --git a/lyq/Macroeconomic_MoneyStockMeasures.py b/lyq/Macroeconomic_MoneyStockMeasures.py
new file mode 100644
index 0000000..97a541f
--- /dev/null
+++ b/lyq/Macroeconomic_MoneyStockMeasures.py
@@ -0,0 +1,216 @@
+import pymysql
+import time
+import requests
+from w3lib.html import remove_tags
+from bs4 import BeautifulSoup
+condition = True
+while condition:
+    import datetime
+    # work out how long to sleep: the next run is at 17:00:01 tomorrow
+    now_time = datetime.datetime.now()
+    next_time = now_time + datetime.timedelta(days=+1)
+    next_year = next_time.date().year
+    next_month = next_time.date().month
+    next_day = next_time.date().day
+    next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 17:00:01","%Y-%m-%d %H:%M:%S")
+    timer_start_time = (next_time - now_time).total_seconds()
+
+    page = requests.get("https://www.federalreserve.gov/releases/h6/current/default.htm")
+    page = page.text
+    soup = BeautifulSoup(page, 'html.parser')
+    page_tbody = soup.find_all('tbody')
+    # pull the three tables: MoneyStockMeasures, SeasonallyAdjusted, NotSeasonallyAdjusted
+    MoneyStockMeasures = page_tbody[0]
+    SeasonallyAdjusted = page_tbody[1]
+    NotSeasonallyAdjusted = page_tbody[2]
+    MoneyStockMeasures = remove_tags(str(MoneyStockMeasures))
+    SeasonallyAdjusted = remove_tags(str(SeasonallyAdjusted))
+    NotSeasonallyAdjusted = remove_tags(str(NotSeasonallyAdjusted))
+    # normalize the month names in the date fields
+    MoneyStockMeasures = MoneyStockMeasures.replace('Jan.', '1')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Feb.', '2')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Mar.', '3')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Apr.', '4')
+    MoneyStockMeasures = MoneyStockMeasures.replace('May', '5')
+    MoneyStockMeasures = MoneyStockMeasures.replace('June', '6')
+    MoneyStockMeasures = MoneyStockMeasures.replace('July', '7')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Aug.', '8')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Sept.', '9')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Oct.', '10')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Nov.', '11')
+    MoneyStockMeasures = MoneyStockMeasures.replace('Dec.', '12')
+    MoneyStockMeasures = MoneyStockMeasures.replace('e', '')
+    MoneyStockMeasures = MoneyStockMeasures.split()
+
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Jan.', '1')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Feb.', '2')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Mar.', '3')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Apr.', '4')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('May', '5')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('June', '6')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('July', '7')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Aug.', '8')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Sept.', '9')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Oct.', '10')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Nov.', '11')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('Dec.', '12')
+    SeasonallyAdjusted = SeasonallyAdjusted.replace('e', '')
+    SeasonallyAdjusted = SeasonallyAdjusted.split()
+
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Jan.', '1')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Feb.', '2')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Mar.', '3')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Apr.', '4')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('May', '5')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('June', '6')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('July', '7')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Aug.', '8')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Sept.', '9')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Oct.', '10')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Nov.', '11')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Dec.', '12')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('e', '')
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.split()
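# --- Editor's sketch (not part of the original patch) ---
# The three replace-chains above apply one shared month mapping; a single
# helper would keep them from drifting apart. Same normalization, including
# the 'e' (estimate flag) removal and the final tokenization:
MONTHS = {'Jan.': '1', 'Feb.': '2', 'Mar.': '3', 'Apr.': '4', 'May': '5',
          'June': '6', 'July': '7', 'Aug.': '8', 'Sept.': '9', 'Oct.': '10',
          'Nov.': '11', 'Dec.': '12'}

def normalize_table(text):
    for month_name, month_number in MONTHS.items():
        text = text.replace(month_name, month_number)
    return text.replace('e', '').split()
# --- end sketch ---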
+    NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('e', '').split()
+    condition1 = 0
+    for i in range(17):
+        # Token 1 is the year and token 0 the month; pin each row to day 1.
+        date1 = MoneyStockMeasures[condition1 + 1] + '/' + MoneyStockMeasures[condition1 + 0] + '/1'
+        from datetime import datetime
+        date1 = datetime.strptime(date1, '%Y/%m/%d')
+        # Pull the ten data columns out of the flat token list, dropping
+        # thousands separators; the offset order maps tokens to table columns.
+        (value11, value12, value13, value14, value15,
+         value16, value17, value18, value19, value20) = [
+            MoneyStockMeasures[condition1 + o].replace(',', '')
+            for o in (2, 3, 7, 8, 4, 5, 6, 9, 10, 11)]
+        condition1 += 12
+
+        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
+        cursor = db.cursor()
+        cursor.execute("select date from MoneyStockMeasures order by date desc limit 1")
+        date2 = cursor.fetchall()[0][0]
+        if i != 16:
+            # Historical rows may have been revised, so re-write them.
+            sql = "UPDATE MoneyStockMeasures SET adjustedM1=%s,adjustedM2=%s,notAdjustedM1=%s,notAdjustedM2=%s,currencyincirculation=%s,reserveBalances=%s,monetaryBase=%s,totalReserves=%s,totalMborrowings_M=%s,nonborrowedReserves=%s WHERE date= '%s'" % (value11, value12, value13, value14, value15, value16, value17, value18, value19, value20, date1)
+        else:
+            # Newest row: update in place if already stored, otherwise insert.
+            if date1 == date2:
+                sql = "UPDATE MoneyStockMeasures SET adjustedM1=%s,adjustedM2=%s,notAdjustedM1=%s,notAdjustedM2=%s,currencyincirculation=%s,reserveBalances=%s,monetaryBase=%s,totalReserves=%s,totalMborrowings_M=%s,nonborrowedReserves=%s WHERE date= '%s'" % (value11, value12, value13, value14, value15, value16, value17, value18, value19, value20, date1)
+            else:
+                sql = "insert into MoneyStockMeasures(date,adjustedM1,adjustedM2,notAdjustedM1,notAdjustedM2,currencyincirculation,reserveBalances,monetaryBase,totalReserves,totalMborrowings_M,nonborrowedReserves)values('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (date1, value11, value12, value13, value14, value15, value16, value17, value18, value19, value20)
+        cursor.execute(sql)
+        db.commit()
+        db.close()
+
+    condition2 = 0
+    for i in range(17):
+        date3 = SeasonallyAdjusted[condition2 + 1] + '/' + SeasonallyAdjusted[condition2 + 0] + '/1'
+        from datetime import datetime
+        date3 = datetime.strptime(date3, '%Y/%m/%d')
+        (value21, value22, value23, value24, value25) = [
+            SeasonallyAdjusted[condition2 + o].replace(',', '') for o in (2, 3, 4, 5, 6)]
+        condition2 += 7
+
+        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
+        cursor = db.cursor()
+        cursor.execute("select date from SeasonallyAdjusted order by date desc limit 1")
+        date4 = cursor.fetchall()[0][0]
+        if i != 16:
+            sql = "UPDATE SeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s WHERE date= '%s'" % (value21, value22, value23, value24, value25, date3)
+        else:
+            if date3 == date4:
+                sql = "UPDATE SeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s WHERE date= '%s'" % (value21, value22, value23, value24, value25, date3)
+            else:
+                sql = "insert into SeasonallyAdjusted(date,currencyM1,demandM1,otherLiquid,smallDenominationTimeNonM1M2,retailMoneyMarketFundsNonM1M2)values('%s','%s','%s','%s','%s','%s')" % (date3, value21, value22, value23, value24, value25)
+        cursor.execute(sql)
+        db.commit()
+        db.close()
+
+    condition3 = 0
+    for i in range(17):
+        date5 = NotSeasonallyAdjusted[condition3 + 1] + '/' + NotSeasonallyAdjusted[condition3 + 0] + '/1'
+        from datetime import datetime
+        date5 = datetime.strptime(date5, '%Y/%m/%d')
+        (value31, value32, value33, value34,
+         value35, value36, value37, value38) = [
+            NotSeasonallyAdjusted[condition3 + o].replace(',', '')
+            for o in (2, 3, 4, 5, 6, 7, 8, 9)]
+        condition3 += 10
+
+        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
+        cursor = db.cursor()
+        cursor.execute("select date from NotSeasonallyAdjusted order by date desc limit 1")
+        date6 = cursor.fetchall()[0][0]
+        if i != 16:
+            sql = "UPDATE NotSeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s,atDepositoryInstitutions=%s,atMoneyMarketFunds=%s,total=%s WHERE date= '%s'" % (value31, value32, value33, value34, value35, value36, value37, value38, date5)
+        else:
+            if date5 == date6:
+                sql = "UPDATE NotSeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s,atDepositoryInstitutions=%s,atMoneyMarketFunds=%s,total=%s WHERE date= '%s'" % (value31, value32, value33, value34, value35, value36, value37, value38, date5)
+            else:
+                sql = "insert into NotSeasonallyAdjusted(date,currencyM1,demandM1,otherLiquid,smallDenominationTimeNonM1M2,retailMoneyMarketFundsNonM1M2,atDepositoryInstitutions,atMoneyMarketFunds,total)values('%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (date5, value31, value32, value33, value34, value35, value36, value37, value38)
+        cursor.execute(sql)
+        db.commit()
+        db.close()
+    time.sleep(timer_start_time)
\ No newline at end of file
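Each of the three loops above issues a SELECT and then branches between
UPDATE and INSERT for the newest row. Assuming a unique index on the date
column (the table DDL is not part of this patch), MySQL can collapse that
into a single statement; a minimal pymysql sketch:

    sql = ("INSERT INTO SeasonallyAdjusted (date, currencyM1) VALUES (%s, %s) "
           "ON DUPLICATE KEY UPDATE currencyM1 = VALUES(currencyM1)")
    cursor.execute(sql, (date3, value21))
    db.commit()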
diff --git a/lyq/Macroeconomic_PCE_v3.py b/lyq/Macroeconomic_PCE_v3.py
new file mode 100644
index 0000000..868b89c
--- /dev/null
+++ b/lyq/Macroeconomic_PCE_v3.py
@@ -0,0 +1,117 @@
+import requests
+import pymysql
+from datetime import datetime
+import time
+
+BEA_USER_ID = "146B5757-D9E3-442C-B6AC-ADE9E6B71114"
+YEARS = ["2023","2024","2025"]  # the first run fetches every listed year
+SLEEP_SECONDS = 21600  # 6 hours
+
+def get_bea_data(year):
+    """Fetch the quarterly NIPA data for the given year."""
+    url = (
+        f'https://apps.bea.gov/api/data?UserID={BEA_USER_ID}'
+        f'&method=GetData&datasetname=NIPA&TableName=T10105&Frequency=Q'
+        f'&Year={year}&ResultFormat=JSON'
+    )
+    response = requests.get(url)
+    return response.json()['BEAAPI']['Results']['Data']
+
+def update_database(cursor, data):
+    """Organize the rows and insert any quarters the database is missing."""
+    # Quarters already present in the database
+    cursor.execute("SELECT times FROM PCE")
+    existing_pce = {row[0] for row in cursor.fetchall()}
+    cursor.execute("SELECT times FROM GPDI")
+    existing_gpdi = {row[0] for row in cursor.fetchall()}
+    cursor.execute("SELECT times FROM NETEXP")
+    existing_netexp = {row[0] for row in cursor.fetchall()}
+
+    # Bucket the rows by target table
+    pce_data, gpdi_data, netexp_data = {}, {}, {}
+    for entry in data:
+        t = entry["TimePeriod"]
+        desc = entry["LineDescription"]
+        val = entry["DataValue"]
+
+        if desc == "Personal consumption expenditures":
+            pce_data.setdefault(t, {})["PCE"] = val
+        elif desc == "Durable goods":
+            pce_data.setdefault(t, {})["PCEDG"] = val
+        elif desc == "Nondurable goods":
+            pce_data.setdefault(t, {})["PCEND"] = val
+        elif desc == "Services" and entry["LineNumber"] == '6':
+            pce_data.setdefault(t, {})["PCES"] = val
+
+        elif desc == "Gross private domestic investment":
+            gpdi_data.setdefault(t, {})["GPDI"] = val
+        elif desc == "Fixed investment":
+            gpdi_data.setdefault(t, {})["FPI"] = val
+        elif desc == "Change in private inventories":
+            gpdi_data.setdefault(t, {})["CBI"] = val
+
+        elif desc == "Net exports of goods and services":
+            netexp_data.setdefault(t, {})["NETEXP"] = val
+        elif desc == "Imports":
+            netexp_data.setdefault(t, {})["IMPGS"] = val
+        elif desc == "Exports":
+            netexp_data.setdefault(t, {})["EXPGS"] = val
+
+    # Insert whatever quarters are missing
+    for t, vals in pce_data.items():
+        if t not in existing_pce:
+            cursor.execute(
+                "INSERT INTO PCE (times, PCE, PCEDG, PCEND, PCES) VALUES (%s,%s,%s,%s,%s)",
+                (t, vals.get("PCE"), vals.get("PCEDG"), vals.get("PCEND"), vals.get("PCES"))
+            )
+    for t, vals in gpdi_data.items():
+        if t not in existing_gpdi:
+            cursor.execute(
+                "INSERT INTO GPDI (times, GPDI, FPI, CBI) VALUES (%s,%s,%s,%s)",
+                (t, vals.get("GPDI"), vals.get("FPI"), vals.get("CBI"))
+            )
+    for t, vals in netexp_data.items():
+        if t not in existing_netexp:
+            cursor.execute(
+                "INSERT INTO NETEXP (times, NETEXP, IMPGS, EXPGS) VALUES (%s,%s,%s,%s)",
+                (t, vals.get("NETEXP"), vals.get("IMPGS"), vals.get("EXPGS"))
+            )
+
+def run_job(first_run=False):
+    """Run one fetch-and-update pass."""
+    print(f"[{datetime.now()}] Fetching BEA data and updating the database...")
+    try:
+        db = pymysql.connect(
+            host="127.0.0.1",
+            user="root",
+            password="2GS@bPYcgiMyL14A",
+            database="Macroeconomics",
+            port=4423
+        )
+        cursor = db.cursor()
+
+        years_to_fetch = YEARS if first_run else [YEARS[-1]]  # full backfill first, then only the latest year
+        for year in years_to_fetch:
+            data = get_bea_data(year)
+            update_database(cursor, data)
+            db.commit()
+            print(f"[{datetime.now()}] {year} data update complete")
+
+    except pymysql.MySQLError as e:
+        print(f"[{datetime.now()}] Database error: {e}")
+    except Exception as e:
+        print(f"[{datetime.now()}] Other error: {e}")
+    finally:
+        if 'cursor' in locals():
+            cursor.close()
+        if 'db' in locals():
+            db.close()
+        print(f"[{datetime.now()}] Pass finished.\n")
+
+if __name__ == "__main__":
+    first_run = True
+    while True:
+        run_job(first_run)
+        first_run = False  # later passes only re-fetch the latest year
+        print(f"[{datetime.now()}] Sleeping {SLEEP_SECONDS} seconds (6 hours)...\n")
+        time.sleep(SLEEP_SECONDS)
diff --git a/lyq/Macroeconomic_SAALOCBITUSS_ASSET.py b/lyq/Macroeconomic_SAALOCBITUSS_ASSET.py
new file mode 100644
index 0000000..4ed688c
--- /dev/null
+++ b/lyq/Macroeconomic_SAALOCBITUSS_ASSET.py
@@ -0,0 +1,1536 @@
+from datetime import datetime
+import pymysql
+import requests
+from bs4 import BeautifulSoup
+from w3lib.html import remove_tags
+while True:
+    # import datetime
+    # now_time = datetime.datetime.now()
+    # next_time = now_time + datetime.timedelta(days=+1)
+    # next_year = next_time.date().year
+    # next_month = next_time.date().month
+    # next_day = next_time.date().day
+    # next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 20:30:01","%Y-%m-%d %H:%M:%S")
+    # timer_start_time = (next_time - now_time).total_seconds()
+    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
+    cursor = db.cursor()
+    sql = "select quarterly from SAALOCBITUSS_ASSET order by id desc limit 1"
+    cursor.execute(sql)
+    db.commit()
+    old_time = cursor.fetchall()
+    old_time = old_time[0][0]
+    # NOTE: 'list' shadows the Python builtin; kept as-is because the rest of
+    # this file indexes into it.
+    list = []
+    page = requests.get("https://www.federalreserve.gov/releases/h8/current/default.htm")
+    page = BeautifulSoup(page.text, 'html.parser')
+    page = page.find_all('table')
+    # The release date sits in the first row of the first H.8 table; rebuild
+    # it as 'YYYY/M/1' so it can be compared with the stored quarterly value.
+    table1 = page[0]
+    table1 = table1.select('tr')
+    timestamp = table1[0]
+    timestamp = remove_tags(str(timestamp))
+    timestamp = timestamp.split()
+    timestamp = timestamp[-2] + timestamp[-1]
+    months = {'Jan': '/1/1', 'Feb': '/2/1', 'Mar': '/3/1', 'Apr': '/4/1', 'May': '/5/1', 'Jun': '/6/1',
+              'Jul': '/7/1', 'Aug': '/8/1', 'Sep': '/9/1', 'Oct': '/10/1', 'Nov': '/11/1', 'Dec': '/12/1'}
+    for name, number in months.items():
+        timestamp = timestamp.replace(name, number)
+    timestamp = datetime.strptime(timestamp, '%Y/%m/%d')
+    if timestamp != old_time:
+        # Table 1: take the last column of each data row.
+        for i in range(1, 31):
+            table_value = table1[i]
+            table_value = remove_tags(str(table_value))
+            table_value = table_value.split()
+            if table_value == []:
+                continue
+            if table_value == ['Assets']:
+                continue
+            if table_value == ['Liabilities']:
+                continue
+            list += [table_value[-1].replace(',', '')]
+
+        # The remaining tables share one layout: take the fifth column from
+        # the right of each data row.
+        table2 = page[1]
+        table2 = table2.select('tr')
+        for i in range(2, 46):
+            table_value = table2[i]
+            table_value = remove_tags(str(table_value))
+            table_value = table_value.split()
+            if table_value == []:
+                continue
+            if table_value == ['Assets']:
+                continue
+            if table_value == ['Liabilities']:
+                continue
+            list += [table_value[-5].replace(',', '')]
+
+        table3 = page[2]
+        table3 = table3.select('tr')
+        for i in range(2, 46):
+            table_value = table3[i]
+            table_value = remove_tags(str(table_value))
+            table_value = table_value.split()
+            if table_value == []:
+                continue
+            if table_value == ['Assets']:
+                continue
+            if table_value == ['Liabilities']:
+                continue
+            list += [table_value[-5].replace(',', '')]
+
+        table4 = page[3]
+        table4 = table4.select('tr')
+        for i in range(2, 46):
+            table_value = table4[i]
+            table_value = remove_tags(str(table_value))
+            table_value = table_value.split()
+            if table_value == []:
+                continue
+            if table_value == ['Assets']:
+                continue
+            if table_value == ['Liabilities']:
+                continue
list+=[table_value[-5].replace(',', '')] + + table5=page[4] + table5=table5.select('tr') + for i in range(2,46): + table_value=table5[i] + table_value = remove_tags(str(table_value)) + table_value = table_value.split() + if table_value == []: + continue + if table_value == ['Assets']: + continue + if table_value == ['Liabilities']: + continue + list+=[table_value[-5].replace(',', '')] + + table6=page[5] + table6=table6.select('tr') + for i in range(2,46): + table_value=table6[i] + table_value = remove_tags(str(table_value)) + table_value = table_value.split() + if table_value == []: + continue + if table_value == ['Assets']: + continue + if table_value == ['Liabilities']: + continue + list+=[table_value[-5].replace(',', '')] + + table7=page[6] + table7=table7.select('tr') + for i in range(2,46): + table_value=table7[i] + table_value = remove_tags(str(table_value)) + table_value = table_value.split() + if table_value == []: + continue + if table_value == ['Assets']: + continue + if table_value == ['Liabilities']: + continue + list+=[table_value[-5].replace(',', '')] + + table8=page[7] + table8=table8.select('tr') + for i in range(2,46): + table_value=table8[i] + table_value = remove_tags(str(table_value)) + table_value = table_value.split() + if table_value == []: + continue + if table_value == ['Assets']: + continue + if table_value == ['Liabilities']: + continue + list+=[table_value[-5].replace(',', '')] + + table9=page[8] + table9=table9.select('tr') + for i in range(2,46): + table_value=table9[i] + table_value = remove_tags(str(table_value)) + table_value = table_value.split() + if table_value == []: + continue + if table_value == ['Assets']: + continue + if table_value == ['Liabilities']: + continue + list+=[table_value[-5].replace(',', '')] + + table10=page[9] + table10=table10.select('tr') + for i in range(2,46): + table_value=table10[i] + table_value = remove_tags(str(table_value)) + table_value = table_value.split() + if table_value == []: + continue + if table_value == ['Assets']: + continue + if table_value == ['Liabilities']: + continue + list+=[table_value[-5].replace(',', '')] + + table11 = page[10] + table11 = table11.select('tr') + for i in range(2, 46): + table_value = table11[i] + table_value = remove_tags(str(table_value)) + table_value = table_value.split() + if table_value == []: + continue + if table_value == ['Assets']: + continue + if table_value == ['Liabilities']: + continue + list += [table_value[-5].replace(',', '')] + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Bank credit', list[0]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Securities in bank credit', list[1]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Treasury and agency securities', list[2]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other securities', list[3]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans and leases in bank credit', list[4]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial and industrial loans', list[5]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + 
timestamp, 'Real estate loans', list[6]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residential real estate loans', list[7]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Revolving home equity loans', list[8]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Closed-end residential loans', list[9]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial real estate loans', list[10]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Consumer loans', list[11]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Credit cards and other revolving plans', list[12]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other consumer loans', list[13]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other loans and leases', list[14]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'LESS: Allowance for loan and lease losses', list[15]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Cash assets', list[16]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total federal funds sold and reverse RPs', list[17]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to commercial banks', list[18]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other assets including trading assets', list[19]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total assets', list[20]) + cursor.execute(sql) + + sql = "insert into SAALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Deposits', list[21]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Large time deposits', list[22]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other deposits', list[23]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Borrowings', list[24]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other liabilities including trading liabilities', list[25]) + cursor.execute(sql) + sql = "insert into SAALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total liabilities', list[26]) + cursor.execute(sql) + + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Bank credit', list[27]) + cursor.execute(sql) + sql = "insert into 
AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Securities in bank credit', list[28]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Treasury and agency securities', list[29]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[30]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[31]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other securities', list[32]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[33]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[34]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans and leases in bank credit', list[35]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial and industrial loans', list[36]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Real estate loans', list[37]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residential real estate loans', list[38]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Revolving home equity loans', list[39]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Closed-end residential loans', list[40]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial real estate loans', list[41]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Construction and land development loans', list[42]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by farmland', list[43]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by multifamily properties', list[44]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by nonfarm nonresidential properties', list[45]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Consumer loans', list[46]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Credit cards and other revolving plans', list[47]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other consumer loans', list[48]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Automobile loans', list[49]) + 
cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other consumer loans', list[50]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other loans and leases', list[51]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to nondepository financial institutions', list[52]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All loans not elsewhere classified', list[53]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'LESS: Allowance for loan and lease losses', list[54]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Cash assets', list[55]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total federal funds sold and reverse RPs', list[56]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to commercial banks', list[57]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other assets including trading assets', list[58]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total assets', list[59]) + cursor.execute(sql) + + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Deposits', list[60]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Large time deposits', list[61]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other deposits', list[62]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Borrowings', list[63]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Net due to related foreign offices', list[64]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other liabilities including trading liabilities', list[65]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total liabilities', list[66]) + cursor.execute(sql) + sql = "insert into AALOCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residual (Assets LESS Liabilities)', list[67]) + cursor.execute(sql) + + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Bank credit', list[68]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Securities in bank credit', list[69]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Treasury and agency securities', list[70]) + cursor.execute(sql) + sql = "insert into 
AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[71]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[72]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other securities', list[73]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[74]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[75]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans and leases in bank credit', list[76]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial and industrial loans', list[77]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Real estate loans', list[78]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residential real estate loans', list[79]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Revolving home equity loans', list[80]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Closed-end residential loans', list[81]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial real estate loans', list[82]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Construction and land development loans', list[83]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by farmland', list[84]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by multifamily properties', list[85]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by nonfarm nonresidential properties', list[86]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Consumer loans', list[87]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Credit cards and other revolving plans', list[88]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other consumer loans', list[89]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Automobile loans', list[90]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other consumer loans', list[91]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other loans and leases', list[92]) + 
cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to nondepository financial institutions', list[93]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All loans not elsewhere classified', list[94]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'LESS: Allowance for loan and lease losses', list[95]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Cash assets', list[96]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total federal funds sold and reverse RPs', list[97]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to commercial banks', list[98]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other assets including trading assets', list[99]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total assets', list[100]) + cursor.execute(sql) + + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Deposits', list[101]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Large time deposits', list[102]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other deposits', list[103]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Borrowings', list[104]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Net due to related foreign offices', list[105]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other liabilities including trading liabilities', list[106]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total liabilities', list[107]) + cursor.execute(sql) + sql = "insert into AALOCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residual (Assets LESS Liabilities)', list[108]) + cursor.execute(sql) + + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Bank credit', list[109]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Securities in bank credit', list[110]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Treasury and agency securities', list[111]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[112]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[113]) + cursor.execute(sql) + sql = "insert into 
AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other securities', list[114]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[115]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[116]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans and leases in bank credit', list[117]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial and industrial loans', list[118]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Real estate loans', list[119]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residential real estate loans', list[120]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Revolving home equity loans', list[121]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Closed-end residential loans', list[122]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial real estate loans', list[123]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Construction and land development loans', list[124]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by farmland', list[125]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by multifamily properties', list[126]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by nonfarm nonresidential properties', list[127]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Consumer loans', list[128]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Credit cards and other revolving plans', list[129]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other consumer loans', list[130]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Automobile loans', list[131]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other consumer loans', list[132]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other loans and leases', list[133]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to nondepository financial institutions', list[134]) + cursor.execute(sql) + sql = "insert into 
AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All loans not elsewhere classified', list[135]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'LESS: Allowance for loan and lease losses', list[136]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Cash assets', list[137]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total federal funds sold and reverse RPs', list[138]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to commercial banks', list[139]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other assets including trading assets', list[140]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total assets', list[141]) + cursor.execute(sql) + + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Deposits', list[142]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Large time deposits', list[143]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other deposits', list[144]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Borrowings', list[145]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Net due to related foreign offices', list[146]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other liabilities including trading liabilities', list[147]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total liabilities', list[148]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residual (Assets LESS Liabilities)', list[149]) + cursor.execute(sql) + + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Bank credit', list[150]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Securities in bank credit', list[151]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Treasury and agency securities', list[152]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[153]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[154]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other securities', list[155]) + cursor.execute(sql) + sql = "insert into 
AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[156]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[157]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans and leases in bank credit', list[158]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial and industrial loans', list[159]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Real estate loans', list[160]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residential real estate loans', list[161]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Revolving home equity loans', list[162]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Closed-end residential loans', list[163]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial real estate loans', list[164]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Construction and land development loans', list[165]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by farmland', list[166]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by multifamily properties', list[167]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by nonfarm nonresidential properties', list[168]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Consumer loans', list[169]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Credit cards and other revolving plans', list[170]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other consumer loans', list[171]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Automobile loans', list[172]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other consumer loans', list[173]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other loans and leases', list[174]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to nondepository financial institutions', list[175]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All loans not elsewhere classified', list[176]) + cursor.execute(sql) + sql = "insert into 
AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'LESS: Allowance for loan and lease losses', list[177]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Cash assets', list[178]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total federal funds sold and reverse RPs', list[179]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to commercial banks', list[180]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other assets including trading assets', list[181]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total assets', list[182]) + cursor.execute(sql) + + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Deposits', list[183]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Large time deposits', list[184]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other deposits', list[185]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Borrowings', list[186]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Net due to related foreign offices', list[187]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other liabilities including trading liabilities', list[188]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Total liabilities', list[189]) + cursor.execute(sql) + sql = "insert into AALODCCBITUSNS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residual (Assets LESS Liabilities)', list[190]) + cursor.execute(sql) + + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Bank credit', list[191]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Securities in bank credit', list[192]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Treasury and agency securities', list[193]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[194]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[195]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other securities', list[196]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Mortgage-backed securities (MBS)', list[197]) + cursor.execute(sql) + sql = "insert into 
AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Non-MBS', list[198]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans and leases in bank credit', list[199]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial and industrial loans', list[200]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Real estate loans', list[201]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Residential real estate loans', list[202]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Revolving home equity loans', list[203]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Closed-end residential loans', list[204]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Commercial real estate loans', list[205]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Construction and land development loans', list[206]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by farmland', list[207]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by multifamily properties', list[208]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Secured by nonfarm nonresidential properties', list[209]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Consumer loans', list[210]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Credit cards and other revolving plans', list[211]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Other consumer loans', list[212]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Automobile loans', list[213]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other consumer loans', list[214]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All other loans and leases', list[215]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'Loans to nondepository financial institutions', list[216]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'All loans not elsewhere classified', list[217]) + cursor.execute(sql) + sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % ( + timestamp, 'LESS: Allowance for loan and lease losses', list[218]) + cursor.execute(sql) + sql = "insert into 
AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % (
+                timestamp, 'Cash assets', list[219])
+            cursor.execute(sql)
+            # The remaining balance-sheet tables all share one row layout:
+            # 33 asset rows followed by 8 liability rows, read from
+            # consecutive positions of `list`. Drive the inserts from name
+            # tables instead of repeating one statement per row.
+            ASSET_NAMES = (
+                'Bank credit', 'Securities in bank credit',
+                'Treasury and agency securities',
+                'Mortgage-backed securities (MBS)', 'Non-MBS',
+                'Other securities', 'Mortgage-backed securities (MBS)',
+                'Non-MBS', 'Loans and leases in bank credit',
+                'Commercial and industrial loans', 'Real estate loans',
+                'Residential real estate loans', 'Revolving home equity loans',
+                'Closed-end residential loans', 'Commercial real estate loans',
+                'Construction and land development loans',
+                'Secured by farmland', 'Secured by multifamily properties',
+                'Secured by nonfarm nonresidential properties',
+                'Consumer loans', 'Credit cards and other revolving plans',
+                'Other consumer loans', 'Automobile loans',
+                'All other consumer loans', 'All other loans and leases',
+                'Loans to nondepository financial institutions',
+                'All loans not elsewhere classified',
+                'LESS: Allowance for loan and lease losses', 'Cash assets',
+                'Total federal funds sold and reverse RPs',
+                'Loans to commercial banks',
+                'Other assets including trading assets', 'Total assets')
+            LIABILITY_NAMES = (
+                'Deposits', 'Large time deposits', 'Other deposits',
+                'Borrowings', 'Net due to related foreign offices',
+                'Other liabilities including trading liabilities',
+                'Total liabilities', 'Residual (Assets LESS Liabilities)')
+            # Finish the AALOLDCCBITUSS_ASSET block: list[220]..list[223]
+            # hold its last four rows ('Cash assets' above was list[219]).
+            idx = 220
+            for name in ASSET_NAMES[29:]:
+                sql = "insert into AALOLDCCBITUSS_ASSET(quarterly,name,value)values('%s','%s','%s')" % (
+                    timestamp, name, list[idx])
+                cursor.execute(sql)
+                idx += 1
+            for name in LIABILITY_NAMES:
+                sql = "insert into AALOLDCCBITUSS_Liabilities(quarterly,name,value)values('%s','%s','%s')" % (
+                    timestamp, name, list[idx])
+                cursor.execute(sql)
+                idx += 1
+            # list[232]..list[436]: the same layout for the five remaining
+            # series, asset table then liability table for each prefix.
+            for prefix in ('AALOLDCCBITUSNS', 'AALOSDCCBITUSS', 'AALOSDCCBITUSNS',
+                           'AALOFRIITUSS', 'AALOFRIITUSNS'):
+                for suffix, names in (('_ASSET', ASSET_NAMES),
+                                      ('_Liabilities', LIABILITY_NAMES)):
+                    for name in names:
+                        sql = "insert into %s%s(quarterly,name,value)values('%s','%s','%s')" % (
+                            prefix, suffix, timestamp, name, list[idx])
+                        cursor.execute(sql)
+                        idx += 1
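+            # A note on the SQL above: values are spliced in with the %
+            # operator, so a name containing a quote would break the
+            # statement. A minimal sketch of an injection-safe alternative
+            # (hypothetical helper, not wired in; it assumes `cursor` and
+            # `timestamp` as defined above):
+            #
+            #     def insert_series(cursor, table, names, values, timestamp):
+            #         # one parameterized round trip per table
+            #         cursor.executemany(
+            #             "insert into " + table + "(quarterly,name,value) "
+            #             "values (%s, %s, %s)",
+            #             [(timestamp, n, v) for n, v in zip(names, values)])
+            #
+            #     insert_series(cursor, 'AALOFRIITUSNS_Liabilities',
+            #                   LIABILITY_NAMES, list[429:437], timestamp)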
+            db.commit()
+        else:
+            import time
+            time.sleep(43200)
+            # time.sleep(timer_start_time)
+
diff --git a/lyq/Macroeconomic_USTreasuriesSize.py b/lyq/Macroeconomic_USTreasuriesSize.py
new file mode 100644
index 0000000..e102e39
--- /dev/null
+++ b/lyq/Macroeconomic_USTreasuriesSize.py
@@ -0,0 +1,85 @@
+import requests
+import pymysql
+from datetime import datetime
+import time
+
+old_transaction_mtd_amt = 0
+condition = True
+db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
+cursor = db.cursor()
+while condition:
+    page = requests.get("https://api.fiscaldata.treasury.gov/services/api/fiscal_service/v1/accounting/dts/public_debt_transactions?fields=record_date,transaction_type,security_type,transaction_mtd_amt&sort=-record_date")
+    page = page.json()
+    page = page['data']
+    # The 23 most recent records published by the API
+    page_data = page[0:23]
+    for data in page_data:
+        # latest date already stored in the database
+        sql = "select date from USTreasuriesSize order by date desc limit 1"
+        cursor.execute(sql)
+        row = cursor.fetchone()
+        old_date = row[0] if row else None
+
+        record_date = data['record_date']
+        transaction_type = data['transaction_type']
+        security_type = data['security_type']
+        transaction_mtd_amt = data['transaction_mtd_amt']
+        record_date = datetime.strptime(record_date.replace('-', '/'), '%Y/%m/%d')
+        # Compare the newest date on the site against the newest date in the database
+        if record_date != old_date:
+            sql = "insert into USTreasuriesSize(date)values('%s')" % (record_date)
+            cursor.execute(sql)
+            db.commit()
+            sql = "select id from USTreasuriesSize order by id desc limit 1"
+            cursor.execute(sql)
+            id = cursor.fetchall()
+            id = id[0][0]
+            for data in page_data:
+                transaction_type = data['transaction_type']
+                security_type = data['security_type']
+                transaction_mtd_amt = data['transaction_mtd_amt']
+                if transaction_type == 'Issues':
+                    if security_type == 'Bills':
+                        # transaction_mtd_amt is month-to-date, so Bills rows
+                        # are accumulated across this batch
+                        transaction_mtd_amt = old_transaction_mtd_amt + int(transaction_mtd_amt)
+                        sql1 = "update USTreasuriesSize set TBill=%s where id=%s" % (transaction_mtd_amt, id)
+                        cursor.execute(sql1)
+                        db.commit()
+                        old_transaction_mtd_amt = transaction_mtd_amt
+
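+                    # Worked example of the accumulation above (illustrative
+                    # numbers, not real data): if the feed reports Bills
+                    # month-to-date issues of 500 and then 800 in this batch,
+                    # the loop writes 0 + 500 = 500, then 500 + 800 = 1300 to
+                    # TBill -- a running sum of the MTD values seen in this
+                    # batch, not a single day's amount.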
+                    elif security_type == 'Notes':
+                        sql2 = "update USTreasuriesSize set TNote=%s where id=%s" % (transaction_mtd_amt, id)
+                        cursor.execute(sql2)
+                        db.commit()
+
+                    elif security_type == 'Bonds':
+                        sql3 = "update USTreasuriesSize set TBond=%s where id=%s" % (transaction_mtd_amt, id)
+                        cursor.execute(sql3)
+                        db.commit()
+
+                    elif security_type == 'Inflation-Protected Securities Increment':
+                        sql4 = "update USTreasuriesSize set TIPS=%s where id=%s" % (transaction_mtd_amt, id)
+                        cursor.execute(sql4)
+                        db.commit()
+                    else:
+                        continue
+                else:
+                    continue
+        else:
+            continue
+
+    old_transaction_mtd_amt = 0
+    time.sleep(21600)
+
diff --git a/lyq/Macroeconomic_USTreasuriesYields_v2.py b/lyq/Macroeconomic_USTreasuriesYields_v2.py
new file mode 100644
index 0000000..f99df46
--- /dev/null
+++ b/lyq/Macroeconomic_USTreasuriesYields_v2.py
@@ -0,0 +1,96 @@
+import pymysql
+import time
+from selenium import webdriver
+from selenium.webdriver.chrome.options import Options
+from selenium.webdriver.chrome.service import Service
+from w3lib.html import remove_tags
+from bs4 import BeautifulSoup
+from datetime import datetime
+
+# Configure Selenium for headless scraping
+chrome_options = Options()
+chrome_options.add_argument("--headless")
+chrome_options.add_argument('--no-sandbox')
+chrome_options.add_argument('--disable-gpu')
+chrome_options.add_argument('--disable-dev-shm-usage')
+chrome_options.add_argument('blink-settings=imagesEnabled=false')
+
+# Selenium 3 style: pass executable_path directly (Selenium 4 would use Service)
+browser = webdriver.Chrome(executable_path="chromedriver", options=chrome_options)
+# Month-abbreviation replacements used to normalize the scraped date
+MONTH_MAPPING = {
+    "Jan": "/1/", "Feb": "/2/", "Mar": "/3/", "Apr": "/4/",
+    "May": "/5/", "Jun": "/6/", "Jul": "/7/", "Aug": "/8/",
+    "Sep": "/9/", "Oct": "/10/", "Nov": "/11/", "Dec": "/12/"
+}
+
+# Database connection settings
+DB_CONFIG = {
+    "host": "127.0.0.1",
+    "user": "root",
+    "password": "2GS@bPYcgiMyL14A",
+    "database": "Macroeconomics",
+    "port": 4423
+}
+
+def fetch_web_data():
+    """Scrape the H.15 release page and parse the latest date and rate cells."""
+    browser.get("https://www.federalreserve.gov/releases/h15/")
+    soup = BeautifulSoup(browser.page_source, 'html.parser')
+
+    # The last column header carries the release date
+    date_text = soup.find_all('th', class_="colhead sticky sticky-row-cell")[-1].get_text(strip=True)
+    for month, replacement in MONTH_MAPPING.items():
+        date_text = date_text.replace(month, replacement)
+    date = datetime.strptime(date_text.replace('*', ''), '%Y/%m/%d')
+
+    # Flatten every data cell of the table into a list of strings
+    data = [remove_tags(str(td)).strip() for td in soup.find_all('td', class_="data")]
+    return date, data
+
+def get_latest_db_date():
+    """Return the most recent date stored in the database, or None."""
+    with pymysql.connect(**DB_CONFIG) as conn:
+        with conn.cursor() as cursor:
+            cursor.execute("SELECT MAX(date) FROM USTreasuriesYields")
+            result = cursor.fetchone()
+            return result[0] if result[0] else None
+
+def insert_data(date, rates, mprime, dpcredit):
+    """Insert one day of Treasury yields plus the prime/discount rates."""
+    with pymysql.connect(**DB_CONFIG) as conn:
+        with conn.cursor() as cursor:
+            sql_treasuries = """
+                INSERT INTO USTreasuriesYields (date, 1_Mo, 3_Mo, 6_Mo, 1_Yr, 2_Yr, 5_Yr, 10_Yr, 20_Yr, 30_Yr)
+                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+            """
+            sql_interest_rate = """
+                INSERT INTO InterestRate (date, name, _value)
+                VALUES (%s, %s, %s)
+            """
+            cursor.execute(sql_treasuries, [date] + rates)
+            cursor.execute(sql_interest_rate, (date, 'BPL', mprime))
+            cursor.execute(sql_interest_rate, (date, 'DWPC', dpcredit))
+            conn.commit()
+
+def main():
+    while True:
+        new_date, data = fetch_web_data()
+        old_date = get_latest_db_date()
+
+        if old_date and new_date <= old_date:
+            time.sleep(21600)  # 6 hours
+            continue
+
+        # Extract the rate readings
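+        # The magic numbers below are flat positions in the H.15 "data"
+        # cells. They appear to step in fives, consistent with one cell per
+        # business day and the last cell of each Treasury row being the most
+        # recent reading, while 54 and 59 appear to pick the bank prime loan
+        # and discount-window rows. A page-layout change silently shifts
+        # these offsets, so a self-describing (hypothetical) mapping may be
+        # easier to audit:
+        #
+        #     RATE_CELL = {"1_Mo": 104, "3_Mo": 109, "6_Mo": 114, "1_Yr": 119,
+        #                  "2_Yr": 124, "5_Yr": 134, "10_Yr": 144,
+        #                  "20_Yr": 149, "30_Yr": 154, "BPL": 54, "DWPC": 59}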
+        rates = [
+            data[i].replace('ND', 'NULL') if i < len(data) else 'NULL'
+            for i in [104, 109, 114, 119, 124, 134, 144, 149, 154]
+        ]
+        mprime = data[54] if len(data) > 54 else 'NULL'
+        dpcredit = data[59] if len(data) > 59 else 'NULL'
+
+        insert_data(new_date, rates, mprime, dpcredit)
+
+if __name__ == "__main__":
+    main()
diff --git a/lyq/Macroeconomic_WEI.py b/lyq/Macroeconomic_WEI.py
new file mode 100644
index 0000000..83995d1
--- /dev/null
+++ b/lyq/Macroeconomic_WEI.py
@@ -0,0 +1,105 @@
+import time
+import datetime
+from full_fred.fred import Fred
+import pymysql
+
+fred = Fred('example_key.txt')
+fred.set_api_key_file('example_key.txt')
+times = "00:00:00"
+while True:
+    # now_time = datetime.datetime.now()
+    # next_time = now_time + datetime.timedelta(days=+1)
+    # next_year = next_time.date().year
+    # next_month = next_time.date().month
+    # next_day = next_time.date().day
+    # next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 15:00:00", "%Y-%m-%d %H:%M:%S")
+    # timer_start_time = (next_time - now_time).total_seconds()
+    # Pull the full WEI series from FRED
+    data = fred.get_series_df('WEI')
+    list1 = [i for i in data['date']]
+    list2 = [i2 for i2 in data['value']]
+    # Take the seven most recent observations; each date is stamped at
+    # 00:00:00 and converted to a millisecond epoch shifted to UTC+8.
+    stamps = []
+    values = []
+    for n in range(1, 8):
+        date_str = list1[-n] + ' ' + times
+        timeArray = time.strptime(date_str, "%Y-%m-%d %H:%M:%S")
+        timestamp = time.mktime(timeArray)
+        stamps.append(int(timestamp * 1000) - 28800000)
+        values.append(list2[-n])
+
+    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
+    cursor = db.cursor()
+    sql = "select times from WEI order by times desc limit 1"
+    cursor.execute(sql)
+    db.commit()
+    old_time = cursor.fetchall()
+    old_time = old_time[0][0]
+    # Rewrite the six older observations in place
+    for ts, value in zip(stamps[1:], values[1:]):
+        sql = "update WEI set WEI=%s where times='%s'" % (value, ts)
+        cursor.execute(sql)
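+    # The WEI is a revised series: FRED may restate recent weeks after first
+    # publication, so the six older points are overwritten every cycle and
+    # only the newest point is ever inserted. Example with toy values: if
+    # last week's stored reading was 2.01 and FRED now reports 1.98 for the
+    # same week, the UPDATE above silently corrects the stored row before
+    # the freshness check below.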
+    db.commit()
+    if stamps[0] == old_time:
+        # newest observation already stored; wait six hours
+        time.sleep(21600)
+        # time.sleep(timer_start_time)
+    else:
+        sql = "insert into WEI(times,WEI)values('%s','%s')" % (stamps[0], values[0])
+        cursor.execute(sql)
+        db.commit()
\ No newline at end of file
diff --git a/lyq/README.md b/lyq/README.md
index 6141ed0..923f0c9 100644
--- a/lyq/README.md
+++ b/lyq/README.md
@@ -1,5 +1,35 @@
-目录简介:
-
-相关联其他文件:
-
-功能简介:
\ No newline at end of file
+arh999_lyq.py --BTC data collection script (ahr999 index)
+arh999eth_lyq.py --ETH data collection script (ahr999 index)
+Binance_fapi.py --BTC data collection script
+btc_price_fetcher.py --BTC data collection script
+btc_prices.py --BTC data collection script
+btc_stats_qt.py --BTC data collection script
+btc_update.py --BTC data collection script
+btc_utxos_lyq3.py --BTC data collection script
+btc_utxos_update_lyq3.py --BTC data collection script
+btc24h_db_if.py --MySQL interface for the BTC data scripts
+btc24h_redis_if.py --Redis interface for the BTC data scripts
+btc24h_stats.py --real-time BTC statistics updater
+check_order_lyq.py --BTCUSDT/ETHUSDT retrieval script
+check_zone_lyq.py --BTCUSDT/ETHUSDT retrieval script
+CoinmarketCap.py --coin, market-cap, volume and price-change retrieval script
+db_if_qt.py --MySQL interface for the BTC data scripts
+exchangeRate_lyq.py --real-time exchange-rate script
+Macroeconomic_COVITGDP_v2.py --macroeconomic data script
+Macroeconomic_CPI_NSA_v2.py --macroeconomic data script
+Macroeconomic_FARBODI.py --macroeconomic data script
+Macroeconomic_FBI_v2.py --macroeconomic data script
+Macroeconomic_FER.py --macroeconomic data script
+Macroeconomic_InterestRate.py --macroeconomic data script
+Macroeconomic_Loan.py --macroeconomic data script
+Macroeconomic_MoneyStockMeasures.py --macroeconomic data script
+Macroeconomic_PCE_v3.py --macroeconomic data script
+Macroeconomic_SAALOCBITUSS_ASSET.py --macroeconomic data script
+Macroeconomic_USTreasuriesSize.py --macroeconomic data script
+Macroeconomic_USTreasuriesYields_v2.py --macroeconomic data script
+Macroeconomic_WEI.py --macroeconomic data script
+nochain_eth_lyq.py --supply and supply-ratio script
+nochain_lyq_utc08.py --supply and supply-ratio script
+nochain_lyq_v2.py --supply and supply-ratio script
+nochain_update_lyq.py --supply and supply-ratio script
+redis_if_qt.py --Redis interface for the BTC data scripts
\ No newline at end of file
diff --git a/lyq/arh999_lyq.py b/lyq/arh999_lyq.py
new file mode 100644
index 0000000..350c6f1
--- /dev/null
+++ b/lyq/arh999_lyq.py
@@ -0,0 +1,1005 @@
+# coding=utf-8
+import ujson
+from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
+import time
+import requests
+from loguru import logger
+import datetime
+import pymysql
+import math
+
+g_prices = {}
+g_dbif = None
+g_lastts = 0
+# This script streams BTC prices from Binance's spot WebSocket API and derives
+# the ahr999 family of indicators from them. Imports: ujson (fast JSON),
+# SpotWebsocketClient (Binance spot WebSocket client), time/datetime (time
+# handling), requests (HTTP), loguru (logging), pymysql (MySQL client), math.
+# Globals: g_prices caches daily prices keyed by UTC-midnight epoch strings,
+# g_dbif holds the MySQL interface object, g_lastts the timestamp of the last
+# processed tick.
+def get_day60_rise(day, prices):
+    total = 0
+    cnt = 0
+    for i in range(60):
+        day_key = str(day)
+        prev_key = str(day - 3600 * 24)
+        if day_key in prices and prev_key in prices:
+            try:
+                total += (prices[day_key] - prices[prev_key]) / prices[prev_key]
+                cnt += 1
+            except Exception:
+                pass
+        day -= 3600 * 24  # step back one day even when a price is missing
+    return total
+# get_day60_rise sums the daily percentage price changes over the 60 days
+# ending at `day`. `prices` maps epoch-second strings (UTC midnight) to daily
+# prices; days with a missing price or a zero previous price are skipped.
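+# Worked example (toy numbers): with prices of 100, 110 and 121 on three
+# consecutive days, the two daily changes are (110-100)/100 = 0.10 and
+# (121-110)/110 = 0.10, so the summed "rise" over the window is 0.20 -- a sum
+# of daily returns, not the compounded 21% change.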
+def get_days_rise(day, maxdays, prices):
+    total = 0
+    cnt = 0
+    for i in range(maxdays):
+        day_key = str(day)
+        prev_key = str(day - 3600 * 24)
+        if day_key in prices and prev_key in prices:
+            try:
+                total += (prices[day_key] - prices[prev_key]) / prices[prev_key]
+                cnt += 1
+            except Exception:
+                pass
+        day -= 3600 * 24  # step back one day even when a price is missing
+    print("get_days_rise", day, maxdays, cnt, total)
+    return total
+# Generalization of get_day60_rise: sums the daily percentage changes over
+# the `maxdays` days ending at `day`. Used below with windows of 7, 30, 60
+# and 90 days.
+def append_jzr_day60(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
+    dbif.append_jzr60(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
+# Thin wrapper that stores one day's price together with its 60/7/30/90-day
+# cumulative rises through the database interface.
+def sync_jzr_day60(dbif, prices):
+    for day in prices:
+        print(day, prices[day])
+        day60_rise = get_days_rise(int(day), 60, prices)
+        day7_rise = get_days_rise(int(day), 7, prices)
+        day30_rise = get_days_rise(int(day), 30, prices)
+        day90_rise = get_days_rise(int(day), 90, prices)
+        print(day, day60_rise)
+        append_jzr_day60(dbif, day, prices[day], day60_rise, day7_rise, day30_rise, day90_rise)
+# Backfill: for every day in the price history, compute the 60/7/30/90-day
+# rises and write them to the database.
+def check_jzr60_sync(dbif):
+    return dbif.check_jzr60_sync()
+# Returns True when the jzr60 table already contains data, i.e. the backfill
+# has been done.
+def append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
+    dbif.append_jzr60_day(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
+# Appends one intraday row to the jzr60 day table (see Arh99DbIf.append_jzr60_day).
+def append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise):
+    dbif.append_jzr60(dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
+# Inserts or updates the daily jzr60 row keyed by the UTC-midnight timestamp.
+def clean_jzr60day(dbif, clean_day):
+    dbif.clean_jzr60_day(clean_day)
+# Deletes jzr60 day-table rows older than `clean_day` so the intraday table
+# stays small.
+def handle_jzr_day60(dbif, day, dayutc, price, prices):
+    day60_rise = get_days_rise(dayutc, 60, prices)
+    day7_rise = get_days_rise(dayutc, 7, prices)
+    day30_rise = get_days_rise(dayutc, 30, prices)
+    day90_rise = get_days_rise(dayutc, 90, prices)
+    print(dayutc, price, day, day60_rise)
+    append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise)
+    append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
+    clean_day = dayutc - 3600 * 24 * 2
+    clean_jzr60day(dbif, clean_day)
+# Per-tick jzr60 pipeline: recompute the 7/30/60/90-day rises, append the
+# intraday row, upsert the daily row, then drop day-table rows older than
+# two days.
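+# The Arh99DbIf class below assumes tables of roughly this shape (a sketch
+# inferred from the queries; the real DDL may differ):
+#
+#     CREATE TABLE arh99v3a (
+#         id     INT AUTO_INCREMENT PRIMARY KEY,
+#         unixdt DATETIME,
+#         price  DOUBLE,
+#         arh99  DOUBLE,
+#         arh99x DOUBLE
+#     );
+#
+# jzr60v3a / ma730v3a and their *day companions follow the same pattern with
+# their own metric columns.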
+class Arh99DbIf:
+    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
+        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
+        print("init arh99 db success!")
+    # MySQL interface for the arh999 tables. The constructor opens a single
+    # pymysql connection with a DictCursor so results come back as dicts.
+    def check_sync(self):
+        synced = False
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `arh99v3a`"
+            cursor.execute(sql_query)
+            result = cursor.fetchone()
+            print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        synced = True
+        self.conn.commit()
+        #print("synced", synced)
+        return synced
+    # True when arh99v3a already holds rows, i.e. the historical backfill ran.
+    def append(self, day, price, arh99, arh99x):
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `arh99v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
+            cursor.execute(sql_query, (int(day),))
+            result = cursor.fetchone()
+            #print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        sql_update = 'UPDATE arh99v3a SET `arh99`=%s, `arh99x`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
+                        print(sql_update)
+                        cursor.execute(sql_update, (arh99, arh99x, price, int(day), int(day)))
+                    else:
+                        sql_insert = "INSERT INTO `arh99v3a` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
+                        print(sql_insert)
+                        cursor.execute(sql_insert, (day, price, arh99, arh99x))
+        self.conn.commit()
+    # Upsert of the daily arh99 row keyed by unixdt: update the row for `day`
+    # if it exists, otherwise insert a new one.
+    def append_day(self, day, price, arh99, arh99x):
+        with self.conn.cursor() as cursor:
+            sql_insert = "INSERT INTO `arh99v3aday` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
+            print(sql_insert)
+            cursor.execute(sql_insert, (day, price, arh99, arh99x))
+        self.conn.commit()
+    # Plain insert into the intraday table arh99v3aday; no upsert needed since
+    # every tick gets its own row.
+    def clean_day(self, day):
+        with self.conn.cursor() as cursor:
+            sql_clean = "DELETE from arh99v3aday where unixdt < FROM_UNIXTIME(%s)"
+            cursor.execute(sql_clean, (int(day),))
+        self.conn.commit()
+    # Drops intraday rows older than `day` to keep arh99v3aday small.
+    def check_jzr60_sync(self):
+        synced = False
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `jzr60v3a`"
+            cursor.execute(sql_query)
+            result = cursor.fetchone()
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        synced = True
+        self.conn.commit()
+        #print("synced", synced)
+        return synced
+    # True when jzr60v3a already holds rows.
+    def append_jzr60(self, day, price, jzr60, jzr7, jzr30, jzr90):
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `jzr60v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
+            cursor.execute(sql_query, (int(day),))
+            result = cursor.fetchone()
+            #print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        sql_update = 'UPDATE jzr60v3a SET `jzr60`=%s,`jzr7`=%s,`jzr30`=%s,`jzr90`=%s,`price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
+                        print(sql_update)
+                        cursor.execute(sql_update, (jzr60, jzr7, jzr30, jzr90, price, int(day), int(day)))
+                    else:
+                        sql_insert = "INSERT INTO `jzr60v3a` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+                        print(sql_insert)
+                        cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
+        self.conn.commit()
+    # Upsert of the daily jzr60 row: update when a row for `day` exists,
+    # otherwise insert price plus the 60/7/30/90-day rise values.
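+    # The SELECT COUNT + UPDATE/INSERT pairs above could be collapsed into a
+    # single statement if unixdt carried a UNIQUE index (an assumption, not
+    # guaranteed by the current schema), e.g.:
+    #
+    #     INSERT INTO arh99v3a (unixdt, price, arh99, arh99x)
+    #     VALUES (FROM_UNIXTIME(%s), %s, %s, %s)
+    #     ON DUPLICATE KEY UPDATE price=VALUES(price), arh99=VALUES(arh99),
+    #                             arh99x=VALUES(arh99x);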
+    def append_jzr60_day(self, day, price, jzr60, jzr7, jzr30, jzr90):
+        with self.conn.cursor() as cursor:
+            sql_insert = "INSERT INTO `jzr60v3aday` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+            print(sql_insert)
+            cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
+        self.conn.commit()
+    # Plain insert of one intraday row into jzr60v3aday.
+    def clean_jzr60_day(self, day):
+        with self.conn.cursor() as cursor:
+            sql_clean = "DELETE from jzr60v3aday where unixdt < FROM_UNIXTIME(%s)"
+            cursor.execute(sql_clean, (int(day),))
+        self.conn.commit()
+    # Drops intraday jzr60 rows older than `day`.
+    def check_ma730_sync(self):
+        synced = False
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `ma730v3a`"
+            cursor.execute(sql_query)
+            result = cursor.fetchone()
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        synced = True
+        self.conn.commit()
+        #print("synced", synced)
+        return synced
+    # True when ma730v3a already holds rows.
+    def append_ma730(self, day, price, ma730, ma365, ma200):
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `ma730v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
+            cursor.execute(sql_query, (int(day),))
+            result = cursor.fetchone()
+            #print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    ma730x5 = ma730*5
+                    if result["COUNT(id)"] > 0:
+                        sql_update = 'UPDATE ma730v3a SET `ma730`=%s, `ma730x5`=%s, `ma365`=%s, `ma200`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
+                        print(sql_update)
+                        cursor.execute(sql_update, (ma730, ma730x5, ma365, ma200, price, int(day), int(day)))
+                    else:
+                        sql_insert = "INSERT INTO `ma730v3a` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+                        print(sql_insert)
+                        cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
+        self.conn.commit()
+    # Upsert of the daily moving-average row: stores the 730/365/200-day
+    # averages plus ma730x5 (five times the 2-year average) keyed by unixdt.
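+    # ma730x5 = 5 * ma730 reproduces the "2-Year MA Multiplier" band: price
+    # above five times the 730-day moving average has historically flagged
+    # cycle tops, price below the 730-day average cycle bottoms. Example:
+    # with ma730 = 20000, the band stored alongside it is 100000.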
+    def append_ma730_day(self, day, price, ma730, ma365, ma200):
+        with self.conn.cursor() as cursor:
+            ma730x5 = ma730*5
+            sql_insert = "INSERT INTO `ma730v3aday` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+            print(sql_insert)
+            cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
+        self.conn.commit()
+    # Plain insert of one intraday moving-average row into ma730v3aday.
+    def clean_ma730_day(self, day):
+        with self.conn.cursor() as cursor:
+            sql_clean = "DELETE from ma730v3aday where unixdt < FROM_UNIXTIME(%s)"
+            cursor.execute(sql_clean, (int(day),))
+        self.conn.commit()
+    # Drops intraday moving-average rows older than `day`.
+
+def get_coin_days(day):
+    # Bitcoin's age in days at `day`, counted from the genesis block
+    # (2009-01-03 00:00 UTC = epoch 1230940800).
+    return (day - 1230940800) / (3600 * 24)
+
+def get_coin_exp(days):
+    # Exponential growth valuation used by the ahr999 index:
+    # 10 ** (5.84 * log10(coin age in days) - 17.01).
+    return math.pow(10, 5.84 * math.log10(days) - 17.01)
+
+def cal_day200_price(prices, day):
+    total = 0
+    cnt = 0
+    for i in range(200):
+        if str(day) in prices:
+            total += prices[str(day)]
+            cnt += 1
+        day = day - 3600 * 24
+    if cnt > 0:
+        return total/cnt
+    return 0
+# cal_day200_price averages the daily prices over the 200 days ending at
+# `day` (the 200-day DCA cost); days without a price are skipped, and 0 is
+# returned when no price at all is available in the window.
+def cal_arh99(prices, day, price):
+    day200 = cal_day200_price(prices, day)
+    #print("day200", day200)
+    days = get_coin_days(day)
+    #print("days", days)
+    exp = get_coin_exp(days)
+    #print("exp", exp, price)
+    try:
+        arh99 = (float(price)/day200)*(float(price)/exp)
+        arh99x = (day200/float(price))*(exp/float(price))*3
+    except:
+        arh99 = 0
+        arh99x = 0
+    #print("arh99", arh99)
+    print("cal_arh99", day, price, arh99, arh99x)
+    return arh99, arh99x
+# cal_arh99 computes the ahr999 pair for one day:
+#     arh99  = (price / 200-day DCA cost) * (price / exponential valuation)
+#     arh99x = (200-day cost / price) * (valuation / price) * 3
+# Division errors (e.g. an empty 200-day window) yield (0, 0).
+def check_sync(dbif):
+    return dbif.check_sync()
+# Returns True when the arh99 table already contains backfilled data.
+def append_arh99(dbif, day, price, arh99, arh99x):
+    dbif.append(day, price, arh99, arh99x)
+# Thin wrapper over Arh99DbIf.append: insert or update one day's arh99 row.
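+# Worked example (toy numbers): with price = 60000, a 200-day DCA cost
+# (cal_day200_price) of 40000 and an exponential valuation (get_coin_exp) of
+# 50000, cal_arh99 returns arh99 = (60000/40000)*(60000/50000) = 1.8 and
+# arh99x = (40000/60000)*(50000/60000)*3 = 1.6667; low arh99 readings have
+# historically marked accumulation zones.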
+def check_sync(dbif):
+    return dbif.check_sync()
+# check_sync(dbif): thin wrapper that asks the database interface whether the
+# arh99 table already holds data, i.e. whether the initial sync has been done.
+def append_arh99(dbif, day, price, arh99, arh99x):
+    dbif.append(day, price, arh99, arh99x)
+# append_arh99: delegates to dbif.append(), which upserts the day's price and
+# ARH99/ARH99X values and commits.
+def sync_arh99(dbif, prices):
+    for day in prices:
+        print(day, prices[day])
+        arh99, arh99x = cal_arh99(prices, int(day), prices[day])
+        print(day, arh99, arh99x)
+        append_arh99(dbif, day, prices[day], arh99, arh99x)
+# sync_arh99(dbif, prices): recomputes ARH99/ARH99X for every day in the
+# historical price dict and writes each result to the database, printing the
+# intermediate values for monitoring.
+def append_arh99day(dbif, day, price, arh99, arh99x):
+    dbif.append_day(day, price, arh99, arh99x)
+# append_arh99day: delegates the intraday insert to dbif.append_day().
+def clean_arh99day(dbif, day):
+    dbif.clean_day(day)
+# clean_arh99day: delegates to dbif.clean_day(), which drops intraday rows
+# older than the given cutoff so the intraday table stays small.
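+# The handler below buckets each tick into its UTC calendar day by formatting
+# with time.gmtime() and re-parsing with time.mktime(). mktime() interprets
+# its argument in local time, so the two only agree on a host running UTC.
+# A timezone-independent sketch of the same bucketing (hypothetical helper,
+# not called elsewhere in this module):
+def day_bucket_utc(ts_ms):
+    import calendar
+    t = time.gmtime(ts_ms / 1000)
+    # Midnight UTC of the tick's calendar day, as a unix timestamp.
+    return calendar.timegm((t.tm_year, t.tm_mon, t.tm_mday, 0, 0, 0, 0, 0, 0))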
+def arh99_handler(message):
+    global g_prices
+    global g_dbif
+    global g_lastts
+    coin_data = message["data"]
+    #coin_symbol = coin_data["s"]
+    coin_ts = int(coin_data["E"])
+    coin_price = float(coin_data["c"])
+    #print((coin_ts/1000), int((coin_ts/1000)%60))
+    if int((coin_ts/1000)%60) == 0:
+        #if coin_ts / 1000 / 60 != g_lastts:
+        if coin_ts/1000 - g_lastts >= 15:
+            #print(coin_ts, coin_price)
+            coin_ts2 = time.gmtime(coin_ts/1000)
+            daystr = time.strftime("%d %b %Y", coin_ts2)
+            print(daystr)
+            dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
+            g_prices[str(dayutc)] = coin_price
+            arh99, arh99x = cal_arh99(g_prices, dayutc, coin_price)
+            print(dayutc, coin_price, arh99, arh99x)
+            append_arh99day(g_dbif, coin_ts/1000, coin_price, arh99, arh99x)
+            append_arh99(g_dbif, dayutc, coin_price, arh99, arh99x)
+            clean_day = dayutc - 3600*24*2
+            clean_arh99day(g_dbif, clean_day)
+
+            handle_jzr_day60(g_dbif, coin_ts/1000, dayutc, coin_price, g_prices)
+            handle_ma_day730(g_dbif, coin_ts / 1000, dayutc, coin_price, g_prices)
+            g_lastts = coin_ts/1000
+# arh99_handler(message): websocket callback. On ticks whose timestamp lands
+# on a whole minute (and at least 15 s after the last processed tick) it
+# buckets the tick into its UTC day, updates g_prices, recomputes
+# ARH99/ARH99X, writes the intraday and daily rows, prunes intraday rows
+# older than two days, and runs the JZR-60 and MA-730 handlers before
+# updating g_lastts.
+def start_arh99(dbif, prices):
+    ws_client = WebsocketClient()
+    ws_client.start()
+    ws_client.instant_subscribe(
+        stream=['btcusdt@miniTicker'],
+        callback=arh99_handler,
+    )
+# start_arh99: starts a Binance websocket client and subscribes to the
+# btcusdt@miniTicker stream, with arh99_handler as the message callback.
+def arh99():
+    global g_dbif
+    g_dbif = Arh99DbIf()
+    prices = get_history_price2(g_dbif)
+    # print(prices)
+    # list1 = []
+    # list2 = []
+    # for key, value in prices.items():
+    #     old_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(key)))
+    #     new_time = old_time[0:10] + " 08:00:00"
+    #     new_key = int(time.mktime(time.strptime(new_time, "%Y-%m-%d %H:%M:%S")))
+    #     list1.append(str(new_key))
+    #     list2.append(value)
+    # prices = dict(zip(list1, list2))
+    #if not check_sync(g_dbif):
+    if True:
+        sync_arh99(g_dbif, prices)
+    #if not check_jzr60_sync(g_dbif):
+    if True:
+        sync_jzr_day60(g_dbif, prices)
+    #if not check_ma730_sync(g_dbif):
+    if True:
+        sync_ma_day730(g_dbif, prices)
+    start_arh99(g_dbif, prices)
+# arh99(): entry point. Builds the DB interface, loads historical prices via
+# get_history_price2(), unconditionally resyncs the ARH99, JZR-60 and MA-730
+# tables (the check_*_sync guards are commented out), then starts the
+# websocket monitor.
+#2-year ma multiplier
+def get_day730_rise(day, prices):
+    total = 0
+    cnt = 0
+    for i in range(730):
+        if str(day) in prices:
+            cur_price = prices[str(day)]
+            total += cur_price
+            cnt += 1
+        day = str(day - 3600 * 24)
+        day = int(day)
+    if cnt > 0:
+        return total/cnt
+    print("get_day730_rise", day, total, cnt)
+    return 0
+# get_day730_rise(day, prices): despite the name, this is the 730-day simple
+# average price (the 2-year MA), skipping days missing from the dict; returns
+# 0 and prints a diagnostic when no price at all is found in the window.
+def get_day365_rise(day, maxdays, prices):
+    total = 0
+    cnt = 0
+    for i in range(maxdays):
+        if str(day) in prices:
+            cur_price = prices[str(day)]
+            total += cur_price
+            cnt += 1
+        day = str(day - 3600 * 24)
+        day = int(day)
+    if cnt > 0:
+        return total/cnt
+    print("get_day365_rise", total, cnt)
+    return 0
+# get_day365_rise(day, maxdays, prices): the same averaging over a
+# caller-chosen window (used with 365 and 200 days); returns 0 when the
+# window is empty.
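+# The two "rise" helpers above are, despite their names, simple moving
+# averages of price over a lookback window. A compact near-equivalent over
+# the same str-keyed prices dict (sketch, hypothetical helper; it assumes the
+# lookback steps back one day per iteration):
+def moving_average(prices, day, n):
+    # Collect the prices present in the n-day window ending at `day`.
+    vals = [prices[str(day - 3600 * 24 * i)] for i in range(n) if str(day - 3600 * 24 * i) in prices]
+    return sum(vals) / len(vals) if vals else 0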
+def append_ma_day730(dbif, day, price, day730_rise, day365_rise, day200_rise):
+    dbif.append_ma730(day, price, day730_rise, day365_rise, day200_rise)
+# append_ma_day730: delegates to dbif.append_ma730(), the upsert into the
+# daily `ma730v3a` table.
+def sync_ma_day730(dbif, prices):
+    for day in prices:
+        print(day, prices[day])
+        day730_rise = get_day730_rise(int(day), prices)
+        day365_rise = get_day365_rise(int(day), 365, prices)
+        day200_rise = get_day365_rise(int(day), 200, prices)
+        print(day, day730_rise)
+        append_ma_day730(dbif, day, prices[day], day730_rise, day365_rise, day200_rise)
+# sync_ma_day730(dbif, prices): for every day in the history, computes the
+# 730-, 365- and 200-day average prices and writes them to the database.
+def check_ma730_sync(dbif):
+    return dbif.check_ma730_sync()
+# check_ma730_sync: wrapper reporting whether the MA-730 table already holds
+# data and is therefore considered synced.
+def append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise):
+    dbif.append_ma730_day(day, price, day730_rise, day365_rise, day200_rise)
+# append_ma730day: delegates the intraday insert to dbif.append_ma730_day().
+def append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise):
+    dbif.append_ma730(dayutc, price, day730_rise, day365_rise, day200_rise)
+# append_ma730: delegates the daily upsert to dbif.append_ma730().
+def clean_ma730day(dbif, clean_day):
+    dbif.clean_ma730_day(clean_day)
+# clean_ma730day: drops intraday MA rows older than clean_day.
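+# Each clean_* helper above deletes intraday rows older than a cutoff. The
+# shared shape, as one parameterized sketch (hypothetical helper; the table
+# name comes from the small fixed set used in this module, never from user
+# input, so the string concatenation is safe here):
+def clean_intraday(conn, table, cutoff_ts):
+    with conn.cursor() as cursor:
+        cursor.execute("DELETE FROM `" + table + "` WHERE unixdt<FROM_UNIXTIME(%s)", (cutoff_ts,))
+    conn.commit()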
+def handle_ma_day730(dbif, day, dayutc, price, prices):
+    day730_rise = get_day730_rise(dayutc, prices)
+    day365_rise = get_day365_rise(dayutc, 365, prices)
+    day200_rise = get_day365_rise(dayutc, 200, prices)
+    print(dayutc, price, day, day730_rise)
+    append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise)
+    append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise)
+    clean_day = dayutc - 3600 * 24 * 2
+    clean_ma730day(dbif, clean_day)
+# handle_ma_day730: computes the 730/365/200-day averages for the tick's UTC
+# day, writes both the intraday and daily rows, and prunes intraday rows
+# older than two days.
+
+arh99()
+# Running the module calls arh99(), which drives the whole pipeline: it
+# initialises the database interface, loads historical prices with
+# get_history_price2(), resyncs the ARH99 (sync_arh99), 60-day JZR
+# (sync_jzr_day60) and 730-day MA (sync_ma_day730) tables, then starts the
+# websocket client so arh99_handler() keeps every derived BTC metric current.

diff --git a/lyq/arh999eth_lyq.py b/lyq/arh999eth_lyq.py
new file mode 100644
index 0000000..b0fc2f1
--- /dev/null
+++ b/lyq/arh999eth_lyq.py
@@ -0,0 +1,504 @@
+# coding=utf-8
+import ujson
+from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
+import time
+import requests
+from loguru import logger
+import datetime
+import pymysql
+import math
+import csv
+
+g_prices = {}
+g_dbif = None
+g_lastts = 0
+def get_day60_rise(day, prices):
+    total = 0
+    cnt = 0
+    for i in range(60):
+        if str(day) in prices:
+            cur_price = prices[str(day)]
+            day = str(day - 3600 * 24)
+            if day in prices:
+                prev_price = prices[day]
+                try:
+                    #print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
+                    total += (((cur_price-prev_price)/prev_price))
+                    cnt += 1
+                except:
+                    pass
+        # print(day, total, cnt)
+        day = int(day)
+    return total
+
+def get_days_rise(day, maxdays, prices):
+    total = 0
+    cnt = 0
+    for i in range(maxdays):
+        if str(day) in prices:
+            cur_price = prices[str(day)]
+            day = str(day - 3600 * 24)
+            if day in prices:
+                prev_price = prices[day]
+                try:
+                    #print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
+                    total += (((cur_price-prev_price)/prev_price))
+                    cnt += 1
+                except:
+                    pass
+        # print(day, total, cnt)
+        day = int(day)
+    return total
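+# Worked example for get_days_rise: with three consecutive days priced
+# 100 -> 110 -> 121 and `day` pointing at the latest one, the sum of the
+# day-over-day fractional rises is
+#   (110-100)/100 + (121-110)/110 = 0.10 + 0.10 = 0.20
+# Note the function returns the raw sum `total`; `cnt` is accumulated but
+# never used in the return value.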
+def append_jzr_day60(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
+    dbif.append_jzr60(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
+
+def sync_jzr_day60(dbif, prices):
+    for day in prices:
+        print(day, prices[day])
+        day60_rise = get_days_rise(int(day), 60, prices)
+        day7_rise = get_days_rise(int(day), 7, prices)
+        day30_rise = get_days_rise(int(day), 30, prices)
+        day90_rise = get_days_rise(int(day), 90, prices)
+        print(day, day60_rise)
+        append_jzr_day60(dbif, day, prices[day], day60_rise, day7_rise, day30_rise, day90_rise)
+
+def check_jzr60_sync(dbif):
+    return dbif.check_jzr60_sync()
+
+def append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
+    dbif.append_jzr60_day(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
+
+def append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise):
+    dbif.append_jzr60(dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
+
+def clean_jzr60day(dbif, clean_day):
+    dbif.clean_jzr60_day(clean_day)
+
+def handle_jzr_day60(dbif, day, dayutc, price, prices):
+    day60_rise = get_days_rise(dayutc, 60, prices)
+    day7_rise = get_days_rise(dayutc, 7, prices)
+    day30_rise = get_days_rise(dayutc, 30, prices)
+    day90_rise = get_days_rise(dayutc, 90, prices)
+    print(dayutc, price, day, day60_rise)
+    append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise)
+    append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
+    clean_day = dayutc - 3600 * 24 * 2
+    clean_jzr60day(dbif, clean_day)
+
+class Arh99DbIf:
+    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
+        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
+        print("init arh99 db success!")
+
+    def check_sync(self):
+        synced = False
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `arh99v3a`"
+            cursor.execute(sql_query)
+            result = cursor.fetchone()
+            print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        synced = True
+            self.conn.commit()
+            #print("synced", synced)
+            return synced
+
+    def append(self, day, price, arh99, arh99x):
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `arh99v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
+            cursor.execute(sql_query, (int(day),))
+            result = cursor.fetchone()
+            #print(dt_utc)
+            #print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        sql_update = 'UPDATE arh99v3a SET `arh99`=%s, `arh99x`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
+                        print(sql_update)
+                        cursor.execute(sql_update, (arh99, arh99x, price, int(day), int(day)))
+                    else:
+                        sql_insert = "INSERT INTO `arh99v3a` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
+                        print(sql_insert)
+                        cursor.execute(sql_insert, (day, price, arh99, arh99x))
+            self.conn.commit()
+
+    def append_day(self, day, price, arh99, arh99x):
+        with self.conn.cursor() as cursor:
+            sql_insert = "INSERT INTO `arh99v3aday` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
+            print(sql_insert)
+            cursor.execute(sql_insert, (day, price, arh99, arh99x))
+            self.conn.commit()
+
+    def clean_day(self, day):
+        with self.conn.cursor() as cursor:
+            sql_clean = "DELETE from arh99v3aday where unixdt<FROM_UNIXTIME(%s)"
+            cursor.execute(sql_clean, (day,))
+            self.conn.commit()
+
+    def check_jzr60_sync(self):
+        synced = False
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `jzr60v3a`"
+            cursor.execute(sql_query)
+            result = cursor.fetchone()
+            print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        synced = True
+            self.conn.commit()
+            #print("synced", synced)
+            return synced
+
+    def append_jzr60(self, day, price, jzr60, jzr7, jzr30, jzr90):
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `jzr60v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
+            cursor.execute(sql_query, (int(day),))
+            result = cursor.fetchone()
+            #print(dt_utc)
+            #print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        sql_update = 'UPDATE jzr60v3a SET `jzr60`=%s,`jzr7`=%s,`jzr30`=%s,`jzr90`=%s,`price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
+                        print(sql_update)
+                        cursor.execute(sql_update, (jzr60, jzr7, jzr30, jzr90, price, int(day), int(day)))
+                    else:
+                        sql_insert = "INSERT INTO `jzr60v3a` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+                        print(sql_insert)
+                        cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
+            self.conn.commit()
+
+    def append_jzr60_day(self, day, price, jzr60, jzr7, jzr30, jzr90):
+        with self.conn.cursor() as cursor:
+            sql_insert = "INSERT INTO `jzr60v3aday` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+            print(sql_insert)
+            cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
+            self.conn.commit()
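+    # The class keeps one long-lived pymysql connection; if the server's
+    # wait_timeout closes it, the next query raises. A minimal guard (sketch;
+    # hypothetical helper, not called elsewhere in this module) is to ping
+    # with reconnect=True before writing:
+    def ensure_connected(self):
+        # pymysql re-opens the connection in place when reconnect=True.
+        self.conn.ping(reconnect=True)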
+    def clean_jzr60_day(self, day):
+        with self.conn.cursor() as cursor:
+            sql_clean = "DELETE from jzr60v3aday where unixdt<FROM_UNIXTIME(%s)"
+            cursor.execute(sql_clean, (day,))
+            self.conn.commit()
+
+    def check_ma730_sync(self):
+        synced = False
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `ma730v3a`"
+            cursor.execute(sql_query)
+            result = cursor.fetchone()
+            print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    if result["COUNT(id)"] > 0:
+                        synced = True
+            self.conn.commit()
+            #print("synced", synced)
+            return synced
+
+    def append_ma730(self, day, price, ma730, ma365, ma200):
+        with self.conn.cursor() as cursor:
+            sql_query = "SELECT COUNT(id) FROM `ma730v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
+            cursor.execute(sql_query, (int(day),))
+            result = cursor.fetchone()
+            #print(dt_utc)
+            #print(result)
+            if result is not None:
+                if "COUNT(id)" in result:
+                    ma730x5 = ma730*5
+                    if result["COUNT(id)"] > 0:
+                        sql_update = 'UPDATE ma730v3a SET `ma730`=%s, `ma730x5`=%s, `ma365`=%s, `ma200`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
+                        print(sql_update)
+                        cursor.execute(sql_update, (ma730, ma730x5, ma365, ma200, price, int(day), int(day)))
+                    else:
+                        sql_insert = "INSERT INTO `ma730v3a` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+                        print(sql_insert)
+                        cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
+            self.conn.commit()
+
+    def append_ma730_day(self, day, price, ma730, ma365, ma200):
+        with self.conn.cursor() as cursor:
+            ma730x5 = ma730*5
+            sql_insert = "INSERT INTO `ma730v3aday` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
+            print(sql_insert)
+            cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
+            self.conn.commit()
+
+    def clean_ma730_day(self, day):
+        with self.conn.cursor() as cursor:
+            sql_clean = "DELETE from ma730v3aday where unixdt<FROM_UNIXTIME(%s)"
+            cursor.execute(sql_clean, (day,))
+            self.conn.commit()
+
+def cal_day200_price(prices, day):
+    total = 0
+    cnt = 0
+    for i in range(200):
+        if str(day) in prices:
+            total += prices[str(day)]
+            cnt += 1
+        day = day - 3600 * 24
+    if cnt > 0:
+        return total/cnt
+    return 0
+
+def cal_arh99(prices, day, price):
+    day200 = cal_day200_price(prices, day)
+    #print("day200", day200)
+    days = get_coin_days(day)
+    #print("days", days)
+    exp = get_coin_exp(days)
+    #print("exp", exp, price)
+    try:
+        arh99 = (float(price)/day200)*(float(price)/exp)
+        arh99x = (day200/float(price))*(exp/float(price))*3
+    except:
+        arh99 = 0
+        arh99x = 0
+    #print("arh99", arh99)
+
+    return arh99, arh99x
+
+def check_sync(dbif):
+    return dbif.check_sync()
+
+def append_arh99(dbif, day, price, arh99, arh99x):
+    dbif.append(day, price, arh99, arh99x)
+
+def sync_arh99(dbif, prices):
+    for day in prices:
+        print(day, prices[day])
+        arh99, arh99x = cal_arh99(prices, int(day), prices[day])
+        print(day, arh99, arh99x)
+        append_arh99(dbif, day, prices[day], arh99, arh99x)
+
+def append_arh99day(dbif, day, price, arh99, arh99x):
+    dbif.append_day(day, price, arh99, arh99x)
+
+def clean_arh99day(dbif, day):
+    dbif.clean_day(day)
+
+def arh99_handler(message):
+    global g_prices
+    global g_dbif
+    global g_lastts
+    coin_data = message["data"]
+    #coin_symbol = coin_data["s"]
+    coin_ts = int(coin_data["E"])
+    coin_price = float(coin_data["c"])
+    #print((coin_ts/1000), int((coin_ts/1000)%60))
+    if int((coin_ts/1000)%60) == 0:
+        #if coin_ts/1000/60 != g_lastts:
+        if coin_ts/1000 - g_lastts >= 15:
+            #print(coin_ts, coin_price)
+            coin_ts2 = time.gmtime(coin_ts/1000)
+            daystr = time.strftime("%d %b %Y", coin_ts2)
+            print(daystr)
+            dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
+            g_prices[str(dayutc)] = coin_price
+            arh99, arh99x = cal_arh99(g_prices, dayutc, coin_price)
+            print(dayutc, coin_price, arh99, arh99x)
+
+            append_arh99day(g_dbif, coin_ts/1000, coin_price, arh99, arh99x)
+            append_arh99(g_dbif, dayutc, coin_price, arh99, arh99x)
+            clean_day = dayutc - 3600*24*2
+            clean_arh99day(g_dbif, clean_day)
+
+            handle_jzr_day60(g_dbif,
coin_ts/1000, dayutc, coin_price, g_prices) + handle_ma_day730(g_dbif, coin_ts / 1000, dayutc, coin_price, g_prices) + g_lastts = coin_ts/1000 + +def start_arh99(dbif, prices): + ws_client = WebsocketClient() + ws_client.start() + ws_client.instant_subscribe( + stream=['ethusdt@miniTicker'], + callback=arh99_handler, + ) + +def arh99(): + global g_dbif + g_dbif = Arh99DbIf() + prices = get_history_price2(g_dbif) + #if not check_sync(g_dbif): + if True: + sync_arh99(g_dbif, prices) + #if not check_jzr60_sync(g_dbif): + if True: + sync_jzr_day60(g_dbif, prices) + #if not check_ma730_sync(g_dbif): + if True: + sync_ma_day730(g_dbif, prices) + start_arh99(g_dbif, prices) + +#2-year ma multiplier +def get_day730_rise(day, prices): + total = 0 + cnt = 0 + for i in range(730): + if str(day) in prices: + cur_price = prices[str(day)] + total += cur_price + cnt += 1 + day = str(day - 3600 * 24) + day = int(day) + if cnt > 0: + return total/cnt + return 0 + +def get_day365_rise(day, maxdays, prices): + total = 0 + cnt = 0 + for i in range(maxdays): + if str(day) in prices: + cur_price = prices[str(day)] + total += cur_price + cnt += 1 + day = str(day - 3600 * 24) + day = int(day) + if cnt > 0: + return total/cnt + return 0 + +def append_ma_day730(dbif, day, price, day730_rise, day365_rise, day200_rise): + dbif.append_ma730(day, price, day730_rise, day365_rise, day200_rise) + +def sync_ma_day730(dbif, prices): + for day in prices: + print(day, prices[day]) + day730_rise = get_day730_rise(int(day), prices) + day365_rise = get_day365_rise(int(day), 365, prices) + day200_rise = get_day365_rise(int(day), 200, prices) + print(day, day730_rise) + append_ma_day730(dbif, day, prices[day], day730_rise, day365_rise, day200_rise) + +def check_ma730_sync(dbif): + return dbif.check_ma730_sync() + +def append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise): + dbif.append_ma730_day(day, price, day730_rise, day365_rise, day200_rise) + +def append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise): + dbif.append_ma730(dayutc, price, day730_rise, day365_rise, day200_rise) + +def clean_ma730day(dbif, clean_day): + dbif.clean_ma730_day(clean_day) + +def handle_ma_day730(dbif, day, dayutc, price, prices): + day730_rise = get_day730_rise(dayutc, prices) + day365_rise = get_day365_rise(dayutc, 365, prices) + day200_rise = get_day365_rise(dayutc, 200, prices) + print(dayutc, price, day, day730_rise) + append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise) + append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise) + clean_day = dayutc - 3600 * 24 * 2 + clean_ma730day(dbif, clean_day) + +arh99() + diff --git a/lyq/btc24h_db_if.py b/lyq/btc24h_db_if.py new file mode 100644 index 0000000..7b31e32 --- /dev/null +++ b/lyq/btc24h_db_if.py @@ -0,0 +1,600 @@ +# coding=utf-8 +import datetime +import json +import requests +import pymysql +from loguru import logger +import time + + +class DbIf: + def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb"): + self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, + cursorclass=pymysql.cursors.DictCursor) + + def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, + transferred_price, balanced_price, nvt_ratio, velocity): + with self.conn.cursor() as cursor: + print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, + balanced_price, nvt_ratio, 
velocity)
+            # Push the realtime daily-inds row to the message-subscription API topic (disabled):
+            # url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1"
+            # headers = {"accept": "application/json"}
+            # data = {"unixdt":dt_utc,"height_begin":height_begin,"height_end":height_end,"lth_volume":lth_volume,"frm":frm,"cvdd":cvdd,"realized_price":realized_price,"transferred_price":transferred_price,"balanced_price":balanced_price,"nvt_ratio":nvt_ratio,"velocity":velocity}
+            # response = requests.post(url=url, data=json.dumps(data), headers=headers)
+            sql_insert = "REPLACE INTO rt_dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity"
+            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
+            cursor.execute(sql_insert, (
+                dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price,
+                balanced_price, nvt_ratio, velocity))
+
+    '''
+    def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees):
+        with self.conn.cursor() as cursor:
+            sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)"
+            cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees))
+    '''
+
+    def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address,
+                            new_address_volume, active_address,
+                            send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
+                            asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
+                            day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
+                            liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
+                            nupl):
+        with self.conn.cursor() as cursor:
+            # Push the realtime rt_dailyindsv2e1 row to the message-subscription API topic.
+            # Payload structure: {"unixdt": dt_utc, ...}
+            try:
+                url="https://coinbus.cc/api/v1/marketall/push/realtime/btc/dailyv2e1"
+                headers = {"accept": "application/json"}
+                data = {"unixdt":dt_utc,"height_begin":height_begin,"height_end":height_end,"profitrate":profitrate,
+                        "fees":fees,"txs":txs,"new_address":new_address,"total_address":total_address,
+                        "new_address_volume":new_address_volume,"active_address":active_address,"send_address":send_address,
+                        "receive_address":receive_address,"volume":volume,"eavolume":eavolume,"sopr":sopr,"asopr":asopr,"easopr":easopr,
+                        "lthsopr":lthsopr,"sthsopr":sthsopr,"asol":asol,"eaasol":eaasol,"dormancy":dormancy,
+                        "adormancy":adormancy,"eadormancy":eadormancy,"cdd":cdd,"sacdd":sacdd,"eacdd":eacdd,"day1":day1,"day7":day7,
+                        "day30": day30,"day60":day60,"day90":day90,"day180":day180,"day365":day365,"day730":day730,
+                        "csupply":csupply,"mintusd":mintusd,"sumcsupply":sumcsupply,"sumcdd":sumcdd,"sumeacdd":sumeacdd,"liveliness":liveliness,
+                        "ealiveliness":ealiveliness,"rprofit":rprofit,"rloss":rloss,"rplrate":rplrate,
+                        "price":price,"marketcap":marketcap,"rcap":rcap,"earcap":earcap,"mvrv":mvrv,"nupl":nupl}
+                response = requests.post(url=url, data=json.dumps(data), headers=headers)
+            except:
+                print("API push failed")
+            sql_insert = "REPLACE INTO rt_dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
+            sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, nupl" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, + active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, + nupl)) + self.conn.commit() + + ''' + def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, + send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, + asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, + day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, + liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, + lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr," + sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, + nupl)) + self.conn.commit() + ''' + ''' + # daily daily on-chain volume + def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyvolume`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 
0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + # newaddrs + ''' + def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt, + last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, + last_vol): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dayutc, }) + result = cursor.fetchone() + # print(dt_utc) + # print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, `volume=%s`,`txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, ( + last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, + last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc)) + else: + print("insert") + sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees, + last_profit_rate)) + self.conn.commit() + ''' + ''' + def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + #print(sql_insert) + #print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height)) + self.conn.commit() + ''' + ''' + def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, + days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid, + block_height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, ( + dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest, + days_largest, days_current, tx_buy_address, txid, block_height)) + 
self.conn.commit() + ''' + ''' + def update_to_dailycdd(self, dt_utc, cdd): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, cdd)) + self.conn.commit() + ''' + ''' + def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, + day60, day90, day180, day365, day730): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, + day365, + day730)) + self.conn.commit() + ''' + ''' + def update_to_dailysopr(self, dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr)) + self.conn.commit() + ''' + ''' + def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, + rloss, rplrate, price, marketcap, rcap, earcap, mvrv): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, + rplrate, + price, marketcap, rcap, earcap, mvrv)) + self.conn.commit() + ''' + # daily volume + ''' + def update_to_dailyvolume(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + ''' + '''with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (volume, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit()''' + ''' + def update_to_dailyfees(self, dt_utc, fees): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, fees)) + self.conn.commit() + ''' + ''' + def import_to_dailyvolume2(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), 
%s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + + def delete_dailyvolume_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyvolume`" + cursor.execute(sql_query) + self.conn.commit() + + + # daily market cap + def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyprice`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + #daily price + def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply)) + self.conn.commit() + + def update_to_dailyprice(self, dt_utc, price, volume, change): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change)) + self.conn.commit() + + def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply)) + self.conn.commit() + + def update_dailypricechange(self): + with self.conn.cursor() as cursor: + sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt" + cursor.execute(sql_query) + results = cursor.fetchall() + prevprice = -1 + for result in results: + if prevprice < 0: + prevprice = result["price"] + else: + #print(result["unixdt"], result["price"], result["marketcap"]) + try: + change = (result["price"]/prevprice - 1)*100 + except: + change = 0 + #print(csupply) + datestr = result["unixdt"] + 
logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change)) + sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s' + cursor.execute(sql_update, (str(change), result["unixdt"])) + prevprice = result["price"] + self.conn.commit() + + def delete_dailyprice_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyprice`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_failed_blockvolume(self, height): + with self.conn.cursor() as cursor: + sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `blockamount` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + self.conn.commit() + + #block check --- big amount for vout + def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamountvout`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout, + profit): + with self.conn.cursor() as cursor: + # url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1" + # headers = {"accept": "application/json"} + # data = {"unixdt":datetime,"vout":vout,"voutn":voutn,"vouttype":vouttype, + # "amount":amount,"height":height,"txid":txid,"days":days,"buyin":buyin, + # "sellout":sellout,"profit":profit} + # response = requests.post(url=url, data=json.dumps(data), headers=headers) + sql_insert = "INSERT INTO `rt_bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, + (datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit)) + self.conn.commit() + + ''' + # block check --- big amount tx + def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamounttx`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE 
UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_bigamounttx(self, datetime, txid, amount, height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + #print(datetime, txid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, txid)) + self.conn.commit() + + # block check --- per block amount + def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `blockamount`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if amount > 0: + sql_query = sql_query + "WHERE amount > " + str(amount) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_blockamount(self, datetime, blockid, amount, height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, blockid)) + self.conn.commit() + + def delete_node_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `blockamount`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamountvout`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamounttx`" + cursor.execute(sql_query) + self.conn.commit() + + def update_realize_cap(self, dayutc, last_rv): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_rv)) + self.conn.commit() + + # daily realize cap + def 
query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyrcap`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_daily_addr(self, dayutc, last_add_cnt): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_add_cnt)) + self.conn.commit() + + def delete_daily_addr(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyaddradd`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_daily_rv(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyrcap`" + cursor.execute(sql_query) + self.conn.commit() + ''' + + def __del__(self): + self.conn.close() diff --git a/lyq/btc24h_redis_if.py b/lyq/btc24h_redis_if.py new file mode 100644 index 0000000..d5d3236 --- /dev/null +++ b/lyq/btc24h_redis_if.py @@ -0,0 +1,613 @@ +import time + +from walrus import * +from loguru import logger + +class RedisIf: + def __init__(self, host="127.0.0.1", port=6379, password="", db=0): + self.db = Database(host=host, port=port, db=db) + + self.zbalance = self.db.ZSet("rt_balancev2e") + ''' + #realize cap progress + self.rv = self.db.Hash("rv") + #address and balance progress + self.addr = self.db.Hash("addr") + #block volume progress + self.bv = self.db.Hash("bv") + #daily volume progress + self.dv = self.db.Hash("dv") + ''' + ''' + #stat tx progress + self.tx = self.db.Hash("tx") + + #ETH daily contract progress + self.eth_dc = self.db.Hash("ethdc") + + #btc stats fee + self.btc_stats = self.db.Hash("btcstats") + + #btc stats volume + self.btc_volume = self.db.Hash("btcvolume") + + # btc stats cdd + self.btc_cdd = self.db.Hash("btccdd") + + # btc stats cdd days + self.btc_cdd_days = self.db.Hash("btccdddays") + ''' + self.btc_block_time = self.db.Hash("rt_btcblocktimev2e") + ''' + self.btc_sopr = self.db.Hash("btcsopr") + ''' + self.btc_data = self.db.Hash("rt_btc_datav2e") + + self.active_address = self.db.Set("rt_active_addressv2e") + self.send_address = self.db.Set("rt_send_addressv2e") + self.receive_address = self.db.Set("rt_receive_addressv2e") + + def get_btc_data(self, key): + value = None + if self.btc_data[key] is not None: + value = self.btc_data[key] + return value + + def set_btc_data(self, key, value): + self.btc_data[key] = value + + def reset_btc_data(self): + self.btc_data.clear() + self.zbalance.clear() + # self.btc_block_time.clear() + + ''' + def 
get_last_btc_sopr(self): + last_sopr_buy = None + last_asopr_buy = None + last_easopr_buy = None + last_lth_sopr_buy = None + last_sth_sopr_buy = None + last_asol = None + last_eaasol = None + + if self.btc_sopr["last_asol"] is not None: + last_asol = self.btc_sopr["last_asol"] + #last_asol = float(self.btc_sopr["last_asol"].decode("utf-8")) + if self.btc_sopr["last_eaasol"] is not None: + last_eaasol = self.btc_sopr["last_eaasol"] + #last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8")) + + + if self.btc_sopr["last_sopr_buy"] is not None: + last_sopr_buy = self.btc_sopr["last_sopr_buy"] + #last_sopr_buy = float(self.btc_sopr["last_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_asopr_buy"] is not None: + last_asopr_buy = self.btc_sopr["last_asopr_buy"] + #last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8")) + if self.btc_sopr["last_easopr_buy"] is not None: + last_easopr_buy = self.btc_sopr["last_easopr_buy"] + #last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8")) + if self.btc_sopr["last_lth_sopr_buy"] is not None: + last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"] + #last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_buy"] is not None: + last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"] + #last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8")) + + last_sopr_sell = None + last_asopr_sell = None + last_easopr_sell = None + last_lth_sopr_sell = None + last_sth_sopr_sell = None + if self.btc_sopr["last_sopr_sell"] is not None: + last_sopr_sell = self.btc_sopr["last_sopr_sell"] + # last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_asopr_sell"] is not None: + last_asopr_sell = self.btc_sopr["last_asopr_sell"] + # last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8")) + if self.btc_sopr["last_easopr_sell"] is not None: + last_easopr_sell = self.btc_sopr["last_easopr_sell"] + # last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8")) + if self.btc_sopr["last_lth_sopr_sell"] is not None: + last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"] + # last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_sell"] is not None: + last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"] + # last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8")) + + return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell + + def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell): + self.btc_sopr["last_asol"] = last_asol + self.btc_sopr["last_eaasol"] = last_eaasol + + self.btc_sopr["last_sopr_buy"] = last_sopr_buy + self.btc_sopr["last_asopr_buy"] = last_asopr_buy + self.btc_sopr["last_easopr_buy"] = last_easopr_buy + self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy + self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy + self.btc_sopr["last_sopr_sell"] = last_sopr_sell + self.btc_sopr["last_asopr_sell"] = last_asopr_sell + self.btc_sopr["last_easopr_sell"] = last_easopr_sell + self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell + self.btc_sopr["last_sth_sopr_sell"] = 
last_sth_sopr_sell + ''' + + def get_block_time(self, height): + block_time = None + height_str = str(height) + if self.btc_block_time[height_str] is not None: + block_time = int(self.btc_block_time[height_str].decode("utf-8")) + # block_time = int(self.btc_block_time[height_str].decode("utf-8")) + + return block_time + + def set_block_time(self, height, ts): + height_str = str(height) + self.btc_block_time[height_str] = ts + + ''' + def get_last_btc_cdd_days(self): + last_cdd = None + last_acdd = None + last_eacdd = None + last_cdd_day1= None + last_cdd_day7 = None + last_cdd_day30 = None + last_cdd_day60 = None + last_cdd_day90 = None + last_cdd_day180 = None + last_cdd_day365 = None + last_cdd_day730 = None + + last_date = None + last_height = None + last_date_str = None + + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_acdd"] is not None: + last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8")) + if self.btc_cdd["last_eacdd"] is not None: + last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day1"] is not None: + last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day7"] is not None: + last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day30"] is not None: + last_cdd_day30 = float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day60"] is not None: + last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day90"] is not None: + last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day180"] is not None: + last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day365"] is not None: + last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day730"] is not None: + last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8")) + if self.btc_cdd_days["last_date"] is not None: + last_date = int(self.btc_cdd_days["last_date"].decode("utf-8")) + if self.btc_cdd_days["last_height"] is not None: + last_height = int(self.btc_cdd_days["last_height"].decode("utf-8")) + if self.btc_cdd_days["last_date_str"] is not None: + last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8") + return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str + + def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + self.btc_cdd["last_acdd"] = acdd + self.btc_cdd["last_eacdd"] = eacdd + self.btc_cdd_days["last_cdd_day1"] = day1 + self.btc_cdd_days["last_cdd_day7"] = day7 + self.btc_cdd_days["last_cdd_day30"] = day30 + self.btc_cdd_days["last_cdd_day60"] = day60 + self.btc_cdd_days["last_cdd_day90"] = day90 + self.btc_cdd_days["last_cdd_day180"] = day180 + self.btc_cdd_days["last_cdd_day365"] = day365 + self.btc_cdd_days["last_cdd_day730"] = day730 + self.btc_cdd_days["last_date"] = dt + self.btc_cdd_days["last_height"] = height + self.btc_cdd_days["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_cdd(self): + last_cdd = None + last_date = None + 
last_height = None + last_date_str = None + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_date"] is not None: + last_date = int(self.btc_cdd["last_date"].decode("utf-8")) + if self.btc_cdd["last_height"] is not None: + last_height = int(self.btc_cdd["last_height"].decode("utf-8")) + if self.btc_cdd["last_date_str"] is not None: + last_date_str = self.btc_cdd["last_date_str"].decode("utf-8") + return last_cdd, last_date, last_height, last_date_str + + def set_last_btc_cdd(self, cdd, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + self.btc_cdd["last_date"] = dt + self.btc_cdd["last_height"] = height + self.btc_cdd["last_date_str"] = dtstr + + def get_last_btc_volume(self): + last_volume = None + last_date = None + last_height = None + last_date_str = None + if self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_volume["last_date"] is not None: + last_date = int(self.btc_volume["last_date"].decode("utf-8")) + if self.btc_volume["last_height"] is not None: + last_height = int(self.btc_volume["last_height"].decode("utf-8")) + if self.btc_volume["last_date_str"] is not None: + last_date_str = self.btc_volume["last_date_str"].decode("utf-8") + return last_volume, last_date, last_height, last_date_str + + def set_last_btc_volume(self, volume, dt, height, dtstr): + self.btc_volume["last_volume"] = volume + self.btc_volume["last_date"] = dt + self.btc_volume["last_height"] = height + self.btc_volume["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_stats(self): + last_fees = None + last_date = None + last_height = None + last_date_str = None + last_volume = None + if self.btc_stats["last_fees"] is not None: + last_fees = float(self.btc_stats["last_fees"].decode("utf-8")) + if self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_stats["last_date"] is not None: + last_date = int(self.btc_stats["last_date"].decode("utf-8")) + if self.btc_stats["last_height"] is not None: + last_height = int(self.btc_stats["last_height"].decode("utf-8")) + if self.btc_stats["last_date_str"] is not None: + last_date_str = self.btc_stats["last_date_str"].decode("utf-8") + return last_fees, last_volume, last_date, last_height, last_date_str + + def set_last_btc_stats(self, fees, volume, dt, height, dtstr): + self.btc_stats["last_fees"] = fees + self.btc_volume["last_volume"] = volume + self.btc_stats["last_date"] = dt + self.btc_stats["last_height"] = height + self.btc_stats["last_date_str"] = dtstr + + + def get_last_eth_dc(self): + last_date = None + last_height = None + last_date_str = None + if self.eth_dc["last_date"] is not None: + last_date = int(self.eth_dc["last_date"].decode("utf-8")) + if self.eth_dc["last_height"] is not None: + last_height = int(self.eth_dc["last_height"].decode("utf-8")) + if self.eth_dc["last_date_str"] is not None: + last_date_str = self.eth_dc["last_date_str"].decode("utf-8") + return last_date, last_height, last_date_str + + def set_last_eth_dc(self, dt, height, dtstr): + self.eth_dc["last_date"] = dt + self.eth_dc["last_height"] = height + self.eth_dc["last_date_str"] = dtstr + ''' + ''' + def get_last_dv(self): + last_dv = None + last_date = None + last_height = None + last_date_str = None + if self.dv["last_dv"] is not None: + last_dv = float(self.dv["last_dv"].decode("utf-8")) + if self.dv["last_date"] is not None: + last_date = 
int(self.dv["last_date"].decode("utf-8")) + if self.dv["last_height"] is not None: + last_height = int(self.dv["last_height"].decode("utf-8")) + if self.dv["last_date_str"] is not None: + last_date_str = self.dv["last_date_str"].decode("utf-8") + return last_dv, last_date, last_height, last_date_str + + def set_last_dv(self, dv, dt, height, dtstr): + self.dv["last_dv"] = dv + self.dv["last_date"] = dt + self.dv["last_height"] = height + self.dv["last_date_str"] = dtstr + + def get_last_bv(self): + last_height = None + if self.bv["last_height"] is not None: + last_height = int(self.bv["last_height"].decode("utf-8")) + return last_height + + def set_last_bv(self, height): + self.bv["last_height"] = height + ''' + ''' + def get_last_ind(self): + last_csupply = None + last_mintusd = None + last_sumcsupply = None + last_sumcdd = None + last_sumeacdd = None + last_rprofit = None + last_rloss = None + last_marketcap = None + last_rcap = None + last_mvrv = None + + last_earcap = None + if self.tx["last_csupply"] is not None: + last_csupply = float(self.tx["last_csupply"].decode("utf-8")) + if self.tx["last_mintusd"] is not None: + last_mintusd = float(self.tx["last_mintusd"].decode("utf-8")) + if self.tx["last_sumcsupply"] is not None: + last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8")) + if self.tx["last_sumcdd"] is not None: + last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8")) + if self.tx["last_sumeacdd"] is not None: + last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8")) + if self.tx["last_rprofit"] is not None: + last_rprofit = float(self.tx["last_rprofit"].decode("utf-8")) + if self.tx["last_rloss"] is not None: + last_rloss = float(self.tx["last_rloss"].decode("utf-8")) + if self.tx["last_marketcap"] is not None: + last_marketcap = float(self.tx["last_marketcap"].decode("utf-8")) + if self.tx["last_rcap"] is not None: + last_rcap = float(self.tx["last_rcap"].decode("utf-8")) + if self.tx["last_earcap"] is not None: + last_earcap = float(self.tx["last_earcap"].decode("utf-8")) + if self.tx["last_mvrv"] is not None: + last_mvrv = float(self.tx["last_mvrv"].decode("utf-8")) + + + return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv + + def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv): + self.tx["last_csupply"] = last_csupply + self.tx["last_mintusd"] = last_mintusd + self.tx["last_sumcsupply"] = last_sumcsupply + self.tx["last_sumcdd"] = last_sumcdd + self.tx["last_sumeacdd"] = last_sumeacdd + self.tx["last_rprofit"] = last_rprofit + self.tx["last_rloss"] = last_rloss + self.tx["last_marketcap"] = last_marketcap + self.tx["last_rcap"] = last_rcap + self.tx["last_earcap"] = last_earcap + self.tx["last_mvrv"] = last_mvrv + + + def get_last_tx(self): + last_profit = None + last_fees = None + last_newaddr_cnt = None + last_newaddr_vol = None + last_active_addr_cnt = None + last_tx_addr_cnt = None + last_rx_addr_cnt = None + last_vol_change = None + last_vol = None + last_avol = None + last_date = None + last_height = None + last_date_str = None + last_txs = None + last_eatxs = None + if self.tx["last_profit_rate"] is not None: + last_profit = int(self.tx["last_profit"].decode("utf-8")) + if self.tx["last_fees"] is not None: + last_fees = int(self.tx["last_fees"].decode("utf-8")) + if self.tx["last_txs"] is not None: + last_txs = 
int(self.tx["last_txs"].decode("utf-8")) + if self.tx["last_eatxs"] is not None: + last_eatxs = int(self.tx["last_eatxs"].decode("utf-8")) + if self.tx["last_newaddr_cnt"] is not None: + last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8")) + if self.tx["last_newaddr_vol"] is not None: + last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8")) + if self.tx["last_active_addr_cnt"] is not None: + last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8")) + if self.tx["last_tx_addr_cnt"] is not None: + last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8")) + if self.tx["last_rx_addr_cnt"] is not None: + last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8")) + if self.tx["last_vol_change"] is not None: + last_vol_change = float(self.tx["last_vol_change"].decode("utf-8")) + if self.tx["last_vol"] is not None: + last_vol = float(self.tx["last_vol"].decode("utf-8")) + if self.tx["last_avol"] is not None: + last_avol = float(self.tx["last_avol"].decode("utf-8")) + if self.tx["last_date"] is not None: + last_date = int(self.tx["last_date"].decode("utf-8")) + if self.tx["last_height"] is not None: + last_height = int(self.tx["last_height"].decode("utf-8")) + if self.tx["last_date_str"] is not None: + last_date_str = self.tx["last_date_str"].decode("utf-8") + return last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, last_vol, last_avol, last_date, last_height, last_date_str + + def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt, newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt, vol_change, vol, avol, dt, height, dtstr): + self.tx["last_profit"] = last_profit + self.tx["last_fees"] = last_fees + self.tx["last_txs"] = last_txs + self.tx["last_eatxs"] = last_eatxs + self.tx["last_newaddr_cnt"] = newaddr_cnt + self.tx["last_newaddr_vol"] = newaddr_vol + self.tx["last_active_addr_cnt"] = active_addr_cnt + self.tx["last_tx_addr_cnt"] = tx_addr_cnt + self.tx["last_rx_addr_cnt"] = rx_addr_cnt + self.tx["last_vol_change"] = vol_change + self.tx["last_vol"] = vol + self.tx["last_avol"] = avol + self.tx["last_date"] = dt + self.tx["last_height"] = height + self.tx["last_date_str"] = dtstr + ''' + ''' + def get_last_addr(self): + last_daily_cnt = None + last_date = None + last_height = None + last_date_str = None + if self.addr["last_daily_cnt"] is not None: + last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8")) + if self.addr["last_date"] is not None: + last_date = int(self.addr["last_date"].decode("utf-8")) + if self.addr["last_height"] is not None: + last_height = int(self.addr["last_height"].decode("utf-8")) + if self.addr["last_date_str"] is not None: + last_date_str = self.addr["last_date_str"].decode("utf-8") + return last_daily_cnt, last_date, last_height, last_date_str + + def set_last_addr(self, daily_cnt, dt, height, dtstr): + self.addr["last_daily_cnt"] = daily_cnt + self.addr["last_date"] = dt + self.addr["last_height"] = height + self.addr["last_date_str"] = dtstr + ''' + + def is_active_address(self, address): + result = address in self.active_address + if not result: + self.active_address.add(address) + return result + + def reset_active_address(self): + self.active_address.clear() + + def get_active_address_cnt(self): + return len(self.active_address) + + def is_send_address(self, address): + result = address in self.send_address + if not result: + 
self.send_address.add(address) + return result + + def reset_send_address(self): + self.send_address.clear() + + def get_send_address_cnt(self): + return len(self.send_address) + + def is_receive_address(self, address): + result = address in self.receive_address + if not result: + self.receive_address.add(address) + return result + + def reset_receive_address(self): + self.receive_address.clear() + + def get_receive_address_cnt(self): + return len(self.receive_address) + + def save_addr(self, address, balance): + new_balance = balance + if address in self.zbalance: + new_balance = self.zbalance.score(address) + balance + # print("update", self.zbalance.score(address), balance, new_balance) + # time.sleep(10) + if new_balance < 0.01: + del self.zbalance[address] + # print("check exist", address, address in self.zbalance) + # time.sleep(10) + return + self.zbalance.add({address: new_balance}) + + ''' + def delete_addr(self, config): + self.addr.clear() + self.zbalance.clear() + ''' + + def is_in_addr(self, address): + return address in self.zbalance + + def get_addr_cnt(self): + return len(self.zbalance) + + ''' + def delete_rv(self, config): + self.rv.clear() + + def get_last_rv(self): + last_rv = None + last_date = None + last_height = None + last_date_str = None + if self.rv["last_rv"] is not None: + last_rv = float(self.rv["last_rv"].decode("utf-8")) + if self.rv["last_date"] is not None: + last_date = int(self.rv["last_date"].decode("utf-8")) + if self.rv["last_height"] is not None: + last_height = int(self.rv["last_height"].decode("utf-8")) + if self.rv["last_date_str"] is not None: + last_date_str = self.rv["last_date_str"].decode("utf-8") + return last_rv, last_date, last_height, last_date_str + + def set_last_rv(self, rv, dt, height, dtstr): + self.rv["last_rv"] = rv + self.rv["last_date"] = dt + self.rv["last_height"] = height + self.rv["last_date_str"] = dtstr + ''' + + def get_all_address(self): + return self.zbalance.keys() + + def delete_address_data(self, config): + self.zbalance.clear() + + ''' + def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0): + if len(address) > 0: + results = [] + result = {} + result["address"] = address + balance = self.zbalance.score(address) + print(balance) + if balance is not None: + result["balance"] = balance + results.append(result) + return results + + match_result = None + if start_balance > 0: + if end_balance > 0: + match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False) + else: + match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False) + else: + if end_balance > 0: + match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False) + + results = [] + if match_result is not None: + #print(match_result) + for addr, balance2 in match_result: + address = addr.decode('utf-8') + result = {} + result["address"] = address + result["balance"] = balance2 + results.append(result) + if limit > 0 and len(results) >= limit: + break + return results + ''' + + + + + + + + + + + diff --git a/lyq/btc24h_stats.py b/lyq/btc24h_stats.py new file mode 100644 index 0000000..05c88d6 --- /dev/null +++ b/lyq/btc24h_stats.py @@ -0,0 +1,1110 @@ +# coding=utf-8 +import sys +import time +from easybitcoinrpc import RPC +from bitcoinutils.setup import setup +from bitcoinutils.script import Script +from bitcoinutils.keys import P2wpkhAddress, P2wshAddress, P2shAddress, PrivateKey, PublicKey, SegwitAddress, \ + P2pkhAddress +import requests +import ujson +from 
requests import Session
+from requests.exceptions import ConnectionError, Timeout, TooManyRedirects
+import btc24h_db_if
+import btc24h_redis_if
+import pymysql
+DEF_CONFIG_RULES = "rules"
+oklinkheaders = {'Ok-Access-Key': "6b7bb4fb-70d5-44a4-bc6f-0a43a8e39896"}
+class StatIf:
+    def __init__(self, ip="127.0.0.1", port="8332", user="user", password="password"):
+        self.host = ip
+        self.port = port
+        self.user = user
+        self.pwd = password
+
+        self.rpc = None
+        self.height = 0
+        self.pricedict = {}
+        setup('mainnet')
+    # StatIf is the statistics interface to a Bitcoin node over RPC. __init__ stores the
+    # node connection parameters (host, port, user, password), defers creating the RPC
+    # client (self.rpc) until first use, tracks the current working block height
+    # (self.height), keeps a day-timestamp -> price cache (self.pricedict), and
+    # configures bitcoinutils for mainnet via setup('mainnet').
+    def get_vin_address(self, prev_scriptpubkey, prev_height, txid):
+        prev_type = prev_scriptpubkey["type"]
+        prev_address = None
+        if prev_type != "nulldata":
+            if isinstance(prev_scriptpubkey, dict):
+                if "address" in prev_scriptpubkey:
+                    prev_address = prev_scriptpubkey["address"]
+            else:
+                if prev_scriptpubkey.is_address():
+                    prev_address = prev_scriptpubkey["address"]
+            if prev_address is None:
+                if prev_type == "pubkey":
+                    temphex = prev_scriptpubkey["hex"]
+                    try:
+                        if temphex[2:4] == "04":
+                            prev_address = PublicKey(temphex[2:132]).get_address(False).to_string()
+                        elif temphex[2:4] == "02" or temphex[2:4] == "03":
+                            prev_address = PublicKey(temphex[2:68]).get_address().to_string()
+                    except Exception:
+                        print("decode address failed", str(prev_height), "txid", txid, "hex", temphex)
+            if prev_address is None:
+                prev_address = prev_scriptpubkey["hex"]
+
+        return prev_address
+    # get_vin_address extracts the sending address from a previous output's scriptPubKey.
+    # For anything other than "nulldata" it first looks for an explicit "address" field;
+    # for bare "pubkey" scripts it decodes the embedded public key (uncompressed "04...",
+    # compressed "02"/"03"...) into an address, printing a diagnostic if decoding fails;
+    # as a last resort it falls back to the raw script hex so every output still gets a
+    # usable key.
+    def get_vout_address(self, scriptpubkey, height, txid):
+        return self.get_vin_address(scriptpubkey, height, txid)
+    # get_vout_address simply delegates to get_vin_address: output scripts are decoded
+    # with exactly the same logic as previous-output scripts.
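As a standalone illustration (not part of the patch), the "pubkey" branch above can be exercised on the genesis block's P2PK scriptPubKey. The 0x41 push opcode occupies the first two hex characters, which is why the key itself is read from temphex[2:132]:

    from bitcoinutils.setup import setup
    from bitcoinutils.keys import PublicKey

    setup('mainnet')
    # Genesis coinbase scriptPubKey: 0x41 push, 65-byte uncompressed key, OP_CHECKSIG.
    temphex = ("4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb6"
               "49f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac")
    if temphex[2:4] == "04":
        addr = PublicKey(temphex[2:132]).get_address(False).to_string()
        print(addr)  # expected: 1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa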
+    def get_history_price(self, batch_size=5000):
+        #pricedict = {}
+        """Load the Nasdaq-sourced rows of btc_prices from MySQL into a dict."""
+        db_config = {
+            "host": "192.168.194.216",
+            "user": "root",
+            "password": "2GS@bPYcgiMyL14A",
+            "database": "btcdb",
+            "port": 4423,
+            "connect_timeout": 60,
+            "read_timeout": 60,
+            "write_timeout": 60,
+            "charset": "utf8mb4"
+        }
+
+        offset = 0
+        self.pricedict = {}
+
+        while True:
+            connection = pymysql.connect(**db_config)
+            try:
+                with connection.cursor() as cursor:
+                    sql = "SELECT timestamp, price FROM btc_prices WHERE source = 'Nasdaq' ORDER BY timestamp LIMIT %s OFFSET %s"
+                    cursor.execute(sql, (batch_size, offset))
+                    rows = cursor.fetchall()
+                    if not rows:
+                        break
+                    for timestamp, price in rows:
+                        self.pricedict[str(int(timestamp))] = float(price)
+            finally:
+                connection.close()
+
+            offset += batch_size
+            if len(rows) < batch_size:
+                break  # last page read
+
+        return self.pricedict
+    # get_history_price pages through the btc_prices table (source = 'Nasdaq') in
+    # batch_size chunks, rebuilding self.pricedict keyed by the day's UNIX timestamp
+    # (as a string) with the float price as value. Paging stops when a page comes back
+    # empty or shorter than batch_size.
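A possible refinement, sketched here outside the patch: LIMIT/OFFSET paging re-scans all skipped rows on every page, while a keyset variant resumes from the last timestamp seen. This assumes timestamps are unique per source, which holds when the table stores one row per day:

    import pymysql

    def load_prices_keyset(db_config, source="Nasdaq", batch_size=5000):
        """Keyset-paginated variant: resumes from the last seen timestamp
        instead of re-scanning OFFSET rows on every page."""
        prices, last_ts = {}, -1
        connection = pymysql.connect(**db_config)
        try:
            with connection.cursor() as cursor:
                while True:
                    cursor.execute(
                        "SELECT timestamp, price FROM btc_prices "
                        "WHERE source = %s AND timestamp > %s "
                        "ORDER BY timestamp LIMIT %s",
                        (source, last_ts, batch_size))
                    rows = cursor.fetchall()
                    if not rows:
                        break
                    for ts, price in rows:
                        prices[str(int(ts))] = float(price)
                    last_ts = rows[-1][0]  # resume point for the next page
        finally:
            connection.close()
        return prices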
"coinbase"): + cursor.execute(""" + SELECT price FROM btc_realtime_prices + WHERE source=%s + ORDER BY timestamp DESC + LIMIT 1 + """, (source,)) + row = cursor.fetchone() + if row: + price = float(row[0]) + break + finally: + connection.close() + return price + # “get_current_price”方法似乎是另一种实用方法,这次用于从指定的API端点检索以美元为单位的比特币的当前价格。 + # 以下是此方法中发生的情况: + # - 它向指定的URL ('https://bitcoinexplorer.org/api/price') 发送GET请求以获取当前比特币价格。此请求是使用'requests.get()'函数发出的。 + # - 检查响应状态码是否为“200”,表示响应成功。 + # - 如果响应成功,它会使用'ujson.loads(response_price.text)'将响应文本解析为JSON。 + # - 它从解析的JSON响应中提取以美元为单位的价格,并将其转换为浮点数。 + # - 它打印响应文本和提取的价格以进行调试。 + # - 它在处理后关闭响应对象。 + # - 最后,它返回检索到的价格。如果请求失败或无法提取价格,则返回“0”。 + # 总体而言,此方法从指定的API端点获取当前比特币价格,并将其作为浮点数返回。 + def get_day_utc(self, utc_time): + t = time.gmtime(utc_time) + daystr = time.strftime("%d %b %Y", t) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + return dayutc + # “get_day_utc”方法似乎是一种实用方法,用于将给定的UNIX时间戳转换为由该时间戳表示的UTC日(00:00:00 UTC)的开始。 + # 此方法的作用如下: + # - 它采用UNIX时间戳“utc_time”作为输入。 + # - 它使用'time.gmtime(utc_time)'将给定的时间戳转换为UTC中的时间结构。 + # - 它将UTC时间格式化为人类可读的日期字符串,表示使用'time.strftime(“%d %b %Y”, t)'。此格式为“DD Mon YYYY”(例如,“01 Jan 2024”)。 + # - 它使用'time.mktime(time.strptime(daystr, “%d %b %Y”))'将格式化的日期字符串转换回表示当天开始的UNIX 时间戳。这有效地从时间戳中删除了时间分量,将其设置为午夜 (00:00:00)。 + # - 它返回生成的UNIX时间戳,表示与输入时间戳对应的UTC日期的开始。 + # 总之,此方法提供了一种获取UNIX时间戳的方法,该时间戳表示给定时间戳的UTC日开始时间。 + def rpc_cmd(self, cmd): + if self.rpc is None: + self.rpc = RPC(self.host, self.port, self.user, self.pwd) + + while True: + try: + if cmd == "getblockstats": + getblockstats = self.rpc.blockchain.get_block_stats(self.height) + return getblockstats + elif cmd == "getblock": + getblock = self.rpc.blockchain.get_block(self.height, 3) + return getblock + elif cmd == "getblockcount": + getblockcount = self.rpc.blockchain.get_block_count() + if self.height == getblockcount: + print("time sleep start") + time.sleep(30) + print("time sleep end") + else: + self.height += 1 + print("next height", self.height) + return None + elif cmd == "getmempoolinfo": + getmempoolinfo = self.rpc.blockchain.get_mempool_info() + return getmempoolinfo + else: + return None + except: + time.sleep(1) + self.rpc = RPC(self.host, self.port, self.user, self.pwd) + # 这种“rpc_cmd”方法似乎是一种实用方法,用于对比特币节点执行各种RPC(远程过程调用)命令。 + # 此方法的作用如下: + # - 它首先检查RPC客户端 ('self.rpc') 是否已初始化。如果没有,它将使用提供的主机、端口、用户名和密码对其进行初始化。 + # - 它进入一个循环,在该循环中,它不断尝试执行指定的RPC命令 ('cmd'),直到它成功。 + # - 在循环内部,它根据'cmd'的值具有不同的分支。根据命令的不同,它使用初始化的RPC客户端 ('self.rpc') 执行相应的RPC调用。 + # - 如果命令是 “getblockstats”,它会调用具有当前高度的'self.rpc.blockchain.get_block_stats()'并返回结果。 + # - 如果命令是 “getblock”,它会调用'self.rpc.blockchain.get_block()',当前高度和详细程度级别为3(完整块详细信息)并返回结果。 + # - 如果命令是 “getblockcount”,则调用'self.rpc.blockchain.get_block_count()'来获取当前块计数。如果当前高度与块计数匹配,则等待30秒,然后再次检查。否则,它将递增高度并返回“无”。 + # - 如果命令是 “getmempoolinfo”,它将调用'self.rpc.blockchain.get_mempool_info()'并返回结果。 + # - 如果该命令与任何指定的命令不匹配,则返回“None”。 + # - 如果在执行RPC命令期间发生任何异常,它会捕获它们,等待1秒钟,然后重新初始化RPC客户端以从潜在的连接问题中恢复。 + # 这种方法提供了一种灵活的方式,可以使用各种RPC命令与比特币节点进行交互,同时处理潜在的错误和重试。 + def stat_load(self, redisif, config): + self.rules = config[DEF_CONFIG_RULES] + self.get_history_price() + self.history_prices = self.get_history_price2() + self.current_price = self.get_current_price() + self.current_utc = self.get_current_utc() + self.history_prices[str(self.current_utc)] = self.current_price + self.daily_date = redisif.get_btc_data("daily_date") + if self.daily_date is None: + self.stat_reset() + return + + self.daily_date = int(redisif.get_btc_data("daily_date")) + self.daily_height = 
int(redisif.get_btc_data("daily_height").decode("utf-8")) + self.height = self.daily_height + self.daily_height_begin = int(redisif.get_btc_data("daily_height_begin").decode("utf-8")) + self.daily_height_end = int(redisif.get_btc_data("daily_height_end").decode("utf-8")) + self.daily_date_string = redisif.get_btc_data("daily_date_string").decode("utf-8") + self.daily_profit = float(redisif.get_btc_data("daily_profit").decode("utf-8")) + self.daily_fees = float(redisif.get_btc_data("daily_fees").decode("utf-8")) + self.daily_txs = int(redisif.get_btc_data("daily_txs").decode("utf-8")) + #self.daily_new_address = int(redisif.get_btc_data("daily_new_address").decode("utf-8")) + #self.daily_total_address = int(redisif.get_btc_data("daily_total_address").decode("utf-8")) + #self.daily_new_address_volume = float(redisif.get_btc_data("daily_new_address_volume").decode("utf-8")) + self.daily_active_address = int(redisif.get_btc_data("daily_active_address").decode("utf-8")) + self.daily_send_address = int(redisif.get_btc_data("daily_send_address").decode("utf-8")) + self.daily_receive_address = int(redisif.get_btc_data("daily_receive_address").decode("utf-8")) + self.daily_volume = float(redisif.get_btc_data("daily_volume").decode("utf-8")) + self.daily_eavolume = float(redisif.get_btc_data("daily_eavolume").decode("utf-8")) + self.daily_asol = float(redisif.get_btc_data("daily_asol").decode("utf-8")) + self.daily_eaasol = float(redisif.get_btc_data("daily_eaasol").decode("utf-8")) + self.daily_atxs = float(redisif.get_btc_data("daily_atxs").decode("utf-8")) + self.daily_sopr_buy = float(redisif.get_btc_data("daily_sopr_buy").decode("utf-8")) + self.daily_asopr_buy = float(redisif.get_btc_data("daily_asopr_buy").decode("utf-8")) + self.daily_easopr_buy = float(redisif.get_btc_data("daily_easopr_buy").decode("utf-8")) + self.daily_lthsopr_buy = float(redisif.get_btc_data("daily_lthsopr_buy").decode("utf-8")) + self.daily_sthsopr_buy = float(redisif.get_btc_data("daily_sthsopr_buy").decode("utf-8")) + self.daily_sopr_sell = float(redisif.get_btc_data("daily_sopr_sell").decode("utf-8")) + self.daily_asopr_sell = float(redisif.get_btc_data("daily_asopr_sell").decode("utf-8")) + self.daily_easopr_sell = float(redisif.get_btc_data("daily_easopr_sell").decode("utf-8")) + self.daily_lthsopr_sell = float(redisif.get_btc_data("daily_lthsopr_sell").decode("utf-8")) + self.daily_sthsopr_sell = float(redisif.get_btc_data("daily_sthsopr_sell").decode("utf-8")) + self.daily_cdd = float(redisif.get_btc_data("daily_cdd").decode("utf-8")) + self.daily_sacdd = float(redisif.get_btc_data("daily_sacdd").decode("utf-8")) + self.daily_eacdd = float(redisif.get_btc_data("daily_eacdd").decode("utf-8")) + self.daily_cdd_days1 = float(redisif.get_btc_data("daily_cdd_days1").decode("utf-8")) + self.daily_cdd_days7 = float(redisif.get_btc_data("daily_cdd_days7").decode("utf-8")) + self.daily_cdd_days30 = float(redisif.get_btc_data("daily_cdd_days30").decode("utf-8")) + self.daily_cdd_days60 = float(redisif.get_btc_data("daily_cdd_days60").decode("utf-8")) + self.daily_cdd_days90 = float(redisif.get_btc_data("daily_cdd_days90").decode("utf-8")) + self.daily_cdd_days180 = float(redisif.get_btc_data("daily_cdd_days180").decode("utf-8")) + self.daily_cdd_days365 = float(redisif.get_btc_data("daily_cdd_days365").decode("utf-8")) + self.daily_cdd_days730 = float(redisif.get_btc_data("daily_cdd_days730").decode("utf-8")) + #self.daily_csupply = float(redisif.get_btc_data("daily_csupply").decode("utf-8")) + #self.daily_mintusd = 
float(redisif.get_btc_data("daily_mintusd").decode("utf-8"))
+        #self.daily_sumcsupply = float(redisif.get_btc_data("daily_sumcsupply").decode("utf-8"))
+        #self.daily_sumcdd = float(redisif.get_btc_data("daily_sumcdd").decode("utf-8"))
+        #self.daily_sumeacdd = float(redisif.get_btc_data("daily_sumeacdd").decode("utf-8"))
+        #self.daily_rprofit = float(redisif.get_btc_data("daily_rprofit").decode("utf-8"))
+        #self.daily_rloss = float(redisif.get_btc_data("daily_rloss").decode("utf-8"))
+        #self.daily_marketcap = float(redisif.get_btc_data("daily_marketcap").decode("utf-8"))
+        #self.daily_rcap = float(redisif.get_btc_data("daily_rcap").decode("utf-8"))
+        #self.daily_earcap = float(redisif.get_btc_data("daily_earcap").decode("utf-8"))
+        #self.daily_mvrv = float(redisif.get_btc_data("daily_mvrv").decode("utf-8"))
+        '''self.daily_lth_marketcap = float(redisif.get_btc_data("daily_lth_marketcap").decode("utf-8"))
+        self.daily_lth_rcap = float(redisif.get_btc_data("daily_lth_rcap").decode("utf-8"))
+        self.daily_lth_mvrv = float(redisif.get_btc_data("daily_lth_mvrv").decode("utf-8"))
+        self.daily_sth_marketcap = float(redisif.get_btc_data("daily_sth_marketcap").decode("utf-8"))
+        self.daily_sth_rcap = float(redisif.get_btc_data("daily_sth_rcap").decode("utf-8"))
+        self.daily_sth_mvrv = float(redisif.get_btc_data("daily_sth_mvrv").decode("utf-8"))'''
+        #self.daily_nupl = float(redisif.get_btc_data("daily_nupl").decode("utf-8"))
+        #self.daily_uprofit = float(redisif.get_btc_data("daily_uprofit").decode("utf-8"))
+        #self.daily_uloss = float(redisif.get_btc_data("daily_uloss").decode("utf-8"))
+        #self.daily_lthnupl = float(redisif.get_btc_data("daily_lthnupl").decode("utf-8"))
+        #self.daily_sthnupl = float(redisif.get_btc_data("daily_sthnupl").decode("utf-8"))
+        self.daily_price = self.get_price(self.height, self.daily_date)
+        #v2
+        self.daily_mint = float(redisif.get_btc_data("daily_mint").decode("utf-8"))
+        self.daily_lth_volume = float(redisif.get_btc_data("daily_lth_volume").decode("utf-8"))
+        self.daily_frm = float(redisif.get_btc_data("daily_frm").decode("utf-8"))
+        #self.daily_cvdd = float(redisif.get_btc_data("daily_cvdd").decode("utf-8"))
+        #self.daily_nvt_ratio = float(redisif.get_btc_data("daily_nvt_ratio").decode("utf-8"))
+        #self.daily_balanced_price = float(redisif.get_btc_data("daily_balanced_price").decode("utf-8"))
+        #self.daily_velocity = float(redisif.get_btc_data("daily_velocity").decode("utf-8"))
+        #self.daily_mempool_volume = float(redisif.get_btc_data("daily_mempool_volume").decode("utf-8"))
+        #self.daily_realized_price = float(redisif.get_btc_data("daily_realized_price").decode("utf-8"))
+        #self.daily_transferred_price = float(redisif.get_btc_data("daily_transferred_price").decode("utf-8"))
+        #v2
+        #self.daily_sumvdd = float(redisif.get_btc_data("daily_sumvdd").decode("utf-8"))
+        #self.daily_sumdays = float(redisif.get_btc_data("daily_sumdays").decode("utf-8"))
+    # stat_load restores the collector's working state: it reads the rule set from
+    # config, reloads the historical price caches (get_history_price2's CryptoCompare
+    # dict is the one kept as self.history_prices), fetches the current price and UTC
+    # day, and then decodes every persisted daily counter back out of Redis
+    # (bytes -> int/float/str). If Redis holds no daily_date yet, the state is reset via
+    # stat_reset instead.
+    def stat_save(self, redisif):
redisif.set_btc_data("daily_date", self.daily_date) + self.daily_height = self.height + redisif.set_btc_data("daily_height", self.daily_height) + redisif.set_btc_data("daily_height_begin", self.daily_height_begin) + redisif.set_btc_data("daily_height_end", self.daily_height_end) + redisif.set_btc_data("daily_date_string", self.daily_date_string) + redisif.set_btc_data("daily_profit", self.daily_profit) + redisif.set_btc_data("daily_fees", self.daily_fees) + redisif.set_btc_data("daily_txs", self.daily_txs) + #redisif.set_btc_data("daily_new_address", self.daily_new_address) + #redisif.set_btc_data("daily_total_address", self.daily_total_address) + #redisif.set_btc_data("daily_new_address_volume", self.daily_new_address_volume) + redisif.set_btc_data("daily_active_address", self.daily_active_address) + redisif.set_btc_data("daily_send_address", self.daily_send_address) + redisif.set_btc_data("daily_receive_address", self.daily_receive_address) + redisif.set_btc_data("daily_volume", self.daily_volume) + redisif.set_btc_data("daily_eavolume", self.daily_eavolume) + redisif.set_btc_data("daily_asol", self.daily_asol) + redisif.set_btc_data("daily_eaasol", self.daily_eaasol) + redisif.set_btc_data("daily_atxs", self.daily_atxs) + redisif.set_btc_data("daily_sopr_buy", self.daily_sopr_buy) + redisif.set_btc_data("daily_asopr_buy", self.daily_asopr_buy) + redisif.set_btc_data("daily_easopr_buy", self.daily_easopr_buy) + redisif.set_btc_data("daily_lthsopr_buy", self.daily_lthsopr_buy) + redisif.set_btc_data("daily_sthsopr_buy", self.daily_sthsopr_buy) + redisif.set_btc_data("daily_sopr_sell", self.daily_sopr_sell) + redisif.set_btc_data("daily_asopr_sell", self.daily_asopr_sell) + redisif.set_btc_data("daily_easopr_sell", self.daily_easopr_sell) + redisif.set_btc_data("daily_lthsopr_sell", self.daily_lthsopr_sell) + redisif.set_btc_data("daily_sthsopr_sell", self.daily_sthsopr_sell) + redisif.set_btc_data("daily_cdd", self.daily_cdd) + redisif.set_btc_data("daily_sacdd", self.daily_sacdd) + redisif.set_btc_data("daily_eacdd", self.daily_eacdd) + redisif.set_btc_data("daily_cdd_days1", self.daily_cdd_days1) + redisif.set_btc_data("daily_cdd_days7", self.daily_cdd_days7) + redisif.set_btc_data("daily_cdd_days30", self.daily_cdd_days30) + redisif.set_btc_data("daily_cdd_days60", self.daily_cdd_days60) + redisif.set_btc_data("daily_cdd_days90", self.daily_cdd_days90) + redisif.set_btc_data("daily_cdd_days180", self.daily_cdd_days180) + redisif.set_btc_data("daily_cdd_days365", self.daily_cdd_days365) + redisif.set_btc_data("daily_cdd_days730", self.daily_cdd_days730) + #redisif.set_btc_data("daily_csupply", self.daily_csupply) + #redisif.set_btc_data("daily_mintusd", self.daily_mintusd) + #redisif.set_btc_data("daily_sumcsupply", self.daily_sumcsupply) + #redisif.set_btc_data("daily_sumcdd", self.daily_sumcdd) + #redisif.set_btc_data("daily_sumeacdd", self.daily_sumeacdd) + #redisif.set_btc_data("daily_rprofit", self.daily_rprofit) + #redisif.set_btc_data("daily_rloss", self.daily_rloss) + #redisif.set_btc_data("daily_marketcap", self.daily_marketcap) + #redisif.set_btc_data("daily_rcap", self.daily_rcap) + #redisif.set_btc_data("daily_earcap", self.daily_earcap) + #redisif.set_btc_data("daily_mvrv", self.daily_mvrv) + '''redisif.set_btc_data("daily_lth_marketcap", self.daily_lth_marketcap) + redisif.set_btc_data("daily_lth_rcap", self.daily_lth_rcap) + redisif.set_btc_data("daily_lth_mvrv", self.daily_lth_mvrv) + redisif.set_btc_data("daily_sth_marketcap", self.daily_sth_marketcap) + 
redisif.set_btc_data("daily_sth_rcap", self.daily_sth_rcap) + redisif.set_btc_data("daily_sth_mvrv", self.daily_sth_mvrv)''' + #redisif.set_btc_data("daily_nupl", self.daily_nupl) + #redisif.set_btc_data("daily_uprofit", self.daily_uprofit) + #redisif.set_btc_data("daily_uloss", self.daily_uloss) + #redisif.set_btc_data("daily_lthnupl", self.daily_lthnupl) + #redisif.set_btc_data("daily_sthnupl", self.daily_sthnupl) + + #v2 + redisif.set_btc_data("daily_mint", self.daily_mint) + redisif.set_btc_data("daily_lth_volume", self.daily_lth_volume) + redisif.set_btc_data("daily_frm", self.daily_frm) + #redisif.set_btc_data("daily_cvdd", self.daily_cvdd) + #redisif.set_btc_data("daily_nvt_ratio", self.daily_nvt_ratio) + #redisif.set_btc_data("daily_balanced_price", self.daily_balanced_price) + #redisif.set_btc_data("daily_velocity", self.daily_velocity) + #redisif.set_btc_data("daily_mempool_volume", self.daily_mempool_volume) + #redisif.set_btc_data("daily_realized_price", self.daily_realized_price) + #redisif.set_btc_data("daily_transferred_price", self.daily_transferred_price) + #redisif.set_btc_data("daily_sumvdd", self.daily_sumvdd) + #redisif.set_btc_data("daily_sumdays", self.daily_sumdays) + # “stat_save”方法似乎负责将各种统计数据保存到Redis。 + # 以下是此方法的作用的细分: + # - 它接受一个'redisif'对象作为输入,这大概是一个Redis接口对象。 + # - 它使用“redisif.set_btc_data”方法将各种数据保存到Redis。保存的数据包括每日日期、高度、日期字符串、利润、费用、交易数量(“txs”)、活动地址、交易量以及与比特币统计数据相关的各种其他指标。 + # - 代码中有一些被注释掉的部分,这些部分似乎与可以保存到Redis的其他统计数据有关,但它们目前处于活动状态。 + # - 它还保存了一些与版本2 ('v2') 统计数据相关的数据,例如每日铸币量、长期持有量和自由流通市值。 + # 总的来说,这种方法提供了一种将与比特币相关的统计数据保存到Redis数据库以供以后检索和分析的方法。 + def stat_reset(self): + self.daily_date = 0 # working date + self.daily_height = 878280 # working height, ref. 747376 + self.daily_date_string = "" # working date string + + #self.daily_csupply = 0 # circulating supply + #self.daily_sumcsupply = 0 # cumulative circulating supply, for liveliness + #self.daily_sumcdd = 0 # cumulative coin days destoryed + #self.daily_sumeacdd = 0 # cumulative coin days destoryed(Entity-Adjusted) + #self.daily_marketcap = 0 # market capitalization + #self.daily_rcap = 0 # Realized capitalization + #self.daily_earcap = 0 # Realized capitalization(Entity-Adjusted) + ''' + self.daily_lth_marketcap = 0 # Long Term Holder market capitalization + self.daily_lth_rcap = 0 # Long Term Holder Realized capitalization + self.daily_sth_marketcap = 0 # Short Term Holder market capitalization + self.daily_sth_rcap = 0 # Short Term Holder Realized capitalization + ''' + #self.daily_uprofit = 0 # Unrealized Profit + #self.daily_uloss = 0 # Unrealized Loss + #self.daily_lthnupl = 0 # Long Term Holder NUPL + #self.daily_sthnupl = 0 # Short Term Holder NUPL + + self.stat_daily_reset() + + #self.daily_rprofit = 0 # realized profit + #self.daily_rloss = 0 # realized loss + + #v2 + #self.daily_sumvdd = 0 + #self.daily_sumdays = 0 + # “stat_reset”方法用于将与统计数据相关的各种属性重置为其初始值。以下是此方法的作用的细分: + # - 它将“daily_date”、“daily_height”和“daily_date_string”属性设置为其初始值。这些属性分别表示工作日期、工作高度和工作日期字符串。 + # - 它重置了与统计指标相关的几个属性,例如流通供应量、累计流通供应量、累计销毁币天数、市值、已实现资本化、已实现利润等。这些指标似乎与分析比特币持有者的行为有关。 + # - 代码中有一些注释掉的部分似乎与其他统计指标有关,但它们目前处于活动状态。 + # - 它将另一个方法称为“stat_daily_reset”,该方法可能会重置额外的每日统计信息。但是,此处未提供此方法的实现。 + # - 有一些与版本2 ('v2') 统计信息相关的注释部分,但它们目前不处于活动状态。 + # 总体而言,此方法提供了一种将各种统计属性重置为其初始值的方法,这对于初始化对象的状态或将其重置为已知状态非常有用。 + def stat_daily_reset(self): + self.daily_profit = 0 # Number of UTXOs in Profit + self.daily_fees = 0 # block fees each day + self.daily_txs = 0 # block txs exclude coinbase transaction + #self.daily_new_address = 0 # number of new address + 
#self.daily_total_address = redisif.get_addr_cnt() # number of addresses
+        #self.daily_new_address_volume = 0 # volume of new addresses
+        self.daily_active_address = 0 # number of active addresses
+        self.daily_send_address = 0 # number of sending addresses
+        self.daily_receive_address = 0 # number of receiving addresses
+        self.daily_volume = 0 # volume for each day
+        self.daily_eavolume = 0 # volume for each day (Entity-Adjusted)
+        self.daily_asol = 0 # Average Spent Output Lifespan
+        self.daily_eaasol = 0 # Average Spent Output Lifespan (Entity-Adjusted)
+        self.daily_atxs = 0 # tx count excluding outputs spent within 1 hour
+        self.daily_sopr_buy = 0 # Spent Output Profit Ratio, buy side
+        self.daily_asopr_buy = 0 # SOPR buy side, excluding outputs spent within 1 hour
+        self.daily_easopr_buy = 0 # SOPR buy side (Entity-Adjusted)
+        self.daily_lthsopr_buy = 0 # Long-Term Holder SOPR, buy side
+        self.daily_sthsopr_buy = 0 # Short-Term Holder SOPR, buy side
+        self.daily_sopr_sell = 0 # Spent Output Profit Ratio, sell side
+        self.daily_asopr_sell = 0 # SOPR sell side, excluding outputs spent within 1 hour
+        self.daily_easopr_sell = 0 # SOPR sell side (Entity-Adjusted)
+        self.daily_lthsopr_sell = 0 # Long-Term Holder SOPR, sell side
+        self.daily_sthsopr_sell = 0 # Short-Term Holder SOPR, sell side
+        self.daily_cdd = 0 # Coin Days Destroyed
+        self.daily_sacdd = 0 # Supply-Adjusted CDD
+        self.daily_eacdd = 0 # Coin Days Destroyed (Entity-Adjusted)
+        self.daily_cdd_days1 = 0 # CDD from outputs <= 1 day old
+        self.daily_cdd_days7 = 0 # 1-7 days
+        self.daily_cdd_days30 = 0 # 7-30 days
+        self.daily_cdd_days60 = 0 # 30-60 days
+        self.daily_cdd_days90 = 0 # 60-90 days
+        self.daily_cdd_days180 = 0 # 90-180 days
+        self.daily_cdd_days365 = 0 # 180-365 days
+        self.daily_cdd_days730 = 0 # > 365 days
+
+        #self.daily_mintusd = 0 # daily coin issuance (in USD), for Puell Multiple
+
+        #self.daily_mvrv = 0 # market-value-to-realized-value ratio
+        self.daily_lth_mvrv = 0 # Long Term Holder MVRV
+        self.daily_sth_mvrv = 0 # Short Term Holder MVRV
+
+        #self.daily_nupl = 0 # Net Unrealized Profit/Loss
+
+        self.daily_height_begin = 0
+        self.daily_height_end = 0
+
+        self.daily_price = 0
+        self.redis.reset_active_address()
+        self.redis.reset_send_address()
+        self.redis.reset_receive_address()
+
+        #v2
+        self.daily_mint = 0
+        self.daily_lth_volume = 0
+        self.daily_frm = 0
+        #self.daily_cvdd = 0
+        #self.daily_nvt_ratio = 0
+        #self.daily_balanced_price = 0
+        #self.daily_realized_price = 0
+        #self.daily_transferred_price = 0
+        #self.daily_velocity = 0
+        #self.daily_mempool_volume = 0
+    # stat_daily_reset zeroes every per-day accumulator: profit counter, fees, tx and
+    # address counters, volumes, lifespan (ASOL) sums, the SOPR buy/sell components, the
+    # CDD total and its age buckets, the working height range and price, and the v2
+    # fields (mint, lth_volume, frm). It also clears the per-day Redis address sets via
+    # reset_active_address / reset_send_address / reset_receive_address.
+    def stat_cdd(self, prev_value, days):
+        cdd = prev_value * days
+        self.daily_cdd += cdd
+        #self.daily_sumcdd += cdd
+        if days <= 1:
+            self.daily_cdd_days1 += cdd
+        elif days <= 7:
+            self.daily_cdd_days7 += cdd
+        elif days <= 30:
+            self.daily_cdd_days30 += cdd
+        elif days <= 60:
+            self.daily_cdd_days60 += cdd
+        elif days <= 90:
+            self.daily_cdd_days90 += cdd
+        elif days <= 180:
+            self.daily_cdd_days180 += cdd
+        elif days <= 365:
+            self.daily_cdd_days365 += cdd
+        else:
+            self.daily_cdd_days730 += cdd
+    # stat_cdd accumulates Coin Days Destroyed: cdd = prev_value * days is added to the
+    # daily total and to exactly one lifespan bucket (<=1, <=7, <=30, <=60, <=90, <=180,
+    # <=365 days, else the 730 bucket), which tracks how old the spent coins were.
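A standalone sketch (not part of the patch) of the same bucketing rule, with one worked number: 2 BTC that sat idle for 45 days destroys 90 coin-days and lands in the <= 60-day bucket:

    def cdd_bucket(days):
        """Return the label of the lifespan bucket stat_cdd would credit."""
        for limit in (1, 7, 30, 60, 90, 180, 365):
            if days <= limit:
                return f"days{limit}"
        return "days730"

    value_btc, days_held = 2.0, 45
    cdd = value_btc * days_held      # 90.0 coin-days destroyed
    print(cdd_bucket(days_held))     # -> days60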
+    def get_price(self, height, dayutc):
+        price = 0
+        dayutcstr = str(dayutc)
+        cnt = 0
+        while cnt < 3:
+            cnt += 1
+            if dayutcstr in self.history_prices:
+                price = self.history_prices[dayutcstr]
+                break
+            elif dayutcstr == str(self.current_utc):
+                price = self.get_current_price()
+                self.current_price = price
+                self.history_prices[dayutcstr] = self.current_price
+                break
+            else:
+                print("failed get price", height, dayutcstr)
+                self.get_history_price()
+                self.history_prices = self.get_history_price2()
+                self.current_price = self.get_current_price()
+                self.current_utc = self.get_current_utc()
+                self.history_prices[str(self.current_utc)] = self.current_price
+                if dayutcstr in self.history_prices:
+                    price = self.history_prices[dayutcstr]
+                else:
+                    price = 0
+                break
+
+        return price
+    # get_price resolves the BTC price for the UTC day dayutc (height is only used in
+    # the failure log). It first consults the cached history_prices dict; if the key is
+    # the current day it falls back to the live price; otherwise it reloads both price
+    # caches from the database and retries, returning 0 when the day is still missing.
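The SOPR ratios that save_db derives below divide the USD value at disposal by the USD value at acquisition over the same set of spent outputs. A standalone toy calculation (not part of the patch) with two spent outputs makes the convention concrete:

    # Spent outputs: (value_btc, price_at_creation, price_at_spend)
    spent = [(1.0, 20000.0, 25000.0), (0.5, 30000.0, 25000.0)]
    sopr_buy = sum(v * p_in for v, p_in, _ in spent)     # 20000 + 15000 = 35000
    sopr_sell = sum(v * p_out for v, _, p_out in spent)  # 25000 + 12500 = 37500
    sopr = sopr_sell / sopr_buy if sopr_buy else 0       # ~1.071 -> aggregate profit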
+    def save_db(self, dayutc, blocktime):
+        #if dayutc != self.daily_date:
+        print("cmp", dayutc, self.daily_date)
+        start = time.time()
+        #self.daily_sumcsupply += (self.daily_csupply)
+        daily_profit_rate = self.daily_profit / self.daily_txs if self.daily_txs != 0 else 0
+        daily_sopr = self.daily_sopr_sell / self.daily_sopr_buy if self.daily_sopr_buy != 0 else 0
+        daily_sasopr = self.daily_asopr_sell / self.daily_asopr_buy if self.daily_asopr_buy != 0 else 0
+        daily_easopr = self.daily_easopr_sell / self.daily_easopr_buy if self.daily_easopr_buy != 0 else 0
+        daily_lthsopr = self.daily_lthsopr_sell / self.daily_lthsopr_buy if self.daily_lthsopr_buy != 0 else 0
+        daily_sthsopr = self.daily_sthsopr_sell / self.daily_sthsopr_buy if self.daily_sthsopr_buy != 0 else 0
+        self.daily_asol = self.daily_asol / self.daily_atxs if self.daily_atxs != 0 else 0
+        self.daily_eaasol = self.daily_eaasol / self.daily_atxs if self.daily_atxs != 0 else 0
+        #self.daily_sacdd = self.daily_cdd / self.daily_csupply if self.daily_csupply != 0 else 0
+        #self.daily_mvrv = self.daily_marketcap / self.daily_rcap if self.daily_rcap != 0 else 0
+        #liveliness = self.daily_sumcdd / self.daily_sumcsupply if self.daily_sumcsupply != 0 else 0
+        #ealiveliness = self.daily_sumeacdd / self.daily_sumcsupply if self.daily_sumcsupply != 0 else 0
+        #rplrate = self.daily_rprofit - self.daily_rloss
+        dormancy = self.daily_cdd / self.daily_volume if self.daily_volume != 0 else 0
+        #adormancy = dormancy / self.daily_csupply if self.daily_csupply != 0 else 0
+        self.daily_eavolume -= (self.daily_fees)
+        eadormancy = self.daily_eacdd / self.daily_eavolume if self.daily_eavolume != 0 else 0
+        #nupl = (self.daily_marketcap - self.daily_rcap) / self.daily_marketcap if self.daily_marketcap != 0 else 0
+
+        #self.daily_total_address = redisif.get_addr_cnt() # number of addresses
+
+        self.daily_height_end = self.height - 1 if self.height > self.daily_height_begin else self.daily_height_begin
+        dbif.update_to_dailyinds(blocktime, self.daily_height_begin, self.daily_height_end, daily_profit_rate,
+                                 self.daily_fees, self.daily_txs, 0, 0,
+                                 0, self.daily_active_address,
+                                 self.daily_send_address, self.daily_receive_address, self.daily_volume,
+                                 self.daily_eavolume, daily_sopr, daily_sasopr, daily_easopr, daily_lthsopr,
+                                 daily_sthsopr,
+                                 self.daily_asol, self.daily_eaasol, dormancy, 0, eadormancy,
+                                 self.daily_cdd, 0, self.daily_eacdd, self.daily_cdd_days1,
+                                 self.daily_cdd_days7, self.daily_cdd_days30, self.daily_cdd_days60,
+                                 self.daily_cdd_days90, self.daily_cdd_days180, self.daily_cdd_days365,
+                                 self.daily_cdd_days730, 0, 0,
+                                 0, 0, 0,
+                                 0, 0, 0, 0, 0,
+                                 self.daily_price, 0, 0, 0,
+                                 0, 0)
+
+        #v2
+        #self.daily_sumdays = (dayutc - 1231469665)/3600/24
+        #self.daily_sumdays = self.daily_sumcdd/self.daily_csupply
+        #if self.daily_csupply > 0:
+            #self.daily_realized_price = self.daily_rcap/self.daily_csupply
+        #if self.daily_sumdays > 0:
+            #self.daily_transferred_price = self.daily_sumvdd/(self.daily_sumdays*self.daily_csupply)
+        #self.daily_balanced_price = self.daily_realized_price - self.daily_transferred_price
+        if self.daily_fees > 0:
+            self.daily_frm = (self.daily_fees + self.daily_mint)/self.daily_fees
+        #if self.daily_sumdays > 0:
+            #self.daily_cvdd = self.daily_sumvdd/(self.daily_sumdays*6000000)
+
+        #daily_vp = self.daily_volume*self.daily_price
+        #if daily_vp > 0:
+        #if self.daily_volume > 0 and self.daily_price > 0:
+            #self.daily_nvt_ratio = self.daily_marketcap/self.daily_volume/self.daily_price
+
+        #if self.daily_marketcap > 0:
+            #self.daily_velocity = self.daily_volume*self.daily_price/self.daily_marketcap
+
+        dbif.update_to_dailyindsv2(blocktime, self.daily_height_begin, self.daily_height_end, self.daily_lth_volume, self.daily_frm, 0, 0, 0, 0, 0, 0)
+
+        #if dayutc != self.daily_date:
+            #self.stat_daily_reset()
+            #self.daily_date = dayutc
+            #self.daily_height_begin = self.height
+        print("save_db", f'cost:{time.time() - start:.4f}s')
+    # save_db turns the day's accumulators into derived indicators and flushes them: the
+    # profit rate (profitable txs / txs), the five SOPR ratios (sell-side USD value over
+    # buy-side USD value: overall, hour-adjusted, entity-adjusted, LTH and STH), the
+    # average spent-output lifespans, dormancy (CDD / volume) and its entity-adjusted
+    # variant after removing fees from eavolume, and the Fee Ratio Multiple
+    # (fees + issuance) / fees. Everything is written through dbif.update_to_dailyinds
+    # and update_to_dailyindsv2; the zero arguments are placeholders for indicators that
+    # are currently disabled.
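The Fee Ratio Multiple computed above is total miner revenue over fees alone. A quick worked instance (illustrative numbers, not part of the patch):

    daily_fees, daily_mint = 20.0, 450.0   # BTC: one day's fees and new issuance
    daily_frm = (daily_fees + daily_mint) / daily_fees if daily_fees > 0 else 0
    print(daily_frm)  # 23.5 -> miners earn 23.5x what fees alone would pay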
+    def stat_height_time(self, redisif):
+        tmp_height = 1
+        if self.rpc is None:
+            self.rpc = RPC(self.host, self.port, self.user, self.pwd)
+        while True:
+            blockstats = self.rpc.blockchain.get_block_stats(tmp_height)
+            blocktime = blockstats["time"]
+            redisif.set_block_time(tmp_height, blocktime)
+            print("cache block height, time", tmp_height, blocktime)
+            getblockcount = self.rpc.blockchain.get_block_count()
+            if tmp_height == getblockcount:
+                break
+            else:
+                tmp_height += 1
+    # stat_height_time backfills the height -> timestamp table: starting from height 1
+    # it fetches each block's stats over RPC, caches the block time in Redis via
+    # set_block_time, and stops once it reaches the node's current block count. This is
+    # what later lets get_block_time answer coin-age queries without hitting the node.
+    def stat_block(self, dbif, redisif, config):
+        self.redis = redisif
+        # self.stat_height_time(redisif)
+        self.stat_load(redisif, config)
+        if self.daily_date is None:
+            self.stat_reset()
+            #return
+
+        self.height = self.daily_height
+
+        self.height += 1
+        self.daily_height_begin = self.height
+        print("start")
+
+        while True:
+            start = time.time()
+            blockstats = self.rpc_cmd("getblockstats")
+            print("getblockstats", f'cost:{time.time()-start:.4f}s')
+            start = time.time()
+            #mempoolinfo = self.rpc_cmd("getmempoolinfo")
+            blockdetail = self.rpc_cmd("getblock")
+            print("getblock", f'cost:{time.time() - start:.4f}s')
+            block_start = time.time()
+            self.blocktime = blockdetail.get_time()
+            block_time2 = time.gmtime(self.blocktime)
+            daystr = time.strftime("%d %b %Y", block_time2)
+            dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
+            dayutcstr = str(dayutc)
+
+            if self.daily_date == 0:
+                self.daily_date = dayutc
+
+            #print("mempoolinfo", mempoolinfo, mempoolinfo["size"], float(mempoolinfo["total_fee"]))
+            #time.sleep(10)
+            #dbif.update_to_realtimeindsv2(self.blocktime, int(mempoolinfo["size"]), float(mempoolinfo["total_fee"]))
+            #break
+
+            #self.save_db(dayutc)
+            if dayutc != self.daily_date:
+                self.stat_daily_reset()
+                self.daily_date = dayutc
+                self.daily_height_begin = self.height
+
+            blocktxs = blockdetail.get_transactions()
+            self.height = blockdetail.get_height()
+            redisif.set_block_time(self.height, self.blocktime) # table for block height and time for later query
+
+            mint = blockstats["subsidy"] / 100000000
+            #self.daily_csupply += (mint)
+            #self.daily_sumcsupply += (self.daily_csupply)
+            self.daily_mint += (mint)
+
+            block_fees = (blockstats["totalfee"] / 100000000)
+            self.daily_fees += block_fees
+            self.daily_volume += (blockstats["total_out"] / 100000000)
+
+            self.daily_txs += (blockstats["txs"] - 1) # exclude coinbase tx
+
+            block_price = self.get_price(self.height, dayutc)
+            #self.daily_mintusd += (block_price * (mint+block_fees))
+
+            self.daily_price = block_price
+
+            #self.daily_marketcap = (self.daily_csupply * block_price)
+            # genesis_time = redisif.get_block_time(1)
+            '''genesis_time = 1231006505
+            days = (self.blocktime - genesis_time) / 3600 / 24
+            if days >= 155:
+                self.daily_lth_marketcap += (self.daily_csupply * block_price)
+            else:
+                self.daily_sth_marketcap += (self.daily_csupply * block_price)
+            '''
+            for tx in blocktxs:
+                txid = tx.get_txid()
+                vins = tx.get_vins()
+                vouts = tx.get_vouts()
+                vin_hexs = []
+                vin_addrs = []
+                vin_values = []
+                vin_dts = []
+
+                vin_volume = 0
+                vin_volume_change = 0
+
+                vin_days_change = 0
+
+                vin_cdd = 0
+                vin_cdd_change = 0
+
+                vin_rcap_change = 0
+
+                vin_sopr = 0
+
+                vin_asopr_diff = 0
+
+                vout_change_value = 0
+
+                if not tx.is_coinbase():
+                    for vin in vins:
+                        # print(self.height, "vin", vin, type(vin))
+                        if vin.is_prevout():
+                            prevout = vin["prevout"]
+                            prev_height = prevout["height"]
+                            prev_value = float(prevout["value"])
+                            prev_scriptpubkey = prevout["scriptPubKey"]
+                            #prev_type = prev_scriptpubkey["type"]
+                            prev_hex = prev_scriptpubkey["hex"]
prev_address = self.get_vin_address(prev_scriptpubkey, prev_height, txid) + prev_blocktime = redisif.get_block_time(prev_height) + + #redisif.save_addr(prev_address, -prev_value) + if not redisif.is_send_address(prev_address): + self.daily_send_address += 1 + if not redisif.is_active_address(prev_address): + self.daily_active_address += 1 + + days = (self.blocktime - prev_blocktime) / 3600 / 24 + vin_cdd += (prev_value * days) + self.stat_cdd(prev_value, days) + + if days >= 155: + self.daily_lth_volume += prev_value + + vin_addrs.append(prev_address) + vin_values.append(prev_value) + vin_dts.append(prev_blocktime) + vin_hexs.append(prev_hex) + + vin_volume += prev_value + vin_asopr_diff += ((self.blocktime - prev_blocktime) * prev_value) + + prevutc = self.get_day_utc(prev_blocktime) + prev_price = self.get_price(prev_height, prevutc) + vin_sopr += (prev_price * prev_value) + + #self.daily_sumvdd += (prev_value * days * prev_price) + + #self.daily_rcap -= (prev_price * prev_value) + + have_change = False + for vout in vouts: + scriptpubkey = vout.get_script_pubkey() + # vout address is same with vin address + if scriptpubkey["hex"] == prev_scriptpubkey["hex"]: + vin_rcap_change += (prev_value * prev_price) + vin_volume_change += prev_value + vout_change_value = float(vout.get_value()) + + days = (self.blocktime - prev_blocktime) / 3600 / 24 + vin_days_change += days + vin_cdd_change += (prev_value * days) + + have_change = True + break + if not have_change: + #self.daily_earcap -= (prev_price * prev_value) + self.daily_eacdd += (prev_value * days) + + self.daily_eavolume += (vin_volume - vout_change_value) + + vin_sopr_change = vin_sopr + #vin_change_price = 0 + if vin_rcap_change != 0: + if vin_volume_change != 0: + vin_change_price = vin_rcap_change / vin_volume_change + #self.daily_earcap -= (vin_rcap_change - (vin_change_price * vout_change_value)) + vin_sopr_change -= (vin_change_price * vout_change_value) + + if vin_cdd_change != 0: + if vin_volume_change != 0: + vin_change_days = vin_cdd_change / vin_volume_change + vin_cdd_change -= (vin_change_days * vout_change_value) + + #self.daily_sumeacdd += (vin_cdd - vin_cdd_change) + + self.daily_sopr_buy += vin_sopr + + self.daily_easopr_buy += vin_sopr_change + + if vin_asopr_diff >= 3600 * vin_volume: + self.daily_asopr_buy += vin_sopr + if vin_volume > 0: + self.daily_asol += (vin_cdd/vin_volume) + self.daily_eaasol += (vin_cdd / vin_volume) + if vin_volume_change > 0: + self.daily_eaasol -= (vin_cdd_change/vin_volume_change) + self.daily_atxs += 1 + + if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume: + self.daily_lthsopr_buy += vin_sopr + else: + self.daily_sthsopr_buy += vin_sopr + + vout_price = block_price + vout_volume = 0 + vout_volume_change = 0 + vout_sopr = 0 + vout_sopr_change = 0 + + for vout in vouts: + vout_value = float(vout.get_value()) + vout_volume += vout_value + scriptpubkey = vout.get_script_pubkey() + vout_type = scriptpubkey["type"] + vout_address = self.get_vout_address(scriptpubkey, self.height, txid) + vout_hex = scriptpubkey["hex"] + + #if not redisif.is_in_addr(vout_address): + #self.daily_new_address_volume += vout_value + #self.daily_new_address += 1 + #redisif.save_addr(vout_address, vout_value) + + if not redisif.is_receive_address(vout_address): + self.daily_receive_address += 1 + if not redisif.is_active_address(vout_address): + self.daily_active_address += 1 + + #self.daily_rcap += (vout_price * vout_value) + + vout_sopr += (vout_price * vout_value) + + have_change = False + for cmp in vin_hexs: + if 
cmp == vout_hex:
+                                vout_volume_change += vout_value
+                                have_change = True
+                                break
+                        if not have_change:
+                            #self.daily_earcap += (vout_price * vout_value)
+                            vout_sopr_change += (vout_price * vout_value)
+
+                        if self.height > 787556:
+                            if (vout_price * vout_value) >= self.rules["flag_big_vout"]:
+                                if vin_volume != 0:
+                                    days = vin_cdd / vin_volume
+                                    buyin = vin_sopr / vin_volume
+                                    sellout = vout_price
+                                    if buyin > 0:
+                                        profit = (sellout - buyin) / buyin
+                                        dbif.update_to_bigamountvout(self.blocktime, txid, \
+                                            vout_address, vout.get_n(), vout_type, \
+                                            vout_value, self.height, days, buyin, sellout, profit)
+
+                    self.daily_easopr_sell += vout_sopr_change
+
+                    self.daily_sopr_sell += vout_sopr
+                    if vin_asopr_diff > 3600 * vin_volume:
+                        self.daily_asopr_sell += vout_sopr
+
+                    if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume:
+                        self.daily_lthsopr_sell += vout_sopr
+                    else:
+                        self.daily_sthsopr_sell += vout_sopr
+
+                    if vin_volume != 0:
+                        #if block_price > (vin_sopr / vin_volume):
+                            #self.daily_rprofit += (vout_sopr - vin_sopr)
+                        #if block_price < (vin_sopr / vin_volume):
+                            #self.daily_rloss += (vin_sopr - vout_sopr)
+
+                        buyin = vin_sopr / vin_volume
+                        sellout = vout_sopr / vout_volume if vout_volume != 0 else 0
+                        if sellout > buyin:
+                            self.daily_profit += 1
+                else:
+                    for vout in vouts:
+                        vout_value = float(vout.get_value())
+                        scriptpubkey = vout.get_script_pubkey()
+                        vout_address = self.get_vout_address(scriptpubkey, self.height, txid)
+
+                        vout_price = block_price
+                        #self.daily_rcap += (vout_price * vout_value)
+                        #self.daily_earcap += (vout_price * vout_value)
+
+                        #if not redisif.is_in_addr(vout_address):
+                            #self.daily_new_address_volume += vout_value
+                            #self.daily_new_address += 1
+                        #redisif.save_addr(vout_address, vout_value)
+
+                        if not redisif.is_receive_address(vout_address):
+                            self.daily_receive_address += 1
+                        if not redisif.is_active_address(vout_address):
+                            self.daily_active_address += 1
+
+            self.save_db(dayutc, self.blocktime)
+            self.stat_save(redisif)
+            print("statblock", f'cost:{time.time() - block_start:.4f}s')
+            start = time.time()
+            self.rpc_cmd("getblockcount")
+            print("getblockcount", f'cost:{time.time() - start:.4f}s')
+    # stat_block is the main loop. It loads saved state (stat_load), then processes one
+    # block per iteration: fetch the block's stats and full detail over RPC, roll the
+    # daily accumulators over when the block's UTC day changes, record issuance, fees,
+    # volume and tx count, and walk every transaction. For each non-coinbase input it
+    # resolves the funding output's address, age and acquisition price (feeding CDD,
+    # lifespan and the SOPR buy side); for each output it resolves the receiving
+    # address, detects change returned to an input script (for the entity-adjusted
+    # variants), flags outputs whose USD value exceeds rules["flag_big_vout"] into
+    # bigamountvout, and accumulates the SOPR sell side. Each block ends with save_db,
+    # stat_save and a getblockcount call that either advances the height or waits for a
+    # new block.
+def init_config(filename):
+    fconfig = open(filename)
+    config = ujson.load(fconfig)
+    fconfig.close()
+    dbif = btc24h_db_if.DbIf(host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb")
+    redisif = btc24h_redis_if.RedisIf(host="127.0.0.1", port=6379, password="", db=0)
+    return dbif, redisif, config
+# init_config loads the JSON configuration from filename and builds the two backend
+# interfaces. Note that the MySQL (DbIf) and Redis (RedisIf) connection parameters are
+# hard-coded here rather than taken from the file; only the parsed JSON (notably the
+# "rules" section) comes from filename. It returns the (dbif, redisif, config) triple.
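For reference, the only key this script dereferences from the loaded JSON is "rules" (DEF_CONFIG_RULES), and the only rule used is flag_big_vout, the USD threshold for logging large outputs. A minimal btcstat.conf consistent with that usage might look like the following (threshold value purely illustrative):

    {
        "rules": {
            "flag_big_vout": 10000000
        }
    }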
+if __name__ == '__main__':
+    dbif, redisif, config = init_config("btcstat.conf")
+    #print("init_config")
+    redisif.reset_btc_data()
+    statif = StatIf()
+    #print("StatIf")
+    statif.stat_block(dbif, redisif, config)
+# Entry point: this block runs only when the script is executed directly, not
+# when it is imported as a module. It loads the configuration from
+# "btcstat.conf", resets the BTC data held in Redis, creates a StatIf instance
+# and starts the blockchain-processing loop via stat_block(dbif, redisif, config).
diff --git a/lyq/btc_price_fetcher.py b/lyq/btc_price_fetcher.py
new file mode 100644
index 0000000..57b688b
--- /dev/null
+++ b/lyq/btc_price_fetcher.py
@@ -0,0 +1,85 @@
+import time
+import requests
+import pymysql
+from datetime import datetime
+
+# MySQL configuration
+DB_CONFIG = {
+    "host": "127.0.0.1",
+    "user": "root",
+    "password": "2GS@bPYcgiMyL14A",
+    "database": "btcdb",
+    "port": 4423
+}
+
+# Current Unix timestamp
+def get_current_timestamp():
+    return int(time.time())
+
+# Fetch the BTC price from API 1 (example: Binance)
+def get_binance_price():
+    url = "https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT"
+    resp = requests.get(url, timeout=5)
+    resp.raise_for_status()
+    data = resp.json()
+    return float(data["price"])
+
+# Fetch the BTC price from API 2 (example: Coinbase)
+def get_coinbase_price():
+    url = "https://api.coinbase.com/v2/prices/spot?currency=USD"
+    resp = requests.get(url, timeout=5)
+    resp.raise_for_status()
+    data = resp.json()
+    return float(data["data"]["amount"])
+
+# Insert the price, or update it if this source already has a row
+def upsert_price(source, price, timestamp):
+    connection = pymysql.connect(**DB_CONFIG)
+    try:
+        with connection.cursor() as cursor:
+            # Check whether this source already exists
+            sql_check = "SELECT id FROM btc_realtime_prices WHERE source = %s"
+            cursor.execute(sql_check, (source,))
+            result = cursor.fetchone()
+
+            if result:
+                # Row exists: update it
+                sql_update = """
+                    UPDATE btc_realtime_prices
+                    SET price = %s, timestamp = %s
+                    WHERE source = %s
+                """
+                cursor.execute(sql_update, (price, timestamp, source))
+            else:
+                # No row yet: insert one
+                sql_insert = """
+                    INSERT INTO btc_realtime_prices (timestamp, source, price)
+                    VALUES (%s, %s, %s)
+                """
+                cursor.execute(sql_insert, (timestamp, source, price))
+        connection.commit()
+    finally:
+        connection.close()
+
+def main():
+    while True:
+        now_ts = get_current_timestamp()
+
+        try:
+            binance_price = get_binance_price()
+            print(f"Binance BTC Price: {binance_price}")
+            upsert_price("binance", binance_price, now_ts)
+        except Exception as e:
+            print(f"Failed to fetch Binance price: {e}")
+
+        try:
+            coinbase_price = get_coinbase_price()
+            print(f"Coinbase BTC Price: {coinbase_price}")
+            upsert_price("coinbase", coinbase_price, now_ts)
+        except Exception as e:
+            print(f"Failed to fetch Coinbase price: {e}")
+
+        time.sleep(60)  # run once per minute
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/lyq/btc_prices.py b/lyq/btc_prices.py
new file mode 100644
index 0000000..7c7b6d3
--- /dev/null
+++ b/lyq/btc_prices.py
@@ -0,0 +1,141 @@
+import time
+import requests
+import pymysql
+import ujson
+from datetime import datetime, timedelta
+
+# MySQL connection settings
+DB_CONFIG = {
+    "host": "127.0.0.1",
+    "user": "root",
+    "password": "2GS@bPYcgiMyL14A",
"database": "btcdb", + "port": 4423 +} + +# Nasdaq API Key +NASDAQ_API_KEY = "FZqXog4sR-b7cYnXcRVV" + +# 获取已存在的时间戳 +def get_existing_timestamps(): + connection = pymysql.connect(**DB_CONFIG) + existing_timestamps = set() + try: + with connection.cursor() as cursor: + cursor.execute("SELECT timestamp, source FROM btc_prices") + for row in cursor.fetchall(): + existing_timestamps.add((row[0], row[1])) + finally: + connection.close() + return existing_timestamps + +# 工具函数:将任意时间戳调整为北京时间当日 08:00 的时间戳 +def adjust_to_beijing_08am(timestamp): + dt = datetime.utcfromtimestamp(timestamp) + timedelta(hours=8) + dt_08am = datetime(dt.year, dt.month, dt.day, 8, 0, 0) + return int((dt_08am - timedelta(hours=8)).timestamp()) # 转回 UTC 存储 + +# Nasdaq 获取历史 BTC 美元价格 +def get_nasdaq_price(): + prices = {} + url = f'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU&api_key={NASDAQ_API_KEY}' + response = requests.get(url) + if response.status_code == 200: + data = ujson.loads(response.content) + if "datatable" in data and "data" in data["datatable"]: + for item in data["datatable"]["data"]: + daystr = item[1] + price = item[2] + dt = datetime.strptime(daystr, "%Y-%m-%d") + dt_08am_bj = datetime(dt.year, dt.month, dt.day, 8, 0, 0) + dt_08am_utc = dt_08am_bj - timedelta(hours=8) + prices[int(dt_08am_utc.timestamp())] = float(price) + print(f"Nasdaq 获取数据量: {len(prices)} 条") + return prices + +# CryptoCompare 获取 BTC 历史每日收盘价(时间强制统一为北京时间 08:00) +def get_cryptocompare_price(): + url = "https://min-api.cryptocompare.com/data/v2/histoday" + limit = 2000 + to_ts = int(time.time()) + prices = {} + while True: + params = { + "fsym": "BTC", + "tsym": "USD", + "limit": limit, + "toTs": to_ts + } + print(f"请求 CryptoCompare: {params}") + response = requests.get(url, params=params) + if response.status_code != 200: + print("请求失败:", response.status_code) + break + + data = ujson.loads(response.content) + if data["Response"] != "Success": + print("API 返回错误:", data.get("Message")) + break + + entries = data["Data"]["Data"] + if not entries: + break + + for entry in entries: + raw_ts = entry["time"] + price = entry["close"] + adjusted_ts = adjust_to_beijing_08am(raw_ts) + prices[adjusted_ts] = price + + earliest = entries[0]["time"] + if earliest <= 1279300000: # 大约2010年7月 + break + + to_ts = earliest - 1 + time.sleep(1) + + print(f"CryptoCompare 获取数据量: {len(prices)} 条") + return prices + +# 保存数据到数据库 +def save_to_database(data, source): + existing_timestamps = get_existing_timestamps() + connection = pymysql.connect(**DB_CONFIG) + new_data_count = 0 + try: + with connection.cursor() as cursor: + sql = """ + INSERT INTO btc_prices (timestamp, price, source) + VALUES (%s, %s, %s) + """ + for timestamp, price in data.items(): + if (timestamp, source) not in existing_timestamps: + try: + cursor.execute(sql, (timestamp, price, source)) + new_data_count += 1 + except pymysql.MySQLError as e: + print(f"插入错误: {e}") + continue + connection.commit() + print(f"成功存入 {new_data_count} 条新数据({source})") + finally: + connection.close() + +# 定时任务 +def fetch_and_store_data(): + print("========== 开始获取比特币价格数据 ==========") + + # Nasdaq + nasdaq_prices = get_nasdaq_price() + save_to_database(nasdaq_prices, "Nasdaq") + + # CryptoCompare + cc_prices = get_cryptocompare_price() + save_to_database(cc_prices, "CryptoCompare") + + print("========== 数据存储完成 ==========") + +if __name__ == "__main__": + while True: + fetch_and_store_data() + time.sleep(14400) # 每 4 小时执行一次 \ No newline at end of file diff --git a/lyq/btc_stats_qt.py 
b/lyq/btc_stats_qt.py new file mode 100644 index 0000000..e44635d --- /dev/null +++ b/lyq/btc_stats_qt.py @@ -0,0 +1,1219 @@ +# coding=utf-8 +import sys +import time +from easybitcoinrpc import RPC +from bitcoinutils.setup import setup +from bitcoinutils.script import Script +from bitcoinutils.keys import P2wpkhAddress, P2wshAddress, P2shAddress, PrivateKey, PublicKey, SegwitAddress, \ + P2pkhAddress +import requests +import ujson +from requests import Session +from requests.exceptions import ConnectionError, Timeout, TooManyRedirects +import db_if_qt +import redis_if_qt +import pymysql +DEF_CONFIG_RULES = "rules" +# 此脚本似乎是一个包含导入和一些默认配置的 Python 模块。以下是其内容的细分: +# +# 1. **导入语句**: +# - 'sys':提供对 Python 解释器使用或维护的某些变量以及与解释器交互的函数的访问。 +# - 'time':提供各种与时间相关的功能。 +# - 'easybitcoinrpc':它似乎是一个自定义模块或库,用于通过 RPC(远程过程调用)与 Bitcoin Core 进行交互。 +# - 'bitcoinutils':另一个用于与比特币交互的库,提供用于处理密钥、地址、脚本等的实用程序。 +# - 'requests':一个流行的 HTTP 库,用于发出请求。 +# - 'ujson':一个快速的JSON编码器和解码器,兼容Python的内置'json'模块。 +# - 'Session', 'ConnectionError', 'Timeout', 'TooManyRedirects':这些是“requests”库中用于处理 HTTP 请求和错误的特定组件。 +# +# 2. **默认配置**: +# - “DEF_CONFIG_RULES”:默认配置规则,设置为“规则”。这可能表示与规则相关的内容的默认值或配置。 +class StatIf: + def __init__(self, ip="127.0.0.1", port="8332", user="user", password="password"): + self.host = ip + self.port = port + self.user = user + self.pwd = password + + self.rpc = None + self.height = 0 + + self.pricedict = {} + setup('mainnet') + # 该类似乎是用于处理与比特币节点或区块链相关的统计数据的更大系统的一部分。以下是其属性和构造函数的细分:StatIf + # 特性: + # host:表示Bitcoin + # RPC服务器IP地址的字符串。默认值为 。"127.0.0.1" + # port:表示Bitcoin + # RPC服务器端口号的字符串。默认值为 。"8332" + # user:一个字符串,表示用于向Bitcoin + # RPC服务器进行身份验证的用户名。默认值为 。"user" + # pwd:一个字符串,表示用于使用BitcoinRPC服务器进行身份验证的密码。默认值为 。"password" + # rpc:初始化为 ,连接后将保存RPC客户端的实例。None + # height:初始化为 ,它将存储区块链的当前高度。0 + # 构造函数: + # 构造函数使用默认值或作为参数传递的值初始化属性。 + # 它还初始化属性,该属性似乎用于存储与价格相关的数据。pricedict + # 最后,它调用 ,这可能会设置与比特币主网络交互的环境。setup('mainnet') + # 此类作为处理统计信息和通过RPC与比特币节点交互的基础。通常会添加其他方法和功能来执行特定任务,例如检索区块信息、查询交易数据等。 + def get_vin_address(self, prev_scriptpubkey, prev_height, txid): + prev_type = prev_scriptpubkey["type"] + prev_address = None + if prev_type != "nulldata": + if isinstance(prev_scriptpubkey, dict): + if "address" in prev_scriptpubkey: + prev_address = prev_scriptpubkey["address"] + else: + if prev_scriptpubkey.is_address(): + prev_address = prev_scriptpubkey["address"] + if prev_address is None: + if prev_type == "pubkey": + temphex = prev_scriptpubkey["hex"] + try: + if temphex[2:4] == "04": + prev_address = PublicKey(temphex[2:132]).get_address(False).to_string() + elif temphex[2:4] == "02" or temphex[2:4] == "03": + prev_address = PublicKey(temphex[2:68]).get_address().to_string() + except: + print("decode address failed", str(prev_height), "txid", txid, "hex", temphex) + if prev_address is None: + prev_address = prev_scriptpubkey["hex"] + + return prev_address + # “StatIf”类中的“get_vin_address”方法似乎旨在检索与事务输入 (vin) 关联的地址。以下是其工作原理的细分: + # - ** 参数 **: + # - 'prev_scriptpubkey':输入正在花费的上一个事务输出(prevout)的scriptPubKey。 + # - 'prev_height':包含前一笔交易输出的区块的高度。 + # - 'txid':包含正在分析的输入的交易的交易ID。 + # - ** 功能性 **: + # - 它首先确定scriptPubKey ('prev_type') 的类型。 + # - 如果scriptPubKey类型不是“nulldata”(表示它不是非标准或不可花费的输出): + # - 它检查“prev_scriptpubkey”是否是字典,以及它是否包含“address”键。如果是这样,它会从那里检索地址。 + # - 如果“prev_scriptpubkey”不是字典或不包含“address”键,则会直接检查它是否为有效地址。 + # - 如果地址仍为“None”,则会将scriptPubKey类型视为“pubkey”,并尝试从公钥的十六进制表示形式派生地址。 + # - 如果所有检索地址的尝试都失败,则默认返回scriptPubKey的十六进制表示形式。 + # 此方法旨在用于识别交易记录输入的支出地址。它处理不同类型的 + # scriptPubKey,包括标准地址和公钥。如果遇到任何解码错误,它会打印一条指示失败的消息,以及相关信息,例如区块高度和事务ID。 + 
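+    # Illustrative sketch (not part of the original file): how the "pubkey"
+    # branch of get_vin_address above recovers an address. A bare P2PK
+    # scriptPubKey is <push> <pubkey> OP_CHECKSIG, i.e. hex "41" + 130 hex
+    # chars + "ac" for an uncompressed key, or "21" + 66 hex chars + "ac" for
+    # a compressed one; slicing off the 1-byte push opcode yields the raw
+    # public key, which bitcoinutils turns into a P2PKH address.
+    @staticmethod
+    def _example_p2pk_to_address(script_hex):
+        pub_hex = script_hex[2:]           # drop the push opcode byte
+        if pub_hex[:2] == "04":            # uncompressed key, 65 bytes
+            return PublicKey(pub_hex[:130]).get_address(False).to_string()
+        if pub_hex[:2] in ("02", "03"):    # compressed key, 33 bytes
+            return PublicKey(pub_hex[:66]).get_address().to_string()
+        return None                        # not a recognizable public key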
def get_vout_address(self, scriptpubkey, height, txid): + return self.get_vin_address(scriptpubkey, height, txid) + # “StatIf”类中的“get_vout_address”方法似乎是“get_vin_address”方法的简单包装器。它实质上将检索与事务输出 (vout) 关联的地址的任务委托给“get_vin_address”方法。 + # 以下是它的作用: + # - ** 参数 **: + # - 'scriptpubkey':此参数表示当前正在使用的输出的scriptPubKey。 + # - 'height':包含输出的块的高度。 + # - 'txid':当前输出的事务ID。 + # - ** 功能性 **: + # - 它只是使用相同的参数('scriptpubkey', 'height', 'txid')调用'get_vin_address'方法。 + # - ** 返回 **: + # - 它返回“get_vin_address”方法返回的任何内容。 + # 此方法在检索事务输入和输出关联的添加时提供处理事务输入和输出的一致性 + def get_history_price(self, batch_size=5000): + #pricedict = {} + # response_price = requests.get( + # 'https://data.nasdaq.com/api/v3/datasets/BCHAIN/MKPRU.json?api_key=FZqXog4sR-b7cYnXcRVV') + # if response_price.status_code == 200: + # priceweb = ujson.loads(response_price.content) + # if "dataset" in priceweb: + # priceset = priceweb["dataset"] + # if "data" in priceset: + # pricedata = priceset["data"] + # for price in pricedata: + # daystr = price[0] + # p = price[1] + # dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d")) + # self.pricedict[str(int(dayutc))] = float(p) + # if len(self.pricedict) > 0: + # return self.pricedict + # + # response_price.close() + #return self.pricedict + """获取数据库中的 Nasdaq 数据,存入字典""" + db_config = { + "host": "192.168.194.216", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict = {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = "SELECT timestamp, price FROM btc_prices WHERE source = 'Nasdaq' ORDER BY timestamp LIMIT %s OFFSET %s" + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 最后一页读取完成 + + return self.pricedict + # “get_history_price”方法似乎从特定数据源中检索历史比特币价格,并将其存储在类实例 (“self.pricedict”) 的“pricedict”属性中。以下是该方法的细分: + # - ** 功能性 **: + # - 它向特定URL (''https: // data.nasdaq.com / api / v3 / datasets / BCHAIN / MKPRU.json?api_key = FZqXog4sR - b7cYnXcRVV'') 发送GET请求以获取历史比特币价格数据。 + # - 如果响应状态代码为“200”(表示成功): + # - 它使用“ujson.loads”方法解析JSON响应。 + # - 它从解析的JSON响应中检索相关数据并对其进行迭代。 + # - 对于数据中的每个条目,它提取日期和价格,将日期转换为Unix时间戳,并将价格存储在'pricedict'字典属性中,以Unix时间戳为键。 + # - 最后,如果'pricedict'在处理数据后不为空,则返回字典。 + # - ** 返回 **: + # - 如果成功,它将返回“pricedict”字典,其中包含由Unix时间戳索引的历史比特币价格。 + # - ** 注意 **: + # - 方法末尾注释掉的代码 ('#return self.pricedict') 似乎是返回'pricedict'的替代方法,但目前已被禁用。您可以选择取消注释并使用它,而不是显式返回。 + def get_history_price2(self, batch_size=5000): + # #pricedict = {} + # dayt = time.gmtime() + # daystr = time.strftime("%Y", dayt) + # year = int(daystr) + # end_year = year + # while True: + # url = "" + # if end_year != year: + # start_year = end_year + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + # else: + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?after=" + str( + # year) + "-01-01&order=descending" + # + # if end_year != year: + # url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending" + # header_set = {} + # header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY" + # # header_set["Content-Type"] = "application/json" + # print(header_set, url) + # response_price = 
requests.get(url, headers=header_set) + # # print(response_price) + # if response_price.status_code == 200: + # # print(response_price.content) + # priceweb = ujson.loads(response_price.content) + # if "data" in priceweb: + # priceset = priceweb["data"] + # if "values" in priceset: + # valueset = priceset["values"] + # if valueset is not None: + # for supply in valueset: + # dayutc = int(supply[0] / 1000) + # s = supply[1] + # ret_time = time.gmtime(dayutc) + # ret_daystr = time.strftime("%d %b %Y", ret_time) + # ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y"))) + # self.pricedict[str(ret_dayutc)] = float(s) + # # print(s, dayutc, pricedict[str(dayutc)]) + # # break + # else: + # break + # else: + # break + # end_year -= 1 + # time.sleep(2) + # self.pricedict[str(1308528000)]=float(15.5) + # self.pricedict[str(1308614400)] = float(15.05) + # self.pricedict[str(1308700800)] = float(15.39) + # self.pricedict[str(1308787200)] = float(16.7501) + # self.pricedict[str(1308873600)] = float(17.6) + # self.pricedict[str(1308960000)] = float(16.95) + # print(self.pricedict) + # return self.pricedict + + """获取数据库中的 Messari 数据,存入字典""" + db_config = { + "host": "192.168.194.216", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict = {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = """ + SELECT timestamp, price + FROM btc_prices + WHERE source = 'CryptoCompare' + ORDER BY timestamp + LIMIT %s OFFSET %s + """ + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 数据已全部读取 + + return self.pricedict + + # “get_history_price2”方法似乎从另一个数据源检索历史比特币价格,并将它们存储在类实例 ('self.pricedict') 的“pricedict”属性中。以下是该方法的说明: + # - ** 功能性 **: + # - 它初始化与当前日期相关的变量('dayt'、'daystr'、'year'、'end_year')。 + # - 它进入一个循环,从当前年份开始,回到过去,在几年内迭代。 + # - 它根据当前年份构建一个URL,并发送一个GET请求,以从MessariAPI检索历史比特币价格数据。 + # - 如果响应状态码为“200”(表示成功),则解析JSON响应。 + # - 它从JSON响应中提取相关数据(时间戳和价格),并将时间戳转换为Unix时间戳。 + # - 它将价格存储在“pricedict”字典属性中,并以Unix时间戳为键。 + # - 循环继续,直到检索到所有所需年份的数据或遇到错误。 + # - 它打印“pricedict”用于调试目的并返回它。 + # - ** 返回 **: + # - 该方法返回包含由Unix时间戳索引的历史比特币价格的“pricedict”字典。 + # - ** 注意 **: + # - 此方法从MessariAPI获取数据,其结构类似于从Nasdaq API获取数据的“get_history_price”。这两种方法都用于检索历史比特币价格,但来源不同。 + def get_current_utc(self): + curtime = time.gmtime(time.time()) + daystr = time.strftime("%d %b %Y", curtime) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + return dayutc + # “get_current_utc”方法检索当前UTC时间戳。以下是其功能的细分: + # - ** 功能性 **: + # - 它使用'time'模块使用'time.gmtime(time.time())'获取UTC中的当前时间。 + # - 它将当前时间格式化为一个字符串,以“DDMonYYYY”的格式表示日期(例如,“2024年3月15日”)。 + # - 它使用“time.mktime”将格式化的字符串转换回Unix时间戳。 + # - 最后,它返回以UTC表示当前日期的Unix时间戳。 + # - ** 返回 **: + # - 该方法返回一个整数,以UTC表示当天的时间戳。 + # 此方法可用于获取UTC中的当前时间戳,以用于各种目的,例如日志记录、时间戳事件或跨不同系统同步操作。 + def get_current_price(self): + price = 0 + DB_CONFIG = { + "host": "192.168.194.216", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423 + } + connection = pymysql.connect(**DB_CONFIG) + try: + with connection.cursor() as cursor: + for source in ("binance", "coinbase"): + cursor.execute(""" + SELECT price FROM btc_realtime_prices + WHERE source=%s + 
ORDER BY timestamp DESC
+                        LIMIT 1
+                    """, (source,))
+                    row = cursor.fetchone()
+                    if row:
+                        price = float(row[0])
+                        break
+        finally:
+            connection.close()
+        return price
+        # price = 0
+        # try:
+        #     response_price = requests.get(
+        #         'https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT')
+        #     prices = ujson.loads(response_price.text)
+        #     price = float(prices["price"])
+        #     print(response_price.text, price)
+        #     response_price.close()
+        #     # print("price", price)
+        #     return price
+        # except:
+        #     response_price = requests.get("https://api.coinpaprika.com/v1/tickers/btc-bitcoin")
+        #     prices = ujson.loads(response_price.text)
+        #     price = float(prices["quotes"]["USD"]["price"])
+        #
+        #     response_price.close()
+        #     return price
+    # "get_current_price" returns the latest BTC/USD price recorded by the
+    # realtime fetcher: it queries the btc_realtime_prices table for the newest
+    # row per source, preferring "binance" and falling back to "coinbase".
+    # (The commented-out block above is the older implementation, which called
+    # the Binance and Coinpaprika HTTP APIs directly.)
+    def get_day_utc(self, utc_time):
+        t = time.gmtime(utc_time)
+        daystr = time.strftime("%d %b %Y", t)
+        dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
+        return dayutc
+    # "get_day_utc" truncates a UTC timestamp to 00:00:00 of its day by
+    # formatting it as "DD Mon YYYY" and parsing that string back into a
+    # timestamp. Note that time.mktime interprets the string in local time, so
+    # this round-trip assumes the host clock is set to UTC.
+    def rpc_cmd(self, cmd):
+        if self.rpc is None:
+            self.rpc = RPC(self.host, self.port, self.user, self.pwd)
+
+        while True:
+            try:
+                if cmd == "getblockstats":
+                    getblockstats = self.rpc.blockchain.get_block_stats(self.height)
+                    return getblockstats
+                elif cmd == "getblock":
+                    getblock = self.rpc.blockchain.get_block(self.height, 3)
+                    return getblock
+                elif cmd == "getblockcount":
+                    getblockcount = self.rpc.blockchain.get_block_count()
+                    if self.height == getblockcount:
+                        time.sleep(30)
+                    else:
+                        self.height += 1
+                        print("next height", self.height)
+                        return None
+                elif cmd == "getmempoolinfo":
+                    getmempoolinfo = self.rpc.blockchain.get_mempool_info()
+                    return getmempoolinfo
+                else:
+                    return None
+            except:
+                time.sleep(1)
+                self.rpc = RPC(self.host, self.port, self.user, self.pwd)
+    # "rpc_cmd" wraps the RPC calls used by this script. It lazily opens the
+    # connection, dispatches on cmd ("getblockstats", "getblock",
+    # "getblockcount", "getmempoolinfo"), and on any exception sleeps one
+    # second, reconnects and retries. For "getblockcount" it sleeps 30 seconds
+    # while the local height has caught up with the chain tip, and otherwise
+    # advances self.height by one.
+    def stat_load(self, redisif, config):
+        self.rules = config[DEF_CONFIG_RULES]
+        self.get_history_price()
+        self.history_prices = 
self.get_history_price2() + self.current_price = self.get_current_price() + self.current_utc = self.get_current_utc() + self.history_prices[str(self.current_utc)] = self.current_price + self.daily_date = redisif.get_btc_data("daily_date") + if self.daily_date is None: + self.stat_reset() + return + + self.daily_date = int(redisif.get_btc_data("daily_date")) + self.daily_height = int(redisif.get_btc_data("daily_height").decode("utf-8")) + self.height = self.daily_height + self.daily_height_begin = int(redisif.get_btc_data("daily_height_begin").decode("utf-8")) + self.daily_height_end = int(redisif.get_btc_data("daily_height_end").decode("utf-8")) + self.daily_date_string = redisif.get_btc_data("daily_date_string").decode("utf-8") + self.daily_profit = float(redisif.get_btc_data("daily_profit").decode("utf-8")) + self.daily_fees = float(redisif.get_btc_data("daily_fees").decode("utf-8")) + self.daily_txs = int(redisif.get_btc_data("daily_txs").decode("utf-8")) + self.daily_new_address = int(redisif.get_btc_data("daily_new_address").decode("utf-8")) + self.daily_total_address = int(redisif.get_btc_data("daily_total_address").decode("utf-8")) + self.daily_new_address_volume = float(redisif.get_btc_data("daily_new_address_volume").decode("utf-8")) + self.daily_active_address = int(redisif.get_btc_data("daily_active_address").decode("utf-8")) + self.daily_send_address = int(redisif.get_btc_data("daily_send_address").decode("utf-8")) + self.daily_receive_address = int(redisif.get_btc_data("daily_receive_address").decode("utf-8")) + self.daily_volume = float(redisif.get_btc_data("daily_volume").decode("utf-8")) + self.daily_eavolume = float(redisif.get_btc_data("daily_eavolume").decode("utf-8")) + self.daily_asol = float(redisif.get_btc_data("daily_asol").decode("utf-8")) + self.daily_eaasol = float(redisif.get_btc_data("daily_eaasol").decode("utf-8")) + self.daily_atxs = float(redisif.get_btc_data("daily_atxs").decode("utf-8")) + self.daily_sopr_buy = float(redisif.get_btc_data("daily_sopr_buy").decode("utf-8")) + self.daily_asopr_buy = float(redisif.get_btc_data("daily_asopr_buy").decode("utf-8")) + self.daily_easopr_buy = float(redisif.get_btc_data("daily_easopr_buy").decode("utf-8")) + self.daily_lthsopr_buy = float(redisif.get_btc_data("daily_lthsopr_buy").decode("utf-8")) + self.daily_sthsopr_buy = float(redisif.get_btc_data("daily_sthsopr_buy").decode("utf-8")) + self.daily_sopr_sell = float(redisif.get_btc_data("daily_sopr_sell").decode("utf-8")) + self.daily_asopr_sell = float(redisif.get_btc_data("daily_asopr_sell").decode("utf-8")) + self.daily_easopr_sell = float(redisif.get_btc_data("daily_easopr_sell").decode("utf-8")) + self.daily_lthsopr_sell = float(redisif.get_btc_data("daily_lthsopr_sell").decode("utf-8")) + self.daily_sthsopr_sell = float(redisif.get_btc_data("daily_sthsopr_sell").decode("utf-8")) + self.daily_cdd = float(redisif.get_btc_data("daily_cdd").decode("utf-8")) + self.daily_sacdd = float(redisif.get_btc_data("daily_sacdd").decode("utf-8")) + self.daily_eacdd = float(redisif.get_btc_data("daily_eacdd").decode("utf-8")) + self.daily_cdd_days1 = float(redisif.get_btc_data("daily_cdd_days1").decode("utf-8")) + self.daily_cdd_days7 = float(redisif.get_btc_data("daily_cdd_days7").decode("utf-8")) + self.daily_cdd_days30 = float(redisif.get_btc_data("daily_cdd_days30").decode("utf-8")) + self.daily_cdd_days60 = float(redisif.get_btc_data("daily_cdd_days60").decode("utf-8")) + self.daily_cdd_days90 = float(redisif.get_btc_data("daily_cdd_days90").decode("utf-8")) + 
self.daily_cdd_days180 = float(redisif.get_btc_data("daily_cdd_days180").decode("utf-8")) + self.daily_cdd_days365 = float(redisif.get_btc_data("daily_cdd_days365").decode("utf-8")) + self.daily_cdd_days730 = float(redisif.get_btc_data("daily_cdd_days730").decode("utf-8")) + self.daily_csupply = float(redisif.get_btc_data("daily_csupply").decode("utf-8")) + self.daily_mintusd = float(redisif.get_btc_data("daily_mintusd").decode("utf-8")) + self.daily_sumcsupply = float(redisif.get_btc_data("daily_sumcsupply").decode("utf-8")) + self.daily_sumcdd = float(redisif.get_btc_data("daily_sumcdd").decode("utf-8")) + self.daily_sumeacdd = float(redisif.get_btc_data("daily_sumeacdd").decode("utf-8")) + self.daily_rprofit = float(redisif.get_btc_data("daily_rprofit").decode("utf-8")) + self.daily_rloss = float(redisif.get_btc_data("daily_rloss").decode("utf-8")) + self.daily_marketcap = float(redisif.get_btc_data("daily_marketcap").decode("utf-8")) + self.daily_rcap = float(redisif.get_btc_data("daily_rcap").decode("utf-8")) + self.daily_earcap = float(redisif.get_btc_data("daily_earcap").decode("utf-8")) + self.daily_mvrv = float(redisif.get_btc_data("daily_mvrv").decode("utf-8")) + '''self.daily_lth_marketcap = float(redisif.get_btc_data("daily_lth_marketcap").decode("utf-8")) + self.daily_lth_rcap = float(redisif.get_btc_data("daily_lth_rcap").decode("utf-8")) + self.daily_lth_mvrv = float(redisif.get_btc_data("daily_lth_mvrv").decode("utf-8")) + self.daily_sth_marketcap = float(redisif.get_btc_data("daily_sth_marketcap").decode("utf-8")) + self.daily_sth_rcap = float(redisif.get_btc_data("daily_sth_rcap").decode("utf-8")) + self.daily_sth_mvrv = float(redisif.get_btc_data("daily_sth_mvrv").decode("utf-8"))''' + self.daily_nupl = float(redisif.get_btc_data("daily_nupl").decode("utf-8")) + #self.daily_uprofit = float(redisif.get_btc_data("daily_uprofit").decode("utf-8")) + #self.daily_uloss = float(redisif.get_btc_data("daily_uloss").decode("utf-8")) + #self.daily_lthnupl = float(redisif.get_btc_data("daily_lthnupl").decode("utf-8")) + #self.daily_sthnupl = float(redisif.get_btc_data("daily_sthnupl").decode("utf-8")) + self.daily_price = self.get_price(self.height, self.daily_date) + + #v2 + self.daily_mint = float(redisif.get_btc_data("daily_mint").decode("utf-8")) + self.daily_lth_volume = float(redisif.get_btc_data("daily_lth_volume").decode("utf-8")) + self.daily_frm = float(redisif.get_btc_data("daily_frm").decode("utf-8")) + self.daily_cvdd = float(redisif.get_btc_data("daily_cvdd").decode("utf-8")) + self.daily_nvt_ratio = float(redisif.get_btc_data("daily_nvt_ratio").decode("utf-8")) + self.daily_balanced_price = float(redisif.get_btc_data("daily_balanced_price").decode("utf-8")) + self.daily_velocity = float(redisif.get_btc_data("daily_velocity").decode("utf-8")) + self.daily_mempool_volume = float(redisif.get_btc_data("daily_mempool_volume").decode("utf-8")) + self.daily_realized_price = float(redisif.get_btc_data("daily_realized_price").decode("utf-8")) + self.daily_transferred_price = float(redisif.get_btc_data("daily_transferred_price").decode("utf-8")) + #v2 + self.daily_sumvdd = float(redisif.get_btc_data("daily_sumvdd").decode("utf-8")) + self.daily_sumdays = float(redisif.get_btc_data("daily_sumdays").decode("utf-8")) + # “stat_load”方法似乎负责从Redis数据库加载各种统计信息和历史数据。以下是它的作用的细分: + # - ** 参数 **: + # - 'redisif':提供Redis数据库接口功能的类的实例。 + # - 'config':包含配置参数(包括规则)的字典。 + # - ** 功能性 **: + # - 从提供的“config”字典中加载配置规则。 + # - 使用“get_history_price”和“get_history_price2”方法从外部来源检索历史价格数据。 + # - 
分别使用“get_current_price”和“get_current_utc”方法检索当前价格和当前UTC时间。 + # - 使用当前价格和UTC时间更新历史价格数据。 + # - 使用特定键从Redis数据库中检索各种每日统计数据和值,并将它们存储在相应的实例属性中。 + # - 根据需要对检索到的数据执行类型转换。 + # - 此方法似乎可以同时处理第1版和第2版的统计数据,如注释掉的部分所示。 + # - ** 返回 **: + # - 此方法没有显式返回值。 + # 总体而言,“stat_load”是一种关键的初始化方法,用于加载在应用程序上下文中进一步分析或处理所需的相关数据和统计数据 + def stat_save(self, redisif): + redisif.set_btc_data("daily_date", self.daily_date) + self.daily_height = self.height + redisif.set_btc_data("daily_height", self.daily_height) + redisif.set_btc_data("daily_height_begin", self.daily_height_begin) + redisif.set_btc_data("daily_height_end", self.daily_height_end) + redisif.set_btc_data("daily_date_string", self.daily_date_string) + redisif.set_btc_data("daily_profit", self.daily_profit) + redisif.set_btc_data("daily_fees", self.daily_fees) + redisif.set_btc_data("daily_txs", self.daily_txs) + redisif.set_btc_data("daily_new_address", self.daily_new_address) + redisif.set_btc_data("daily_total_address", self.daily_total_address) + redisif.set_btc_data("daily_new_address_volume", self.daily_new_address_volume) + redisif.set_btc_data("daily_active_address", self.daily_active_address) + redisif.set_btc_data("daily_send_address", self.daily_send_address) + redisif.set_btc_data("daily_receive_address", self.daily_receive_address) + redisif.set_btc_data("daily_volume", self.daily_volume) + redisif.set_btc_data("daily_eavolume", self.daily_eavolume) + redisif.set_btc_data("daily_asol", self.daily_asol) + redisif.set_btc_data("daily_eaasol", self.daily_eaasol) + redisif.set_btc_data("daily_atxs", self.daily_atxs) + redisif.set_btc_data("daily_sopr_buy", self.daily_sopr_buy) + redisif.set_btc_data("daily_asopr_buy", self.daily_asopr_buy) + redisif.set_btc_data("daily_easopr_buy", self.daily_easopr_buy) + redisif.set_btc_data("daily_lthsopr_buy", self.daily_lthsopr_buy) + redisif.set_btc_data("daily_sthsopr_buy", self.daily_sthsopr_buy) + redisif.set_btc_data("daily_sopr_sell", self.daily_sopr_sell) + redisif.set_btc_data("daily_asopr_sell", self.daily_asopr_sell) + redisif.set_btc_data("daily_easopr_sell", self.daily_easopr_sell) + redisif.set_btc_data("daily_lthsopr_sell", self.daily_lthsopr_sell) + redisif.set_btc_data("daily_sthsopr_sell", self.daily_sthsopr_sell) + redisif.set_btc_data("daily_cdd", self.daily_cdd) + redisif.set_btc_data("daily_sacdd", self.daily_sacdd) + redisif.set_btc_data("daily_eacdd", self.daily_eacdd) + redisif.set_btc_data("daily_cdd_days1", self.daily_cdd_days1) + redisif.set_btc_data("daily_cdd_days7", self.daily_cdd_days7) + redisif.set_btc_data("daily_cdd_days30", self.daily_cdd_days30) + redisif.set_btc_data("daily_cdd_days60", self.daily_cdd_days60) + redisif.set_btc_data("daily_cdd_days90", self.daily_cdd_days90) + redisif.set_btc_data("daily_cdd_days180", self.daily_cdd_days180) + redisif.set_btc_data("daily_cdd_days365", self.daily_cdd_days365) + redisif.set_btc_data("daily_cdd_days730", self.daily_cdd_days730) + redisif.set_btc_data("daily_csupply", self.daily_csupply) + redisif.set_btc_data("daily_mintusd", self.daily_mintusd) + redisif.set_btc_data("daily_sumcsupply", self.daily_sumcsupply) + redisif.set_btc_data("daily_sumcdd", self.daily_sumcdd) + redisif.set_btc_data("daily_sumeacdd", self.daily_sumeacdd) + redisif.set_btc_data("daily_rprofit", self.daily_rprofit) + redisif.set_btc_data("daily_rloss", self.daily_rloss) + redisif.set_btc_data("daily_marketcap", self.daily_marketcap) + redisif.set_btc_data("daily_rcap", self.daily_rcap) + redisif.set_btc_data("daily_earcap", self.daily_earcap) + 
redisif.set_btc_data("daily_mvrv", self.daily_mvrv) + '''redisif.set_btc_data("daily_lth_marketcap", self.daily_lth_marketcap) + redisif.set_btc_data("daily_lth_rcap", self.daily_lth_rcap) + redisif.set_btc_data("daily_lth_mvrv", self.daily_lth_mvrv) + redisif.set_btc_data("daily_sth_marketcap", self.daily_sth_marketcap) + redisif.set_btc_data("daily_sth_rcap", self.daily_sth_rcap) + redisif.set_btc_data("daily_sth_mvrv", self.daily_sth_mvrv)''' + redisif.set_btc_data("daily_nupl", self.daily_nupl) + #redisif.set_btc_data("daily_uprofit", self.daily_uprofit) + #redisif.set_btc_data("daily_uloss", self.daily_uloss) + #redisif.set_btc_data("daily_lthnupl", self.daily_lthnupl) + #redisif.set_btc_data("daily_sthnupl", self.daily_sthnupl) + + #v2 + redisif.set_btc_data("daily_mint", self.daily_mint) + redisif.set_btc_data("daily_lth_volume", self.daily_lth_volume) + redisif.set_btc_data("daily_frm", self.daily_frm) + redisif.set_btc_data("daily_cvdd", self.daily_cvdd) + redisif.set_btc_data("daily_nvt_ratio", self.daily_nvt_ratio) + redisif.set_btc_data("daily_balanced_price", self.daily_balanced_price) + redisif.set_btc_data("daily_velocity", self.daily_velocity) + redisif.set_btc_data("daily_mempool_volume", self.daily_mempool_volume) + redisif.set_btc_data("daily_realized_price", self.daily_realized_price) + redisif.set_btc_data("daily_transferred_price", self.daily_transferred_price) + redisif.set_btc_data("daily_sumvdd", self.daily_sumvdd) + redisif.set_btc_data("daily_sumdays", self.daily_sumdays) + # stat_save”方法负责将各种统计信息和值保存到Redis数据库。下面是该方法的每个部分的作用: + # - ** 参数 **: + # - 'redisif':提供Redis数据库接口功能的类的实例。 + # - ** 功能性 **: + # - 在Redis数据库中设置各种键值对,对应不同的每日统计数据和值。 + # - 该方法遍历每个统计量,并调用'redisif'对象的'set_btc_data'方法,将值保存在相应的键下。 + # - 注释掉的部分表示存在以前包含但当前未保存的其他统计信息或值。 + # - ** 返回 **: + # - 此方法没有显式返回值。 + # 总体而言,“stat_save”是对“stat_load”方法的补充,允许在处理或分析后将更新的统计数据和值保存回Redis数据库 + def stat_reset(self): + self.daily_date = 0 # working date + self.daily_height = 1 # working height, ref. 
+    def stat_reset(self):
+        self.daily_date = 0  # working date
+        self.daily_height = 1  # working height, ref. 747376
+        self.daily_date_string = ""  # working date string
+
+        self.daily_csupply = 0  # circulating supply
+        self.daily_sumcsupply = 0  # cumulative circulating supply, for liveliness
+        self.daily_sumcdd = 0  # cumulative coin days destroyed
+        self.daily_sumeacdd = 0  # cumulative coin days destroyed (Entity-Adjusted)
+        self.daily_marketcap = 0  # market capitalization
+        self.daily_rcap = 0  # Realized capitalization
+        self.daily_earcap = 0  # Realized capitalization (Entity-Adjusted)
+        '''
+        self.daily_lth_marketcap = 0  # Long Term Holder market capitalization
+        self.daily_lth_rcap = 0  # Long Term Holder Realized capitalization
+        self.daily_sth_marketcap = 0  # Short Term Holder market capitalization
+        self.daily_sth_rcap = 0  # Short Term Holder Realized capitalization
+        '''
+        #self.daily_uprofit = 0  # Unrealized Profit
+        #self.daily_uloss = 0  # Unrealized Loss
+        #self.daily_lthnupl = 0  # Long Term Holder NUPL
+        #self.daily_sthnupl = 0  # Short Term Holder NUPL
+
+        self.stat_daily_reset()
+
+        self.daily_rprofit = 0  # realized profit
+        self.daily_rloss = 0  # realized loss
+
+        #v2
+        self.daily_sumvdd = 0
+        self.daily_sumdays = 0
+    # "stat_reset" returns the cumulative statistics (supply, capitalization,
+    # coin days destroyed, the v2 sums) to a clean initial state and calls
+    # "stat_daily_reset" for the per-day counters; the commented-out LTH/STH
+    # and unrealized-PnL fields are not used in the current implementation.
+    def stat_daily_reset(self):
+        self.daily_profit = 0  # Number of UTXOs in Profit
+        self.daily_fees = 0  # block fees each day
+        self.daily_txs = 0  # block txs exclude coinbase transaction
+        self.daily_new_address = 0  # number of new addresses
+        self.daily_total_address = self.redis.get_addr_cnt()  # number of addresses (was the bare global "redisif", which only resolves when run as a script)
+        self.daily_new_address_volume = 0  # volume of new addresses
+        self.daily_active_address = 0  # number of active addresses
+        self.daily_send_address = 0  # number of sending addresses
+        self.daily_receive_address = 0  # number of receiving addresses
+        self.daily_volume = 0  # volume for each day
+        self.daily_eavolume = 0  # volume for each day (Entity-Adjusted)
+        self.daily_asol = 0  # Average Spent Output Lifespan
+        self.daily_eaasol = 0  # Average Spent Output Lifespan (Entity-Adjusted)
+        self.daily_atxs = 0  # exclude transactions < 1 hour
+        self.daily_sopr_buy = 0  # Spent Output Profit Ratio for buyin
+        self.daily_asopr_buy = 0  # Spent Output Profit Ratio (exclude < 1 hour) for buyin
+        self.daily_easopr_buy = 0  # Spent Output Profit Ratio (Entity-Adjusted) for buyin
+        self.daily_lthsopr_buy = 0  # Long-Term Holder SOPR for buyin
+        self.daily_sthsopr_buy = 0  # Short-Term Holder SOPR for buyin
+        self.daily_sopr_sell = 0  # Spent Output Profit Ratio for sellout
+        self.daily_asopr_sell = 0  # Spent Output Profit Ratio (exclude < 1 hour) for sellout
+        self.daily_easopr_sell = 0  # Spent Output Profit Ratio (Entity-Adjusted) for sellout
+        self.daily_lthsopr_sell = 0  # Long-Term Holder SOPR for sellout
+        self.daily_sthsopr_sell = 0  # Short-Term Holder SOPR for sellout
+        self.daily_cdd = 0  # Coin Days Destroyed
+        self.daily_sacdd = 0  # Supply-Adjusted CDD
+        self.daily_eacdd = 0  # Coin Days Destroyed (Entity-Adjusted)
+        self.daily_cdd_days1 = 0  # cdd < 1 day
+        self.daily_cdd_days7 = 0
+        self.daily_cdd_days30 = 0
+        self.daily_cdd_days60 = 0
+        self.daily_cdd_days90 = 0
+        self.daily_cdd_days180 = 0
+        self.daily_cdd_days365 = 0
+        self.daily_cdd_days730 = 0
+
+        self.daily_mintusd = 0  # daily coin issuance (in USD), for Puell Multiple
+
+        self.daily_mvrv = 0  # market-value-to-realized-value ratio
+        self.daily_lth_mvrv = 0  # Long Term Holder MVRV
+        self.daily_sth_mvrv = 0  # Short Term Holder MVRV
+
+        self.daily_nupl = 0  # Net Unrealized Profit/Loss
+
+        self.daily_height_begin = 0
+        self.daily_height_end = 0
+
+        self.daily_price = 0
+        self.redis.reset_active_address()
+        self.redis.reset_send_address()
+        self.redis.reset_receive_address()
+
+        #v2
+        self.daily_mint = 0
+        self.daily_lth_volume = 0
+        self.daily_frm = 0
+        self.daily_cvdd = 0
+        self.daily_nvt_ratio = 0
+        self.daily_balanced_price = 0
+        self.daily_realized_price = 0
+        self.daily_transferred_price = 0
+        self.daily_velocity = 0
+        self.daily_mempool_volume = 0
+    # "stat_daily_reset" zeroes every per-day counter (profit, fees, txs,
+    # addresses, volumes, SOPR, CDD, MVRV, NUPL, and the v2 metrics such as
+    # mint, LTH volume, FRM, CVDD, NVT ratio, balanced price, velocity and
+    # mempool volume) and clears the per-day active/send/receive address sets
+    # in Redis, giving each new day a clean slate.
+    def stat_cdd(self, prev_value, days):
+        cdd = prev_value * days
+        self.daily_cdd += cdd
+        self.daily_sumcdd += cdd
+        if days <= 1:
+            self.daily_cdd_days1 += cdd
+        elif days <= 7:
+            self.daily_cdd_days7 += cdd
+        elif days <= 30:
+            self.daily_cdd_days30 += cdd
+        elif days <= 60:
+            self.daily_cdd_days60 += cdd
+        elif days <= 90:
+            self.daily_cdd_days90 += cdd
+        elif days <= 180:
+            self.daily_cdd_days180 += cdd
+        elif days <= 365:
+            self.daily_cdd_days365 += cdd
+        else:
+            self.daily_cdd_days730 += cdd
+    # "stat_cdd" accumulates Coin Days Destroyed: the spent value multiplied by
+    # the number of days it was held. The product goes into daily_cdd and the
+    # cumulative daily_sumcdd, and the same amount is bucketed by holding
+    # period (daily_cdd_days1 ... daily_cdd_days730).
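+    # A worked example of the accumulation above (illustrative, not part of
+    # the original file): spending 2.5 BTC that last moved 40 days ago
+    # destroys 2.5 * 40 = 100 coin-days, which lands in the "<= 60 days"
+    # bucket handled by daily_cdd_days60.
+    @staticmethod
+    def _example_cdd(value_btc=2.5, days_held=40):
+        cdd = value_btc * days_held                         # 100.0 coin-days
+        bucket = "days60" if 30 < days_held <= 60 else "other"
+        return cdd, bucket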
+    def get_price(self, height, dayutc):
+        price = 0
+        dayutcstr = str(dayutc)
+        cnt = 0
+        while cnt < 3:
+            cnt += 1
+            if dayutcstr in self.history_prices:
+                price = self.history_prices[dayutcstr]
+                break
+            elif dayutcstr == str(self.current_utc):
+                price = self.get_current_price()
+                self.current_price = price
+                self.history_prices[dayutcstr] = self.current_price
+                break
+            else:
+                print("failed get price", height, dayutcstr)
+                self.get_history_price()
+                self.history_prices = self.get_history_price2()
+                self.current_price = self.get_current_price()
+                self.current_utc = self.get_current_utc()
+                self.history_prices[str(self.current_utc)] = self.current_price
+                price = self.history_prices[dayutcstr]
+                break
+
+        return price
+    # "get_price" resolves the USD price for the UTC day "dayutc": it first
+    # looks the day up in self.history_prices; if the day is today it fetches
+    # the live price instead and caches it; otherwise it reloads the
+    # historical series from the database and retries. If no price can be
+    # found it returns 0, after logging the block height and day that could
+    # not be priced.
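+    # Illustrative only: how block timestamps are turned into price-table
+    # keys. get_day_utc truncates the timestamp to 00:00:00 of its UTC day,
+    # and that day (as a string) is the key into self.history_prices, so every
+    # transaction in a given day shares one price. The timestamp below is an
+    # arbitrary example, and dict.get is used here only to make the sketch
+    # total; the real code indexes the dictionary directly.
+    def _example_price_lookup(self, blocktime=1700000123):
+        day_key = str(self.get_day_utc(blocktime))   # e.g. "1699920000"
+        return self.history_prices.get(day_key, 0)   # 0 when the day is unpriced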
+    def save_db(self, dayutc):
+        if dayutc != self.daily_date:
+            print("cmp", dayutc, self.daily_date)
+            start = time.time()
+            self.daily_sumcsupply += (self.daily_csupply)
+            daily_profit_rate = self.daily_profit / self.daily_txs if self.daily_txs != 0 else 0
+            daily_sopr = self.daily_sopr_sell / self.daily_sopr_buy if self.daily_sopr_buy != 0 else 0
+            daily_sasopr = self.daily_asopr_sell / self.daily_asopr_buy if self.daily_asopr_buy != 0 else 0
+            daily_easopr = self.daily_easopr_sell / self.daily_easopr_buy if self.daily_easopr_buy != 0 else 0
+            daily_lthsopr = self.daily_lthsopr_sell / self.daily_lthsopr_buy if self.daily_lthsopr_buy != 0 else 0
+            daily_sthsopr = self.daily_sthsopr_sell / self.daily_sthsopr_buy if self.daily_sthsopr_buy != 0 else 0
+            self.daily_asol = self.daily_asol / self.daily_atxs if self.daily_atxs != 0 else 0
+            self.daily_eaasol = self.daily_eaasol / self.daily_atxs if self.daily_atxs != 0 else 0
+            self.daily_sacdd = self.daily_cdd / self.daily_csupply if self.daily_csupply != 0 else 0
+            self.daily_mvrv = self.daily_marketcap / self.daily_rcap if self.daily_rcap != 0 else 0
+            liveliness = self.daily_sumcdd / self.daily_sumcsupply if self.daily_sumcsupply != 0 else 0
+            ealiveliness = self.daily_sumeacdd / self.daily_sumcsupply if self.daily_sumcsupply != 0 else 0
+            rplrate = self.daily_rprofit - self.daily_rloss
+            dormancy = self.daily_cdd / self.daily_volume if self.daily_volume != 0 else 0
+            adormancy = dormancy / self.daily_csupply if self.daily_csupply != 0 else 0
+            self.daily_eavolume -= (self.daily_fees)
+            eadormancy = self.daily_eacdd / self.daily_eavolume if self.daily_eavolume != 0 else 0
+            nupl = (self.daily_marketcap - self.daily_rcap) / self.daily_marketcap if self.daily_marketcap != 0 else 0
+
+            self.daily_total_address = self.redis.get_addr_cnt()  # number of addresses (was the bare global "redisif")
+
+            self.daily_height_end = self.height - 1 if self.height > self.daily_height_begin else self.daily_height_begin
+            dbif.update_to_dailyinds(self.daily_date, self.daily_height_begin, self.daily_height_end, daily_profit_rate,
+                                     self.daily_fees, self.daily_txs, self.daily_new_address, self.daily_total_address,
+                                     self.daily_new_address_volume, self.daily_active_address,
+                                     self.daily_send_address, self.daily_receive_address, self.daily_volume,
+                                     self.daily_eavolume, daily_sopr, daily_sasopr, daily_easopr, daily_lthsopr,
+                                     daily_sthsopr,
+                                     self.daily_asol, self.daily_eaasol, dormancy, adormancy, eadormancy,
+                                     self.daily_cdd, self.daily_sacdd, self.daily_eacdd, self.daily_cdd_days1,
+                                     self.daily_cdd_days7, self.daily_cdd_days30, self.daily_cdd_days60,
+                                     self.daily_cdd_days90, self.daily_cdd_days180, self.daily_cdd_days365,
+                                     self.daily_cdd_days730, self.daily_csupply, self.daily_mintusd,
+                                     self.daily_sumcsupply, self.daily_sumcdd, self.daily_sumeacdd,
+                                     liveliness, ealiveliness, self.daily_rprofit, self.daily_rloss, rplrate,
+                                     self.daily_price, self.daily_marketcap, self.daily_rcap, self.daily_earcap,
+                                     self.daily_mvrv, nupl, self.daily_cdd * self.daily_price)
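+            # Worked example of the ratios above (illustrative numbers only):
+            # if the coins spent today were originally acquired for a total of
+            # 80 (daily_sopr_buy) and were spent at prices totalling 100
+            # (daily_sopr_sell), then daily_sopr = 100 / 80 = 1.25, i.e. spent
+            # outputs realized a 25% average profit. Likewise, with a market
+            # cap of 900 and a realized cap of 600: MVRV = 900 / 600 = 1.5 and
+            # NUPL = (900 - 600) / 900 = 1/3 of market value is unrealized profit.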
+            #v2
+            #self.daily_sumdays = (dayutc - 1231469665)/3600/24
+            self.daily_sumdays = self.daily_sumcdd / self.daily_csupply
+            if self.daily_csupply > 0:
+                self.daily_realized_price = self.daily_rcap / self.daily_csupply
+            if self.daily_sumdays > 0:
+                self.daily_transferred_price = self.daily_sumvdd / (self.daily_sumdays * self.daily_csupply)
+                self.daily_balanced_price = self.daily_realized_price - self.daily_transferred_price
+            if self.daily_fees > 0:
+                self.daily_frm = (self.daily_fees + self.daily_mint) / self.daily_fees
+            if self.daily_sumdays > 0:
+                self.daily_cvdd = self.daily_sumvdd / (self.daily_sumdays * 6000000)
+
+            #daily_vp = self.daily_volume*self.daily_price
+            #if daily_vp > 0:
+            if self.daily_volume > 0 and self.daily_price > 0:
+                self.daily_nvt_ratio = self.daily_marketcap / self.daily_volume / self.daily_price
+
+            if self.daily_marketcap > 0:
+                self.daily_velocity = self.daily_volume * self.daily_price / self.daily_marketcap
+
+            dbif.update_to_dailyindsv2(dayutc, self.daily_height_begin, self.daily_height_end, self.daily_lth_volume, self.daily_frm, self.daily_cvdd, self.daily_realized_price, self.daily_transferred_price, self.daily_balanced_price, self.daily_nvt_ratio, self.daily_velocity)
+
+            self.stat_daily_reset()
+            self.daily_date = dayutc
+            self.daily_height_begin = self.height
+            print("save_db", f'cost:{time.time() - start:.4f}s')
+    # "save_db" closes out a day: when the block's day differs from the day in
+    # progress it derives the day-level ratios (profit rate, the SOPR
+    # variants, MVRV, NUPL, liveliness, dormancy) plus the v2 metrics
+    # (realized / transferred / balanced price, FRM, CVDD, NVT ratio,
+    # velocity), writes both rows to the database via update_to_dailyinds and
+    # update_to_dailyindsv2, resets the per-day counters and starts the next day.
+    def stat_block(self, dbif, redisif, config):
+        self.redis = redisif
+        self.stat_load(redisif, config)
+        if self.daily_date is None:
+            self.stat_reset()
+            return
+        print("start height", self.height)
+        # return
+
+        self.height += 1
+
+        print("start")
+
+        while True:
+            start = time.time()
+            blockstats = self.rpc_cmd("getblockstats")
+            print("getblockstats", f'cost:{time.time()-start:.4f}s')
+            start = time.time()
+            #mempoolinfo = self.rpc_cmd("getmempoolinfo")
+            blockdetail = self.rpc_cmd("getblock")
+            print("getblock", f'cost:{time.time() - start:.4f}s')
+            block_start = time.time()
+            self.blocktime = blockdetail.get_time()
+            block_time2 = time.gmtime(self.blocktime)
+            daystr = time.strftime("%d %b %Y", block_time2)
+            dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
+            dayutcstr = str(dayutc)
+
+            if self.daily_date == 0:
+                self.daily_date = dayutc
+
+            #print("mempoolinfo", mempoolinfo, mempoolinfo["size"], float(mempoolinfo["total_fee"]))
+            #time.sleep(10)
+            #dbif.update_to_realtimeindsv2(self.blocktime, int(mempoolinfo["size"]), float(mempoolinfo["total_fee"]))
+            #break
+
+            self.save_db(dayutc)
+
+            blocktxs = blockdetail.get_transactions()
+            self.height = blockdetail.get_height()
+            redisif.set_block_time(self.height, self.blocktime)  # table of block height and time for later queries
+
+            mint = blockstats["subsidy"] / 100000000
+            self.daily_csupply += (mint)
+            #self.daily_sumcsupply += (self.daily_csupply)
+            self.daily_mint += (mint)
+
+            block_fees = (blockstats["totalfee"] / 100000000)
+            self.daily_fees += block_fees
+            self.daily_volume += (blockstats["total_out"] / 100000000)
+
+            self.daily_txs += (blockstats["txs"] - 1)  # exclude coinbase tx
+
+            block_price = self.get_price(self.height, dayutc)
+            self.daily_mintusd += (block_price * (mint + block_fees))
+
+            self.daily_price = block_price
+
+            self.daily_marketcap = (self.daily_csupply * block_price)
+            # genisis_time = redisif.get_block_time(1)
+            '''genisis_time = 1231006505
+            days = (self.blocktime - genisis_time) / 3600 / 24
+            if days >= 155:
+                self.daily_lth_marketcap += (self.daily_csupply * 
block_price) + else: + self.daily_sth_marketcap += (self.daily_csupply * block_price) + ''' + for tx in blocktxs: + txid = tx.get_txid() + vins = tx.get_vins() + vouts = tx.get_vouts() + vin_hexs = [] + vin_addrs = [] + vin_values = [] + vin_dts = [] + + vin_volume = 0 + vin_volume_change = 0 + + vin_days_change = 0 + + vin_cdd = 0 + vin_cdd_change = 0 + + vin_rcap_change = 0 + + vin_sopr = 0 + + vin_asopr_diff = 0 + + vout_change_value = 0 + + if not tx.is_coinbase(): + for vin in vins: + # print(self.height, "vin", vin, type(vin)) + if vin.is_prevout(): + prevout = vin["prevout"] + prev_height = prevout["height"] + prev_value = float(prevout["value"]) + prev_scriptpubkey = prevout["scriptPubKey"] + #prev_type = prev_scriptpubkey["type"] + prev_hex = prev_scriptpubkey["hex"] + prev_address = self.get_vin_address(prev_scriptpubkey, prev_height, txid) + prev_blocktime = redisif.get_block_time(prev_height) + + redisif.save_addr(prev_address, -prev_value) + if not redisif.is_send_address(prev_address): + self.daily_send_address += 1 + if not redisif.is_active_address(prev_address): + self.daily_active_address += 1 + + days = (self.blocktime - prev_blocktime) / 3600 / 24 + vin_cdd += (prev_value * days) + self.stat_cdd(prev_value, days) + + if days >= 155: + self.daily_lth_volume += prev_value + + vin_addrs.append(prev_address) + vin_values.append(prev_value) + vin_dts.append(prev_blocktime) + vin_hexs.append(prev_hex) + + vin_volume += prev_value + vin_asopr_diff += ((self.blocktime - prev_blocktime) * prev_value) + + prevutc = self.get_day_utc(prev_blocktime) + prev_price = self.get_price(prev_height, prevutc) + vin_sopr += (prev_price * prev_value) + + self.daily_sumvdd += (prev_value * days * prev_price) + + self.daily_rcap -= (prev_price * prev_value) + + have_change = False + for vout in vouts: + scriptpubkey = vout.get_script_pubkey() + # vout address is same with vin address + if scriptpubkey["hex"] == prev_scriptpubkey["hex"]: + vin_rcap_change += (prev_value * prev_price) + vin_volume_change += prev_value + vout_change_value = float(vout.get_value()) + + days = (self.blocktime - prev_blocktime) / 3600 / 24 + vin_days_change += days + vin_cdd_change += (prev_value * days) + + have_change = True + break + if not have_change: + self.daily_earcap -= (prev_price * prev_value) + self.daily_eacdd += (prev_value * days) + + self.daily_eavolume += (vin_volume - vout_change_value) + + vin_sopr_change = vin_sopr + #vin_change_price = 0 + if vin_rcap_change != 0: + if vin_volume_change != 0: + vin_change_price = vin_rcap_change / vin_volume_change + self.daily_earcap -= (vin_rcap_change - (vin_change_price * vout_change_value)) + vin_sopr_change -= (vin_change_price * vout_change_value) + + if vin_cdd_change != 0: + if vin_volume_change != 0: + vin_change_days = vin_cdd_change / vin_volume_change + vin_cdd_change -= (vin_change_days * vout_change_value) + + self.daily_sumeacdd += (vin_cdd - vin_cdd_change) + + self.daily_sopr_buy += vin_sopr + + self.daily_easopr_buy += vin_sopr_change + + if vin_asopr_diff >= 3600 * vin_volume: + self.daily_asopr_buy += vin_sopr + if vin_volume > 0: + self.daily_asol += (vin_cdd/vin_volume) + self.daily_eaasol += (vin_cdd / vin_volume) + if vin_volume_change > 0: + self.daily_eaasol -= (vin_cdd_change/vin_volume_change) + self.daily_atxs += 1 + + if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume: + self.daily_lthsopr_buy += vin_sopr + else: + self.daily_sthsopr_buy += vin_sopr + + vout_price = block_price + vout_volume = 0 + vout_volume_change = 0 + vout_sopr = 
0 + vout_sopr_change = 0 + + for vout in vouts: + vout_value = float(vout.get_value()) + vout_volume += vout_value + scriptpubkey = vout.get_script_pubkey() + vout_type = scriptpubkey["type"] + vout_address = self.get_vout_address(scriptpubkey, self.height, txid) + vout_hex = scriptpubkey["hex"] + + if not redisif.is_in_addr(vout_address): + self.daily_new_address_volume += vout_value + self.daily_new_address += 1 + redisif.save_addr(vout_address, vout_value) + + if not redisif.is_receive_address(vout_address): + self.daily_receive_address += 1 + if not redisif.is_active_address(vout_address): + self.daily_active_address += 1 + + self.daily_rcap += (vout_price * vout_value) + + vout_sopr += (vout_price * vout_value) + + have_change = False + for cmp in vin_hexs: + if cmp == vout_hex: + vout_volume_change += vout_value + have_change = True + break + if not have_change: + self.daily_earcap += (vout_price * vout_value) + vout_sopr_change += (vout_price * vout_value) + + if self.height > 787556: + if (vout_price * vout_value) >= self.rules["flag_big_vout"]: + if vin_volume != 0: + days = vin_cdd / vin_volume + buyin = vin_sopr / vin_volume + sellout = vout_price + profit = 0 + if buyin != 0: + profit = (sellout - buyin) / buyin + dbif.update_to_bigamountvout(self.blocktime, txid, \ + vout_address, vout.get_n(), vout_type, \ + vout_value, self.height, days, buyin, sellout, profit) + + self.daily_easopr_sell += vout_sopr_change + + self.daily_sopr_sell += vout_sopr + if vin_asopr_diff > 3600 * vin_volume: + self.daily_asopr_sell += vout_sopr + + if vin_asopr_diff >= 3600 * 155 * 24 * vin_volume: + self.daily_lthsopr_sell += vout_sopr + else: + self.daily_sthsopr_sell += vout_sopr + + if vin_volume != 0: + if block_price > (vin_sopr / vin_volume): + self.daily_rprofit += (vout_sopr - vin_sopr) + if block_price < (vin_sopr / vin_volume): + self.daily_rloss += (vin_sopr - vout_sopr) + + buyin = vin_sopr / vin_volume + sellout = vout_sopr / vout_volume if vout_volume != 0 else 0 + if sellout > buyin: + self.daily_profit += 1 + else: + for vout in vouts: + vout_value = float(vout.get_value()) + scriptpubkey = vout.get_script_pubkey() + vout_address = self.get_vout_address(scriptpubkey, self.height, txid) + + vout_price = block_price + self.daily_rcap += (vout_price * vout_value) + self.daily_earcap += (vout_price * vout_value) + + if not redisif.is_in_addr(vout_address): + self.daily_new_address_volume += vout_value + self.daily_new_address += 1 + redisif.save_addr(vout_address, vout_value) + + if not redisif.is_receive_address(vout_address): + self.daily_receive_address += 1 + if not redisif.is_active_address(vout_address): + self.daily_active_address += 1 + + self.stat_save(redisif) + print("statblock", f'coast:{time.time() - block_start:.4f}s') + start = time.time() + self.rpc_cmd("getblockcount") + print("getblockcount", f'coast:{time.time() - start:.4f}s') + # 这个“stat_block”方法似乎可以处理每个块的统计计算和数据库更新。让我们分解一下它的功能: + # - ** 参数 **: + # - 'dbif':数据库接口对象。 + # - 'redisif':Redis接口对象。 + # - 'config':配置对象。 + # - ** 功能性 **: + # - 它使用“stat_load()”方法从Redis加载统计信息。 + # - 如果'daily_date'为None,则使用'stat_reset()'重置统计信息并返回。 + # - 它使用RPC命令('getblockstats'、'getblock')检索区块统计信息和详细信息。 + # - 它计算各种指标并针对当前区块更新它们: + # - 每日供应量('daily_csupply')、费用('daily_fees')、交易量('daily_volume')、交易数量('daily_txs')等。 + # - 它检索区块时间并计算UTC日。 + # - 它使用“save_db()”方法保存每日数据。 + # - 它处理区块中的每笔交易: + # - 更新地址、数量、支出产出利润率 (SOPR)、硬币销毁天数 (CDD) 等。 + # - 它使用“stat_save()”方法将统计数据保存回Redis。 + # - ** 打印报表 **: + # - 
包含print语句以指示各种操作('getblockstats'、'getblock'、'getblockcount')所花费的持续时间。 + # 总体而言,此方法处理每个块的统计信息的收集和处理,确保数据库使用最新信息进行更新。 +def init_config(filename): + fconfig = open(filename) + config = ujson.load(fconfig) + fconfig.close() + dbif = db_if_qt.DbIf(host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb") + redisif = redis_if_qt.RedisIf(host="127.0.0.1", port=6379, password="", db=0) + return dbif, redisif, config +# “init_config”函数通过从 JSON 文件加载配置来初始化配置,然后根据 JSON 中提供的配置创建数据库和 Redis 接口的实例。下面是一个细分: +# -**参数**: +# - 'filename':JSON配置文件的名称。 +# -**功能性**: +# 1. **打开JSON配置文件**: +# - 在读取模式下打开指定的 JSON 配置文件(“filename”)。 +# - 使用“ujson.load()”函数从文件加载 JSON 数据。 +# - 关闭文件。 +# 2. **创建数据库和Redis接口**: +# - 使用 'db_if_qt.DbIf“,其中包含从加载的配置中获取的主机、端口、用户、密码和数据库名称等参数。 +# - 使用“redis_if_qt”初始化 Redis 接口 ('redisif')。RedisIf“,其中包含从加载的配置中获取的主机、端口、密码和数据库等参数。 +# 3. **返回**: +# - 返回初始化的数据库接口('dbif')、Redis 接口('redisif')和加载的配置。 +# - **返回值**: +# - 'dbif':初始化的数据库接口对象。 +# - 'redisif':初始化的 Redis 接口对象。 +# - 'config':将配置加载为 Python 字典。 +# 该函数封装了加载配置设置以及初始化数据库和 Redis 接口的过程,为应用程序提供了一种方便的环境设置方式。 +if __name__ == '__main__': + dbif, redisif, config = init_config("btcstat_qt.conf") + #print("init_config") + #redisif.reset_btc_data() + statif = StatIf() + #print("StatIf") + statif.stat_block(dbif, redisif, config) +# 此代码块是脚本的入口点。让我一步一步地解释一下: +# - 'if __name__ == '__main__':':此行确保仅当脚本直接运行时才执行以下代码块,而不是将其作为模块导入到另一个脚本中。 +# - 'dbif, redisif, config = init_config(“btcstat_qt.conf”)':调用 'init_config' 函数初始化数据库接口 ('dbif')、Redis 接口 ('redisif'),并将配置设置加载到 'config' 变量中。这些对象是进一步操作所必需的。 +# - 'statif = StatIf()':它创建“StatIf”类的实例。这表明在代码的其他地方定义了一个名为“StatIf”的类。 +# - 'statif.stat_block(dbif, redisif, config)':调用 'StatIf' 实例的 'stat_block' 方法,传递数据库接口 ('dbif')、Redis 接口 ('redisif') 和加载的配置 ('config')。此方法可能使用提供的接口和配置执行一些与区块链数据相关的统计操作。 +# 总体而言,此脚本初始化必要的组件,例如数据库和 Redis 接口,加载配置设置,然后使用“StatIf”类执行统计操作。 diff --git a/lyq/btc_update.py b/lyq/btc_update.py new file mode 100644 index 0000000..ce4e227 --- /dev/null +++ b/lyq/btc_update.py @@ -0,0 +1,125 @@ +import pymysql +import json +import os +import time +from datetime import datetime, timedelta + +# 数据库配置 +DB_CONFIG = { + "host": "192.168.194.240", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" +} + +# 数据文件路径 +DATA_FILE = "btc_historical_price.py" + +# 定时任务间隔(秒)—— 例如 3600 为每小时更新一次 +INTERVAL = 28800 + + +def get_new_prices(source, last_timestamp=None): + """ + 从数据库获取 source 数据源的最新价格 + 仅每天北京时间 08:00:00 的数据减 8 小时存入文件 + """ + conn = pymysql.connect(**DB_CONFIG) + prices = {} + try: + with conn.cursor() as cursor: + if last_timestamp: + sql = """ + SELECT timestamp, price + FROM btc_prices + WHERE source = %s AND timestamp > %s + ORDER BY timestamp + """ + cursor.execute(sql, (source, last_timestamp)) + else: + sql = """ + SELECT timestamp, price + FROM btc_prices + WHERE source = %s + ORDER BY timestamp + """ + cursor.execute(sql, (source,)) + rows = cursor.fetchall() + for timestamp, price in rows: + ts_int = int(timestamp) + # 转换为北京时间 + dt_beijing = datetime.utcfromtimestamp(ts_int) + timedelta(hours=8) + # 如果是每天 08:00:00 北京时间,则减 8 小时 + if dt_beijing.hour == 8 and dt_beijing.minute == 0 and dt_beijing.second == 0: + ts_int -= 8 * 3600 + prices[str(ts_int)] = float(price) + finally: + conn.close() + return prices + + +def load_existing_data(): + """加载历史价格数据""" + if not os.path.exists(DATA_FILE): + return {}, {} + + try: + with open(DATA_FILE, "r", encoding="utf-8") as 
f: + ns = {} + exec(f.read(), ns) + return ns.get("prices_temp", {}), ns.get("prices", {}) + except Exception: + return {}, {} + + +def save_prices(prices_temp, prices): + """保存价格数据到文件""" + with open(DATA_FILE, "w", encoding="utf-8") as f: + f.write("# 自动生成的BTC历史价格数据文件\n") + f.write(f"# 更新时间: {datetime.now()}\n\n") + f.write("prices_temp = ") + f.write(json.dumps(prices_temp, indent=4, ensure_ascii=False)) + f.write("\n\nprices = ") + f.write(json.dumps(prices, indent=4, ensure_ascii=False)) + f.write("\n") + + +def get_last_timestamp(price_dict): + """获取当前字典中最大的时间戳""" + if not price_dict: + return None + return max(int(ts) for ts in price_dict.keys()) + + +def update_once(): + """执行一次更新流程""" + prices_temp, prices = load_existing_data() + last_nasdaq_ts = get_last_timestamp(prices_temp) + last_crypto_ts = get_last_timestamp(prices) + + nasdaq_new = get_new_prices("Nasdaq", last_nasdaq_ts) + crypto_new = get_new_prices("CryptoCompare", last_crypto_ts) + + prices_temp.update(nasdaq_new) + prices.update(crypto_new) + + save_prices(prices_temp, prices) + + +def main(): + """主循环任务""" + while True: + try: + update_once() + except Exception: + pass + time.sleep(INTERVAL) + + +if __name__ == "__main__": + main() diff --git a/lyq/btc_utxos_lyq2.py b/lyq/btc_utxos_lyq2.py new file mode 100644 index 0000000..ad94a59 --- /dev/null +++ b/lyq/btc_utxos_lyq2.py @@ -0,0 +1,1838 @@ +# coding=utf-8 +import json +import os +import sys +import ujson +import time +import requests +from loguru import logger +from datetime import datetime, timedelta +from easybitcoinrpc import RPC +import csv +import sqlite3 +from bitcoinutils.script import Script +from bitcoinutils.keys import P2wpkhAddress, P2wshAddress, P2shAddress, PrivateKey, PublicKey, SegwitAddress, P2pkhAddress +from bitcoinutils.setup import setup +import pymysql +import pymongo +from urllib import parse + +import btc_historical_price + + +class UtxosIf: + def __init__(self): + self.balance_0 = 0 + self.balance_001 = 0 + self.balance_01 = 0 + self.balance_1 = 0 + self.balance_10 = 0 + self.balance_100 = 0 + self.balance_1000 = 0 + self.balance_10000 = 0 + + self.balance_amount_0 = 0 + self.balance_amount_001 = 0 + self.balance_amount_01 = 0 + self.balance_amount_1 = 0 + self.balance_amount_10 = 0 + self.balance_amount_100 = 0 + self.balance_amount_1000 = 0 + self.balance_amount_10000 = 0 + + self.profit_addresses = 0 + self.loss_addresses = 0 + self.profit_ratio = 0 + self.lth_supply = 0 + self.sth_supply = 0 + self.realized_price = 0 + self.relative_lth_sth = 0 + self.lth_profit_supply = 0 + self.lth_loss_supply = 0 + self.lth_profit_ratio = 0 + self.sth_profit_supply = 0 + self.sth_loss_supply = 0 + self.sth_profit_ratio = 0 + self.slrv_ratio = 0 + self.slrv_24h = 0 + self.slrv_6m1y = 0 + + self.total_address = 0 + self.miner_address = 0 + self.miner_balance = 0 + self.total_balance = 0 + self.total_rcap = 0 + self.holder_0 = 0 + self.holder_1 = 0 + self.holder_2 = 0 + self.holder_3 = 0 + self.holder_4 = 0 + self.holder_5 = 0 + self.holder_6 = 0 + self.holder_7 = 0 + self.holder_15 = 0 + self.holder_30 = 0 + self.holder_60 = 0 + self.holder_90 = 0 + self.holder_180 = 0 + self.holder_360 = 0 + self.holder_540 = 0 + self.holder_720 = 0 + self.holder_1080 = 0 + self.holder_1440 = 0 + self.holder_1800 = 0 + self.holder_2160 = 0 + self.holder_2520 = 0 + self.holder_2880 = 0 + self.holder_3240 = 0 + self.holder_3600 = 0 + self.holder_3960 = 0 + + self.holder_balance_0 = 0 + self.holder_balance_1 = 0 + self.holder_balance_2 = 0 + self.holder_balance_3 = 0 + 
self.holder_balance_4 = 0 + self.holder_balance_5 = 0 + self.holder_balance_6 = 0 + self.holder_balance_7 = 0 + self.holder_balance_15 = 0 + self.holder_balance_30 = 0 + self.holder_balance_60 = 0 + self.holder_balance_90 = 0 + self.holder_balance_180 = 0 + self.holder_balance_360 = 0 + self.holder_balance_540 = 0 + self.holder_balance_720 = 0 + self.holder_balance_1080 = 0 + self.holder_balance_1440 = 0 + self.holder_balance_1800 = 0 + self.holder_balance_2160 = 0 + self.holder_balance_2520 = 0 + self.holder_balance_2880 = 0 + self.holder_balance_3240 = 0 + self.holder_balance_3600 = 0 + self.holder_balance_3960 = 0 + + self.price_buy = {} # step 500 + self.price_buy_amount = {} # step 500 + self.diff_sell = {} # step 500 + self.diff_sell_amount = {} + self.profit_sell = {} # step 5000 + self.profit_sell_amount = {} + + self.balance_0_days = {} + self.balance_001_days = {} + self.balance_01_days = {} + self.balance_1_days = {} + self.balance_10_days = {} + self.balance_100_days = {} + self.balance_1000_days = {} + self.balance_10000_days = {} + + self.balance_amount_0_days = {} + self.balance_amount_001_days = {} + self.balance_amount_01_days = {} + self.balance_amount_1_days = {} + self.balance_amount_10_days = {} + self.balance_amount_100_days = {} + self.balance_amount_1000_days = {} + self.balance_amount_10000_days = {} + + ''' + self.current_dt = time.time() + self.current_dt2 = time.gmtime(int(self.current_dt)) + self.current_daystr = time.strftime("%d %b %Y", self.current_dt2) + self.current_dayutc = int(time.mktime(time.strptime(self.current_daystr, "%d %b %Y"))) + ''' + + self.mc = pymongo.MongoClient("mongodb://10.168.3.192:27018/") + self.mdb = self.mc["btcutxos2"] + # self.mdb.authenticate("root", "123456") + + self.uprofit = 0 + self.uloss = 0 + self.lth_nupl = 0 + self.sth_nupl = 0 + self.lth_mv = 0 + self.sth_mv = 0 + self.lth_rcap = 0 + self.sth_rcap = 0 + self.lth_mvrv = 0 + self.sth_mvrv = 0 +# 此“UtxosIf”类使用默认值初始化大量属性。以下是属性的摘要: +# +# - 与平衡相关的属性: +# - 'balance_0', 'balance_001', ..., 'balance_10000':这些是不同面额比特币的余额计数器。 +# - 'balance_amount_0', 'balance_amount_001', ..., 'balance_amount_10000':这些是与余额计数器相对应的余额金额。 +# +# - 与地址和利润相关的属性: +# - 'profit_addresses':利润计数器地址。 +# - 'loss_addresses': 丢失地址的计数器。 +# - 与利润率、供应、已实现价格等相关的各种其他属性 +# +# +# - 与持有人及其余额相关的属性: +# - 不同时间间隔的持有者计数器('holder_0'、'holder_1'、...、'holder_3960')。 +# - 每个持有人的相应余额属性。 +# +# - 与价格和交易相关的属性: +# - 用于存储价格和交易金额的字典('price_buy'、'price_buy_amount'、'diff_sell'、'diff_sell_amount'、'profit_sell'、'profit_sell_amount')。 +# +# - 与余额随时间变化相关的属性: +# - 用于存储不同时间间隔的余额变化的字典('balance_0_days'、'balance_001_days'、...、'balance_amount_10000_days')。 +# +# - 与 +# MongoDB +# 连接和其他指标相关的属性: +# - 连接到MongoDB数据库('mc','mdb')。 +# - 与利润、亏损、已实现资本化等相关的指标 +# +# 此类似乎用于管理与比特币UTXO(未花费的交易输出)相关的各种指标和数据 + def init_db(self): + return pymysql.connect(host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", database="btcdb", + cursorclass=pymysql.cursors.DictCursor) +# 'init_dbpymysql库来建立连接。 + # + # 以下是该方法的作用的细分 + # + # 它使用提供的主机、端口、用户名、密码和数据库名称建立与 + # MySQL + # 数据库的连接。 + def get_vout_addr(self, rpc, txid, vout): + addr = None + addrtype = None + ip = "127.0.0.1" + port = "8332" + user = "user" + password = "password" + timeout=100 + if rpc is None: + rpc = RPC(ip, port, user, password) + tx = None + while True: + try: + tx = rpc.transactions.get_raw_transaction(txid, True) + #break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + continue + + txouts = tx["vout"] + txout = None + for outone in txouts: + # 
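+        # Reviewer note: for bare-pubkey (P2PK) outputs the scriptPubKey hex is
+        # <push byte><pubkey><OP_CHECKSIG>, so the slices used below skip the leading push opcode:
+        # [2:132] covers a 65-byte uncompressed key (prefix "04"), [2:68] a 33-byte compressed key.
+        # A self-contained sketch of the same derivation with a throwaway key (illustration only,
+        # not the script's data):
+        #
+        #     from bitcoinutils.setup import setup
+        #     from bitcoinutils.keys import PrivateKey, PublicKey
+        #
+        #     setup('mainnet')
+        #     pub_hex = PrivateKey().get_public_key().to_hex()  # compressed key, 66 hex chars
+        #     script_hex = "21" + pub_hex + "ac"                # 0x21 push + key + OP_CHECKSIG
+        #     addr = PublicKey(script_hex[2:68]).get_address().to_string()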
print(outone, vout) + if outone["n"] == vout: + txout = outone + break + scriptPubKey = txout["scriptPubKey"] + addrtype = scriptPubKey["type"] + + #print("get_vout_addr", txid, vout, tx, scriptPubKey, addrtype) + if "address" not in scriptPubKey: + addr = scriptPubKey["hex"] + if scriptPubKey["type"] == "pubkey": + temphex = scriptPubKey["hex"] + try: + if temphex[2:4] == "04": + addr = PublicKey(temphex[2:132]).get_address(False).to_string() + else: + addr = PublicKey(temphex[2:68]).get_address().to_string() + print("pubkey", txid, temphex, addr) + except Exception as e: + print("pubkey exception", txid, vout, temphex, addr, e) + else: + print(scriptPubKey) + else: + addr = scriptPubKey["address"] + # print(addr) + + break + + return rpc, addr, addrtype +# “UtxosIf”类中的“get_vout_addr”方法旨在检索与给定事务 (txid) 中的特定事务输出 (vout) 相对应的地址和地址类型。以下是其功能的细分: + # + # -参数: + # - 'rpc':用于与比特币网络交互的 RPC 类的实例。 + # - 'txid':交易 ID。 + # - 'vout':事务输出的索引。 + # + # -初始化: + # - 它初始化用于 RPC 连接的 IP 地址、端口、用户名和密码的变量。 + # + # - RPC连接: + # - 如果 'rpc' 参数为 'None',则使用默认 IP、端口、用户名和密码初始化 RPC 连接。 + # + # - 交易检索: + # - 它尝试使用 'get_raw_transaction' 方法从 RPC 对象中检索原始事务详细信息。 + # - 如果在此过程中出现异常(可能是由于连接问题),它会等待 1 秒钟并重试。 + # + # - 提取地址: + # - 一旦获得交易详细信息,它就会遍历交易的输出('vout')以找到特定的输出。 + # - 它从输出中提取“scriptPubKey”及其类型。 + # - 如果“scriptPubKey”不直接包含地址,它会尝试解析它(例如,如果它是一个公钥脚本)。 + # + # - 返回结果: + # - 最后,它返回 RPC 实例、地址和地址类型。 + # + # 方法如下: + # + # '''蟒蛇 + # def get_vout_addr(self、rpc、txid、vout): + # addr = 无 + # addrtype = 无 + # ip = “127.0.0.1” + # 端口 =“8332” + # user = “用户” + # password = “密码” + # 如果 rpc 为 None: + # rpc = RPC(ip, port, user, password) + # tx = 无 + # 而 True: + # 尝试: + # tx = rpc.transactions.get_raw_transaction(txid, 真) + # 除了: + # 时间睡眠(1) + # rpc = RPC(ip, port, user, password) + # 继续 + # + # txouts = tx[“vout”] + # txout = 无 + # 对于 TXOUTS 中的 Outone: + # if outone[“n”] == vout: + # txout = outone + # 破 + # scriptPubKey = txout[“scriptPubKey”] + # addrtype = scriptPubKey[“类型”] + # + # 如果 “address” 不在 scriptPubKey 中: + # addr = scriptPubKey[“十六进制”] + # if scriptPubKey[“type”] == “pubkey”: + # temphex = scriptPubKey[“十六进制”] + # 尝试: + # 如果 temphex[2:4] == “04”: + # addr = PublicKey(temphex[2:132]).get_address(False).to_string() + # 还: + # addr = PublicKey(temphex[2:68]).get_address().to_string() + # print(“pubkey”, txid, temphex, addr) + # 除了 Exception as e: + # print(“pubkey exception”, txid, vout, temphex, addr, e) + # 还: + # print(scriptPubKey) + # 还: + # addr = scriptPubKey[“地址”] + # + # 破 + # + # 返回 RPC、ADDR、ADdrType + # ''' + # + # 此方法可确保从事务输出中正确提取地址和地址类型,从而处理检索过程中的潜在错误 + def summary_toplist(self, txid, vout, coinbase, value, height, scriptpubkey, dt, price, dt2): + if value >= 100: + rpc = None + rpc, addr, addrtype = self.get_vout_addr(rpc, txid, vout) + if addrtype is None: + addrtype = "unknown" + if addr is None: + addr = "unknown" + toplist = {} + toplist["txid"] = txid + toplist["vout"] = vout + toplist["coinbase"] = coinbase + toplist["value"] = value + toplist["height"] = height + toplist["scriptpubkey"] = scriptpubkey + toplist["dt"] = dt + toplist["price"] = price + toplist["dt2"] = dt2 + toplist["addr"] = addr + toplist["type"] = addrtype + + result = self.mdbc_toplist.find_one(toplist) + if result is None or len(result) > 0: + + self.mdbc_toplist.insert_one(toplist) + #print(self.mdbc_toplist.find_one()) +# 该类中的方法似乎用于汇总和存储有关满足特定条件的事务的信息。以下是该方法的作用:summary_toplistUtxosIf + # + # 它需要与交易相关的几个参数(、、、)。txidvoutcoinbasevalueheightscriptpubkeydtpricedt2 + # 它首先检查事务输出 () 的值是否大于或等于100。value + # 如果该值满足条件,则将变量初始化为None。rpc + # 
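+# Reviewer note: the guard above ('if result is None or len(result) > 0') also inserts when a matching
+# document *was* found, so summary_toplist can store the same UTXO twice. If the intent is
+# insert-if-absent, a minimal corrected sketch:
+#
+#     def insert_if_absent(collection, doc):
+#         """Insert doc only when an identical document is not already stored."""
+#         if collection.find_one(doc) is None:
+#             collection.insert_one(doc)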
然后,它调用该方法来检索与事务输出对应的地址和地址类型。get_vout_addr + # 它创建一个名为的字典,其中包含有关交易的各种信息,包括其ID、输出索引、是否是 + # coinbase交易、其值、区块高度、scriptPubKey、日期时间信息、价格、地址和地址类型。toplist + # 它检查MongoDB集合中是否存在具有相同事务ID和输出索引的记录。如果没有,它会将字典插入到集合中。mdbc_toplisttoplist + def summary_balance_days(self, dt, dt2, value, days_out, balance_out): + daysecs = 3600 * 24 + days = (dt - dt2)/daysecs + if days < 1: + if "day_0" in days_out: + days_out["day_0"]+=1 + balance_out["day_0"] += value + else: + days_out["day_0"]=1 + balance_out["day_0"] = value + elif days < 1 * 2: + if "day_1" in days_out: + days_out["day_1"] += 1 + balance_out["day_1"] += value + else: + days_out["day_1"] = 1 + balance_out["day_1"] = value + elif days < 1 * 3: + if "day_2" in days_out: + days_out["day_2"] += 1 + balance_out["day_2"] += value + else: + days_out["day_2"] = 1 + balance_out["day_2"] = value + elif days < 1 * 4: + if "day_3" in days_out: + days_out["day_3"] += 1 + balance_out["day_3"] += value + else: + days_out["day_3"] = 1 + balance_out["day_3"] = value + elif days < 1 * 5: + if "day_4" in days_out: + days_out["day_4"] += 1 + balance_out["day_4"] += value + else: + days_out["day_4"] = 1 + balance_out["day_4"] = value + elif days < 1 * 6: + if "day_5" in days_out: + days_out["day_5"] += 1 + balance_out["day_5"] += value + else: + days_out["day_5"] = 1 + balance_out["day_5"] = value + elif days < 1 * 7: + if "day_6" in days_out: + days_out["day_6"] += 1 + balance_out["day_6"] += value + else: + days_out["day_6"] = 1 + balance_out["day_6"] = value + elif days < 1 * 8: + if "day_7" in days_out: + days_out["day_7"] += 1 + balance_out["day_7"] += value + else: + days_out["day_7"] = 1 + balance_out["day_7"] = value + elif days < 1 * 9: + if "day_8" in days_out: + days_out["day_8"] += 1 + balance_out["day_8"] += value + else: + days_out["day_8"] = 1 + balance_out["day_8"] = value + elif days < 1 * 10: + if "day_9" in days_out: + days_out["day_9"] += 1 + balance_out["day_9"] += value + else: + days_out["day_9"] = 1 + balance_out["day_9"] = value + elif days < 1 * 11: + if "day_10" in days_out: + days_out["day_10"] += 1 + balance_out["day_10"] += value + else: + days_out["day_10"] = 1 + balance_out["day_10"] = value + elif days < 1 * 12: + if "day_11" in days_out: + days_out["day_11"] += 1 + balance_out["day_11"] += value + else: + days_out["day_11"] = 1 + balance_out["day_11"] = value + elif days < 1 * 13: + if "day_12" in days_out: + days_out["day_12"] += 1 + balance_out["day_12"] += value + else: + days_out["day_12"] = 1 + balance_out["day_12"] = value + elif days < 1 * 14: + if "day_13" in days_out: + days_out["day_13"] += 1 + balance_out["day_13"] += value + else: + days_out["day_13"] = 1 + balance_out["day_13"] = value + elif days < 1 * 15: + if "day_14" in days_out: + days_out["day_14"] += 1 + balance_out["day_14"] += value + else: + days_out["day_14"] = 1 + balance_out["day_14"] = value + elif days < 1 * 16: + if "day_15" in days_out: + days_out["day_15"] += 1 + balance_out["day_15"] += value + else: + days_out["day_15"] = 1 + balance_out["day_15"] = value + elif days < 1 * 17: + if "day_16" in days_out: + days_out["day_16"] += 1 + balance_out["day_16"] += value + else: + days_out["day_16"] = 1 + balance_out["day_16"] = value + elif days < 1 * 18: + if "day_17" in days_out: + days_out["day_17"] += 1 + balance_out["day_17"] += value + else: + days_out["day_17"] = 1 + balance_out["day_17"] = value + elif days < 1 * 19: + if "day_18" in days_out: + days_out["day_18"] += 1 + balance_out["day_18"] += value + else: + days_out["day_18"] = 1 + 
balance_out["day_18"] = value + elif days < 1 * 20: + if "day_19" in days_out: + days_out["day_19"] += 1 + balance_out["day_19"] += value + else: + days_out["day_19"] = 1 + balance_out["day_19"] = value + elif days < 1 * 21: + if "day_20" in days_out: + days_out["day_20"] += 1 + balance_out["day_20"] += value + else: + days_out["day_20"] = 1 + balance_out["day_20"] = value + elif days < 1 * 22: + if "day_21" in days_out: + days_out["day_21"] += 1 + balance_out["day_21"] += value + else: + days_out["day_21"] = 1 + balance_out["day_21"] = value + elif days < 1 * 23: + if "day_22" in days_out: + days_out["day_22"] += 1 + balance_out["day_22"] += value + else: + days_out["day_22"] = 1 + balance_out["day_22"] = value + elif days < 1 * 24: + if "day_23" in days_out: + days_out["day_23"] += 1 + balance_out["day_23"] += value + else: + days_out["day_23"] = 1 + balance_out["day_23"] = value + elif days < 1 * 25: + if "day_24" in days_out: + days_out["day_24"] += 1 + balance_out["day_24"] += value + else: + days_out["day_24"] = 1 + balance_out["day_24"] = value + elif days < 1 * 26: + if "day_25" in days_out: + days_out["day_25"] += 1 + balance_out["day_25"] += value + else: + days_out["day_25"] = 1 + balance_out["day_25"] = value + elif days < 1 * 27: + if "day_26" in days_out: + days_out["day_26"] += 1 + balance_out["day_26"] += value + else: + days_out["day_26"] = 1 + balance_out["day_26"] = value + elif days < 1 * 28: + if "day_27" in days_out: + days_out["day_27"] += 1 + balance_out["day_27"] += value + else: + days_out["day_27"] = 1 + balance_out["day_27"] = value + elif days < 1 * 29: + if "day_28" in days_out: + days_out["day_28"] += 1 + balance_out["day_28"] += value + else: + days_out["day_28"] = 1 + balance_out["day_28"] = value + elif days < 1 * 30: + if "day_29" in days_out: + days_out["day_29"] += 1 + balance_out["day_29"] += value + else: + days_out["day_29"] = 1 + balance_out["day_29"] = value + elif days < 1 * 31: + if "day_30" in days_out: + days_out["day_30"] += 1 + balance_out["day_30"] += value + else: + days_out["day_30"] = 1 + balance_out["day_30"] = value + elif days < 1 * 60: + if "day_60" in days_out: + days_out["day_60"] += 1 + balance_out["day_60"] += value + else: + days_out["day_60"] = 1 + balance_out["day_60"] = value + elif days < 1 * 90: + if "day_90" in days_out: + days_out["day_90"] += 1 + balance_out["day_90"] += value + else: + days_out["day_90"] = 1 + balance_out["day_90"] = value + elif days < 1 * 180: + if "day_180" in days_out: + days_out["day_180"] += 1 + balance_out["day_180"] += value + else: + days_out["day_180"] = 1 + balance_out["day_180"] = value + elif days < 1 * 360: + if "day_360" in days_out: + days_out["day_360"] += 1 + balance_out["day_360"] += value + else: + days_out["day_360"] = 1 + balance_out["day_360"] = value + elif days < 1 * 540: + if "day_540" in days_out: + days_out["day_540"] += 1 + balance_out["day_540"] += value + else: + days_out["day_540"] = 1 + balance_out["day_540"] = value + elif days < 1 * 720: + if "day_720" in days_out: + days_out["day_720"] += 1 + balance_out["day_720"] += value + else: + days_out["day_720"] = 1 + balance_out["day_720"] = value + elif days < 1 * 1080: + if "day_1080" in days_out: + days_out["day_1080"] += 1 + balance_out["day_1080"] += value + else: + days_out["day_1080"] = 1 + balance_out["day_1080"] = value + elif days < 1 * 1440: + if "day_1440" in days_out: + days_out["day_1440"] += 1 + balance_out["day_1440"] += value + else: + days_out["day_1440"] = 1 + balance_out["day_1440"] = value + elif days < 1 
* 1880: + if "day_1880" in days_out: + days_out["day_1880"] += 1 + balance_out["day_1880"] += value + else: + days_out["day_1880"] = 1 + balance_out["day_1880"] = value + elif days < 1 * 2160: + if "day_2160" in days_out: + days_out["day_2160"] += 1 + balance_out["day_2160"] += value + else: + days_out["day_2160"] = 1 + balance_out["day_2160"] = value + elif days < 1 * 2520: + if "day_2520" in days_out: + days_out["day_2520"] += 1 + balance_out["day_2520"] += value + else: + days_out["day_2520"] = 1 + balance_out["day_2520"] = value + elif days < 1 * 2880: + if "day_2880" in days_out: + days_out["day_2880"] += 1 + balance_out["day_2880"] += value + else: + days_out["day_2880"] = 1 + balance_out["day_2880"] = value + elif days < 1 * 3240: + if "day_3240" in days_out: + days_out["day_3240"] += 1 + balance_out["day_3240"] += value + else: + days_out["day_3240"] = 1 + balance_out["day_3240"] = value + elif days < 1 * 3600: + if "day_3600" in days_out: + days_out["day_3600"] += 1 + balance_out["day_3600"] += value + else: + days_out["day_3600"] = 1 + balance_out["day_3600"] = value + else: + if "day_3960" in days_out: + days_out["day_3960"] += 1 + balance_out["day_3960"] += value + else: + days_out["day_3960"] = 1 + balance_out["day_3960"] = value + return days_out, balance_out +# “summary_balance_days”功能似乎旨在根据持有余额的天数对余额进行分类。以下是其工作原理的细分: + # + # -参数: + # - 'dt':当前日期。 + # - “dt2”:获取余额的日期。 + # - 'value':余额的值。 + # - 'days_out':用于存储不同时期余额计数的字典。 + # - 'balance_out':存储不同时期累计余额的字典。 + # + # - 计算天数: + # - 它通过从当前日期 ('dt') 中减去购置日期 ('dt2') 来计算余额持有的天数。此值存储在“days”变量中。 + # + # - 对余额进行分类: + # - 根据计算的天数,该函数将余额分配给特定类别 ('day_X'),其中 + # 'X' + # 表示天数。 + # - 如果余额持有时间少于一天,则将其归类为“day_0”。 + # - 如果余额已持有 + # 1 + # 到 + # 30 + # 天,则分别归类为“day_1”至“day_30”。 + # - 如果余额已持有 + # 31 + # 到 + # 60 + # 天,则将其归类为“day_60”。 + # - 如果余额已持有超过 + # 3600 + # 天(约 + # 10 + # 年),则将其归类为“day_3960”。 + # + # - 累积余额: + # - 对于每个类别,该函数递增余额计数 ('days_out') 并将余额值添加到累计余额 ('balance_out')。 + # + # - 返回结果: + # - 返回更新的“days_out”和“balance_out”字典。 + # 0.01,0.1,1,10,100,1000,10000,total balance + # new addr, total address + def summary_utxos(self, current_price, txid, vout, coinbase, value, height, scriptpubkey, dt, price, dt2): + + self.total_address += 1 + self.total_balance += value + + self.total_rcap += (value * price) + + if coinbase == 1: + self.miner_address += 1 + self.miner_balance += value + + if current_price > price: + self.uprofit +=(value*(current_price-price)) + self.profit_addresses += 1 + if current_price <= price: + self.uloss += (value*(price - current_price)) + self.loss_addresses += 1 + + n = int(price / 1000) + n = n * 1000 + + keystr = "buy" + str(n) + if keystr in self.price_buy: + self.price_buy[keystr] += 1 + else: + self.price_buy[keystr] = 1 + + keystr = "buy_amount" + str(n) + if keystr in self.price_buy_amount: + self.price_buy_amount[keystr] += value + else: + self.price_buy_amount[keystr] = value + + diff = current_price - price + n = int(diff / 1000) + n = n * 1000 + keystr = "diff" + str(n) + if keystr in self.diff_sell: + self.diff_sell[keystr] += 1 + else: + self.diff_sell[keystr] = 1 + + keystr = "diff_amount" + str(n) + if keystr in self.diff_sell_amount: + self.diff_sell_amount[keystr] += value + else: + self.diff_sell_amount[keystr] = value + + + try: + profit = (current_price - price)/(price)*10 + except: + profit = current_price*10 + + if int(profit) < 100: + n = int(profit) + keystr = "profit" + str(n) + if keystr in self.profit_sell: + self.profit_sell[keystr] += 1 + else: + self.profit_sell[keystr] = 1 + + keystr = 
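+        # Reviewer note: the 40-odd 'elif' branches in summary_balance_days above all perform the same
+        # increment against a bucket key, and the price/diff bucketing nearby uses 'int(x / 1000) * 1000',
+        # which truncates toward zero (a -500 difference lands in bucket 0 alongside small gains;
+        # math.floor gives distinct negative buckets). A table-driven sketch of the same day-bucket
+        # logic, assuming the bucket edges used above:
+        #
+        #     import math
+        #
+        #     _EDGES = [(d, "day_%d" % (d - 1)) for d in range(1, 32)] + \
+        #              [(60, "day_60"), (90, "day_90"), (180, "day_180"), (360, "day_360"),
+        #               (540, "day_540"), (720, "day_720"), (1080, "day_1080"), (1440, "day_1440"),
+        #               (1880, "day_1880"), (2160, "day_2160"), (2520, "day_2520"), (2880, "day_2880"),
+        #               (3240, "day_3240"), (3600, "day_3600")]
+        #
+        #     def bucket_key(days):
+        #         for upper, key in _EDGES:
+        #             if days < upper:
+        #                 return key
+        #         return "day_3960"
+        #
+        #     def add_to_bucket(days_out, balance_out, days, value):
+        #         key = bucket_key(days)
+        #         days_out[key] = days_out.get(key, 0) + 1
+        #         balance_out[key] = balance_out.get(key, 0) + value
+        #
+        #     def bucket_1000(x):
+        #         """Floor to the nearest multiple of 1000 (-500 -> -1000, 500 -> 0)."""
+        #         return math.floor(x / 1000) * 1000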
"profit_amount" + str(n) + if keystr in self.profit_sell_amount: + self.profit_sell_amount[keystr] += value + else: + self.profit_sell_amount[keystr] = value + else: + profit = profit/100 + n = int(profit) + keystr = "profit10" + str(n) + if keystr in self.profit_sell: + self.profit_sell[keystr] += 1 + else: + self.profit_sell[keystr] = 1 + + keystr = "profit_amount10" + str(n) + if keystr in self.profit_sell_amount: + self.profit_sell_amount[keystr] += value + else: + self.profit_sell_amount[keystr] = value + + if value < 0.01: + self.balance_0 += 1 + self.balance_amount_0 += value + self.balance_0_days, self.balance_amount_0_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_0_days, + self.balance_amount_0_days) + elif value < 0.1: + self.balance_001 += 1 + self.balance_amount_001 += value + self.balance_001_days, self.balance_amount_001_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_001_days, + self.balance_amount_001_days) + elif value < 1: + self.balance_01 += 1 + self.balance_amount_01 += value + self.balance_01_days, self.balance_amount_01_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_01_days, + self.balance_amount_01_days) + elif value < 10: + self.balance_1 += 1 + self.balance_amount_1 += value + self.balance_1_days, self.balance_amount_1_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_1_days, + self.balance_amount_1_days) + elif value < 100: + self.balance_10 += 1 + self.balance_amount_10 += value + self.balance_10_days, self.balance_amount_10_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_10_days, + self.balance_amount_10_days) + elif value < 1000: + self.balance_100 += 1 + self.balance_amount_100 += value + self.balance_100_days, self.balance_amount_100_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_100_days, + self.balance_amount_100_days) + elif value < 10000: + self.balance_1000 += 1 + self.balance_amount_1000 += value + self.balance_1000_days, self.balance_amount_1000_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_1000_days, + self.balance_amount_1000_days) + else: + self.balance_10000 += 1 + self.balance_amount_10000 += value + self.balance_10000_days, self.balance_amount_10000_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_10000_days, + self.balance_amount_10000_days) + + daysecs = 3600 * 24 + + if self.current_dayutc - dt2 >= 180 * daysecs: + if self.current_dayutc - dt2 <= 365 * daysecs: + self.slrv_6m1y += (value*price) + + if self.current_dayutc - dt2 <= daysecs: + self.slrv_24h += (value*price) + + if self.current_dayutc - dt2 >= 155*daysecs: + self.lth_nupl += (value*(current_price-price)) + self.lth_rcap += (value*price) + self.lth_mv += (value*current_price) + self.lth_supply += value + if current_price > price: + self.lth_profit_supply += value + if current_price < price: + self.lth_loss_supply += value + else: + self.sth_nupl += (value*(price - current_price)) + self.sth_rcap += (value * price) + self.sth_mv += (value * current_price) + self.sth_supply += value + if current_price > price: + self.sth_profit_supply += value + if current_price < price: + self.sth_loss_supply += value + + if self.current_dayutc - dt2 < daysecs: + self.holder_0 += 1 + self.holder_balance_0 += value + elif self.current_dayutc - dt2 < daysecs * 2: + self.holder_1 += 1 + self.holder_balance_1 += value + elif self.current_dayutc - 
dt2 < daysecs * 3: + self.holder_2 += 1 + self.holder_balance_2 += value + elif self.current_dayutc - dt2 < daysecs * 4: + self.holder_3 += 1 + self.holder_balance_3 += value + elif self.current_dayutc - dt2 < daysecs * 5: + self.holder_4 += 1 + self.holder_balance_4 += value + elif self.current_dayutc - dt2 < daysecs * 6: + self.holder_5 += 1 + self.holder_balance_5 += value + elif self.current_dayutc - dt2 < daysecs * 7: + self.holder_6 += 1 + self.holder_balance_6 += value + elif self.current_dayutc - dt2 < daysecs * 8: + self.holder_7 += 1 + self.holder_balance_7 += value + elif self.current_dayutc - dt2 < daysecs * 15: + self.holder_15 += 1 + self.holder_balance_15 += value + elif self.current_dayutc - dt2 < daysecs * 30: + self.holder_30 += 1 + self.holder_balance_30 += value + elif self.current_dayutc - dt2 < daysecs * 60: + self.holder_60 += 1 + self.holder_balance_60 += value + elif self.current_dayutc - dt2 < daysecs * 90: + self.holder_90 += 1 + self.holder_balance_90 += value + elif self.current_dayutc - dt2 < daysecs * 180: + self.holder_180 += 1 + self.holder_balance_180 += value + elif self.current_dayutc - dt2 < daysecs * 360: + self.holder_360 += 1 + self.holder_balance_360 += value + elif self.current_dayutc - dt2 < daysecs * 540: + self.holder_540 += 1 + self.holder_balance_540 += value + elif self.current_dayutc - dt2 < daysecs * 720: + self.holder_720 += 1 + self.holder_balance_720 += value + elif self.current_dayutc - dt2 < daysecs * 1080: + self.holder_1080 += 1 + self.holder_balance_1080 += value + elif self.current_dayutc - dt2 < daysecs * 1440: + self.holder_1440 += 1 + self.holder_balance_1440 += value + elif self.current_dayutc - dt2 < daysecs * 1880: + self.holder_1800 += 1 + self.holder_balance_1800 += value + elif self.current_dayutc - dt2 < daysecs * 2160: + self.holder_2160 += 1 + self.holder_balance_2160 += value + elif self.current_dayutc - dt2 < daysecs * 2520: + self.holder_2520 += 1 + self.holder_balance_2520 += value + elif self.current_dayutc - dt2 < daysecs * 2880: + self.holder_2880 += 1 + self.holder_balance_2880 += value + elif self.current_dayutc - dt2 < daysecs * 3240: + self.holder_3240 += 1 + self.holder_balance_3240 += value + elif self.current_dayutc - dt2 < daysecs * 3600: + self.holder_3600 += 1 + self.holder_balance_3600 += value + else: + self.holder_3960 += 1 + self.holder_balance_3960 += value +# “summary_utxos”功能似乎是用于分析交易及其相关余额的更大系统的一部分。以下是该函数的功能细分: + # + # - ** 更新计数器和总计: ** + # - 'self.total_address':递增 + # 1 + # 以计算地址总数。 + # - “self.total_balance”:累积总余额。 + # - 'self.total_rcap':累计已实现的总资本化(余额 * 价格)。 + # - “self.miner_address”和“self.miner_balance”:如果交易是 + # coinbase + # 交易(“coinbase == 1”),则增加矿工地址计数并累积矿工余额。 + # + # - ** 盈亏计算: ** + # - 'self.uprofit' + # 和 + # 'self.uloss':如果当前价格分别高于或低于成交价格,则计算并累计未实现损益。 + # - “self.profit_addresses”和“self.loss_addresses”:统计有未实现盈亏的地址。 + # + # - ** 价格分析: ** + # - 'n':将价格四舍五入到最接近的千位。 + # - 'keystr':根据四舍五入的价格生成密钥。 + # - 更新买入价格(“self.price_buy”)及其相应金额(“self.price_buy_amount”)的计数器。 + # - 计算当前价格和交易价格之间的差额 ('diff'),将其四舍五入到最接近的千位,并更新价格差异计数器('self.diff_sell' + # 和 + # 'self.diff_sell_amount')。 + # + # - ** 利润百分比分析: ** + # - 根据当前价格和交易价格之间的差额计算利润百分比(“利润”)。 + # - 将利润分类为不同的范围,并相应地更新计数器(“self.profit_sell”和“self.profit_sell_amount”)。 + # + # - ** 余额分类: ** + # - 根据余额的值将余额分类为不同的范围,并相应地更新计数器。 + # - 对于每个范围,更新地址计数 ('self.balance_X') 和累计余额 ('self.balance_amount_X') 的计数器。 + # + # - ** 基于时间的分析: ** + # - 分析余额被持有的时间(“self.summary_balance_days”)并相应地更新计数器。 + # + # - ** 其他指标: ** + # - 
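+        # Reviewer note: the 155-day cutoff used above is the conventional long-term-holder (LTH)
+        # threshold; younger UTXOs feed the short-term-holder (STH) aggregates. Two details worth
+        # confirming: the STH branch accumulates value * (price - current_price), the opposite sign of
+        # the LTH NUPL term, and the 1880-day threshold in this chain updates counters named
+        # holder_1800 / holder_balance_1800. A compact sketch of the cohort accounting for a single
+        # UTXO, assuming acc holds per-cohort totals:
+        #
+        #     LTH_DAYS = 155
+        #
+        #     def classify_utxo(age_days, value, cost_price, spot_price, acc):
+        #         cohort = "lth" if age_days >= LTH_DAYS else "sth"
+        #         acc[cohort + "_supply"] += value
+        #         acc[cohort + "_rcap"] += value * cost_price    # realized value
+        #         acc[cohort + "_mv"] += value * spot_price      # market value
+        #         acc[cohort + "_nupl"] += value * (spot_price - cost_price)
+        #         return acc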
根据余额持有的时间长度更新各种指标,例如短期和长期已实现资本化、市场价值、供应、利润和损失。 + # + # - ** 持有人分析: ** + # - 根据持有余额的时间对持有人进行分类,并更新相应的计数器,以显示持有人的数量及其累计余额。 + # + # 总体而言,此功能似乎提供了对交易数据的全面分析,包括与余额、价格、利润和持有人行为相关的各种指标。 + def save_db(self): + db_conn = self.init_db() + with db_conn.cursor() as cursor: + + sql_insert = 'REPLACE INTO `utxosv3` (`unixdt`, `total_address`, `total_balance`, `total_rcap`, `miner_address`,`miner_balance`, `balance_0`, `balance_001`, `balance_01`, `balance_1`, `balance_10`,`balance_100`, `balance_1000`, `balance_10000`, uprofit, uloss, lthnupl, sthnupl, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.total_address, self.total_balance, self.total_rcap, self.miner_address, self.miner_balance, self.balance_0, + self.balance_001, + self.balance_01, self.balance_1, self.balance_10, self.balance_100, self.balance_1000, self.balance_10000, self.uprofit, self.uloss, self.lth_nupl, self.sth_nupl, self.lth_mv, self.lth_rcap, self.sth_mv, self.sth_rcap, self.lth_mvrv, self.sth_mvrv)) + sql_insert = 'REPLACE INTO `utxos3nd` (`unixdt`, `balance_amount_0`, `balance_amount_001`, `balance_amount_01`, `balance_amount_1`, `balance_amount_10`,`balance_amount_100`, `balance_amount_1000`, `balance_amount_10000`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s,%s, %s, %s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.balance_amount_0, + self.balance_amount_001, + self.balance_amount_01, self.balance_amount_1, self.balance_amount_10, self.balance_amount_100, self.balance_amount_1000, + self.balance_amount_10000)) + sql_insert = 'REPLACE INTO `holder3` (`unixdt`,`holder_0`,`holder_1`,`holder_2`,`holder_3`,`holder_4`,`holder_5`,`holder_6`,`holder_7`,`holder_15`,`holder_30`,`holder_60`,`holder_90`,`holder_180`,`holder_360`,`holder_540`,`holder_720`,`holder_1080`,`holder_1440`,`holder_1800`,`holder_2160`,`holder_2520`,`holder_2880`,`holder_3240`,`holder_3600`,`holder_3960`) VALUES(FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.holder_0, self.holder_1, self.holder_2, self.holder_3, self.holder_4, self.holder_5, self.holder_6, self.holder_7, + self.holder_15, + self.holder_30, self.holder_60, self.holder_90, self.holder_180, self.holder_360, self.holder_540, self.holder_720, self.holder_1080, + self.holder_1440, + self.holder_1800, self.holder_2160, self.holder_2520, self.holder_2880, self.holder_3240, self.holder_3600, self.holder_3960)) + sql_insert = 'REPLACE INTO `holder_balance3` (`unixdt`,`holder_balance_0`,`holder_balance_1`,`holder_balance_2`,`holder_balance_3`,`holder_balance_4`,`holder_balance_5`,`holder_balance_6`,`holder_balance_7`,`holder_balance_15`,`holder_balance_30`,`holder_balance_60`,`holder_balance_90`,`holder_balance_180`,`holder_balance_360`,`holder_balance_540`,`holder_balance_720`,`holder_balance_1080`,`holder_balance_1440`,`holder_balance_1800`,`holder_balance_2160`,`holder_balance_2520`,`holder_balance_2880`,`holder_balance_3240`,`holder_balance_3600`,`holder_balance_3960`) VALUES(FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.holder_balance_0, self.holder_balance_1, self.holder_balance_2, self.holder_balance_3, + self.holder_balance_4, + self.holder_balance_5, 
self.holder_balance_6, self.holder_balance_7, self.holder_balance_15, + self.holder_balance_30, self.holder_balance_60, self.holder_balance_90, self.holder_balance_180, self.holder_balance_360, + self.holder_balance_540, self.holder_balance_720, self.holder_balance_1080, self.holder_balance_1440, + self.holder_balance_1800, self.holder_balance_2160, self.holder_balance_2520, self.holder_balance_2880, self.holder_balance_3240, + self.holder_balance_3600, self.holder_balance_3960)) + + + #v2 + if self.loss_addresses > 0: + self.profit_ratio = self.profit_addresses/self.loss_addresses + if self.total_balance > 0: + self.realized_price = self.total_rcap/self.total_balance + if self.sth_loss_supply > 0: + self.sth_profit_ratio = self.sth_profit_supply/self.sth_loss_supply + if self.lth_loss_supply > 0: + self.lth_profit_ratio = self.lth_profit_supply / self.lth_loss_supply + if self.sth_profit_ratio > 0: + self.relative_lth_sth = self.lth_profit_ratio/self.sth_profit_ratio + if self.slrv_6m1y > 0: + self.slrv_ratio = self.slrv_24h/self.slrv_6m1y + + sql_insert = 'REPLACE INTO `utxosv4` (`unixdt`,`profit_addresses`,`loss_addresses`,`profit_ratio`,`lth_supply`,`sth_supply`,`realized_price`,`relative_lth_sth`,`lth_profit_supply`,`lth_loss_supply`,`lth_profit_ratio`,`sth_profit_supply`,`sth_loss_supply`,`sth_profit_ratio`,`slrv_ratio`) VALUES(FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' + cursor.execute(sql_insert, (self.current_dayutc, self.profit_addresses, self.loss_addresses, self.profit_ratio, self.lth_supply, self.sth_supply, self.realized_price, self.relative_lth_sth, self.lth_profit_supply, self.lth_loss_supply, self.lth_profit_ratio, self.sth_profit_supply, self.sth_loss_supply, self.sth_profit_ratio, self.slrv_ratio)) + db_conn.commit() + + + mc = pymongo.MongoClient("mongodb://10.168.3.192:27018/") + mdb = mc["btcutxos2"] + # mdb.authenticate("root", "123456") + + self.price_buy["unixdt"] = int(self.current_dt) + mdbc_buy = mdb["buy"] + mdbc_buy.insert_one(self.price_buy) + print(mdbc_buy.find_one()) + self.price_buy_amount["unixdt"] = int(self.current_dt) + mdbc_buy_amount = mdb["buy_amount"] + mdbc_buy_amount.insert_one(self.price_buy_amount) + print(mdbc_buy_amount.find_one()) + + self.diff_sell["unixdt"] = int(self.current_dt) + mdbc_diff = mdb["diff"] + mdbc_diff.insert_one(self.diff_sell) + print(mdbc_diff.find_one()) + self.diff_sell_amount["unixdt"] = int(self.current_dt) + mdbc_diff_amount = mdb["diff_amount"] + mdbc_diff_amount.insert_one(self.diff_sell_amount) + print(mdbc_diff_amount.find_one()) + + self.profit_sell["unixdt"] = int(self.current_dt) + mdbc_profit = mdb["profit"] + mdbc_profit.insert_one(self.profit_sell) + print(mdbc_profit.find_one()) + self.profit_sell_amount["unixdt"] = int(self.current_dt) + mdbc_profit_amount = mdb["profit_amount"] + mdbc_profit_amount.insert_one(self.profit_sell_amount) + print(mdbc_profit_amount.find_one()) + + self.balance_0_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_0_days"] + mdbc_balance_days.insert_one(self.balance_0_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_0_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_0_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_0_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_001_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_001_days"] + mdbc_balance_days.insert_one(self.balance_001_days) + 
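+        # Reviewer note: the per-collection insert/print blocks in save_db repeat one pattern eight
+        # times for day counts plus eight times for amounts; a small helper keeps the timestamping and
+        # insertion in one place. Sketch (mdb is the pymongo database handle opened above):
+        #
+        #     def store_series(mdb, name, payload, unixdt):
+        #         """Stamp a metrics dict and persist it to the named collection."""
+        #         payload["unixdt"] = unixdt
+        #         mdb[name].insert_one(payload)
+        #
+        #     # e.g. store_series(mdb, "balance_0_days", self.balance_0_days, int(self.current_dt))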
print(mdbc_balance_days.find_one()) + self.balance_amount_001_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_001_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_001_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_01_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_01_days"] + mdbc_balance_days.insert_one(self.balance_01_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_01_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_01_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_01_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_1_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_1_days"] + mdbc_balance_days.insert_one(self.balance_1_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_1_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_1_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_1_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_10_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_10_days"] + mdbc_balance_days.insert_one(self.balance_10_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_10_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_10_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_10_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_100_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_100_days"] + mdbc_balance_days.insert_one(self.balance_100_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_100_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_100_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_100_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_1000_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_1000_days"] + mdbc_balance_days.insert_one(self.balance_1000_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_1000_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_1000_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_1000_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_10000_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_10000_days"] + mdbc_balance_days.insert_one(self.balance_10000_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_10000_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_10000_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_10000_days) + print(mdbc_balance_amount_days.find_one()) +# “save_db”方法负责将分析期间收集的数据保存到数据库和MongoDB实例中。以下是它的作用的细分: + # - MySQL数据库: ** + # - 使用'init_db'方法连接到MySQL数据库。 + # - 使用SQL查询将记录插入或替换到多个表('utxosv1'、'utxos2nd'、'holder'、'holder_balance'和'utxosv2')中。这些表包含用于分析的各种指标和汇总数据。 + # - ** MongoDB: ** + # - 连接到MongoDB实例。 + # - 将数据插入多个集合('buy'、'buy_amount'、'diff'、'diff_amount'、'profit'、'profit_amount'、'balance_0_days'、'balance_amount_0_days' + # 等)中,以进行不同类型的分析。 + # 对于MongoDB中的每个集合,该方法将数据插入到集合中,并打印插入的文档以进行验证。 + # 此外,该方法还会计算和更新一些其他指标(如利润率、实现价格、短期和长期持有者的利润率、相对比率等),并将它们插入“utxosv2”表中。 + # 总体而言,这种方法有助于将分析的数据存储到关系数据库 (MySQL) 和NoSQL数据库 (MongoDB) 中,从而便于检索和进一步分析。 + def get_history_price(self, batch_size=5000): + """获取数据库中的 Nasdaq 数据,存入字典""" + db_config 
= { + "host": "192.168.194.240", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict = {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = "SELECT timestamp, price FROM btc_prices WHERE source = 'Nasdaq' ORDER BY timestamp LIMIT %s OFFSET %s" + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 最后一页读取完成 + + return self.pricedict + #prices = {} + #response_price = requests.get( + # 'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU;api_key=FZqXog4sR-b7cYnXcRVV') + #if response_price.status_code == 200: + # #print(response_price.content) + # priceweb = ujson.loads(response_price.content) + # if "datatable" in priceweb: + # priceset = priceweb["datatable"] + # if "data" in priceset: + # pricedata = priceset["data"] + # for price in pricedata: + # daystr = price[1] + # p = price[2] + # dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d")) + # prices[str(int(dayutc))] = float(p) + # #print(price, int(dayutc), g_prices[str(int(dayutc))]) + #return prices +# “get_history_price”方法似乎用于从WebAPI终结点检索历史价格数据。以下是它的作用的细分: + # - 初始化一个空字典“prices”来存储历史价格数据。 + # - 向指定的API端点发送HTTP GET请求,该端点可能提供历史比特币价格数据。 + # - 检查响应状态代码是否为200(表示响应成功)。 + # - 如果响应成功: + # - 使用“ujson”库解析JSON响应。 + # - 检查解析的JSON响应中是否存在键'“dataset''。 + # - 如果'“dataset”'存在: + # - 检索“data”' 字段,该字段可能包含历史价格数据点的列表。 + # - 遍历列表中的每个价格数据点。 + # - 使用'strptime'将日期字符串 ('daystr') 解析为Unix时间戳 ('dayutc')。 + # - 将Unix 时间戳作为键存储在“prices”字典中,并将相应的price ('p') 作为值。 + # 最后,它返回包含历史价格数据的“prices”字典,其中Unix时间戳作为键,价格作为值 + def get_history_price2(self, batch_size=5000): + #pricedict = {} + """获取数据库中的 Messari 数据,存入字典""" + db_config = { + "host": "192.168.194.240", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict = {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = """ + SELECT timestamp, price + FROM btc_prices + WHERE source = 'CryptoCompare' + ORDER BY timestamp + LIMIT %s OFFSET %s + """ + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 数据已全部读取 + + return self.pricedict + #dayt = time.gmtime() + #daystr = time.strftime("%Y", dayt) + #year = int(daystr) + #end_year = year + #while True: + # url = "" + # if end_year != year: + # start_year = end_year + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + # else: + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?after=" + str( + # year) + "-01-01&order=descending&interval=1d" +# +# if end_year != year: +# url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d" +# header_set = {} +# header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY" +# # header_set["Content-Type"] = 
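+        # Reviewer note: the narrative comments around these methods still describe the retired
+        # Nasdaq/Messari web-API path (kept commented out); the active code pages through the
+        # btc_prices MySQL table with LIMIT/OFFSET, which rescans all skipped rows on every batch and
+        # reopens the connection per batch. Since results are ordered by timestamp, keyset pagination
+        # does the same job with one index range scan per batch over a single connection. Sketch
+        # (same table and columns):
+        #
+        #     last_ts = -1
+        #     while True:
+        #         cursor.execute(
+        #             "SELECT timestamp, price FROM btc_prices "
+        #             "WHERE source = %s AND timestamp > %s ORDER BY timestamp LIMIT %s",
+        #             ("Nasdaq", last_ts, batch_size))
+        #         rows = cursor.fetchall()
+        #         if not rows:
+        #             break
+        #         for ts, price in rows:
+        #             pricedict[str(int(ts))] = float(price)
+        #         last_ts = int(rows[-1][0])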
"application/json" +# print(header_set, url) +# response_price = requests.get(url, headers=header_set) +# # print(response_price) +# if response_price.status_code == 200: +# # print(response_price.content) +# priceweb = ujson.loads(response_price.content) +# if "data" in priceweb: +# priceset = priceweb["data"] +# if "values" in priceset: +# valueset = priceset["values"] +# if valueset is not None: +# for supply in valueset: +# dayutc = int(supply[0] / 1000) +# s = supply[1] +# ret_time = time.gmtime(dayutc) +# ret_daystr = time.strftime("%d %b %Y", ret_time) +# ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y"))) +# pricedict[str(ret_dayutc)] = float(s) +# # print(s, dayutc, pricedict[str(dayutc)]) +# # break +# else: +# break +# else: +# break +# end_year -= 1 +# time.sleep(2) +# print(pricedict) +# return pricedict +# 'get_history + # 初始化与时间相关的变量,包括当前年份。 + # 进入一个循环,从当前年份开始,向后循环访问年份。 + # 根据当前年份和端点构造一个URL,用于获取比特币价格数据。 + # 向构造的URL发送HTTP + # GET请求,包括标头中的特定API密钥。 + # 检查响应状态代码是否为 + # 2 + # 使用'u 解析 JSON 响应 + # 从JSON响应中提取相关价格数据,并将其添加到“pricedict” + # 将时间戳转换为人类可读的日期字符串,然后转换回Unix时间戳,并将它们作为键存储在'pricedic 中 + # 在发出下一个请求之前暂停执行2秒,以避免API过载。 + # 继续此过程,直到遍历所有年份或发生错误。 + # 最后,它返回“pricedict” + def get_current_price(self): + price = 0 + DB_CONFIG = { + "host": "192.168.194.240", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423 + } + connection = pymysql.connect(**DB_CONFIG) + try: + with connection.cursor() as cursor: + for source in ("binance", "coinbase"): + cursor.execute(""" + SELECT price FROM btc_realtime_prices + WHERE source=%s + ORDER BY timestamp DESC + LIMIT 1 + """, (source,)) + row = cursor.fetchone() + if row: + price = float(row[0]) + break + finally: + connection.close() + return price + #price = 0 + #try: + # response_price = requests.get( + # 'https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT') + # prices = ujson.loads(response_price.text) + # price = float(prices["price"]) + # print(response_price.text, price) + # response_price.close() + # # print("price", price) + # return price + #except: + # response_price = requests.get("https://api.coinpaprika.com/v1/tickers/btc-bitcoin") + # prices = ujson.loads(response_price.text) + # price = float(prices["quotes"]["USD"]["price"]) + # response_price.close() + # return price +# 该函数似乎是一种从特定 + # API端点检索比特币当前价格的方法。以下是其功能的细分:get_current_price + # 初始化默认值为0的变量。price + # 向指定的URL () 发送HTTP + # GET请求,以获取当前以美元为单位的比特币价格。'https://bitcoinexplorer.org/api/price/usd' + # 检查响应状态代码是否为200,表示响应成功。 + # 如果响应成功,它将打印响应文本(大概是当前价格),并在删除逗号并将其转换为浮点数后将其分配给变量。price + # 关闭响应对象。 + # 打印检索到的价格以进行调试。 + # 返回检索到的价格。 + # 此函数实质上是从指定的API端点检索和处理当前比特币价格,并将其作为浮点数返回 + def get_ht(self): + try: + with open('height_time.csv', mode='r') as f: + reader = csv.reader(f) + height_time = {rows[0]: rows[1] for rows in reader} + '''for key in height_time.keys(): + print(key, height_time[key]) + break''' + return height_time + return None + except: + return None +# 这get_ht函数似乎从名为“height_time.csv” + # 的CSV文件中读取数据并将其转换为字典,其中键取自第一列,值取自第二列。以下是其功能的细分: + # 尝试在读取模式下打开文件“height_time.csv” + # 初始化名为“height”的字典 + # 使用“csv.reade”循环访问CSV文件中的每一行 + # 构造一个字典,其中每个键值对对应于CSV文件中的一行,键取自第一列,值取自第二列。 + # 返回“height_time + # 如果在文件读取或字典构造过程中发生异常,该函数将返回“无” + # 此函数实质上是从CSV文件中读取数据并将其作为字典返回,该字典可能在代码中的其他位置用于进一步处理或分析 + def get_day_utc(self, utc_time): + t = time.gmtime(utc_time) + daystr = time.strftime("%d %b %Y", t) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + return dayutc +# 该函数将 + # UTC时间戳作为输入,并返回表示与该输入时间戳对应的一天 (00:00:00UTC) 开始的 + # 
UTC时间戳。以下是其功能的细分:get_day_utc + # 它接收UTC时间戳作为输入。utc_time + # 它使用将UTC时间戳转换为时间元组。此元组表示UTC中的时间。time.gmtime() + # 它将时间元组格式化为一个字符串,以“DD Mon YYYY” + # (例如,“2022年1月1日”)表示日期。 + # 它使用和将此格式化的字符串转换回UTC时间戳。这有效地将时间设置为同一天的 + # 00:00:00UTC。time.strptime() + # time.mktime() + # 它返回表示一天开始的UTC时间戳。 + # 总体而言,此函数可用于将任何UTC时间戳转换为UTC中相应日期的开始 + def get_dh_height(self): + try: + with open('daily_height.csv', mode='r') as f: + reader = csv.reader(f) + daily_height = {rows[0]: rows[1] for rows in reader} + return daily_height + return None + except: + print("failed open daily_height.csv") + return None +# 该函数尝试从名为“daily_height.csv”的CSV + # 文件中读取数据,并将其转换为字典,其中键是日期,值是相应的高度。get_dh_height + # 其工作原理如下: + # 它尝试在读取模式下打开文件“daily_height.csv”。 + # 如果文件已成功打开,它将使用函数读取其内容,该函数将返回 + # CSV文件各行的可迭代对象。csv.reader() + # 然后,它将每行转换为键值对,其中第一列(索引0)表示日期,第二列(索引1)表示高度。这是使用字典理解来完成的。 + # 返回生成的字典。daily_height + # 如果在此过程中发生任何错误(例如无法打开文件),它会打印一条消息指示失败并返回. + # None + # 此函数可用于从CSV文件中检索每日身高数据,以便在程序中进行进一步处理或分析 + def get_dh_hash(self): + try: + with open('daily_height.csv', mode='r') as f: + reader = csv.reader(f) + daily_hash = {rows[0]: rows[2] for rows in reader} + return daily_hash + return None + except: + print("failed open daily_height.csv") + return None +# 该函数类似于 ,但它不是检索高度值,而是从同一个CSV + # 文件 “daily_height.csv” 中检索哈希值。get_dh_hashget_dh_height + # 以下是其工作原理的细分: + # 它尝试在读取模式下打开文件“daily_height.csv”。 + # 如果文件已成功打开,它将使用函数读取其内容,该函数将返回 + # CSV文件各行的可迭代对象。csv.reader() + # 然后,它将每行转换为键值对,其中第一列(索引0)表示日期,第三列(索引2)表示哈希值。这是使用字典理解来完成的。 + # 返回生成的字典。daily_hash + # 如果在此过程中发生任何错误(例如无法打开文件),它会打印一条消息指示失败并返回. + # None与 + # 一样,此函数可用于从CSV文件中检索每日哈希数据,以便在程序中进一步处理或分析。get_dh_height + def get_daily_height(self): + height = 1 + last_dayutc = None + daily_height = self.get_dh_height() + daily_hash = self.get_dh_hash() + #print(daily_height) + #print(daily_hash) + if daily_height is None: + daily_height = {} + daily_hash = {} + else: + if len(daily_height) > 0: + item = daily_height.popitem() + #print(item, type(item[1])) + height = int(item[1])+1 + daily_height[item[0]] = int(item[1]) + + ip = "127.0.0.1" + port = "8332" + user = "user" + password = "password" + + rpc = RPC(ip, port, user, password) +# total_height = rpc.blockchain.get_block_count() + while True: + try: + total_height = rpc.blockchain.get_block_count() + break + except Exception as e: + print("rpctimeout") + time.sleep(10) + rpc = RPC(ip, port, user, password) + if height >= total_height: + return + prev_height = None + prev_hash = None + while True: + blockh = None + while True: + try: + blockh = rpc.blockchain.get_block_header(height, True) + #print(blockh) + break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + + if blockh is not None: + block_time = blockh["time"] + block_height = blockh["height"] + block_hash = blockh["hash"] + dayutc = self.get_day_utc(block_time) + if last_dayutc is None: + last_dayutc = dayutc + print(dayutc, last_dayutc) + if dayutc != last_dayutc: + daily_height[str(last_dayutc)] = prev_height + daily_hash[str(last_dayutc)] = prev_hash + last_dayutc = dayutc + #print(dayutc, daily_height[str(dayutc)], daily_hash[str(dayutc)]) + prev_height = block_height + prev_hash = block_hash + while True: + try: + total_height = rpc.blockchain.get_block_count() + if height == total_height: + break + else: + height += 1 + print("next height " + str(height)) + break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + + if height == total_height: + break + + with open('daily_height.csv', 'w') as f: + for key in 
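+    # Reviewer note: get_day_utc above mixes time.gmtime (UTC) with time.mktime, which interprets its
+    # argument as *local* time, so the result is a true 00:00:00 UTC boundary only on a host running
+    # in UTC. A timezone-independent sketch:
+    #
+    #     import calendar
+    #     import time
+    #
+    #     def day_start_utc(ts):
+    #         """Floor a unix timestamp to 00:00:00 UTC of its calendar day."""
+    #         t = time.gmtime(ts)
+    #         return calendar.timegm((t.tm_year, t.tm_mon, t.tm_mday, 0, 0, 0, 0, 0, 0))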
daily_height.keys(): + #print(key) + #print(daily_height[key]) + #print(daily_hash[key]) + f.write("%s, %s, %s\n" % (key, daily_height[key], daily_hash[key])) + f.close() +# 该函数负责从区块链中检索每日身高数据并将其存储在名为“daily_height.csv”的CSV文件中。其工作原理如下: + # get_daily_height + # 它使用默认值1初始化变量,并将其设置为 。heightlast_dayutcNone + # 它调用 + # 和 + # 函数以从CSV文件中检索任何现有的每日高度和哈希数据。如果数据不可用,它将初始化空字典。get_dh_heightget_dh_hash + # 如果存在现有的每日身高数据,它将检索最后一个条目,将身高递增1,并使用新的身高值更新字典。 + # 它设置了与比特币区块链交互的RPC连接细节。 + # 它使用RPC调用检索区块链的总高度。rpc.blockchain.get_block_count() + # 它进入一个循环来获取每个区块高度的区块头,直到达到区块链的总高度。 + # 在循环中,如果RPC调用期间出现异常,它会尝试重新连接,从而处理网络问题。 + # 对于成功获取的每个区块标头,它都会提取区块时间、高度、哈希值和UTC日。 + # 如果UTC日发生更改,它会使用当天以前的高度和哈希值更新每日高度和哈希字典。 + # 它将每日高度和哈希数据写入“daily_height.csv”文件。 + # 一旦它处理了所有区块高度,直到区块链的总高度,该函数就会结束。 + # 该功能使用最新的区块链信息有效地更新CSV文件中的每日高度和哈希数据,方便历史数据分析或程序内的其他目的。 + def get_height_timestamp(self): + height = 0 + height_time = self.get_ht() + if height_time is None: + height_time = {} + else: + height = len(height_time) + + print("exist height", height) + + ip = "127.0.0.1" + port = "8332" + user = "user" + password = "password" + + rpc = RPC(ip, port, user, password) +# total_height = rpc.blockchain.get_block_count() + while True: + try: + total_height = rpc.blockchain.get_block_count() + break + except Exception as e: + print("rpctimeout") + time.sleep(10) + rpc = RPC(ip, port, user, password) + if height >= total_height: + return + #total_height = rpc.blockchain.get_block_count() + # print("last_height", total_height) + + while True: + blockh = None + while True: + try: + blockh = rpc.blockchain.get_block_header(height, True) + #print(blockh) + break + except: + time.sleep(1) + print("reconnect") + rpc = RPC(ip, port, user, password) + + if blockh is not None: + block_time = blockh["time"] + block_height = blockh["height"] + height_time[str(block_height)] = block_time + #print(str(block_height), height_time[str(block_height)]) + + while True: + try: + total_height = rpc.blockchain.get_block_count() + if height == total_height: + break + else: + height += 1 + print("next height " + str(height)) + break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + + if height == total_height: + break + + with open('height_time.csv', 'w') as f: + for key in height_time.keys(): + f.write("%s, %s\n" % (key, height_time[key])) + f.close() +# 该函数从区块链中检索区块高度和相应的时间戳,并将它们存储在名为“height_time.csv”的CSV文件中。以下是其工作原理的细分: + # get_height_timestamp + # 它将变量初始化为0,并使用该函数检索现有的高度时间数据。如果不存在任何数据,则初始化一个空字典。heightget_ht + # 它设置了与比特币区块链交互的RPC连接细节。 + # 它使用RPC调用检索区块链的总高度。rpc.blockchain.get_block_count() + # 如果存在现有的高度时间数据,则将变量设置为字典中的条目数。height + # 它进入一个循环来获取每个区块高度的区块头,直到达到区块链的总高度。 + # 在循环中,如果RPC调用期间出现异常,它会尝试重新连接,从而处理网络问题。 + # 对于每个成功获取的块头,它都会提取块时间和高度,并更新高度时间字典。 + # 它将高度时间数据写入“height_time.csv”文件。 + # 循环一直持续到它处理完所有区块高度,直到区块链的总高度。 + # 该功能有效地将CSV文件中的高度时间数据更新为最新的区块链信息,方便程序内的历史数据分析或其他目的。 + def handle_utxos(self, prices, ht, check_dayutc): + current_price = 0 + #current_price = self.get_current_price() + #if current_price == 0: + # return + print("handle_utxos initiate") + connin = sqlite3.connect("utxos.sqlite") + cin = connin.cursor() + cursorin = cin.execute("SELECT * from utxos") + + #connout = sqlite3.connect("utxos.db") + #cout = connout.cursor() + #cout.execute('CREATE TABLE IF NOT EXISTS utxos(txid TEXT PRIMARY KEY NOT NULL, vout INT, value INT, coinbase INT, height INT, scriptpubkey TEXT, dt TIMESTAMP, price INT, dt2 TIMESTAMP)') + #connout.commit() + coin_idx = 0 + rpc = None + for row in cursorin: + #print(row) + txid = row[0] + vout = row[1] + 
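+    # Reviewer note: the while/try/sleep/reconnect pattern wrapped around every RPC call above recurs
+    # throughout this file; a single wrapper expresses it once. Sketch (connection parameters are the
+    # hard-coded ones used above; fn_name is a dotted easybitcoinrpc attribute path):
+    #
+    #     def rpc_call(rpc, fn_name, *args, retry_delay=1):
+    #         """Call an RPC method by dotted name, reconnecting until it succeeds."""
+    #         while True:
+    #             try:
+    #                 obj = rpc
+    #                 for part in fn_name.split("."):
+    #                     obj = getattr(obj, part)
+    #                 return rpc, obj(*args)
+    #             except Exception:
+    #                 time.sleep(retry_delay)
+    #                 rpc = RPC("127.0.0.1", "8332", "user", "password")
+    #
+    #     # e.g. rpc, header = rpc_call(rpc, "blockchain.get_block_header", height, True)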
value = row[2] + coinbase = row[3] + height = row[4] + scriptpubkey = row[5] + dt = ht[str(height)] + #print(dt) + dt2 = time.gmtime(int(dt)) + daystr = time.strftime("%d %b %Y", dt2) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + price = 0 + d_t = datetime.utcfromtimestamp(dayutc) + timedelta(hours=8) + d_t08am = datetime(d_t.year, d_t.month, d_t.day, 8, 0, 0) + dayutc=int((d_t08am - timedelta(hours=8)).timestamp())-28800 + d_t2 = datetime.utcfromtimestamp(check_dayutc) + timedelta(hours=8) + d_t208am = datetime(d_t2.year, d_t2.month, d_t2.day, 8, 0, 0) + check_dayutc=int((d_t208am - timedelta(hours=8)).timestamp())-28800 + print("ceshi",5) + if str(dayutc) in prices: + price = int(prices[str(dayutc)]) + else: + print("failed get tx price") + return + + if str(check_dayutc) in prices: + current_price = int(prices[str(check_dayutc)]) + else: + print("failed get check price") + return + + value = value / 100000000 + #print(self.current_dayutc, txid, value, coinbase, height, daystr, dayutc, price, current_price) + + self.summary_utxos(current_price, txid, vout, coinbase, value, height, scriptpubkey, dt, price, dayutc) + self.summary_toplist(txid, vout, coinbase, value, height, scriptpubkey, dt, price, dayutc) + + #sql_insert = 'INSERT INTO utxos VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)' + #insert_data = (txid, vout, value, coinbase, height, scriptpubkey, dt, price, dayutc) + #cout.execute(sql_insert, insert_data) + #if coin_idx % (1024 * 1024) == 0: + #print(coin_idx) + #connout.commit() + coin_idx+=1 + #connout.commit() + if self.lth_rcap > 0: + self.lth_mvrv = self.lth_mv / self.lth_rcap + if self.sth_rcap > 0: + self.sth_mvrv = self.sth_mv / self.sth_rcap + print("save_db initiate") + self.save_db() + print("save_db ok") + + cin.close() + #cout.close() + connin.close() + #connout.close() +# 该方法似乎用于处理来自名为“utxos.sqlite”的 + # SQLite + # 数据库的未使用的事务输出 (UTXO)。以下是其功能的细分:handle_utxos + # 它需要三个参数:(包含历史价格的字典)、(包含区块高度和时间戳的字典)和(检查价格的特定日期)。priceshtcheck_dayutc + # 它初始化为0和0。current_pricecoin_idx + # 它建立与SQLite + # 数据库的连接,并检索用于执行SQL查询的游标。 + # 它循环访问从数据库提取的表中的每一行。utxos对于每一行,它提取各种属性,例如交易 + # ID()、输出索引 ()、值、是否是coinbase交易 ()、区块高度 ()、scriptPubKey () 和时间戳 ()。txidvoutcoinbaseheightscriptpubkeydt + # 它将时间戳转换为人类可读的日期字符串,然后转换为UTC时间戳。dtdaystrdayutc + # 它从字典中获取与对应的历史价格,并将其分配给变量。dayutcpricesprice + # 它检查字典中是否存在 ,并将其相应的价格分配给 。check_dayutcpricescurrent_price + # 它将UTXO值除以1000000000以将其转换为比特币单位。 + # 它调用和方法与相关参数来处理UTXO数据。summary_utxossummary_toplist + # 如有必要,根据某些条件进行计算。lth_mvrvsth_mvrv + # 最后,它调用该方法将处理后的数据保存到数据库中。save_db + # 它关闭游标和数据库连接。 + # 此方法有效地处理来自SQLite数据库的UTXO的处理,执行必要的计算和更新,并将处理后的数据保存回数据库 + def utxos(self, check_dayutc): + + self.current_dt = check_dayutc + self.current_dt2 = time.gmtime(int(self.current_dt)) + self.current_daystr = time.strftime("%d %b %Y", self.current_dt2) + self.current_dayutc = int(time.mktime(time.strptime(self.current_daystr, "%d %b %Y"))) + + topliststr = "toplist" + self.current_daystr + print(topliststr) + self.mdbc_toplist = self.mdb[topliststr] + + # prices_temp = self.get_history_price() + # prices = self.get_history_price2(prices_temp) + prices_temp=btc_historical_price.prices_temp + prices=btc_historical_price.prices + '''for key in prices.keys(): + print(key, prices[key]) + break''' + print(prices) + if len(prices) <= 0: + print("failed get price") + return + ht = self.get_ht() + if ht is None: + print("failed get height and time") + return + + self.handle_utxos(prices, ht, check_dayutc) +# 该方法似乎负责处理 表示的特定日期的未使用事务输出 (UTXO)。以下是其功能的摘要:utxoscheck_dayutc +# +# 它根据提供的 
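+        # Reviewer note: the dayutc arithmetic in handle_utxos above (add 8 hours, rebuild the date at
+        # 08:00, subtract 8 hours, then subtract another 28800 seconds) appears to map a timestamp to
+        # 00:00 Beijing time of its day, and datetime.timestamp() on a naive datetime additionally
+        # depends on the host timezone. A tz-aware sketch that states the intent directly (assuming
+        # that intent is correct):
+        #
+        #     from datetime import datetime, timezone, timedelta
+        #
+        #     CST = timezone(timedelta(hours=8))  # Beijing time
+        #
+        #     def beijing_midnight(ts):
+        #         """Unix timestamp of 00:00 Beijing time on the Beijing-local day containing ts."""
+        #         d = datetime.fromtimestamp(ts, CST)
+        #         return int(d.replace(hour=0, minute=0, second=0, microsecond=0).timestamp())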
.check_dayutc +# 它使用 和 方法检索历史价格数据。get_history_priceget_history_price2 +# 该方法从纳斯达克 API 检索历史比特币价格数据。get_history_price +# 该方法从 Messari API 检索其他历史价格数据。get_history_price2 +# 它使用该方法检索高度和时间戳数据。get_ht +# 它调用该方法以使用检索到的价格和高度数据处理 UTXO。handle_utxos +# 该方法进一步与 SQLite 数据库交互以处理 UTXO 并执行必要的计算。handle_utxos +# 总体而言,该方法协调了获取历史价格和高度数据并使用它来处理特定日期的 UTXO 的过程。如果成功,它将使用处理后的 UTXO 数据更新相关数据库。utxos +if __name__ == '__main__': + if len(sys.argv) > 1: + check_dt = sys.argv[1] + stats = UtxosIf() + setup('mainnet') + stats.get_height_timestamp() + stats.get_daily_height() + daily_hash = stats.get_dh_hash() + #print(daily_hash) + if daily_hash is None: + print("failed get daily height") + else: + os.system("if [ -e utxos.dat ]; then rm utxos.dat; fi") + os.system("if [ -e utxos.sqlite ]; then rm utxos.sqlite; fi") + check_dayutc = int(time.mktime(time.strptime(check_dt, "%Y-%m-%d"))) + cmd = "~/bitcoin-29.0/bin/bitcoin-cli -rpcuser=user -rpcpassword=password -rpcclienttimeout=600 invalidateblock " + daily_hash[str(check_dayutc)] + os.system(cmd) + print("select ok",daily_hash[str(check_dayutc)]) + time.sleep(60); + os.system("~/bitcoin-29.0/bin/bitcoin-cli -rpcuser=user -rpcpassword=password -rpcclienttimeout=1800 dumptxoutset ~/utxos.dat latest") + print("dumptxoutset ok") + time.sleep(60); + #os.system("./utxo_to_sqlite ./utxos.dat ./utxos.sqlite") + os.system("python3 utxo_to_sqlite.py ./utxos.dat ./utxos.sqlite") + print("utxo_to_sqlite ok") + time.sleep(60); + stats.utxos(check_dayutc) + print("utxos stat ok") + cmd = "~/bitcoin-29.0/bin/bitcoin-cli -rpcuser=user -rpcpassword=password reconsiderblock " + daily_hash[str(check_dayutc)] + os.system(cmd) + print("reconsiderblock ok") +# 此脚本似乎基于命令行参数执行与比特币 UTXO(未花费的交易输出)相关的几个任务。以下是它的作用的细分: +# 1. 它检查脚本是否直接运行 ('__name__ == '__main__'')。 +# 2. 它检查是否提供了命令行参数 ('len(sys.argv) > 1')。 +# 3. 它初始化“UtxosIf”类的实例,可能包含与处理 UTXO 相关的方法。 +# 4. 它使用“设置”功能设置比特币网络(假设为“主网”)。 +# 5. 它分别使用“get_height_timestamp”和“get_daily_height”方法检索高度和时间戳数据。 +# 6. 它使用“get_dh_hash”方法获取每日哈希数据。 +# 7. 如果每日哈希数据可用,则继续进行进一步操作: +# - 如果存在现有的“utxos.dat”和“utxos.sqlite”文件,它会删除它们。 +# - 它使用比特币 CLI 命令“invalidateblock”使区块失效。 +# - 它会等待一段时间(大概是网络处理区块失效)。 +# - 它使用比特币CLI命令'dumptxoutset'转储UTXO集。 +# - 它再次等待。 +# - 它使用“utxo_to_sqlite”脚本将转储的 UTXO 数据转换为 SQLite 数据库。 +# - 它使用“utxos”方法处理指定日期的 UTXO。 +# - 它使用比特币 CLI 命令“reconsiderblock”重新考虑之前失效的区块。 +# 8. 
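+# Reviewer note: the os.system chain in the __main__ block above ignores exit codes, so a failed
+# invalidateblock or dumptxoutset would not stop the pipeline before the later steps run. A sketch
+# with error checking, using the same bitcoin-cli path and credentials:
+#
+#     import subprocess
+#
+#     CLI_BIN = os.path.expanduser("~/bitcoin-29.0/bin/bitcoin-cli")
+#     CLI_AUTH = ["-rpcuser=user", "-rpcpassword=password"]
+#
+#     def cli(*args, timeout=1800):
+#         """Run a bitcoin-cli command, raising if it exits non-zero."""
+#         subprocess.run([CLI_BIN] + CLI_AUTH + list(args), check=True, timeout=timeout)
+#
+#     # cli("invalidateblock", daily_hash[str(check_dayutc)])
+#     # cli("dumptxoutset", os.path.expanduser("~/utxos.dat"), "latest")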
每个步骤都附有指示脚本进度的打印语句。 +# 这个脚本似乎是一个更大的系统的一部分,该系统与比特币网络交互并执行与UTXO相关的任务。它严重依赖比特币核心软件和命令行界面(“bitcoin-cli”) diff --git a/lyq/btc_utxos_lyq3.py b/lyq/btc_utxos_lyq3.py new file mode 100644 index 0000000..4d6e5d3 --- /dev/null +++ b/lyq/btc_utxos_lyq3.py @@ -0,0 +1,1838 @@ +# coding=utf-8 +import json +import os +import sys +import ujson +import time +import requests +from loguru import logger +from datetime import datetime, timedelta +from easybitcoinrpc import RPC +import csv +import sqlite3 +from bitcoinutils.script import Script +from bitcoinutils.keys import P2wpkhAddress, P2wshAddress, P2shAddress, PrivateKey, PublicKey, SegwitAddress, P2pkhAddress +from bitcoinutils.setup import setup +import pymysql +import pymongo +from urllib import parse + +import btc_historical_price + + +class UtxosIf: + def __init__(self): + self.balance_0 = 0 + self.balance_001 = 0 + self.balance_01 = 0 + self.balance_1 = 0 + self.balance_10 = 0 + self.balance_100 = 0 + self.balance_1000 = 0 + self.balance_10000 = 0 + + self.balance_amount_0 = 0 + self.balance_amount_001 = 0 + self.balance_amount_01 = 0 + self.balance_amount_1 = 0 + self.balance_amount_10 = 0 + self.balance_amount_100 = 0 + self.balance_amount_1000 = 0 + self.balance_amount_10000 = 0 + + self.profit_addresses = 0 + self.loss_addresses = 0 + self.profit_ratio = 0 + self.lth_supply = 0 + self.sth_supply = 0 + self.realized_price = 0 + self.relative_lth_sth = 0 + self.lth_profit_supply = 0 + self.lth_loss_supply = 0 + self.lth_profit_ratio = 0 + self.sth_profit_supply = 0 + self.sth_loss_supply = 0 + self.sth_profit_ratio = 0 + self.slrv_ratio = 0 + self.slrv_24h = 0 + self.slrv_6m1y = 0 + + self.total_address = 0 + self.miner_address = 0 + self.miner_balance = 0 + self.total_balance = 0 + self.total_rcap = 0 + self.holder_0 = 0 + self.holder_1 = 0 + self.holder_2 = 0 + self.holder_3 = 0 + self.holder_4 = 0 + self.holder_5 = 0 + self.holder_6 = 0 + self.holder_7 = 0 + self.holder_15 = 0 + self.holder_30 = 0 + self.holder_60 = 0 + self.holder_90 = 0 + self.holder_180 = 0 + self.holder_360 = 0 + self.holder_540 = 0 + self.holder_720 = 0 + self.holder_1080 = 0 + self.holder_1440 = 0 + self.holder_1800 = 0 + self.holder_2160 = 0 + self.holder_2520 = 0 + self.holder_2880 = 0 + self.holder_3240 = 0 + self.holder_3600 = 0 + self.holder_3960 = 0 + + self.holder_balance_0 = 0 + self.holder_balance_1 = 0 + self.holder_balance_2 = 0 + self.holder_balance_3 = 0 + self.holder_balance_4 = 0 + self.holder_balance_5 = 0 + self.holder_balance_6 = 0 + self.holder_balance_7 = 0 + self.holder_balance_15 = 0 + self.holder_balance_30 = 0 + self.holder_balance_60 = 0 + self.holder_balance_90 = 0 + self.holder_balance_180 = 0 + self.holder_balance_360 = 0 + self.holder_balance_540 = 0 + self.holder_balance_720 = 0 + self.holder_balance_1080 = 0 + self.holder_balance_1440 = 0 + self.holder_balance_1800 = 0 + self.holder_balance_2160 = 0 + self.holder_balance_2520 = 0 + self.holder_balance_2880 = 0 + self.holder_balance_3240 = 0 + self.holder_balance_3600 = 0 + self.holder_balance_3960 = 0 + + self.price_buy = {} # step 500 + self.price_buy_amount = {} # step 500 + self.diff_sell = {} # step 500 + self.diff_sell_amount = {} + self.profit_sell = {} # step 5000 + self.profit_sell_amount = {} + + self.balance_0_days = {} + self.balance_001_days = {} + self.balance_01_days = {} + self.balance_1_days = {} + self.balance_10_days = {} + self.balance_100_days = {} + self.balance_1000_days = {} + self.balance_10000_days = {} + + self.balance_amount_0_days = {} + 
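# For orientation, the balance_* counters and *_days dictionaries initialized
# here bucket UTXOs by value; the suffix encodes the lower bound of each
# half-open BTC range, matching the thresholds applied later in summary_utxos.
# A reference table, for illustration only (BALANCE_RANGES_BTC is a
# hypothetical name, not used elsewhere in this file):
BALANCE_RANGES_BTC = {
    "balance_0":     (0,     0.01),
    "balance_001":   (0.01,  0.1),
    "balance_01":    (0.1,   1),
    "balance_1":     (1,     10),
    "balance_10":    (10,    100),
    "balance_100":   (100,   1000),
    "balance_1000":  (1000,  10000),
    "balance_10000": (10000, float("inf")),
}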
self.balance_amount_001_days = {} + self.balance_amount_01_days = {} + self.balance_amount_1_days = {} + self.balance_amount_10_days = {} + self.balance_amount_100_days = {} + self.balance_amount_1000_days = {} + self.balance_amount_10000_days = {} + + ''' + self.current_dt = time.time() + self.current_dt2 = time.gmtime(int(self.current_dt)) + self.current_daystr = time.strftime("%d %b %Y", self.current_dt2) + self.current_dayutc = int(time.mktime(time.strptime(self.current_daystr, "%d %b %Y"))) + ''' + + self.mc = pymongo.MongoClient("mongodb://10.168.3.192:27018/") + self.mdb = self.mc["btcutxos2"] + # self.mdb.authenticate("root", "123456") + + self.uprofit = 0 + self.uloss = 0 + self.lth_nupl = 0 + self.sth_nupl = 0 + self.lth_mv = 0 + self.sth_mv = 0 + self.lth_rcap = 0 + self.sth_rcap = 0 + self.lth_mvrv = 0 + self.sth_mvrv = 0 +# 此“UtxosIf”类使用默认值初始化大量属性。以下是属性的摘要: +# +# - 与平衡相关的属性: +# - 'balance_0', 'balance_001', ..., 'balance_10000':这些是不同面额比特币的余额计数器。 +# - 'balance_amount_0', 'balance_amount_001', ..., 'balance_amount_10000':这些是与余额计数器相对应的余额金额。 +# +# - 与地址和利润相关的属性: +# - 'profit_addresses':利润计数器地址。 +# - 'loss_addresses': 丢失地址的计数器。 +# - 与利润率、供应、已实现价格等相关的各种其他属性 +# +# +# - 与持有人及其余额相关的属性: +# - 不同时间间隔的持有者计数器('holder_0'、'holder_1'、...、'holder_3960')。 +# - 每个持有人的相应余额属性。 +# +# - 与价格和交易相关的属性: +# - 用于存储价格和交易金额的字典('price_buy'、'price_buy_amount'、'diff_sell'、'diff_sell_amount'、'profit_sell'、'profit_sell_amount')。 +# +# - 与余额随时间变化相关的属性: +# - 用于存储不同时间间隔的余额变化的字典('balance_0_days'、'balance_001_days'、...、'balance_amount_10000_days')。 +# +# - 与 +# MongoDB +# 连接和其他指标相关的属性: +# - 连接到MongoDB数据库('mc','mdb')。 +# - 与利润、亏损、已实现资本化等相关的指标 +# +# 此类似乎用于管理与比特币UTXO(未花费的交易输出)相关的各种指标和数据 + def init_db(self): + return pymysql.connect(host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", database="btcdb", + cursorclass=pymysql.cursors.DictCursor) +# 'init_dbpymysql库来建立连接。 + # + # 以下是该方法的作用的细分 + # + # 它使用提供的主机、端口、用户名、密码和数据库名称建立与 + # MySQL + # 数据库的连接。 + def get_vout_addr(self, rpc, txid, vout): + addr = None + addrtype = None + ip = "127.0.0.1" + port = "8332" + user = "user" + password = "password" + timeout=100 + if rpc is None: + rpc = RPC(ip, port, user, password) + tx = None + while True: + try: + tx = rpc.transactions.get_raw_transaction(txid, True) + #break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + continue + + txouts = tx["vout"] + txout = None + for outone in txouts: + # print(outone, vout) + if outone["n"] == vout: + txout = outone + break + scriptPubKey = txout["scriptPubKey"] + addrtype = scriptPubKey["type"] + + #print("get_vout_addr", txid, vout, tx, scriptPubKey, addrtype) + if "address" not in scriptPubKey: + addr = scriptPubKey["hex"] + if scriptPubKey["type"] == "pubkey": + temphex = scriptPubKey["hex"] + try: + if temphex[2:4] == "04": + addr = PublicKey(temphex[2:132]).get_address(False).to_string() + else: + addr = PublicKey(temphex[2:68]).get_address().to_string() + print("pubkey", txid, temphex, addr) + except Exception as e: + print("pubkey exception", txid, vout, temphex, addr, e) + else: + print(scriptPubKey) + else: + addr = scriptPubKey["address"] + # print(addr) + + break + + return rpc, addr, addrtype +# “UtxosIf”类中的“get_vout_addr”方法旨在检索与给定事务 (txid) 中的特定事务输出 (vout) 相对应的地址和地址类型。以下是其功能的细分: + # + # -参数: + # - 'rpc':用于与比特币网络交互的 RPC 类的实例。 + # - 'txid':交易 ID。 + # - 'vout':事务输出的索引。 + # + # -初始化: + # - 它初始化用于 RPC 连接的 IP 地址、端口、用户名和密码的变量。 + # + # - RPC连接: + # - 如果 'rpc' 参数为 'None',则使用默认 IP、端口、用户名和密码初始化 RPC 连接。 + # + # - 交易检索: + # - 它尝试使用 
the 'get_raw_transaction' method to fetch the raw transaction details from the RPC object.
+    # - If an exception occurs during this step (likely a connection problem), it waits one second and retries.
+    #
+    # - Extracting the address:
+    # - Once the transaction details are obtained, it walks the transaction's outputs ('vout') to find the requested one.
+    # - It extracts the "scriptPubKey" and its type from that output.
+    # - If the "scriptPubKey" does not carry an address directly, it tries to derive one (for example, from a bare public-key script).
+    #
+    # - Return value:
+    # - Finally, it returns the RPC instance, the address, and the address type.
+    #
+    # This method thus extracts the address and address type from a transaction output while handling transient retrieval errors.
+    def summary_toplist(self, txid, vout, coinbase, value, height, scriptpubkey, dt, price, dt2):
+        if value >= 100:
+            rpc = None
+            rpc, addr, addrtype = self.get_vout_addr(rpc, txid, vout)
+            if addrtype is None:
+                addrtype = "unknown"
+            if addr is None:
+                addr = "unknown"
+            toplist = {}
+            toplist["txid"] = txid
+            toplist["vout"] = vout
+            toplist["coinbase"] = coinbase
+            toplist["value"] = value
+            toplist["height"] = height
+            toplist["scriptpubkey"] = scriptpubkey
+            toplist["dt"] = dt
+            toplist["price"] = price
+            toplist["dt2"] = dt2
+            toplist["addr"] = addr
+            toplist["type"] = addrtype
+
+            result = self.mdbc_toplist.find_one(toplist)
+            if result is None:  # insert only when no identical record exists, avoiding duplicates
+                self.mdbc_toplist.insert_one(toplist)
+            #print(self.mdbc_toplist.find_one())
+# The summary_toplist method of UtxosIf summarizes and stores information about UTXOs that meet a size threshold (value >= 100 BTC). What it does:
+    # It takes several transaction-related parameters (txid, vout, coinbase, value, height, scriptpubkey, dt, price, dt2).
+    # It first checks whether the transaction output's value is greater than or equal to 100.
+    # If so, it initializes the rpc variable to None.
+    # It then calls get_vout_addr to resolve the address and address type for the transaction output.
+    # It builds a dictionary named toplist holding the transaction details: its id, output index, whether it is a
+    # coinbase transaction, its value, block height, scriptPubKey, timestamps, price, address, and address type.
+    # It checks the MongoDB collection mdbc_toplist for an identical record; only if none exists is the toplist dictionary inserted.
+    def summary_balance_days(self, dt, dt2, value, days_out, balance_out):
+        daysecs = 3600 * 24
+        days = (dt - dt2)/daysecs
+        if days < 1:
+            if "day_0" in days_out:
+                days_out["day_0"] += 1
+                balance_out["day_0"] += value
+            else:
+                days_out["day_0"] = 1
+                balance_out["day_0"] = value
+        elif days < 1 * 2:
+            if "day_1" in days_out:
+                days_out["day_1"] += 1
+                balance_out["day_1"] += value
+            else:
+                days_out["day_1"] = 1
+                balance_out["day_1"] = value
+        elif days < 1 * 3:
+            if "day_2" in days_out:
+                days_out["day_2"] += 1
+                balance_out["day_2"] += value
+            else:
+                days_out["day_2"] = 1
+                balance_out["day_2"] = value
+        elif days < 1 * 4:
+            if "day_3" in days_out:
+                days_out["day_3"] += 1
+                balance_out["day_3"] += value
+            else:
+                days_out["day_3"] = 1
+                balance_out["day_3"] = value
+        elif days < 1 * 5:
+            if "day_4" in days_out:
+                days_out["day_4"] += 1
+                balance_out["day_4"] += value
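# The if/elif ladder of summary_balance_days (starting above and continuing
# below through day_3960) assigns each UTXO to an age bucket: one-day buckets
# for the first month (day_0 .. day_30), then coarser buckets named after
# their upper edge. A behavior-equivalent table-driven sketch, for illustration
# only (day_bucket and add_to_bucket are hypothetical names; the ladder's
# day_1880 edge is kept as written):
def day_bucket(days):
    if days < 31:
        return "day_%d" % int(days)  # one-day buckets: day_0 .. day_30
    for edge in (60, 90, 180, 360, 540, 720, 1080, 1440,
                 1880, 2160, 2520, 2880, 3240, 3600):
        if days < edge:
            return "day_%d" % edge   # coarse buckets, keyed by their upper edge
    return "day_3960"                # 3600 days and older

def add_to_bucket(days_out, balance_out, days, value):
    key = day_bucket(days)
    days_out[key] = days_out.get(key, 0) + 1            # UTXO count per bucket
    balance_out[key] = balance_out.get(key, 0) + value  # BTC amount per bucket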
else: + days_out["day_4"] = 1 + balance_out["day_4"] = value + elif days < 1 * 6: + if "day_5" in days_out: + days_out["day_5"] += 1 + balance_out["day_5"] += value + else: + days_out["day_5"] = 1 + balance_out["day_5"] = value + elif days < 1 * 7: + if "day_6" in days_out: + days_out["day_6"] += 1 + balance_out["day_6"] += value + else: + days_out["day_6"] = 1 + balance_out["day_6"] = value + elif days < 1 * 8: + if "day_7" in days_out: + days_out["day_7"] += 1 + balance_out["day_7"] += value + else: + days_out["day_7"] = 1 + balance_out["day_7"] = value + elif days < 1 * 9: + if "day_8" in days_out: + days_out["day_8"] += 1 + balance_out["day_8"] += value + else: + days_out["day_8"] = 1 + balance_out["day_8"] = value + elif days < 1 * 10: + if "day_9" in days_out: + days_out["day_9"] += 1 + balance_out["day_9"] += value + else: + days_out["day_9"] = 1 + balance_out["day_9"] = value + elif days < 1 * 11: + if "day_10" in days_out: + days_out["day_10"] += 1 + balance_out["day_10"] += value + else: + days_out["day_10"] = 1 + balance_out["day_10"] = value + elif days < 1 * 12: + if "day_11" in days_out: + days_out["day_11"] += 1 + balance_out["day_11"] += value + else: + days_out["day_11"] = 1 + balance_out["day_11"] = value + elif days < 1 * 13: + if "day_12" in days_out: + days_out["day_12"] += 1 + balance_out["day_12"] += value + else: + days_out["day_12"] = 1 + balance_out["day_12"] = value + elif days < 1 * 14: + if "day_13" in days_out: + days_out["day_13"] += 1 + balance_out["day_13"] += value + else: + days_out["day_13"] = 1 + balance_out["day_13"] = value + elif days < 1 * 15: + if "day_14" in days_out: + days_out["day_14"] += 1 + balance_out["day_14"] += value + else: + days_out["day_14"] = 1 + balance_out["day_14"] = value + elif days < 1 * 16: + if "day_15" in days_out: + days_out["day_15"] += 1 + balance_out["day_15"] += value + else: + days_out["day_15"] = 1 + balance_out["day_15"] = value + elif days < 1 * 17: + if "day_16" in days_out: + days_out["day_16"] += 1 + balance_out["day_16"] += value + else: + days_out["day_16"] = 1 + balance_out["day_16"] = value + elif days < 1 * 18: + if "day_17" in days_out: + days_out["day_17"] += 1 + balance_out["day_17"] += value + else: + days_out["day_17"] = 1 + balance_out["day_17"] = value + elif days < 1 * 19: + if "day_18" in days_out: + days_out["day_18"] += 1 + balance_out["day_18"] += value + else: + days_out["day_18"] = 1 + balance_out["day_18"] = value + elif days < 1 * 20: + if "day_19" in days_out: + days_out["day_19"] += 1 + balance_out["day_19"] += value + else: + days_out["day_19"] = 1 + balance_out["day_19"] = value + elif days < 1 * 21: + if "day_20" in days_out: + days_out["day_20"] += 1 + balance_out["day_20"] += value + else: + days_out["day_20"] = 1 + balance_out["day_20"] = value + elif days < 1 * 22: + if "day_21" in days_out: + days_out["day_21"] += 1 + balance_out["day_21"] += value + else: + days_out["day_21"] = 1 + balance_out["day_21"] = value + elif days < 1 * 23: + if "day_22" in days_out: + days_out["day_22"] += 1 + balance_out["day_22"] += value + else: + days_out["day_22"] = 1 + balance_out["day_22"] = value + elif days < 1 * 24: + if "day_23" in days_out: + days_out["day_23"] += 1 + balance_out["day_23"] += value + else: + days_out["day_23"] = 1 + balance_out["day_23"] = value + elif days < 1 * 25: + if "day_24" in days_out: + days_out["day_24"] += 1 + balance_out["day_24"] += value + else: + days_out["day_24"] = 1 + balance_out["day_24"] = value + elif days < 1 * 26: + if "day_25" in days_out: + 
days_out["day_25"] += 1 + balance_out["day_25"] += value + else: + days_out["day_25"] = 1 + balance_out["day_25"] = value + elif days < 1 * 27: + if "day_26" in days_out: + days_out["day_26"] += 1 + balance_out["day_26"] += value + else: + days_out["day_26"] = 1 + balance_out["day_26"] = value + elif days < 1 * 28: + if "day_27" in days_out: + days_out["day_27"] += 1 + balance_out["day_27"] += value + else: + days_out["day_27"] = 1 + balance_out["day_27"] = value + elif days < 1 * 29: + if "day_28" in days_out: + days_out["day_28"] += 1 + balance_out["day_28"] += value + else: + days_out["day_28"] = 1 + balance_out["day_28"] = value + elif days < 1 * 30: + if "day_29" in days_out: + days_out["day_29"] += 1 + balance_out["day_29"] += value + else: + days_out["day_29"] = 1 + balance_out["day_29"] = value + elif days < 1 * 31: + if "day_30" in days_out: + days_out["day_30"] += 1 + balance_out["day_30"] += value + else: + days_out["day_30"] = 1 + balance_out["day_30"] = value + elif days < 1 * 60: + if "day_60" in days_out: + days_out["day_60"] += 1 + balance_out["day_60"] += value + else: + days_out["day_60"] = 1 + balance_out["day_60"] = value + elif days < 1 * 90: + if "day_90" in days_out: + days_out["day_90"] += 1 + balance_out["day_90"] += value + else: + days_out["day_90"] = 1 + balance_out["day_90"] = value + elif days < 1 * 180: + if "day_180" in days_out: + days_out["day_180"] += 1 + balance_out["day_180"] += value + else: + days_out["day_180"] = 1 + balance_out["day_180"] = value + elif days < 1 * 360: + if "day_360" in days_out: + days_out["day_360"] += 1 + balance_out["day_360"] += value + else: + days_out["day_360"] = 1 + balance_out["day_360"] = value + elif days < 1 * 540: + if "day_540" in days_out: + days_out["day_540"] += 1 + balance_out["day_540"] += value + else: + days_out["day_540"] = 1 + balance_out["day_540"] = value + elif days < 1 * 720: + if "day_720" in days_out: + days_out["day_720"] += 1 + balance_out["day_720"] += value + else: + days_out["day_720"] = 1 + balance_out["day_720"] = value + elif days < 1 * 1080: + if "day_1080" in days_out: + days_out["day_1080"] += 1 + balance_out["day_1080"] += value + else: + days_out["day_1080"] = 1 + balance_out["day_1080"] = value + elif days < 1 * 1440: + if "day_1440" in days_out: + days_out["day_1440"] += 1 + balance_out["day_1440"] += value + else: + days_out["day_1440"] = 1 + balance_out["day_1440"] = value + elif days < 1 * 1880: + if "day_1880" in days_out: + days_out["day_1880"] += 1 + balance_out["day_1880"] += value + else: + days_out["day_1880"] = 1 + balance_out["day_1880"] = value + elif days < 1 * 2160: + if "day_2160" in days_out: + days_out["day_2160"] += 1 + balance_out["day_2160"] += value + else: + days_out["day_2160"] = 1 + balance_out["day_2160"] = value + elif days < 1 * 2520: + if "day_2520" in days_out: + days_out["day_2520"] += 1 + balance_out["day_2520"] += value + else: + days_out["day_2520"] = 1 + balance_out["day_2520"] = value + elif days < 1 * 2880: + if "day_2880" in days_out: + days_out["day_2880"] += 1 + balance_out["day_2880"] += value + else: + days_out["day_2880"] = 1 + balance_out["day_2880"] = value + elif days < 1 * 3240: + if "day_3240" in days_out: + days_out["day_3240"] += 1 + balance_out["day_3240"] += value + else: + days_out["day_3240"] = 1 + balance_out["day_3240"] = value + elif days < 1 * 3600: + if "day_3600" in days_out: + days_out["day_3600"] += 1 + balance_out["day_3600"] += value + else: + days_out["day_3600"] = 1 + balance_out["day_3600"] = value + else: + if "day_3960" 
in days_out: + days_out["day_3960"] += 1 + balance_out["day_3960"] += value + else: + days_out["day_3960"] = 1 + balance_out["day_3960"] = value + return days_out, balance_out +# “summary_balance_days”功能似乎旨在根据持有余额的天数对余额进行分类。以下是其工作原理的细分: + # + # -参数: + # - 'dt':当前日期。 + # - “dt2”:获取余额的日期。 + # - 'value':余额的值。 + # - 'days_out':用于存储不同时期余额计数的字典。 + # - 'balance_out':存储不同时期累计余额的字典。 + # + # - 计算天数: + # - 它通过从当前日期 ('dt') 中减去购置日期 ('dt2') 来计算余额持有的天数。此值存储在“days”变量中。 + # + # - 对余额进行分类: + # - 根据计算的天数,该函数将余额分配给特定类别 ('day_X'),其中 + # 'X' + # 表示天数。 + # - 如果余额持有时间少于一天,则将其归类为“day_0”。 + # - 如果余额已持有 + # 1 + # 到 + # 30 + # 天,则分别归类为“day_1”至“day_30”。 + # - 如果余额已持有 + # 31 + # 到 + # 60 + # 天,则将其归类为“day_60”。 + # - 如果余额已持有超过 + # 3600 + # 天(约 + # 10 + # 年),则将其归类为“day_3960”。 + # + # - 累积余额: + # - 对于每个类别,该函数递增余额计数 ('days_out') 并将余额值添加到累计余额 ('balance_out')。 + # + # - 返回结果: + # - 返回更新的“days_out”和“balance_out”字典。 + # 0.01,0.1,1,10,100,1000,10000,total balance + # new addr, total address + def summary_utxos(self, current_price, txid, vout, coinbase, value, height, scriptpubkey, dt, price, dt2): + + self.total_address += 1 + self.total_balance += value + + self.total_rcap += (value * price) + + if coinbase == 1: + self.miner_address += 1 + self.miner_balance += value + + if current_price > price: + self.uprofit +=(value*(current_price-price)) + self.profit_addresses += 1 + if current_price <= price: + self.uloss += (value*(price - current_price)) + self.loss_addresses += 1 + + n = int(price / 1000) + n = n * 1000 + + keystr = "buy" + str(n) + if keystr in self.price_buy: + self.price_buy[keystr] += 1 + else: + self.price_buy[keystr] = 1 + + keystr = "buy_amount" + str(n) + if keystr in self.price_buy_amount: + self.price_buy_amount[keystr] += value + else: + self.price_buy_amount[keystr] = value + + diff = current_price - price + n = int(diff / 1000) + n = n * 1000 + keystr = "diff" + str(n) + if keystr in self.diff_sell: + self.diff_sell[keystr] += 1 + else: + self.diff_sell[keystr] = 1 + + keystr = "diff_amount" + str(n) + if keystr in self.diff_sell_amount: + self.diff_sell_amount[keystr] += value + else: + self.diff_sell_amount[keystr] = value + + + try: + profit = (current_price - price)/(price)*10 + except: + profit = current_price*10 + + if int(profit) < 100: + n = int(profit) + keystr = "profit" + str(n) + if keystr in self.profit_sell: + self.profit_sell[keystr] += 1 + else: + self.profit_sell[keystr] = 1 + + keystr = "profit_amount" + str(n) + if keystr in self.profit_sell_amount: + self.profit_sell_amount[keystr] += value + else: + self.profit_sell_amount[keystr] = value + else: + profit = profit/100 + n = int(profit) + keystr = "profit10" + str(n) + if keystr in self.profit_sell: + self.profit_sell[keystr] += 1 + else: + self.profit_sell[keystr] = 1 + + keystr = "profit_amount10" + str(n) + if keystr in self.profit_sell_amount: + self.profit_sell_amount[keystr] += value + else: + self.profit_sell_amount[keystr] = value + + if value < 0.01: + self.balance_0 += 1 + self.balance_amount_0 += value + self.balance_0_days, self.balance_amount_0_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_0_days, + self.balance_amount_0_days) + elif value < 0.1: + self.balance_001 += 1 + self.balance_amount_001 += value + self.balance_001_days, self.balance_amount_001_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_001_days, + self.balance_amount_001_days) + elif value < 1: + self.balance_01 += 1 + self.balance_amount_01 += value + self.balance_01_days, 
self.balance_amount_01_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_01_days, + self.balance_amount_01_days) + elif value < 10: + self.balance_1 += 1 + self.balance_amount_1 += value + self.balance_1_days, self.balance_amount_1_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_1_days, + self.balance_amount_1_days) + elif value < 100: + self.balance_10 += 1 + self.balance_amount_10 += value + self.balance_10_days, self.balance_amount_10_days = self.summary_balance_days(self.current_dayutc, dt2, value, self.balance_10_days, + self.balance_amount_10_days) + elif value < 1000: + self.balance_100 += 1 + self.balance_amount_100 += value + self.balance_100_days, self.balance_amount_100_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_100_days, + self.balance_amount_100_days) + elif value < 10000: + self.balance_1000 += 1 + self.balance_amount_1000 += value + self.balance_1000_days, self.balance_amount_1000_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_1000_days, + self.balance_amount_1000_days) + else: + self.balance_10000 += 1 + self.balance_amount_10000 += value + self.balance_10000_days, self.balance_amount_10000_days = self.summary_balance_days(self.current_dayutc, dt2, value, + self.balance_10000_days, + self.balance_amount_10000_days) + + daysecs = 3600 * 24 + + if self.current_dayutc - dt2 >= 180 * daysecs: + if self.current_dayutc - dt2 <= 365 * daysecs: + self.slrv_6m1y += (value*price) + + if self.current_dayutc - dt2 <= daysecs: + self.slrv_24h += (value*price) + + if self.current_dayutc - dt2 >= 155*daysecs: + self.lth_nupl += (value*(current_price-price)) + self.lth_rcap += (value*price) + self.lth_mv += (value*current_price) + self.lth_supply += value + if current_price > price: + self.lth_profit_supply += value + if current_price < price: + self.lth_loss_supply += value + else: + self.sth_nupl += (value*(price - current_price)) + self.sth_rcap += (value * price) + self.sth_mv += (value * current_price) + self.sth_supply += value + if current_price > price: + self.sth_profit_supply += value + if current_price < price: + self.sth_loss_supply += value + + if self.current_dayutc - dt2 < daysecs: + self.holder_0 += 1 + self.holder_balance_0 += value + elif self.current_dayutc - dt2 < daysecs * 2: + self.holder_1 += 1 + self.holder_balance_1 += value + elif self.current_dayutc - dt2 < daysecs * 3: + self.holder_2 += 1 + self.holder_balance_2 += value + elif self.current_dayutc - dt2 < daysecs * 4: + self.holder_3 += 1 + self.holder_balance_3 += value + elif self.current_dayutc - dt2 < daysecs * 5: + self.holder_4 += 1 + self.holder_balance_4 += value + elif self.current_dayutc - dt2 < daysecs * 6: + self.holder_5 += 1 + self.holder_balance_5 += value + elif self.current_dayutc - dt2 < daysecs * 7: + self.holder_6 += 1 + self.holder_balance_6 += value + elif self.current_dayutc - dt2 < daysecs * 8: + self.holder_7 += 1 + self.holder_balance_7 += value + elif self.current_dayutc - dt2 < daysecs * 15: + self.holder_15 += 1 + self.holder_balance_15 += value + elif self.current_dayutc - dt2 < daysecs * 30: + self.holder_30 += 1 + self.holder_balance_30 += value + elif self.current_dayutc - dt2 < daysecs * 60: + self.holder_60 += 1 + self.holder_balance_60 += value + elif self.current_dayutc - dt2 < daysecs * 90: + self.holder_90 += 1 + self.holder_balance_90 += value + elif self.current_dayutc - dt2 < daysecs * 180: + self.holder_180 += 1 + self.holder_balance_180 
+= value + elif self.current_dayutc - dt2 < daysecs * 360: + self.holder_360 += 1 + self.holder_balance_360 += value + elif self.current_dayutc - dt2 < daysecs * 540: + self.holder_540 += 1 + self.holder_balance_540 += value + elif self.current_dayutc - dt2 < daysecs * 720: + self.holder_720 += 1 + self.holder_balance_720 += value + elif self.current_dayutc - dt2 < daysecs * 1080: + self.holder_1080 += 1 + self.holder_balance_1080 += value + elif self.current_dayutc - dt2 < daysecs * 1440: + self.holder_1440 += 1 + self.holder_balance_1440 += value + elif self.current_dayutc - dt2 < daysecs * 1880: + self.holder_1800 += 1 + self.holder_balance_1800 += value + elif self.current_dayutc - dt2 < daysecs * 2160: + self.holder_2160 += 1 + self.holder_balance_2160 += value + elif self.current_dayutc - dt2 < daysecs * 2520: + self.holder_2520 += 1 + self.holder_balance_2520 += value + elif self.current_dayutc - dt2 < daysecs * 2880: + self.holder_2880 += 1 + self.holder_balance_2880 += value + elif self.current_dayutc - dt2 < daysecs * 3240: + self.holder_3240 += 1 + self.holder_balance_3240 += value + elif self.current_dayutc - dt2 < daysecs * 3600: + self.holder_3600 += 1 + self.holder_balance_3600 += value + else: + self.holder_3960 += 1 + self.holder_balance_3960 += value +# “summary_utxos”功能似乎是用于分析交易及其相关余额的更大系统的一部分。以下是该函数的功能细分: + # + # - ** 更新计数器和总计: ** + # - 'self.total_address':递增 + # 1 + # 以计算地址总数。 + # - “self.total_balance”:累积总余额。 + # - 'self.total_rcap':累计已实现的总资本化(余额 * 价格)。 + # - “self.miner_address”和“self.miner_balance”:如果交易是 + # coinbase + # 交易(“coinbase == 1”),则增加矿工地址计数并累积矿工余额。 + # + # - ** 盈亏计算: ** + # - 'self.uprofit' + # 和 + # 'self.uloss':如果当前价格分别高于或低于成交价格,则计算并累计未实现损益。 + # - “self.profit_addresses”和“self.loss_addresses”:统计有未实现盈亏的地址。 + # + # - ** 价格分析: ** + # - 'n':将价格四舍五入到最接近的千位。 + # - 'keystr':根据四舍五入的价格生成密钥。 + # - 更新买入价格(“self.price_buy”)及其相应金额(“self.price_buy_amount”)的计数器。 + # - 计算当前价格和交易价格之间的差额 ('diff'),将其四舍五入到最接近的千位,并更新价格差异计数器('self.diff_sell' + # 和 + # 'self.diff_sell_amount')。 + # + # - ** 利润百分比分析: ** + # - 根据当前价格和交易价格之间的差额计算利润百分比(“利润”)。 + # - 将利润分类为不同的范围,并相应地更新计数器(“self.profit_sell”和“self.profit_sell_amount”)。 + # + # - ** 余额分类: ** + # - 根据余额的值将余额分类为不同的范围,并相应地更新计数器。 + # - 对于每个范围,更新地址计数 ('self.balance_X') 和累计余额 ('self.balance_amount_X') 的计数器。 + # + # - ** 基于时间的分析: ** + # - 分析余额被持有的时间(“self.summary_balance_days”)并相应地更新计数器。 + # + # - ** 其他指标: ** + # - 根据余额持有的时间长度更新各种指标,例如短期和长期已实现资本化、市场价值、供应、利润和损失。 + # + # - ** 持有人分析: ** + # - 根据持有余额的时间对持有人进行分类,并更新相应的计数器,以显示持有人的数量及其累计余额。 + # + # 总体而言,此功能似乎提供了对交易数据的全面分析,包括与余额、价格、利润和持有人行为相关的各种指标。 + def save_db(self): + db_conn = self.init_db() + with db_conn.cursor() as cursor: + + sql_insert = 'REPLACE INTO `utxosv3` (`unixdt`, `total_address`, `total_balance`, `total_rcap`, `miner_address`,`miner_balance`, `balance_0`, `balance_001`, `balance_01`, `balance_1`, `balance_10`,`balance_100`, `balance_1000`, `balance_10000`, uprofit, uloss, lthnupl, sthnupl, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s,%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.total_address, self.total_balance, self.total_rcap, self.miner_address, self.miner_balance, self.balance_0, + self.balance_001, + self.balance_01, self.balance_1, self.balance_10, self.balance_100, self.balance_1000, self.balance_10000, self.uprofit, self.uloss, self.lth_nupl, self.sth_nupl, self.lth_mv, self.lth_rcap, self.sth_mv, self.sth_rcap, 
self.lth_mvrv, self.sth_mvrv)) + sql_insert = 'REPLACE INTO `utxos3nd` (`unixdt`, `balance_amount_0`, `balance_amount_001`, `balance_amount_01`, `balance_amount_1`, `balance_amount_10`,`balance_amount_100`, `balance_amount_1000`, `balance_amount_10000`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s,%s, %s, %s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.balance_amount_0, + self.balance_amount_001, + self.balance_amount_01, self.balance_amount_1, self.balance_amount_10, self.balance_amount_100, self.balance_amount_1000, + self.balance_amount_10000)) + sql_insert = 'REPLACE INTO `holder3` (`unixdt`,`holder_0`,`holder_1`,`holder_2`,`holder_3`,`holder_4`,`holder_5`,`holder_6`,`holder_7`,`holder_15`,`holder_30`,`holder_60`,`holder_90`,`holder_180`,`holder_360`,`holder_540`,`holder_720`,`holder_1080`,`holder_1440`,`holder_1800`,`holder_2160`,`holder_2520`,`holder_2880`,`holder_3240`,`holder_3600`,`holder_3960`) VALUES(FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.holder_0, self.holder_1, self.holder_2, self.holder_3, self.holder_4, self.holder_5, self.holder_6, self.holder_7, + self.holder_15, + self.holder_30, self.holder_60, self.holder_90, self.holder_180, self.holder_360, self.holder_540, self.holder_720, self.holder_1080, + self.holder_1440, + self.holder_1800, self.holder_2160, self.holder_2520, self.holder_2880, self.holder_3240, self.holder_3600, self.holder_3960)) + sql_insert = 'REPLACE INTO `holder_balance3` (`unixdt`,`holder_balance_0`,`holder_balance_1`,`holder_balance_2`,`holder_balance_3`,`holder_balance_4`,`holder_balance_5`,`holder_balance_6`,`holder_balance_7`,`holder_balance_15`,`holder_balance_30`,`holder_balance_60`,`holder_balance_90`,`holder_balance_180`,`holder_balance_360`,`holder_balance_540`,`holder_balance_720`,`holder_balance_1080`,`holder_balance_1440`,`holder_balance_1800`,`holder_balance_2160`,`holder_balance_2520`,`holder_balance_2880`,`holder_balance_3240`,`holder_balance_3600`,`holder_balance_3960`) VALUES(FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)' + cursor.execute(sql_insert, ( + self.current_dayutc, self.holder_balance_0, self.holder_balance_1, self.holder_balance_2, self.holder_balance_3, + self.holder_balance_4, + self.holder_balance_5, self.holder_balance_6, self.holder_balance_7, self.holder_balance_15, + self.holder_balance_30, self.holder_balance_60, self.holder_balance_90, self.holder_balance_180, self.holder_balance_360, + self.holder_balance_540, self.holder_balance_720, self.holder_balance_1080, self.holder_balance_1440, + self.holder_balance_1800, self.holder_balance_2160, self.holder_balance_2520, self.holder_balance_2880, self.holder_balance_3240, + self.holder_balance_3600, self.holder_balance_3960)) + + + #v2 + if self.loss_addresses > 0: + self.profit_ratio = self.profit_addresses/self.loss_addresses + if self.total_balance > 0: + self.realized_price = self.total_rcap/self.total_balance + if self.sth_loss_supply > 0: + self.sth_profit_ratio = self.sth_profit_supply/self.sth_loss_supply + if self.lth_loss_supply > 0: + self.lth_profit_ratio = self.lth_profit_supply / self.lth_loss_supply + if self.sth_profit_ratio > 0: + self.relative_lth_sth = self.lth_profit_ratio/self.sth_profit_ratio + if self.slrv_6m1y > 0: + self.slrv_ratio = self.slrv_24h/self.slrv_6m1y + + sql_insert = 'REPLACE INTO `utxosv4` 
(`unixdt`,`profit_addresses`,`loss_addresses`,`profit_ratio`,`lth_supply`,`sth_supply`,`realized_price`,`relative_lth_sth`,`lth_profit_supply`,`lth_loss_supply`,`lth_profit_ratio`,`sth_profit_supply`,`sth_loss_supply`,`sth_profit_ratio`,`slrv_ratio`) VALUES(FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' + cursor.execute(sql_insert, (self.current_dayutc, self.profit_addresses, self.loss_addresses, self.profit_ratio, self.lth_supply, self.sth_supply, self.realized_price, self.relative_lth_sth, self.lth_profit_supply, self.lth_loss_supply, self.lth_profit_ratio, self.sth_profit_supply, self.sth_loss_supply, self.sth_profit_ratio, self.slrv_ratio)) + db_conn.commit() + + + mc = pymongo.MongoClient("mongodb://10.168.3.192:27018/") + mdb = mc["btcutxos2"] + # mdb.authenticate("root", "123456") + + self.price_buy["unixdt"] = int(self.current_dt) + mdbc_buy = mdb["buy"] + mdbc_buy.insert_one(self.price_buy) + print(mdbc_buy.find_one()) + self.price_buy_amount["unixdt"] = int(self.current_dt) + mdbc_buy_amount = mdb["buy_amount"] + mdbc_buy_amount.insert_one(self.price_buy_amount) + print(mdbc_buy_amount.find_one()) + + self.diff_sell["unixdt"] = int(self.current_dt) + mdbc_diff = mdb["diff"] + mdbc_diff.insert_one(self.diff_sell) + print(mdbc_diff.find_one()) + self.diff_sell_amount["unixdt"] = int(self.current_dt) + mdbc_diff_amount = mdb["diff_amount"] + mdbc_diff_amount.insert_one(self.diff_sell_amount) + print(mdbc_diff_amount.find_one()) + + self.profit_sell["unixdt"] = int(self.current_dt) + mdbc_profit = mdb["profit"] + mdbc_profit.insert_one(self.profit_sell) + print(mdbc_profit.find_one()) + self.profit_sell_amount["unixdt"] = int(self.current_dt) + mdbc_profit_amount = mdb["profit_amount"] + mdbc_profit_amount.insert_one(self.profit_sell_amount) + print(mdbc_profit_amount.find_one()) + + self.balance_0_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_0_days"] + mdbc_balance_days.insert_one(self.balance_0_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_0_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_0_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_0_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_001_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_001_days"] + mdbc_balance_days.insert_one(self.balance_001_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_001_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_001_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_001_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_01_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_01_days"] + mdbc_balance_days.insert_one(self.balance_01_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_01_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_01_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_01_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_1_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_1_days"] + mdbc_balance_days.insert_one(self.balance_1_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_1_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_1_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_1_days) + print(mdbc_balance_amount_days.find_one()) + 
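# Every per-bucket MongoDB write in save_db follows the same shape: stamp the
# dict with unixdt, then insert it into the collection of the same name. A
# compact equivalent, as a sketch (save_day_buckets is a hypothetical helper
# that relies on the attribute-naming convention used above):
def save_day_buckets(self, mdb):
    for bucket in ("0", "001", "01", "1", "10", "100", "1000", "10000"):
        for pattern in ("balance_%s_days", "balance_amount_%s_days"):
            name = pattern % bucket
            doc = getattr(self, name)             # e.g. self.balance_10_days
            doc["unixdt"] = int(self.current_dt)  # same timestamp key as above
            mdb[name].insert_one(doc)             # collection named after the attribute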
+ self.balance_10_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_10_days"] + mdbc_balance_days.insert_one(self.balance_10_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_10_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_10_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_10_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_100_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_100_days"] + mdbc_balance_days.insert_one(self.balance_100_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_100_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_100_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_100_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_1000_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_1000_days"] + mdbc_balance_days.insert_one(self.balance_1000_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_1000_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_1000_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_1000_days) + print(mdbc_balance_amount_days.find_one()) + + self.balance_10000_days["unixdt"] = int(self.current_dt) + mdbc_balance_days = mdb["balance_10000_days"] + mdbc_balance_days.insert_one(self.balance_10000_days) + print(mdbc_balance_days.find_one()) + self.balance_amount_10000_days["unixdt"] = int(self.current_dt) + mdbc_balance_amount_days = mdb["balance_amount_10000_days"] + mdbc_balance_amount_days.insert_one(self.balance_amount_10000_days) + print(mdbc_balance_amount_days.find_one()) +# “save_db”方法负责将分析期间收集的数据保存到数据库和MongoDB实例中。以下是它的作用的细分: + # - MySQL数据库: ** + # - 使用'init_db'方法连接到MySQL数据库。 + # - 使用SQL查询将记录插入或替换到多个表('utxosv1'、'utxos2nd'、'holder'、'holder_balance'和'utxosv2')中。这些表包含用于分析的各种指标和汇总数据。 + # - ** MongoDB: ** + # - 连接到MongoDB实例。 + # - 将数据插入多个集合('buy'、'buy_amount'、'diff'、'diff_amount'、'profit'、'profit_amount'、'balance_0_days'、'balance_amount_0_days' + # 等)中,以进行不同类型的分析。 + # 对于MongoDB中的每个集合,该方法将数据插入到集合中,并打印插入的文档以进行验证。 + # 此外,该方法还会计算和更新一些其他指标(如利润率、实现价格、短期和长期持有者的利润率、相对比率等),并将它们插入“utxosv2”表中。 + # 总体而言,这种方法有助于将分析的数据存储到关系数据库 (MySQL) 和NoSQL数据库 (MongoDB) 中,从而便于检索和进一步分析。 + def get_history_price(self, batch_size=5000): + """获取数据库中的 Nasdaq 数据,存入字典""" + db_config = { + "host": "192.168.194.240", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict = {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = "SELECT timestamp, price FROM btc_prices WHERE source = 'Nasdaq' ORDER BY timestamp LIMIT %s OFFSET %s" + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 最后一页读取完成 + + return self.pricedict + #prices = {} + #response_price = requests.get( + # 'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU;api_key=FZqXog4sR-b7cYnXcRVV') + #if response_price.status_code == 200: + # #print(response_price.content) + # priceweb = ujson.loads(response_price.content) + # if "datatable" in priceweb: + # priceset = 
priceweb["datatable"] + # if "data" in priceset: + # pricedata = priceset["data"] + # for price in pricedata: + # daystr = price[1] + # p = price[2] + # dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d")) + # prices[str(int(dayutc))] = float(p) + # #print(price, int(dayutc), g_prices[str(int(dayutc))]) + #return prices +# “get_history_price”方法似乎用于从WebAPI终结点检索历史价格数据。以下是它的作用的细分: + # - 初始化一个空字典“prices”来存储历史价格数据。 + # - 向指定的API端点发送HTTP GET请求,该端点可能提供历史比特币价格数据。 + # - 检查响应状态代码是否为200(表示响应成功)。 + # - 如果响应成功: + # - 使用“ujson”库解析JSON响应。 + # - 检查解析的JSON响应中是否存在键'“dataset''。 + # - 如果'“dataset”'存在: + # - 检索“data”' 字段,该字段可能包含历史价格数据点的列表。 + # - 遍历列表中的每个价格数据点。 + # - 使用'strptime'将日期字符串 ('daystr') 解析为Unix时间戳 ('dayutc')。 + # - 将Unix 时间戳作为键存储在“prices”字典中,并将相应的price ('p') 作为值。 + # 最后,它返回包含历史价格数据的“prices”字典,其中Unix时间戳作为键,价格作为值 + def get_history_price2(self, batch_size=5000): + #pricedict = {} + """获取数据库中的 Messari 数据,存入字典""" + db_config = { + "host": "192.168.194.240", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423, + "connect_timeout": 60, + "read_timeout": 60, + "write_timeout": 60, + "charset": "utf8mb4" + } + + offset = 0 + self.pricedict = {} + + while True: + connection = pymysql.connect(**db_config) + try: + with connection.cursor() as cursor: + sql = """ + SELECT timestamp, price + FROM btc_prices + WHERE source = 'CryptoCompare' + ORDER BY timestamp + LIMIT %s OFFSET %s + """ + cursor.execute(sql, (batch_size, offset)) + rows = cursor.fetchall() + if not rows: + break + for timestamp, price in rows: + self.pricedict[str(int(timestamp))] = float(price) + finally: + connection.close() + + offset += batch_size + if len(rows) < batch_size: + break # 数据已全部读取 + + return self.pricedict + #dayt = time.gmtime() + #daystr = time.strftime("%Y", dayt) + #year = int(daystr) + #end_year = year + #while True: + # url = "" + # if end_year != year: + # start_year = end_year + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + # else: + # url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?after=" + str( + # year) + "-01-01&order=descending&interval=1d" +# +# if end_year != year: +# url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d" +# header_set = {} +# header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY" +# # header_set["Content-Type"] = "application/json" +# print(header_set, url) +# response_price = requests.get(url, headers=header_set) +# # print(response_price) +# if response_price.status_code == 200: +# # print(response_price.content) +# priceweb = ujson.loads(response_price.content) +# if "data" in priceweb: +# priceset = priceweb["data"] +# if "values" in priceset: +# valueset = priceset["values"] +# if valueset is not None: +# for supply in valueset: +# dayutc = int(supply[0] / 1000) +# s = supply[1] +# ret_time = time.gmtime(dayutc) +# ret_daystr = time.strftime("%d %b %Y", ret_time) +# ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y"))) +# pricedict[str(ret_dayutc)] = float(s) +# # print(s, dayutc, pricedict[str(dayutc)]) +# # break +# else: +# break +# else: +# break +# end_year -= 1 +# time.sleep(2) +# print(pricedict) +# return pricedict +# 'get_history + # 初始化与时间相关的变量,包括当前年份。 + # 进入一个循环,从当前年份开始,向后循环访问年份。 + # 根据当前年份和端点构造一个URL,用于获取比特币价格数据。 + # 向构造的URL发送HTTP + # GET请求,包括标头中的特定API密钥。 + # 检查响应状态代码是否为 + # 2 + # 使用'u 解析 JSON 响应 + # 从JSON响应中提取相关价格数据,并将其添加到“pricedict” + # 
将时间戳转换为人类可读的日期字符串,然后转换回Unix时间戳,并将它们作为键存储在'pricedic 中 + # 在发出下一个请求之前暂停执行2秒,以避免API过载。 + # 继续此过程,直到遍历所有年份或发生错误。 + # 最后,它返回“pricedict” + def get_current_price(self): + price = 0 + DB_CONFIG = { + "host": "192.168.194.240", + "user": "root", + "password": "2GS@bPYcgiMyL14A", + "database": "btcdb", + "port": 4423 + } + connection = pymysql.connect(**DB_CONFIG) + try: + with connection.cursor() as cursor: + for source in ("binance", "coinbase"): + cursor.execute(""" + SELECT price FROM btc_realtime_prices + WHERE source=%s + ORDER BY timestamp DESC + LIMIT 1 + """, (source,)) + row = cursor.fetchone() + if row: + price = float(row[0]) + break + finally: + connection.close() + return price + #price = 0 + #try: + # response_price = requests.get( + # 'https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT') + # prices = ujson.loads(response_price.text) + # price = float(prices["price"]) + # print(response_price.text, price) + # response_price.close() + # # print("price", price) + # return price + #except: + # response_price = requests.get("https://api.coinpaprika.com/v1/tickers/btc-bitcoin") + # prices = ujson.loads(response_price.text) + # price = float(prices["quotes"]["USD"]["price"]) + # response_price.close() + # return price +# 该函数似乎是一种从特定 + # API端点检索比特币当前价格的方法。以下是其功能的细分:get_current_price + # 初始化默认值为0的变量。price + # 向指定的URL () 发送HTTP + # GET请求,以获取当前以美元为单位的比特币价格。'https://bitcoinexplorer.org/api/price/usd' + # 检查响应状态代码是否为200,表示响应成功。 + # 如果响应成功,它将打印响应文本(大概是当前价格),并在删除逗号并将其转换为浮点数后将其分配给变量。price + # 关闭响应对象。 + # 打印检索到的价格以进行调试。 + # 返回检索到的价格。 + # 此函数实质上是从指定的API端点检索和处理当前比特币价格,并将其作为浮点数返回 + def get_ht(self): + try: + with open('height_time.csv', mode='r') as f: + reader = csv.reader(f) + height_time = {rows[0]: rows[1] for rows in reader} + '''for key in height_time.keys(): + print(key, height_time[key]) + break''' + return height_time + return None + except: + return None +# 这get_ht函数似乎从名为“height_time.csv” + # 的CSV文件中读取数据并将其转换为字典,其中键取自第一列,值取自第二列。以下是其功能的细分: + # 尝试在读取模式下打开文件“height_time.csv” + # 初始化名为“height”的字典 + # 使用“csv.reade”循环访问CSV文件中的每一行 + # 构造一个字典,其中每个键值对对应于CSV文件中的一行,键取自第一列,值取自第二列。 + # 返回“height_time + # 如果在文件读取或字典构造过程中发生异常,该函数将返回“无” + # 此函数实质上是从CSV文件中读取数据并将其作为字典返回,该字典可能在代码中的其他位置用于进一步处理或分析 + def get_day_utc(self, utc_time): + t = time.gmtime(utc_time) + daystr = time.strftime("%d %b %Y", t) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + return dayutc +# 该函数将 + # UTC时间戳作为输入,并返回表示与该输入时间戳对应的一天 (00:00:00UTC) 开始的 + # UTC时间戳。以下是其功能的细分:get_day_utc + # 它接收UTC时间戳作为输入。utc_time + # 它使用将UTC时间戳转换为时间元组。此元组表示UTC中的时间。time.gmtime() + # 它将时间元组格式化为一个字符串,以“DD Mon YYYY” + # (例如,“2022年1月1日”)表示日期。 + # 它使用和将此格式化的字符串转换回UTC时间戳。这有效地将时间设置为同一天的 + # 00:00:00UTC。time.strptime() + # time.mktime() + # 它返回表示一天开始的UTC时间戳。 + # 总体而言,此函数可用于将任何UTC时间戳转换为UTC中相应日期的开始 + def get_dh_height(self): + try: + with open('daily_height.csv', mode='r') as f: + reader = csv.reader(f) + daily_height = {rows[0]: rows[1] for rows in reader} + return daily_height + return None + except: + print("failed open daily_height.csv") + return None +# 该函数尝试从名为“daily_height.csv”的CSV + # 文件中读取数据,并将其转换为字典,其中键是日期,值是相应的高度。get_dh_height + # 其工作原理如下: + # 它尝试在读取模式下打开文件“daily_height.csv”。 + # 如果文件已成功打开,它将使用函数读取其内容,该函数将返回 + # CSV文件各行的可迭代对象。csv.reader() + # 然后,它将每行转换为键值对,其中第一列(索引0)表示日期,第二列(索引1)表示高度。这是使用字典理解来完成的。 + # 返回生成的字典。daily_height + # 如果在此过程中发生任何错误(例如无法打开文件),它会打印一条消息指示失败并返回. 
+ # None + # 此函数可用于从CSV文件中检索每日身高数据,以便在程序中进行进一步处理或分析 + def get_dh_hash(self): + try: + with open('daily_height.csv', mode='r') as f: + reader = csv.reader(f) + daily_hash = {rows[0]: rows[2] for rows in reader} + return daily_hash + return None + except: + print("failed open daily_height.csv") + return None +# 该函数类似于 ,但它不是检索高度值,而是从同一个CSV + # 文件 “daily_height.csv” 中检索哈希值。get_dh_hashget_dh_height + # 以下是其工作原理的细分: + # 它尝试在读取模式下打开文件“daily_height.csv”。 + # 如果文件已成功打开,它将使用函数读取其内容,该函数将返回 + # CSV文件各行的可迭代对象。csv.reader() + # 然后,它将每行转换为键值对,其中第一列(索引0)表示日期,第三列(索引2)表示哈希值。这是使用字典理解来完成的。 + # 返回生成的字典。daily_hash + # 如果在此过程中发生任何错误(例如无法打开文件),它会打印一条消息指示失败并返回. + # None与 + # 一样,此函数可用于从CSV文件中检索每日哈希数据,以便在程序中进一步处理或分析。get_dh_height + def get_daily_height(self): + height = 1 + last_dayutc = None + daily_height = self.get_dh_height() + daily_hash = self.get_dh_hash() + #print(daily_height) + #print(daily_hash) + if daily_height is None: + daily_height = {} + daily_hash = {} + else: + if len(daily_height) > 0: + item = daily_height.popitem() + #print(item, type(item[1])) + height = int(item[1])+1 + daily_height[item[0]] = int(item[1]) + + ip = "127.0.0.1" + port = "8332" + user = "user" + password = "password" + + rpc = RPC(ip, port, user, password) +# total_height = rpc.blockchain.get_block_count() + while True: + try: + total_height = rpc.blockchain.get_block_count() + break + except Exception as e: + print("rpctimeout") + time.sleep(10) + rpc = RPC(ip, port, user, password) + if height >= total_height: + return + prev_height = None + prev_hash = None + while True: + blockh = None + while True: + try: + blockh = rpc.blockchain.get_block_header(height, True) + #print(blockh) + break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + + if blockh is not None: + block_time = blockh["time"] + block_height = blockh["height"] + block_hash = blockh["hash"] + dayutc = self.get_day_utc(block_time) + if last_dayutc is None: + last_dayutc = dayutc + print(dayutc, last_dayutc) + if dayutc != last_dayutc: + daily_height[str(last_dayutc)] = prev_height + daily_hash[str(last_dayutc)] = prev_hash + last_dayutc = dayutc + #print(dayutc, daily_height[str(dayutc)], daily_hash[str(dayutc)]) + prev_height = block_height + prev_hash = block_hash + while True: + try: + total_height = rpc.blockchain.get_block_count() + if height == total_height: + break + else: + height += 1 + print("next height " + str(height)) + break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + + if height == total_height: + break + + with open('daily_height.csv', 'w') as f: + for key in daily_height.keys(): + #print(key) + #print(daily_height[key]) + #print(daily_hash[key]) + f.write("%s, %s, %s\n" % (key, daily_height[key], daily_hash[key])) + f.close() +# 该函数负责从区块链中检索每日身高数据并将其存储在名为“daily_height.csv”的CSV文件中。其工作原理如下: + # get_daily_height + # 它使用默认值1初始化变量,并将其设置为 。heightlast_dayutcNone + # 它调用 + # 和 + # 函数以从CSV文件中检索任何现有的每日高度和哈希数据。如果数据不可用,它将初始化空字典。get_dh_heightget_dh_hash + # 如果存在现有的每日身高数据,它将检索最后一个条目,将身高递增1,并使用新的身高值更新字典。 + # 它设置了与比特币区块链交互的RPC连接细节。 + # 它使用RPC调用检索区块链的总高度。rpc.blockchain.get_block_count() + # 它进入一个循环来获取每个区块高度的区块头,直到达到区块链的总高度。 + # 在循环中,如果RPC调用期间出现异常,它会尝试重新连接,从而处理网络问题。 + # 对于成功获取的每个区块标头,它都会提取区块时间、高度、哈希值和UTC日。 + # 如果UTC日发生更改,它会使用当天以前的高度和哈希值更新每日高度和哈希字典。 + # 它将每日高度和哈希数据写入“daily_height.csv”文件。 + # 一旦它处理了所有区块高度,直到区块链的总高度,该函数就会结束。 + # 该功能使用最新的区块链信息有效地更新CSV文件中的每日高度和哈希数据,方便历史数据分析或程序内的其他目的。 + def get_height_timestamp(self): + height = 0 + height_time = self.get_ht() + if height_time is None: + 
height_time = {} + else: + height = len(height_time) + + print("exist height", height) + + ip = "127.0.0.1" + port = "8332" + user = "user" + password = "password" + + rpc = RPC(ip, port, user, password) +# total_height = rpc.blockchain.get_block_count() + while True: + try: + total_height = rpc.blockchain.get_block_count() + break + except Exception as e: + print("rpctimeout") + time.sleep(10) + rpc = RPC(ip, port, user, password) + if height >= total_height: + return + #total_height = rpc.blockchain.get_block_count() + # print("last_height", total_height) + + while True: + blockh = None + while True: + try: + blockh = rpc.blockchain.get_block_header(height, True) + #print(blockh) + break + except: + time.sleep(1) + print("reconnect") + rpc = RPC(ip, port, user, password) + + if blockh is not None: + block_time = blockh["time"] + block_height = blockh["height"] + height_time[str(block_height)] = block_time + #print(str(block_height), height_time[str(block_height)]) + + while True: + try: + total_height = rpc.blockchain.get_block_count() + if height == total_height: + break + else: + height += 1 + print("next height " + str(height)) + break + except: + time.sleep(1) + #print("reconnect") + rpc = RPC(ip, port, user, password) + + if height == total_height: + break + + with open('height_time.csv', 'w') as f: + for key in height_time.keys(): + f.write("%s, %s\n" % (key, height_time[key])) + f.close() +# 该函数从区块链中检索区块高度和相应的时间戳,并将它们存储在名为“height_time.csv”的CSV文件中。以下是其工作原理的细分: + # get_height_timestamp + # 它将变量初始化为0,并使用该函数检索现有的高度时间数据。如果不存在任何数据,则初始化一个空字典。heightget_ht + # 它设置了与比特币区块链交互的RPC连接细节。 + # 它使用RPC调用检索区块链的总高度。rpc.blockchain.get_block_count() + # 如果存在现有的高度时间数据,则将变量设置为字典中的条目数。height + # 它进入一个循环来获取每个区块高度的区块头,直到达到区块链的总高度。 + # 在循环中,如果RPC调用期间出现异常,它会尝试重新连接,从而处理网络问题。 + # 对于每个成功获取的块头,它都会提取块时间和高度,并更新高度时间字典。 + # 它将高度时间数据写入“height_time.csv”文件。 + # 循环一直持续到它处理完所有区块高度,直到区块链的总高度。 + # 该功能有效地将CSV文件中的高度时间数据更新为最新的区块链信息,方便程序内的历史数据分析或其他目的。 + def handle_utxos(self, prices, ht, check_dayutc): + current_price = 0 + #current_price = self.get_current_price() + #if current_price == 0: + # return + print("handle_utxos initiate") + connin = sqlite3.connect("utxos.sqlite") + cin = connin.cursor() + cursorin = cin.execute("SELECT * from utxos") + + #connout = sqlite3.connect("utxos.db") + #cout = connout.cursor() + #cout.execute('CREATE TABLE IF NOT EXISTS utxos(txid TEXT PRIMARY KEY NOT NULL, vout INT, value INT, coinbase INT, height INT, scriptpubkey TEXT, dt TIMESTAMP, price INT, dt2 TIMESTAMP)') + #connout.commit() + coin_idx = 0 + rpc = None + for row in cursorin: + #print(row) + txid = row[0] + vout = row[1] + value = row[2] + coinbase = row[3] + height = row[4] + scriptpubkey = row[5] + dt = ht[str(height)] + #print(dt) + dt2 = time.gmtime(int(dt)) + daystr = time.strftime("%d %b %Y", dt2) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + price = 0 + d_t = datetime.utcfromtimestamp(dayutc) + timedelta(hours=8) + d_t08am = datetime(d_t.year, d_t.month, d_t.day, 8, 0, 0) + dayutc=int((d_t08am - timedelta(hours=8)).timestamp())-28800 + d_t2 = datetime.utcfromtimestamp(check_dayutc) + timedelta(hours=8) + d_t208am = datetime(d_t2.year, d_t2.month, d_t2.day, 8, 0, 0) + check_dayutc=int((d_t208am - timedelta(hours=8)).timestamp())-28800 + print("ceshi",5) + if str(dayutc) in prices: + price = int(prices[str(dayutc)]) + else: + print("failed get tx price") + return + + if str(check_dayutc) in prices: + current_price = int(prices[str(check_dayutc)]) + else: + print("failed get check price") + return + + value = 
value / 100000000 + #print(self.current_dayutc, txid, value, coinbase, height, daystr, dayutc, price, current_price) + + self.summary_utxos(current_price, txid, vout, coinbase, value, height, scriptpubkey, dt, price, dayutc) + self.summary_toplist(txid, vout, coinbase, value, height, scriptpubkey, dt, price, dayutc) + + #sql_insert = 'INSERT INTO utxos VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)' + #insert_data = (txid, vout, value, coinbase, height, scriptpubkey, dt, price, dayutc) + #cout.execute(sql_insert, insert_data) + #if coin_idx % (1024 * 1024) == 0: + #print(coin_idx) + #connout.commit() + coin_idx+=1 + #connout.commit() + if self.lth_rcap > 0: + self.lth_mvrv = self.lth_mv / self.lth_rcap + if self.sth_rcap > 0: + self.sth_mvrv = self.sth_mv / self.sth_rcap + print("save_db initiate") + self.save_db() + print("save_db ok") + + cin.close() + #cout.close() + connin.close() + #connout.close() +# 该方法似乎用于处理来自名为“utxos.sqlite”的 + # SQLite + # 数据库的未使用的事务输出 (UTXO)。以下是其功能的细分:handle_utxos + # 它需要三个参数:(包含历史价格的字典)、(包含区块高度和时间戳的字典)和(检查价格的特定日期)。priceshtcheck_dayutc + # 它初始化为0和0。current_pricecoin_idx + # 它建立与SQLite + # 数据库的连接,并检索用于执行SQL查询的游标。 + # 它循环访问从数据库提取的表中的每一行。utxos对于每一行,它提取各种属性,例如交易 + # ID()、输出索引 ()、值、是否是coinbase交易 ()、区块高度 ()、scriptPubKey () 和时间戳 ()。txidvoutcoinbaseheightscriptpubkeydt + # 它将时间戳转换为人类可读的日期字符串,然后转换为UTC时间戳。dtdaystrdayutc + # 它从字典中获取与对应的历史价格,并将其分配给变量。dayutcpricesprice + # 它检查字典中是否存在 ,并将其相应的价格分配给 。check_dayutcpricescurrent_price + # 它将UTXO值除以1000000000以将其转换为比特币单位。 + # 它调用和方法与相关参数来处理UTXO数据。summary_utxossummary_toplist + # 如有必要,根据某些条件进行计算。lth_mvrvsth_mvrv + # 最后,它调用该方法将处理后的数据保存到数据库中。save_db + # 它关闭游标和数据库连接。 + # 此方法有效地处理来自SQLite数据库的UTXO的处理,执行必要的计算和更新,并将处理后的数据保存回数据库 + def utxos(self, check_dayutc): + + self.current_dt = check_dayutc + self.current_dt2 = time.gmtime(int(self.current_dt)) + self.current_daystr = time.strftime("%d %b %Y", self.current_dt2) + self.current_dayutc = int(time.mktime(time.strptime(self.current_daystr, "%d %b %Y"))) + + topliststr = "toplist" + self.current_daystr + print(topliststr) + self.mdbc_toplist = self.mdb[topliststr] + + # prices_temp = self.get_history_price() + # prices = self.get_history_price2(prices_temp) + prices_temp=btc_historical_price.prices_temp + prices=btc_historical_price.prices + '''for key in prices.keys(): + print(key, prices[key]) + break''' + print(prices) + if len(prices) <= 0: + print("failed get price") + return + ht = self.get_ht() + if ht is None: + print("failed get height and time") + return + + self.handle_utxos(prices, ht, check_dayutc) +# 该方法似乎负责处理 表示的特定日期的未使用事务输出 (UTXO)。以下是其功能的摘要:utxoscheck_dayutc +# +# 它根据提供的 .check_dayutc +# 它使用 和 方法检索历史价格数据。get_history_priceget_history_price2 +# 该方法从纳斯达克 API 检索历史比特币价格数据。get_history_price +# 该方法从 Messari API 检索其他历史价格数据。get_history_price2 +# 它使用该方法检索高度和时间戳数据。get_ht +# 它调用该方法以使用检索到的价格和高度数据处理 UTXO。handle_utxos +# 该方法进一步与 SQLite 数据库交互以处理 UTXO 并执行必要的计算。handle_utxos +# 总体而言,该方法协调了获取历史价格和高度数据并使用它来处理特定日期的 UTXO 的过程。如果成功,它将使用处理后的 UTXO 数据更新相关数据库。utxos +if __name__ == '__main__': + if len(sys.argv) > 1: + check_dt = sys.argv[1] + stats = UtxosIf() + setup('mainnet') + stats.get_height_timestamp() + stats.get_daily_height() + daily_hash = stats.get_dh_hash() + #print(daily_hash) + if daily_hash is None: + print("failed get daily height") + else: + os.system("if [ -e utxos.dat ]; then rm utxos.dat; fi") + os.system("if [ -e utxos.sqlite ]; then rm utxos.sqlite; fi") + check_dayutc = int(time.mktime(time.strptime(check_dt, "%Y-%m-%d"))) + cmd = "~/bitcoin-29.0/bin/bitcoin-cli -rpcuser=user 
-rpcpassword=password -rpcclienttimeout=600 invalidateblock " + daily_hash[str(check_dayutc)]
+            os.system(cmd)
+            print("select ok", daily_hash[str(check_dayutc)])
+            time.sleep(60)
+            os.system("~/bitcoin-29.0/bin/bitcoin-cli -rpcuser=user -rpcpassword=password -rpcclienttimeout=1800 dumptxoutset ~/utxos.dat latest")
+            print("dumptxoutset ok")
+            time.sleep(60)
+            #os.system("./utxo_to_sqlite ./utxos.dat ./utxos.sqlite")
+            os.system("python3 utxo_to_sqlite.py ./utxos.dat ./utxos.sqlite")
+            print("utxo_to_sqlite ok")
+            time.sleep(60)
+            stats.utxos(check_dayutc)
+            print("utxos stat ok")
+            cmd = "~/bitcoin-29.0/bin/bitcoin-cli -rpcuser=user -rpcpassword=password reconsiderblock " + daily_hash[str(check_dayutc)]
+            os.system(cmd)
+            print("reconsiderblock ok")
+# This script performs several tasks around Bitcoin UTXOs (unspent transaction outputs), driven by a command-line argument:
+# 1. It checks that it is being run directly ('__name__ == '__main__'').
+# 2. It checks that a command-line argument was supplied ('len(sys.argv) > 1').
+# 3. It instantiates the 'UtxosIf' class, which holds the UTXO-processing methods.
+# 4. It selects the Bitcoin network with the 'setup' function ('mainnet').
+# 5. It collects height and timestamp data via 'get_height_timestamp' and 'get_daily_height'.
+# 6. It loads the daily block-hash data via 'get_dh_hash'.
+# 7. If the daily hash data is available, it proceeds:
+# - It removes any existing 'utxos.dat' and 'utxos.sqlite' files.
+# - It invalidates a block with the Bitcoin CLI command 'invalidateblock'.
+# - It waits for a while (presumably for the node to process the invalidation).
+# - It dumps the UTXO set with the Bitcoin CLI command 'dumptxoutset'.
+# - It waits again.
+# - It converts the dumped UTXO data into a SQLite database with the 'utxo_to_sqlite' script.
+# - It processes the UTXOs for the requested date with the 'utxos' method.
+# - It re-validates the previously invalidated block with the Bitcoin CLI command 'reconsiderblock'.
+# 8. Each step is accompanied by print statements that report the script's progress.
+# The script is part of a larger system that talks to the Bitcoin network for UTXO-related work; it relies heavily on Bitcoin Core and its command-line interface ('bitcoin-cli')
diff --git a/lyq/btc_utxos_update_lyq3.py b/lyq/btc_utxos_update_lyq3.py
new file mode 100644
index 0000000..3087e6c
--- /dev/null
+++ b/lyq/btc_utxos_update_lyq3.py
@@ -0,0 +1,51 @@
+import time
+import subprocess
+from datetime import datetime, timedelta
+
+def check_running_process():
+    """Check whether an identical job is already running."""
+    command = "ps -ef | grep 'python3 btc_utxos_lyq2.py' | grep -v grep"
+    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
+    output, _ = process.communicate()
+    return bool(output)  # any output means the same process is already running
+
+def run_script_for_date(target_date):
+    """Run the stats script for the given date."""
+    command = f"python3 btc_utxos_lyq2.py {target_date}"
+    result = subprocess.run(command, shell=True)
+    if result.returncode != 0:
+        raise RuntimeError(f"Script failed for date {target_date}")
+
+def main():
+    # Walk forward one day at a time, from 2024-12-16 up to today (UTC)
+    start_date = datetime(2024, 12, 16)
+    end_date = datetime.utcnow()  # today's date
+
+    current_date = start_date
+    skip_date = datetime(2024, 12, 18)  # this date is skipped deliberately
+    while current_date <= end_date:
+        if current_date == skip_date:
+            current_date += timedelta(days=1)
+            continue
+        target_date_str = current_date.strftime('%Y-%m-%d')
+
+        # If the same process is already running, wait and check again
+        if check_running_process():
+            print(f"A previous run is still in progress; waiting before starting {target_date_str}.")
+            time.sleep(60)  # wait 60 seconds, then re-check
+            continue
+
+        # Run the script
+        print(f"Starting the job for {target_date_str}.")
+        try:
+            run_script_for_date(target_date_str)
+            print(f"Finished the job for {target_date_str}.")
+
+            # Move on to the next day
+            current_date += timedelta(days=1)
+        except RuntimeError as e:
+            print(f"Error occurred: {e}. 
diff --git a/lyq/btc_utxos_update_lyq3.py b/lyq/btc_utxos_update_lyq3.py
new file mode 100644
index 0000000..3087e6c
--- /dev/null
+++ b/lyq/btc_utxos_update_lyq3.py
@@ -0,0 +1,51 @@
+import time
+import subprocess
+from datetime import datetime, timedelta
+
+def check_running_process():
+    """Check whether an identical process is already running."""
+    command = "ps -ef | grep 'python3 btc_utxos_lyq2.py' | grep -v grep"
+    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
+    output, _ = process.communicate()
+    return bool(output)  # non-empty output means the same process is running
+
+def run_script_for_date(target_date):
+    """Run the collection script for the given date."""
+    command = f"python3 btc_utxos_lyq2.py {target_date}"
+    result = subprocess.run(command, shell=True)
+    if result.returncode != 0:
+        raise RuntimeError(f"Script failed for date {target_date}")
+
+def main():
+    # Start from 2024-12-16
+    start_date = datetime(2024, 12, 16)
+    end_date = datetime.utcnow()  # today's date
+
+    current_date = start_date
+    a = datetime(2024, 12, 18)
+    if current_date == a:
+        current_date += timedelta(days=1)
+    else:
+        while current_date <= end_date:
+            target_date_str = current_date.strftime('%Y-%m-%d')
+
+            # If an identical process is already running, wait and retry
+            if check_running_process():
+                print(f"An identical process is already running; waiting for it to finish before running the {target_date_str} task.")
+                time.sleep(60)  # wait 60 seconds, then check again
+                continue
+
+            # Run the script
+            print(f"Starting the {target_date_str} task.")
+            try:
+                run_script_for_date(target_date_str)
+                print(f"Task for {target_date_str} finished.")
+
+                # Move on to the next day's data
+                current_date += timedelta(days=1)
+            except RuntimeError as e:
+                print(f"Error occurred: {e}. Retrying {target_date_str}.")
+                time.sleep(60)
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/lyq/check_order_lyq.py b/lyq/check_order_lyq.py
new file mode 100644
index 0000000..6bde9fe
--- /dev/null
+++ b/lyq/check_order_lyq.py
@@ -0,0 +1,184 @@
+# coding=utf-8
+import ujson
+from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
+from binance.spot import Spot
+import time
+import requests
+import datetime
+import pymysql
+import math
+import pymongo
+
+g_spot_client = Spot()
+
+
+class Pair:
+    def __init__(self):
+        pass
+
+    depth_u = 0
+    depth_U = 0
+    depth_ts = 0
+    bids = {}
+    asks = {}
+
+
+g_btcusdt = None
+g_ethusdt = None
+
+
+def init_db():
+    mc = pymongo.MongoClient("mongodb://127.0.0.1:27020/")
+    mdb = mc["border2"]
+    return mc, mdb
+
+
+def get_depth(client, pair):
+    new_pair = Pair()
+    d = client.depth(pair, limit=5000)
+    new_pair.bids = d["bids"]
+    new_pair.asks = d["asks"]
+    new_pair.depth_u = d["lastUpdateId"]
+    print(pair, ": get_depth: init", new_pair.depth_u)
+    #print(new_pair.bids)
+    return new_pair
+
+
+def dict2number(dict_in):
+    dict_out = {}
+    #print("dict2number", dict_in)
+    for id in dict_in:
+        #print("dict2number", id)
+        #price = (int(float(id[0])) / 100) * 100
+        #price = float(id[0])
+        quantity = float(id[1])
+        #pricestr = str(price)
+        dict_out[id[0]] = quantity
+    return dict_out
+
+
+def dict2save(mdb, pair, dict_in, ts):
+    mdbc = mdb[pair]
+    s_append = {}
+    s_append["unixdt"] = int(ts / 1000)
+    #cnt = 0
+    for id in dict_in:
+        # print(cnt, id)
+        #if cnt >= 50:
+            #break
+        # bids_append[id] = top_bids[id]
+        s_append[id[0]] = id[1]
+        #cnt += 1
+    print("dict2save", s_append)
+    mdbc.insert_one(s_append)
+
+def classify_order(dict_in):
+    dict_out = {}
+    for id in dict_in:
+        price = int(int(float(id))/100)*100
+        pricestr = str(price)
+        if pricestr in dict_out:
+            dict_out[pricestr] = dict_out[pricestr]+dict_in[id]
+        else:
+            dict_out[pricestr] = dict_in[id]
+    return dict_out
+
+def stat_order(pair, bids_in, asks_in, ts, old_ts):
+    print(pair, ": stat_order cmp", ts, old_ts)
+    if ts - old_ts < 1000 * 60 * 5:
+        return False
+    bids = dict2number(bids_in)
+    asks = dict2number(asks_in)
+
+    bids_classify = classify_order(bids)
+    asks_classify = classify_order(asks)
+    print("bids_classify", bids_classify)
+    top_bids = sorted(bids_classify.items(), key=lambda x: x[1], reverse=False)
+    top_asks = sorted(asks_classify.items(), key=lambda x: x[1], reverse=False)
+    print("top_bids", top_bids)
+    mc, mdb = init_db()
+
+    dict2save(mdb, pair + "_bids", top_bids, ts)
+    dict2save(mdb, pair + "_asks", top_asks, ts)
+    print(pair, ": stat_order OK at", ts)
+    return True
+
+
+def merge_order(dst, src):
+    new_dst = []
+    for dst_item in dst:
+        found = False
+        for src_item in src:
+            #print("dst", dst_item, "src", src_item)
+            if dst_item[0] == src_item[0]:
+                new_dst.append(src_item)
+                found = True
+                break
+        if found is False:
+            #print("merge_order dst copy", dst_item)
+            new_dst.append(dst_item)
+    return new_dst
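+# Illustrative sketch (an assumption, not the original logic): Binance's
+# documented procedure for maintaining a local order book applies each depth
+# diff by inserting unseen price levels, overwriting known ones, and deleting
+# levels whose quantity drops to zero. merge_order above only overwrites
+# levels that already exist; a dict-based merge like the following would also
+# handle inserts and removals.
+def apply_depth_diff(book: dict, levels: list) -> None:
+    # book: {price_str: qty_str}; levels: [[price_str, qty_str], ...] from the event
+    for price, qty in levels:
+        if float(qty) == 0.0:
+            book.pop(price, None)   # level fully consumed: drop it
+        else:
+            book[price] = qty       # insert a new level or overwrite an existing one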
+def handler_order(pair, pair_name, msg_in, client):
+    ts = msg_in["E"]
+    dU = msg_in["U"]
+    du = msg_in["u"]
+    need_reinit = False
+    if pair is not None:
+        if (dU == pair.depth_u + 1) or (
+                (du > pair.depth_u) and (pair.depth_ts == 0) and (pair.depth_u != 0)):
+            bids = msg_in["b"]
+            asks = msg_in["a"]
+            #print("merge_order dst", pair.bids)
+            #print("merge_order src", bids)
+            #print("handle", pair_name, ts, dU, du, pair.depth_u)
+            pair.bids = merge_order(pair.bids, bids)
+            pair.asks = merge_order(pair.asks, asks)
+            pair.depth_U = dU
+            pair.depth_u = du
+            if stat_order(pair_name, pair.bids, pair.asks, ts, pair.depth_ts):
+                pair.depth_ts = ts
+            print(pair_name, ": append", du)
+        else:
+            if (dU != pair.depth_u + 1) and (pair.depth_u != 0):
+                need_reinit = True
+    else:
+        pass
+    if need_reinit:
+        pair = get_depth(client, pair_name)
+        print(pair_name, ": reinit", pair.depth_u, dU, pair.depth_ts)
+    return pair
+
+
+def order_handler(message):
+    #print(message)
+    global g_spot_client
+    global g_btcusdt
+    global g_ethusdt
+    if message["stream"] == "btcusdt@depth":
+        ddata = message["data"]
+        if ddata["e"] == "depthUpdate":
+            g_btcusdt = handler_order(g_btcusdt, "BTCUSDT", ddata, g_spot_client)
+    elif message["stream"] == "ethusdt@depth":
+        ddata = message["data"]
+        if ddata["e"] == "depthUpdate":
+            g_ethusdt = handler_order(g_ethusdt, "ETHUSDT", ddata, g_spot_client)
+    else:
+        pass
+
+def check_order():
+    global g_spot_client
+    global g_btcusdt
+    global g_ethusdt
+    ws_client = WebsocketClient()
+    ws_client.start()
+    ws_client.instant_subscribe(
+        stream=['btcusdt@depth', 'ethusdt@depth'],
+        callback=order_handler,
+    )
+    g_btcusdt = get_depth(g_spot_client, "BTCUSDT")
+    g_ethusdt = get_depth(g_spot_client, "ETHUSDT")
+
+
+check_order()
diff --git a/lyq/check_zone_lyq.py b/lyq/check_zone_lyq.py
new file mode 100644
index 0000000..3843ec6
--- /dev/null
+++ b/lyq/check_zone_lyq.py
@@ -0,0 +1,146 @@
+# coding=utf-8
+import ujson
+#from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
+from binance.spot import Spot
+import time
+import requests
+import datetime
+import pymysql
+import math
+#import pymongo
+
+g_btcusdt_prices = {}
+g_ethusdt_prices = {}
+
+class ZoneDbIf:
+    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
+        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
+        print("init zone db success!")
+
+    def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
+        with self.conn.cursor() as cursor:
+            print(dayutc, change_us, change_asia, change_eu)
+            sql_insert = "REPLACE INTO btczonechange3 (unixdt, change_us, change_asia, change_eu"
+            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
+            cursor.execute(sql_insert, (dayutc, change_us, change_asia, change_eu))
+        self.conn.commit()
+
+class EthZoneDbIf:
+    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
+        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
+        print("init zone db success!")
+
+    def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
+        with self.conn.cursor() as cursor:
+            print(dayutc, change_us, change_asia, change_eu)
+            sql_insert = "REPLACE INTO ethzonechange3 (unixdt, change_us, change_asia, change_eu"
+            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
+            cursor.execute(sql_insert, (dayutc, change_us, change_asia, change_eu))
+        self.conn.commit()
+
+def get_history_price(spot_client, pair_name):
+    result = spot_client.klines(pair_name, "1h", limit=1000)
+    prices_open = {}
+    prices_close = {}
+    for price in result:
+        prices_open[str(price[0])] = float(price[1])
+        prices_close[str(price[0])] = float(price[4])
+    open_out = sorted(prices_open.items(), reverse=True)
+    close_out = sorted(prices_close.items(), reverse=True)
+    return open_out, close_out,
prices_open, prices_close + +def get_last_price(spot_client, pair_name, cache_open, cache_close): + result = spot_client.klines(pair_name, "1h", limit=1) + for price in result: + cache_open[str(price[0])] = float(price[1]) + cache_close[str(price[0])] = float(price[4]) + open_out = sorted(cache_open.items(), reverse=True) + close_out = sorted(cache_close.items(), reverse=True) + return open_out, close_out, cache_open, cache_close + +def calc_zone(prices_open, price_close, zone_start, zone_end): + zone_total = 30*24 + zone_hours = 0 + zones = {} + price_start = 0 + price_end = 0 + dt_start = None + item_idx = 0 + for dt in prices_open: + tobj = time.gmtime(int(dt[0]) / 1000) + if tobj.tm_hour == zone_start: + price_start = dt[1] + dt_start = tobj + if zone_hours == 0 and tobj.tm_hour < zone_end: + zone_total = zone_total + tobj.tm_hour + 1 + close_list = price_close[item_idx] + price_end = close_list[1] + else: + if tobj.tm_hour == zone_end: + close_list = price_close[item_idx] + price_end = close_list[1] + if price_start > 0 and price_end > 0: + #zones[dt_end] = (price_end-price_start)/price_start + daystr = time.strftime("%d %b %Y", dt_start) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + zones[str(dayutc)] = price_end - price_start + price_start = 0 + price_end = 0 + item_idx = item_idx + 1 + zone_hours = zone_hours + 1 + if zone_hours >= zone_total: + break + return zones + + + +def check_zone(): + dbif = ZoneDbIf() + ethdbif = EthZoneDbIf() + spot_client = Spot() + prices_open, prices_close, cache_open, cache_close = get_history_price(spot_client, "BTCUSDT") + prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_history_price(spot_client, "ETHUSDT") + prev_tm = time.gmtime(time.time()) + print("update", prev_tm.tm_hour) + while True: + zone_asia = calc_zone(prices_open, prices_close, 0, 12) + zone_eu = calc_zone(prices_open, prices_close, 6, 18) + zone_us = calc_zone(prices_open, prices_close, 12, 0) + zone_asia_eth = calc_zone(prices_open_eth, prices_close_eth, 0, 12) + zone_eu_eth = calc_zone(prices_open_eth, prices_close_eth, 6, 18) + zone_us_eth = calc_zone(prices_open_eth, prices_close_eth, 12, 0) + #print(zone_asia) + #print(zone_eu) + #print(zone_us) + for dt in zone_asia: + change_us = 0 + change_eu = 0 + if dt in zone_us: + change_us = zone_us[dt] + if dt in zone_eu: + change_eu = zone_eu[dt] + dbif.save_zone_change(dt, change_us, zone_asia[dt], change_eu) + change_us_eth = 0 + change_eu_eth = 0 + if dt in zone_us_eth: + change_us_eth = zone_us_eth[dt] + if dt in zone_eu_eth: + change_eu_eth = zone_eu_eth[dt] + ethdbif.save_zone_change(dt, change_us_eth, zone_asia_eth[dt], change_eu_eth) + while True: + time.sleep(60) + cur_tm = time.gmtime(time.time()) + if cur_tm.tm_hour != prev_tm.tm_hour: + prev_tm = cur_tm + time.sleep(60) + prices_open, prices_close, cache_open, cache_close = get_last_price(spot_client, "BTCUSDT", cache_open, cache_close) + prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_last_price(spot_client, "ETHUSDT", cache_open_eth, + cache_close_eth) + print("update", cur_tm.tm_hour) + break + +check_zone() diff --git a/lyq/db_if_qt.py b/lyq/db_if_qt.py new file mode 100644 index 0000000..238043e --- /dev/null +++ b/lyq/db_if_qt.py @@ -0,0 +1,562 @@ +# coding=utf-8 +import datetime + +import pymysql +from loguru import logger +import time + + +class DbIf: + def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb"): + self.conn = pymysql.connect(host=host, 
port=port, user=user, password=password, database=dbname,cursorclass=pymysql.cursors.DictCursor) + + def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity): + with self.conn.cursor() as cursor: + print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity) + sql_insert = "REPLACE INTO dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity)) + + ''' + def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)" + cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees)) + ''' + def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, + send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, + asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, + day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, + liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, + nupl,vdd): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr," + sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, nupl,vdd" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, + nupl,vdd)) + self.conn.commit() + ''' + def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, + send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, + asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, + day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, + liveliness, 
ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, + lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr," + sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, " + sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl" + sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, + receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy, + adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, + csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, + price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, + nupl)) + self.conn.commit() + ''' + ''' + # daily daily on-chain volume + def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyvolume`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + # newaddrs + ''' + def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt, + last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, + last_vol): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dayutc, }) + result = cursor.fetchone() + # print(dt_utc) + # print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, 
`volume=%s`,`txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, ( + last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, + last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc)) + else: + print("insert") + sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, + last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees, + last_profit_rate)) + self.conn.commit() + ''' + ''' + def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + #print(sql_insert) + #print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height)) + self.conn.commit() + ''' + ''' + def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, + days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid, + block_height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, ( + dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest, + days_largest, days_current, tx_buy_address, txid, block_height)) + self.conn.commit() + ''' + ''' + def update_to_dailycdd(self, dt_utc, cdd): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, cdd)) + self.conn.commit() + ''' + ''' + def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, + day60, day90, day180, day365, day730): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, + day365, + day730)) + self.conn.commit() + ''' + ''' + def update_to_dailysopr(self, dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, 
last_sth_sopr)) + self.conn.commit() + ''' + ''' + def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, + rloss, rplrate, price, marketcap, rcap, earcap, mvrv): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, ( + dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, + rplrate, + price, marketcap, rcap, earcap, mvrv)) + self.conn.commit() + ''' + # daily volume + ''' + def update_to_dailyvolume(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + ''' + '''with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (volume, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit()''' + ''' + def update_to_dailyfees(self, dt_utc, fees): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, fees)) + self.conn.commit() + ''' + ''' + def import_to_dailyvolume2(self, dt_utc, volume): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, volume)) + self.conn.commit() + + def delete_dailyvolume_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyvolume`" + cursor.execute(sql_query) + self.conn.commit() + + + # daily market cap + def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyprice`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + 
cursor.execute(sql_query) + return cursor.fetchall() + + #daily price + def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply)) + self.conn.commit() + + def update_to_dailyprice(self, dt_utc, price, volume, change): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change)) + self.conn.commit() + + def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply): + with self.conn.cursor() as cursor: + sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)" + cursor.execute(sql_query, {dt_utc,}) + result = cursor.fetchone() + #print(dt_utc) + #print(result) + if result is not None: + if "COUNT(id)" in result: + if result["COUNT(id)"] > 0: + print("update") + sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)' + cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc)) + else: + print("insert") + sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply)) + self.conn.commit() + + def update_dailypricechange(self): + with self.conn.cursor() as cursor: + sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt" + cursor.execute(sql_query) + results = cursor.fetchall() + prevprice = -1 + for result in results: + if prevprice < 0: + prevprice = result["price"] + else: + #print(result["unixdt"], result["price"], result["marketcap"]) + try: + change = (result["price"]/prevprice - 1)*100 + except: + change = 0 + #print(csupply) + datestr = result["unixdt"] + logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change)) + sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s' + cursor.execute(sql_update, (str(change), result["unixdt"])) + prevprice = result["price"] + self.conn.commit() + + def delete_dailyprice_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyprice`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_failed_blockvolume(self, height): + with self.conn.cursor() as cursor: + sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + sql_insert = "DELETE FROM `blockamount` WHERE height=%s" + cursor.execute(sql_insert, (height,)) + self.conn.commit() + + #block check --- big amount for vout + def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamountvout`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query 
+ " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + ''' + + def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout, + profit): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + # print(datetime, txid, vout, voutalias, amount, height) + cursor.execute(sql_insert, + (datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit)) + self.conn.commit() + + ''' + # block check --- big amount tx + def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `bigamounttx`" + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_bigamounttx(self, datetime, txid, amount, height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + #print(datetime, txid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, txid)) + self.conn.commit() + + # block check --- per block amount + def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `blockamount`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if amount > 0: + sql_query = sql_query + "WHERE amount > " + str(amount) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE 
UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + if amount > 0: + sql_query = sql_query + " AND amount > " + str(amount) + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + + cursor.execute(sql_query) + return cursor.fetchall() + + def update_to_blockamount(self, datetime, blockid, amount, height): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (datetime, amount, height, blockid)) + self.conn.commit() + + def delete_node_data(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `blockamount`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamountvout`" + cursor.execute(sql_query) + sql_query = "DELETE FROM `bigamounttx`" + cursor.execute(sql_query) + self.conn.commit() + + def update_realize_cap(self, dayutc, last_rv): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_rv)) + self.conn.commit() + + # daily realize cap + def query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0): + with self.conn.cursor() as cursor: + sql_query = "SELECT * from `dailyrcap`" + + if start_id > 0: + sql_query = sql_query + " WHERE id > " + str(start_id) + if end_id > 0: + sql_query = sql_query + " AND id < " + str(end_id) + else: + if end_id > 0: + sql_query = sql_query + " WHERE id < " + str(end_id) + + if len(start_time) > 0: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')" + else: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())" + else: + if len(end_time) > 0: + sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \ + UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')" + + sql_query = sql_query + " order by `unixdt` desc" + + if limit > 0: + sql_query = sql_query + " LIMIT " + str(limit) + print(sql_query) + cursor.execute(sql_query) + return cursor.fetchall() + + def update_daily_addr(self, dayutc, last_add_cnt): + with self.conn.cursor() as cursor: + sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)" + #print(sql_insert) + #print(datetime, blockid, amount, height) + cursor.execute(sql_insert, (dayutc, last_add_cnt)) + self.conn.commit() + + def delete_daily_addr(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyaddradd`" + cursor.execute(sql_query) + self.conn.commit() + + def delete_daily_rv(self, config): + with self.conn.cursor() as cursor: + sql_query = "DELETE FROM `dailyrcap`" + cursor.execute(sql_query) + self.conn.commit() + 
'''
+
+    def __del__(self):
+        self.conn.close()
diff --git a/lyq/exchangeRate_lyq.py b/lyq/exchangeRate_lyq.py
new file mode 100644
index 0000000..3d0ab48
--- /dev/null
+++ b/lyq/exchangeRate_lyq.py
@@ -0,0 +1,117 @@
+import requests
+import pymysql
+import time
+from datetime import datetime
+
+# Target currency list (RUB is kept here as well so one code path handles all symbols)
+symbols = ["EUR", "GBP", "JPY", "CAD", "SEK", "CHF", "CNY", "RUB"]
+
+# Database configuration
+db_config = {
+    "host": "127.0.0.1",
+    "user": "root",
+    "password": "2GS@bPYcgiMyL14A",
+    "database": "Macroeconomics",
+    "port": 4423
+}
+
+def fetch_rates_frankfurter():
+    base = "USD"
+    url = f"https://api.frankfurter.app/latest?from={base}&to=" + ",".join([s for s in symbols if s != "RUB"])
+    retries = 5
+    while retries > 0:
+        try:
+            response = requests.get(url, timeout=10)
+            response.raise_for_status()
+            data = response.json()
+            rates = data.get("rates", {})
+            if not rates:
+                raise ValueError("API returned empty data")
+            return rates
+        except Exception as e:
+            retries -= 1
+            print(f"Frankfurter request failed, retrying... attempts left: {retries}, error: {e}")
+            time.sleep(1)
+    print("Frankfurter failed after repeated retries, returning empty data")
+    return {}
+
+def fetch_rub():
+    try:
+        url = "https://open.er-api.com/v6/latest/USD"
+        response = requests.get(url, timeout=10)
+        data = response.json()
+        if data.get("result") == "success":
+            rub_rate = data["rates"].get("RUB")
+            if rub_rate:
+                return rub_rate
+    except Exception as e:
+        print(f"Failed to fetch RUB: {e}")
+    return None
+
+def calc_dxy(rates):
+    weights = {
+        "EUR": 0.576,
+        "JPY": 0.136,
+        "GBP": 0.119,
+        "CAD": 0.091,
+        "SEK": 0.042,
+        "CHF": 0.036
+    }
+    weighted_sum = 0
+    weight_total = 0
+    for ccy, w in weights.items():
+        rate = rates.get(ccy)
+        if rate:
+            weighted_sum += rate * w
+            weight_total += w
+    if weight_total > 0:
+        return weighted_sum / weight_total
+    return None
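+# Illustrative sketch (an assumption, not part of the original file): calc_dxy
+# above approximates the dollar index with a weighted arithmetic mean of the
+# USD-based rates. The ICE U.S. Dollar Index is conventionally a weighted
+# geometric mean, which could be computed from the same USD->currency rates:
+def calc_dxy_geometric(rates):
+    weights = {"EUR": 0.576, "JPY": 0.136, "GBP": 0.119,
+               "CAD": 0.091, "SEK": 0.042, "CHF": 0.036}
+    dxy = 50.14348112  # conventional scaling constant of the index
+    for ccy, w in weights.items():
+        rate = rates.get(ccy)  # USD->ccy rate, i.e. USDEUR, USDJPY, ...
+        if not rate:
+            return None
+        dxy *= rate ** w
+    return dxy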
print("init nochain db suceess!") + + def save(self, day, price, ma350x2, ma111, ma350x1, ma350x1r6, ma350x3, ma350x5): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `nochainv3a` (`unixdt`, `price`, `ma350x2`, `ma111`, ma350x1, ma350x1r6, ma350x3, ma350x5) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s)" + # print(sql_insert) + cursor.execute(sql_insert, (day, price, ma350x2, ma111, ma350x1, ma350x1r6, ma350x3, ma350x5)) + self.conn.commit() + +def prepare_maxxx(prices, day, madays): + total = 0 + cnt = 0 + for i in range(madays): + if day in prices: + total += prices[day] + cnt += 1 + # print(day, total, cnt) + day = str(int(day) - 3600 * 24) + + if cnt > 0: + return total / cnt + return 0 + +def prepare_ma350(prices, day): + return prepare_maxxx(prices, day, 350) + +def prepare_ma111(prices, day): + return prepare_maxxx(prices, day, 111) + +def prepare_gold_ratio(prices): + ma350x1 = {} + ma350x1r6 = {} + ma350x2 = {} + ma350x3 = {} + ma350x5 = {} + for day in prices: + ma350x1[day] = prepare_maxxx(prices, day, 350) + ma350x1r6[day] = ma350x1[day] * 1.6 + ma350x2[day] = ma350x1[day] * 2 + ma350x3[day] = ma350x1[day] * 3 + ma350x5[day] = ma350x1[day] * 5 + + return ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5 + +def calc_pi_cycle_top(dbif, prices): + ma350x2 = {} + ma111 = {} + for day in prices: + ma350x2[day] = prepare_ma350(prices, day) * 2 + ma111[day] = prepare_ma111(prices, day) + return ma350x2, ma111 + +def get_current_utc(): + curtime = time.gmtime(time.time()) + daystr = time.strftime("%d %b %Y", curtime) + dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y"))) + return dayutc + +def get_current_price(): + url = "https://data.messari.io/api/v1/assets/eth/metrics/market-data&interval=1d" + header_set = {} + header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY" + response_price = requests.get(url, headers=header_set) + if response_price.status_code == 200: + priceweb = ujson.loads(response_price.content) + if "data" in priceweb: + priceset = priceweb["data"] + if "market_data" in priceset: + pricedata = priceset["market_data"] + if "price_usd" in pricedata: + price = pricedata["price_usd"] + return price + return None + + +def get_history_price(coin_id): + prices = {} + + dayutc = get_current_utc() + price = get_current_price() + if price is not None: + prices[str(dayutc)] = price + print("start...", dayutc, price) + + dayt = time.gmtime() + daystr = time.strftime("%Y", dayt) + year = int(daystr) + end_year = year + while True: + # if end_year < 2022: + # break + url = "" + if end_year != year: + start_year = end_year + url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/price/time-series?start=" + else: + url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/price/time-series?after=" + str( + year) + "-01-01&order=descending&interval=1d" + # now_time = time.gmtime() + # daystr = time.strftime("%Y-%m-%d", now_time) + # url = url + daystr + "&order=desc&format=json" + if end_year != year: + url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&interval=1d&order=descending&interval=1d" + header_set = {} + header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY" + # header_set["Content-Type"] = "application/json" + print(header_set, url) + response_supply = requests.get(url, headers=header_set) + # print(response_supply) + if response_supply.status_code == 200: + #print(response_supply.content) + supplyweb = ujson.loads(response_supply.content) 
+ if "data" in supplyweb: + supplyset = supplyweb["data"] + if "values" in supplyset: + valueset = supplyset["values"] + if valueset is not None: + for supply in valueset: + dayutc = int(supply[0] / 1000) + s = supply[1] + prices[str(dayutc)] = float(s) + # print(s, dayutc, supplys[str(dayutc)]) + # break + else: + break + else: + break + end_year -= 1 + time.sleep(2) + return prices + +def get_eth_history_price(): + return get_history_price("ethereum") + +def nochain(): + global dbif + dbif = NochainDbIf() + print("prepare...") + prices = get_eth_history_price() + #print(prices) + + ma350x2, ma111 = calc_pi_cycle_top(dbif, prices) + print("calc_pi_cycle_top ok.") + ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5 = prepare_gold_ratio(prices); + print("prepare_gold_ratio ok.") + for day in prices: + #print(day) + ma350x21 = 0 + if day in ma350x2: + ma350x21 = ma350x2[day] + ma1111 = 0 + if day in ma111: + ma1111 = ma111[day] + + ma350x11 = 0 + if day in ma350x1: + ma350x11 = ma350x1[day] + + ma350x1r61 = 0 + if day in ma350x1r6: + ma350x1r61 = ma350x1r6[day] + + ma350x31 = 0 + if day in ma350x3: + ma350x31 = ma350x3[day] + + ma350x51 = 0 + if day in ma350x5: + ma350x51 = ma350x5[day] + + # print(day, prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1) + dbif.save(int(day), prices[day], ma350x21, ma1111, ma350x11, + ma350x1r61, ma350x31, ma350x51) + #print("save ok.") + +nochain() diff --git a/lyq/nochain_lyq_utc08.py b/lyq/nochain_lyq_utc08.py new file mode 100644 index 0000000..fd40cf8 --- /dev/null +++ b/lyq/nochain_lyq_utc08.py @@ -0,0 +1,27 @@ +import subprocess +from datetime import datetime, timedelta +import time +while True: +# 获取当前时间的UTC时间 + now = datetime.utcnow() + + # 计算到下一个08:00的时间间隔 + next_run = datetime(now.year, now.month, now.day, 8, 0) + if now >= next_run: + next_run += timedelta(days=1) + sleep_time = (next_run - now).total_seconds() + + # 休眠直到下一个08:00 + time.sleep(sleep_time) + + # 运行 nochain_lyq_v2.py + command1 = f"python3 nochain_lyq_v2.py" + subprocess.run(command1, shell=True) + + # 运行 nochain_update_lyq.py + command2 = f"python3 nochain_update_lyq.py" + subprocess.run(command2, shell=True) + + # 运行 nochain_eth_lyq.py + command3 = f"python3 nochain_eth_lyq.py" + subprocess.run(command3, shell=True) \ No newline at end of file diff --git a/lyq/nochain_lyq_v2.py b/lyq/nochain_lyq_v2.py new file mode 100644 index 0000000..b08e355 --- /dev/null +++ b/lyq/nochain_lyq_v2.py @@ -0,0 +1,736 @@ +# coding=utf-8 +import ujson +from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient +import time +import requests +#from loguru import logger +from datetime import datetime +import pymysql +import math +from datetime import datetime + +from stock_indicators import indicators +from stock_indicators.indicators.common.quote import Quote + +class NochainDbIf: + def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"): + self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor) + print("init nochain db suceess!") + + def save(self, day, price, ma350x2, ma111, supply, flow, s2fratio, s2fdeflection, ma350x1, ma350x1r6, ma350x3, ma350x5): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `nochainv3c` (`unixdt`, `price`, `ma350x2`, `ma111`, `btcsupply`, `flow`, `s2fratio`, `s2fdeflection`, ma350x1, ma350x1r6, ma350x3, ma350x5) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 
%s)" + #print(sql_insert) + cursor.execute(sql_insert, (day, price, ma350x2, ma111, supply, flow, s2fratio, s2fdeflection, ma350x1, ma350x1r6, ma350x3, ma350x5)) + self.conn.commit() + + def save_ssr(self, day, price, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc): + with self.conn.cursor() as cursor: + sql_insert = "REPLACE INTO `nochainv3b` (`unixdt`, `price`, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s)" + #print(sql_insert) + cursor.execute(sql_insert, (day, price, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc)) + self.conn.commit() +# 看起来您已经定义了一个名为 NochainDbIf Python 类,该类与 MySQL 数据库交互。此类具有将数据保存到两个不同表(nochainv2c 和nochainv2b )的方法。save该方法用于保存与加密货币价格和指标相关的数据,而save_ssr该方法用于保存与稳定币供应和SSR(稳定币供应比率)相关的数据。 +# 下面是每种方法的作用的细分: +# __init__:这是使用提供的参数(主机、端口、用户、密码和数据库名称)初始化数据库连接的构造函数方法。它使用pymysql库来建立连接。 +# save:此方法在nochainv2c表中插入或更新记录。它采用代表与加密货币价格和供应相关的各种指标的参数。 +# save_ssr:此方法在nochainv2b表中插入或更新记录。它采用代表与稳定币供应和 SSR 相关的指标的参数。 +# 这两种方法都使用 SQL 语句,REPLACE INTO该语句尝试在表中插入新行。如果存在主键或唯一键冲突的行,则改为更新现有行。 +# 此外,这些方法使用上下文管理器 with self.conn.cursor() as cursor 来确保数据库资源得到正确管理,并在执行 SQL 语句后提交事务 self.conn.commit() 以保留更改。 +def get_history_price(): + prices = {} + response_price = requests.get( + 'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU&api_key=FZqXog4sR-b7cYnXcRVV') + if response_price.status_code == 200: + #print(response_price.content) + priceweb = ujson.loads(response_price.content) + if "datatable" in priceweb: + priceset = priceweb["datatable"] + if "data" in priceset: + pricedata = priceset["data"] + for price in pricedata: + daystr = price[1] + p = price[2] + dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d")) + prices[str(int(dayutc))] = float(p) + #print(price, int(dayutc), prices[str(int(dayutc))]) + return prices +# 此函数似乎可以从纳斯达克 API 检索历史加密货币价格。其工作原理如下:get_history_price +# 它初始化一个空字典,用于存储历史价格。prices +# 它使用该函数向纳斯达克 API 端点发送 GET 请求。requests.get() +# 如果响应状态代码为 200(表示成功),则继续分析响应数据。 +# 它使用模块中的 JSON 响应加载到 Python 字典中。ujson.loads()ujson +# 它检查响应字典中是否存在该键,如果存在,则检索与之关联的密钥。"dataset""data" +# 它遍历每个价格数据点,提取日期和价格。 +# 它将日期字符串转换为 Unix 时间戳,然后使用 解析 。time.mktime() time.strptime() +# 它将时间戳作为字符串键存储在字典中,并将相应的价格作为值。prices +# 最后,它返回包含历史价格的字典。prices +def get_history_price2(pricedict): + #pricedict = {} + dayt = time.gmtime() + daystr = time.strftime("%Y", dayt) + year = int(daystr) + end_year = year + while True: + url = "" + if end_year != year: + start_year = end_year + url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + else: + url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + str( + year) + "-01-01&end="+str(year)+"-12-31&order=descending&interval=1d" + + if end_year != year: + url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d" + header_set = {} + header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY" + # header_set["Content-Type"] = "application/json" + print(header_set, url) + response_price = requests.get(url, headers=header_set) + # print(response_price) + if response_price.status_code == 200: + # print(response_price.content) + priceweb = ujson.loads(response_price.content) + if "data" in priceweb: + priceset = priceweb["data"] + if "values" in priceset: + valueset = priceset["values"] + if valueset is not None: + for supply in valueset: + dayutc = int(supply[0] / 1000) 
+def get_history_price():
+    prices = {}
+    response_price = requests.get(
+        'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU&api_key=FZqXog4sR-b7cYnXcRVV')
+    if response_price.status_code == 200:
+        #print(response_price.content)
+        priceweb = ujson.loads(response_price.content)
+        if "datatable" in priceweb:
+            priceset = priceweb["datatable"]
+            if "data" in priceset:
+                pricedata = priceset["data"]
+                for price in pricedata:
+                    daystr = price[1]
+                    p = price[2]
+                    dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
+                    prices[str(int(dayutc))] = float(p)
+                    #print(price, int(dayutc), prices[str(int(dayutc))])
+    return prices
+# get_history_price retrieves historical bitcoin prices from the Nasdaq API.
+# It initializes an empty prices dict, sends a GET request with
+# requests.get(), and on a 200 response parses the JSON with ujson.loads().
+# It walks the "datatable" -> "data" rows, extracts each date string and
+# price, converts the date to a Unix timestamp via
+# time.mktime(time.strptime(...)), and stores the price in prices keyed by
+# that timestamp string before returning the dict.
+def get_history_price2(pricedict):
+    #pricedict = {}
+    dayt = time.gmtime()
+    daystr = time.strftime("%Y", dayt)
+    year = int(daystr)
+    end_year = year
+    while True:
+        url = ""
+        if end_year != year:
+            start_year = end_year
+            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start="
+        else:
+            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + str(
+                year) + "-01-01&end=" + str(year) + "-12-31&order=descending&interval=1d"
+
+        if end_year != year:
+            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
+        header_set = {}
+        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
+        # header_set["Content-Type"] = "application/json"
+        print(header_set, url)
+        response_price = requests.get(url, headers=header_set)
+        # print(response_price)
+        if response_price.status_code == 200:
+            # print(response_price.content)
+            priceweb = ujson.loads(response_price.content)
+            if "data" in priceweb:
+                priceset = priceweb["data"]
+                if "values" in priceset:
+                    valueset = priceset["values"]
+                    if valueset is not None:
+                        for supply in valueset:
+                            dayutc = int(supply[0] / 1000)
+                            s = supply[1]
+                            ret_time = time.gmtime(dayutc)
+                            ret_daystr = time.strftime("%d %b %Y", ret_time)
+                            ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
+                            pricedict[str(ret_dayutc)] = float(s)
+                            # print(s, dayutc, pricedict[str(dayutc)])
+                            # break
+                    else:
+                        break
+                else:
+                    break
+        end_year -= 1
+        time.sleep(2)
+    return pricedict
+# get_history_price2 fetches historical bitcoin prices from the Messari API
+# into the pricedict argument. Starting from the current year (end_year ==
+# year), it loops backwards one calendar year at a time, building a
+# time-series URL for each year window, authenticating with the
+# x-messari-api-key header, and requesting it with requests.get(). On a 200
+# response it parses the JSON, converts each value's millisecond timestamp to
+# seconds, normalizes it to a UTC day timestamp, and stores the price in
+# pricedict under that key. It decrements end_year and sleeps 2 seconds
+# between requests, stopping once a year returns no data, then returns
+# pricedict.
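+# Illustrative sketch (an assumption, not the original code): the Messari
+# fetchers in this file repeat the same year-windowed pagination; a generic
+# helper could fetch one calendar year of any metric. The URL pattern is the
+# one used above; MESSARI_KEY stands in for the real API key.
+def fetch_messari_year(asset, metric, year):
+    url = ("https://data.messari.io/api/v1/assets/" + asset + "/metrics/" + metric
+           + "/time-series?start=" + str(year) + "-01-01&end=" + str(year)
+           + "-12-31&order=descending&interval=1d")
+    resp = requests.get(url, headers={"x-messari-api-key": "MESSARI_KEY"})
+    if resp.status_code != 200:
+        return []
+    return (resp.json().get("data") or {}).get("values") or []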
+def get_history_supply(supplys):
+    #supplys = {}
+    #issues = {}
+    response_supply = requests.get(
+        'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=TOTBC&api_key=FZqXog4sR-b7cYnXcRVV')
+    if response_supply.status_code == 200:
+        #print(response_supply.content)
+        supplyweb = ujson.loads(response_supply.content)
+        if "datatable" in supplyweb:
+            supplyset = supplyweb["datatable"]
+            # if "end_date" in supplyset:
+                # end_date = supplyset["end_date"]
+            if "data" in supplyset:
+                supplydata = supplyset["data"]
+                #previssue = 0
+                #prevday = 0
+                for supply in supplydata:
+                    daystr = supply[1]
+                    p = supply[2]
+                    dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
+                    supplys[str(int(dayutc))] = float(p)
+                    #if supply[0] == end_date:
+                        #previssue = float(p)
+                        #prevday = dayutc
+                    #else:
+                        #issues[str(int(prevday))] = previssue - supplys[str(int(dayutc))]
+                        #previssue = float(p)
+                        #prevday = dayutc
+                    #print(supply, int(dayutc), supply[str(int(dayutc))])
+    #return supplys, issues
+    print("get_history_supply", supplys)
+    return supplys
+# get_history_supply retrieves the historical total bitcoin supply from the
+# Nasdaq API into the supplys dict: on a 200 response it parses the JSON with
+# ujson.loads(), walks the "datatable" -> "data" rows, converts each date
+# string to a Unix timestamp with time.mktime(time.strptime(...)), and stores
+# the supply keyed by that timestamp string before printing and returning the
+# dict. The commented-out sections would have computed day-over-day supply
+# differences into a separate issues dict, but are currently inactive.
+def get_history_supply2():
+    supplys = {}
+    dayt = time.gmtime()
+    daystr = time.strftime("%Y", dayt)
+    year = int(daystr)
+    end_year = year
+    while True:
+        url = ""
+        if end_year != year:
+            start_year = end_year
+            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/sply-circ/time-series?start="
+        else:
+            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/sply-circ/time-series?start=" + str(
+                year) + "-01-01&end=" + str(year) + "-12-31&order=descending&interval=1d"
+
+        if end_year != year:
+            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
+        header_set = {}
+        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
+        # header_set["Content-Type"] = "application/json"
+        print(header_set, url)
+        response_csupply = requests.get(url, headers=header_set)
+        # print(response_price)
+        if response_csupply.status_code == 200:
+            # print(response_price.content)
+            csweb = ujson.loads(response_csupply.content)
+            if "data" in csweb:
+                csset = csweb["data"]
+                if "values" in csset:
+                    valueset = csset["values"]
+                    if valueset is not None:
+                        for supply in valueset:
+                            dayutc = int(supply[0] / 1000)
+                            s = supply[1]
+                            ret_time = time.gmtime(dayutc)
+                            ret_daystr = time.strftime("%d %b %Y", ret_time)
+                            ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
+                            supplys[str(ret_dayutc)] = float(s)
+                            #print(s, dayutc, supplys[str(dayutc)])
+                            #break
+                    else:
+                        break
+                else:
+                    break
+
+        break
+        end_year -= 1
+        time.sleep(2)
+    return supplys
+# get_history_supply2 fetches historical circulating-supply data for bitcoin
+# from the Messari API, using the same year-windowed pagination and UTC day
+# normalization as get_history_price2. Note the unconditional `break` after
+# the first iteration: the loop effectively terminates after fetching a single
+# year of data, which may be unintentional.
+def calc_issues(supplys):
+    issues = {}
+    prevday = 0
+    previssue = 0
+    for supply_day in supplys:
+        if int(supply_day) > prevday:
+            prevday = int(supply_day)
+            previssue = supplys[supply_day]
+
+    print("calc_issues", prevday, previssue, supplys)
+
+    for dayutc in supplys:
+        issues[str(int(prevday))] = previssue - supplys[str(int(dayutc))]
+        previssue = float(supplys[dayutc])
+        prevday = dayutc
+
+    print(issues)
+    return issues
+# calc_issues derives day-over-day supply differences (daily issuance) from
+# the supplys dict: the first loop finds the most recent day and its supply
+# (prevday/previssue), then the second loop iterates over all days, computing
+# previssue minus the day's supply and storing the result keyed by the
+# previous iteration's day before updating previssue and prevday. There is a
+# real problem with this implementation: because the differences are keyed by
+# the previous iteration's day and overwritten as the loop runs, the issues
+# dict reflects iteration order rather than clean consecutive-day issuance.
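+# Illustrative sketch (an assumption, not the original code): sorting the days
+# first gives unambiguous consecutive-day issuance and avoids the overwrite
+# noted above.
+def daily_issuance(supplys):
+    issues = {}
+    days = sorted(supplys, key=int)
+    for prev_day, day in zip(days, days[1:]):
+        issues[day] = supplys[day] - supplys[prev_day]  # coins issued on `day`
+    return issues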
+def prepare_year_issues(issues):
+    issues_year = {}
+    temp_issues_year = {}
+    for day in issues:
+        dayt = time.gmtime(int(day))
+        daystr = time.strftime("%Y", dayt)
+        if daystr in temp_issues_year:
+            temp_issues_year[daystr] = temp_issues_year[daystr] + issues[day]
+        else:
+            temp_issues_year[daystr] = issues[day]
+    #print(temp_issues_year)
+    for day in issues:
+        dayt = time.gmtime(int(day))
+        daystr = time.strftime("%Y", dayt)
+        if daystr in temp_issues_year:
+            issues_year[day] = temp_issues_year[daystr]
+    #print(issues_year)
+    return issues_year
+# prepare_year_issues aggregates daily issuance data into yearly totals. It
+# first sums each day's issuance into temp_issues_year keyed by year, then
+# walks the days again and assigns each day its year's total issuance in
+# issues_year, which it returns. This makes the annual flow available per day
+# for analysis.
+def prepare_maxxx(prices, day, madays):
+    total = 0
+    cnt = 0
+    for i in range(madays):
+        if day in prices:
+            total += prices[day]
+            cnt += 1
+            # print(day, total, cnt)
+        day = str(int(day) - 3600 * 24)
+
+    if cnt > 0:
+        return total / cnt
+    return 0
+# prepare_maxxx computes a simple moving average of prices over the `madays`
+# days ending at `day`: prices is a dict keyed by Unix day timestamps, day is
+# the end of the averaging period, and madays is the window length. It walks
+# backwards one day (86,400 seconds) at a time, summing the prices that exist
+# and counting them, then returns total/cnt, or 0 if no prices fell inside the
+# window.
+def prepare_ma350(prices, day):
+    return prepare_maxxx(prices, day, 350)
+# prepare_ma350 is a thin wrapper returning the 350-day moving average by
+# calling prepare_maxxx(prices, day, 350).
+def prepare_ma111(prices, day):
+    return prepare_maxxx(prices, day, 111)
+# prepare_ma111 likewise returns the 111-day moving average via
+# prepare_maxxx(prices, day, 111).
+def prepare_ma200(ssr, day):
+    return prepare_maxxx(ssr, day, 200)
+# prepare_ma200 returns the 200-day moving average of the SSR series by
+# calling prepare_maxxx(ssr, day, 200); it is used for the Stablecoin Supply
+# Ratio (SSR) calculations below.
+def prepare_gold_ratio(prices):
+    ma350x1 = {}
+    ma350x1r6 = {}
+    ma350x2 = {}
+    ma350x3 = {}
+    ma350x5 = {}
+    for day in prices:
+        ma350x1[day] = prepare_maxxx(prices, day, 350)
+        ma350x1r6[day] = ma350x1[day]*1.6
+        ma350x2[day] = ma350x1[day] * 2
+        ma350x3[day] = ma350x1[day] * 3
+        ma350x5[day] = ma350x1[day] * 5
+
+    return ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5
+# prepare_gold_ratio derives a family of "golden ratio" bands from the 350-day
+# moving average: for every day it computes ma350x1 via prepare_maxxx and then
+# the 1.6x, 2x, 3x and 5x multiples (ma350x1r6, ma350x2, ma350x3, ma350x5),
+# returning all five series for comparison against price.
+def cal_ssr_osc(ssr):
+    ssr_osc = {}
+    for day in ssr:
+        ssr_ma = prepare_ma200(ssr, day)
+        ssr_osc[day] = ssr[day]/ssr_ma
+    return ssr_osc
+# cal_ssr_osc computes the SSR oscillator: for each day it divides the SSR
+# value by its 200-day moving average (prepare_ma200) and stores the ratio in
+# ssr_osc, giving a normalized view of stablecoin supply dynamics that can be
+# used to spot trends or signals.
+def calc_pi_cycle_top(dbif, prices):
+    ma350x2 = {}
+    ma111 = {}
+    for day in prices:
+        ma350x2[day] = prepare_ma350(prices, day)*2
+        ma111[day] = prepare_ma111(prices, day)
+    return ma350x2, ma111
+# calc_pi_cycle_top computes the two series of the Pi Cycle Top indicator:
+# ma350x2, twice the 350-day moving average, and ma111, the 111-day moving
+# average (the dbif parameter is unused and presumably reserved for future
+# use). The indicator is conventionally read as flagging a potential cycle
+# peak when the 111-day MA crosses above twice the 350-day MA.
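+# Illustrative sketch (an assumption, not the original code): the tables store
+# both series, but the crossover itself could be flagged like this, walking
+# the days in order and reporting where ma111 moves from below to above
+# 2 x ma350.
+def pi_cycle_cross_days(ma111, ma350x2):
+    crosses, was_below = [], None
+    for day in sorted(set(ma111) & set(ma350x2), key=int):
+        below = ma111[day] < ma350x2[day]
+        if was_below and not below:
+            crosses.append(day)  # 111DMA just crossed above 2*350DMA
+        was_below = below
+    return crosses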
+def calc_s2f_ratio(dbif, prices, supplys, issues_year):
+    s2f_ratio = {}
+    s2f_deflection = {}
+    cnt = 1
+    for day in supplys:
+        if day in issues_year:
+            s2f = 0
+            if int(day) >= 1672502400: #2023
+                s2f = supplys[day]/(900*365)
+            else:
+                s2f = supplys[day]/issues_year[day]
+            print(s2f,day,supplys[day],issues_year[day])
+            s2f_ratio[day] = 0.09*(math.pow(s2f, 3.3))
+            s2f_deflection[day] = prices[day]/s2f_ratio[day]
+    return s2f_ratio, s2f_deflection
+# Computes two Stock-to-Flow (S2F) metrics for Bitcoin. 'dbif' is unused and appears
+# reserved for future use; 'prices' and 'supplys' are Unix-timestamp-keyed dicts and
+# 'issues_year' holds annualized issuance. For each day with issuance data it takes
+# s2f = supply / annual issuance, except from 2023-01-01 (Unix 1672502400) onward,
+# where a fixed issuance of 900 BTC/day is assumed, giving supply / (900*365). The
+# model price is s2f_ratio = 0.09 * s2f^3.3, and s2f_deflection = price / s2f_ratio
+# measures how far the market price sits above or below the model. The 'cnt'
+# variable is leftover debugging scaffolding and is never used.
+def get_stable_coin_supply(coin_id):
+    supplys = {}
+    dayt = time.gmtime()
+    daystr = time.strftime("%Y", dayt)
+    year = int(daystr)
+    end_year = year
+    while True:
+        url = ""
+        if end_year != year:
+            start_year = end_year
+            url = "https://data.messari.io/api/v1/assets/"+coin_id+"/metrics/sply-circ/time-series?start="
+        else:
+            url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?start=" + str(
+                year) + "-01-01&end="+str(year)+"-12-31&order=descending&interval=1d"
+
+        if end_year != year:
+            url = url + str(start_year) + "-01-01&end="+ str(end_year) + "-12-31&order=descending&interval=1d"
+        header_set = {}
+        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
+        print(header_set, url)
+        response_supply = requests.get(url, headers=header_set)
+        if response_supply.status_code == 200:
+            supplyweb = ujson.loads(response_supply.content)
+            if "data" in supplyweb:
+                supplyset = supplyweb["data"]
+                if "values" in supplyset:
+                    valueset = supplyset["values"]
+                    if valueset is not None:
+                        for supply in valueset:
+                            dayutc = int(supply[0]/1000)
+                            s = supply[1]
+                            supplys[str(dayutc)] = float(s)
+                    else:
+                        break
+        else:
+            break
+        end_year -= 1
+
+        time.sleep(2)
+    return supplys
+# Retrieves the historical circulating-supply series for a stablecoin from the
+# Messari API given its 'coin_id'. Starting from the current year it requests the
+# sply-circ time series one calendar year at a time, walking backwards: each 200
+# response is parsed as JSON, each timestamp is converted from milliseconds to
+# seconds, and the values are stored in 'supplys' keyed by Unix-timestamp string.
+# It stops when a year returns no values or a request fails, sleeping 2 seconds
+# between requests to avoid overloading the server. Note that the API key is
+# hardcoded here. A retry sketch for the request step follows below.
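+# A minimal, hedged sketch (hypothetical helper, not in the original file) of
+# retrying the Messari GET with a timeout and linear backoff instead of giving up
+# on the first failure; 'requests' and 'time' are already imported in this module:
+def get_with_retry(url, headers, retries=3, backoff=2):
+    for attempt in range(retries):
+        try:
+            resp = requests.get(url, headers=headers, timeout=30)
+            if resp.status_code == 200:
+                return resp
+            print("unexpected status:", resp.status_code)
+        except requests.RequestException as e:
+            print("request failed:", e)
+        time.sleep(backoff * (attempt + 1))  # wait 2s, 4s, 6s between attempts
+    return None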
+def get_usdt_supply():
+    return get_stable_coin_supply("tether")
+# Convenience wrapper: fetches the Tether (USDT) supply series without the caller
+# having to specify the coin id.
+def get_usdc_supply():
+    return get_stable_coin_supply("usd-coin")
+# Same wrapper pattern for USD Coin (USDC), using coin id "usd-coin".
+def get_busd_supply():
+    return get_stable_coin_supply("binance-usd")
+# Same wrapper pattern for Binance USD (BUSD), using coin id "binance-usd".
+def get_dai_supply():
+    return get_stable_coin_supply("dai")
+# Same wrapper pattern for Dai (DAI), using coin id "dai".
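+# cal_ssr below computes SSR as BTC market cap divided by the combined stablecoin
+# supply. A minimal self-contained sketch of that formula (hypothetical helper; the
+# numbers in the example are made up):
+def ssr_for_day(price, supply, stable_supplies):
+    # price: BTC price in USD; supply: BTC circulating supply;
+    # stable_supplies: per-coin supplies (USDT, USDC, BUSD, DAI, ...)
+    stables = sum(stable_supplies)
+    if stables <= 0:
+        return 0.0
+    return (price * supply) / stables
+# e.g. ssr_for_day(60000, 19.6e6, [110e9, 33e9, 0.07e9, 5e9])
+#   -> market cap 1.176e12 / stables 148.07e9 -> about 7.94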
+def cal_ssr(dbif, prices, supplys):
+    print("calc_ssr")
+    usdts = get_usdt_supply()
+    usdcs = get_usdc_supply()
+    busds = get_busd_supply()
+    dais = get_dai_supply()
+    print("calc_ssr start")
+    stables = {}
+    ssr = {}
+    marketcap = {}
+    for day in usdts:
+        stables[day] = usdts[day]
+
+        if day in usdcs:
+            stables[day] += usdcs[day]
+        if day in busds:
+            stables[day] += busds[day]
+        if day in dais:
+            stables[day] += dais[day]
+
+        if day in prices:
+            if day in supplys:
+                marketcap[day] = prices[day] * supplys[day]
+                if stables[day] > 0:
+                    ssr[day] = marketcap[day]/stables[day]
+                else:
+                    ssr[day] = 0
+    print(ssr)
+    ssrosc = {}
+    quotes_list = []
+    for day in ssr:
+        dayt = time.gmtime(int(day))
+        daystr = time.strftime("%Y-%m-%d", dayt)
+        dtobj = datetime.strptime(daystr, "%Y-%m-%d")
+        quotes_list.append(Quote(dtobj, 0, 0, 0, ssr[day], 0))
+    ssr_osc = indicators.get_bollinger_bands(quotes_list, 200, 2)
+    for item in ssr_osc:
+        if item.z_score is not None:
+            ssrosc[str(int(item.date.timestamp()))] = item.z_score
+    print(ssrosc)
+    for day in ssr:
+        price = 0
+        if day in prices:
+            price = prices[day]
+        marketcap1 = 0
+        if day in marketcap:
+            marketcap1 = marketcap[day]
+        usdt = 0
+        if day in usdts:
+            usdt = usdts[day]
+        usdc = 0
+        if day in usdcs:
+            usdc = usdcs[day]
+        busd = 0
+        if day in busds:
+            busd = busds[day]
+        dai = 0
+        if day in dais:
+            dai = dais[day]
+        stable = 0
+        if day in stables:
+            stable = stables[day]
+        ssr1 = 0
+        if day in ssr:
+            ssr1 = ssr[day]
+        ssrosc1 = 0
+        if day in ssrosc:
+            ssrosc1 = ssrosc[day]
+        dbif.save_ssr(day, price, marketcap1, usdt, usdc, busd, dai,
+                      stable, ssr1, ssrosc1)
+# Computes the Stablecoin Supply Ratio and its oscillator and persists both. It
+# fetches the USDT/USDC/BUSD/DAI supply series, sums them per day into 'stables',
+# computes market cap = price * supply and SSR = marketcap / stables, then feeds
+# the SSR series (carried in the close slot of Quote objects) through 200-period
+# Bollinger Bands with 2 standard deviations and keeps the z-score as the
+# oscillator. Finally each day's values are written to the database via
+# dbif.save_ssr.
+def nochain():
+    global dbif
+    dbif = NochainDbIf()
+    temp_prices = get_history_price()
+    prices = get_history_price2(temp_prices)
+    temp_supplys = get_history_supply2()
+    supplys = get_history_supply(temp_supplys)
+    issues = calc_issues(supplys)
+    issues_year = prepare_year_issues(issues)
+
+    ma350x2, ma111 = calc_pi_cycle_top(dbif, prices)
+
+    s2f_ratio, s2f_deflection = calc_s2f_ratio(dbif, prices, supplys, issues_year)
+
+    ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5 = prepare_gold_ratio(prices)
+
+    for day in prices:
+        ma350x21 = 0
+        if day in ma350x2:
+            ma350x21 = ma350x2[day]
+        ma1111 = 0
+        if day in ma111:
+            ma1111 = ma111[day]
+        supply = 0
+        if day in supplys:
+            supply = supplys[day]
+        issue = 0
+        if day in issues:
+            issue = issues[day]
+        s2f_ratio1 = 0
+        if day in s2f_ratio:
+            s2f_ratio1 = s2f_ratio[day]
+        s2f_deflection1 = 0
+        if day in s2f_deflection:
+            s2f_deflection1 = s2f_deflection[day]
+
+        ma350x11 = 0
+        if day in ma350x1:
+            ma350x11 = ma350x1[day]
+
+        ma350x1r61 = 0
+        if day in ma350x1r6:
+            ma350x1r61 = ma350x1r6[day]
+
+        ma350x31 = 0
+        if day in ma350x3:
+            ma350x31 = ma350x3[day]
+
+        ma350x51 = 0
+        if day in ma350x5:
+            ma350x51 = ma350x5[day]
+
+        dbif.save(int(day), prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1, ma350x11, ma350x1r61, ma350x31, ma350x51)
+
+    cal_ssr(dbif, prices, supplys)
+# Orchestrates data collection, processing, and storage for the nochain pipeline:
+# it initializes the global NochainDbIf database interface, loads historical prices
+# and supply, derives issuance and its yearly aggregation, computes the Pi Cycle Top
+# series (note that prepare_gold_ratio recomputes ma350x2 with the same values), the
+# S2F ratio and deflection, and the golden-ratio bands, saves one row per day via
+# dbif.save, and finally runs the SSR calculation via cal_ssr.
+nochain()
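+# A minimal standalone sketch of the Bollinger z-score pattern used in cal_ssr,
+# kept as a comment so it does not run on import (window shortened to 20 here;
+# the series is made up):
+#
+#     from datetime import datetime, timedelta
+#     start = datetime(2024, 1, 1)
+#     quotes = [Quote(start + timedelta(days=i), 0, 0, 0, 10 + (i % 5), 0)
+#               for i in range(60)]              # close slot carries the SSR value
+#     bands = indicators.get_bollinger_bands(quotes, 20, 2)
+#     for item in bands:
+#         if item.z_score is not None:           # None until the window has filled
+#             print(item.date, item.z_score)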
diff --git a/lyq/nochain_update_lyq.py b/lyq/nochain_update_lyq.py
new file mode 100644
index 0000000..2c19f02
--- /dev/null
+++ b/lyq/nochain_update_lyq.py
@@ -0,0 +1,87 @@
+# coding=utf-8
+import ujson
+from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
+import time
+import requests
+import datetime
+import pymysql
+import math
+from stock_indicators import indicators
+from stock_indicators.indicators.common.quote import Quote
+
+class NochainDbIf:
+    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
+        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
+        print("init nochain db success!")
+    # Constructor: opens a pymysql connection to the MySQL database using the given
+    # (or default) connection parameters. cursorclass=pymysql.cursors.DictCursor
+    # makes queries return rows as dicts keyed by column name rather than tuples,
+    # which is more convenient to consume. A success message is then printed.
+    def get_ssr(self):
+        with self.conn.cursor() as cursor:
+            sql_cmd = "SELECT unixdt,ssr FROM nochainv3b order by unixdt"
+            cursor.execute(sql_cmd)
+            self.conn.commit()
+            return cursor.fetchall()
+    # Fetches the full (unixdt, ssr) series from the nochainv3b table, ordered by
+    # unixdt. The with-block scopes the cursor, execute() runs the query, commit()
+    # finalizes the transaction, and fetchall() returns all rows as a list of
+    # dicts (courtesy of DictCursor).
+    def update_ssr(self, dt, ssrosc):
+        with self.conn.cursor() as cursor:
+            sql_update = "UPDATE nochainv3b SET ssrosc=%s WHERE unixdt=%s"
+            cursor.execute(sql_update, (ssrosc, dt))
+            self.conn.commit()
+    # Updates the ssrosc column of the row whose unixdt matches 'dt'. An earlier
+    # draft built this statement with Python %-string formatting (an SQL-injection
+    # risk) and also redundantly SET unixdt to the same value used in the WHERE
+    # clause; the parameterized form above avoids both problems.
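+    # A minimal usage sketch (not part of the original patch): callers may want to
+    # pair update_ssr with rollback() (defined just below) so that a failed UPDATE
+    # is undone rather than left half-applied:
+    #
+    #     try:
+    #         dbif.update_ssr(dt, ssrosc)
+    #     except Exception:
+    #         dbif.rollback()
+    #         raise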
+    def rollback(self):
+        self.conn.rollback()
+    # Rolls back the current transaction on the connection, reverting any
+    # uncommitted changes. Typically called when an error occurs mid-transaction.
+def nochain():
+    try:
+        dbif = NochainDbIf()
+        ssr_ma = dbif.get_ssr()
+        quotes_list = []
+        for item in ssr_ma:
+            print(item["unixdt"])
+            quotes_list.append(Quote(item["unixdt"],0,0,0,item["ssr"],0))
+        ssr_osc = indicators.get_bollinger_bands(quotes_list, 200, 2)
+        for item in ssr_osc:
+            if item.z_score is not None:
+                #print(item.date, item.sma, item.percent_b, item.z_score, item.width)
+                #dbif.update_ssr(item.date, item.z_score)
+                break
+        print("ok")
+    except Exception as e:
+        #dbif.rollback();
+        print(e)
+    finally:
+        print("end")
+nochain()
+# nochain() recomputes the Bollinger-band oscillator over the SSR (Stablecoin
+# Supply Ratio) series stored in MySQL: it opens a NochainDbIf connection, loads
+# (unixdt, ssr) rows via get_ssr, wraps them in Quote objects (the close slot
+# carries the SSR value), and calls indicators.get_bollinger_bands with a
+# 200-period window and 2 standard deviations. The write-back via update_ssr is
+# currently commented out, so the loop breaks after the first available z-score.
+# Any exception is caught and printed, and "end" is printed in the finally block
+# either way.
\ No newline at end of file
diff --git a/lyq/redis_if_qt.py b/lyq/redis_if_qt.py
new file mode 100644
index 0000000..2c88abf
--- /dev/null
+++ b/lyq/redis_if_qt.py
@@ -0,0 +1,610 @@
+import time
+
+from walrus import *
+from loguru import logger
+class RedisIf:
+    def __init__(self, host="127.0.0.1", port=6379, password="", db=0):
+        self.db = Database(host=host, port=port, db=db)
+
+        self.zbalance = self.db.ZSet("balancev2d")
+        '''
+        #realize cap progress
+        self.rv = self.db.Hash("rv")
+        #address and balance progress
+        self.addr = self.db.Hash("addr")
+        #block volume progress
+        self.bv = self.db.Hash("bv")
+        #daily volume progress
+        self.dv = self.db.Hash("dv")
+        '''
+        '''
+        #stat tx progress
+        self.tx = self.db.Hash("tx")
+
+        #ETH daily contract progress
+        self.eth_dc = self.db.Hash("ethdc")
+
+        #btc stats fee
+        self.btc_stats = self.db.Hash("btcstats")
+
+        #btc stats volume
+        self.btc_volume = self.db.Hash("btcvolume")
+
+        # btc stats cdd
+        self.btc_cdd = self.db.Hash("btccdd")
+
+        # btc stats cdd days
+        self.btc_cdd_days = self.db.Hash("btccdddays")
+        '''
+        self.btc_block_time = self.db.Hash("btcblocktimev2d")
+        '''
+        self.btc_sopr = self.db.Hash("btcsopr")
+        '''
+        self.btc_data = self.db.Hash("btc_datav2d")
+
+        self.active_address = self.db.Set("active_addressv2d")
+        self.send_address = self.db.Set("send_addressv2d")
+        self.receive_address = self.db.Set("receive_addressv2d")
+
+    def get_btc_data(self, key):
+        value = None
+        if self.btc_data[key] is not None:
+            value = self.btc_data[key]
+        return value
+
+    def set_btc_data(self, key, value):
+        self.btc_data[key] = value
+
+    def reset_btc_data(self):
+        self.btc_data.clear()
+        self.zbalance.clear()
+        self.btc_block_time.clear()
+
+    '''
+    def get_last_btc_sopr(self):
+        last_sopr_buy = None
+        last_asopr_buy = None
+        last_easopr_buy = None
+        last_lth_sopr_buy = None
+        last_sth_sopr_buy = None
+        last_asol = None
+        last_eaasol = None
+
+        if self.btc_sopr["last_asol"] is not None:
+            last_asol = self.btc_sopr["last_asol"]
+            #last_asol = float(self.btc_sopr["last_asol"].decode("utf-8"))
+        if self.btc_sopr["last_eaasol"] is not None:
+            last_eaasol = self.btc_sopr["last_eaasol"]
+            #last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8"))
+
+
+        if self.btc_sopr["last_sopr_buy"] is not None:
+            last_sopr_buy = self.btc_sopr["last_sopr_buy"]
+            #last_sopr_buy = 
float(self.btc_sopr["last_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_asopr_buy"] is not None: + last_asopr_buy = self.btc_sopr["last_asopr_buy"] + #last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8")) + if self.btc_sopr["last_easopr_buy"] is not None: + last_easopr_buy = self.btc_sopr["last_easopr_buy"] + #last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8")) + if self.btc_sopr["last_lth_sopr_buy"] is not None: + last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"] + #last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_buy"] is not None: + last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"] + #last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8")) + + last_sopr_sell = None + last_asopr_sell = None + last_easopr_sell = None + last_lth_sopr_sell = None + last_sth_sopr_sell = None + if self.btc_sopr["last_sopr_sell"] is not None: + last_sopr_sell = self.btc_sopr["last_sopr_sell"] + # last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_asopr_sell"] is not None: + last_asopr_sell = self.btc_sopr["last_asopr_sell"] + # last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8")) + if self.btc_sopr["last_easopr_sell"] is not None: + last_easopr_sell = self.btc_sopr["last_easopr_sell"] + # last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8")) + if self.btc_sopr["last_lth_sopr_sell"] is not None: + last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"] + # last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8")) + if self.btc_sopr["last_sth_sopr_sell"] is not None: + last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"] + # last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8")) + + return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell + + def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell): + self.btc_sopr["last_asol"] = last_asol + self.btc_sopr["last_eaasol"] = last_eaasol + + self.btc_sopr["last_sopr_buy"] = last_sopr_buy + self.btc_sopr["last_asopr_buy"] = last_asopr_buy + self.btc_sopr["last_easopr_buy"] = last_easopr_buy + self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy + self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy + self.btc_sopr["last_sopr_sell"] = last_sopr_sell + self.btc_sopr["last_asopr_sell"] = last_asopr_sell + self.btc_sopr["last_easopr_sell"] = last_easopr_sell + self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell + self.btc_sopr["last_sth_sopr_sell"] = last_sth_sopr_sell + ''' + def get_block_time(self, height): + block_time = None + height_str = str(height) + if self.btc_block_time[height_str] is not None: + block_time = int(self.btc_block_time[height_str].decode("utf-8")) + #block_time = int(self.btc_block_time[height_str].decode("utf-8")) + + return block_time + + def set_block_time(self, height, ts): + height_str = str(height) + self.btc_block_time[height_str] = ts + + ''' + def get_last_btc_cdd_days(self): + last_cdd = None + last_acdd = None + last_eacdd = None + last_cdd_day1= None + last_cdd_day7 = None + last_cdd_day30 = None + last_cdd_day60 = None + last_cdd_day90 = 
None + last_cdd_day180 = None + last_cdd_day365 = None + last_cdd_day730 = None + + last_date = None + last_height = None + last_date_str = None + + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_acdd"] is not None: + last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8")) + if self.btc_cdd["last_eacdd"] is not None: + last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day1"] is not None: + last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day7"] is not None: + last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day30"] is not None: + last_cdd_day30 = float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day60"] is not None: + last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day90"] is not None: + last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day180"] is not None: + last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day365"] is not None: + last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8")) + if self.btc_cdd_days["last_cdd_day730"] is not None: + last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8")) + if self.btc_cdd_days["last_date"] is not None: + last_date = int(self.btc_cdd_days["last_date"].decode("utf-8")) + if self.btc_cdd_days["last_height"] is not None: + last_height = int(self.btc_cdd_days["last_height"].decode("utf-8")) + if self.btc_cdd_days["last_date_str"] is not None: + last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8") + return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str + + def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + self.btc_cdd["last_acdd"] = acdd + self.btc_cdd["last_eacdd"] = eacdd + self.btc_cdd_days["last_cdd_day1"] = day1 + self.btc_cdd_days["last_cdd_day7"] = day7 + self.btc_cdd_days["last_cdd_day30"] = day30 + self.btc_cdd_days["last_cdd_day60"] = day60 + self.btc_cdd_days["last_cdd_day90"] = day90 + self.btc_cdd_days["last_cdd_day180"] = day180 + self.btc_cdd_days["last_cdd_day365"] = day365 + self.btc_cdd_days["last_cdd_day730"] = day730 + self.btc_cdd_days["last_date"] = dt + self.btc_cdd_days["last_height"] = height + self.btc_cdd_days["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_cdd(self): + last_cdd = None + last_date = None + last_height = None + last_date_str = None + if self.btc_cdd["last_cdd"] is not None: + last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8")) + if self.btc_cdd["last_date"] is not None: + last_date = int(self.btc_cdd["last_date"].decode("utf-8")) + if self.btc_cdd["last_height"] is not None: + last_height = int(self.btc_cdd["last_height"].decode("utf-8")) + if self.btc_cdd["last_date_str"] is not None: + last_date_str = self.btc_cdd["last_date_str"].decode("utf-8") + return last_cdd, last_date, last_height, last_date_str + + def set_last_btc_cdd(self, cdd, dt, height, dtstr): + self.btc_cdd["last_cdd"] = cdd + 
self.btc_cdd["last_date"] = dt + self.btc_cdd["last_height"] = height + self.btc_cdd["last_date_str"] = dtstr + + def get_last_btc_volume(self): + last_volume = None + last_date = None + last_height = None + last_date_str = None + if self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_volume["last_date"] is not None: + last_date = int(self.btc_volume["last_date"].decode("utf-8")) + if self.btc_volume["last_height"] is not None: + last_height = int(self.btc_volume["last_height"].decode("utf-8")) + if self.btc_volume["last_date_str"] is not None: + last_date_str = self.btc_volume["last_date_str"].decode("utf-8") + return last_volume, last_date, last_height, last_date_str + + def set_last_btc_volume(self, volume, dt, height, dtstr): + self.btc_volume["last_volume"] = volume + self.btc_volume["last_date"] = dt + self.btc_volume["last_height"] = height + self.btc_volume["last_date_str"] = dtstr + ''' + ''' + def get_last_btc_stats(self): + last_fees = None + last_date = None + last_height = None + last_date_str = None + last_volume = None + if self.btc_stats["last_fees"] is not None: + last_fees = float(self.btc_stats["last_fees"].decode("utf-8")) + if self.btc_volume["last_volume"] is not None: + last_volume = float(self.btc_volume["last_volume"].decode("utf-8")) + if self.btc_stats["last_date"] is not None: + last_date = int(self.btc_stats["last_date"].decode("utf-8")) + if self.btc_stats["last_height"] is not None: + last_height = int(self.btc_stats["last_height"].decode("utf-8")) + if self.btc_stats["last_date_str"] is not None: + last_date_str = self.btc_stats["last_date_str"].decode("utf-8") + return last_fees, last_volume, last_date, last_height, last_date_str + + def set_last_btc_stats(self, fees, volume, dt, height, dtstr): + self.btc_stats["last_fees"] = fees + self.btc_volume["last_volume"] = volume + self.btc_stats["last_date"] = dt + self.btc_stats["last_height"] = height + self.btc_stats["last_date_str"] = dtstr + + + def get_last_eth_dc(self): + last_date = None + last_height = None + last_date_str = None + if self.eth_dc["last_date"] is not None: + last_date = int(self.eth_dc["last_date"].decode("utf-8")) + if self.eth_dc["last_height"] is not None: + last_height = int(self.eth_dc["last_height"].decode("utf-8")) + if self.eth_dc["last_date_str"] is not None: + last_date_str = self.eth_dc["last_date_str"].decode("utf-8") + return last_date, last_height, last_date_str + + def set_last_eth_dc(self, dt, height, dtstr): + self.eth_dc["last_date"] = dt + self.eth_dc["last_height"] = height + self.eth_dc["last_date_str"] = dtstr + ''' + ''' + def get_last_dv(self): + last_dv = None + last_date = None + last_height = None + last_date_str = None + if self.dv["last_dv"] is not None: + last_dv = float(self.dv["last_dv"].decode("utf-8")) + if self.dv["last_date"] is not None: + last_date = int(self.dv["last_date"].decode("utf-8")) + if self.dv["last_height"] is not None: + last_height = int(self.dv["last_height"].decode("utf-8")) + if self.dv["last_date_str"] is not None: + last_date_str = self.dv["last_date_str"].decode("utf-8") + return last_dv, last_date, last_height, last_date_str + + def set_last_dv(self, dv, dt, height, dtstr): + self.dv["last_dv"] = dv + self.dv["last_date"] = dt + self.dv["last_height"] = height + self.dv["last_date_str"] = dtstr + + def get_last_bv(self): + last_height = None + if self.bv["last_height"] is not None: + last_height = int(self.bv["last_height"].decode("utf-8")) + return 
last_height + + def set_last_bv(self, height): + self.bv["last_height"] = height + ''' + ''' + def get_last_ind(self): + last_csupply = None + last_mintusd = None + last_sumcsupply = None + last_sumcdd = None + last_sumeacdd = None + last_rprofit = None + last_rloss = None + last_marketcap = None + last_rcap = None + last_mvrv = None + + last_earcap = None + if self.tx["last_csupply"] is not None: + last_csupply = float(self.tx["last_csupply"].decode("utf-8")) + if self.tx["last_mintusd"] is not None: + last_mintusd = float(self.tx["last_mintusd"].decode("utf-8")) + if self.tx["last_sumcsupply"] is not None: + last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8")) + if self.tx["last_sumcdd"] is not None: + last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8")) + if self.tx["last_sumeacdd"] is not None: + last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8")) + if self.tx["last_rprofit"] is not None: + last_rprofit = float(self.tx["last_rprofit"].decode("utf-8")) + if self.tx["last_rloss"] is not None: + last_rloss = float(self.tx["last_rloss"].decode("utf-8")) + if self.tx["last_marketcap"] is not None: + last_marketcap = float(self.tx["last_marketcap"].decode("utf-8")) + if self.tx["last_rcap"] is not None: + last_rcap = float(self.tx["last_rcap"].decode("utf-8")) + if self.tx["last_earcap"] is not None: + last_earcap = float(self.tx["last_earcap"].decode("utf-8")) + if self.tx["last_mvrv"] is not None: + last_mvrv = float(self.tx["last_mvrv"].decode("utf-8")) + + + return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv + + def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv): + self.tx["last_csupply"] = last_csupply + self.tx["last_mintusd"] = last_mintusd + self.tx["last_sumcsupply"] = last_sumcsupply + self.tx["last_sumcdd"] = last_sumcdd + self.tx["last_sumeacdd"] = last_sumeacdd + self.tx["last_rprofit"] = last_rprofit + self.tx["last_rloss"] = last_rloss + self.tx["last_marketcap"] = last_marketcap + self.tx["last_rcap"] = last_rcap + self.tx["last_earcap"] = last_earcap + self.tx["last_mvrv"] = last_mvrv + + + def get_last_tx(self): + last_profit = None + last_fees = None + last_newaddr_cnt = None + last_newaddr_vol = None + last_active_addr_cnt = None + last_tx_addr_cnt = None + last_rx_addr_cnt = None + last_vol_change = None + last_vol = None + last_avol = None + last_date = None + last_height = None + last_date_str = None + last_txs = None + last_eatxs = None + if self.tx["last_profit_rate"] is not None: + last_profit = int(self.tx["last_profit"].decode("utf-8")) + if self.tx["last_fees"] is not None: + last_fees = int(self.tx["last_fees"].decode("utf-8")) + if self.tx["last_txs"] is not None: + last_txs = int(self.tx["last_txs"].decode("utf-8")) + if self.tx["last_eatxs"] is not None: + last_eatxs = int(self.tx["last_eatxs"].decode("utf-8")) + if self.tx["last_newaddr_cnt"] is not None: + last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8")) + if self.tx["last_newaddr_vol"] is not None: + last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8")) + if self.tx["last_active_addr_cnt"] is not None: + last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8")) + if self.tx["last_tx_addr_cnt"] is not None: + last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8")) + if 
self.tx["last_rx_addr_cnt"] is not None: + last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8")) + if self.tx["last_vol_change"] is not None: + last_vol_change = float(self.tx["last_vol_change"].decode("utf-8")) + if self.tx["last_vol"] is not None: + last_vol = float(self.tx["last_vol"].decode("utf-8")) + if self.tx["last_avol"] is not None: + last_avol = float(self.tx["last_avol"].decode("utf-8")) + if self.tx["last_date"] is not None: + last_date = int(self.tx["last_date"].decode("utf-8")) + if self.tx["last_height"] is not None: + last_height = int(self.tx["last_height"].decode("utf-8")) + if self.tx["last_date_str"] is not None: + last_date_str = self.tx["last_date_str"].decode("utf-8") + return last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, last_vol, last_avol, last_date, last_height, last_date_str + + def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt, newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt, vol_change, vol, avol, dt, height, dtstr): + self.tx["last_profit"] = last_profit + self.tx["last_fees"] = last_fees + self.tx["last_txs"] = last_txs + self.tx["last_eatxs"] = last_eatxs + self.tx["last_newaddr_cnt"] = newaddr_cnt + self.tx["last_newaddr_vol"] = newaddr_vol + self.tx["last_active_addr_cnt"] = active_addr_cnt + self.tx["last_tx_addr_cnt"] = tx_addr_cnt + self.tx["last_rx_addr_cnt"] = rx_addr_cnt + self.tx["last_vol_change"] = vol_change + self.tx["last_vol"] = vol + self.tx["last_avol"] = avol + self.tx["last_date"] = dt + self.tx["last_height"] = height + self.tx["last_date_str"] = dtstr + ''' + ''' + def get_last_addr(self): + last_daily_cnt = None + last_date = None + last_height = None + last_date_str = None + if self.addr["last_daily_cnt"] is not None: + last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8")) + if self.addr["last_date"] is not None: + last_date = int(self.addr["last_date"].decode("utf-8")) + if self.addr["last_height"] is not None: + last_height = int(self.addr["last_height"].decode("utf-8")) + if self.addr["last_date_str"] is not None: + last_date_str = self.addr["last_date_str"].decode("utf-8") + return last_daily_cnt, last_date, last_height, last_date_str + + def set_last_addr(self, daily_cnt, dt, height, dtstr): + self.addr["last_daily_cnt"] = daily_cnt + self.addr["last_date"] = dt + self.addr["last_height"] = height + self.addr["last_date_str"] = dtstr + ''' + + def is_active_address(self, address): + result = address in self.active_address + if not result: + self.active_address.add(address) + return result + + def reset_active_address(self): + self.active_address.clear() + + def get_active_address_cnt(self): + return len(self.active_address) + + def is_send_address(self, address): + result = address in self.send_address + if not result: + self.send_address.add(address) + return result + + def reset_send_address(self): + self.send_address.clear() + + def get_send_address_cnt(self): + return len(self.send_address) + + def is_receive_address(self, address): + result = address in self.receive_address + if not result: + self.receive_address.add(address) + return result + + def reset_receive_address(self): + self.receive_address.clear() + + def get_receive_address_cnt(self): + return len(self.receive_address) + + def save_addr(self, address, balance): + new_balance = balance + if address in self.zbalance: + new_balance = self.zbalance.score(address) + balance + #print("update", 
self.zbalance.score(address), balance, new_balance) + #time.sleep(10) + if new_balance < 0.01: + del self.zbalance[address] + #print("check exist", address, address in self.zbalance) + #time.sleep(10) + return + self.zbalance.add({address: new_balance}) + + ''' + def delete_addr(self, config): + self.addr.clear() + self.zbalance.clear() + ''' + def is_in_addr(self, address): + return address in self.zbalance + + def get_addr_cnt(self): + return len(self.zbalance) + + ''' + def delete_rv(self, config): + self.rv.clear() + + def get_last_rv(self): + last_rv = None + last_date = None + last_height = None + last_date_str = None + if self.rv["last_rv"] is not None: + last_rv = float(self.rv["last_rv"].decode("utf-8")) + if self.rv["last_date"] is not None: + last_date = int(self.rv["last_date"].decode("utf-8")) + if self.rv["last_height"] is not None: + last_height = int(self.rv["last_height"].decode("utf-8")) + if self.rv["last_date_str"] is not None: + last_date_str = self.rv["last_date_str"].decode("utf-8") + return last_rv, last_date, last_height, last_date_str + + def set_last_rv(self, rv, dt, height, dtstr): + self.rv["last_rv"] = rv + self.rv["last_date"] = dt + self.rv["last_height"] = height + self.rv["last_date_str"] = dtstr + ''' + + def get_all_address(self): + return self.zbalance.keys() + + def delete_address_data(self, config): + self.zbalance.clear() + + ''' + def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0): + if len(address) > 0: + results = [] + result = {} + result["address"] = address + balance = self.zbalance.score(address) + print(balance) + if balance is not None: + result["balance"] = balance + results.append(result) + return results + + match_result = None + if start_balance > 0: + if end_balance > 0: + match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False) + else: + match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False) + else: + if end_balance > 0: + match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False) + + results = [] + if match_result is not None: + #print(match_result) + for addr, balance2 in match_result: + address = addr.decode('utf-8') + result = {} + result["address"] = address + result["balance"] = balance2 + results.append(result) + if limit > 0 and len(results) >= limit: + break + return results + ''' + + + + + + + + + + +
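+# A minimal usage sketch (not part of the original file; assumes a local Redis on
+# the default port, and the address string is made up), kept as a comment so it
+# does not run on import:
+#
+#     rif = RedisIf(host="127.0.0.1", port=6379)
+#     rif.save_addr("bc1qexample", 1.5)        # upserts the balance into the ZSet
+#     rif.save_addr("bc1qexample", -1.495)     # net balance < 0.01 -> key removed
+#     print(rif.get_addr_cnt(), rif.is_in_addr("bc1qexample"))
+#     rif.is_active_address("bc1qexample")     # False on first call, then recorded
+#     print(rif.get_active_address_cnt())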