feat: coinbus-related data collection code
lyq/Binance_fapi.py (new file, 119 lines)
@@ -0,0 +1,119 @@
import pymysql
import requests
import time
import schedule
from datetime import datetime, timedelta


# MySQL connection helper
def connect_to_db():
    return pymysql.connect(
        host="127.0.0.1",  # database host
        user="root",  # database user
        password="2GS@bPYcgiMyL14A",  # database password
        database="binance_api",  # database name
        port=4423  # database port
    )


# Run a single SQL statement and commit it
def execute_query(query, params=None):
    conn = connect_to_db()  # connect to the database
    with conn.cursor() as cursor:
        cursor.execute(query, params)  # run the SQL statement
    conn.commit()  # commit the transaction
    conn.close()  # close the connection


# Convert a millisecond timestamp to Beijing time
def bj_time(timestamp):
    # utcfromtimestamp() yields UTC, so add 8 hours for Beijing time (UTC+8)
    return (datetime.utcfromtimestamp(timestamp / 1000) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')


# Binance API client
class BinanceAPI:
    base_url = "https://fapi.binance.com"  # base URL of the Binance futures API

    @staticmethod
    def get(endpoint, params=None):
        # Send a GET request to the Binance API
        response = requests.get(f"{BinanceAPI.base_url}{endpoint}", params=params)
        return response.json()  # return the response as JSON


# Task 1: fetch funding rates and insert them into the database
def funding_rate():
    # Fetch the BTC and ETH funding-rate history
    btc_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "BTCUSDT"})
    eth_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "ETHUSDT"})

    # Prepare the SQL insert statements
    btc_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
                 VALUES ("BTCUSDT", %s, %s)"""
    eth_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
                 VALUES ("ETHUSDT", %s, %s)"""

    # Insert the most recent funding-rate record for each symbol
    execute_query(btc_sql, (btc_data[-1]['fundingTime'], btc_data[-1]['fundingRate']))
    execute_query(eth_sql, (eth_data[-1]['fundingTime'], eth_data[-1]['fundingRate']))


# Task 2: fetch open interest and insert it into the database
def open_interest():
    # Fetch the current BTC and ETH open interest
    btc_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "BTCUSDT"})
    eth_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "ETHUSDT"})

    # Prepare the SQL insert statements
    btc_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
                 VALUES ("BTCUSDT", %s, %s)"""
    eth_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
                 VALUES ("ETHUSDT", %s, %s)"""

    # Run the inserts
    execute_query(btc_sql, (btc_data['time'], btc_data['openInterest']))
    execute_query(eth_sql, (eth_data['time'], eth_data['openInterest']))


# Task 3: fetch the taker long/short ratio and insert it into the database
def long_short_ratio(interval):
    # Fetch the BTC and ETH taker long/short ratios for the given period
    btc_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
        "symbol": "BTCUSDT", "period": interval
    })
    eth_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
        "symbol": "ETHUSDT", "period": interval
    })

    # Prepare the SQL insert statements (one table per period)
    btc_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
                  VALUES ("BTCUSDT", %s, %s, %s, %s)"""
    eth_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
                  VALUES ("ETHUSDT", %s, %s, %s, %s)"""

    # Insert the most recent record for each symbol
    execute_query(btc_sql, (btc_data[-1]['timestamp'], btc_data[-1]['buyVol'], btc_data[-1]['sellVol'], btc_data[-1]['buySellRatio']))
    execute_query(eth_sql, (eth_data[-1]['timestamp'], eth_data[-1]['buyVol'], eth_data[-1]['sellVol'], eth_data[-1]['buySellRatio']))


# Register the recurring jobs
def schedule_jobs():
    # Run the funding-rate task daily at 00:01, 08:01 and 16:01
    schedule.every().day.at("00:01").do(funding_rate)
    schedule.every().day.at("08:01").do(funding_rate)
    schedule.every().day.at("16:01").do(funding_rate)

    # Run the open-interest task at seconds 15, 25, 35, 45 and 55 of every minute
    schedule.every().minute.at(":15").do(open_interest)
    schedule.every().minute.at(":25").do(open_interest)
    schedule.every().minute.at(":35").do(open_interest)
    schedule.every().minute.at(":45").do(open_interest)
    schedule.every().minute.at(":55").do(open_interest)

    # Run the long/short-ratio task at second 15 of every minute for each period (5m, 15m, 30m, ...)
    intervals = ["5m", "15m", "30m", "1h", "2h", "4h", "6h", "12h", "1d"]
    for interval in intervals:
        schedule.every().minute.at(":15").do(long_short_ratio, interval=interval)


# Start the scheduler loop
def run():
    schedule_jobs()  # register the scheduled jobs
    while True:
        schedule.run_pending()  # run any jobs that are due
        time.sleep(1)  # poll once per second


if __name__ == "__main__":
    run()  # start the scheduler
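The INSERT statements above assume the target tables already exist. A minimal bootstrap sketch, reusing execute_query() from this file; the column names are taken from the INSERTs, but the types and the lack of keys are assumptions, not the real schema:

# Hypothetical one-off setup; types are guesses inferred from the INSERTs above.
def create_tables():
    ddl = [
        """CREATE TABLE IF NOT EXISTS fundingrate (
               symbol CHAR(15) NOT NULL,
               ts BIGINT NOT NULL,  -- millisecond timestamp from Binance
               fundingRate DOUBLE NOT NULL
           )""",
        """CREATE TABLE IF NOT EXISTS openInterest (
               symbol CHAR(15) NOT NULL,
               ts BIGINT NOT NULL,
               openInterest DOUBLE NOT NULL
           )""",
    ]
    # One longshortratio table per period, matching the f-string table names above
    for interval in ["5m", "15m", "30m", "1h", "2h", "4h", "6h", "12h", "1d"]:
        ddl.append(f"""CREATE TABLE IF NOT EXISTS longshortratio{interval} (
               symbol CHAR(15) NOT NULL,
               ts BIGINT NOT NULL,
               buyVol DOUBLE NOT NULL,
               sellVol DOUBLE NOT NULL,
               buySellRatio DOUBLE NOT NULL
           )""")
    for stmt in ddl:
        execute_query(stmt)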
lyq/CoinmarketCap.py (new file, 115 lines)
@@ -0,0 +1,115 @@
import requests
import pymysql
import time
from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime, timedelta


# API key and request headers
API_KEY = "83bf85c1-1bd8-426a-a043-6b67dad8bda5"
headers = { "X-CMC_PRO_API_KEY": API_KEY }
base_url = "https://pro-api.coinmarketcap.com"
url = f"{base_url}/v1/cryptocurrency/listings/latest"


# MySQL connection settings
db_config = {
    'host': '127.0.0.1',  # database host
    'user': 'root',  # database user
    'password': '2GS@bPYcgiMyL14A',  # database password
    'database': 'coinmarketcap',  # database name
    'port': 4423  # database port
}


# Create the table if it does not already exist
def create_table():
    connection = pymysql.connect(**db_config)  # connect to the database
    cursor = connection.cursor()  # create a cursor

    # CREATE TABLE statement ('#' starts a line comment in MySQL)
    create_table_query = """
    CREATE TABLE IF NOT EXISTS marketInfo (
        id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, # auto-increment ID
        update_time DATETIME NOT NULL, # update time
        symbol CHAR(15) NOT NULL, # coin symbol
        ranks INT NOT NULL, # CoinMarketCap rank
        price DOUBLE NOT NULL, # current price
        market_cap DOUBLE NOT NULL, # market capitalization
        volume_24h DOUBLE NOT NULL, # 24-hour volume
        volume_change_24h DOUBLE NOT NULL, # 24-hour volume change
        percent_change_1h DOUBLE NOT NULL, # 1-hour price change
        percent_change_24h DOUBLE NOT NULL, # 24-hour price change
        percent_change_7d DOUBLE NOT NULL, # 7-day price change
        percent_change_30d DOUBLE NOT NULL, # 30-day price change
        percent_change_60d DOUBLE NOT NULL, # 60-day price change
        percent_change_90d DOUBLE NOT NULL # 90-day price change
    );
    """
    cursor.execute(create_table_query)  # run the CREATE TABLE statement
    connection.commit()  # commit the transaction
    cursor.close()  # close the cursor
    connection.close()  # close the connection


# Convert a UTC timestamp string to Beijing time
def bj_time(utc_time):
    """Convert a UTC time string to Beijing time."""
    utc_time = datetime.strptime(utc_time, '%Y-%m-%dT%H:%M:%S.%fZ')  # parse the UTC string
    beijing_time = utc_time + timedelta(hours=8)  # Beijing time is UTC+8
    return beijing_time.strftime('%Y-%m-%d %H:%M:%S')  # format as a string


# Fetch market data and insert it into the database
def marketcap():
    try:
        # Request the latest cryptocurrency listings from the CoinMarketCap API
        response = requests.get(url, headers=headers, params={"limit": 200})
        response.raise_for_status()  # raise on HTTP errors
    except requests.RequestException:
        time.sleep(60)  # wait one minute, then retry once
        response = requests.get(url, headers=headers, params={"limit": 200})

    data = response.json()  # parse the JSON response
    for item in data['data']:  # iterate over the returned coins
        quote = item['quote']['USD']  # USD market data
        update_time = bj_time(quote['last_updated'])  # convert the update time to Beijing time
        symbol = item['symbol']  # coin symbol
        ranks = item['cmc_rank']  # rank
        price = quote['price']  # price
        market_cap = quote['market_cap']  # market cap
        volume_24h = quote['volume_24h']  # 24-hour volume
        volume_change_24h = quote['volume_change_24h']  # 24-hour volume change
        percent_change_1h = quote['percent_change_1h']  # 1-hour price change
        percent_change_24h = quote['percent_change_24h']  # 24-hour price change
        percent_change_7d = quote['percent_change_7d']  # 7-day price change
        percent_change_30d = quote['percent_change_30d']  # 30-day price change
        percent_change_60d = quote['percent_change_60d']  # 60-day price change
        percent_change_90d = quote['percent_change_90d']  # 90-day price change

        # Insert the row into MySQL
        connection = pymysql.connect(**db_config)  # connect to the database
        cursor = connection.cursor()  # create a cursor
        insert_query = """
        INSERT INTO marketInfo (
            update_time, symbol, ranks, price, market_cap, volume_24h,
            volume_change_24h, percent_change_1h, percent_change_24h,
            percent_change_7d, percent_change_30d, percent_change_60d,
            percent_change_90d
        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
        """
        # Run the insert
        cursor.execute(insert_query, (
            update_time, symbol, ranks, price, market_cap, volume_24h,
            volume_change_24h, percent_change_1h, percent_change_24h,
            percent_change_7d, percent_change_30d, percent_change_60d,
            percent_change_90d
        ))
        connection.commit()  # commit the transaction
        cursor.close()  # close the cursor
        connection.close()  # close the connection


# Scheduled job: run marketcap() every 5 minutes
def schedule_job():
    scheduler = BlockingScheduler()  # blocking scheduler
    scheduler.add_job(marketcap, 'cron', minute='0,5,10,15,20,25,30,35,40,45,50,55')  # every 5 minutes
    scheduler.start()  # start the scheduler


if __name__ == "__main__":
    create_table()  # on startup, create the table if it does not exist
    schedule_job()  # start the scheduled fetch-and-insert job
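The try/except in marketcap() retries exactly once and does not re-check the second response. A bounded-retry variant, a sketch only and not part of this commit; get_with_retry is a hypothetical helper name:

# Sketch of a bounded-retry GET (assumed behavior, illustrative names).
def get_with_retry(url, headers, params, attempts=3, backoff=60):
    for attempt in range(attempts):
        try:
            response = requests.get(url, headers=headers, params=params)
            response.raise_for_status()
            return response
        except requests.RequestException:
            if attempt == attempts - 1:
                raise  # out of retries: surface the error
            time.sleep(backoff)  # wait before the next attempt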
lyq/Macroeconomic_COVITGDP_v2.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import requests
import pymysql
from datetime import datetime
import time


def get_bea_data(year):
    # Fetch quarterly GDP-by-industry data from the BEA API
    url = ("https://apps.bea.gov/api/data?&UserID=146B5757-D9E3-442C-B6AC-ADE9E6B71114&method=GetData&DataSetName=GDPbyIndustry&Year=%s&Industry=ALL&tableID=15&Frequency=Q&ResultFormat=JSON" % year)
    response = requests.get(url)
    return response.json()['BEAAPI']['Results'][0]['Data']


def update_database(cursor, data):
    # Map BEA industry descriptions to COVITGDP column names
    industry_map = {
        'Agriculture, forestry, fishing, and hunting': 'VAPGDPAFH',
        'Mining': 'VAPGDPM',
        'Construction': 'VAPGDPC',
        'Manufacturing': 'VAPGDPMA',
        'Retail trade': 'VAPGDPR',
        'Wholesale trade': 'VAPGDPW',
        'Utilities': 'VAPGDPU',
        'Transportation and warehousing': 'VAPGDPT',
        'Information': 'VAPGDPI',
        'Finance, insurance, real estate, rental, and leasing': 'VAPGDPFIRL',
        'Professional and business services': 'VAPGDPPBS',
        'Educational services, health care, and social assistance': 'VAPGDPHCSA',
        'Arts, entertainment, recreation, accommodation, and food services': 'VAPGDPAF',
        'Other services, except government': 'CPGDPOSEG',
        'Government': 'Federation',
        'State and local': 'State_local'
    }

    for entry in data:
        year = entry["Year"]
        quarter = entry["Quarter"]
        new_time = f"{year}Q{quarter}"  # e.g. '2025Q1'
        industry = entry["IndustrYDescription"]  # this odd capitalization is the field name the API returns
        value = entry["DataValue"]

        if industry in industry_map:
            column = industry_map[industry]

            # Does a row for this quarter already exist?
            cursor.execute("SELECT quarterly FROM COVITGDP WHERE quarterly = %s", (new_time,))
            result = cursor.fetchone()
            if result:
                # Update the column only if the stored value differs
                cursor.execute(f"SELECT {column} FROM COVITGDP WHERE quarterly = %s", (new_time,))
                old_value = cursor.fetchone()[0]

                if old_value != value:
                    cursor.execute(f"UPDATE COVITGDP SET {column} = %s WHERE quarterly = %s", (value, new_time))
                else:
                    print(f"No update needed for {column} for {new_time}")

            else:
                # VAPGDPAFH creates the quarter's row; other columns upsert in case the row appears in between
                if column == 'VAPGDPAFH':
                    cursor.execute("INSERT INTO COVITGDP (quarterly, VAPGDPAFH) VALUES (%s, %s)", (new_time, value))
                else:
                    cursor.execute(f"INSERT INTO COVITGDP (quarterly, {column}) VALUES (%s, %s) ON DUPLICATE KEY UPDATE {column} = VALUES({column})", (new_time, value))


def main():
    years = 2025

    while True:
        try:
            db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
            cursor = db.cursor()

            data = get_bea_data(years)
            update_database(cursor, data)
            db.commit()
        except pymysql.MySQLError as e:
            print(f"Database connection error: {e}")
            break
        except Exception as e:
            print(f"An error occurred: {e}")
        finally:
            if 'cursor' in locals():
                cursor.close()
            if 'db' in locals():
                db.close()

        time.sleep(86400)  # run once a day


if __name__ == "__main__":
    main()
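update_database() fills one column per industry into a single wide row keyed by quarter, and the ON DUPLICATE KEY UPDATE clause only works if quarterly is a unique key. A minimal sketch of the assumed table layout; the types and the key are assumptions, not the real schema:

# Hypothetical DDL matching what the queries above rely on.
COVITGDP_DDL = """
CREATE TABLE IF NOT EXISTS COVITGDP (
    quarterly CHAR(6) NOT NULL UNIQUE, -- e.g. '2025Q1'; unique key for the upsert
    VAPGDPAFH DOUBLE NULL,
    VAPGDPM DOUBLE NULL
    -- ...one DOUBLE column per industry_map entry...
);
"""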
lyq/Macroeconomic_CPI_NSA_v2.py (new file, 101 lines)
@@ -0,0 +1,101 @@
import time
import requests
import json
import pymysql
from datetime import datetime


# Fetch data from the BLS API
def fetch_data(series_ids):
    headers = {'Content-type': 'application/json'}
    data = json.dumps({"seriesid": series_ids, "startyear": "2024", "endyear": "2024"})
    try:
        response = requests.post('https://api.bls.gov/publicAPI/v2/timeseries/data/', data=data, headers=headers)
        response.raise_for_status()  # raise an exception for HTTP errors
        return json.loads(response.text)
    except requests.exceptions.RequestException:
        return None


# Convert the BLS period format (e.g. 'M05') to a datetime
def convert_date(year, period):
    date_string = f"{year}/{period.replace('M', '')}/01"
    return datetime.strptime(date_string, '%Y/%m/%d')


# Insert one row into the given MySQL table
def insert_data(cursor, table_name, date, name, value):
    cursor.execute(
        f"INSERT INTO {table_name}(date, name, value) VALUES (%s, %s, %s)",
        (date, name, value)
    )


# Process series data and insert it into the database
def process_series_data(json_data, table_name, names):
    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)

    cursor = db.cursor()

    for i, series in enumerate(json_data['Results']['series']):
        for data_point in sorted(series['data'], key=lambda x: (x['year'], x['period'])):
            year = data_point['year']
            period = data_point['period']
            value = data_point['value']
            date = convert_date(year, period)
            name = names[i] if i < len(names) else f"Unknown {i}"

            # Insert only if this (date, name) pair is not already stored
            cursor.execute(f"SELECT COUNT(*) FROM {table_name} WHERE date = %s AND name = %s", (date, name))
            if cursor.fetchone()[0] == 0:
                insert_data(cursor, table_name, date, name, value)
                db.commit()

    db.close()


# Merge the 'series' lists of several BLS responses into one payload
def merge_json_data(json_data_list):
    merged_series = []
    for json_data in json_data_list:
        if json_data and 'Results' in json_data and 'series' in json_data['Results']:
            merged_series.extend(json_data['Results']['series'])
    return {'Results': {'series': merged_series}}


# Main script loop
while True:
    series_ids1 = [
        'CUUR0000SA0', 'CUUR0000SAF1', 'CUUR0000SAF11', 'CUUR0000SAF111', 'CUUR0000SAF112', 'CUUR0000SEFJ',
        'CUUR0000SAF113', 'CUUR0000SAF114', 'CUUR0000SEFV', 'CUUR0000SA0E', 'CUUR0000SACE', 'CUUR0000SEHE01',
        'CUUR0000SETB', 'CUUR0000SETB01', 'CUUR0000SEHF', 'CUUR0000SEHF01', 'CUUR0000SEHF02'
    ]
    series_ids2 = [
        'CUUR0000SA0L1E', 'CUUR0000SACL1E', 'CUUR0000SAA', 'CUUR0000SETA01', 'CUUR0000SETA02', 'CUUR0000SAM1',
        'CUUR0000SAF116', 'CUUR0000SEGA', 'CUUR0000SASLE', 'CUUR0000SAH1', 'CUUR0000SEHA', 'CUUR0000SEHC',
        'CUUR0000SAM2', 'CUUR0000SEMC01', 'CUUR0000SEMD01', 'CUUR0000SAS4', 'CUUR0000SETD', 'CUUR0000SETE',
        'CUUR0000SETG01'
    ]
    # Seasonally adjusted counterparts: swap the CUUR prefix for CUSR
    series_ids3 = [s.replace('CUUR', 'CUSR') for s in series_ids1]
    series_ids4 = [s.replace('CUUR', 'CUSR') for s in series_ids2]

    json_data1 = fetch_data(series_ids1)
    json_data2 = fetch_data(series_ids2)
    json_data3 = fetch_data(series_ids3)
    json_data4 = fetch_data(series_ids4)

    combined_json_data_NSA = merge_json_data([json_data1, json_data2])
    combined_json_data_SA = merge_json_data([json_data3, json_data4])

    names = [
        'All items', 'Food', 'Food at home', 'Cereals and bakery products', 'Meats, poultry, fish, and eggs',
        'Dairy and related products', 'Fruits and vegetables', 'Nonalcoholic beverages and beverage materials',
        'Food away from home', 'Energy', 'Energy commodities', 'Fuel oil', 'Motor fuel', 'Gasoline (all types)',
        'Energy services', 'Electricity', 'Utility (piped) gas service', 'All items less food and energy',
        'Commodities less food and energy commodities', 'Apparel', 'New vehicles', 'Used cars and trucks',
        'Medical care commodities', 'Alcoholic beverages', 'Tobacco and smoking products',
        'Services less energy services', 'Shelter', 'Rent of primary residence', "Owners equivalent rent of residences",
        'Medical care services', "Physicians services", 'Hospital services', 'Transportation services',
        'Motor vehicle maintenance and repair', 'Motor vehicle insurance', 'Airline fares'
    ]

    if combined_json_data_NSA and 'Results' in combined_json_data_NSA and 'series' in combined_json_data_NSA['Results']:
        process_series_data(combined_json_data_NSA, 'CPI_NSA', names)

    if combined_json_data_SA and 'Results' in combined_json_data_SA and 'series' in combined_json_data_SA['Results']:
        process_series_data(combined_json_data_SA, 'CPI_SA', names)

    time.sleep(86400)  # run once a day
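For reference, each payload consumed by process_series_data() has roughly this shape (a sketch of the BLS v2 response; the values shown are illustrative, not real observations):

# Abbreviated example of the structure process_series_data() walks over.
example_response = {
    'Results': {
        'series': [
            {'seriesID': 'CUUR0000SA0',
             'data': [
                 {'year': '2024', 'period': 'M01', 'periodName': 'January', 'value': '123.456'}
             ]}
        ]
    }
}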
lyq/Macroeconomic_FARBODI.py (new file, 293 lines)
@@ -0,0 +1,293 @@
import time
import pymysql
import requests
from bs4 import BeautifulSoup
from w3lib.html import remove_tags
import datetime

while True:
    try:
        # now_time = datetime.datetime.now()
        # next_time = now_time + datetime.timedelta(days=+1)
        # next_year = next_time.date().year
        # next_month = next_time.date().month
        # next_day = next_time.date().day
        # next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 20:45:01","%Y-%m-%d %H:%M:%S")
        # timer_start_time = (next_time - now_time).total_seconds()
        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
        cursor = db.cursor()
        page = requests.get("https://www.federalreserve.gov/releases/h41/current/default.htm")
        page = page.text
        page = BeautifulSoup(page, 'html.parser')
        date = page.find_all('div', class_="dates")
        # Extract the release-date text
        date = remove_tags(str(date))
        # Strip extraneous characters
        date = date.replace("[", "")
        date = date.replace("]", "")
        date = date.replace("Release Date:", "")
        date = date.replace(",", "")
        date = date.replace(" ", "")
        date = date.strip()
        date1 = date[-4:]    # year
        date2 = date[-6:-4]  # day of month
        # Convert the month name into a numeric 'YYYY/MM/DD' date
        date = date.replace("January", "/01/")
        date = date.replace("February", "/02/")
        date = date.replace("March", "/03/")
        date = date.replace("April", "/04/")
        date = date.replace("May", "/05/")
        date = date.replace("June", "/06/")
        date = date.replace("July", "/07/")
        date = date.replace("August", "/08/")
        date = date.replace("September", "/09/")
        date = date.replace("October", "/10/")
        date = date.replace("November", "/11/")
        date = date.replace("December", "/12/")
        date = date1 + date[0:4] + date2
        date_string = date
        format = '%Y/%m/%d'
        from datetime import datetime  # rebinds 'datetime' to the class from here on
        date = datetime.strptime(date_string, format)
        # Most recent release date already stored
        sql = "select time from CHAFRNFRAA order by id desc limit 1"
        cursor.execute(sql)
        db.commit()
        old_time = cursor.fetchall()
        ole_time = old_time[0][0]
    except:
        time.sleep(30)
        continue
    # Ingest only if this release is new
    if date != ole_time:
        page = page.find_all('span', style="font-family:'Courier New'; font-weight:bold")
        page = remove_tags(str(page))
        page = page.replace(",", "")
        page = page.replace("[", "")
        page = page.replace("]", "")
        page = page.split()
        symbol = ''
        list = []  # note: shadows the builtin 'list'
        # Reassemble the values, carrying a leading '-' onto the next number
        for i in page:
            if i == '-':
                symbol = '-'
                continue
            if i == '+':
                continue
            if i == '(0)':
                continue
            if i == '...':
                i = 'NULL'
            value = symbol + i
            symbol = ''
            list += [value]
        sql = "insert into FARBODI(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE) values(%s,%s,%s,%s,%s,%s)"
        data_list = [('Reserve Bank credit', date, list[0], list[1], list[2], list[3]),
                     ('Securities held outright', date, list[4], list[5], list[6], list[7]),
                     ('U.S. Treasury securities', date, list[8], list[9], list[10], list[11]),
                     ('Bills', date, list[12], list[13], list[14], list[15]),
                     ('Notes and bonds, nominal', date, list[16], list[17], list[18], list[19]),
                     ('Notes and bonds, inflation-indexed', date, list[20], list[21], list[22], list[23]),
                     ('Inflation compensation', date, list[24], list[25], list[26], list[27]),
                     ('Federal agency debt securities', date, list[28], list[29], list[30], list[31]),
                     ('Mortgage-backed securities', date, list[32], list[33], list[34], list[35]),
                     ('Uposho', date, list[36], list[37], list[38], list[39]),
                     ('Udosho', date, list[40], list[41], list[42], list[43]),
                     ('Repurchase agreements', date, list[44], list[45], list[46], list[47]),
                     ('Foreign official', date, list[48], list[49], list[50], list[51]),
                     ('Others', date, list[52], list[53], list[54], list[55]),
                     ('Loans', date, list[56], list[57], list[58], list[59]),
                     ('Primary credit', date, list[60], list[61], list[62], list[63]),
                     ('Secondary credit', date, list[64], list[65], list[66], list[67]),
                     ('Seasonal credit', date, list[68], list[69], list[70], list[71]),
                     ('PPPLF', date, list[72], list[73], list[74], list[75]),
                     ('Bank Term Funding Program', date, list[76], list[77], list[78], list[79]),
                     ('Other credit extensions', date, list[80], list[81], list[82], list[83]),
                     ('NphoMFLLC(MSLP)', date, list[84], list[85], list[86], list[87]),
                     ('Net portfolio holdings of MLF LLC', date, list[88], list[89], list[90], list[91]),
                     ('Net portfolio holdings of TALF II LLC', date, list[92], list[93], list[94], list[95]),
                     ('Float', date, list[96], list[97], list[98], list[99]),
                     ('Central bank liquidity swaps', date, list[100], list[101], list[102], list[103]),
                     ('Other Federal Reserve assets', date, list[104], list[105], list[106], list[107]),
                     ('Foreign currency denominated assets', date, list[108], list[109], list[110], list[111]),
                     ('Gold stock', date, list[112], list[113], list[114], list[115]),
                     ('Special drawing rights certificate account', date, list[116], list[117], list[118], list[119]),
                     ('Treasury currency outstanding', date, list[120], list[121], list[122], list[123]),
                     ('Total factors supplying reserve funds', date, list[124], list[125], list[126], list[127])]
        cursor.executemany(sql, data_list)

        sql2 = "insert into FARBODIC(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE) values(%s,%s,%s,%s,%s,%s)"
        data_list2 = [('Currency in circulation', date, list[128], list[129], list[130], list[131]),
                      ('Reverse repurchase agreements', date, list[132], list[133], list[134], list[135]),
                      ('Foreign official and international accounts', date, list[136], list[137], list[138], list[139]),
                      ('Others', date, list[140], list[141], list[142], list[143]),
                      ('Treasury cash holdings', date, list[144], list[145], list[146], list[147]),
                      ('DwFRBotrb', date, list[148], list[149], list[150], list[151]),
                      ('Tdhbdi', date, list[152], list[153], list[154], list[155]),
                      ('U.S. Treasury, General Account', date, list[156], list[157], list[158], list[159]),
                      ('Foreign official', date, list[160], list[161], list[162], list[163]),
                      ('Other', date, list[164], list[165], list[166], list[167]),
                      ('Treasury contributions to credit facilities', date, list[168], list[169], list[170], list[171]),
                      ('Other liabilities and capital', date, list[172], list[173], list[174], list[175]),
                      ('Tfotrbarf', date, list[176], list[177], list[178], list[179]),
                      ('RbwFRB', date, list[180], list[181], list[182], list[183])]
        cursor.executemany(sql2, data_list2)

        sql3 = "insert into MI(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE) values(%s,%s,%s,%s,%s,%s)"
        data_list3 = [('Shicffoaia', date, list[184], list[185], list[186], list[187]),
                      ('Marketable U.S. Treasury securities', date, list[188], list[189], list[190], list[191]),
                      ('Fadambs', date, list[192], list[193], list[194], list[195]),
                      ('Other securities', date, list[196], list[197], list[198], list[199]),
                      ('Securities lent to dealers', date, list[200], list[201], list[202], list[203]),
                      ('Overnight facility', date, list[204], list[205], list[206], list[207]),
                      ('U.S. Treasury securities', date, list[208], list[209], list[210], list[211]),
                      ('Federal agency debt securities', date, list[212], list[213], list[214], list[215])]
        cursor.executemany(sql3, data_list3)

        sql4 = "insert into MDOSLASOAAL(name,time,D15,D16_D90,D91_Y1,Y1_Y5,Y5_Y10,Y10_,TOTAL) values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        data_list4 = [('Loans', date, list[216], list[217], list[218], list[219], list[220], list[221], list[222]),
                      ('USTsH', date, list[223], list[224], list[225], list[226], list[227], list[228], list[229]),
                      ('USTsWc', date, list[230], list[231], list[232], list[233], list[234], list[235], list[236]),
                      ('FadsH', date, list[237], list[238], list[239], list[240], list[241], list[242], list[243]),
                      ('FadsWc', date, list[244], list[245], list[246], list[247], list[248], list[249], list[250]),
                      ('MbsH', date, list[251], list[252], list[253], list[254], list[255], list[256], list[257]),
                      ('MbsWc', date, list[258], list[259], list[260], list[261], list[262], list[263], list[264]),
                      ('LphbMFLLC(MSLP)', date, list[265], list[266], list[267], list[268], list[269], list[270], list[271]),
                      ('Repurchase agreements', date, list[272], list[273], list[274], list[275], list[276], list[277], list[278]),
                      ('Central bank liquidity swaps', date, list[279], list[280], list[281], list[282], list[283], list[284], list[285]),
                      ('Reverse repurchase agreements', date, list[286], list[287], list[288], list[289], list[290], list[291], list[292]),
                      ('Term deposits', date, list[293], list[294], list[295], list[296], list[297], list[298], list[299])]
        cursor.executemany(sql4, data_list4)

        sql5 = "insert into SIOMS(name,time,value) values(%s,%s,%s)"
        data_list5 = [('Mortgage-backed securities held outright', date, list[300]),
                      ('Residential mortgage-backed securities', date, list[301]),
                      ('Commercial mortgage-backed securities', date, list[302]),
                      ('Commitments to buy mortgage-backed securities', date, list[303]),
                      ('Commitments to sell mortgage-backed securities', date, list[304]),
                      ('Cash and cash equivalents', date, list[305])]
        cursor.executemany(sql5, data_list5)

        sql6 = "insert into IOPAOCFL(name,time,OPAPTLLC,UPFAPT,FAAOA,TOTAL) values(%s,%s,%s,%s,%s,%s)"
        data_list6 = [('MS Facilities LLC (Main Street Lending Program)', date, list[306], list[307], list[308], list[309])]
        cursor.executemany(sql6, data_list6)

        sql7 = "insert into CSOCOAFRB(name,time,EFC,THIS_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR) values(%s,%s,%s,%s,%s,%s)"
        data_list7 = [('Gold certificate account', date, 'NULL', list[310], list[311], list[312]),
                      ('Special drawing rights certificate account', date, 'NULL', list[313], list[314], list[315]),
                      ('Coin', date, 'NULL', list[316], list[317], list[318]),
                      ('Supadraal', date, 'NULL', list[319], list[320], list[321]),
                      ('Securities held outright', date, 'NULL', list[322], list[323], list[324]),
                      ('U.S. Treasury securities', date, 'NULL', list[325], list[326], list[327]),
                      ('Bills', date, 'NULL', list[328], list[329], list[330]),
                      ('Notes and bonds, nominal', date, 'NULL', list[331], list[332], list[333]),
                      ('Notes and bonds, inflation-indexed', date, 'NULL', list[334], list[335], list[336]),
                      ('Inflation compensation', date, 'NULL', list[337], list[338], list[339]),
                      ('Federal agency debt securities', date, 'NULL', list[340], list[341], list[342]),
                      ('Mortgage-backed securities', date, 'NULL', list[343], list[344], list[345]),
                      ('Uposho', date, 'NULL', list[346], list[347], list[348]),
                      ('Udosho', date, 'NULL', list[349], list[350], list[351]),
                      ('Repurchase agreements', date, 'NULL', list[352], list[353], list[354]),
                      ('Loans', date, 'NULL', list[355], list[356], list[357]),
                      ('NphoMFLLC(MSLP)', date, 'NULL', list[358], list[359], list[360]),
                      ('NphoMLFLLC', date, 'NULL', list[361], list[362], list[363]),
                      ('Net portfolio holdings of TALF II LLC', date, 'NULL', list[364], list[365], list[366]),
                      ('Items in process of collection', date, 'NULL', list[367], list[368], list[369]),
                      ('Bank premises', date, 'NULL', list[370], list[371], list[372]),
                      ('Central bank liquidity swaps', date, 'NULL', list[373], list[374], list[375]),
                      ('Foreign currency denominated assets', date, 'NULL', list[376], list[377], list[378]),
                      ('Other assets', date, 'NULL', list[379], list[380], list[381]),
                      ('Total assets', date, 'NULL', list[382], list[383], list[384])]
        cursor.executemany(sql7, data_list7)

        sql8 = "insert into CSOCOAFRBC(name,time,EFC,THIS_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR) values(%s,%s,%s,%s,%s,%s)"
        data_list8 = [('FRnnoFBh', date, 'NULL', list[385], list[386], list[387]),
                      ('Reverse repurchase agreements', date, 'NULL', list[388], list[389], list[390]),
                      ('Deposits', date, 'NULL', list[391], list[392], list[393]),
                      ('Term deposits held by depository institutions', date, 'NULL', list[394], list[395], list[396]),
                      ('Other deposits held by depository institutions', date, 'NULL', list[397], list[398], list[399]),
                      ('U.S. Treasury, General Account', date, 'NULL', list[400], list[401], list[402]),
                      ('Foreign official', date, 'NULL', list[403], list[404], list[405]),
                      ('Other', date, 'NULL', list[406], list[407], list[408]),
                      ('Deferred availability cash items', date, 'NULL', list[409], list[410], list[411]),
                      ('Treasury contributions to credit facilities', date, 'NULL', list[412], list[413], list[414]),
                      ('Other liabilities and accrued dividends', date, 'NULL', list[415], list[416], list[417]),
                      ('Total liabilities', date, 'NULL', list[418], list[419], list[420]),
                      ('Capital paid in', date, 'NULL', list[421], list[422], list[423]),
                      ('Surplus', date, 'NULL', list[424], list[425], list[426]),
                      ('Other capital accounts', date, 'NULL', list[427], list[428], list[429]),
                      ('Total capital', date, 'NULL', list[430], list[431], list[432])]
        cursor.executemany(sql8, data_list8)

        sql9 = "insert into SOCOEFRB(name,time,TOTAL,Boston,NewYork,Philadelphia,Cleveland,Richmond,Atlanta,Chicago,St_Louis,Minneapolis,Kansas_City,Dallas,San_Francisco) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        data_list9 = [('Gcasdrc', date, list[433], list[434], list[435], list[436], list[437], list[438], list[439], list[440], list[441], list[442], list[443], list[444], list[445]),
                      ('Coin', date, list[446], list[447], list[448], list[449], list[450], list[451], list[452], list[453], list[454], list[455], list[456], list[457], list[458]),
                      ('Supadraal', date, list[459], list[460], list[461], list[462], list[463], list[464], list[465], list[466], list[467], list[468], list[469], list[470], list[471]),
                      ('NphoMFLLC(MSLP)', date, list[472], list[473], list[474], list[475], list[476], list[477], list[478], list[479], list[480], list[481], list[482], list[483], list[484]),
                      ('Central bank liquidity swaps', date, list[485], list[486], list[487], list[488], list[489], list[490], list[491], list[492], list[493], list[494], list[495], list[496], list[497]),
                      ('Foreign currency denominated assets', date, list[498], list[499], list[500], list[501], list[502], list[503], list[504], list[505], list[506], list[507], list[508], list[509], list[510]),
                      ('Other assets', date, list[511], list[512], list[513], list[514], list[515], list[516], list[517], list[518], list[519], list[520], list[521], list[522], list[523]),
                      ('Interdistrict settlement account', date, list[524], list[525], list[526], list[527], list[528], list[529], list[530], list[531], list[532], list[533], list[534], list[535], list[536]),
                      ('Total assets', date, list[537], list[538], list[539], list[540], list[541], list[542], list[543], list[544], list[545], list[546], list[547], list[548], list[549])]
        cursor.executemany(sql9, data_list9)

        sql10 = "insert into SOCOEFRBC(name,time,TOTAL,Boston,NewYork,Philadelphia,Cleveland,Richmond,Atlanta,Chicago,St_Louis,Minneapolis,Kansas_City,Dallas,San_Francisco) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
        data_list10 = [('Federal Reserve notes, net', date, list[550], list[551], list[552], list[553], list[554], list[555], list[556], list[557], list[558], list[559], list[560], list[561], list[562]),
                       ('Reverse repurchase agreements', date, list[563], list[564], list[565], list[566], list[567], list[568], list[569], list[570], list[571], list[572], list[573], list[574], list[575]),
                       ('Deposits', date, list[576], list[577], list[578], list[579], list[580], list[581], list[582], list[583], list[584], list[585], list[586], list[587], list[588]),
                       ('Depository institutions', date, list[589], list[590], list[591], list[592], list[593], list[594], list[595], list[596], list[597], list[598], list[599], list[600], list[601]),
                       ('U.S. Treasury, General Account', date, list[602], list[603], list[604], list[605], list[606], list[607], list[608], list[609], list[610], list[611], list[612], list[613], list[614]),
                       ('Foreign official', date, list[615], list[616], list[617], list[618], list[619], list[620], list[621], list[622], list[623], list[624], list[625], list[626], list[627]),
                       ('Other', date, list[628], list[629], list[630], list[631], list[632], list[633], list[634], list[635], list[636], list[637], list[638], list[639], list[640]),
                       ('Earnings remittances due to the U.S. Treasury', date, list[641], list[642], list[643], list[644], list[645], list[646], list[647], list[648], list[649], list[650], list[651], list[652], list[653]),
                       ('Treasury contributions to credit facilities', date, list[654], list[655], list[656], list[657], list[658], list[659], list[660], list[661], list[662], list[663], list[664], list[665], list[666]),
                       ('Other liabilities and accrued dividends', date, list[667], list[668], list[669], list[670], list[671], list[672], list[673], list[674], list[675], list[676], list[677], list[678], list[679]),
                       ('Total liabilities', date, list[680], list[681], list[682], list[683], list[684], list[685], list[686], list[687], list[688], list[689], list[690], list[691], list[692]),
                       ('Capital paid in', date, list[693], list[694], list[695], list[696], list[697], list[698], list[699], list[700], list[701], list[702], list[703], list[704], list[705]),
                       ('Surplus', date, list[706], list[707], list[708], list[709], list[710], list[711], list[712], list[713], list[714], list[715], list[716], list[717], list[718]),
                       ('Other capital', date, list[719], list[720], list[721], list[722], list[723], list[724], list[725], list[726], list[727], list[728], list[729], list[730], list[731]),
                       ('Total liabilities and capital', date, list[732], list[733], list[734], list[735], list[736], list[737], list[738], list[739], list[740], list[741], list[742], list[743], list[744])]
        cursor.executemany(sql10, data_list10)

        sql11 = "insert into CHAFRNFRAA(name,time,value) values(%s,%s,%s)"
        data_list11 = [('Federal Reserve notes outstanding', date, list[745]),
                       ('LNhbFBnstc', date, list[746]),
                       ('Federal Reserve notes to be collateralized', date, list[747]),
                       ('Collateral held against Federal Reserve notes', date, list[748]),
                       ('Gold certificate account', date, list[749]),
                       ('Special drawing rights certificate account', date, list[750]),
                       ('UTadambsp', date, list[751]),
                       ('Other assets pledged', date, list[752]),
                       ('TUTadambs', date, list[753]),
                       ('LFvosurra', date, list[754]),
                       ('UTadambsetbp', date, list[755])]
        cursor.executemany(sql11, data_list11)
        db.commit()
    else:
        time.sleep(21600)
    # time.sleep(timer_start_time)
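The sign-carrying loop above exists because the H.4.1 page renders a negative change as a separate '-' token before the number, while '+', '(0)' and '...' tokens are dropped or mapped to NULL. A toy illustration of that transformation (the input tokens are hypothetical, not real H.4.1 values):

# Toy input modeled on the scraped token stream.
tokens = ['6446123', '-', '11706', '+', '109568', '...', '(0)']
symbol = ''
cleaned = []
for t in tokens:
    if t == '-':
        symbol = '-'   # remember the sign for the next numeric token
        continue
    if t in ('+', '(0)'):
        continue        # dropped outright
    if t == '...':
        t = 'NULL'      # missing value
    cleaned.append(symbol + t)
    symbol = ''
# cleaned == ['6446123', '-11706', '109568', 'NULL']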
lyq/Macroeconomic_FBI_v2.py (new file, 98 lines)
@@ -0,0 +1,98 @@
import requests
import pymysql
from datetime import datetime
from w3lib.html import remove_tags
import pandas as pd
import time


def parse_treasury_data(data):
    # Locate the header row
    header_index = data.index("Country")
    columns = data[header_index:header_index + 14]  # 'Country' + 13 months
    rows = data[header_index + 14:]

    result = []
    i = 0
    while i < len(rows):
        # Stitch multi-token country names back together
        country_parts = []
        while i < len(rows) and not rows[i].replace('.', '', 1).isdigit():
            country_parts.append(rows[i])
            i += 1
        country = " ".join(country_parts).replace(",", "")

        # Take the 13 monthly values
        values = rows[i:i + 13]
        i += 13

        if len(values) == 13:
            result.append([country] + values)

    # Build a DataFrame
    df = pd.DataFrame(result, columns=columns)

    # =================== Name cleanup ===================
    rename_map = {
        "Of Which: Foreign Official": "Foreign Official",
        "Of Which: Foreign Official Treasury Bills": "Treasury Bills",
        "Of Which: Foreign Official T-Bonds & Notes": "T-Bonds & Notes"
    }
    df["Country"] = df["Country"].replace(rename_map)

    return df


def run_job():
    print("=== Scraping and updating the database ===")

    # =================== Scrape the page =====================
    page = requests.get("https://ticdata.treasury.gov/resource-center/data-chart-center/tic/Documents/slt_table5.html")
    page = remove_tags(str(page.text))
    page = page.split()

    df = parse_treasury_data(page)

    # =================== Connect to the database =====================
    db = pymysql.connect(
        host="127.0.0.1",
        user="root",
        password="2GS@bPYcgiMyL14A",
        database="Macroeconomics",
        port=4423
    )
    cursor = db.cursor()

    # Latest date already stored in the database
    cursor.execute("SELECT date FROM FBI ORDER BY date DESC LIMIT 1")
    result = cursor.fetchone()
    latest_date_in_db = result[0] if result else None  # datetime or None

    # =================== Backfill logic =====================
    for col in df.columns[1:]:  # iterate over the month columns
        col_date = datetime.strptime(col, "%Y-%m")

        # Skip dates the database already has
        if latest_date_in_db and col_date <= latest_date_in_db:
            continue

        print(f"Inserting data for {col}...")
        insert_sql = "INSERT INTO FBI (date, name, value) VALUES (%s, %s, %s)"
        for _, row in df.iterrows():
            country = row["Country"]
            value = row[col]
            cursor.execute(insert_sql, (col_date.strftime("%Y-%m-01"), country, value))

        db.commit()
        print(f"Finished inserting {col}")

    cursor.close()
    db.close()
    print("=== Run complete ===\n")


# =================== Run loop =====================
if __name__ == "__main__":
    while True:
        run_job()
        print("Sleeping for 21600 seconds (6 hours)...\n")
        time.sleep(21600)  # 6 hours
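parse_treasury_data() assumes the flattened page reads as a header ('Country' plus 13 month labels) followed by alternating name tokens and runs of 13 numeric tokens. A toy input illustrating the expected shape (the labels and values are made up, not real TIC data):

# Toy token stream shaped like the flattened TIC table.
months = [f"M{k}" for k in range(1, 14)]  # stand-ins for the 13 month headers
toy = (["Country"] + months
       + ["Japan"] + [str(1000 + k) for k in range(13)]
       + ["United", "Kingdom"] + [str(700 + k) for k in range(13)])
df = parse_treasury_data(toy)
# -> two rows, 'Japan' and 'United Kingdom' (multi-token names get stitched
#    together), each with 13 monthly values under columns M1..M13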
lyq/Macroeconomic_FER.py (new file, 89 lines)
@@ -0,0 +1,89 @@
import time
import requests
import pymysql
from bs4 import BeautifulSoup
from w3lib.html import remove_tags
import datetime

while True:
    try:
        # now_time = datetime.datetime.now()
        # next_time = now_time + datetime.timedelta(days=+1)
        # next_year = next_time.date().year
        # next_month = next_time.date().month
        # next_day = next_time.date().day
        # next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 20:30:01","%Y-%m-%d %H:%M:%S")
        # timer_start_time = (next_time - now_time).total_seconds()
        page = requests.get("https://www.federalreserve.gov/data/intlsumm/current.htm")
        page = page.text
        page = BeautifulSoup(page, 'html.parser')
        page1 = page.find_all('th', class_="colorrev")
        page = page.find_all('td', class_="shadedata1")
        # Most recent value and its month header
        value1 = remove_tags(str(page[-1]))
        value1 = value1.replace(",", "")
        value1 = value1.replace(" ", "")
        date1 = remove_tags(str(page1[-1]))
        date1 = date1.replace(" ", "")
        date1 = date1.replace("/r", "")
        date1 = date1.replace("/p", "")
        date1 = date1[-4:] + date1[0:3]  # year + month abbreviation
        # Turn the month abbreviation into '/M/01' so the result parses as YYYY/M/01
        date1 = date1.replace("Jan", "/1/01")
        date1 = date1.replace("Feb", "/2/01")
        date1 = date1.replace("Mar", "/3/01")
        date1 = date1.replace("Apr", "/4/01")
        date1 = date1.replace("May", "/5/01")
        date1 = date1.replace("Jun", "/6/01")
        date1 = date1.replace("Jul", "/7/01")
        date1 = date1.replace("Aug", "/8/01")
        date1 = date1.replace("Sep", "/9/01")
        date1 = date1.replace("Oct", "/10/01")
        date1 = date1.replace("Nov", "/11/01")
        date1 = date1.replace("Dec", "/12/01")
        format1 = '%Y/%m/%d'

        # Previous month's value and header, used to revise the stored figure
        value2 = remove_tags(str(page[-2]))
        value2 = value2.replace(",", "")
        value2 = value2.replace(" ", "")

        date2 = remove_tags(str(page1[-2]))
        date2 = date2.replace(" ", "")
        date2 = date2.replace("/r", "")
        date2 = date2.replace("/p", "")
        date2 = date2[-4:] + date2[0:3]
        date2 = date2.replace("Jan", "/1/01")
        date2 = date2.replace("Feb", "/2/01")
        date2 = date2.replace("Mar", "/3/01")
        date2 = date2.replace("Apr", "/4/01")
        date2 = date2.replace("May", "/5/01")
        date2 = date2.replace("Jun", "/6/01")
        date2 = date2.replace("Jul", "/7/01")
        date2 = date2.replace("Aug", "/8/01")
        date2 = date2.replace("Sep", "/9/01")
        date2 = date2.replace("Oct", "/10/01")
        date2 = date2.replace("Nov", "/11/01")
        date2 = date2.replace("Dec", "/12/01")
        format2 = '%Y/%m/%d'
        from datetime import datetime  # rebinds 'datetime' to the class from here on
        date1 = datetime.strptime(date1, format1)
        date2 = datetime.strptime(date2, format2)
        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
        cursor = db.cursor()
        sql = "select date from FER order by date desc limit 1"
        cursor.execute(sql)
        db.commit()
        ole_time = cursor.fetchall()
        ole_time = ole_time[0][0]
        # Revise the previous month's figure in place
        date2 = "'" + str(date2) + "'"
        sql = "update FER set FER= %s where date=%s" % (value2, date2)
        cursor.execute(sql)
        db.commit()
        # Insert the newest month if it is not stored yet (the value is stored with a trailing '*')
        if date1 != ole_time:
            sql = "insert into FER(date,FER) values('%s','%s')" % (date1, value1 + '*')
            cursor.execute(sql)
            db.commit()
        db.close()
        # time.sleep(timer_start_time)
        time.sleep(21600)
    except:
        time.sleep(30)
        continue
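The UPDATE and INSERT above splice values into the SQL string and quote date2 by hand. A parameterized sketch of the same two statements, which lets the driver handle quoting (assuming value2, date1 and date2 are well-formed; the manual quoting step is then unnecessary):

# Sketch: parameterized equivalents of the FER statements above.
cursor.execute("UPDATE FER SET FER = %s WHERE date = %s", (value2, date2))
cursor.execute("INSERT INTO FER(date, FER) VALUES (%s, %s)", (date1, value1 + '*'))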
lyq/Macroeconomic_InterestRate.py (new file, 252 lines)
@@ -0,0 +1,252 @@
|
|||||||
|
import time
|
||||||
|
from full_fred.fred import Fred
|
||||||
|
import pymysql
|
||||||
|
import requests
|
||||||
|
from datetime import datetime
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from w3lib.html import remove_tags
|
||||||
|
while True:
|
||||||
|
fred=Fred('example_key.txt')
|
||||||
|
fred.set_api_key_file('example_key.txt')
|
||||||
|
DFEDTARU = fred.get_series_df('DFEDTARU')
|
||||||
|
DFEDTARL = fred.get_series_df('DFEDTARL')
|
||||||
|
FEDFUNDS = fred.get_series_df('FEDFUNDS')
|
||||||
|
IORB = fred.get_series_df('IORB')
|
||||||
|
RRPONTSYAWARD = fred.get_series_df('RRPONTSYAWARD')
|
||||||
|
SOFR = fred.get_series_df('SOFR')
|
||||||
|
|
||||||
|
list_date1 = DFEDTARU['date']
|
||||||
|
list_value1 = DFEDTARU['value']
|
||||||
|
list_date2 = DFEDTARL['date']
|
||||||
|
list_value2 = DFEDTARL['value']
|
||||||
|
list_date3 = FEDFUNDS['date']
|
||||||
|
list_value3 = FEDFUNDS['value']
|
||||||
|
list_date4 = IORB['date']
|
||||||
|
list_value4 = IORB['value']
|
||||||
|
list_date5 = RRPONTSYAWARD['date']
|
||||||
|
list_value5 = RRPONTSYAWARD['value']
|
||||||
|
list_date6 = SOFR['date']
|
||||||
|
list_value6 = SOFR['value']
|
||||||
|
|
||||||
|
date1 = []
|
||||||
|
value1 = []
|
||||||
|
date2 = []
|
||||||
|
value2 = []
|
||||||
|
date3 = []
|
||||||
|
value3 = []
|
||||||
|
date4 = []
|
||||||
|
value4 = []
|
||||||
|
date5 = []
|
||||||
|
value5 = []
|
||||||
|
date6 = []
|
||||||
|
value6 = []
|
||||||
|
for i in list_date1:
|
||||||
|
date1 += [i]
|
||||||
|
for i in list_value1:
|
||||||
|
value1 += [i]
|
||||||
|
for i in list_date2:
|
||||||
|
date2 += [i]
|
||||||
|
for i in list_value2:
|
||||||
|
value2 += [i]
|
||||||
|
for i in list_date3:
|
||||||
|
date3 += [i]
|
||||||
|
for i in list_value3:
|
||||||
|
value3 += [i]
|
||||||
|
for i in list_date4:
|
||||||
|
date4 += [i]
|
||||||
|
for i in list_value4:
|
||||||
|
value4 += [i]
|
||||||
|
for i in list_date5:
|
||||||
|
date5 += [i]
|
||||||
|
for i in list_value5:
|
||||||
|
value5 += [i]
|
||||||
|
for i in list_date6:
|
||||||
|
date6 += [i]
|
||||||
|
for i in list_value6:
|
||||||
|
value6 += [i]
|
||||||
|
date1 = date1[-1]
|
||||||
|
value1 = value1[-1]
|
||||||
|
date2 = date2[-1]
|
||||||
|
value2 = value2[-1]
|
||||||
|
date3 = date3[-1]
|
||||||
|
value3 = value3[-1]
|
||||||
|
date4 = date4[-1]
|
||||||
|
value4 = value4[-1]
|
||||||
|
date5 = date5[-1]
|
||||||
|
value5 = value5[-1]
|
||||||
|
date6 = date6[-1]
|
||||||
|
value6 = value6[-1]
|
||||||
|
|
||||||
|
date1 = date1.replace('-', '/')
|
||||||
|
date_string = date1
|
||||||
|
format = '%Y/%m/%d'
|
||||||
|
date1 = datetime.strptime(date_string, format)
|
||||||
|
|
||||||
|
date2 = date2.replace('-', '/')
|
||||||
|
date_string = date2
|
||||||
|
format = '%Y/%m/%d'
|
||||||
|
date2 = datetime.strptime(date_string, format)
|
||||||
|
|
||||||
|
date3 = date3.replace('-', '/')
|
||||||
|
date_string = date3
|
||||||
|
format = '%Y/%m/%d'
|
||||||
|
date3 = datetime.strptime(date_string, format)
|
||||||
|
|
||||||
|
date4 = date4.replace('-', '/')
|
||||||
|
date_string = date4
|
||||||
|
format = '%Y/%m/%d'
|
||||||
|
date4 = datetime.strptime(date_string, format)
|
||||||
|
|
||||||
|
date5 = date5.replace('-', '/')
|
||||||
|
date_string = date5
|
||||||
|
format = '%Y/%m/%d'
|
||||||
|
date5 = datetime.strptime(date_string, format)
|
||||||
|
|
||||||
|
date6 = date6.replace('-', '/')
|
||||||
|
date_string = date6
|
||||||
|
format = '%Y/%m/%d'
|
||||||
|
date6 = datetime.strptime(date_string, format)
|
||||||
|
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
|
||||||
|
cursor = db.cursor()
|
||||||
|
sql = "select date from InterestRate where name='DFEDTARU'"
|
||||||
|
cursor.execute(sql)
|
||||||
|
db.commit()
|
||||||
|
DFEDTARU_old_time = cursor.fetchall()
|
||||||
|
DFEDTARU_old_time=DFEDTARU_old_time[-1][0]
|
||||||
|
if DFEDTARU_old_time != date1 :
|
||||||
|
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date1, 'DFEDTARU', value1)
|
||||||
|
cursor.execute(sql)
|
||||||
|
db.commit()
|
||||||
|
sql2 = "select date from InterestRate where name='DFEDTARL'"
|
||||||
|
cursor.execute(sql2)
|
||||||
|
db.commit()
|
||||||
|
DFEDTARL_old_time = cursor.fetchall()
|
||||||
|
DFEDTARL_old_time=DFEDTARL_old_time[-1][0]
|
||||||
|
if DFEDTARL_old_time != date2 :
|
||||||
|
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date2, 'DFEDTARL', value2)
|
||||||
|
cursor.execute(sql)
|
||||||
|
db.commit()
|
||||||
|
sql3 = "select date from InterestRate where name='FEDFUNDS'"
|
||||||
|
cursor.execute(sql3)
|
||||||
|
db.commit()
|
||||||
|
FEDFUNDS_old_time = cursor.fetchall()
|
||||||
|
FEDFUNDS_old_time=FEDFUNDS_old_time[-1][0]
|
||||||
|
if FEDFUNDS_old_time != date3 :
|
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date3, 'FEDFUNDS', value3)
cursor.execute(sql)
db.commit()
sql4 = "select date from InterestRate where name='IORB'"
cursor.execute(sql4)
db.commit()
IORB_old_time = cursor.fetchall()
IORB_old_time = IORB_old_time[-1][0]
if IORB_old_time != date4:
    sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date4, 'IORB', value4)
    cursor.execute(sql)
    db.commit()
sql5 = "select date from InterestRate where name='RRPONTSYAWARD'"
cursor.execute(sql5)
db.commit()
RRPONTSYAWARD_old_time = cursor.fetchall()
RRPONTSYAWARD_old_time = RRPONTSYAWARD_old_time[-1][0]
if RRPONTSYAWARD_old_time != date5:
    sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date5, 'RRPONTSYAWARD', value5)
    cursor.execute(sql)
    db.commit()
sql6 = "select date from InterestRate where name='SOFR'"
cursor.execute(sql6)
db.commit()
SOFR_old_time = cursor.fetchall()
SOFR_old_time = SOFR_old_time[-1][0]
if SOFR_old_time != date6:
    sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date6, 'SOFR', value6)
    cursor.execute(sql)
    db.commit()

# Proxy funds rate (PFR): download the SF Fed CSV and walk the data rows
pagee = requests.get("https://www.frbsf.org/wp-content/uploads/sites/4/proxy-funds-rate-chart1-data.csv")
pagee = pagee.text
pagee = pagee.split()
number = 0
for i in pagee:
    number += 1
    if number <= 5:
        # skip the CSV header rows
        continue
    else:
        # use a local name so the iterated list is not clobbered mid-loop
        row = i.split()[-1]
        row = row.replace(',', ' , ')
        PFR_new_time = row[0:10]
        PFR_new_time = PFR_new_time.replace('-', '/')
        PFR_value = row[-8:]
        PFR_value = PFR_value.replace(' ', '')
        date_string = PFR_new_time
        format = '%Y/%m/%d'
        PFR_new_time = datetime.strptime(date_string, format)
        sql = "select * from InterestRate where name='PFR' and date='%s'" % (PFR_new_time)
        cursor.execute(sql)
        outcome = cursor.fetchall()
        if not outcome:
            sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (PFR_new_time, 'PFR', PFR_value)
            cursor.execute(sql)
            db.commit()
        else:
            # column names must not be quoted as string literals here,
            # otherwise the WHERE clause never matches any row
            sql = "update InterestRate set _value='%s' where name='PFR' and date='%s'" % (PFR_value, PFR_new_time)
            cursor.execute(sql)
            db.commit()
number = 0

# Reverse repo (RR): latest operation from the New York Fed API
pagee = requests.get("https://markets.newyorkfed.org/api/rp/repo/multiple/results/last/1.json")
pagee = pagee.json()
page = pagee['repo']['operations'][0]
page2 = page['details'][0]
if 'minimumBidRate' in page2:
    RR_value = page2['minimumBidRate']
    RR_new_time = page['operationDate']
    RR_new_time = RR_new_time.replace('-', '/')
    date_string = RR_new_time
    format = '%Y/%m/%d'
    RR_new_time = datetime.strptime(date_string, format)
    sql = "select date from InterestRate where name='RR'"
    cursor.execute(sql)
    db.commit()
    RR_old_time = cursor.fetchall()
    RR_old_time = RR_old_time[-1][0]
    if RR_old_time != RR_new_time:
        sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (RR_new_time, 'RR', RR_value)
        cursor.execute(sql)
        db.commit()

# USD LIBOR (1M/3M/6M) scraped from global-rates.com
page = requests.get("https://www.global-rates.com/en/interest-rates/libor/american-dollar/american-dollar.aspx")
page = page.text
page = BeautifulSoup(page, 'html.parser')
data = page.find_all('div', class_="table-normal text-end")
LIBOR_new_time = data[0]
LIBOR1M_value = data[5]
LIBOR3M_value = data[10]
LIBOR6M_value = data[15]
LIBOR_new_time = remove_tags(str(LIBOR_new_time))
LIBOR1M_value = remove_tags(str(LIBOR1M_value))
LIBOR3M_value = remove_tags(str(LIBOR3M_value))
LIBOR6M_value = remove_tags(str(LIBOR6M_value))
LIBOR_new_time = LIBOR_new_time[6:10] + '-' + LIBOR_new_time[0:5]
LIBOR_new_time = LIBOR_new_time.replace("-", "/")
LIBOR1M_value = LIBOR1M_value.replace(' ', '')
LIBOR3M_value = LIBOR3M_value.replace(' ', '')
LIBOR6M_value = LIBOR6M_value.replace(' ', '')
format = '%Y/%m/%d'
LIBOR_new_time = datetime.strptime(LIBOR_new_time, format)
sql = "select date from InterestRate where name='LIBOR1M'"
cursor.execute(sql)
db.commit()
LIBOR_old_time = cursor.fetchall()
LIBOR_old_time = LIBOR_old_time[-1][0]
if LIBOR_new_time != LIBOR_old_time:
    sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR1M', LIBOR1M_value)
    sql1 = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR3M', LIBOR3M_value)
    sql2 = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR6M', LIBOR6M_value)
    cursor.execute(sql)
    cursor.execute(sql1)
    cursor.execute(sql2)
    db.commit()
db.close()
time.sleep(7200)
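Every statement above is built with "%" string formatting. A minimal parameterized alternative (a sketch against the same InterestRate schema, not part of the committed code) that also collapses the repeated select/insert/update dance into one helper:

def upsert_rate(cursor, db, name, date, value):
    # parameterized queries let the driver handle quoting and escaping
    cursor.execute("select 1 from InterestRate where name=%s and date=%s", (name, date))
    if cursor.fetchone():
        cursor.execute("update InterestRate set _value=%s where name=%s and date=%s",
                       (value, name, date))
    else:
        cursor.execute("insert into InterestRate(date,name,_value) values(%s,%s,%s)",
                       (date, name, value))
    db.commit()

# usage: upsert_rate(cursor, db, 'SOFR', date6, value6)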
102
lyq/Macroeconomic_Loan.py
Normal file
@@ -0,0 +1,102 @@
import time
from full_fred.fred import Fred
import pymysql
from datetime import datetime

fred = Fred('example_key.txt')
fred.set_api_key_file('example_key.txt')
while True:
    BUSLOANS = fred.get_series_df('BUSLOANS')
    HBPIGDQ188S = fred.get_series_df('HBPIGDQ188S')
    # pull the date/value columns into plain lists
    list_date1 = list(BUSLOANS['date'])
    list_value1 = list(BUSLOANS['value'])
    list_date2 = list(HBPIGDQ188S['date'])
    list_value2 = list(HBPIGDQ188S['value'])

    # the four observations before the latest one (FRED may still revise them)
    date1 = list_date1[-2]
    value1 = list_value1[-2]
    date2 = list_date1[-3]
    value2 = list_value1[-3]
    date3 = list_date1[-4]
    value3 = list_value1[-4]
    date4 = list_date1[-5]
    value4 = list_value1[-5]

    # the latest observation of each series
    list_date1 = list_date1[-1]
    list_value1 = list_value1[-1]
    list_date2 = list_date2[-1]
    list_value2 = list_value2[-1]

    format = '%Y/%m/%d'
    list_date1 = datetime.strptime(list_date1.replace('-', '/'), format)
    list_date2 = datetime.strptime(list_date2.replace('-', '/'), format)
    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
    cursor = db.cursor()
    sql = "select date from Loan order by date desc limit 1"
    cursor.execute(sql)
    old_time = cursor.fetchall()
    old_time = old_time[0][0]
    date1 = datetime.strptime(date1.replace('-', '/'), format)
    date2 = datetime.strptime(date2.replace('-', '/'), format)
    date3 = datetime.strptime(date3.replace('-', '/'), format)
    date4 = datetime.strptime(date4.replace('-', '/'), format)
    # refresh the four prior observations in case they were revised
    sql = "update Loan set PSI=%s where date='%s'" % (value1, date1)
    cursor.execute(sql)
    sql = "update Loan set PSI=%s where date='%s'" % (value2, date2)
    cursor.execute(sql)
    sql = "update Loan set PSI=%s where date='%s'" % (value3, date3)
    cursor.execute(sql)
    sql = "update Loan set PSI=%s where date='%s'" % (value4, date4)
    cursor.execute(sql)
    db.commit()
    if list_date1 == old_time:
        # latest observation already stored: refresh both series in place
        # (reuse the open connection instead of reconnecting)
        sql = "update Loan set PSI=%s where date='%s'" % (list_value1, list_date1)
        cursor.execute(sql)
        sql1 = "update Loan set FDHBPI_GDP=%s where date='%s'" % (list_value2, list_date2)
        cursor.execute(sql1)
        db.commit()
        db.close()
        time.sleep(21600)
    else:
        # new observation: insert it, then update the companion series
        sql = "insert into Loan(date,PSI)values('%s','%s')" % (list_date1, list_value1)
        cursor.execute(sql)
        sql1 = "update Loan set FDHBPI_GDP=%s where date='%s'" % (list_value2, list_date2)
        cursor.execute(sql1)
        db.commit()
        db.close()
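The negative indexing above assumes enough history exists and that the value column never holds FRED's '.' placeholder for missing data. A defensive sketch (hypothetical helper, not in the commit; it assumes get_series_df returns string-typed date and value columns, which is consistent with how the script treats them):

def latest_observation(fred, series_id):
    # newest non-missing observation of a series, as (datetime, float)
    df = fred.get_series_df(series_id)
    rows = [(d, v) for d, v in zip(df['date'], df['value']) if v not in ('.', '')]
    d, v = rows[-1]
    return datetime.strptime(d, '%Y-%m-%d'), float(v)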
216
lyq/Macroeconomic_MoneyStockMeasures.py
Normal file
@@ -0,0 +1,216 @@
import pymysql
import time
import requests
from datetime import datetime, timedelta
from w3lib.html import remove_tags
from bs4 import BeautifulSoup

# month abbreviations as they appear in the H.6 release; applied in order,
# before the trailing "e" (estimate) footnote marker is stripped
MONTH_REPLACEMENTS = [
    ('Jan.', '1'), ('Feb.', '2'), ('Mar.', '3'), ('Apr.', '4'),
    ('May', '5'), ('June', '6'), ('July', '7'), ('Aug.', '8'),
    ('Sept.', '9'), ('Oct.', '10'), ('Nov.', '11'), ('Dec.', '12'),
]

def normalize_table(text):
    # map month names to numbers, drop the "e" footnote marker, and split
    # the flattened table text into positional tokens
    for month, num in MONTH_REPLACEMENTS:
        text = text.replace(month, num)
    return text.replace('e', '').split()

condition = True
while condition:
    # schedule the next run for 17:00:01 tomorrow
    now_time = datetime.now()
    next_time = now_time + timedelta(days=1)
    next_time = datetime.strptime(
        "%d-%d-%d 17:00:01" % (next_time.year, next_time.month, next_time.day),
        "%Y-%m-%d %H:%M:%S")
    timer_start_time = (next_time - now_time).total_seconds()

    page = requests.get("https://www.federalreserve.gov/releases/h6/current/default.htm")
    page = page.text
    soup = BeautifulSoup(page, 'html.parser')
    page_tbody = soup.find_all('tbody')
    # the three tables: MoneyStockMeasures, SeasonallyAdjusted, NotSeasonallyAdjusted
    MoneyStockMeasures = normalize_table(remove_tags(str(page_tbody[0])))
    SeasonallyAdjusted = normalize_table(remove_tags(str(page_tbody[1])))
    NotSeasonallyAdjusted = normalize_table(remove_tags(str(page_tbody[2])))

    # one connection reused for all three tables (the original reconnected per row)
    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
    cursor = db.cursor()

    # table 1: each row is 12 tokens (month, year, ten values)
    condition1 = 0
    for i in range(17):
        date1 = MoneyStockMeasures[condition1 + 1] + '/' + MoneyStockMeasures[condition1 + 0] + '/1'
        date1 = datetime.strptime(date1, '%Y/%m/%d')
        value11 = MoneyStockMeasures[condition1 + 2].replace(",", "")
        value12 = MoneyStockMeasures[condition1 + 3].replace(",", "")
        value13 = MoneyStockMeasures[condition1 + 7].replace(",", "")
        value14 = MoneyStockMeasures[condition1 + 8].replace(",", "")
        value15 = MoneyStockMeasures[condition1 + 4].replace(",", "")
        value16 = MoneyStockMeasures[condition1 + 5].replace(",", "")
        value17 = MoneyStockMeasures[condition1 + 6].replace(",", "")
        value18 = MoneyStockMeasures[condition1 + 9].replace(",", "")
        value19 = MoneyStockMeasures[condition1 + 10].replace(",", "")
        value20 = MoneyStockMeasures[condition1 + 11].replace(",", "")
        condition1 += 12

        sql = "select date from MoneyStockMeasures order by date desc limit 1"
        cursor.execute(sql)
        old_date = cursor.fetchall()
        date2 = old_date[0][0]
        if i != 16:
            # historical rows: refresh in place
            sql = "UPDATE MoneyStockMeasures SET adjustedM1=%s,adjustedM2=%s,notAdjustedM1=%s,notAdjustedM2=%s,currencyincirculation=%s,reserveBalances=%s,monetaryBase=%s,totalReserves=%s,totalMborrowings_M=%s,nonborrowedReserves=%s WHERE date= '%s'" % (
                value11, value12, value13, value14, value15, value16, value17, value18, value19, value20, date1)
            cursor.execute(sql)
            db.commit()
        else:
            # newest row: update if already stored, insert otherwise
            if date1 == date2:
                sql = "UPDATE MoneyStockMeasures SET adjustedM1=%s,adjustedM2=%s,notAdjustedM1=%s,notAdjustedM2=%s,currencyincirculation=%s,reserveBalances=%s,monetaryBase=%s,totalReserves=%s,totalMborrowings_M=%s,nonborrowedReserves=%s WHERE date= '%s'" % (
                    value11, value12, value13, value14, value15, value16, value17, value18, value19, value20, date1)
                cursor.execute(sql)
                db.commit()
            else:
                sql = "insert into MoneyStockMeasures(date,adjustedM1,adjustedM2,notAdjustedM1,notAdjustedM2,currencyincirculation,reserveBalances,monetaryBase,totalReserves,totalMborrowings_M,nonborrowedReserves)values('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                    date1, value11, value12, value13, value14, value15, value16, value17, value18, value19, value20)
                cursor.execute(sql)
                db.commit()

    # table 2: each row is 7 tokens (month, year, five values)
    condition2 = 0
    for i in range(17):
        date3 = SeasonallyAdjusted[condition2 + 1] + '/' + SeasonallyAdjusted[condition2 + 0] + '/1'
        date3 = datetime.strptime(date3, '%Y/%m/%d')
        value21 = SeasonallyAdjusted[condition2 + 2].replace(",", "")
        value22 = SeasonallyAdjusted[condition2 + 3].replace(",", "")
        value23 = SeasonallyAdjusted[condition2 + 4].replace(",", "")
        value24 = SeasonallyAdjusted[condition2 + 5].replace(",", "")
        value25 = SeasonallyAdjusted[condition2 + 6].replace(",", "")
        condition2 += 7

        sql = "select date from SeasonallyAdjusted order by date desc limit 1"
        cursor.execute(sql)
        old_date = cursor.fetchall()
        date4 = old_date[0][0]
        if i != 16:
            sql = "UPDATE SeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s WHERE date= '%s'" % (
                value21, value22, value23, value24, value25, date3)
            cursor.execute(sql)
            db.commit()
        else:
            if date3 == date4:
                sql = "UPDATE SeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s WHERE date= '%s'" % (
                    value21, value22, value23, value24, value25, date3)
                cursor.execute(sql)
                db.commit()
            else:
                sql = "insert into SeasonallyAdjusted(date,currencyM1,demandM1,otherLiquid,smallDenominationTimeNonM1M2,retailMoneyMarketFundsNonM1M2)values('%s','%s','%s','%s','%s','%s')" % (
                    date3, value21, value22, value23, value24, value25)
                cursor.execute(sql)
                db.commit()

    # table 3: each row is 10 tokens (month, year, eight values)
    condition3 = 0
    for i in range(17):
        date5 = NotSeasonallyAdjusted[condition3 + 1] + '/' + NotSeasonallyAdjusted[condition3 + 0] + '/1'
        date5 = datetime.strptime(date5, '%Y/%m/%d')
        value31 = NotSeasonallyAdjusted[condition3 + 2].replace(",", "")
        value32 = NotSeasonallyAdjusted[condition3 + 3].replace(",", "")
        value33 = NotSeasonallyAdjusted[condition3 + 4].replace(",", "")
        value34 = NotSeasonallyAdjusted[condition3 + 5].replace(",", "")
        value35 = NotSeasonallyAdjusted[condition3 + 6].replace(",", "")
        value36 = NotSeasonallyAdjusted[condition3 + 7].replace(",", "")
        value37 = NotSeasonallyAdjusted[condition3 + 8].replace(",", "")
        value38 = NotSeasonallyAdjusted[condition3 + 9].replace(",", "")
        condition3 += 10

        sql = "select date from NotSeasonallyAdjusted order by date desc limit 1"
        cursor.execute(sql)
        old_date = cursor.fetchall()
        date6 = old_date[0][0]
        if i != 16:
            sql = "UPDATE NotSeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s,atDepositoryInstitutions=%s,atMoneyMarketFunds=%s,total=%s WHERE date= '%s'" % (
                value31, value32, value33, value34, value35, value36, value37, value38, date5)
            cursor.execute(sql)
            db.commit()
        else:
            if date5 == date6:
                sql = "UPDATE NotSeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s,atDepositoryInstitutions=%s,atMoneyMarketFunds=%s,total=%s WHERE date= '%s'" % (
                    value31, value32, value33, value34, value35, value36, value37, value38, date5)
                cursor.execute(sql)
                db.commit()
            else:
                sql = "insert into NotSeasonallyAdjusted(date,currencyM1,demandM1,otherLiquid,smallDenominationTimeNonM1M2,retailMoneyMarketFundsNonM1M2,atDepositoryInstitutions,atMoneyMarketFunds,total)values('%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
                    date5, value31, value32, value33, value34, value35, value36, value37, value38)
                cursor.execute(sql)
                db.commit()

    db.close()
    time.sleep(timer_start_time)
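For reference, the normalization turns a row fragment like this into positional tokens (illustrative input, not captured from the live release):

sample = "Jan. 2024 18,094.3 20,866.5"
print(normalize_table(sample))   # -> ['1', '2024', '18,094.3', '20,866.5']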
117
lyq/Macroeconomic_PCE_v3.py
Normal file
@@ -0,0 +1,117 @@
import requests
import pymysql
from datetime import datetime
import time

BEA_USER_ID = "146B5757-D9E3-442C-B6AC-ADE9E6B71114"
YEARS = ["2023", "2024", "2025"]  # fetch every year on the first run
SLEEP_SECONDS = 21600  # 6 hours

def get_bea_data(year):
    """Fetch the quarterly data for one year."""
    url = (
        f'https://apps.bea.gov/api/data?UserID={BEA_USER_ID}'
        f'&method=GetData&datasetname=NIPA&TableName=T10105&Frequency=Q'
        f'&Year={year}&ResultFormat=JSON'
    )
    response = requests.get(url)
    return response.json()['BEAAPI']['Results']['Data']

def update_database(cursor, data):
    """Organize the rows and insert the quarters that are missing."""
    # quarters already present in each table
    cursor.execute("SELECT times FROM PCE")
    existing_pce = {row[0] for row in cursor.fetchall()}
    cursor.execute("SELECT times FROM GPDI")
    existing_gpdi = {row[0] for row in cursor.fetchall()}
    cursor.execute("SELECT times FROM NETEXP")
    existing_netexp = {row[0] for row in cursor.fetchall()}

    # bucket the API rows by table and quarter
    pce_data, gpdi_data, netexp_data = {}, {}, {}
    for entry in data:
        t = entry["TimePeriod"]
        desc = entry["LineDescription"]
        val = entry["DataValue"]

        if desc == "Personal consumption expenditures":
            pce_data.setdefault(t, {})["PCE"] = val
        elif desc == "Durable goods":
            pce_data.setdefault(t, {})["PCEDG"] = val
        elif desc == "Nondurable goods":
            pce_data.setdefault(t, {})["PCEND"] = val
        elif desc == "Services" and entry["LineNumber"] == '6':
            pce_data.setdefault(t, {})["PCES"] = val

        elif desc == "Gross private domestic investment":
            gpdi_data.setdefault(t, {})["GPDI"] = val
        elif desc == "Fixed investment":
            gpdi_data.setdefault(t, {})["FPI"] = val
        elif desc == "Change in private inventories":
            gpdi_data.setdefault(t, {})["CBI"] = val

        elif desc == "Net exports of goods and services":
            netexp_data.setdefault(t, {})["NETEXP"] = val
        elif desc == "Imports":
            netexp_data.setdefault(t, {})["IMPGS"] = val
        elif desc == "Exports":
            netexp_data.setdefault(t, {})["EXPGS"] = val

    # insert the rows missing from the database
    for t, vals in pce_data.items():
        if t not in existing_pce:
            cursor.execute(
                "INSERT INTO PCE (times, PCE, PCEDG, PCEND, PCES) VALUES (%s,%s,%s,%s,%s)",
                (t, vals.get("PCE"), vals.get("PCEDG"), vals.get("PCEND"), vals.get("PCES"))
            )
    for t, vals in gpdi_data.items():
        if t not in existing_gpdi:
            cursor.execute(
                "INSERT INTO GPDI (times, GPDI, FPI, CBI) VALUES (%s,%s,%s,%s)",
                (t, vals.get("GPDI"), vals.get("FPI"), vals.get("CBI"))
            )
    for t, vals in netexp_data.items():
        if t not in existing_netexp:
            cursor.execute(
                "INSERT INTO NETEXP (times, NETEXP, IMPGS, EXPGS) VALUES (%s,%s,%s,%s)",
                (t, vals.get("NETEXP"), vals.get("IMPGS"), vals.get("EXPGS"))
            )

def run_job(first_run=False):
    """Run one fetch-and-update pass."""
    print(f"[{datetime.now()}] fetching BEA data and updating the database...")
    try:
        db = pymysql.connect(
            host="127.0.0.1",
            user="root",
            password="2GS@bPYcgiMyL14A",
            database="Macroeconomics",
            port=4423
        )
        cursor = db.cursor()

        years_to_fetch = YEARS if first_run else [YEARS[-1]]  # all years first, then only the latest
        for year in years_to_fetch:
            data = get_bea_data(year)
            update_database(cursor, data)
            db.commit()
            print(f"[{datetime.now()}] {year} data updated")

    except pymysql.MySQLError as e:
        print(f"[{datetime.now()}] database error: {e}")
    except Exception as e:
        print(f"[{datetime.now()}] unexpected error: {e}")
    finally:
        if 'cursor' in locals():
            cursor.close()
        if 'db' in locals():
            db.close()
        print(f"[{datetime.now()}] run complete.\n")

if __name__ == "__main__":
    first_run = True
    while True:
        run_job(first_run)
        first_run = False  # subsequent loops fetch only the latest year
        print(f"[{datetime.now()}] sleeping {SLEEP_SECONDS} seconds (6 hours)...\n")
        time.sleep(SLEEP_SECONDS)
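get_bea_data indexes straight into ['BEAAPI']['Results']['Data'], which raises KeyError when the API returns an error object instead of data. A defensive variant (sketch; the exact error-payload shape is an assumption about the BEA API, not confirmed by the commit):

def get_bea_data_safe(year):
    url = (f'https://apps.bea.gov/api/data?UserID={BEA_USER_ID}'
           f'&method=GetData&datasetname=NIPA&TableName=T10105&Frequency=Q'
           f'&Year={year}&ResultFormat=JSON')
    payload = requests.get(url, timeout=30).json()
    results = payload.get('BEAAPI', {}).get('Results', {})
    if 'Error' in results:
        # hypothetical handling; surface the error instead of a KeyError
        raise RuntimeError(f"BEA API error: {results['Error']}")
    return results.get('Data', [])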
1536
lyq/Macroeconomic_SAALOCBITUSS_ASSET.py
Normal file
File diff suppressed because it is too large
85
lyq/Macroeconomic_USTreasuriesSize.py
Normal file
@@ -0,0 +1,85 @@
import requests
import pymysql
from datetime import datetime
import time

old_transaction_mtd_amt = 0
condition = True
db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
cursor = db.cursor()
while condition:
    page = requests.get("https://api.fiscaldata.treasury.gov/services/api/fiscal_service/v1/accounting/dts/public_debt_transactions?fields=record_date,transaction_type,security_type,transaction_mtd_amt&sort=-record_date")
    page = page.json()
    page = page['data']
    # latest batch of rows from the API (newest first)
    page_data = page[0:23]
    for data in page_data:
        # latest date already stored in the database
        sql = "select date from USTreasuriesSize order by date desc limit 1"
        cursor.execute(sql)
        old_date = cursor.fetchall()
        old_date = old_date[0][0]  # already a datetime, comparable to record_date below

        record_date = data['record_date']
        transaction_type = data['transaction_type']
        security_type = data['security_type']
        transaction_mtd_amt = data['transaction_mtd_amt']
        record_date = datetime.strptime(record_date.replace('-', '/'), '%Y/%m/%d')
        # compare the newest row on the site against the newest row stored
        if record_date != old_date:
            sql = "insert into USTreasuriesSize(date)values('%s')" % (record_date)
            cursor.execute(sql)
            db.commit()
            sql = "select id from USTreasuriesSize order by id desc limit 1"
            cursor.execute(sql)
            id = cursor.fetchall()
            id = id[0][0]
            # renamed loop variable so the outer "data" is not clobbered
            for row in page_data:
                transaction_type = row['transaction_type']
                security_type = row['security_type']
                transaction_mtd_amt = row['transaction_mtd_amt']
                if transaction_type == 'Issues':
                    if security_type == 'Bills':
                        # bills appear several times per batch, so accumulate
                        transaction_mtd_amt = old_transaction_mtd_amt + int(transaction_mtd_amt)
                        sql1 = "update USTreasuriesSize set TBill=%s where id=%s" % (transaction_mtd_amt, id)
                        cursor.execute(sql1)
                        db.commit()
                        old_transaction_mtd_amt = transaction_mtd_amt
                    elif security_type == 'Notes':
                        sql2 = "update USTreasuriesSize set TNote=%s where id=%s" % (transaction_mtd_amt, id)
                        cursor.execute(sql2)
                        db.commit()
                    elif security_type == 'Bonds':
                        sql3 = "update USTreasuriesSize set TBond=%s where id=%s" % (transaction_mtd_amt, id)
                        cursor.execute(sql3)
                        db.commit()
                    elif security_type == 'Inflation-Protected Securities Increment':
                        sql4 = "update USTreasuriesSize set TIPS=%s where id=%s" % (transaction_mtd_amt, id)
                        cursor.execute(sql4)
                        db.commit()
                    else:
                        continue
                else:
                    continue
        else:
            continue

    old_transaction_mtd_amt = 0
    time.sleep(21600)
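The Bills branch threads a running total through old_transaction_mtd_amt because bill issues appear several times per batch. A sketch of the same aggregation done in one pass over the rows (hypothetical helper, not in the commit):

from collections import defaultdict

def sum_issues(rows):
    # total month-to-date issue amount per security type
    totals = defaultdict(int)
    for row in rows:
        if row['transaction_type'] == 'Issues':
            totals[row['security_type']] += int(row['transaction_mtd_amt'])
    return totals

# usage: totals = sum_issues(page_data); bills = totals.get('Bills', 0)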
96
lyq/Macroeconomic_USTreasuriesYields_v2.py
Normal file
@@ -0,0 +1,96 @@
import pymysql
import time
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from w3lib.html import remove_tags
from bs4 import BeautifulSoup
from datetime import datetime

# configure Selenium
chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--disable-gpu')
chrome_options.add_argument('--disable-dev-shm-usage')
chrome_options.add_argument('blink-settings=imagesEnabled=false')

# Selenium 3 takes executable_path and options directly
browser = webdriver.Chrome(executable_path="chromedriver", options=chrome_options)

# month-name replacements collected in one dict
MONTH_MAPPING = {
    "Jan": "/1/", "Feb": "/2/", "Mar": "/3/", "Apr": "/4/",
    "May": "/5/", "Jun": "/6/", "Jul": "/7/", "Aug": "/8/",
    "Sep": "/9/", "Oct": "/10/", "Nov": "/11/", "Dec": "/12/"
}

# database connection settings
DB_CONFIG = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "Macroeconomics",
    "port": 4423
}

def fetch_web_data():
    """Fetch the H.15 page and parse the release date and rate cells."""
    browser.get("https://www.federalreserve.gov/releases/h15/")
    soup = BeautifulSoup(browser.page_source, 'html.parser')

    # release date
    date_text = soup.find_all('th', class_="colhead sticky sticky-row-cell")[-1].get_text(strip=True)
    for month, replacement in MONTH_MAPPING.items():
        date_text = date_text.replace(month, replacement)
    date = datetime.strptime(date_text.replace('*', ''), '%Y/%m/%d')

    # rate cells, flattened in page order
    data = [remove_tags(str(td)).strip() for td in soup.find_all('td', class_="data")]
    return date, data

def get_latest_db_date():
    """Latest date already stored in the database."""
    with pymysql.connect(**DB_CONFIG) as conn:
        with conn.cursor() as cursor:
            cursor.execute("SELECT MAX(date) FROM USTreasuriesYields")
            result = cursor.fetchone()
            return result[0] if result[0] else None

def insert_data(date, rates, mprime, dpcredit):
    """Insert one day of yields and the two policy rates."""
    with pymysql.connect(**DB_CONFIG) as conn:
        with conn.cursor() as cursor:
            sql_treasuries = """
                INSERT INTO USTreasuriesYields (date, 1_Mo, 3_Mo, 6_Mo, 1_Yr, 2_Yr, 5_Yr, 10_Yr, 20_Yr, 30_Yr)
                VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
            """
            sql_interest_rate = """
                INSERT INTO InterestRate (date, name, _value)
                VALUES (%s, %s, %s)
            """
            cursor.execute(sql_treasuries, [date] + rates)
            cursor.execute(sql_interest_rate, (date, 'BPL', mprime))
            cursor.execute(sql_interest_rate, (date, 'DWPC', dpcredit))
            conn.commit()

def main():
    while True:
        new_date, data = fetch_web_data()
        old_date = get_latest_db_date()

        if old_date and new_date <= old_date:
            time.sleep(21600)  # 6 hours
            continue

        # pick the rate cells out of the flattened table; 'ND' (no data)
        # becomes None so the driver stores a real SQL NULL
        rates = [
            (None if data[i] == 'ND' else data[i]) if i < len(data) else None
            for i in [104, 109, 114, 119, 124, 134, 144, 149, 154]
        ]
        mprime = data[54] if len(data) > 54 else None
        dpcredit = data[59] if len(data) > 59 else None

        insert_data(new_date, rates, mprime, dpcredit)

if __name__ == "__main__":
    main()
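The index list in main() encodes which cells of the flattened H.15 table are read; collecting them in one mapping documents the assumption that the page layout stays fixed (the offsets come straight from the code above, the tenor names from the INSERT column list):

RATE_CELL_INDEX = {
    '1_Mo': 104, '3_Mo': 109, '6_Mo': 114, '1_Yr': 119, '2_Yr': 124,
    '5_Yr': 134, '10_Yr': 144, '20_Yr': 149, '30_Yr': 154,
    'BPL': 54,   # stored under name 'BPL' (mprime above)
    'DWPC': 59,  # stored under name 'DWPC' (dpcredit above)
}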
105
lyq/Macroeconomic_WEI.py
Normal file
@@ -0,0 +1,105 @@
import time
from full_fred.fred import Fred
import pymysql

fred = Fred('example_key.txt')
fred.set_api_key_file('example_key.txt')
times = "00:00:00"

def to_ts_ms(datestr):
    # 'YYYY-MM-DD' -> millisecond timestamp; mktime uses the host timezone,
    # and 28800000 ms (8 hours) is subtracted to correct for UTC+8
    timeArray = time.strptime(datestr + ' ' + times, "%Y-%m-%d %H:%M:%S")
    return int(time.mktime(timeArray) * 1000) - 28800000

while True:
    # (a disabled variant scheduled the next run for 15:00 the following day
    # instead of sleeping a fixed interval)
    data = fred.get_series_df('WEI')
    list1 = list(data['date'])
    list2 = list(data['value'])
    # latest seven observations from FRED
    dates = [to_ts_ms(d) for d in list1[-7:]]
    values = list2[-7:]
    date1, value1 = dates[-1], values[-1]

    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
    cursor = db.cursor()
    sql = "select times from WEI order by times desc limit 1"
    cursor.execute(sql)
    old_time = cursor.fetchall()
    old_time = old_time[0][0]
    # refresh the six observations before the latest (FRED may revise them)
    for d, v in zip(dates[:-1], values[:-1]):
        sql = "update WEI set WEI=%s where times='%s'" % (v, d)
        cursor.execute(sql)
    db.commit()
    if date1 == old_time:
        db.close()
        time.sleep(21600)
    else:
        sql = "insert into WEI(times,WEI)values('%s','%s')" % (date1, value1)
        cursor.execute(sql)
        db.commit()
        db.close()
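The conversion above depends on the host clock, because time.mktime interprets the struct in the server's local timezone. A timezone-independent variant (sketch; it matches the original only when the host runs in UTC):

import calendar
import time

def to_ts_ms_utc(datestr):
    # interpret 'YYYY-MM-DD 00:00:00' as UTC, then apply the same 8-hour shift
    arr = time.strptime(datestr + ' 00:00:00', "%Y-%m-%d %H:%M:%S")
    return calendar.timegm(arr) * 1000 - 28800000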
@@ -1,5 +1,35 @@
Directory overview:
arh999_lyq.py --BTC data collection
arh999eth_lyq.py --ETH data collection
Binance_fapi.py --BTC data collection
btc_price_fetcher.py --BTC data collection
btc_price.py --BTC data collection
btc_stats_qt.py --BTC data collection
btc_update.py --BTC data collection
btc_utxos_lyq3.py --BTC data collection
btc_utxos_update_lyq3.py --BTC data collection
btc24h_db_if.py --BTC data, MySQL interface
btc24h_redis_if.py --BTC data, Redis interface
btc24h_stats.py --BTC real-time stats updater
check_order_lyq.py --BTCUSDT/ETHUSDT fetcher
check_zone_lyq.py --BTCUSDT/ETHUSDT fetcher
CoinmarKetCap.py --coin, market cap, volume and price-change fetcher
db_if_qt.py --BTC data, MySQL interface
exchangeRate_lyq.py --real-time exchange rates
Macroeconomic_COVITGDP_v2.py --macroeconomic data
Macroeconomic_CPI_NSA_v2.py --macroeconomic data
Macroeconomic_FARBODI.py --macroeconomic data
Macroeconomic_FBI_v2.py --macroeconomic data
Macroeconomic_FER.py --macroeconomic data
Macroeconomic_InterestRate.py --macroeconomic data
Macroeconomic_Loan.py --macroeconomic data
Macroeconomic_MoneyStockMeasures.py --macroeconomic data
Macroeconomic_PCE_v3.py --macroeconomic data
Macroeconomic_SAALOCBITUSS_ASSET.py --macroeconomic data
Macroeconomic_USTreasuriesSize.py --macroeconomic data
Macroeconomic_USTreasuriesYields_v2.py --macroeconomic data
Macroeconomic_WEI.py --macroeconomic data
nochain_eth_lyq.py --supply and supply-ratio
nochain_lyq_utc08.py --supply and supply-ratio
nochain_lyq_v2.py --supply and supply-ratio
nochain_update_lyq.py --supply and supply-ratio
redis_if_qt.py --BTC data, Redis interface
1005
lyq/arh999_lyq.py
Normal file
File diff suppressed because it is too large
504
lyq/arh999eth_lyq.py
Normal file
@@ -0,0 +1,504 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
from loguru import logger
import datetime
import pymysql
import math
import csv

g_prices = {}
g_dbif = None
g_lastts = 0

def get_day60_rise(day, prices):
    # fixed 60-day variant; the generalized get_days_rise below is what the
    # rest of the script calls
    total = 0
    cnt = 0
    for i in range(60):
        if str(day) in prices:
            cur_price = prices[str(day)]
            day = str(day - 3600 * 24)
            if day in prices:
                prev_price = prices[day]
                try:
                    #print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
                    total += ((cur_price - prev_price) / prev_price)
                    cnt += 1
                except:
                    pass
            # print(day, total, cnt)
            day = int(day)
    return total

def get_days_rise(day, maxdays, prices):
    # cumulative sum of simple daily returns over the last maxdays days
    total = 0
    cnt = 0
    for i in range(maxdays):
        if str(day) in prices:
            cur_price = prices[str(day)]
            day = str(day - 3600 * 24)
            if day in prices:
                prev_price = prices[day]
                try:
                    #print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
                    total += ((cur_price - prev_price) / prev_price)
                    cnt += 1
                except:
                    pass
            # print(day, total, cnt)
            day = int(day)
    return total

def append_jzr_day60(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
    dbif.append_jzr60(day, price, day60_rise, day7_rise, day30_rise, day90_rise)

def sync_jzr_day60(dbif, prices):
    # backfill the jzr tables from the whole price history
    for day in prices:
        print(day, prices[day])
        day60_rise = get_days_rise(int(day), 60, prices)
        day7_rise = get_days_rise(int(day), 7, prices)
        day30_rise = get_days_rise(int(day), 30, prices)
        day90_rise = get_days_rise(int(day), 90, prices)
        print(day, day60_rise)
        append_jzr_day60(dbif, day, prices[day], day60_rise, day7_rise, day30_rise, day90_rise)

def check_jzr60_sync(dbif):
    return dbif.check_jzr60_sync()

def append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
    dbif.append_jzr60_day(day, price, day60_rise, day7_rise, day30_rise, day90_rise)

def append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise):
    dbif.append_jzr60(dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)

def clean_jzr60day(dbif, clean_day):
    dbif.clean_jzr60_day(clean_day)

def handle_jzr_day60(dbif, day, dayutc, price, prices):
    # recompute the rise windows for the current day and persist them
    day60_rise = get_days_rise(dayutc, 60, prices)
    day7_rise = get_days_rise(dayutc, 7, prices)
    day30_rise = get_days_rise(dayutc, 30, prices)
    day90_rise = get_days_rise(dayutc, 90, prices)
    print(dayutc, price, day, day60_rise)
    append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise)
    append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
    clean_day = dayutc - 3600 * 24 * 2
    clean_jzr60day(dbif, clean_day)
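get_days_rise walks backward through the price map one day at a time and accumulates simple daily returns, skipping days that are missing from the map. For an n-day window ending at day t it computes, in LaTeX form:

\mathrm{rise}_n(t) = \sum_{i=0}^{n-1} \frac{P_{t-i} - P_{t-i-1}}{P_{t-i-1}}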
class Arh99DbIf:
    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
        print("init arh99 db success!")

    def check_sync(self):
        synced = False
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `arh99v3a`"
            cursor.execute(sql_query)
            result = cursor.fetchone()
            print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        synced = True
            self.conn.commit()
        #print("synced", synced)
        return synced

    def append(self, day, price, arh99, arh99x):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `arh99v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (int(day),))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        sql_update = 'UPDATE arh99v3a SET `arh99`=%s, `arh99x`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
                        print(sql_update)
                        cursor.execute(sql_update, (arh99, arh99x, price, int(day), int(day)))
                    else:
                        sql_insert = "INSERT INTO `arh99v3a` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
                        print(sql_insert)
                        cursor.execute(sql_insert, (day, price, arh99, arh99x))
            self.conn.commit()

    def append_day(self, day, price, arh99, arh99x):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `arh99v3aday` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            print(sql_insert)
            cursor.execute(sql_insert, (day, price, arh99, arh99x))
            self.conn.commit()

    def clean_day(self, day):
        with self.conn.cursor() as cursor:
            sql_clean = "DELETE from arh99v3aday where unixdt<FROM_UNIXTIME(%s)"
            print(sql_clean)
            cursor.execute(sql_clean, (int(day),))
            self.conn.commit()

    def check_jzr60_sync(self):
        synced = False
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `jzr60v3a`"
            cursor.execute(sql_query)
            result = cursor.fetchone()
            print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        synced = True
            self.conn.commit()
        #print("synced", synced)
        return synced

    def append_jzr60(self, day, price, jzr60, jzr7, jzr30, jzr90):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `jzr60v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (int(day),))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        sql_update = 'UPDATE jzr60v3a SET `jzr60`=%s,`jzr7`=%s,`jzr30`=%s,`jzr90`=%s,`price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
                        print(sql_update)
                        cursor.execute(sql_update, (jzr60, jzr7, jzr30, jzr90, price, int(day), int(day)))
                    else:
                        sql_insert = "INSERT INTO `jzr60v3a` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
                        print(sql_insert)
                        cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
            self.conn.commit()

    def append_jzr60_day(self, day, price, jzr60, jzr7, jzr30, jzr90):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `jzr60v3aday` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
            print(sql_insert)
            cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
            self.conn.commit()

    def clean_jzr60_day(self, day):
        with self.conn.cursor() as cursor:
            sql_clean = "DELETE from jzr60v3aday where unixdt<FROM_UNIXTIME(%s)"
            print(sql_clean)
            cursor.execute(sql_clean, (int(day),))
            self.conn.commit()

    def check_ma730_sync(self):
        synced = False
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `ma730v3a`"
            cursor.execute(sql_query)
            result = cursor.fetchone()
            print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        synced = True
            self.conn.commit()
        #print("synced", synced)
        return synced

    def append_ma730(self, day, price, ma730, ma365, ma200):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `ma730v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (int(day),))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    ma730x5 = ma730 * 5
                    if result["COUNT(id)"] > 0:
                        sql_update = 'UPDATE ma730v3a SET `ma730`=%s, `ma730x5`=%s, `ma365`=%s, `ma200`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
                        print(sql_update)
                        cursor.execute(sql_update, (ma730, ma730x5, ma365, ma200, price, int(day), int(day)))
                    else:
                        sql_insert = "INSERT INTO `ma730v3a` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
                        print(sql_insert)
                        cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
            self.conn.commit()

    def append_ma730_day(self, day, price, ma730, ma365, ma200):
        with self.conn.cursor() as cursor:
            ma730x5 = ma730 * 5
            sql_insert = "INSERT INTO `ma730v3aday` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
            print(sql_insert)
            cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
            self.conn.commit()

    def clean_ma730_day(self, day):
        with self.conn.cursor() as cursor:
            sql_clean = "DELETE from ma730v3aday where unixdt<FROM_UNIXTIME(%s)"
            print(sql_clean)
            cursor.execute(sql_clean, (int(day),))
            self.conn.commit()
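append, append_jzr60 and append_ma730 each issue a SELECT COUNT followed by an UPDATE or INSERT. Assuming a UNIQUE index on unixdt (an assumption about the schema, not confirmed by the commit), MySQL can collapse the pair into a single statement; a sketch for arh99v3a:

sql_upsert = ("INSERT INTO `arh99v3a` (`unixdt`, `price`, `arh99`, `arh99x`) "
              "VALUES (FROM_UNIXTIME(%s), %s, %s, %s) "
              "ON DUPLICATE KEY UPDATE `price`=VALUES(`price`), "
              "`arh99`=VALUES(`arh99`), `arh99x`=VALUES(`arh99x`)")
# cursor.execute(sql_upsert, (int(day), price, arh99, arh99x))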
def get_history_price(dbif):
    # load the daily price history from the bundled CSV
    global g_prices
    with open("eth_history_price.csv", newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            print(row)
            daystr = row["Date"]
            p = row["Price"]
            dayutc = time.mktime(time.strptime(daystr, "%m/%d/%Y"))
            g_prices[str(int(dayutc))] = float(p)
    '''response_price = requests.get(
        'https://data.nasdaq.com/api/v3/datasets/BCHAIN/MKPRU.json?api_key=FZqXog4sR-b7cYnXcRVV')
    if response_price.status_code == 200:
        #print(response_price.content)
        priceweb = ujson.loads(response_price.content)
        if "dataset" in priceweb:
            priceset = priceweb["dataset"]
            if "data" in priceset:
                pricedata = priceset["data"]
                for price in pricedata:
                    daystr = price[0]
                    p = price[1]
                    dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
                    g_prices[str(int(dayutc))] = float(p)
                    #print(price, int(dayutc), g_prices[str(int(dayutc))])
    '''
    return g_prices

def get_history_price2(dbif):
    # fetch the daily price history from the Messari time-series API,
    # walking backwards one year at a time
    global g_prices
    #pricedict = {}
    dayt = time.gmtime()
    daystr = time.strftime("%Y", dayt)
    year = int(daystr)
    end_year = year
    while True:
        url = ""
        if end_year != year:
            start_year = end_year
            url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?start="
        else:
            url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?after=" + str(
                year) + "-01-01&order=descending&interval=1d"

        if end_year != year:
            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
        header_set = {}
        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
        # header_set["Content-Type"] = "application/json"
        print(header_set, url)
        response_price = requests.get(url, headers=header_set)
        # print(response_price)
        if response_price.status_code == 200:
            # print(response_price.content)
            priceweb = ujson.loads(response_price.content)
            if "data" in priceweb:
                priceset = priceweb["data"]
                if "values" in priceset:
                    valueset = priceset["values"]
                    if valueset is not None:
                        for supply in valueset:
                            dayutc = int(supply[0] / 1000)
                            s = supply[1]
                            # normalize each timestamp to midnight
                            ret_time = time.gmtime(dayutc)
                            ret_daystr = time.strftime("%d %b %Y", ret_time)
                            ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
                            #self.pricedict[str(ret_dayutc)] = float(s)
                            g_prices[str(ret_dayutc)] = float(s)
                            # print(s, dayutc, pricedict[str(dayutc)])
                        # break
                    else:
                        break
                else:
                    break
        end_year -= 1
        time.sleep(2)
    #print(self.pricedict)
    #return self.pricedict

    get_history_price(dbif)
    return g_prices

def get_coin_days(day):
    # coin age in days; 2009-01-03 (the Bitcoin genesis date) is kept from
    # the BTC variant of this script
    birthday = time.mktime(time.strptime("2009-01-03", "%Y-%m-%d"))
    days = (int(day) - birthday) / 3600 / 24
    #print(day, birthday, days)
    return days

def get_coin_exp(days):
    # exponential growth valuation term: 10 ** (5.84*log10(days) - 17.01)
    try:
        temp = 5.84 * math.log10(days) - 17.01
        #print("temp", temp, math.log10(days), days)
        exp = math.pow(10, temp)
        return exp
    except:
        return 0

def cal_day200_price(prices, day):
    # 200-day simple moving average of the price map
    day = str(day)  # prices is keyed by string timestamps, so normalize first
    total = 0
    cnt = 0
    for i in range(200):
        if day in prices:
            total += prices[day]
            cnt += 1
        #print(day, total, cnt)
        day = str(int(day) - 3600 * 24)

    if cnt > 0:
        return total / cnt
    return 0

def cal_arh99(prices, day, price):
    day200 = cal_day200_price(prices, day)
    #print("day200", day200)
    days = get_coin_days(day)
    #print("days", days)
    exp = get_coin_exp(days)
    #print("exp", exp, price)
    try:
        arh99 = (float(price) / day200) * (float(price) / exp)
        arh99x = (day200 / float(price)) * (exp / float(price)) * 3
    except:
        arh99 = 0
        arh99x = 0
    #print("arh99", arh99)

    return arh99, arh99x
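In formula form, with P the latest price, MA200 the 200-day average from cal_day200_price, and d the coin age in days from get_coin_days, cal_arh99 computes:

\mathrm{arh99} = \frac{P}{\mathrm{MA}_{200}}\cdot\frac{P}{E(d)}, \qquad
\mathrm{arh99x} = \frac{\mathrm{MA}_{200}}{P}\cdot\frac{E(d)}{P}\cdot 3, \qquad
E(d) = 10^{\,5.84\log_{10}d - 17.01}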
|
||||||
|
def check_sync(dbif):
|
||||||
|
return dbif.check_sync()
|
||||||
|
|
||||||
|
def append_arh99(dbif, day, price, arh99, arh99x):
|
||||||
|
dbif.append(day, price, arh99, arh99x)
|
||||||
|
|
||||||
|
def sync_arh99(dbif, prices):
|
||||||
|
for day in prices:
|
||||||
|
print(day, prices[day])
|
||||||
|
arh99, arh99x = cal_arh99(prices, int(day), prices[day])
|
||||||
|
print(day, arh99, arh99x)
|
||||||
|
append_arh99(dbif, day, prices[day], arh99, arh99x)
|
||||||
|
|
||||||
|
def append_arh99day(dbif, day, price, arh99, arh99x):
|
||||||
|
dbif.append_day(day, price, arh99, arh99x)
|
||||||
|
|
||||||
|
def clean_arh99day(dbif, day):
|
||||||
|
dbif.clean_day(day)
|
||||||
|
|
||||||
|
def arh99_handler(message):
|
||||||
|
global g_prices
|
||||||
|
global g_dbif
|
||||||
|
global g_lastts
|
||||||
|
coin_data = message["data"]
|
||||||
|
#coin_symbol = coin_data["s"]
|
||||||
|
coin_ts = int(coin_data["E"])
|
||||||
|
coin_price = float(coin_data["c"])
|
||||||
|
#print((coin_ts/1000), int((coin_ts/1000)%60))
|
||||||
|
if int((coin_ts/1000)%60) == 0:
|
||||||
|
#if coin_ts/1000/60 != g_lastts:
|
||||||
|
if coin_ts/1000 - g_lastts >= 15:
|
||||||
|
#print(coin_ts, coin_price)
|
||||||
|
coin_ts2 = time.gmtime(coin_ts/1000)
|
||||||
|
daystr = time.strftime("%d %b %Y", coin_ts2)
|
||||||
|
print(daystr)
|
||||||
|
dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
|
||||||
|
g_prices[str(dayutc)] = coin_price
|
||||||
|
arh99, arh99x = cal_arh99(g_prices, dayutc, coin_price)
|
||||||
|
print(dayutc, coin_price, arh99, arh99x)
|
||||||
|
|
||||||
|
append_arh99day(g_dbif, coin_ts/1000, coin_price, arh99, arh99x)
|
||||||
|
append_arh99(g_dbif, dayutc, coin_price, arh99, arh99x)
|
||||||
|
clean_day = dayutc - 3600*24*2
|
||||||
|
clean_arh99day(g_dbif, clean_day)
|
||||||
|
|
||||||
|
handle_jzr_day60(g_dbif, coin_ts/1000, dayutc, coin_price, g_prices)
|
||||||
|
handle_ma_day730(g_dbif, coin_ts / 1000, dayutc, coin_price, g_prices)
|
||||||
|
g_lastts = coin_ts/1000
|
||||||
|
|
||||||
|
def start_arh99(dbif, prices):
|
||||||
|
ws_client = WebsocketClient()
|
||||||
|
ws_client.start()
|
||||||
|
ws_client.instant_subscribe(
|
||||||
|
stream=['ethusdt@miniTicker'],
|
||||||
|
callback=arh99_handler,
|
||||||
|
)
|
||||||
|
|
||||||
|
def arh99():
|
||||||
|
global g_dbif
|
||||||
|
g_dbif = Arh99DbIf()
|
||||||
|
prices = get_history_price2(g_dbif)
|
||||||
|
#if not check_sync(g_dbif):
|
||||||
|
if True:
|
||||||
|
sync_arh99(g_dbif, prices)
|
||||||
|
#if not check_jzr60_sync(g_dbif):
|
||||||
|
if True:
|
||||||
|
sync_jzr_day60(g_dbif, prices)
|
||||||
|
#if not check_ma730_sync(g_dbif):
|
||||||
|
if True:
|
||||||
|
sync_ma_day730(g_dbif, prices)
|
||||||
|
start_arh99(g_dbif, prices)
|
||||||
|
|
||||||
|
#2-year ma multiplier
|
||||||
|
def get_day730_rise(day, prices):
|
||||||
|
total = 0
|
||||||
|
cnt = 0
|
||||||
|
for i in range(730):
|
||||||
|
if str(day) in prices:
|
||||||
|
cur_price = prices[str(day)]
|
||||||
|
total += cur_price
|
||||||
|
cnt += 1
|
||||||
|
day = str(day - 3600 * 24)
|
||||||
|
day = int(day)
|
||||||
|
if cnt > 0:
|
||||||
|
return total/cnt
|
||||||
|
return 0
|
||||||
|
|
||||||
|
def get_day365_rise(day, maxdays, prices):
    total = 0
    cnt = 0
    for i in range(maxdays):
        if str(day) in prices:
            cur_price = prices[str(day)]
            total += cur_price
            cnt += 1
        day = day - 3600 * 24
    if cnt > 0:
        return total / cnt
    return 0


def append_ma_day730(dbif, day, price, day730_rise, day365_rise, day200_rise):
    dbif.append_ma730(day, price, day730_rise, day365_rise, day200_rise)


def sync_ma_day730(dbif, prices):
    for day in prices:
        print(day, prices[day])
        day730_rise = get_day730_rise(int(day), prices)
        day365_rise = get_day365_rise(int(day), 365, prices)
        day200_rise = get_day365_rise(int(day), 200, prices)
        print(day, day730_rise)
        append_ma_day730(dbif, day, prices[day], day730_rise, day365_rise, day200_rise)


def check_ma730_sync(dbif):
    return dbif.check_ma730_sync()


def append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise):
    dbif.append_ma730_day(day, price, day730_rise, day365_rise, day200_rise)


def append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise):
    dbif.append_ma730(dayutc, price, day730_rise, day365_rise, day200_rise)


def clean_ma730day(dbif, clean_day):
    dbif.clean_ma730_day(clean_day)


def handle_ma_day730(dbif, day, dayutc, price, prices):
    day730_rise = get_day730_rise(dayutc, prices)
    day365_rise = get_day365_rise(dayutc, 365, prices)
    day200_rise = get_day365_rise(dayutc, 200, prices)
    print(dayutc, price, day, day730_rise)
    append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise)
    append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise)
    clean_day = dayutc - 3600 * 24 * 2
    clean_ma730day(dbif, clean_day)


arh99()
600
lyq/btc24h_db_if.py
Normal file
@@ -0,0 +1,600 @@
# coding=utf-8
import datetime
import json
import requests
import pymysql
from loguru import logger
import time


class DbIf:
    def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname,
                                    cursorclass=pymysql.cursors.DictCursor)

    def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price,
                              transferred_price, balanced_price, nvt_ratio, velocity):
        with self.conn.cursor() as cursor:
            print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price,
                  balanced_price, nvt_ratio, velocity)
            # Call the message-subscription API: push the realtime rt_dailyindsv2e2 data into the topic
            # url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1"
            # headers = {"accept": "application/json"}
            # data = {"unixdt":dt_utc,"height_begin":height_begin,"height_end":height_end,"lth_volume":lth_volume,"frm":frm,"cvdd":cvdd,"realized_price":realized_price,"transferred_price":transferred_price,"balanced_price":balanced_price,"nvt_ratio":nvt_ratio,"velocity":velocity}
            # response = requests.post(url=url, data=json.dumps(data), headers=headers)
            sql_insert = "REPLACE INTO rt_dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            cursor.execute(sql_insert, (
                dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price,
                balanced_price, nvt_ratio, velocity))

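    # A minimal usage sketch (credentials and schema are the constructor
    # defaults above; the argument values are illustrative assumptions):
    #
    #   dbif = DbIf()
    #   dbif.update_to_dailyindsv2(1700000000, 800000, 800143, 1.2, 3.4, 5.6,
    #                              21000.0, 22000.0, 23000.0, 45.0, 0.01)
    #   dbif.conn.commit()  # note: unlike update_to_dailyinds, this method does not commit on its own
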
    '''
    def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)"
            cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees))
    '''

    def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address,
                            new_address_volume, active_address,
                            send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
                            asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
                            day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
                            liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
                            nupl):
        with self.conn.cursor() as cursor:
            # Call the message-subscription API: push the realtime rt_dailyindsv2e1 data into the topic
            # payload structure: {dt_utc: 'dt_utc'}
            try:
                url = "https://coinbus.cc/api/v1/marketall/push/realtime/btc/dailyv2e1"
                headers = {"accept": "application/json"}
                data = {"unixdt": dt_utc, "height_begin": height_begin, "height_end": height_end, "profitrate": profitrate,
                        "fees": fees, "txs": txs, "new_address": new_address, "total_address": total_address,
                        "new_address_volume": new_address_volume, "active_address": active_address, "send_address": send_address,
                        "receive_address": receive_address, "volume": volume, "eavolume": eavolume, "sopr": sopr, "asopr": asopr, "easopr": easopr,
                        "lthsopr": lthsopr, "sthsopr": sthsopr, "asol": asol, "eaasol": eaasol, "dormancy": dormancy,
                        "adormancy": adormancy, "eadormancy": eadormancy, "cdd": cdd, "sacdd": sacdd, "eacdd": eacdd, "day1": day1, "day7": day7,
                        "day30": day30, "day60": day60, "day90": day90, "day180": day180, "day365": day365, "day730": day730,
                        "csupply": csupply, "mintusd": mintusd, "sumcsupply": sumcsupply, "sumcdd": sumcdd, "sumeacdd": sumeacdd, "liveliness": liveliness,
                        "ealiveliness": ealiveliness, "rprofit": rprofit, "rloss": rloss, "rplrate": rplrate,
                        "price": price, "marketcap": marketcap, "rcap": rcap, "earcap": earcap, "mvrv": mvrv, "nupl": nupl}
                response = requests.post(url=url, data=json.dumps(data), headers=headers)
            except Exception:
                print("API call failed")
            sql_insert = "REPLACE INTO rt_dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
            sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
            sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, nupl"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume,
                active_address, send_address,
                receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
                adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
                csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
                price, marketcap, rcap, earcap, mvrv,
                nupl))
        self.conn.commit()

    '''
    def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address,
                            send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
                            asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
                            day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
                            liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
                            lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
            sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
            sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address,
                receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
                adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
                csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
                price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv,
                nupl))
            self.conn.commit()
    '''

    '''
    # daily on-chain volume
    def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `dailyvolume`"

            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"

            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)
            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()
    '''

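    # A minimal sketch of the same time-window query built with bound parameters
    # instead of string concatenation (an alternative pattern, not this module's
    # own code; the method name is hypothetical, table and columns follow the
    # query above):
    def query_dailyvolume_between(self, start_time, end_time, limit=100):
        with self.conn.cursor() as cursor:
            sql = ("SELECT * FROM `dailyvolume` "
                   "WHERE unixdt BETWEEN %s AND %s ORDER BY unixdt DESC LIMIT %s")
            cursor.execute(sql, (start_time, end_time, int(limit)))
            return cursor.fetchall()
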
    # newaddrs
    '''
    def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt,
                          last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change,
                          last_vol):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (dayutc,))
            result = cursor.fetchone()
            # print(dt_utc)
            # print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        print("update")
                        sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, `volume`=%s, `txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
                        cursor.execute(sql_update, (
                            last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
                            last_rx_addr_cnt,
                            last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc))
                    else:
                        print("insert")
                        sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
                        # print(sql_insert)
                        cursor.execute(sql_insert, (
                            dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
                            last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees,
                            last_profit_rate))
            self.conn.commit()
    '''

    '''
    def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
            # print(sql_insert)
            # print(datetime, txid, vout, voutalias, amount, height)
            cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height))
            self.conn.commit()
    '''

    '''
    def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit,
                                days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid,
                                block_height):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            # print(datetime, txid, vout, voutalias, amount, height)
            cursor.execute(sql_insert, (
                dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest,
                days_largest, days_current, tx_buy_address, txid, block_height))
            self.conn.commit()
    '''

    '''
    def update_to_dailycdd(self, dt_utc, cdd):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, cdd))
            self.conn.commit()
    '''

    '''
    def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30,
                               day60, day90, day180, day365, day730):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180,
                day365,
                day730))
            self.conn.commit()
    '''

    '''
    def update_to_dailysopr(self, dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr):
        with self.conn.cursor() as cursor:
            # note: the original had seven placeholders for six columns; one %s removed
            sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr))
            self.conn.commit()
    '''

    '''
    def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit,
                       rloss, rplrate, price, marketcap, rcap, earcap, mvrv):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss,
                rplrate,
                price, marketcap, rcap, earcap, mvrv))
            self.conn.commit()
    '''

    # daily volume
    '''
    def update_to_dailyvolume(self, dt_utc, volume):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, volume))
            self.conn.commit()
    '''

    '''
    with self.conn.cursor() as cursor:
        sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)"
        cursor.execute(sql_query, (dt_utc,))
        result = cursor.fetchone()
        # print(dt_utc)
        # print(result)
        if result is not None:
            if "COUNT(id)" in result:
                if result["COUNT(id)"] > 0:
                    print("update")
                    sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
                    cursor.execute(sql_update, (volume, dt_utc))
                else:
                    print("insert")
                    sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
                    # print(sql_insert)
                    cursor.execute(sql_insert, (dt_utc, volume))
        self.conn.commit()
    '''

    '''
    def update_to_dailyfees(self, dt_utc, fees):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, fees))
            self.conn.commit()
    '''

    '''
    def import_to_dailyvolume2(self, dt_utc, volume):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, volume))
            self.conn.commit()

    def delete_dailyvolume_data(self, config):
        with self.conn.cursor() as cursor:
            sql_query = "DELETE FROM `dailyvolume`"
            cursor.execute(sql_query)
            self.conn.commit()


    # daily market cap
    def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `dailyprice`"

            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"

            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)
            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()

    # daily price
    def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply))
            self.conn.commit()

    def update_to_dailyprice(self, dt_utc, price, volume, change):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, price, volume, change))
            self.conn.commit()

    def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (dt_utc,))
            result = cursor.fetchone()
            # print(dt_utc)
            # print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        print("update")
                        sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
                        cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc))
                    else:
                        print("insert")
                        sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
                        # print(sql_insert)
                        cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply))
            self.conn.commit()

    def update_dailypricechange(self):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt"
            cursor.execute(sql_query)
            results = cursor.fetchall()
            prevprice = -1
            for result in results:
                if prevprice < 0:
                    prevprice = result["price"]
                else:
                    # print(result["unixdt"], result["price"], result["marketcap"])
                    try:
                        change = (result["price"] / prevprice - 1) * 100
                    except ZeroDivisionError:
                        change = 0
                    # print(csupply)
                    datestr = result["unixdt"]
                    logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change))
                    sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s'
                    cursor.execute(sql_update, (str(change), result["unixdt"]))
                    prevprice = result["price"]
            self.conn.commit()

    def delete_dailyprice_data(self, config):
        with self.conn.cursor() as cursor:
            sql_query = "DELETE FROM `dailyprice`"
            cursor.execute(sql_query)
            self.conn.commit()

    def delete_failed_blockvolume(self, height):
        with self.conn.cursor() as cursor:
            sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s"
            cursor.execute(sql_insert, (height,))
            sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s"
            cursor.execute(sql_insert, (height,))
            sql_insert = "DELETE FROM `blockamount` WHERE height=%s"
            cursor.execute(sql_insert, (height,))
            self.conn.commit()

    # block check --- big amount for vout
    def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `bigamountvout`"
            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"
            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)

            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()
    '''

    def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout,
                                profit):
        # note: the `datetime` parameter shadows the stdlib module imported above
        with self.conn.cursor() as cursor:
            # url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1"
            # headers = {"accept": "application/json"}
            # data = {"unixdt":datetime,"vout":vout,"voutn":voutn,"vouttype":vouttype,
            #         "amount":amount,"height":height,"txid":txid,"days":days,"buyin":buyin,
            #         "sellout":sellout,"profit":profit}
            # response = requests.post(url=url, data=json.dumps(data), headers=headers)
            sql_insert = "INSERT INTO `rt_bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            # print(datetime, txid, vout, voutalias, amount, height)
            cursor.execute(sql_insert,
                           (datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit))
        self.conn.commit()

    '''
    # block check --- big amount tx
    def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `bigamounttx`"
            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"
            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)

            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()

    def update_to_bigamounttx(self, datetime, txid, amount, height):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            # print(sql_insert)
            # print(datetime, txid, amount, height)
            cursor.execute(sql_insert, (datetime, amount, height, txid))
            self.conn.commit()

    # block check --- per block amount
    def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `blockamount`"

            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
                if amount > 0:
                    sql_query = sql_query + " AND amount > " + str(amount)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)
                    if amount > 0:
                        sql_query = sql_query + " AND amount > " + str(amount)
                else:
                    if amount > 0:
                        # note: the original was missing the leading space before WHERE
                        sql_query = sql_query + " WHERE amount > " + str(amount)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
                    if amount > 0:
                        sql_query = sql_query + " AND amount > " + str(amount)
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                    if amount > 0:
                        sql_query = sql_query + " AND amount > " + str(amount)

            sql_query = sql_query + " order by `unixdt` desc"

            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)

            cursor.execute(sql_query)
            return cursor.fetchall()

    def update_to_blockamount(self, datetime, blockid, amount, height):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            # print(sql_insert)
            # print(datetime, blockid, amount, height)
            cursor.execute(sql_insert, (datetime, amount, height, blockid))
            self.conn.commit()

    def delete_node_data(self, config):
        with self.conn.cursor() as cursor:
            sql_query = "DELETE FROM `blockamount`"
            cursor.execute(sql_query)
            sql_query = "DELETE FROM `bigamountvout`"
            cursor.execute(sql_query)
            sql_query = "DELETE FROM `bigamounttx`"
            cursor.execute(sql_query)
            self.conn.commit()

    def update_realize_cap(self, dayutc, last_rv):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dayutc, last_rv))
            self.conn.commit()

    # daily realized cap
    def query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `dailyrcap`"

            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"

            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)
            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()

    def update_daily_addr(self, dayutc, last_add_cnt):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dayutc, last_add_cnt))
            self.conn.commit()

    def delete_daily_addr(self, config):
        with self.conn.cursor() as cursor:
            sql_query = "DELETE FROM `dailyaddradd`"
            cursor.execute(sql_query)
            self.conn.commit()

    def delete_daily_rv(self, config):
        with self.conn.cursor() as cursor:
            sql_query = "DELETE FROM `dailyrcap`"
            cursor.execute(sql_query)
            self.conn.commit()
    '''

    def __del__(self):
        self.conn.close()
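
# A minimal lifecycle sketch for DbIf (illustrative values only, not real chain
# data; the connection is closed by __del__ when the object is garbage-collected):
#
#   dbif = DbIf()
#   dbif.update_to_bigamountvout(1700000000, "e3b0c4...", "bc1qexample", 0,
#                                "pubkeyhash", 12.5, 800000, 30, 25000.0,
#                                43000.0, 18000.0)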
613
lyq/btc24h_redis_if.py
Normal file
@@ -0,0 +1,613 @@
import time

from walrus import *
from loguru import logger


class RedisIf:
    def __init__(self, host="127.0.0.1", port=6379, password="", db=0):
        # note: walrus.Database subclasses redis.Redis, so the password can be
        # forwarded; the original accepted it but silently dropped it
        self.db = Database(host=host, port=port, password=password if password else None, db=db)

        self.zbalance = self.db.ZSet("rt_balancev2e")
        '''
        # realized cap progress
        self.rv = self.db.Hash("rv")
        # address and balance progress
        self.addr = self.db.Hash("addr")
        # block volume progress
        self.bv = self.db.Hash("bv")
        # daily volume progress
        self.dv = self.db.Hash("dv")
        '''
        '''
        # stat tx progress
        self.tx = self.db.Hash("tx")

        # ETH daily contract progress
        self.eth_dc = self.db.Hash("ethdc")

        # btc stats fee
        self.btc_stats = self.db.Hash("btcstats")

        # btc stats volume
        self.btc_volume = self.db.Hash("btcvolume")

        # btc stats cdd
        self.btc_cdd = self.db.Hash("btccdd")

        # btc stats cdd days
        self.btc_cdd_days = self.db.Hash("btccdddays")
        '''
        self.btc_block_time = self.db.Hash("rt_btcblocktimev2e")
        '''
        self.btc_sopr = self.db.Hash("btcsopr")
        '''
        self.btc_data = self.db.Hash("rt_btc_datav2e")

        self.active_address = self.db.Set("rt_active_addressv2e")
        self.send_address = self.db.Set("rt_send_addressv2e")
        self.receive_address = self.db.Set("rt_receive_addressv2e")

    def get_btc_data(self, key):
        value = None
        if self.btc_data[key] is not None:
            value = self.btc_data[key]
        return value

    def set_btc_data(self, key, value):
        self.btc_data[key] = value

    def reset_btc_data(self):
        self.btc_data.clear()
        self.zbalance.clear()
        # self.btc_block_time.clear()

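    # A minimal usage sketch (key name and value are illustrative assumptions;
    # walrus Hash reads come back as raw bytes):
    #
    #   rdb = RedisIf()
    #   rdb.set_btc_data("last_price", 43000.0)
    #   raw = rdb.get_btc_data("last_price")        # e.g. b'43000.0'
    #   price = float(raw.decode("utf-8")) if raw is not None else None
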
    '''
    def get_last_btc_sopr(self):
        last_sopr_buy = None
        last_asopr_buy = None
        last_easopr_buy = None
        last_lth_sopr_buy = None
        last_sth_sopr_buy = None
        last_asol = None
        last_eaasol = None

        if self.btc_sopr["last_asol"] is not None:
            last_asol = self.btc_sopr["last_asol"]
            # last_asol = float(self.btc_sopr["last_asol"].decode("utf-8"))
        if self.btc_sopr["last_eaasol"] is not None:
            last_eaasol = self.btc_sopr["last_eaasol"]
            # last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8"))

        if self.btc_sopr["last_sopr_buy"] is not None:
            last_sopr_buy = self.btc_sopr["last_sopr_buy"]
            # last_sopr_buy = float(self.btc_sopr["last_sopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_asopr_buy"] is not None:
            last_asopr_buy = self.btc_sopr["last_asopr_buy"]
            # last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_easopr_buy"] is not None:
            last_easopr_buy = self.btc_sopr["last_easopr_buy"]
            # last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_lth_sopr_buy"] is not None:
            last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"]
            # last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_sth_sopr_buy"] is not None:
            last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"]
            # last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8"))

        last_sopr_sell = None
        last_asopr_sell = None
        last_easopr_sell = None
        last_lth_sopr_sell = None
        last_sth_sopr_sell = None
        if self.btc_sopr["last_sopr_sell"] is not None:
            last_sopr_sell = self.btc_sopr["last_sopr_sell"]
            # last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8"))
        if self.btc_sopr["last_asopr_sell"] is not None:
            last_asopr_sell = self.btc_sopr["last_asopr_sell"]
            # last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8"))
        if self.btc_sopr["last_easopr_sell"] is not None:
            last_easopr_sell = self.btc_sopr["last_easopr_sell"]
            # last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8"))
        if self.btc_sopr["last_lth_sopr_sell"] is not None:
            last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"]
            # last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8"))
        if self.btc_sopr["last_sth_sopr_sell"] is not None:
            last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"]
            # last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8"))

        return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell

    def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell):
        self.btc_sopr["last_asol"] = last_asol
        self.btc_sopr["last_eaasol"] = last_eaasol

        self.btc_sopr["last_sopr_buy"] = last_sopr_buy
        self.btc_sopr["last_asopr_buy"] = last_asopr_buy
        self.btc_sopr["last_easopr_buy"] = last_easopr_buy
        self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy
        self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy
        self.btc_sopr["last_sopr_sell"] = last_sopr_sell
        self.btc_sopr["last_asopr_sell"] = last_asopr_sell
        self.btc_sopr["last_easopr_sell"] = last_easopr_sell
        self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell
        self.btc_sopr["last_sth_sopr_sell"] = last_sth_sopr_sell
    '''

    def get_block_time(self, height):
        block_time = None
        height_str = str(height)
        if self.btc_block_time[height_str] is not None:
            block_time = int(self.btc_block_time[height_str].decode("utf-8"))
        return block_time

    def set_block_time(self, height, ts):
        height_str = str(height)
        self.btc_block_time[height_str] = ts

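    # A minimal usage sketch of the block-time cache (height and timestamp are
    # illustrative values, not real chain data):
    #
    #   rdb = RedisIf()
    #   rdb.set_block_time(800000, 1690168629)
    #   assert rdb.get_block_time(800000) == 1690168629
    #   assert rdb.get_block_time(123456789) is None   # unknown height
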
    '''
    def get_last_btc_cdd_days(self):
        last_cdd = None
        last_acdd = None
        last_eacdd = None
        last_cdd_day1 = None
        last_cdd_day7 = None
        last_cdd_day30 = None
        last_cdd_day60 = None
        last_cdd_day90 = None
        last_cdd_day180 = None
        last_cdd_day365 = None
        last_cdd_day730 = None

        last_date = None
        last_height = None
        last_date_str = None

        if self.btc_cdd["last_cdd"] is not None:
            last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
        if self.btc_cdd["last_acdd"] is not None:
            last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8"))
        if self.btc_cdd["last_eacdd"] is not None:
            last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day1"] is not None:
            last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day7"] is not None:
            last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day30"] is not None:
            last_cdd_day30 = float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day60"] is not None:
            last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day90"] is not None:
            last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day180"] is not None:
            last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day365"] is not None:
            last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day730"] is not None:
            last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8"))
        if self.btc_cdd_days["last_date"] is not None:
            last_date = int(self.btc_cdd_days["last_date"].decode("utf-8"))
        if self.btc_cdd_days["last_height"] is not None:
            last_height = int(self.btc_cdd_days["last_height"].decode("utf-8"))
        if self.btc_cdd_days["last_date_str"] is not None:
            last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8")
        return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str

    def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr):
        self.btc_cdd["last_cdd"] = cdd
        self.btc_cdd["last_acdd"] = acdd
        self.btc_cdd["last_eacdd"] = eacdd
        self.btc_cdd_days["last_cdd_day1"] = day1
        self.btc_cdd_days["last_cdd_day7"] = day7
        self.btc_cdd_days["last_cdd_day30"] = day30
        self.btc_cdd_days["last_cdd_day60"] = day60
        self.btc_cdd_days["last_cdd_day90"] = day90
        self.btc_cdd_days["last_cdd_day180"] = day180
        self.btc_cdd_days["last_cdd_day365"] = day365
        self.btc_cdd_days["last_cdd_day730"] = day730
        self.btc_cdd_days["last_date"] = dt
        self.btc_cdd_days["last_height"] = height
        self.btc_cdd_days["last_date_str"] = dtstr
    '''

    '''
    def get_last_btc_cdd(self):
        last_cdd = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.btc_cdd["last_cdd"] is not None:
            last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
        if self.btc_cdd["last_date"] is not None:
            last_date = int(self.btc_cdd["last_date"].decode("utf-8"))
        if self.btc_cdd["last_height"] is not None:
            last_height = int(self.btc_cdd["last_height"].decode("utf-8"))
        if self.btc_cdd["last_date_str"] is not None:
            last_date_str = self.btc_cdd["last_date_str"].decode("utf-8")
        return last_cdd, last_date, last_height, last_date_str

    def set_last_btc_cdd(self, cdd, dt, height, dtstr):
        self.btc_cdd["last_cdd"] = cdd
        self.btc_cdd["last_date"] = dt
        self.btc_cdd["last_height"] = height
        self.btc_cdd["last_date_str"] = dtstr

    def get_last_btc_volume(self):
        last_volume = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.btc_volume["last_volume"] is not None:
            last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
        if self.btc_volume["last_date"] is not None:
            last_date = int(self.btc_volume["last_date"].decode("utf-8"))
        if self.btc_volume["last_height"] is not None:
            last_height = int(self.btc_volume["last_height"].decode("utf-8"))
        if self.btc_volume["last_date_str"] is not None:
            last_date_str = self.btc_volume["last_date_str"].decode("utf-8")
        return last_volume, last_date, last_height, last_date_str

    def set_last_btc_volume(self, volume, dt, height, dtstr):
        self.btc_volume["last_volume"] = volume
        self.btc_volume["last_date"] = dt
        self.btc_volume["last_height"] = height
        self.btc_volume["last_date_str"] = dtstr
    '''

    '''
    def get_last_btc_stats(self):
        last_fees = None
        last_date = None
        last_height = None
        last_date_str = None
        last_volume = None
        if self.btc_stats["last_fees"] is not None:
            last_fees = float(self.btc_stats["last_fees"].decode("utf-8"))
        if self.btc_volume["last_volume"] is not None:
            last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
        if self.btc_stats["last_date"] is not None:
            last_date = int(self.btc_stats["last_date"].decode("utf-8"))
        if self.btc_stats["last_height"] is not None:
            last_height = int(self.btc_stats["last_height"].decode("utf-8"))
        if self.btc_stats["last_date_str"] is not None:
            last_date_str = self.btc_stats["last_date_str"].decode("utf-8")
        return last_fees, last_volume, last_date, last_height, last_date_str

    def set_last_btc_stats(self, fees, volume, dt, height, dtstr):
        self.btc_stats["last_fees"] = fees
        self.btc_volume["last_volume"] = volume
        self.btc_stats["last_date"] = dt
        self.btc_stats["last_height"] = height
        self.btc_stats["last_date_str"] = dtstr


    def get_last_eth_dc(self):
        last_date = None
        last_height = None
        last_date_str = None
        if self.eth_dc["last_date"] is not None:
            last_date = int(self.eth_dc["last_date"].decode("utf-8"))
        if self.eth_dc["last_height"] is not None:
            last_height = int(self.eth_dc["last_height"].decode("utf-8"))
        if self.eth_dc["last_date_str"] is not None:
            last_date_str = self.eth_dc["last_date_str"].decode("utf-8")
        return last_date, last_height, last_date_str

    def set_last_eth_dc(self, dt, height, dtstr):
        self.eth_dc["last_date"] = dt
        self.eth_dc["last_height"] = height
        self.eth_dc["last_date_str"] = dtstr
    '''

    '''
    def get_last_dv(self):
        last_dv = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.dv["last_dv"] is not None:
            last_dv = float(self.dv["last_dv"].decode("utf-8"))
        if self.dv["last_date"] is not None:
            last_date = int(self.dv["last_date"].decode("utf-8"))
        if self.dv["last_height"] is not None:
            last_height = int(self.dv["last_height"].decode("utf-8"))
        if self.dv["last_date_str"] is not None:
            last_date_str = self.dv["last_date_str"].decode("utf-8")
        return last_dv, last_date, last_height, last_date_str

    def set_last_dv(self, dv, dt, height, dtstr):
        self.dv["last_dv"] = dv
        self.dv["last_date"] = dt
        self.dv["last_height"] = height
        self.dv["last_date_str"] = dtstr

    def get_last_bv(self):
        last_height = None
        if self.bv["last_height"] is not None:
            last_height = int(self.bv["last_height"].decode("utf-8"))
        return last_height

    def set_last_bv(self, height):
        self.bv["last_height"] = height
    '''

'''
|
||||||
|
def get_last_ind(self):
|
||||||
|
last_csupply = None
|
||||||
|
last_mintusd = None
|
||||||
|
last_sumcsupply = None
|
||||||
|
last_sumcdd = None
|
||||||
|
last_sumeacdd = None
|
||||||
|
last_rprofit = None
|
||||||
|
last_rloss = None
|
||||||
|
last_marketcap = None
|
||||||
|
last_rcap = None
|
||||||
|
last_mvrv = None
|
||||||
|
|
||||||
|
last_earcap = None
|
||||||
|
if self.tx["last_csupply"] is not None:
|
||||||
|
last_csupply = float(self.tx["last_csupply"].decode("utf-8"))
|
||||||
|
if self.tx["last_mintusd"] is not None:
|
||||||
|
last_mintusd = float(self.tx["last_mintusd"].decode("utf-8"))
|
||||||
|
if self.tx["last_sumcsupply"] is not None:
|
||||||
|
last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8"))
|
||||||
|
if self.tx["last_sumcdd"] is not None:
|
||||||
|
last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8"))
|
||||||
|
if self.tx["last_sumeacdd"] is not None:
|
||||||
|
last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8"))
|
||||||
|
if self.tx["last_rprofit"] is not None:
|
||||||
|
last_rprofit = float(self.tx["last_rprofit"].decode("utf-8"))
|
||||||
|
if self.tx["last_rloss"] is not None:
|
||||||
|
last_rloss = float(self.tx["last_rloss"].decode("utf-8"))
|
||||||
|
if self.tx["last_marketcap"] is not None:
|
||||||
|
last_marketcap = float(self.tx["last_marketcap"].decode("utf-8"))
|
||||||
|
if self.tx["last_rcap"] is not None:
|
||||||
|
last_rcap = float(self.tx["last_rcap"].decode("utf-8"))
|
||||||
|
if self.tx["last_earcap"] is not None:
|
||||||
|
last_earcap = float(self.tx["last_earcap"].decode("utf-8"))
|
||||||
|
if self.tx["last_mvrv"] is not None:
|
||||||
|
last_mvrv = float(self.tx["last_mvrv"].decode("utf-8"))
|
||||||
|
|
||||||
|
|
||||||
|
return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv
|
||||||
|
|
||||||
|
def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv):
|
||||||
|
self.tx["last_csupply"] = last_csupply
|
||||||
|
self.tx["last_mintusd"] = last_mintusd
|
||||||
|
self.tx["last_sumcsupply"] = last_sumcsupply
|
||||||
|
self.tx["last_sumcdd"] = last_sumcdd
|
||||||
|
self.tx["last_sumeacdd"] = last_sumeacdd
|
||||||
|
self.tx["last_rprofit"] = last_rprofit
|
||||||
|
self.tx["last_rloss"] = last_rloss
|
        self.tx["last_marketcap"] = last_marketcap
        self.tx["last_rcap"] = last_rcap
        self.tx["last_earcap"] = last_earcap
        self.tx["last_mvrv"] = last_mvrv

    def get_last_tx(self):
        last_profit = None
        last_fees = None
        last_newaddr_cnt = None
        last_newaddr_vol = None
        last_active_addr_cnt = None
        last_tx_addr_cnt = None
        last_rx_addr_cnt = None
        last_vol_change = None
        last_vol = None
        last_avol = None
        last_date = None
        last_height = None
        last_date_str = None
        last_txs = None
        last_eatxs = None
        # Fixed: the original checked "last_profit_rate" here, but set_last_tx()
        # stores this value under "last_profit".
        if self.tx["last_profit"] is not None:
            last_profit = int(self.tx["last_profit"].decode("utf-8"))
        if self.tx["last_fees"] is not None:
            last_fees = int(self.tx["last_fees"].decode("utf-8"))
        if self.tx["last_txs"] is not None:
            last_txs = int(self.tx["last_txs"].decode("utf-8"))
        if self.tx["last_eatxs"] is not None:
            last_eatxs = int(self.tx["last_eatxs"].decode("utf-8"))
        if self.tx["last_newaddr_cnt"] is not None:
            last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8"))
        if self.tx["last_newaddr_vol"] is not None:
            last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8"))
        if self.tx["last_active_addr_cnt"] is not None:
            last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8"))
        if self.tx["last_tx_addr_cnt"] is not None:
            last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8"))
        if self.tx["last_rx_addr_cnt"] is not None:
            last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8"))
        if self.tx["last_vol_change"] is not None:
            last_vol_change = float(self.tx["last_vol_change"].decode("utf-8"))
        if self.tx["last_vol"] is not None:
            last_vol = float(self.tx["last_vol"].decode("utf-8"))
        if self.tx["last_avol"] is not None:
            last_avol = float(self.tx["last_avol"].decode("utf-8"))
        if self.tx["last_date"] is not None:
            last_date = int(self.tx["last_date"].decode("utf-8"))
        if self.tx["last_height"] is not None:
            last_height = int(self.tx["last_height"].decode("utf-8"))
        if self.tx["last_date_str"] is not None:
            last_date_str = self.tx["last_date_str"].decode("utf-8")
        return (last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt,
                last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
                last_rx_addr_cnt, last_vol_change, last_vol, last_avol,
                last_date, last_height, last_date_str)

    def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt,
                    newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt,
                    vol_change, vol, avol, dt, height, dtstr):
        self.tx["last_profit"] = last_profit
        self.tx["last_fees"] = last_fees
        self.tx["last_txs"] = last_txs
        self.tx["last_eatxs"] = last_eatxs
        self.tx["last_newaddr_cnt"] = newaddr_cnt
        self.tx["last_newaddr_vol"] = newaddr_vol
        self.tx["last_active_addr_cnt"] = active_addr_cnt
        self.tx["last_tx_addr_cnt"] = tx_addr_cnt
        self.tx["last_rx_addr_cnt"] = rx_addr_cnt
        self.tx["last_vol_change"] = vol_change
        self.tx["last_vol"] = vol
        self.tx["last_avol"] = avol
        self.tx["last_date"] = dt
        self.tx["last_height"] = height
        self.tx["last_date_str"] = dtstr
    '''
    '''
    def get_last_addr(self):
        last_daily_cnt = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.addr["last_daily_cnt"] is not None:
            last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8"))
        if self.addr["last_date"] is not None:
            last_date = int(self.addr["last_date"].decode("utf-8"))
        if self.addr["last_height"] is not None:
            last_height = int(self.addr["last_height"].decode("utf-8"))
        if self.addr["last_date_str"] is not None:
            last_date_str = self.addr["last_date_str"].decode("utf-8")
        return last_daily_cnt, last_date, last_height, last_date_str

    def set_last_addr(self, daily_cnt, dt, height, dtstr):
        self.addr["last_daily_cnt"] = daily_cnt
        self.addr["last_date"] = dt
        self.addr["last_height"] = height
        self.addr["last_date_str"] = dtstr
    '''

    def is_active_address(self, address):
        result = address in self.active_address
        if not result:
            self.active_address.add(address)
        return result

    def reset_active_address(self):
        self.active_address.clear()

    def get_active_address_cnt(self):
        return len(self.active_address)

    def is_send_address(self, address):
        result = address in self.send_address
        if not result:
            self.send_address.add(address)
        return result

    def reset_send_address(self):
        self.send_address.clear()

    def get_send_address_cnt(self):
        return len(self.send_address)

    def is_receive_address(self, address):
        result = address in self.receive_address
        if not result:
            self.receive_address.add(address)
        return result

    def reset_receive_address(self):
        self.receive_address.clear()

    def get_receive_address_cnt(self):
        return len(self.receive_address)

    def save_addr(self, address, balance):
        new_balance = balance
        if address in self.zbalance:
            new_balance = self.zbalance.score(address) + balance
            # print("update", self.zbalance.score(address), balance, new_balance)
            # time.sleep(10)
        if new_balance < 0.01:
            del self.zbalance[address]
            # print("check exist", address, address in self.zbalance)
            # time.sleep(10)
            return
        self.zbalance.add({address: new_balance})
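
save_addr() treats self.zbalance as a score-keyed sorted set: score(), add({member: score}), membership tests, deletion, and range_by_score(). A minimal in-memory stand-in with that interface, an assumption for illustration only (the real store presumably returns bytes members, which is why query_from_address() below decodes them):

class InMemoryZSet:
    """Illustrative stand-in for the zbalance sorted-set interface used above."""

    def __init__(self):
        self._scores = {}

    def __contains__(self, member):
        return member in self._scores

    def __delitem__(self, member):
        self._scores.pop(member, None)   # deleting a missing member is a no-op

    def __len__(self):
        return len(self._scores)

    def score(self, member):
        return self._scores.get(member)

    def add(self, mapping):
        self._scores.update(mapping)

    def keys(self):
        return list(self._scores)

    def clear(self):
        self._scores.clear()

    def range_by_score(self, low, high, start, stop, with_scores, reverse):
        items = sorted(
            ((m, s) for m, s in self._scores.items() if low <= s <= high),
            key=lambda kv: kv[1],
            reverse=reverse,
        )
        return items if stop == -1 else items[start:stop]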

    '''
    def delete_addr(self, config):
        self.addr.clear()
        self.zbalance.clear()
    '''

    def is_in_addr(self, address):
        return address in self.zbalance

    def get_addr_cnt(self):
        return len(self.zbalance)

    '''
    def delete_rv(self, config):
        self.rv.clear()

    def get_last_rv(self):
        last_rv = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.rv["last_rv"] is not None:
            last_rv = float(self.rv["last_rv"].decode("utf-8"))
        if self.rv["last_date"] is not None:
            last_date = int(self.rv["last_date"].decode("utf-8"))
        if self.rv["last_height"] is not None:
            last_height = int(self.rv["last_height"].decode("utf-8"))
        if self.rv["last_date_str"] is not None:
            last_date_str = self.rv["last_date_str"].decode("utf-8")
        return last_rv, last_date, last_height, last_date_str

    def set_last_rv(self, rv, dt, height, dtstr):
        self.rv["last_rv"] = rv
        self.rv["last_date"] = dt
        self.rv["last_height"] = height
        self.rv["last_date_str"] = dtstr
    '''

    def get_all_address(self):
        return self.zbalance.keys()

    def delete_address_data(self, config):
        self.zbalance.clear()

    '''
    def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0):
        if len(address) > 0:
            results = []
            result = {}
            result["address"] = address
            balance = self.zbalance.score(address)
            print(balance)
            if balance is not None:
                result["balance"] = balance
                results.append(result)
            return results

        match_result = None
        if start_balance > 0:
            if end_balance > 0:
                match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False)
            else:
                match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False)
        else:
            if end_balance > 0:
                match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False)

        results = []
        if match_result is not None:
            #print(match_result)
            for addr, balance2 in match_result:
                address = addr.decode('utf-8')
                result = {}
                result["address"] = address
                result["balance"] = balance2
                results.append(result)
                if limit > 0 and len(results) >= limit:
                    break
        return results
    '''
1110
lyq/btc24h_stats.py
Normal file
File diff suppressed because it is too large
85
lyq/btc_price_fetcher.py
Normal file
@@ -0,0 +1,85 @@
import time
import requests
import pymysql
from datetime import datetime

# MySQL configuration
DB_CONFIG = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "btcdb",
    "port": 4423
}

# Current Unix timestamp
def get_current_timestamp():
    return int(time.time())

# BTC price from API 1 (example: Binance)
def get_binance_price():
    url = "https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT"
    resp = requests.get(url, timeout=5)
    resp.raise_for_status()
    data = resp.json()
    return float(data["price"])

# BTC price from API 2 (example: Coinbase)
def get_coinbase_price():
    url = "https://api.coinbase.com/v2/prices/spot?currency=USD"
    resp = requests.get(url, timeout=5)
    resp.raise_for_status()
    data = resp.json()
    return float(data["data"]["amount"])

# Update the price if the source already has a row, otherwise insert one
def upsert_price(source, price, timestamp):
    connection = pymysql.connect(**DB_CONFIG)
    try:
        with connection.cursor() as cursor:
            # Check whether this source already has a row
            sql_check = "SELECT id FROM btc_realtime_prices WHERE source = %s"
            cursor.execute(sql_check, (source,))
            result = cursor.fetchone()

            if result:
                # Row exists: update it
                sql_update = """
                    UPDATE btc_realtime_prices
                    SET price = %s, timestamp = %s
                    WHERE source = %s
                """
                cursor.execute(sql_update, (price, timestamp, source))
            else:
                # No row yet: insert one
                sql_insert = """
                    INSERT INTO btc_realtime_prices (timestamp, source, price)
                    VALUES (%s, %s, %s)
                """
                cursor.execute(sql_insert, (timestamp, source, price))
        connection.commit()
    finally:
        connection.close()
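
The SELECT-then-UPDATE/INSERT above takes two round trips and can race under concurrent writers. If btc_realtime_prices has a UNIQUE index on source (an assumption, the schema is not part of this commit), MySQL can do the same upsert in one statement:

def upsert_price_atomic(source, price, timestamp):
    # Single-statement upsert; assumes a UNIQUE index on `source`.
    sql = """
        INSERT INTO btc_realtime_prices (timestamp, source, price)
        VALUES (%s, %s, %s)
        ON DUPLICATE KEY UPDATE price = VALUES(price), timestamp = VALUES(timestamp)
    """
    connection = pymysql.connect(**DB_CONFIG)
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql, (timestamp, source, price))
        connection.commit()
    finally:
        connection.close()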
|
||||||
|
|
||||||
|
def main():
|
||||||
|
while True:
|
||||||
|
now_ts = get_current_timestamp()
|
||||||
|
|
||||||
|
try:
|
||||||
|
binance_price = get_binance_price()
|
||||||
|
print(f"Binance BTC Price: {binance_price}")
|
||||||
|
upsert_price("binance", binance_price, now_ts)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"获取Binance价格失败: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
coinbase_price = get_coinbase_price()
|
||||||
|
print(f"Coinbase BTC Price: {coinbase_price}")
|
||||||
|
upsert_price("coinbase", coinbase_price, now_ts)
|
||||||
|
except Exception as e:
|
||||||
|
print(f"获取Coinbase价格失败: {e}")
|
||||||
|
|
||||||
|
time.sleep(60) # 每分钟执行一次
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
141
lyq/btc_prices.py
Normal file
@@ -0,0 +1,141 @@
import time
import requests
import pymysql
import ujson
from datetime import datetime, timedelta

# MySQL connection settings
DB_CONFIG = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "btcdb",
    "port": 4423
}

# Nasdaq API key
NASDAQ_API_KEY = "FZqXog4sR-b7cYnXcRVV"

# Fetch the (timestamp, source) pairs that are already stored
def get_existing_timestamps():
    connection = pymysql.connect(**DB_CONFIG)
    existing_timestamps = set()
    try:
        with connection.cursor() as cursor:
            cursor.execute("SELECT timestamp, source FROM btc_prices")
            for row in cursor.fetchall():
                existing_timestamps.add((row[0], row[1]))
    finally:
        connection.close()
    return existing_timestamps

# Helper: snap any timestamp to 08:00 Beijing time on that Beijing-calendar day
def adjust_to_beijing_08am(timestamp):
    dt = datetime.utcfromtimestamp(timestamp) + timedelta(hours=8)
    dt_08am = datetime(dt.year, dt.month, dt.day, 8, 0, 0)
    # Convert back to UTC for storage. Note: .timestamp() on a naive datetime
    # uses the host's local timezone, so this is exact only on a UTC host.
    return int((dt_08am - timedelta(hours=8)).timestamp())
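
A quick worked example of the snapping logic, under the same assumption the function itself makes (a host clock set to UTC); the sample values are hypothetical:

# 1705287600 is 2024-01-15 03:00:00 UTC, i.e. 11:00 Beijing time.
# Snapped to 08:00 Beijing on that day and converted back to UTC,
# it becomes 2024-01-15 00:00:00 UTC.
ts = 1705287600
assert adjust_to_beijing_08am(ts) == 1705276800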
|
||||||
|
|
||||||
|
# Nasdaq 获取历史 BTC 美元价格
|
||||||
|
def get_nasdaq_price():
|
||||||
|
prices = {}
|
||||||
|
url = f'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU&api_key={NASDAQ_API_KEY}'
|
||||||
|
response = requests.get(url)
|
||||||
|
if response.status_code == 200:
|
||||||
|
data = ujson.loads(response.content)
|
||||||
|
if "datatable" in data and "data" in data["datatable"]:
|
||||||
|
for item in data["datatable"]["data"]:
|
||||||
|
daystr = item[1]
|
||||||
|
price = item[2]
|
||||||
|
dt = datetime.strptime(daystr, "%Y-%m-%d")
|
||||||
|
dt_08am_bj = datetime(dt.year, dt.month, dt.day, 8, 0, 0)
|
||||||
|
dt_08am_utc = dt_08am_bj - timedelta(hours=8)
|
||||||
|
prices[int(dt_08am_utc.timestamp())] = float(price)
|
||||||
|
print(f"Nasdaq 获取数据量: {len(prices)} 条")
|
||||||
|
return prices
|
||||||
|
|
||||||
|
# CryptoCompare 获取 BTC 历史每日收盘价(时间强制统一为北京时间 08:00)
|
||||||
|
def get_cryptocompare_price():
|
||||||
|
url = "https://min-api.cryptocompare.com/data/v2/histoday"
|
||||||
|
limit = 2000
|
||||||
|
to_ts = int(time.time())
|
||||||
|
prices = {}
|
||||||
|
while True:
|
||||||
|
params = {
|
||||||
|
"fsym": "BTC",
|
||||||
|
"tsym": "USD",
|
||||||
|
"limit": limit,
|
||||||
|
"toTs": to_ts
|
||||||
|
}
|
||||||
|
print(f"请求 CryptoCompare: {params}")
|
||||||
|
response = requests.get(url, params=params)
|
||||||
|
if response.status_code != 200:
|
||||||
|
print("请求失败:", response.status_code)
|
||||||
|
break
|
||||||
|
|
||||||
|
data = ujson.loads(response.content)
|
||||||
|
if data["Response"] != "Success":
|
||||||
|
print("API 返回错误:", data.get("Message"))
|
||||||
|
break
|
||||||
|
|
||||||
|
entries = data["Data"]["Data"]
|
||||||
|
if not entries:
|
||||||
|
break
|
||||||
|
|
||||||
|
for entry in entries:
|
||||||
|
raw_ts = entry["time"]
|
||||||
|
price = entry["close"]
|
||||||
|
adjusted_ts = adjust_to_beijing_08am(raw_ts)
|
||||||
|
prices[adjusted_ts] = price
|
||||||
|
|
||||||
|
earliest = entries[0]["time"]
|
||||||
|
if earliest <= 1279300000: # 大约2010年7月
|
||||||
|
break
|
||||||
|
|
||||||
|
to_ts = earliest - 1
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
print(f"CryptoCompare 获取数据量: {len(prices)} 条")
|
||||||
|
return prices
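
The loop pages backwards through history: each response's earliest bar becomes the exclusive upper bound of the next request (toTs = earliest - 1), so no bar is fetched twice. The same invariant in a generic form, purely illustrative:

def paginate_backwards(fetch_page, to_ts, floor_ts):
    """fetch_page(to_ts) -> ascending list of items with a 'time' key."""
    seen = {}
    while True:
        page = fetch_page(to_ts)
        if not page:
            break
        for item in page:
            seen[item["time"]] = item
        earliest = page[0]["time"]
        if earliest <= floor_ts:
            break
        to_ts = earliest - 1   # strictly before the earliest bar already seen
    return seen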
|
||||||
|
|
||||||
|
# 保存数据到数据库
|
||||||
|
def save_to_database(data, source):
|
||||||
|
existing_timestamps = get_existing_timestamps()
|
||||||
|
connection = pymysql.connect(**DB_CONFIG)
|
||||||
|
new_data_count = 0
|
||||||
|
try:
|
||||||
|
with connection.cursor() as cursor:
|
||||||
|
sql = """
|
||||||
|
INSERT INTO btc_prices (timestamp, price, source)
|
||||||
|
VALUES (%s, %s, %s)
|
||||||
|
"""
|
||||||
|
for timestamp, price in data.items():
|
||||||
|
if (timestamp, source) not in existing_timestamps:
|
||||||
|
try:
|
||||||
|
cursor.execute(sql, (timestamp, price, source))
|
||||||
|
new_data_count += 1
|
||||||
|
except pymysql.MySQLError as e:
|
||||||
|
print(f"插入错误: {e}")
|
||||||
|
continue
|
||||||
|
connection.commit()
|
||||||
|
print(f"成功存入 {new_data_count} 条新数据({source})")
|
||||||
|
finally:
|
||||||
|
connection.close()
|
||||||
|
|
||||||
|
# 定时任务
|
||||||
|
def fetch_and_store_data():
|
||||||
|
print("========== 开始获取比特币价格数据 ==========")
|
||||||
|
|
||||||
|
# Nasdaq
|
||||||
|
nasdaq_prices = get_nasdaq_price()
|
||||||
|
save_to_database(nasdaq_prices, "Nasdaq")
|
||||||
|
|
||||||
|
# CryptoCompare
|
||||||
|
cc_prices = get_cryptocompare_price()
|
||||||
|
save_to_database(cc_prices, "CryptoCompare")
|
||||||
|
|
||||||
|
print("========== 数据存储完成 ==========")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
while True:
|
||||||
|
fetch_and_store_data()
|
||||||
|
time.sleep(14400) # 每 4 小时执行一次
|
||||||
1219
lyq/btc_stats_qt.py
Normal file
File diff suppressed because it is too large
125
lyq/btc_update.py
Normal file
@@ -0,0 +1,125 @@
import pymysql
import json
import os
import time
from datetime import datetime, timedelta

# Database configuration
DB_CONFIG = {
    "host": "192.168.194.240",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "btcdb",
    "port": 4423,
    "connect_timeout": 60,
    "read_timeout": 60,
    "write_timeout": 60,
    "charset": "utf8mb4"
}

# Data file path
DATA_FILE = "btc_historical_price.py"

# Job interval in seconds (e.g. 3600 would be hourly; 28800 = every 8 hours)
INTERVAL = 28800


def get_new_prices(source, last_timestamp=None):
    """
    Fetch the latest prices for `source` from the database.
    Only rows stamped exactly 08:00:00 Beijing time have 8 hours subtracted
    before being written to the file.
    """
    conn = pymysql.connect(**DB_CONFIG)
    prices = {}
    try:
        with conn.cursor() as cursor:
            if last_timestamp:
                sql = """
                    SELECT timestamp, price
                    FROM btc_prices
                    WHERE source = %s AND timestamp > %s
                    ORDER BY timestamp
                """
                cursor.execute(sql, (source, last_timestamp))
            else:
                sql = """
                    SELECT timestamp, price
                    FROM btc_prices
                    WHERE source = %s
                    ORDER BY timestamp
                """
                cursor.execute(sql, (source,))
            rows = cursor.fetchall()
            for timestamp, price in rows:
                ts_int = int(timestamp)
                # Convert to Beijing time
                dt_beijing = datetime.utcfromtimestamp(ts_int) + timedelta(hours=8)
                # Rows at exactly 08:00:00 Beijing time are shifted back 8 hours
                if dt_beijing.hour == 8 and dt_beijing.minute == 0 and dt_beijing.second == 0:
                    ts_int -= 8 * 3600
                prices[str(ts_int)] = float(price)
    finally:
        conn.close()
    return prices


def load_existing_data():
    """Load previously saved price data."""
    if not os.path.exists(DATA_FILE):
        return {}, {}

    try:
        with open(DATA_FILE, "r", encoding="utf-8") as f:
            ns = {}
            exec(f.read(), ns)
        return ns.get("prices_temp", {}), ns.get("prices", {})
    except Exception:
        return {}, {}
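
Persisting state as an importable .py file and reading it back with exec() works, but it executes arbitrary code from disk and breaks on a half-written file. A JSON sidecar is a safer drop-in; a sketch, using a hypothetical btc_historical_price.json path:

JSON_FILE = "btc_historical_price.json"   # hypothetical location

def load_existing_data_json():
    """Same contract as load_existing_data(), backed by JSON instead of exec()."""
    if not os.path.exists(JSON_FILE):
        return {}, {}
    try:
        with open(JSON_FILE, "r", encoding="utf-8") as f:
            blob = json.load(f)
        return blob.get("prices_temp", {}), blob.get("prices", {})
    except (OSError, ValueError):
        return {}, {}

def save_prices_json(prices_temp, prices):
    tmp = JSON_FILE + ".tmp"
    with open(tmp, "w", encoding="utf-8") as f:
        json.dump({"prices_temp": prices_temp, "prices": prices}, f, indent=4)
    os.replace(tmp, JSON_FILE)            # atomic swap avoids half-written files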

def save_prices(prices_temp, prices):
    """Write price data to the file."""
    with open(DATA_FILE, "w", encoding="utf-8") as f:
        f.write("# Auto-generated BTC historical price data file\n")
        f.write(f"# Updated: {datetime.now()}\n\n")
        f.write("prices_temp = ")
        f.write(json.dumps(prices_temp, indent=4, ensure_ascii=False))
        f.write("\n\nprices = ")
        f.write(json.dumps(prices, indent=4, ensure_ascii=False))
        f.write("\n")


def get_last_timestamp(price_dict):
    """Return the largest timestamp currently in the dict."""
    if not price_dict:
        return None
    return max(int(ts) for ts in price_dict.keys())


def update_once():
    """Run one update pass."""
    prices_temp, prices = load_existing_data()
    last_nasdaq_ts = get_last_timestamp(prices_temp)
    last_crypto_ts = get_last_timestamp(prices)

    nasdaq_new = get_new_prices("Nasdaq", last_nasdaq_ts)
    crypto_new = get_new_prices("CryptoCompare", last_crypto_ts)

    prices_temp.update(nasdaq_new)
    prices.update(crypto_new)

    save_prices(prices_temp, prices)


def main():
    """Main loop."""
    while True:
        try:
            update_once()
        except Exception:
            pass  # swallow transient DB/network errors and retry next cycle
        time.sleep(INTERVAL)


if __name__ == "__main__":
    main()
1838
lyq/btc_utxos_lyq2.py
Normal file
File diff suppressed because it is too large
1838
lyq/btc_utxos_lyq3.py
Normal file
File diff suppressed because it is too large
51
lyq/btc_utxos_update_lyq3.py
Normal file
@@ -0,0 +1,51 @@
import time
import subprocess
from datetime import datetime, timedelta


def check_running_process():
    """Check whether an identical process is already running."""
    command = "ps -ef | grep 'python3 btc_utxos_lyq2.py' | grep -v grep"
    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
    output, _ = process.communicate()
    return bool(output)  # any output means a matching process exists
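
Grepping ps is racy (two launchers can pass the check at the same moment) and matches on substrings. An advisory file lock is a more robust single-instance guard on Linux; a sketch with a hypothetical lock-file path:

import fcntl

LOCK_PATH = "/tmp/btc_utxos_update.lock"   # hypothetical location

def acquire_single_instance_lock():
    """Return the lock file handle on success, or None if another instance holds it."""
    handle = open(LOCK_PATH, "w")
    try:
        fcntl.flock(handle, fcntl.LOCK_EX | fcntl.LOCK_NB)  # non-blocking exclusive lock
        return handle   # keep the handle alive for the process lifetime
    except BlockingIOError:
        handle.close()
        return None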

def run_script_for_date(target_date):
    """Run the per-day script for the given date."""
    command = f"python3 btc_utxos_lyq2.py {target_date}"
    result = subprocess.run(command, shell=True)
    if result.returncode != 0:
        raise RuntimeError(f"Script failed for date {target_date}")


def main():
    # Start from 2024-12-16 (the original comment said July 10, which no longer matched the code)
    start_date = datetime(2024, 12, 16)
    end_date = datetime.utcnow()  # today's date

    current_date = start_date
    # 2024-12-18 should be skipped. In the original this comparison sat outside
    # the loop, so it only ever applied to the start date; moving it inside
    # makes the skip take effect whenever the loop reaches that day.
    skip_date = datetime(2024, 12, 18)
    while current_date <= end_date:
        if current_date == skip_date:
            current_date += timedelta(days=1)
            continue

        target_date_str = current_date.strftime('%Y-%m-%d')

        # Wait if an identical process is already running
        if check_running_process():
            print(f"A matching process is already running; waiting before running the {target_date_str} task.")
            time.sleep(60)  # check again in 60 seconds
            continue

        # Run the script
        print(f"Starting the {target_date_str} task.")
        try:
            run_script_for_date(target_date_str)
            print(f"Finished the {target_date_str} task.")

            # Move on to the next day
            current_date += timedelta(days=1)
        except RuntimeError as e:
            print(f"Error occurred: {e}. Retrying {target_date_str}.")
            time.sleep(60)


if __name__ == "__main__":
    main()
184
lyq/check_order_lyq.py
Normal file
@@ -0,0 +1,184 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
from binance.spot import Spot
import time
import requests
import datetime
import pymysql
import math
import pymongo

g_spot_client = Spot()


class Pair:
    def __init__(self):
        # Per-instance depth-book state. The original declared these as class
        # attributes, which would have shared the bids/asks dicts between pairs.
        self.depth_u = 0
        self.depth_U = 0
        self.depth_ts = 0
        self.bids = {}
        self.asks = {}


g_btcusdt = None
g_ethusdt = None  # fixed: the original assigned g_btcusdt = None twice


def init_db():
    mc = pymongo.MongoClient("mongodb://127.0.0.1:27020/")
    mdb = mc["border2"]
    return mc, mdb


def get_depth(client, pair):
    new_pair = Pair()
    d = client.depth(pair, limit=5000)
    new_pair.bids = d["bids"]
    new_pair.asks = d["asks"]
    new_pair.depth_u = d["lastUpdateId"]
    print(pair, ": get_depth: init", new_pair.depth_u)
    #print(new_pair.bids)
    return new_pair


def dict2number(dict_in):
    # Convert [price, quantity] string pairs into {price_str: float_quantity}
    dict_out = {}
    #print("dict2number", dict_in)
    for id in dict_in:
        #print("dict2number", id)
        #price = (int(float(id[0])) / 100) * 100
        #price = float(id[0])
        quantity = float(id[1])
        #pricestr = str(price)
        dict_out[id[0]] = quantity
    return dict_out


def dict2save(mdb, pair, dict_in, ts):
    mdbc = mdb[pair]
    s_append = {}
    s_append["unixdt"] = int(ts / 1000)
    #cnt = 0
    for id in dict_in:
        # print(cnt, id)
        #if cnt >= 50:
        #    break
        # bids_append[id] = top_bids[id]
        s_append[id[0]] = id[1]
        #cnt += 1
    print("dict2save", s_append)
    mdbc.insert_one(s_append)


def classify_order(dict_in):
    # Bucket quantities into $100-wide price bands
    dict_out = {}
    for id in dict_in:
        price = int(int(float(id)) / 100) * 100
        pricestr = str(price)
        if pricestr in dict_out:
            dict_out[pricestr] = dict_out[pricestr] + dict_in[id]
        else:
            dict_out[pricestr] = dict_in[id]
    return dict_out


def stat_order(pair, bids_in, asks_in, ts, old_ts):
    print(pair, ": stat_order cmp", ts, old_ts)
    if ts - old_ts < 1000 * 60 * 5:   # snapshot at most once every 5 minutes
        return False
    bids = dict2number(bids_in)
    asks = dict2number(asks_in)

    bids_classify = classify_order(bids)
    asks_classify = classify_order(asks)
    print("bids_classify", bids_classify)
    top_bids = sorted(bids_classify.items(), key=lambda x: x[1], reverse=False)
    top_asks = sorted(asks_classify.items(), key=lambda x: x[1], reverse=False)
    print("top_bids", top_bids)
    mc, mdb = init_db()

    dict2save(mdb, pair + "_bids", top_bids, ts)
    dict2save(mdb, pair + "_asks", top_asks, ts)
    print(pair, ": stat_order OK at", ts)
    return True


def merge_order(dst, src):
    new_dst = []
    for dst_item in dst:
        found = False
        for src_item in src:
            #print("dst", dst_item, "src", src_item)
            if dst_item[0] == src_item[0]:
                new_dst.append(src_item)
                found = True
                break
        if found is False:
            #print("merge_order dst copy", dst_item)
            new_dst.append(dst_item)
    return new_dst
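
merge_order() is O(len(dst) * len(src)) and never drops price levels. In Binance's diff-depth stream a level whose quantity comes through as zero should be removed from the book; a dict-keyed sketch of that merge, illustrative rather than from the commit:

def merge_order_dict(dst, src):
    """Apply a depth diff: dst and src are [price_str, qty_str] pairs."""
    book = {price: qty for price, qty in dst}
    for price, qty in src:
        if float(qty) == 0.0:
            book.pop(price, None)   # zero quantity means the level is gone
        else:
            book[price] = qty
    return list(book.items())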

def handler_order(pair, pair_name, msg_in, client):
    ts = msg_in["E"]
    dU = msg_in["U"]
    du = msg_in["u"]
    need_reinit = False
    if pair is not None:
        if (dU == pair.depth_u + 1) or (
                (du > pair.depth_u) and (pair.depth_ts == 0) and (pair.depth_u != 0)):
            bids = msg_in["b"]
            asks = msg_in["a"]
            #print("merge_order dst", pair.bids)
            #print("merge_order src", bids)
            #print("handle", pair_name, ts, dU, du, pair.depth_u)
            pair.bids = merge_order(pair.bids, bids)
            pair.asks = merge_order(pair.asks, asks)
            pair.depth_U = dU
            pair.depth_u = du
            if stat_order(pair_name, pair.bids, pair.asks, ts, pair.depth_ts):
                pair.depth_ts = ts
                print(pair_name, ": append", du)
        else:
            if (dU != pair.depth_u + 1) and (pair.depth_u != 0):
                need_reinit = True
    else:
        pass
    if need_reinit:
        pair = get_depth(client, pair_name)
        print(pair_name, ": reinit", pair.depth_u, dU, pair.depth_ts)
    return pair


def order_handler(message):
    #print(message)
    global g_spot_client
    global g_btcusdt
    global g_ethusdt
    if message["stream"] == "btcusdt@depth":
        ddata = message["data"]
        if ddata["e"] == "depthUpdate":
            g_btcusdt = handler_order(g_btcusdt, "BTCUSDT", ddata, g_spot_client)
    elif message["stream"] == "ethusdt@depth":
        ddata = message["data"]
        if ddata["e"] == "depthUpdate":
            g_ethusdt = handler_order(g_ethusdt, "ETHUSDT", ddata, g_spot_client)
    else:
        pass


def check_order():
    global g_spot_client
    global g_btcusdt
    global g_ethusdt
    ws_client = WebsocketClient()
    ws_client.start()
    ws_client.instant_subscribe(
        stream=['btcusdt@depth', 'ethusdt@depth'],
        callback=order_handler,
    )
    g_btcusdt = get_depth(g_spot_client, "BTCUSDT")
    g_ethusdt = get_depth(g_spot_client, "ETHUSDT")


check_order()
146
lyq/check_zone_lyq.py
Normal file
@@ -0,0 +1,146 @@
# coding=utf-8
import ujson
#from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
from binance.spot import Spot
import time
import requests
import datetime
import pymysql
import math
#import pymongo

g_btcusdt_prices = {}
g_ethusdt_prices = {}


class ZoneDbIf:
    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
        print("init zone db success!")

    def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
        with self.conn.cursor() as cursor:
            print(dayutc, change_us, change_asia, change_eu)
            sql_insert = "REPLACE INTO btczonechange3 (unixdt, change_us, change_asia, change_eu"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            cursor.execute(sql_insert, (dayutc, change_us, change_asia, change_eu))
        self.conn.commit()


class EthZoneDbIf:
    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
        print("init zone db success!")

    def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
        with self.conn.cursor() as cursor:
            print(dayutc, change_us, change_asia, change_eu)
            sql_insert = "REPLACE INTO ethzonechange3 (unixdt, change_us, change_asia, change_eu"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            cursor.execute(sql_insert, (dayutc, change_us, change_asia, change_eu))
        self.conn.commit()


def get_history_price(spot_client, pair_name):
    result = spot_client.klines(pair_name, "1h", limit=1000)
    prices_open = {}
    prices_close = {}
    for price in result:
        prices_open[str(price[0])] = float(price[1])
        prices_close[str(price[0])] = float(price[4])
    open_out = sorted(prices_open.items(), reverse=True)
    close_out = sorted(prices_close.items(), reverse=True)
    return open_out, close_out, prices_open, prices_close


def get_last_price(spot_client, pair_name, cache_open, cache_close):
    result = spot_client.klines(pair_name, "1h", limit=1)
    for price in result:
        cache_open[str(price[0])] = float(price[1])
        cache_close[str(price[0])] = float(price[4])
    open_out = sorted(cache_open.items(), reverse=True)
    close_out = sorted(cache_close.items(), reverse=True)
    return open_out, close_out, cache_open, cache_close


def calc_zone(prices_open, price_close, zone_start, zone_end):
    # Walk newest-to-oldest hourly bars and, for roughly the last 30 days,
    # compute the price change of the session that opens at zone_start UTC
    # and closes at zone_end UTC.
    zone_total = 30 * 24
    zone_hours = 0
    zones = {}
    price_start = 0
    price_end = 0
    dt_start = None
    item_idx = 0
    for dt in prices_open:
        tobj = time.gmtime(int(dt[0]) / 1000)
        if tobj.tm_hour == zone_start:
            price_start = dt[1]
            dt_start = tobj
        if zone_hours == 0 and tobj.tm_hour < zone_end:
            zone_total = zone_total + tobj.tm_hour + 1
            close_list = price_close[item_idx]
            price_end = close_list[1]
        else:
            if tobj.tm_hour == zone_end:
                close_list = price_close[item_idx]
                price_end = close_list[1]
        if price_start > 0 and price_end > 0:
            #zones[dt_end] = (price_end-price_start)/price_start
            daystr = time.strftime("%d %b %Y", dt_start)
            dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
            zones[str(dayutc)] = price_end - price_start
            price_start = 0
            price_end = 0
        item_idx = item_idx + 1
        zone_hours = zone_hours + 1
        if zone_hours >= zone_total:
            break
    return zones
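
calc_zone() reduces each day to one number: the session's close minus its open. For the Asia window used in check_zone() below (zone_start=0, zone_end=12) that works out to, with hypothetical bars:

# Asia session for one UTC day: opens 00:00, closes 12:00.
open_00utc = 42000.0    # hourly bar open at 00:00 UTC (hypothetical)
close_12utc = 42350.0   # hourly bar close at 12:00 UTC (hypothetical)
asia_change = close_12utc - open_00utc
print(asia_change)      # 350.0, stored keyed by that day's midnight epoch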

def check_zone():
    dbif = ZoneDbIf()
    ethdbif = EthZoneDbIf()
    spot_client = Spot()
    prices_open, prices_close, cache_open, cache_close = get_history_price(spot_client, "BTCUSDT")
    prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_history_price(spot_client, "ETHUSDT")
    prev_tm = time.gmtime(time.time())
    print("update", prev_tm.tm_hour)
    while True:
        zone_asia = calc_zone(prices_open, prices_close, 0, 12)
        zone_eu = calc_zone(prices_open, prices_close, 6, 18)
        zone_us = calc_zone(prices_open, prices_close, 12, 0)
        zone_asia_eth = calc_zone(prices_open_eth, prices_close_eth, 0, 12)
        zone_eu_eth = calc_zone(prices_open_eth, prices_close_eth, 6, 18)
        zone_us_eth = calc_zone(prices_open_eth, prices_close_eth, 12, 0)
        #print(zone_asia)
        #print(zone_eu)
        #print(zone_us)
        for dt in zone_asia:
            change_us = 0
            change_eu = 0
            if dt in zone_us:
                change_us = zone_us[dt]
            if dt in zone_eu:
                change_eu = zone_eu[dt]
            dbif.save_zone_change(dt, change_us, zone_asia[dt], change_eu)
            change_us_eth = 0
            change_eu_eth = 0
            if dt in zone_us_eth:
                change_us_eth = zone_us_eth[dt]
            if dt in zone_eu_eth:
                change_eu_eth = zone_eu_eth[dt]
            ethdbif.save_zone_change(dt, change_us_eth, zone_asia_eth[dt], change_eu_eth)
        while True:
            time.sleep(60)
            cur_tm = time.gmtime(time.time())
            if cur_tm.tm_hour != prev_tm.tm_hour:
                prev_tm = cur_tm
                time.sleep(60)
                prices_open, prices_close, cache_open, cache_close = get_last_price(spot_client, "BTCUSDT", cache_open, cache_close)
                prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_last_price(spot_client, "ETHUSDT", cache_open_eth,
                                                                                                    cache_close_eth)
                print("update", cur_tm.tm_hour)
                break


check_zone()
562
lyq/db_if_qt.py
Normal file
@@ -0,0 +1,562 @@
# coding=utf-8
import datetime

import pymysql
from loguru import logger
import time


class DbIf:
    def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)

    def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity):
        with self.conn.cursor() as cursor:
            print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity)
            sql_insert = "REPLACE INTO dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            cursor.execute(sql_insert, (
                dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity))
        self.conn.commit()  # added: the original never committed this REPLACE

    '''
    def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)"
            cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees))
    '''

    def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address,
                            send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
                            asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
                            day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
                            liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
                            nupl, vdd):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
            sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
            sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, nupl, vdd"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address,
                receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
                adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
                csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
                price, marketcap, rcap, earcap, mvrv,
                nupl, vdd))
        self.conn.commit()

    '''
    def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address,
                            send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
                            asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
                            day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
                            liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
                            lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
            sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
            sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl"
            sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address,
                receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
                adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
                csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
                price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv,
                nupl))
        self.conn.commit()
    '''
    '''
    # daily on-chain volume
    def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `dailyvolume`"

            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"

            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)
            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()
    '''

    # newaddrs
    '''
    def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt,
                          last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change,
                          last_vol):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (dayutc,))  # fixed: was a set literal {dayutc, }
            result = cursor.fetchone()
            # print(dt_utc)
            # print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        print("update")
                        sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, `volume`=%s, `txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
                        cursor.execute(sql_update, (
                            last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
                            last_rx_addr_cnt,
                            last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc))
                    else:
                        print("insert")
                        sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
                        # print(sql_insert)
                        cursor.execute(sql_insert, (
                            dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
                            last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees,
                            last_profit_rate))
        self.conn.commit()
    '''
    '''
    def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
            #print(sql_insert)
            #print(datetime, txid, vout, voutalias, amount, height)
            cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height))
        self.conn.commit()
    '''
    '''
    def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit,
                                days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid,
                                block_height):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            # print(datetime, txid, vout, voutalias, amount, height)
            cursor.execute(sql_insert, (
                dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest,
                days_largest, days_current, tx_buy_address, txid, block_height))
        self.conn.commit()
    '''
    '''
    def update_to_dailycdd(self, dt_utc, cdd):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, cdd))
        self.conn.commit()
    '''
    '''
    def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30,
                               day60, day90, day180, day365, day730):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180,
                day365,
                day730))
        self.conn.commit()
    '''
    '''
    def update_to_dailysopr(self, dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"  # fixed: the original had one extra %s for six columns
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr))
        self.conn.commit()
    '''
    '''
    def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit,
                       rloss, rplrate, price, marketcap, rcap, earcap, mvrv):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (
                dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss,
                rplrate,
                price, marketcap, rcap, earcap, mvrv))
        self.conn.commit()
    '''
    # daily volume
    '''
    def update_to_dailyvolume(self, dt_utc, volume):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, volume))
        self.conn.commit()
    '''
    '''with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (dt_utc,))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        print("update")
                        sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
                        cursor.execute(sql_update, (volume, dt_utc))
                    else:
                        print("insert")
                        sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
                        # print(sql_insert)
                        cursor.execute(sql_insert, (dt_utc, volume))
            self.conn.commit()'''
    '''
    def update_to_dailyfees(self, dt_utc, fees):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, fees))
        self.conn.commit()
    '''
    '''
    def import_to_dailyvolume2(self, dt_utc, volume):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, volume))
        self.conn.commit()

    def delete_dailyvolume_data(self, config):
        with self.conn.cursor() as cursor:
            sql_query = "DELETE FROM `dailyvolume`"
            cursor.execute(sql_query)
        self.conn.commit()

    # daily market cap
    def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `dailyprice`"

            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"

            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)
            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()

    # daily price
    def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)"
            #print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply))
        self.conn.commit()

    def update_to_dailyprice(self, dt_utc, price, volume, change):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            #print(sql_insert)
            cursor.execute(sql_insert, (dt_utc, price, volume, change))
        self.conn.commit()

    def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)"
            cursor.execute(sql_query, (dt_utc,))
            result = cursor.fetchone()
            #print(dt_utc)
            #print(result)
            if result is not None:
                if "COUNT(id)" in result:
                    if result["COUNT(id)"] > 0:
                        print("update")
                        sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
                        cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc))
                    else:
                        print("insert")
                        sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
                        # print(sql_insert)
                        cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply))
        self.conn.commit()

    def update_dailypricechange(self):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt"
            cursor.execute(sql_query)
            results = cursor.fetchall()
            prevprice = -1
            for result in results:
                if prevprice < 0:
                    prevprice = result["price"]
                else:
                    #print(result["unixdt"], result["price"], result["marketcap"])
                    try:
                        change = (result["price"] / prevprice - 1) * 100
                    except:
                        change = 0
                    #print(csupply)
                    datestr = result["unixdt"]
                    logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change))
                    sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s'
                    cursor.execute(sql_update, (str(change), result["unixdt"]))
                    prevprice = result["price"]
        self.conn.commit()

    def delete_dailyprice_data(self, config):
        with self.conn.cursor() as cursor:
            sql_query = "DELETE FROM `dailyprice`"
            cursor.execute(sql_query)
        self.conn.commit()

    def delete_failed_blockvolume(self, height):
        with self.conn.cursor() as cursor:
            sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s"
            cursor.execute(sql_insert, (height,))
            sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s"
            cursor.execute(sql_insert, (height,))
            sql_insert = "DELETE FROM `blockamount` WHERE height=%s"
            cursor.execute(sql_insert, (height,))
        self.conn.commit()

    # block check --- big amount for vout
    def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `bigamountvout`"
            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"
            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)

            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()
    '''

    def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout,
                                profit):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            # print(datetime, txid, vout, voutalias, amount, height)
            cursor.execute(sql_insert,
                           (datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit))
        self.conn.commit()

    '''
    # block check --- big amount tx
    def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `bigamounttx`"
            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"

            sql_query = sql_query + " order by `unixdt` desc"
            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)

            print(sql_query)
            cursor.execute(sql_query)
            return cursor.fetchall()

    def update_to_bigamounttx(self, datetime, txid, amount, height):
        with self.conn.cursor() as cursor:
            sql_insert = "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
            #print(sql_insert)
            #print(datetime, txid, amount, height)
            cursor.execute(sql_insert, (datetime, amount, height, txid))
        self.conn.commit()

    # block check --- per block amount
    def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0):
        with self.conn.cursor() as cursor:
            sql_query = "SELECT * from `blockamount`"

            if start_id > 0:
                sql_query = sql_query + " WHERE id > " + str(start_id)
                if end_id > 0:
                    sql_query = sql_query + " AND id < " + str(end_id)
                if amount > 0:
                    sql_query = sql_query + " AND amount > " + str(amount)
            else:
                if end_id > 0:
                    sql_query = sql_query + " WHERE id < " + str(end_id)
                    if amount > 0:
                        sql_query = sql_query + " AND amount > " + str(amount)
                else:
                    if amount > 0:
                        sql_query = sql_query + " WHERE amount > " + str(amount)  # fixed: missing leading space before WHERE

            if len(start_time) > 0:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                else:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
                    if amount > 0:
                        sql_query = sql_query + " AND amount > " + str(amount)
            else:
                if len(end_time) > 0:
                    sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
                        UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
                    if amount > 0:
                        sql_query = sql_query + " AND amount > " + str(amount)

            sql_query = sql_query + " order by `unixdt` desc"

            if limit > 0:
                sql_query = sql_query + " LIMIT " + str(limit)

            cursor.execute(sql_query)
            return cursor.fetchall()
|
||||||
|
|
||||||
|
def update_to_blockamount(self, datetime, blockid, amount, height):
|
||||||
|
with self.conn.cursor() as cursor:
|
||||||
|
sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
|
||||||
|
#print(sql_insert)
|
||||||
|
#print(datetime, blockid, amount, height)
|
||||||
|
cursor.execute(sql_insert, (datetime, amount, height, blockid))
|
||||||
|
self.conn.commit()
|
||||||
|
|
||||||
|
def delete_node_data(self, config):
|
||||||
|
with self.conn.cursor() as cursor:
|
||||||
|
sql_query = "DELETE FROM `blockamount`"
|
||||||
|
cursor.execute(sql_query)
|
||||||
|
sql_query = "DELETE FROM `bigamountvout`"
|
||||||
|
cursor.execute(sql_query)
|
||||||
|
sql_query = "DELETE FROM `bigamounttx`"
|
||||||
|
cursor.execute(sql_query)
|
||||||
|
self.conn.commit()
|
||||||
|
|
||||||
|
def update_realize_cap(self, dayutc, last_rv):
|
||||||
|
with self.conn.cursor() as cursor:
|
||||||
|
sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)"
|
||||||
|
#print(sql_insert)
|
||||||
|
#print(datetime, blockid, amount, height)
|
||||||
|
cursor.execute(sql_insert, (dayutc, last_rv))
|
||||||
|
self.conn.commit()
|
||||||
|
|
||||||
|
# daily realize cap
|
||||||
|
def query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
|
||||||
|
with self.conn.cursor() as cursor:
|
||||||
|
sql_query = "SELECT * from `dailyrcap`"
|
||||||
|
|
||||||
|
if start_id > 0:
|
||||||
|
sql_query = sql_query + " WHERE id > " + str(start_id)
|
||||||
|
if end_id > 0:
|
||||||
|
sql_query = sql_query + " AND id < " + str(end_id)
|
||||||
|
else:
|
||||||
|
if end_id > 0:
|
||||||
|
sql_query = sql_query + " WHERE id < " + str(end_id)
|
||||||
|
|
||||||
|
if len(start_time) > 0:
|
||||||
|
if len(end_time) > 0:
|
||||||
|
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
|
||||||
|
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
|
||||||
|
else:
|
||||||
|
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
|
||||||
|
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
|
||||||
|
else:
|
||||||
|
if len(end_time) > 0:
|
||||||
|
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
|
||||||
|
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
|
||||||
|
|
||||||
|
sql_query = sql_query + " order by `unixdt` desc"
|
||||||
|
|
||||||
|
if limit > 0:
|
||||||
|
sql_query = sql_query + " LIMIT " + str(limit)
|
||||||
|
print(sql_query)
|
||||||
|
cursor.execute(sql_query)
|
||||||
|
return cursor.fetchall()
|
||||||
|
|
||||||
|
def update_daily_addr(self, dayutc, last_add_cnt):
|
||||||
|
with self.conn.cursor() as cursor:
|
||||||
|
sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)"
|
||||||
|
#print(sql_insert)
|
||||||
|
#print(datetime, blockid, amount, height)
|
||||||
|
cursor.execute(sql_insert, (dayutc, last_add_cnt))
|
||||||
|
self.conn.commit()
|
||||||
|
|
||||||
|
def delete_daily_addr(self, config):
|
||||||
|
with self.conn.cursor() as cursor:
|
||||||
|
sql_query = "DELETE FROM `dailyaddradd`"
|
||||||
|
cursor.execute(sql_query)
|
||||||
|
self.conn.commit()
|
||||||
|
|
||||||
|
def delete_daily_rv(self, config):
|
||||||
|
with self.conn.cursor() as cursor:
|
||||||
|
sql_query = "DELETE FROM `dailyrcap`"
|
||||||
|
cursor.execute(sql_query)
|
||||||
|
self.conn.commit()
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __del__(self):
|
||||||
|
self.conn.close()
|
||||||
117
lyq/exchangeRate_lyq.py
Normal file
@@ -0,0 +1,117 @@
import requests
import pymysql
import time
from datetime import datetime

# Target currency list (RUB is kept here as well, so the logic stays uniform)
symbols = ["EUR", "GBP", "JPY", "CAD", "SEK", "CHF", "CNY", "RUB"]

# Database configuration
db_config = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "Macroeconomics",
    "port": 4423
}

def fetch_rates_frankfurter():
    base = "USD"
    url = f"https://api.frankfurter.app/latest?from={base}&to=" + ",".join([s for s in symbols if s != "RUB"])
    retries = 5
    while retries > 0:
        try:
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            data = response.json()
            rates = data.get("rates", {})
            if not rates:
                raise ValueError("API returned empty data")
            return rates
        except Exception as e:
            retries -= 1
            print(f"Frankfurter request failed, retrying... attempts left: {retries}, error: {e}")
            time.sleep(1)
    print("Frankfurter failed after repeated retries, returning empty data")
    return {}

def fetch_rub():
    try:
        url = "https://open.er-api.com/v6/latest/USD"
        response = requests.get(url, timeout=10)
        data = response.json()
        if data.get("result") == "success":
            rub_rate = data["rates"].get("RUB")
            if rub_rate:
                return rub_rate
    except Exception as e:
        print(f"Failed to fetch RUB: {e}")
    return None

def calc_dxy(rates):
    weights = {
        "EUR": 0.576,
        "JPY": 0.136,
        "GBP": 0.119,
        "CAD": 0.091,
        "SEK": 0.042,
        "CHF": 0.036
    }
    weighted_sum = 0
    weight_total = 0
    for ccy, w in weights.items():
        rate = rates.get(ccy)
        if rate:
            weighted_sum += rate * w
            weight_total += w
    if weight_total > 0:
        return weighted_sum / weight_total
    return None
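# Editor's note (illustrative sketch, not part of the original commit): the
# official ICE dollar index is a geometric, not arithmetic, weighted product:
#   DXY = 50.14348112 * EURUSD^-0.576 * USDJPY^0.136 * GBPUSD^-0.119
#         * USDCAD^0.091 * USDSEK^0.042 * USDCHF^0.036
# EUR and GBP enter with negative exponents because those pairs are quoted as
# USD per currency; Frankfurter returns USD->CCY quotes, so they are inverted
# here. A closer approximation than the arithmetic mean in calc_dxy above:
def calc_dxy_geometric(rates):
    return (50.14348112
            * (1 / rates["EUR"]) ** -0.576
            * rates["JPY"] ** 0.136
            * (1 / rates["GBP"]) ** -0.119
            * rates["CAD"] ** 0.091
            * rates["SEK"] ** 0.042
            * rates["CHF"] ** 0.036)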
def save_to_db(rates, dxy):
    current_time = datetime.now().replace(second=0, microsecond=0)
    data = {}

    for ccy in symbols:
        rate = rates.get(ccy)
        if rate is not None:
            data[f"USD{ccy}"] = round(rate, 5)
    if dxy is not None:
        data["DXY"] = round(dxy, 5)

    connection = pymysql.connect(**db_config)
    try:
        with connection.cursor() as cursor:
            for symbol, value in data.items():
                query = """INSERT INTO exchangeRate (date, symbol, _value) VALUES (%s, %s, %s)"""
                cursor.execute(query, (current_time, symbol, value))
            connection.commit()
        print(f"{current_time} data written to the database successfully")
    except Exception as e:
        print(f"Failed to write to the database: {e}")
    finally:
        connection.close()

if __name__ == "__main__":
    while True:
        rates = fetch_rates_frankfurter()

        # Fetch the RUB rate separately
        rub = fetch_rub()
        if rub:
            rates["RUB"] = rub
        else:
            print("RUB rate not available")

        if rates:
            dxy = calc_dxy(rates)
            print(f"FX data: {rates}")
            if dxy:
                print(f"Approximate dollar index: {dxy:.5f}")
            else:
                print("Approximate dollar index could not be calculated")
            save_to_db(rates, dxy)
        else:
            print("No FX data fetched")

        time.sleep(1800)  # run every 30 minutes
191
lyq/nochain_eth_lyq.py
Normal file
@@ -0,0 +1,191 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
#from loguru import logger
import datetime
import pymysql
import math


class NochainDbIf:
    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname,
                                    cursorclass=pymysql.cursors.DictCursor)
        print("init nochain db success!")

    def save(self, day, price, ma350x2, ma111, ma350x1, ma350x1r6, ma350x3, ma350x5):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `nochainv3a` (`unixdt`, `price`, `ma350x2`, `ma111`, ma350x1, ma350x1r6, ma350x3, ma350x5) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s)"
            # print(sql_insert)
            cursor.execute(sql_insert, (day, price, ma350x2, ma111, ma350x1, ma350x1r6, ma350x3, ma350x5))
            self.conn.commit()


def prepare_maxxx(prices, day, madays):
    total = 0
    cnt = 0
    for i in range(madays):
        if day in prices:
            total += prices[day]
            cnt += 1
        # print(day, total, cnt)
        day = str(int(day) - 3600 * 24)

    if cnt > 0:
        return total / cnt
    return 0


def prepare_ma350(prices, day):
    return prepare_maxxx(prices, day, 350)


def prepare_ma111(prices, day):
    return prepare_maxxx(prices, day, 111)


def prepare_gold_ratio(prices):
    ma350x1 = {}
    ma350x1r6 = {}
    ma350x2 = {}
    ma350x3 = {}
    ma350x5 = {}
    for day in prices:
        ma350x1[day] = prepare_maxxx(prices, day, 350)
        ma350x1r6[day] = ma350x1[day] * 1.6
        ma350x2[day] = ma350x1[day] * 2
        ma350x3[day] = ma350x1[day] * 3
        ma350x5[day] = ma350x1[day] * 5

    return ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5


def calc_pi_cycle_top(dbif, prices):
    ma350x2 = {}
    ma111 = {}
    for day in prices:
        ma350x2[day] = prepare_ma350(prices, day) * 2
        ma111[day] = prepare_ma111(prices, day)
    return ma350x2, ma111


def get_current_utc():
    curtime = time.gmtime(time.time())
    daystr = time.strftime("%d %b %Y", curtime)
    dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
    return dayutc


def get_current_price():
    url = "https://data.messari.io/api/v1/assets/eth/metrics/market-data&interval=1d"
    header_set = {}
    header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
    response_price = requests.get(url, headers=header_set)
    if response_price.status_code == 200:
        priceweb = ujson.loads(response_price.content)
        if "data" in priceweb:
            priceset = priceweb["data"]
            if "market_data" in priceset:
                pricedata = priceset["market_data"]
                if "price_usd" in pricedata:
                    price = pricedata["price_usd"]
                    return price
    return None


def get_history_price(coin_id):
    prices = {}

    dayutc = get_current_utc()
    price = get_current_price()
    if price is not None:
        prices[str(dayutc)] = price
    print("start...", dayutc, price)

    dayt = time.gmtime()
    daystr = time.strftime("%Y", dayt)
    year = int(daystr)
    end_year = year
    while True:
        # if end_year < 2022:
        #     break
        url = ""
        if end_year != year:
            start_year = end_year
            url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/price/time-series?start="
        else:
            url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/price/time-series?after=" + str(
                year) + "-01-01&order=descending&interval=1d"
        # now_time = time.gmtime()
        # daystr = time.strftime("%Y-%m-%d", now_time)
        # url = url + daystr + "&order=desc&format=json"
        if end_year != year:
            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
        header_set = {}
        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
        # header_set["Content-Type"] = "application/json"
        print(header_set, url)
        response_supply = requests.get(url, headers=header_set)
        # print(response_supply)
        if response_supply.status_code == 200:
            #print(response_supply.content)
            supplyweb = ujson.loads(response_supply.content)
            if "data" in supplyweb:
                supplyset = supplyweb["data"]
                if "values" in supplyset:
                    valueset = supplyset["values"]
                    if valueset is not None:
                        for supply in valueset:
                            dayutc = int(supply[0] / 1000)
                            s = supply[1]
                            prices[str(dayutc)] = float(s)
                            # print(s, dayutc, supplys[str(dayutc)])
                            # break
                    else:
                        break
                else:
                    break
        end_year -= 1
        time.sleep(2)
    return prices


def get_eth_history_price():
    return get_history_price("ethereum")


def nochain():
    global dbif
    dbif = NochainDbIf()
    print("prepare...")
    prices = get_eth_history_price()
    #print(prices)

    ma350x2, ma111 = calc_pi_cycle_top(dbif, prices)
    print("calc_pi_cycle_top ok.")
    ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5 = prepare_gold_ratio(prices)
    print("prepare_gold_ratio ok.")
    for day in prices:
        #print(day)
        ma350x21 = 0
        if day in ma350x2:
            ma350x21 = ma350x2[day]
        ma1111 = 0
        if day in ma111:
            ma1111 = ma111[day]

        ma350x11 = 0
        if day in ma350x1:
            ma350x11 = ma350x1[day]

        ma350x1r61 = 0
        if day in ma350x1r6:
            ma350x1r61 = ma350x1r6[day]

        ma350x31 = 0
        if day in ma350x3:
            ma350x31 = ma350x3[day]

        ma350x51 = 0
        if day in ma350x5:
            ma350x51 = ma350x5[day]

        # print(day, prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1)
        dbif.save(int(day), prices[day], ma350x21, ma1111, ma350x11,
                  ma350x1r61, ma350x31, ma350x51)
        #print("save ok.")

nochain()
27
lyq/nochain_lyq_utc08.py
Normal file
@@ -0,0 +1,27 @@
import subprocess
from datetime import datetime, timedelta
import time

while True:
    # Current time in UTC
    now = datetime.utcnow()

    # Interval until the next 08:00 UTC
    next_run = datetime(now.year, now.month, now.day, 8, 0)
    if now >= next_run:
        next_run += timedelta(days=1)
    sleep_time = (next_run - now).total_seconds()

    # Sleep until the next 08:00
    time.sleep(sleep_time)

    # Run nochain_lyq_v2.py
    command1 = "python3 nochain_lyq_v2.py"
    subprocess.run(command1, shell=True)

    # Run nochain_update_lyq.py
    command2 = "python3 nochain_update_lyq.py"
    subprocess.run(command2, shell=True)

    # Run nochain_eth_lyq.py
    command3 = "python3 nochain_eth_lyq.py"
    subprocess.run(command3, shell=True)
736
lyq/nochain_lyq_v2.py
Normal file
@@ -0,0 +1,736 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
#from loguru import logger
from datetime import datetime
import pymysql
import math

from stock_indicators import indicators
from stock_indicators.indicators.common.quote import Quote


class NochainDbIf:
    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
        print("init nochain db success!")

    def save(self, day, price, ma350x2, ma111, supply, flow, s2fratio, s2fdeflection, ma350x1, ma350x1r6, ma350x3, ma350x5):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `nochainv3c` (`unixdt`, `price`, `ma350x2`, `ma111`, `btcsupply`, `flow`, `s2fratio`, `s2fdeflection`, ma350x1, ma350x1r6, ma350x3, ma350x5) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            #print(sql_insert)
            cursor.execute(sql_insert, (day, price, ma350x2, ma111, supply, flow, s2fratio, s2fdeflection, ma350x1, ma350x1r6, ma350x3, ma350x5))
            self.conn.commit()

    def save_ssr(self, day, price, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc):
        with self.conn.cursor() as cursor:
            sql_insert = "REPLACE INTO `nochainv3b` (`unixdt`, `price`, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s)"
            #print(sql_insert)
            cursor.execute(sql_insert, (day, price, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc))
            self.conn.commit()

# NochainDbIf is a thin MySQL interface. `save` writes price/indicator rows to
# `nochainv3c`, while `save_ssr` writes stablecoin-supply and SSR (Stablecoin
# Supply Ratio) rows to `nochainv3b`. Both methods use REPLACE INTO, which
# inserts a new row or, on a primary/unique-key collision, replaces the existing
# one. Both manage the cursor with `with self.conn.cursor() as cursor` and
# commit via self.conn.commit() so the changes are persisted.

def get_history_price():
    prices = {}
    response_price = requests.get(
        'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU&api_key=FZqXog4sR-b7cYnXcRVV')
    if response_price.status_code == 200:
        #print(response_price.content)
        priceweb = ujson.loads(response_price.content)
        if "datatable" in priceweb:
            priceset = priceweb["datatable"]
            if "data" in priceset:
                pricedata = priceset["data"]
                for price in pricedata:
                    daystr = price[1]
                    p = price[2]
                    dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
                    prices[str(int(dayutc))] = float(p)
                    #print(price, int(dayutc), prices[str(int(dayutc))])
    return prices

# get_history_price fetches historical BTC prices (code MKPRU) from the Nasdaq
# Data Link API. On an HTTP 200 response it parses the JSON with ujson.loads(),
# walks datatable -> data, converts each date string to a unix timestamp with
# time.mktime(time.strptime(...)), stores the price keyed by that timestamp
# string, and returns the resulting prices dict.

def get_history_price2(pricedict):
    #pricedict = {}
    dayt = time.gmtime()
    daystr = time.strftime("%Y", dayt)
    year = int(daystr)
    end_year = year
    while True:
        url = ""
        if end_year != year:
            start_year = end_year
            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start="
        else:
            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + str(
                year) + "-01-01&end=" + str(year) + "-12-31&order=descending&interval=1d"

        if end_year != year:
            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
        header_set = {}
        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
        # header_set["Content-Type"] = "application/json"
        print(header_set, url)
        response_price = requests.get(url, headers=header_set)
        # print(response_price)
        if response_price.status_code == 200:
            # print(response_price.content)
            priceweb = ujson.loads(response_price.content)
            if "data" in priceweb:
                priceset = priceweb["data"]
                if "values" in priceset:
                    valueset = priceset["values"]
                    if valueset is not None:
                        for supply in valueset:
                            dayutc = int(supply[0] / 1000)
                            s = supply[1]
                            ret_time = time.gmtime(dayutc)
                            ret_daystr = time.strftime("%d %b %Y", ret_time)
                            ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
                            pricedict[str(ret_dayutc)] = float(s)
                            # print(s, dayutc, pricedict[str(dayutc)])
                            # break
                    else:
                        break
                else:
                    break
        end_year -= 1
        time.sleep(2)
    return pricedict

# get_history_price2 fetches historical BTC prices from the Messari time-series
# API and merges them into pricedict. Starting from the current year it pages
# backwards one year per iteration, building the request URL from start_year
# and end_year and authenticating with an x-messari-api-key header. Each value
# carries a millisecond timestamp, which is converted to seconds, snapped to
# the start of its UTC day, and used (as a string) as the dict key. It sleeps
# 2 seconds between requests, stops once a year returns no values, and returns
# pricedict.

def get_history_supply(supplys):
    #supplys = {}
    #issues = {}
    response_supply = requests.get(
        'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=TOTBC&api_key=FZqXog4sR-b7cYnXcRVV')
    if response_supply.status_code == 200:
        #print(response_supply.content)
        supplyweb = ujson.loads(response_supply.content)
        if "datatable" in supplyweb:
            supplyset = supplyweb["datatable"]
            # if "end_date" in supplyset:
            #     end_date = supplyset["end_date"]
            if "data" in supplyset:
                supplydata = supplyset["data"]
                #previssue = 0
                #prevday = 0
                for supply in supplydata:
                    daystr = supply[1]
                    p = supply[2]
                    dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
                    supplys[str(int(dayutc))] = float(p)
                    #if supply[0] == end_date:
                    #    previssue = float(p)
                    #    prevday = dayutc
                    #else:
                    #    issues[str(int(prevday))] = previssue - supplys[str(int(dayutc))]
                    #    previssue = float(p)
                    #    prevday = dayutc
                    #print(supply, int(dayutc), supply[str(int(dayutc))])
    #return supplys, issues
    print("get_history_supply", supplys)
    return supplys

# get_history_supply fetches the historical total BTC supply (code TOTBC) from
# the Nasdaq Data Link API and merges it into the supplys dict, keyed by the
# unix timestamp of each date. The commented-out sections relate to computing
# day-over-day supply differences into a separate `issues` dict, but they are
# currently inactive.

def get_history_supply2():
    supplys = {}
    dayt = time.gmtime()
    daystr = time.strftime("%Y", dayt)
    year = int(daystr)
    end_year = year
    while True:
        url = ""
        if end_year != year:
            start_year = end_year
            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/sply-circ/time-series?start="
        else:
            url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/sply-circ/time-series?start=" + str(
                year) + "-01-01&end=" + str(year) + "-12-31&order=descending&interval=1d"

        if end_year != year:
            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
        header_set = {}
        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
        # header_set["Content-Type"] = "application/json"
        print(header_set, url)
        response_csupply = requests.get(url, headers=header_set)
        # print(response_csupply)
        if response_csupply.status_code == 200:
            # print(response_csupply.content)
            csweb = ujson.loads(response_csupply.content)
            if "data" in csweb:
                csset = csweb["data"]
                if "values" in csset:
                    valueset = csset["values"]
                    if valueset is not None:
                        for supply in valueset:
                            dayutc = int(supply[0] / 1000)
                            s = supply[1]
                            ret_time = time.gmtime(dayutc)
                            ret_daystr = time.strftime("%d %b %Y", ret_time)
                            ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
                            supplys[str(ret_dayutc)] = float(s)
                            #print(s, dayutc, supplys[str(dayutc)])
                            #break
                    else:
                        break
                else:
                    break

        break
        end_year -= 1
        time.sleep(2)
    return supplys

# get_history_supply2 fetches the historical circulating BTC supply from the
# Messari sply-circ time-series API, paging backwards year by year exactly like
# get_history_price2 and storing day-aligned unix-timestamp keys in `supplys`.
# Note the bare `break` after the first iteration: it terminates the loop after
# fetching only one year of data, which may be unintentional.

def calc_issues(supplys):
    issues = {}
    prevday = 0
    previssue = 0
    for supply_day in supplys:
        if int(supply_day) > prevday:
            prevday = int(supply_day)
            previssue = supplys[supply_day]

    print("calc_issues", prevday, previssue, supplys)

    for dayutc in supplys:
        issues[str(int(prevday))] = previssue - supplys[str(int(dayutc))]
        previssue = float(supplys[dayutc])
        prevday = dayutc

    print(issues)
    return issues

# calc_issues derives per-day BTC issuance from the cumulative supply data. It
# first seeds prevday/previssue with the most recent day and its supply, then
# walks the supplys dict, storing previssue minus the current supply under the
# previous day's key and rolling prevday/previssue forward. There is a
# potential flaw here: because the loop keys each difference by the rolling
# prevday and overwrites it on every step, the result depends on dict iteration
# order and does not reliably yield one issuance value per day.
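# Editor's sketch (illustrative, not part of the original commit): a variant of
# calc_issues that pairs each day with the previous day's supply, so every key
# gets its own daily issuance instead of being overwritten:
def calc_issues_fixed(supplys):
    issues = {}
    days = sorted(supplys, key=int)  # ascending unix-timestamp string keys
    for prev, cur in zip(days, days[1:]):
        # daily issuance = today's cumulative supply minus yesterday's
        issues[cur] = supplys[cur] - supplys[prev]
    return issues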
def prepare_year_issues(issues):
    issues_year = {}
    temp_issues_year = {}
    for day in issues:
        dayt = time.gmtime(int(day))
        daystr = time.strftime("%Y", dayt)
        if daystr in temp_issues_year:
            temp_issues_year[daystr] = temp_issues_year[daystr] + issues[day]
        else:
            temp_issues_year[daystr] = issues[day]
    #print(temp_issues_year)
    for day in issues:
        dayt = time.gmtime(int(day))
        daystr = time.strftime("%Y", dayt)
        if daystr in temp_issues_year:
            issues_year[day] = temp_issues_year[daystr]
    #print(issues_year)
    return issues_year

# prepare_year_issues aggregates daily issuance into annual totals. The first
# pass sums the daily issuance per calendar year into temp_issues_year; the
# second pass maps every day back to its year's total, so issues_year holds the
# annual issuance keyed by day, ready for the stock-to-flow computation below.

def prepare_maxxx(prices, day, madays):
    total = 0
    cnt = 0
    for i in range(madays):
        if day in prices:
            total += prices[day]
            cnt += 1
        # print(day, total, cnt)
        day = str(int(day) - 3600 * 24)

    if cnt > 0:
        return total / cnt
    return 0

# prepare_maxxx computes a simple moving average of `prices` over the `madays`
# days ending at `day` (a unix-timestamp string). It steps back 24 hours
# (3600 * 24 seconds) per iteration, summing the prices that exist and counting
# them, then returns total / cnt, or 0 if no prices fell inside the window, so
# gaps in the data simply shrink the divisor.
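# Editor's sketch (illustrative, not part of the original commit): a tiny
# worked example of prepare_maxxx, with keys as unix-day timestamps in strings:
def _demo_prepare_maxxx():
    prices = {"86400": 10.0, "172800": 20.0, "259200": 30.0}
    assert prepare_maxxx(prices, "259200", 3) == 20.0  # (30 + 20 + 10) / 3
    assert prepare_maxxx(prices, "259200", 2) == 25.0  # (30 + 20) / 2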
def prepare_ma350(prices, day):
    return prepare_maxxx(prices, day, 350)

# Convenience wrapper: the 350-day moving average of `prices` ending at `day`,
# so callers need not spell out the window length each time.

def prepare_ma111(prices, day):
    return prepare_maxxx(prices, day, 111)

# Convenience wrapper: the 111-day moving average of `prices` ending at `day`.

def prepare_ma200(ssr, day):
    return prepare_maxxx(ssr, day, 200)

# Convenience wrapper: the 200-day moving average, applied here to Stablecoin
# Supply Ratio (SSR) series rather than prices.

def prepare_gold_ratio(prices):
    ma350x1 = {}
    ma350x1r6 = {}
    ma350x2 = {}
    ma350x3 = {}
    ma350x5 = {}
    for day in prices:
        ma350x1[day] = prepare_maxxx(prices, day, 350)
        ma350x1r6[day] = ma350x1[day] * 1.6
        ma350x2[day] = ma350x1[day] * 2
        ma350x3[day] = ma350x1[day] * 3
        ma350x5[day] = ma350x1[day] * 5

    return ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5

# prepare_gold_ratio computes, for every day, the 350-day moving average and
# its 1.6x, 2x, 3x and 5x multiples ("golden ratio" bands), returning all five
# series for comparison and charting.

def cal_ssr_osc(ssr):
    ssr_osc = {}
    for day in ssr:
        ssr_ma = prepare_ma200(ssr, day)
        ssr_osc[day] = ssr[day] / ssr_ma
    return ssr_osc

# cal_ssr_osc builds an SSR oscillator by dividing each day's SSR by its
# 200-day moving average, highlighting deviations from the medium-term trend in
# stablecoin supply dynamics.

def calc_pi_cycle_top(dbif, prices):
    ma350x2 = {}
    ma111 = {}
    for day in prices:
        ma350x2[day] = prepare_ma350(prices, day) * 2
        ma111[day] = prepare_ma111(prices, day)
    return ma350x2, ma111

# calc_pi_cycle_top prepares the two Pi Cycle Top series: twice the 350-day
# moving average and the 111-day moving average. (The dbif parameter is unused
# here.) Crossovers between the two are commonly read as potential peaks in the
# price cycle.

def calc_s2f_ratio(dbif, prices, supplys, issues_year):
    s2f_ratio = {}
    s2f_deflection = {}
    cnt = 1
    for day in supplys:
        if day in issues_year:
            s2f = 0
            if int(day) >= 1672502400:  # 2023
                s2f = supplys[day] / (900 * 365)
            else:
                s2f = supplys[day] / issues_year[day]
            print(s2f, day, supplys[day], issues_year[day])
            s2f_ratio[day] = 0.09 * (math.pow(s2f, 3.3))
            #print(supplys[day], issues_year[day], s2f, s2f_ratio[day])
            s2f_deflection[day] = prices[day] / s2f_ratio[day]
            #print(day, prices[day], s2f, s2f_ratio[day], s2f_deflection[day])
            #cnt+=1
            #if cnt > 10:
            #    break
    return s2f_ratio, s2f_deflection

# calc_s2f_ratio computes two stock-to-flow (S2F) metrics. For each day with
# issuance data it derives the S2F ratio: from 2023 onwards (unix timestamp
# 1672502400) it assumes a fixed flow of 900 BTC/day, otherwise it divides the
# supply by that year's issuance. The model value is 0.09 * s2f ** 3.3, and the
# deflection is the observed price divided by the model value, showing how far
# the market trades above or below the model. (The dbif parameter and the cnt
# counter are unused.)
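# Editor's sketch (illustrative, not part of the original commit; the supply
# and price figures below are made-up round numbers, not data): the model above
# is model_value = 0.09 * (supply / yearly_flow) ** 3.3. For example, with a
# hypothetical 19.0M BTC outstanding and the fixed post-2023 flow of 900
# BTC/day:
def _demo_s2f():
    supply = 19_000_000.0
    s2f = supply / (900 * 365)               # stock-to-flow ratio, about 57.8
    model_value = 0.09 * math.pow(s2f, 3.3)  # the 0.09 * s2f**3.3 model
    deflection = 30_000.0 / model_value      # hypothetical price / model value
    return s2f, model_value, deflection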
def get_stable_coin_supply(coin_id):
    supplys = {}
    dayt = time.gmtime()
    daystr = time.strftime("%Y", dayt)
    year = int(daystr)
    end_year = year
    #split_2023 = False
    while True:
        #if end_year < 2022:
        #    break
        url = ""
        #if split_2023 and end_year != year:
        if end_year != year:
            start_year = end_year
            url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?start="
        else:
            #if split_2023:
            #    url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?after=" + str(
            #        year) + "-06-01&order=descending"
            #else:
            #    url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?start=2023-01-01&end=2023-06-01&order=descending"
            url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?start=" + str(
                year) + "-01-01&end=" + str(year) + "-12-31&order=descending&interval=1d"

        #now_time = time.gmtime()
        #daystr = time.strftime("%Y-%m-%d", now_time)
        #url = url + daystr + "&order=desc&format=json"
        if end_year != year:
            url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
        header_set = {}
        header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
        #header_set["Content-Type"] = "application/json"
        print(header_set, url)
        response_supply = requests.get(url, headers=header_set)
        #print(response_supply)
        if response_supply.status_code == 200:
            #print(response_supply.content)
            supplyweb = ujson.loads(response_supply.content)
            if "data" in supplyweb:
                supplyset = supplyweb["data"]
                if "values" in supplyset:
                    valueset = supplyset["values"]
                    if valueset is not None:
                        for supply in valueset:
                            dayutc = int(supply[0] / 1000)
                            s = supply[1]
                            supplys[str(dayutc)] = float(s)
                            #print(s, dayutc, supplys[str(dayutc)])
                            #break
                    else:
                        break
                else:
                    break
        #if split_2023:
        end_year -= 1
        #else:
        #    split_2023 = True

        time.sleep(2)
    return supplys

# get_stable_coin_supply retrieves a stablecoin's historical circulating supply
# from the Messari sply-circ API, given its coin_id. It pages backwards year by
# year from the current year, authenticating with the Messari API key, converts
# each millisecond timestamp to seconds, stores values keyed by the timestamp
# string, sleeps 2 seconds between requests to avoid hammering the server, and
# returns the supplys dict once a year comes back empty.

def get_usdt_supply():
    return get_stable_coin_supply("tether")

# Wrapper that fetches Tether (USDT) supply without repeating the coin id.

def get_usdc_supply():
    return get_stable_coin_supply("usd-coin")

# Wrapper that fetches USD Coin (USDC) supply without repeating the coin id.

def get_busd_supply():
    return get_stable_coin_supply("binance-usd")

# Wrapper that fetches Binance USD (BUSD) supply without repeating the coin id.

def get_dai_supply():
    return get_stable_coin_supply("dai")

# Wrapper that fetches Dai (DAI) supply without repeating the coin id.

def cal_ssr(dbif, prices, supplys):
    print("calc_ssr")
    usdts = get_usdt_supply()
    #print(usdts)
    #return
    usdcs = get_usdc_supply()
    #print(usdcs)
    busds = get_busd_supply()
    dais = get_dai_supply()
    print("calc_ssr start")
    stables = {}
    ssr = {}
    marketcap = {}
    for day in usdts:
        stables[day] = usdts[day]

        if day in usdcs:
            stables[day] += usdcs[day]
        if day in busds:
            stables[day] += busds[day]
        if day in dais:
            stables[day] += dais[day]

        #print(day, stables[day], usdts[day], usdcs[day], busds[day], dais[day])
        #print(prices[day])
        if day in prices:
            #print(day, prices)
            if day in supplys:
                #print(day, supplys)
                marketcap[day] = prices[day] * supplys[day]
                if stables[day] > 0:
                    ssr[day] = marketcap[day] / stables[day]
                else:
                    ssr[day] = 0
        #break
    print(ssr)
    ssrosc = {}
    quotes_list = []
    for day in ssr:
        #print(day)
        dayt = time.gmtime(int(day))
        #dayt = datetime.date.fromtimestamp(int(day))
        daystr = time.strftime("%Y-%m-%d", dayt)
        dtobj = datetime.strptime(daystr, "%Y-%m-%d")
        print(dtobj)
        quotes_list.append(Quote(dtobj, 0, 0, 0, ssr[day], 0))
    print(quotes_list)
    ssr_osc = indicators.get_bollinger_bands(quotes_list, 200, 2)
    for item in ssr_osc:
        if item.z_score is not None:
            ssrosc[str(int(item.date.timestamp()))] = item.z_score
    #ssrosc = cal_ssr_osc(ssr)
    print(ssrosc)
    for day in ssr:
        price = 0
        if day in prices:
            price = prices[day]
        marketcap1 = 0
        if day in marketcap:
            marketcap1 = marketcap[day]
        usdt = 0
        if day in usdts:
            usdt = usdts[day]
        usdc = 0
        if day in usdcs:
            usdc = usdcs[day]
        busd = 0
        if day in busds:
            busd = busds[day]
        dai = 0
        if day in dais:
            dai = dais[day]
        stable = 0
        if day in stables:
            stable = stables[day]
        ssr1 = 0
        if day in ssr:
            ssr1 = ssr[day]
        ssrosc1 = 0
        if day in ssrosc:
            ssrosc1 = ssrosc[day]
        dbif.save_ssr(day, price, marketcap1, usdt, usdc, busd, dai,
                      stable, ssr1, ssrosc1)

# cal_ssr computes the Stablecoin Supply Ratio (SSR) and its oscillator and
# saves both to the database. It fetches historical supplies for USDT, USDC,
# BUSD and DAI, sums them per day into a total stablecoin supply, computes the
# BTC market cap as price * supply, and derives SSR = marketcap / stablecoin
# supply. It then wraps the SSR series in Quote objects, runs 200-day,
# 2-standard-deviation Bollinger bands over it, stores each point's z-score as
# the SSR oscillator, and finally writes one row per day via dbif.save_ssr.
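# Editor's note (illustrative sketch, not part of the original commit): the
# z-score saved above is simply
#   z = (ssr_today - SMA_200(ssr)) / stdev_200(ssr)
# i.e. how many 200-day standard deviations today's SSR sits from its mean. A
# minimal pure-Python equivalent of the per-point value (population standard
# deviation; the stock_indicators library may differ in detail):
def _ssr_zscore(values):
    # values: the last 200 SSR readings, oldest first, newest last
    n = len(values)
    mean = sum(values) / n
    var = sum((v - mean) ** 2 for v in values) / n
    return (values[-1] - mean) / (var ** 0.5) if var > 0 else 0.0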
def nochain():
    global dbif
    dbif = NochainDbIf()
    temp_prices = get_history_price()
    prices = get_history_price2(temp_prices)
    #supplys, issues = get_history_supply()
    temp_supplys = get_history_supply2()
    supplys = get_history_supply(temp_supplys)
    issues = calc_issues(supplys)
    '''
    cnt = 0;
    for day in supplys:
        print(supplys[day])
        if day in issues:
            print(issues[day])
        cnt+=1
        if cnt > 5:
            break
    '''
    issues_year = prepare_year_issues(issues)
    #print(issues_year)

    ma350x2, ma111 = calc_pi_cycle_top(dbif, prices)
    #print(ma350x2)
    #print(ma111)

    s2f_ratio, s2f_deflection = calc_s2f_ratio(dbif, prices, supplys, issues_year)
    #print(s2f_ratio)
    #print(s2f_deflection)

    ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5 = prepare_gold_ratio(prices)

    for day in prices:
        #print(day)
        ma350x21 = 0
        if day in ma350x2:
            ma350x21 = ma350x2[day]
        ma1111 = 0
        if day in ma111:
            ma1111 = ma111[day]
        supply = 0
        if day in supplys:
            supply = supplys[day]
        issue = 0
        if day in issues:
            issue = issues[day]
        s2f_ratio1 = 0
        if day in s2f_ratio:
            s2f_ratio1 = s2f_ratio[day]
        s2f_deflection1 = 0
        if day in s2f_deflection:
            s2f_deflection1 = s2f_deflection[day]

        ma350x11 = 0
        if day in ma350x1:
            ma350x11 = ma350x1[day]

        ma350x1r61 = 0
        if day in ma350x1r6:
            ma350x1r61 = ma350x1r6[day]

        ma350x31 = 0
        if day in ma350x3:
            ma350x31 = ma350x3[day]

        ma350x51 = 0
        if day in ma350x5:
            ma350x51 = ma350x5[day]

        #print(day, prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1)
        dbif.save(int(day), prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1, ma350x11, ma350x1r61, ma350x31, ma350x51)

    cal_ssr(dbif, prices, supplys)

# nochain orchestrates data collection, processing and storage for this script.
# It initialises the global NochainDbIf database interface, merges historical
# prices from Nasdaq and Messari, merges historical supply the same way, then
# derives issuance (calc_issues) and annual issuance (prepare_year_issues), the
# Pi Cycle Top series (calc_pi_cycle_top), the stock-to-flow ratio and
# deflection (calc_s2f_ratio), and the golden-ratio bands (prepare_gold_ratio),
# saving one row per day with dbif.save. Finally it computes and stores the
# Stablecoin Supply Ratio via cal_ssr.

nochain()
87
lyq/nochain_update_lyq.py
Normal file
@@ -0,0 +1,87 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
import datetime
import pymysql
import math
from stock_indicators import indicators
from stock_indicators.indicators.common.quote import Quote


class NochainDbIf:
    def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
        self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
        print("init nochain db success!")
    # __init__ opens the MySQL connection with pymysql, using the given host,
    # port, user, password and database name (defaults apply when omitted).
    # cursorclass=pymysql.cursors.DictCursor makes queries return rows as
    # dictionaries instead of tuples, which is more convenient to access, and a
    # message is printed once the connection is initialised.

    def get_ssr(self):
        with self.conn.cursor() as cursor:
            sql_cmd = "SELECT unixdt,ssr FROM nochainv3b order by unixdt"
            cursor.execute(sql_cmd)
            self.conn.commit()
            return cursor.fetchall()
    # get_ssr reads the unixdt and ssr columns from nochainv3b, ordered by
    # unixdt. The cursor is managed by a `with` block, and cursor.fetchall()
    # returns the rows as a list of dictionaries keyed by column name. (The
    # commit after a plain SELECT is harmless but not required.)

    def update_ssr(self, dt, ssrosc):
        with self.conn.cursor() as cursor:
            sql_update = "UPDATE nochainv3b SET unixdt='%s', ssrosc='%s' WHERE unixdt='%s'" % (dt, ssrosc, dt)
            print(sql_update)
            cursor.execute(sql_update)
            self.conn.commit()
    # update_ssr updates a row of nochainv3b, printing the statement for
    # debugging and committing afterwards. Two caveats: the SQL is built with
    # %-string formatting, which opens the door to SQL injection (a
    # parameterized query is safer), and the statement sets unixdt to the same
    # value it matches on in the WHERE clause, which is redundant. If the
    # intent is to update a specific row, only ssrosc needs to be set.
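    # Editor's sketch (illustrative, not the original method): the same update
    # done with placeholders, avoiding both the injection risk and the
    # redundant SET unixdt noted above:
    def update_ssr_safe(self, dt, ssrosc):
        with self.conn.cursor() as cursor:
            # pymysql fills in %s parameters with proper escaping
            cursor.execute("UPDATE nochainv3b SET ssrosc=%s WHERE unixdt=%s", (ssrosc, dt))
        self.conn.commit()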
    def rollback(self):
        self.conn.rollback()
    # rollback reverts any uncommitted changes made during the current
    # transaction, typically used when an error occurs mid-transaction.

def nochain():
    try:
        dbif = NochainDbIf()
        ssr_ma = dbif.get_ssr()
        #print(ssr_ma)
        quotes_list = []
        for item in ssr_ma:
            print(item["unixdt"])
            quotes_list.append(Quote(item["unixdt"], 0, 0, 0, item["ssr"], 0))
        #print(quotes_list)
        ssr_osc = indicators.get_bollinger_bands(quotes_list, 200, 2)
        for item in ssr_osc:
            if item.z_score is not None:
                #print(item.date, item.sma, item.percent_b, item.z_score, item.width)
                #dbif.update_ssr(item.date, item.z_score)
                break
        print("ok")
    except Exception as e:
        #dbif.rollback();
        print(e)
    finally:
        print("end")

nochain()

# nochain recomputes Bollinger bands over the SSR series stored in MySQL. It
# connects via NochainDbIf, fetches the SSR rows with get_ssr, wraps them in
# Quote objects, and runs 200-day, 2-standard-deviation Bollinger bands over
# them. The part that would write the resulting z-scores back with update_ssr
# is currently commented out, so the loop just stops at the first non-None
# z-score. Any exception is caught and printed, and "end" is printed whether
# the run succeeds or fails.
610
lyq/redis_if_qt.py
Normal file
@@ -0,0 +1,610 @@
import time
|
||||||
|
|
||||||
|
from walrus import *
|
||||||
|
from loguru import logger
|
||||||
|
class RedisIf:
|
||||||
|
def __init__(self, host="127.0.0.1", port=6379, password="", db=0):
|
||||||
|
self.db = Database(host=host, port=port, db=db)
|
||||||
|
|
||||||
|
self.zbalance = self.db.ZSet("balancev2d")
|
||||||
|
'''
|
||||||
|
#realize cap progress
|
||||||
|
self.rv = self.db.Hash("rv")
|
||||||
|
#address and balance progress
|
||||||
|
self.addr = self.db.Hash("addr")
|
||||||
|
#block volume progress
|
||||||
|
self.bv = self.db.Hash("bv")
|
||||||
|
#daily volume progress
|
||||||
|
self.dv = self.db.Hash("dv")
|
||||||
|
'''
|
||||||
|
'''
|
||||||
|
#stat tx progress
|
||||||
|
self.tx = self.db.Hash("tx")
|
||||||
|
|
||||||
|
#ETH daily contract progress
|
||||||
|
self.eth_dc = self.db.Hash("ethdc")
|
||||||
|
|
||||||
|
#btc stats fee
|
||||||
|
self.btc_stats = self.db.Hash("btcstats")
|
||||||
|
|
||||||
|
#btc stats volume
|
||||||
|
self.btc_volume = self.db.Hash("btcvolume")
|
||||||
|
|
||||||
|
# btc stats cdd
|
||||||
|
self.btc_cdd = self.db.Hash("btccdd")
|
||||||
|
|
||||||
|
# btc stats cdd days
|
||||||
|
self.btc_cdd_days = self.db.Hash("btccdddays")
|
||||||
|
'''
|
||||||
|
self.btc_block_time = self.db.Hash("btcblocktimev2d")
|
||||||
|
'''
|
||||||
|
self.btc_sopr = self.db.Hash("btcsopr")
|
||||||
|
'''
|
||||||
|
self.btc_data = self.db.Hash("btc_datav2d")
|
||||||
|
|
||||||
|
self.active_address = self.db.Set("active_addressv2d")
|
||||||
|
self.send_address = self.db.Set("send_addressv2d")
|
||||||
|
self.receive_address = self.db.Set("receive_addressv2d")
|
||||||
|
|
||||||
|
    def get_btc_data(self, key):
        value = None
        if self.btc_data[key] is not None:
            value = self.btc_data[key]
        return value

    def set_btc_data(self, key, value):
        self.btc_data[key] = value

    def reset_btc_data(self):
        self.btc_data.clear()
        self.zbalance.clear()
        self.btc_block_time.clear()

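    # Usage note (added): btc_data is a generic hash for scan state; walrus
    # returns stored values as bytes, so callers decode on the way out, e.g.
    #
    #   rdb = RedisIf()
    #   rdb.set_btc_data("last_height", 800000)        # hypothetical key
    #   height = int(rdb.get_btc_data("last_height"))  # int() accepts bytes
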
    '''
    def get_last_btc_sopr(self):
        last_sopr_buy = None
        last_asopr_buy = None
        last_easopr_buy = None
        last_lth_sopr_buy = None
        last_sth_sopr_buy = None
        last_asol = None
        last_eaasol = None

        if self.btc_sopr["last_asol"] is not None:
            last_asol = self.btc_sopr["last_asol"]
            #last_asol = float(self.btc_sopr["last_asol"].decode("utf-8"))
        if self.btc_sopr["last_eaasol"] is not None:
            last_eaasol = self.btc_sopr["last_eaasol"]
            #last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8"))

        if self.btc_sopr["last_sopr_buy"] is not None:
            last_sopr_buy = self.btc_sopr["last_sopr_buy"]
            #last_sopr_buy = float(self.btc_sopr["last_sopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_asopr_buy"] is not None:
            last_asopr_buy = self.btc_sopr["last_asopr_buy"]
            #last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_easopr_buy"] is not None:
            last_easopr_buy = self.btc_sopr["last_easopr_buy"]
            #last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_lth_sopr_buy"] is not None:
            last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"]
            #last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8"))
        if self.btc_sopr["last_sth_sopr_buy"] is not None:
            last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"]
            #last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8"))

        last_sopr_sell = None
        last_asopr_sell = None
        last_easopr_sell = None
        last_lth_sopr_sell = None
        last_sth_sopr_sell = None
        if self.btc_sopr["last_sopr_sell"] is not None:
            last_sopr_sell = self.btc_sopr["last_sopr_sell"]
            # last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8"))
        if self.btc_sopr["last_asopr_sell"] is not None:
            last_asopr_sell = self.btc_sopr["last_asopr_sell"]
            # last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8"))
        if self.btc_sopr["last_easopr_sell"] is not None:
            last_easopr_sell = self.btc_sopr["last_easopr_sell"]
            # last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8"))
        if self.btc_sopr["last_lth_sopr_sell"] is not None:
            last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"]
            # last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8"))
        if self.btc_sopr["last_sth_sopr_sell"] is not None:
            last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"]
            # last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8"))

        return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell

    def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell):
        self.btc_sopr["last_asol"] = last_asol
        self.btc_sopr["last_eaasol"] = last_eaasol

        self.btc_sopr["last_sopr_buy"] = last_sopr_buy
        self.btc_sopr["last_asopr_buy"] = last_asopr_buy
        self.btc_sopr["last_easopr_buy"] = last_easopr_buy
        self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy
        self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy
        self.btc_sopr["last_sopr_sell"] = last_sopr_sell
        self.btc_sopr["last_asopr_sell"] = last_asopr_sell
        self.btc_sopr["last_easopr_sell"] = last_easopr_sell
        self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell
        self.btc_sopr["last_sth_sopr_sell"] = last_sth_sopr_sell
    '''
    def get_block_time(self, height):
        block_time = None
        height_str = str(height)
        if self.btc_block_time[height_str] is not None:
            block_time = int(self.btc_block_time[height_str].decode("utf-8"))

        return block_time

    def set_block_time(self, height, ts):
        height_str = str(height)
        self.btc_block_time[height_str] = ts

    '''
    def get_last_btc_cdd_days(self):
        last_cdd = None
        last_acdd = None
        last_eacdd = None
        last_cdd_day1 = None
        last_cdd_day7 = None
        last_cdd_day30 = None
        last_cdd_day60 = None
        last_cdd_day90 = None
        last_cdd_day180 = None
        last_cdd_day365 = None
        last_cdd_day730 = None

        last_date = None
        last_height = None
        last_date_str = None

        if self.btc_cdd["last_cdd"] is not None:
            last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
        if self.btc_cdd["last_acdd"] is not None:
            last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8"))
        if self.btc_cdd["last_eacdd"] is not None:
            last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day1"] is not None:
            last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day7"] is not None:
            last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day30"] is not None:
            last_cdd_day30 = float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day60"] is not None:
            last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day90"] is not None:
            last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day180"] is not None:
            last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day365"] is not None:
            last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8"))
        if self.btc_cdd_days["last_cdd_day730"] is not None:
            last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8"))
        if self.btc_cdd_days["last_date"] is not None:
            last_date = int(self.btc_cdd_days["last_date"].decode("utf-8"))
        if self.btc_cdd_days["last_height"] is not None:
            last_height = int(self.btc_cdd_days["last_height"].decode("utf-8"))
        if self.btc_cdd_days["last_date_str"] is not None:
            last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8")
        return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str

    def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr):
        self.btc_cdd["last_cdd"] = cdd
        self.btc_cdd["last_acdd"] = acdd
        self.btc_cdd["last_eacdd"] = eacdd
        self.btc_cdd_days["last_cdd_day1"] = day1
        self.btc_cdd_days["last_cdd_day7"] = day7
        self.btc_cdd_days["last_cdd_day30"] = day30
        self.btc_cdd_days["last_cdd_day60"] = day60
        self.btc_cdd_days["last_cdd_day90"] = day90
        self.btc_cdd_days["last_cdd_day180"] = day180
        self.btc_cdd_days["last_cdd_day365"] = day365
        self.btc_cdd_days["last_cdd_day730"] = day730
        self.btc_cdd_days["last_date"] = dt
        self.btc_cdd_days["last_height"] = height
        self.btc_cdd_days["last_date_str"] = dtstr
    '''
    '''
    def get_last_btc_cdd(self):
        last_cdd = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.btc_cdd["last_cdd"] is not None:
            last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
        if self.btc_cdd["last_date"] is not None:
            last_date = int(self.btc_cdd["last_date"].decode("utf-8"))
        if self.btc_cdd["last_height"] is not None:
            last_height = int(self.btc_cdd["last_height"].decode("utf-8"))
        if self.btc_cdd["last_date_str"] is not None:
            last_date_str = self.btc_cdd["last_date_str"].decode("utf-8")
        return last_cdd, last_date, last_height, last_date_str

    def set_last_btc_cdd(self, cdd, dt, height, dtstr):
        self.btc_cdd["last_cdd"] = cdd
        self.btc_cdd["last_date"] = dt
        self.btc_cdd["last_height"] = height
        self.btc_cdd["last_date_str"] = dtstr

    def get_last_btc_volume(self):
        last_volume = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.btc_volume["last_volume"] is not None:
            last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
        if self.btc_volume["last_date"] is not None:
            last_date = int(self.btc_volume["last_date"].decode("utf-8"))
        if self.btc_volume["last_height"] is not None:
            last_height = int(self.btc_volume["last_height"].decode("utf-8"))
        if self.btc_volume["last_date_str"] is not None:
            last_date_str = self.btc_volume["last_date_str"].decode("utf-8")
        return last_volume, last_date, last_height, last_date_str

    def set_last_btc_volume(self, volume, dt, height, dtstr):
        self.btc_volume["last_volume"] = volume
        self.btc_volume["last_date"] = dt
        self.btc_volume["last_height"] = height
        self.btc_volume["last_date_str"] = dtstr
    '''
    '''
    def get_last_btc_stats(self):
        last_fees = None
        last_date = None
        last_height = None
        last_date_str = None
        last_volume = None
        if self.btc_stats["last_fees"] is not None:
            last_fees = float(self.btc_stats["last_fees"].decode("utf-8"))
        if self.btc_volume["last_volume"] is not None:
            last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
        if self.btc_stats["last_date"] is not None:
            last_date = int(self.btc_stats["last_date"].decode("utf-8"))
        if self.btc_stats["last_height"] is not None:
            last_height = int(self.btc_stats["last_height"].decode("utf-8"))
        if self.btc_stats["last_date_str"] is not None:
            last_date_str = self.btc_stats["last_date_str"].decode("utf-8")
        return last_fees, last_volume, last_date, last_height, last_date_str

    def set_last_btc_stats(self, fees, volume, dt, height, dtstr):
        self.btc_stats["last_fees"] = fees
        self.btc_volume["last_volume"] = volume
        self.btc_stats["last_date"] = dt
        self.btc_stats["last_height"] = height
        self.btc_stats["last_date_str"] = dtstr

    def get_last_eth_dc(self):
        last_date = None
        last_height = None
        last_date_str = None
        if self.eth_dc["last_date"] is not None:
            last_date = int(self.eth_dc["last_date"].decode("utf-8"))
        if self.eth_dc["last_height"] is not None:
            last_height = int(self.eth_dc["last_height"].decode("utf-8"))
        if self.eth_dc["last_date_str"] is not None:
            last_date_str = self.eth_dc["last_date_str"].decode("utf-8")
        return last_date, last_height, last_date_str

    def set_last_eth_dc(self, dt, height, dtstr):
        self.eth_dc["last_date"] = dt
        self.eth_dc["last_height"] = height
        self.eth_dc["last_date_str"] = dtstr
    '''
    '''
    def get_last_dv(self):
        last_dv = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.dv["last_dv"] is not None:
            last_dv = float(self.dv["last_dv"].decode("utf-8"))
        if self.dv["last_date"] is not None:
            last_date = int(self.dv["last_date"].decode("utf-8"))
        if self.dv["last_height"] is not None:
            last_height = int(self.dv["last_height"].decode("utf-8"))
        if self.dv["last_date_str"] is not None:
            last_date_str = self.dv["last_date_str"].decode("utf-8")
        return last_dv, last_date, last_height, last_date_str

    def set_last_dv(self, dv, dt, height, dtstr):
        self.dv["last_dv"] = dv
        self.dv["last_date"] = dt
        self.dv["last_height"] = height
        self.dv["last_date_str"] = dtstr

    def get_last_bv(self):
        last_height = None
        if self.bv["last_height"] is not None:
            last_height = int(self.bv["last_height"].decode("utf-8"))
        return last_height

    def set_last_bv(self, height):
        self.bv["last_height"] = height
    '''
    '''
    def get_last_ind(self):
        last_csupply = None
        last_mintusd = None
        last_sumcsupply = None
        last_sumcdd = None
        last_sumeacdd = None
        last_rprofit = None
        last_rloss = None
        last_marketcap = None
        last_rcap = None
        last_mvrv = None

        last_earcap = None
        if self.tx["last_csupply"] is not None:
            last_csupply = float(self.tx["last_csupply"].decode("utf-8"))
        if self.tx["last_mintusd"] is not None:
            last_mintusd = float(self.tx["last_mintusd"].decode("utf-8"))
        if self.tx["last_sumcsupply"] is not None:
            last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8"))
        if self.tx["last_sumcdd"] is not None:
            last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8"))
        if self.tx["last_sumeacdd"] is not None:
            last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8"))
        if self.tx["last_rprofit"] is not None:
            last_rprofit = float(self.tx["last_rprofit"].decode("utf-8"))
        if self.tx["last_rloss"] is not None:
            last_rloss = float(self.tx["last_rloss"].decode("utf-8"))
        if self.tx["last_marketcap"] is not None:
            last_marketcap = float(self.tx["last_marketcap"].decode("utf-8"))
        if self.tx["last_rcap"] is not None:
            last_rcap = float(self.tx["last_rcap"].decode("utf-8"))
        if self.tx["last_earcap"] is not None:
            last_earcap = float(self.tx["last_earcap"].decode("utf-8"))
        if self.tx["last_mvrv"] is not None:
            last_mvrv = float(self.tx["last_mvrv"].decode("utf-8"))

        return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv

    def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv):
        self.tx["last_csupply"] = last_csupply
        self.tx["last_mintusd"] = last_mintusd
        self.tx["last_sumcsupply"] = last_sumcsupply
        self.tx["last_sumcdd"] = last_sumcdd
        self.tx["last_sumeacdd"] = last_sumeacdd
        self.tx["last_rprofit"] = last_rprofit
        self.tx["last_rloss"] = last_rloss
        self.tx["last_marketcap"] = last_marketcap
        self.tx["last_rcap"] = last_rcap
        self.tx["last_earcap"] = last_earcap
        self.tx["last_mvrv"] = last_mvrv

    def get_last_tx(self):
        last_profit = None
        last_fees = None
        last_newaddr_cnt = None
        last_newaddr_vol = None
        last_active_addr_cnt = None
        last_tx_addr_cnt = None
        last_rx_addr_cnt = None
        last_vol_change = None
        last_vol = None
        last_avol = None
        last_date = None
        last_height = None
        last_date_str = None
        last_txs = None
        last_eatxs = None
        if self.tx["last_profit"] is not None:  # was "last_profit_rate"; the key read below is "last_profit"
            last_profit = int(self.tx["last_profit"].decode("utf-8"))
        if self.tx["last_fees"] is not None:
            last_fees = int(self.tx["last_fees"].decode("utf-8"))
        if self.tx["last_txs"] is not None:
            last_txs = int(self.tx["last_txs"].decode("utf-8"))
        if self.tx["last_eatxs"] is not None:
            last_eatxs = int(self.tx["last_eatxs"].decode("utf-8"))
        if self.tx["last_newaddr_cnt"] is not None:
            last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8"))
        if self.tx["last_newaddr_vol"] is not None:
            last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8"))
        if self.tx["last_active_addr_cnt"] is not None:
            last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8"))
        if self.tx["last_tx_addr_cnt"] is not None:
            last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8"))
        if self.tx["last_rx_addr_cnt"] is not None:
            last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8"))
        if self.tx["last_vol_change"] is not None:
            last_vol_change = float(self.tx["last_vol_change"].decode("utf-8"))
        if self.tx["last_vol"] is not None:
            last_vol = float(self.tx["last_vol"].decode("utf-8"))
        if self.tx["last_avol"] is not None:
            last_avol = float(self.tx["last_avol"].decode("utf-8"))
        if self.tx["last_date"] is not None:
            last_date = int(self.tx["last_date"].decode("utf-8"))
        if self.tx["last_height"] is not None:
            last_height = int(self.tx["last_height"].decode("utf-8"))
        if self.tx["last_date_str"] is not None:
            last_date_str = self.tx["last_date_str"].decode("utf-8")
        return last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, last_vol, last_avol, last_date, last_height, last_date_str

    def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt, newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt, vol_change, vol, avol, dt, height, dtstr):
        self.tx["last_profit"] = last_profit
        self.tx["last_fees"] = last_fees
        self.tx["last_txs"] = last_txs
        self.tx["last_eatxs"] = last_eatxs
        self.tx["last_newaddr_cnt"] = newaddr_cnt
        self.tx["last_newaddr_vol"] = newaddr_vol
        self.tx["last_active_addr_cnt"] = active_addr_cnt
        self.tx["last_tx_addr_cnt"] = tx_addr_cnt
        self.tx["last_rx_addr_cnt"] = rx_addr_cnt
        self.tx["last_vol_change"] = vol_change
        self.tx["last_vol"] = vol
        self.tx["last_avol"] = avol
        self.tx["last_date"] = dt
        self.tx["last_height"] = height
        self.tx["last_date_str"] = dtstr
    '''
    '''
    def get_last_addr(self):
        last_daily_cnt = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.addr["last_daily_cnt"] is not None:
            last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8"))
        if self.addr["last_date"] is not None:
            last_date = int(self.addr["last_date"].decode("utf-8"))
        if self.addr["last_height"] is not None:
            last_height = int(self.addr["last_height"].decode("utf-8"))
        if self.addr["last_date_str"] is not None:
            last_date_str = self.addr["last_date_str"].decode("utf-8")
        return last_daily_cnt, last_date, last_height, last_date_str

    def set_last_addr(self, daily_cnt, dt, height, dtstr):
        self.addr["last_daily_cnt"] = daily_cnt
        self.addr["last_date"] = dt
        self.addr["last_height"] = height
        self.addr["last_date_str"] = dtstr
    '''

    def is_active_address(self, address):
        result = address in self.active_address
        if not result:
            self.active_address.add(address)
        return result

    def reset_active_address(self):
        self.active_address.clear()

    def get_active_address_cnt(self):
        return len(self.active_address)

    def is_send_address(self, address):
        result = address in self.send_address
        if not result:
            self.send_address.add(address)
        return result

    def reset_send_address(self):
        self.send_address.clear()

    def get_send_address_cnt(self):
        return len(self.send_address)

    def is_receive_address(self, address):
        result = address in self.receive_address
        if not result:
            self.receive_address.add(address)
        return result

    def reset_receive_address(self):
        self.receive_address.clear()

    def get_receive_address_cnt(self):
        return len(self.receive_address)

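    # Usage note (added): each is_*_address call doubles as check-and-insert,
    # so a scan job can count unique addresses in one pass, e.g.
    #
    #   if not rdb.is_active_address(addr):   # first sighting in this period
    #       ...
    #   daily_active = rdb.get_active_address_cnt()
    #
    # and call the matching reset_*_address method at the period boundary.
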
    def save_addr(self, address, balance):
        new_balance = balance
        if address in self.zbalance:
            new_balance = self.zbalance.score(address) + balance
            #print("update", self.zbalance.score(address), balance, new_balance)
            #time.sleep(10)
        if new_balance < 0.01:
            del self.zbalance[address]
            #print("check exist", address, address in self.zbalance)
            #time.sleep(10)
            return
        self.zbalance.add({address: new_balance})

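    # Note (added): save_addr accumulates deltas into the "balancev2d" sorted
    # set, so positive amounts are credits and negative amounts are debits;
    # once an address's running balance drops below 0.01 it is removed from
    # the set entirely, which keeps dust addresses out of the balance index.
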
    '''
    def delete_addr(self, config):
        self.addr.clear()
        self.zbalance.clear()
    '''
    def is_in_addr(self, address):
        return address in self.zbalance

    def get_addr_cnt(self):
        return len(self.zbalance)

    '''
    def delete_rv(self, config):
        self.rv.clear()

    def get_last_rv(self):
        last_rv = None
        last_date = None
        last_height = None
        last_date_str = None
        if self.rv["last_rv"] is not None:
            last_rv = float(self.rv["last_rv"].decode("utf-8"))
        if self.rv["last_date"] is not None:
            last_date = int(self.rv["last_date"].decode("utf-8"))
        if self.rv["last_height"] is not None:
            last_height = int(self.rv["last_height"].decode("utf-8"))
        if self.rv["last_date_str"] is not None:
            last_date_str = self.rv["last_date_str"].decode("utf-8")
        return last_rv, last_date, last_height, last_date_str

    def set_last_rv(self, rv, dt, height, dtstr):
        self.rv["last_rv"] = rv
        self.rv["last_date"] = dt
        self.rv["last_height"] = height
        self.rv["last_date_str"] = dtstr
    '''

    def get_all_address(self):
        return self.zbalance.keys()

    def delete_address_data(self, config):
        self.zbalance.clear()

    '''
    def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0):
        if len(address) > 0:
            results = []
            result = {}
            result["address"] = address
            balance = self.zbalance.score(address)
            print(balance)
            if balance is not None:
                result["balance"] = balance
                results.append(result)
            return results

        match_result = None
        if start_balance > 0:
            if end_balance > 0:
                match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False)
            else:
                match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False)
        else:
            if end_balance > 0:
                match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False)

        results = []
        if match_result is not None:
            #print(match_result)
            for addr, balance2 in match_result:
                address = addr.decode('utf-8')
                result = {}
                result["address"] = address
                result["balance"] = balance2
                results.append(result)
                if limit > 0 and len(results) >= limit:
                    break
        return results
    '''
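
# Example usage (added sketch; assumes a Redis server on 127.0.0.1:6379 and
# uses hypothetical address keys, so it is not part of the original module):
if __name__ == "__main__":
    rdb = RedisIf()
    rdb.save_addr("addr_demo_1", 1.5)       # credit 1.5 to a new address
    rdb.save_addr("addr_demo_1", 0.5)       # accumulate: score becomes 2.0
    rdb.save_addr("addr_demo_1", -2.0)      # balance < 0.01, address evicted
    print(rdb.get_addr_cnt())               # -> 0
    rdb.is_active_address("addr_demo_1")    # first sighting, added to the set
    print(rdb.get_active_address_cnt())     # -> 1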