Compare commits

5 Commits
main ... dev

Author SHA1 Message Date
fengche
676ad724b1 feat: coinbus data-collection code 2026-01-16 11:14:55 +08:00
254212a154 Merge branch 'dev' of http://47.129.22.53:22345/lizixuan/coinbus into dev 2026-01-16 10:37:52 +08:00
yyb
53e264584d insert project description 2026-01-16 10:37:13 +08:00
yyb
d10d9e5fbb insert project description 2026-01-16 10:35:46 +08:00
408e40b206 push code 2026-01-16 10:32:27 +08:00
608 changed files with 187990 additions and 9 deletions

119
lyq/Binance_fapi.py Normal file

@@ -0,0 +1,119 @@
import pymysql
import requests
import time
import schedule
from datetime import datetime, timedelta

# MySQL connection helper
def connect_to_db():
    return pymysql.connect(
        host="127.0.0.1",             # database host
        user="root",                  # database user
        password="2GS@bPYcgiMyL14A",  # database password
        database="binance_api",       # database name
        port=4423                     # database port
    )

# Run a single SQL statement and commit it
def execute_query(query, params=None):
    conn = connect_to_db()             # open a connection
    with conn.cursor() as cursor:
        cursor.execute(query, params)  # execute the statement
    conn.commit()                      # commit the transaction
    conn.close()                       # close the connection

# Convert a millisecond timestamp to a Beijing-time (UTC+8) string
def bj_time(timestamp):
    return (datetime.utcfromtimestamp(timestamp / 1000) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')

# Thin client for the Binance USDT-M futures API
class BinanceAPI:
    base_url = "https://fapi.binance.com"  # Binance futures base URL

    @staticmethod
    def get(endpoint, params=None):
        # Send a GET request to the Binance API and return the parsed JSON
        response = requests.get(f"{BinanceAPI.base_url}{endpoint}", params=params)
        return response.json()

# Task 1: fetch funding rates and insert them into the database
def funding_rate():
    # Funding-rate history for the BTC and ETH perpetuals
    btc_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "BTCUSDT"})
    eth_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "ETHUSDT"})
    # Prepare the INSERT statements
    btc_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
                 VALUES ("BTCUSDT", %s, %s)"""
    eth_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
                 VALUES ("ETHUSDT", %s, %s)"""
    # Insert the most recent record for each symbol
    execute_query(btc_sql, (btc_data[-1]['fundingTime'], btc_data[-1]['fundingRate']))
    execute_query(eth_sql, (eth_data[-1]['fundingTime'], eth_data[-1]['fundingRate']))

# Task 2: fetch open interest and insert it into the database
def open_interest():
    # Current open interest for the BTC and ETH perpetuals
    btc_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "BTCUSDT"})
    eth_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "ETHUSDT"})
    # Prepare the INSERT statements
    btc_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
                 VALUES ("BTCUSDT", %s, %s)"""
    eth_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
                 VALUES ("ETHUSDT", %s, %s)"""
    # Insert the latest snapshot for each symbol
    execute_query(btc_sql, (btc_data['time'], btc_data['openInterest']))
    execute_query(eth_sql, (eth_data['time'], eth_data['openInterest']))

# Task 3: fetch the taker long/short ratio and insert it into the database
def long_short_ratio(interval):
    # Taker buy/sell volume ratio for the BTC and ETH perpetuals
    btc_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
        "symbol": "BTCUSDT", "period": interval
    })
    eth_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
        "symbol": "ETHUSDT", "period": interval
    })
    # Prepare the INSERT statements (one table per period)
    btc_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
                  VALUES ("BTCUSDT", %s, %s, %s, %s)"""
    eth_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
                  VALUES ("ETHUSDT", %s, %s, %s, %s)"""
    # Insert the most recent record for each symbol
    execute_query(btc_sql, (btc_data[-1]['timestamp'], btc_data[-1]['buyVol'], btc_data[-1]['sellVol'], btc_data[-1]['buySellRatio']))
    execute_query(eth_sql, (eth_data[-1]['timestamp'], eth_data[-1]['buyVol'], eth_data[-1]['sellVol'], eth_data[-1]['buySellRatio']))

# Register the scheduled jobs
def schedule_jobs():
    # Funding rates at 00:01, 08:01, and 16:01 every day
    schedule.every().day.at("00:01").do(funding_rate)
    schedule.every().day.at("08:01").do(funding_rate)
    schedule.every().day.at("16:01").do(funding_rate)
    # Open interest at the 15th, 25th, 35th, 45th, and 55th second of every minute
    schedule.every().minute.at(":15").do(open_interest)
    schedule.every().minute.at(":25").do(open_interest)
    schedule.every().minute.at(":35").do(open_interest)
    schedule.every().minute.at(":45").do(open_interest)
    schedule.every().minute.at(":55").do(open_interest)
    # Long/short ratio at the 15th second of every minute, once per period (5m, 15m, 30m, ...)
    intervals = ["5m", "15m", "30m", "1h", "2h", "4h", "6h", "12h", "1d"]
    for interval in intervals:
        schedule.every().minute.at(":15").do(long_short_ratio, interval=interval)

# Start the scheduler loop
def run():
    schedule_jobs()             # register the jobs
    while True:
        schedule.run_pending()  # run any jobs that are due
        time.sleep(1)           # poll once per second

if __name__ == "__main__":
    run()  # start the scheduler

115
lyq/CoinmarketCap.py Normal file

@@ -0,0 +1,115 @@
import requests
import pymysql
import time
from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime, timedelta

# API key and request headers
API_KEY = "83bf85c1-1bd8-426a-a043-6b67dad8bda5"
headers = {"X-CMC_PRO_API_KEY": API_KEY}
base_url = "https://pro-api.coinmarketcap.com"
url = f"{base_url}/v1/cryptocurrency/listings/latest"

# MySQL connection settings
db_config = {
    'host': '127.0.0.1',             # database host
    'user': 'root',                  # database user
    'password': '2GS@bPYcgiMyL14A',  # database password
    'database': 'coinmarketcap',     # database name
    'port': 4423                     # database port
}

# Create the table if it does not exist
def create_table():
    connection = pymysql.connect(**db_config)  # connect to the database
    cursor = connection.cursor()               # create a cursor
    # CREATE TABLE statement ("#" starts a line comment in MySQL)
    create_table_query = """
    CREATE TABLE IF NOT EXISTS marketInfo (
        id INT NOT NULL PRIMARY KEY AUTO_INCREMENT,  # auto-increment id
        update_time DATETIME NOT NULL,               # last update time
        symbol CHAR(15) NOT NULL,                    # ticker symbol
        ranks INT NOT NULL,                          # CMC rank
        price DOUBLE NOT NULL,                       # current price
        market_cap DOUBLE NOT NULL,                  # market capitalization
        volume_24h DOUBLE NOT NULL,                  # 24h volume
        volume_change_24h DOUBLE NOT NULL,           # 24h volume change
        percent_change_1h DOUBLE NOT NULL,           # 1h price change
        percent_change_24h DOUBLE NOT NULL,          # 24h price change
        percent_change_7d DOUBLE NOT NULL,           # 7d price change
        percent_change_30d DOUBLE NOT NULL,          # 30d price change
        percent_change_60d DOUBLE NOT NULL,          # 60d price change
        percent_change_90d DOUBLE NOT NULL           # 90d price change
    );
    """
    cursor.execute(create_table_query)  # run the CREATE TABLE statement
    connection.commit()                 # commit
    cursor.close()                      # close the cursor
    connection.close()                  # close the connection

# Convert a UTC timestamp string to Beijing time
def bj_time(utc_time):
    """Convert UTC time to Beijing time (UTC+8)."""
    utc_time = datetime.strptime(utc_time, '%Y-%m-%dT%H:%M:%S.%fZ')  # parse the UTC string
    beijing_time = utc_time + timedelta(hours=8)                     # Beijing is 8 hours ahead of UTC
    return beijing_time.strftime('%Y-%m-%d %H:%M:%S')                # format as a string

# Fetch market data and insert it into the database
def marketcap():
    try:
        # Request the latest listings (top 200) from the CoinMarketCap API
        response = requests.get(url, headers=headers, params={"limit": 200})
        response.raise_for_status()  # raise on HTTP errors
    except requests.RequestException:
        time.sleep(60)               # wait a minute and retry once
        response = requests.get(url, headers=headers, params={"limit": 200})
    data = response.json()           # parse the JSON response
    for item in data['data']:        # iterate over the returned coins
        quote = item['quote']['USD']                      # USD quote block
        update_time = bj_time(quote['last_updated'])      # convert the update time to Beijing time
        symbol = item['symbol']                           # ticker symbol
        ranks = item['cmc_rank']                          # CMC rank
        price = quote['price']                            # price
        market_cap = quote['market_cap']                  # market cap
        volume_24h = quote['volume_24h']                  # 24h volume
        volume_change_24h = quote['volume_change_24h']    # 24h volume change
        percent_change_1h = quote['percent_change_1h']    # 1h price change
        percent_change_24h = quote['percent_change_24h']  # 24h price change
        percent_change_7d = quote['percent_change_7d']    # 7d price change
        percent_change_30d = quote['percent_change_30d']  # 30d price change
        percent_change_60d = quote['percent_change_60d']  # 60d price change
        percent_change_90d = quote['percent_change_90d']  # 90d price change
        # Insert the row into MySQL
        connection = pymysql.connect(**db_config)  # connect to the database
        cursor = connection.cursor()               # create a cursor
        insert_query = """
        INSERT INTO marketInfo (
            update_time, symbol, ranks, price, market_cap, volume_24h,
            volume_change_24h, percent_change_1h, percent_change_24h,
            percent_change_7d, percent_change_30d, percent_change_60d,
            percent_change_90d
        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
        """
        # Execute the INSERT statement
        cursor.execute(insert_query, (
            update_time, symbol, ranks, price, market_cap, volume_24h,
            volume_change_24h, percent_change_1h, percent_change_24h,
            percent_change_7d, percent_change_30d, percent_change_60d,
            percent_change_90d
        ))
        connection.commit()  # commit
        cursor.close()       # close the cursor
        connection.close()   # close the connection

# Scheduled job: run marketcap() every 5 minutes
def schedule_job():
    scheduler = BlockingScheduler()  # blocking scheduler
    scheduler.add_job(marketcap, 'cron', minute='0,5,10,15,20,25,30,35,40,45,50,55')  # every 5 minutes
    scheduler.start()                # start the scheduler

if __name__ == "__main__":
    create_table()   # create the table on startup (if it does not exist)
    schedule_job()   # start the scheduled collection


@@ -0,0 +1,83 @@
import requests
import pymysql
from datetime import datetime
import time

# Fetch quarterly GDP-by-industry data from the BEA API for the given year
def get_bea_data(year):
    url = ("https://apps.bea.gov/api/data?&UserID=146B5757-D9E3-442C-B6AC-ADE9E6B71114&method=GetData&DataSetName=GDPbyIndustry&Year=%s&Industry=ALL&tableID=15&Frequency=Q&ResultFormat=JSON" % year)
    response = requests.get(url)
    return response.json()['BEAAPI']['Results'][0]['Data']

# Insert or update the COVITGDP table from the BEA response
def update_database(cursor, data):
    # Map BEA industry descriptions to database column names
    industry_map = {
        'Agriculture, forestry, fishing, and hunting': 'VAPGDPAFH',
        'Mining': 'VAPGDPM',
        'Construction': 'VAPGDPC',
        'Manufacturing': 'VAPGDPMA',
        'Retail trade': 'VAPGDPR',
        'Wholesale trade': 'VAPGDPW',
        'Utilities': 'VAPGDPU',
        'Transportation and warehousing': 'VAPGDPT',
        'Information': 'VAPGDPI',
        'Finance, insurance, real estate, rental, and leasing': 'VAPGDPFIRL',
        'Professional and business services': 'VAPGDPPBS',
        'Educational services, health care, and social assistance': 'VAPGDPHCSA',
        'Arts, entertainment, recreation, accommodation, and food services': 'VAPGDPAF',
        'Other services, except government': 'CPGDPOSEG',
        'Government': 'Federation',
        'State and local': 'State_local'
    }
    for entry in data:
        year = entry["Year"]
        quarter = entry["Quarter"]
        new_time = f"{year}Q{quarter}"
        industry = entry["IndustrYDescription"]
        value = entry["DataValue"]
        if industry in industry_map:
            column = industry_map[industry]
            # Does a row for this quarter already exist?
            cursor.execute("SELECT quarterly FROM COVITGDP WHERE quarterly = %s", (new_time,))
            result = cursor.fetchone()
            if result:
                # Update the column only when the value has changed
                cursor.execute(f"SELECT {column} FROM COVITGDP WHERE quarterly = %s", (new_time,))
                old_value = cursor.fetchone()[0]
                if old_value != value:
                    cursor.execute(f"UPDATE COVITGDP SET {column} = %s WHERE quarterly = %s", (value, new_time))
                else:
                    print(f"No update needed for {column} for {new_time}")
            else:
                # No row for this quarter yet: create it
                if column == 'VAPGDPAFH':
                    cursor.execute("INSERT INTO COVITGDP (quarterly, VAPGDPAFH) VALUES (%s, %s)", (new_time, value))
                else:
                    cursor.execute(f"INSERT INTO COVITGDP (quarterly, {column}) VALUES (%s, %s) ON DUPLICATE KEY UPDATE {column} = VALUES({column})", (new_time, value))

def main():
    years = 2025
    while True:
        try:
            db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
            cursor = db.cursor()
            data = get_bea_data(years)
            update_database(cursor, data)
            db.commit()
        except pymysql.MySQLError as e:
            print(f"Database connection error: {e}")
            break
        except Exception as e:
            print(f"An error occurred: {e}")
        finally:
            if 'cursor' in locals():
                cursor.close()
            if 'db' in locals():
                db.close()
        time.sleep(86400)  # wait a day before the next refresh

if __name__ == "__main__":
    main()


@@ -0,0 +1,101 @@
import time
import requests
import json
import pymysql
from datetime import datetime

# Function to fetch data from BLS API
def fetch_data(series_ids):
    headers = {'Content-type': 'application/json'}
    data = json.dumps({"seriesid": series_ids, "startyear": "2024", "endyear": "2024"})
    try:
        response = requests.post('https://api.bls.gov/publicAPI/v2/timeseries/data/', data=data, headers=headers)
        response.raise_for_status()  # Raise exception for HTTP errors
        return json.loads(response.text)
    except requests.exceptions.RequestException as e:
        return None

# Function to convert BLS period format to datetime
def convert_date(year, period):
    date_string = f"{year}/{period.replace('M', '')}/01"
    return datetime.strptime(date_string, '%Y/%m/%d')

# Function to insert data into MySQL database
def insert_data(cursor, table_name, date, name, value):
    cursor.execute(
        f"INSERT INTO {table_name}(date, name, value) VALUES (%s, %s, %s)",
        (date, name, value)
    )

# Function to process series data and insert into the database
def process_series_data(json_data, table_name, names):
    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
    cursor = db.cursor()
    for i, series in enumerate(json_data['Results']['series']):
        for data_point in sorted(series['data'], key=lambda x: (x['year'], x['period'])):
            year = data_point['year']
            period = data_point['period']
            value = data_point['value']
            date = convert_date(year, period)
            name = names[i] if i < len(names) else f"Unknown {i}"
            cursor.execute(f"SELECT COUNT(*) FROM {table_name} WHERE date = %s AND name = %s", (date, name))
            if cursor.fetchone()[0] == 0:
                insert_data(cursor, table_name, date, name, value)
    db.commit()
    db.close()

# Function to merge JSON data
def merge_json_data(json_data_list):
    merged_series = []
    for json_data in json_data_list:
        if json_data and 'Results' in json_data and 'series' in json_data['Results']:
            merged_series.extend(json_data['Results']['series'])
    return {'Results': {'series': merged_series}}

# Main script logic
while True:
    series_ids1 = [
        'CUUR0000SA0', 'CUUR0000SAF1', 'CUUR0000SAF11', 'CUUR0000SAF111', 'CUUR0000SAF112', 'CUUR0000SEFJ',
        'CUUR0000SAF113', 'CUUR0000SAF114', 'CUUR0000SEFV', 'CUUR0000SA0E', 'CUUR0000SACE', 'CUUR0000SEHE01',
        'CUUR0000SETB', 'CUUR0000SETB01', 'CUUR0000SEHF', 'CUUR0000SEHF01', 'CUUR0000SEHF02'
    ]
    series_ids2 = [
        'CUUR0000SA0L1E', 'CUUR0000SACL1E', 'CUUR0000SAA', 'CUUR0000SETA01', 'CUUR0000SETA02', 'CUUR0000SAM1',
        'CUUR0000SAF116', 'CUUR0000SEGA', 'CUUR0000SASLE', 'CUUR0000SAH1', 'CUUR0000SEHA', 'CUUR0000SEHC',
        'CUUR0000SAM2', 'CUUR0000SEMC01', 'CUUR0000SEMD01', 'CUUR0000SAS4', 'CUUR0000SETD', 'CUUR0000SETE',
        'CUUR0000SETG01'
    ]
    series_ids3 = [s.replace('CUUR', 'CUSR') for s in series_ids1]
    series_ids4 = [s.replace('CUUR', 'CUSR') for s in series_ids2]
    json_data1 = fetch_data(series_ids1)
    json_data2 = fetch_data(series_ids2)
    json_data3 = fetch_data(series_ids3)
    json_data4 = fetch_data(series_ids4)
    combined_json_data_NSA = merge_json_data([json_data1, json_data2])
    combined_json_data_SA = merge_json_data([json_data3, json_data4])
    names = [
        'All items', 'Food', 'Food at home', 'Cereals and bakery products', 'Meats, poultry, fish, and eggs',
        'Dairy and related products', 'Fruits and vegetables', 'Nonalcoholic beverages and beverage materials',
        'Food away from home', 'Energy', 'Energy commodities', 'Fuel oil', 'Motor fuel', 'Gasoline (all types)',
        'Energy services', 'Electricity', 'Utility (piped) gas service', 'All items less food and energy',
        'Commodities less food and energy commodities', 'Apparel', 'New vehicles', 'Used cars and trucks',
        'Medical care commodities', 'Alcoholic beverages', 'Tobacco and smoking products',
        'Services less energy services', 'Shelter', 'Rent of primary residence', "Owners equivalent rent of residences",
        'Medical care services', "Physicians services", 'Hospital services', 'Transportation services',
        'Motor vehicle maintenance and repair', 'Motor vehicle insurance', 'Airline fares'
    ]
    if combined_json_data_NSA and 'Results' in combined_json_data_NSA and 'series' in combined_json_data_NSA['Results']:
        process_series_data(combined_json_data_NSA, 'CPI_NSA', names)
    if combined_json_data_SA and 'Results' in combined_json_data_SA and 'series' in combined_json_data_SA['Results']:
        process_series_data(combined_json_data_SA, 'CPI_SA', names)
    time.sleep(86400)


@@ -0,0 +1,293 @@
import time
import pymysql
import requests
from bs4 import BeautifulSoup
from w3lib.html import remove_tags
import datetime
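# Scrape the Federal Reserve's H.4.1 release (Factors Affecting Reserve Balances) and,
# whenever a new release date appears, write each of its tables into MySQL.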
while True:
try:
# now_time = datetime.datetime.now()
# next_time = now_time + datetime.timedelta(days=+1)
# next_year = next_time.date().year
# next_month = next_time.date().month
# next_day = next_time.date().day
# next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 20:45:01","%Y-%m-%d %H:%M:%S")
# timer_start_time = (next_time - now_time).total_seconds()
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
page = requests.get("https://www.federalreserve.gov/releases/h41/current/default.htm")
page=page.text
page = BeautifulSoup(page, 'html.parser')
date = page.find_all('div', class_="dates")
# Pull the release-date block from the page
date = remove_tags(str(date))
# Strip the extra characters from the date string
date = date.replace("[", "")
date = date.replace("]", "")
date = date.replace("Release Date:", "")
date = date.replace(",","")
date = date.replace(" ","")
date = date.strip()
date = date.lstrip()
date1 = date[-4:]
date2 = date[-6:-4]
# Convert the month name to a number and rebuild the date as YYYY/MM/DD
date = date.replace("January", "/01/")
date = date.replace("February", "/02/")
date = date.replace("March", "/03/")
date = date.replace("April", "/04/")
date = date.replace("May", "/05/")
date = date.replace("June", "/06/")
date = date.replace("July", "/07/")
date = date.replace("August", "/08/")
date = date.replace("September", "/09/")
date = date.replace("October", "/10/")
date = date.replace("November", "/11/")
date = date.replace("December", "/12/")
date = date1+date[0:4]+date2
date_string = date
format = '%Y/%m/%d'
from datetime import datetime
date = datetime.strptime(date_string, format)
sql = "select time from CHAFRNFRAA order by id desc limit 1"
cursor.execute(sql)
db.commit()
old_time = cursor.fetchall()
ole_time=old_time[0][0]
except:
time.sleep(30)
continue
# Only parse and insert the release when its date is newer than the latest stored one
if date != ole_time:
page = page.find_all('span',style="font-family:'Courier New'; font-weight:bold")
page = remove_tags(str(page))
page = page.replace(",", "")
page = page.replace("[", "")
page = page.replace("]", "")
page=page.split()
symbol= ''
list=[]
# Rebuild signed values: a leading '-' token attaches to the next number, '+' and '(0)' are dropped, '...' becomes NULL
for i in page:
if i =='-':
symbol= '-'
continue
if i =='+':
continue
if i =='(0)':
continue
if i =='...':
i='NULL'
value= symbol + i
symbol= ''
list+=[value]
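# Map the parsed values, in page order, onto the H.4.1 tables; the first block is the
# factors supplying reserve funds (table FARBODI).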
sql = "insert into FARBODI(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE)values(%s,%s,%s,%s,%s,%s)"
data_list=[('Reserve Bank credit',date,list[0],list[1],list[2],list[3]),
('Securities held outright', date, list[4], list[5], list[6], list[7]),
('U.S. Treasury securities', date, list[8], list[9], list[10], list[11]),
('Bills', date, list[12], list[13], list[14], list[15]),
('Notes and bonds, nominal', date, list[16], list[17], list[18], list[19]),
('Notes and bonds, inflation-indexed', date, list[20], list[21], list[22], list[23]),
('Inflation compensation', date, list[24], list[25], list[26], list[27]),
('Federal agency debt securities', date, list[28], list[29], list[30], list[31]),
('Mortgage-backed securities', date, list[32], list[33], list[34], list[35]),
('Uposho', date, list[36], list[37], list[38], list[39]),
('Udosho', date, list[40], list[41], list[42], list[43]),
('Repurchase agreements', date, list[44], list[45], list[46], list[47]),
('Foreign official', date, list[48], list[49], list[50], list[51]),
('Others', date, list[52], list[53], list[54], list[55]),
('Loans', date, list[56], list[57], list[58], list[59]),
('Primary credit', date, list[60], list[61], list[62], list[63]),
('Secondary credit', date, list[64], list[65], list[66], list[67]),
('Seasonal credit', date, list[68], list[69], list[70], list[71]),
('PPPLF', date, list[72], list[73], list[74], list[75]),
('Bank Term Funding Program', date, list[76], list[77], list[78], list[79]),
('Other credit extensions', date, list[80], list[81], list[82], list[83]),
('NphoMFLLC(MSLP)', date, list[84], list[85], list[86], list[87]),
('Net portfolio holdings of MLF LLC', date, list[88], list[89], list[90], list[91]),
('Net portfolio holdings of TALF II LLC',date, list[92], list[93], list[94], list[95]),
('Float',date, list[96], list[97], list[98], list[99]),
('Central bank liquidity swaps',date, list[100], list[101], list[102], list[103]),
('Other Federal Reserve assets',date, list[104], list[105], list[106], list[107]),
('Foreign currency denominated assets',date, list[108], list[109], list[110], list[111]),
('Gold stock',date, list[112], list[113], list[114], list[115]),
('Special drawing rights certificate account',date, list[116], list[117], list[118], list[119]),
('Treasury currency outstanding',date, list[120], list[121], list[122], list[123]),
('Total factors supplying reserve funds',date, list[124], list[125], list[126], list[127])]
cursor.executemany(sql,data_list)
sql2 = "insert into FARBODIC(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE)values(%s,%s,%s,%s,%s,%s)"
data_list2=[('Currency in circulation',date, list[128], list[129], list[130], list[131]),
('Reverse repurchase agreements',date, list[132], list[133], list[134], list[135]),
('Foreign official and international accounts',date, list[136], list[137], list[138], list[139]),
('Others',date, list[140], list[141], list[142], list[143]),
('Treasury cash holdings',date, list[144], list[145], list[146], list[147]),
('DwFRBotrb',date, list[148], list[149], list[150], list[151]),
('Tdhbdi',date, list[152], list[153], list[154], list[155]),
('U.S. Treasury, General Account',date, list[156], list[157], list[158], list[159]),
('Foreign official',date, list[160], list[161], list[162], list[163]),
('Other',date, list[164], list[165], list[166], list[167]),
('Treasury contributions to credit facilities',date, list[168], list[169], list[170], list[171]),
('Other liabilities and capital',date, list[172], list[173], list[174], list[175]),
('Tfotrbarf',date, list[176], list[177], list[178], list[179]),
('RbwFRB',date, list[180], list[181], list[182], list[183])]
cursor.executemany(sql2,data_list2)
sql3 = "insert into MI(name,time,THIS_AVG_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR,THIS_VALUE)values(%s,%s,%s,%s,%s,%s)"
data_list3=[('Shicffoaia',date, list[184], list[185], list[186], list[187]),
('Marketable U.S. Treasury securities',date, list[188], list[189], list[190], list[191]),
('Fadambs',date, list[192], list[193], list[194], list[195]),
('Other securities',date, list[196], list[197], list[198], list[199]),
('Securities lent to dealers',date, list[200], list[201], list[202], list[203]),
('Overnight facility',date, list[204], list[205], list[206], list[207]),
('U.S. Treasury securities',date, list[208], list[209], list[210], list[211]),
('Federal agency debt securities',date,list[212], list[213], list[214], list[215])]
cursor.executemany(sql3,data_list3)
sql4 = "insert into MDOSLASOAAL(name,time,D15,D16_D90,D91_Y1,Y1_Y5,Y5_Y10,Y10_,TOTAL)values(%s,%s,%s,%s,%s,%s,%s,%s,%s)"
data_list4=[('Loans', date, list[216], list[217], list[218], list[219], list[220], list[221], list[222]),
('USTsH', date, list[223], list[224], list[225], list[226], list[227], list[228], list[229]),
('USTsWc', date, list[230], list[231], list[232], list[233], list[234], list[235], list[236]),
('FadsH', date, list[237], list[238], list[239], list[240], list[241], list[242], list[243]),
('FadsWc', date, list[244], list[245], list[246], list[247], list[248], list[249], list[250]),
('MbsH', date, list[251], list[252], list[253], list[254], list[255], list[256], list[257]),
('MbsWc', date, list[258], list[259], list[260], list[261], list[262], list[263], list[264]),
('LphbMFLLC(MSLP)', date, list[265], list[266], list[267], list[268], list[269], list[270], list[271]),
('Repurchase agreements', date, list[272], list[273], list[274], list[275], list[276], list[277], list[278]),
('Central bank liquidity swaps', date, list[279], list[280], list[281], list[282], list[283], list[284], list[285]),
('Reverse repurchase agreements', date, list[286], list[287], list[288], list[289], list[290], list[291], list[292]),
('Term deposits', date, list[293], list[294], list[295], list[296], list[297], list[298], list[299])]
cursor.executemany(sql4,data_list4)
sql5 = "insert into SIOMS(name,time,value)values(%s,%s,%s)"
data_list5 = [('Mortgage-backed securities held outright', date, list[300]),
('Residential mortgage-backed securities', date, list[301]),
('Commercial mortgage-backed securities', date, list[302]),
('Commitments to buy mortgage-backed securities', date, list[303]),
('Commitments to sell mortgage-backed securities', date, list[304]),
('Cash and cash equivalents', date, list[305])]
cursor.executemany(sql5, data_list5)
sql6 = "insert into IOPAOCFL(name,time,OPAPTLLC,UPFAPT,FAAOA,TOTAL)values(%s,%s,%s,%s,%s,%s)"
data_list6 =[('MS Facilities LLC (Main Street Lending Program)',date, list[306], list[307], list[308], list[309])]
cursor.executemany(sql6, data_list6)
sql7 = "insert into CSOCOAFRB(name,time,EFC,THIS_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR)values(%s,%s,%s,%s,%s,%s)"
data_list7 = [('Gold certificate account', date, 'NULL', list[310], list[311], list[312]),
('Special drawing rights certificate account', date, 'NULL', list[313], list[314], list[315]),
('Coin', date, 'NULL', list[316], list[317], list[318]),
('Supadraal', date, 'NULL', list[319], list[320], list[321]),
('Securities held outright', date, 'NULL', list[322], list[323], list[324]),
('U.S. Treasury securities', date, 'NULL', list[325], list[326], list[327]),
('Bills', date, 'NULL', list[328], list[329], list[330]),
('Notes and bonds, nominal', date, 'NULL', list[331], list[332], list[333]),
('Notes and bonds, inflation-indexed', date, 'NULL', list[334], list[335], list[336]),
('Inflation compensation', date, 'NULL', list[337], list[338], list[339]),
('Federal agency debt securities', date, 'NULL', list[340], list[341], list[342]),
('Mortgage-backed securities', date, 'NULL', list[343], list[344], list[345]),
('Uposho', date, 'NULL', list[346], list[347], list[348]),
('Udosho', date, 'NULL', list[349], list[350], list[351]),
('Repurchase agreements', date, 'NULL', list[352], list[353], list[354]),
('Loans', date, 'NULL', list[355], list[356], list[357]),
('NphoMFLLC(MSLP)', date, 'NULL', list[358], list[359], list[360]),
('NphoMLFLLC', date, 'NULL', list[361], list[362], list[363]),
('Net portfolio holdings of TALF II LLC', date, 'NULL', list[364], list[365], list[366]),
('Items in process of collection', date, 'NULL', list[367], list[368], list[369]),
('Bank premises', date, 'NULL', list[370], list[371], list[372]),
('Central bank liquidity swaps', date, 'NULL', list[373], list[374], list[375]),
('Foreign currency denominated assets', date, 'NULL', list[376], list[377], list[378]),
('Other assets', date, 'NULL', list[379], list[380], list[381]),
('Total assets', date, 'NULL', list[382], list[383], list[384])]
cursor.executemany(sql7, data_list7)
sql8 = "insert into CSOCOAFRBC(name,time,EFC,THIS_VALUE,CHANGE_LASTWEEK,CHANGE_LASTYEAR)values(%s,%s,%s,%s,%s,%s)"
data_list8 = [('FRnnoFBh', date, 'NULL', list[385], list[386], list[387]),
('Reverse repurchase agreements', date, 'NULL', list[388], list[389], list[390]),
('Deposits', date, 'NULL', list[391], list[392], list[393]),
('Term deposits held by depository institutions', date, 'NULL', list[394], list[395], list[396]),
('Other deposits held by depository institutions', date, 'NULL', list[397], list[398], list[399]),
('U.S. Treasury, General Account', date, 'NULL', list[400], list[401], list[402]),
('Foreign official', date, 'NULL', list[403], list[404], list[405]),
('Other', date, 'NULL', list[406], list[407], list[408]),
('Deferred availability cash items', date, 'NULL', list[409], list[410], list[411]),
('Treasury contributions to credit facilities', date, 'NULL', list[412], list[413], list[414]),
('Other liabilities and accrued dividends', date, 'NULL', list[415], list[416], list[417]),
('Total liabilities', date, 'NULL', list[418], list[419], list[420]),
('Capital paid in', date, 'NULL', list[421], list[422], list[423]),
('Surplus', date, 'NULL', list[424], list[425], list[426]),
('Other capital accounts', date, 'NULL', list[427], list[428], list[429]),
('Total capital', date, 'NULL', list[430], list[431], list[432])]
cursor.executemany(sql8, data_list8)
sql9 = "insert into SOCOEFRB(name,time,TOTAL,Boston,NewYork,Philadelphia,Cleveland,Richmond,Atlanta,Chicago,St_Louis,Minneapolis,Kansas_City,Dallas,San_Francisco)values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
data_list9=[('Gcasdrc', date, list[433], list[434], list[435], list[436], list[437], list[438], list[439], list[440], list[441], list[442], list[443], list[444], list[445]),
('Coin', date, list[446], list[447], list[448], list[449], list[450], list[451], list[452], list[453], list[454], list[455], list[456], list[457], list[458]),
('Supadraal', date, list[459], list[460], list[461], list[462], list[463], list[464], list[465], list[466], list[467], list[468], list[469], list[470], list[471]),
('NphoMFLLC(MSLP)', date, list[472], list[473], list[474], list[475], list[476], list[477], list[478], list[479], list[480], list[481], list[482], list[483], list[484]),
('Central bank liquidity swaps', date, list[485], list[486], list[487], list[488], list[489], list[490], list[491], list[492], list[493], list[494], list[495], list[496], list[497]),
('Foreign currency denominated assets', date, list[498], list[499], list[500], list[501], list[502], list[503], list[504], list[505], list[506], list[507], list[508], list[509], list[510]),
('Other assets', date, list[511], list[512], list[513], list[514], list[515], list[516], list[517], list[518], list[519], list[520], list[521], list[522], list[523]),
('Interdistrict settlement account', date, list[524], list[525], list[526], list[527], list[528], list[529], list[530], list[531], list[532], list[533], list[534], list[535], list[536]),
('Total assets', date, list[537], list[538], list[539], list[540], list[541], list[542], list[543], list[544], list[545], list[546], list[547], list[548], list[549])]
cursor.executemany(sql9, data_list9)
sql10 = "insert into SOCOEFRBC(name,time,TOTAL,Boston,NewYork,Philadelphia,Cleveland,Richmond,Atlanta,Chicago,St_Louis,Minneapolis,Kansas_City,Dallas,San_Francisco)values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
data_list10 = [('Federal Reserve notes, net', date, list[550], list[551], list[552], list[553], list[554], list[555], list[556], list[557], list[558], list[559], list[560], list[561], list[562]),
('Reverse repurchase agreements', date, list[563], list[564], list[565], list[566], list[567], list[568], list[569], list[570], list[571], list[572], list[573], list[574], list[575]),
('Deposits', date, list[576], list[577], list[578], list[579], list[580], list[581], list[582], list[583], list[584], list[585], list[586], list[587], list[588]),
('Depository institutions', date, list[589], list[590], list[591], list[592], list[593], list[594], list[595], list[596], list[597], list[598], list[599], list[600], list[601]),
('U.S. Treasury, General Account', date, list[602], list[603], list[604], list[605], list[606], list[607], list[608], list[609], list[610], list[611], list[612], list[613], list[614]),
('Foreign official', date, list[615], list[616], list[617], list[618], list[619], list[620], list[621], list[622], list[623], list[624], list[625], list[626], list[627]),
('Other', date, list[628], list[629], list[630], list[631], list[632], list[633], list[634], list[635], list[636], list[637], list[638], list[639], list[640]),
('Earnings remittances due to the U.S. Treasury', date, list[641], list[642], list[643], list[644], list[645], list[646], list[647], list[648], list[649], list[650], list[651], list[652], list[653]),
('Treasury contributions to credit facilities', date, list[654], list[655], list[656], list[657], list[658], list[659], list[660], list[661], list[662], list[663], list[664], list[665], list[666]),
('Other liabilities and accrued dividends', date, list[667], list[668], list[669], list[670], list[671], list[672], list[673], list[674], list[675], list[676], list[677], list[678], list[679]),
('Total liabilities', date, list[680], list[681], list[682], list[683], list[684], list[685], list[686], list[687], list[688], list[689], list[690], list[691], list[692]),
('Capital paid in', date, list[693], list[694], list[695], list[696], list[697], list[698], list[699], list[700], list[701], list[702], list[703], list[704], list[705]),
('Surplus', date, list[706], list[707], list[708], list[709], list[710], list[711], list[712], list[713], list[714], list[715], list[716], list[717], list[718]),
('Other capital', date, list[719], list[720], list[721], list[722], list[723], list[724], list[725], list[726], list[727], list[728], list[729], list[730], list[731]),
('Total liabilities and capital', date, list[732], list[733], list[734], list[735], list[736], list[737], list[738], list[739], list[740], list[741], list[742], list[743], list[744])]
cursor.executemany(sql10, data_list10)
sql11 = "insert into CHAFRNFRAA(name,time,value)values(%s,%s,%s)"
data_list11 = [('Federal Reserve notes outstanding', date, list[745]),
('LNhbFBnstc', date, list[746]),
('Federal Reserve notes to be collateralized', date, list[747]),
('Collateral held against Federal Reserve notes', date, list[748]),
('Gold certificate account', date, list[749]),
('Special drawing rights certificate account', date, list[750]),
('UTadambsp', date, list[751]),
('Other assets pledged', date, list[752]),
('TUTadambs', date, list[753]),
('LFvosurra', date, list[754]),
('UTadambsetbp', date, list[755])]
cursor.executemany(sql11, data_list11)
db.commit()
else:
time.sleep(21600)
# time.sleep(timer_start_time)


@@ -0,0 +1,98 @@
import requests
import pymysql
from datetime import datetime
from w3lib.html import remove_tags
import pandas as pd
import time
# Parse the flattened TIC table text into a DataFrame
def parse_treasury_data(data):
    # Locate the header row
    header_index = data.index("Country")
    columns = data[header_index:header_index + 14]  # "Country" plus 13 monthly columns
    rows = data[header_index + 14:]
    result = []
    i = 0
    while i < len(rows):
        # Re-join multi-word country names
        country_parts = []
        while i < len(rows) and not rows[i].replace('.', '', 1).isdigit():
            country_parts.append(rows[i])
            i += 1
        country = " ".join(country_parts).replace(",", "")
        # Take the 13 monthly values
        values = rows[i:i + 13]
        i += 13
        if len(values) == 13:
            result.append([country] + values)
    # Build a DataFrame
    df = pd.DataFrame(result, columns=columns)
    # =================== Clean up row labels ===================
    rename_map = {
        "Of Which: Foreign Official": "Foreign Official",
        "Of Which: Foreign Official Treasury Bills": "Treasury Bills",
        "Of Which: Foreign Official T-Bonds &amp; Notes": "T-Bonds & Notes"
    }
    df["Country"] = df["Country"].replace(rename_map)
    return df

def run_job():
    print("=== Start scraping and updating the database ===")
    # =================== Scrape the page =====================
    page = requests.get("https://ticdata.treasury.gov/resource-center/data-chart-center/tic/Documents/slt_table5.html")
    page = remove_tags(str(page.text))
    page = page.split()
    df = parse_treasury_data(page)
    # =================== Connect to the database =====================
    db = pymysql.connect(
        host="127.0.0.1",
        user="root",
        password="2GS@bPYcgiMyL14A",
        database="Macroeconomics",
        port=4423
    )
    cursor = db.cursor()
    # Latest date already stored in the database
    cursor.execute("SELECT date FROM FBI ORDER BY date DESC LIMIT 1")
    result = cursor.fetchone()
    latest_date_in_db = result[0] if result else None  # datetime or None
    # =================== Back-fill missing months =====================
    for col in df.columns[1:]:  # iterate over the monthly columns
        col_date = datetime.strptime(col, "%Y-%m")
        # Skip months that are already in the database
        if latest_date_in_db and col_date <= latest_date_in_db:
            continue
        print(f"Inserting data for {col}...")
        insert_sql = "INSERT INTO FBI (date, name, value) VALUES (%s, %s, %s)"
        for _, row in df.iterrows():
            country = row["Country"]
            value = row[col]
            cursor.execute(insert_sql, (col_date.strftime("%Y-%m-01"), country, value))
        db.commit()
        print(f"{col} inserted")
    cursor.close()
    db.close()
    print("=== Job finished ===\n")

# =================== Run on a loop =====================
if __name__ == "__main__":
    while True:
        run_job()
        print("Sleeping for 21600 seconds (6 hours)...\n")
        time.sleep(21600)  # 6 hours

89
lyq/Macroeconomic_FER.py Normal file

@@ -0,0 +1,89 @@
import time
import requests
import pymysql
from bs4 import BeautifulSoup
from w3lib.html import remove_tags
import datetime
# Scrape the last two monthly values from the Fed's international summary statistics table
# and keep the FER table up to date.
while True:
    try:
        # now_time = datetime.datetime.now()
        # next_time = now_time + datetime.timedelta(days=+1)
        # next_year = next_time.date().year
        # next_month = next_time.date().month
        # next_day = next_time.date().day
        # next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 20:30:01","%Y-%m-%d %H:%M:%S")
        # timer_start_time = (next_time - now_time).total_seconds()
        page = requests.get("https://www.federalreserve.gov/data/intlsumm/current.htm")
        page = page.text
        page = BeautifulSoup(page, 'html.parser')
        page1 = page.find_all('th', class_="colorrev")
        page = page.find_all('td', class_="shadedata1")
        # Latest value and its month label
        value1 = remove_tags(str(page[-1]))
        value1 = value1.replace(",", "")
        value1 = value1.replace(" ", "")
        date1 = remove_tags(str(page1[-1]))
        date1 = date1.replace(" ", "")
        date1 = date1.replace("/r", "")
        date1 = date1.replace("/p", "")
        date1 = date1[-4:] + date1[0:3]
        # Convert the month abbreviation into a YYYY/M/01 string
        date1 = date1.replace("Jan", "/1/01")
        date1 = date1.replace("Feb", "/2/01")
        date1 = date1.replace("Mar", "/3/01")
        date1 = date1.replace("Apr", "/4/01")
        date1 = date1.replace("May", "/5/01")
        date1 = date1.replace("Jun", "/6/01")
        date1 = date1.replace("Jul", "/7/01")
        date1 = date1.replace("Aug", "/8/01")
        date1 = date1.replace("Sep", "/9/01")
        date1 = date1.replace("Oct", "/10/01")
        date1 = date1.replace("Nov", "/11/01")
        date1 = date1.replace("Dec", "/12/01")
        format1 = '%Y/%m/%d'
        # Previous month's value and label (may have been revised)
        value2 = remove_tags(str(page[-2]))
        value2 = value2.replace(",", "")
        value2 = value2.replace(" ", "")
        date2 = remove_tags(str(page1[-2]))
        date2 = date2.replace(" ", "")
        date2 = date2.replace("/r", "")
        date2 = date2.replace("/p", "")
        date2 = date2[-4:] + date2[0:3]
        date2 = date2.replace("Jan", "/1/01")
        date2 = date2.replace("Feb", "/2/01")
        date2 = date2.replace("Mar", "/3/01")
        date2 = date2.replace("Apr", "/4/01")
        date2 = date2.replace("May", "/5/01")
        date2 = date2.replace("Jun", "/6/01")
        date2 = date2.replace("Jul", "/7/01")
        date2 = date2.replace("Aug", "/8/01")
        date2 = date2.replace("Sep", "/9/01")
        date2 = date2.replace("Oct", "/10/01")
        date2 = date2.replace("Nov", "/11/01")
        date2 = date2.replace("Dec", "/12/01")
        format2 = '%Y/%m/%d'
        from datetime import datetime
        date1 = datetime.strptime(date1, format1)
        date2 = datetime.strptime(date2, format2)
        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
        cursor = db.cursor()
        sql = "select date from FER order by date desc limit 1"
        cursor.execute(sql)
        db.commit()
        ole_time = cursor.fetchall()
        ole_time = ole_time[0][0]
        # Refresh the previous month's value in case it was revised
        date2 = "'" + str(date2) + "'"
        sql = "update FER set FER= %s where date=%s" % (value2, date2)
        cursor.execute(sql)
        db.commit()
        # Insert the newest month if it is not in the table yet
        if date1 != ole_time:
            sql = "insert into FER(date,FER)values('%s','%s')" % (date1, value1 + '*')
            cursor.execute(sql)
            db.commit()
        db.close()
        # time.sleep(timer_start_time)
        time.sleep(21600)
    except:
        time.sleep(30)
        continue


@@ -0,0 +1,252 @@
import time
from full_fred.fred import Fred
import pymysql
import requests
from datetime import datetime
from bs4 import BeautifulSoup
from w3lib.html import remove_tags
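# Pull the latest policy and money-market rates from FRED (DFEDTARU, DFEDTARL, FEDFUNDS,
# IORB, RRPONTSYAWARD, SOFR), the San Francisco Fed proxy funds rate CSV, the New York Fed
# repo operations API, and global-rates.com (LIBOR), and append any new observations to
# the InterestRate table.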
while True:
fred=Fred('example_key.txt')
fred.set_api_key_file('example_key.txt')
DFEDTARU = fred.get_series_df('DFEDTARU')
DFEDTARL = fred.get_series_df('DFEDTARL')
FEDFUNDS = fred.get_series_df('FEDFUNDS')
IORB = fred.get_series_df('IORB')
RRPONTSYAWARD = fred.get_series_df('RRPONTSYAWARD')
SOFR = fred.get_series_df('SOFR')
list_date1 = DFEDTARU['date']
list_value1 = DFEDTARU['value']
list_date2 = DFEDTARL['date']
list_value2 = DFEDTARL['value']
list_date3 = FEDFUNDS['date']
list_value3 = FEDFUNDS['value']
list_date4 = IORB['date']
list_value4 = IORB['value']
list_date5 = RRPONTSYAWARD['date']
list_value5 = RRPONTSYAWARD['value']
list_date6 = SOFR['date']
list_value6 = SOFR['value']
date1 = []
value1 = []
date2 = []
value2 = []
date3 = []
value3 = []
date4 = []
value4 = []
date5 = []
value5 = []
date6 = []
value6 = []
for i in list_date1:
date1 += [i]
for i in list_value1:
value1 += [i]
for i in list_date2:
date2 += [i]
for i in list_value2:
value2 += [i]
for i in list_date3:
date3 += [i]
for i in list_value3:
value3 += [i]
for i in list_date4:
date4 += [i]
for i in list_value4:
value4 += [i]
for i in list_date5:
date5 += [i]
for i in list_value5:
value5 += [i]
for i in list_date6:
date6 += [i]
for i in list_value6:
value6 += [i]
date1 = date1[-1]
value1 = value1[-1]
date2 = date2[-1]
value2 = value2[-1]
date3 = date3[-1]
value3 = value3[-1]
date4 = date4[-1]
value4 = value4[-1]
date5 = date5[-1]
value5 = value5[-1]
date6 = date6[-1]
value6 = value6[-1]
date1 = date1.replace('-', '/')
date_string = date1
format = '%Y/%m/%d'
date1 = datetime.strptime(date_string, format)
date2 = date2.replace('-', '/')
date_string = date2
format = '%Y/%m/%d'
date2 = datetime.strptime(date_string, format)
date3 = date3.replace('-', '/')
date_string = date3
format = '%Y/%m/%d'
date3 = datetime.strptime(date_string, format)
date4 = date4.replace('-', '/')
date_string = date4
format = '%Y/%m/%d'
date4 = datetime.strptime(date_string, format)
date5 = date5.replace('-', '/')
date_string = date5
format = '%Y/%m/%d'
date5 = datetime.strptime(date_string, format)
date6 = date6.replace('-', '/')
date_string = date6
format = '%Y/%m/%d'
date6 = datetime.strptime(date_string, format)
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
sql = "select date from InterestRate where name='DFEDTARU'"
cursor.execute(sql)
db.commit()
DFEDTARU_old_time = cursor.fetchall()
DFEDTARU_old_time=DFEDTARU_old_time[-1][0]
if DFEDTARU_old_time != date1 :
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date1, 'DFEDTARU', value1)
cursor.execute(sql)
db.commit()
sql2 = "select date from InterestRate where name='DFEDTARL'"
cursor.execute(sql2)
db.commit()
DFEDTARL_old_time = cursor.fetchall()
DFEDTARL_old_time=DFEDTARL_old_time[-1][0]
if DFEDTARL_old_time != date2 :
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date2, 'DFEDTARL', value2)
cursor.execute(sql)
db.commit()
sql3 = "select date from InterestRate where name='FEDFUNDS'"
cursor.execute(sql3)
db.commit()
FEDFUNDS_old_time = cursor.fetchall()
FEDFUNDS_old_time=FEDFUNDS_old_time[-1][0]
if FEDFUNDS_old_time != date3 :
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date3, 'FEDFUNDS', value3)
cursor.execute(sql)
db.commit()
sql4 = "select date from InterestRate where name='IORB'"
cursor.execute(sql4)
db.commit()
IORB_old_time = cursor.fetchall()
IORB_old_time=IORB_old_time[-1][0]
if IORB_old_time != date4 :
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date4, 'IORB', value4)
cursor.execute(sql)
db.commit()
sql5 = "select date from InterestRate where name='RRPONTSYAWARD'"
cursor.execute(sql5)
db.commit()
RRPONTSYAWARD_old_time = cursor.fetchall()
RRPONTSYAWARD_old_time=RRPONTSYAWARD_old_time[-1][0]
if RRPONTSYAWARD_old_time != date5 :
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date5, 'RRPONTSYAWARD', value5)
cursor.execute(sql)
db.commit()
sql6 = "select date from InterestRate where name='SOFR'"
cursor.execute(sql6)
db.commit()
SOFR_old_time = cursor.fetchall()
SOFR_old_time=SOFR_old_time[-1][0]
if SOFR_old_time != date6 :
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (date6, 'SOFR', value6)
cursor.execute(sql)
db.commit()
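# Proxy funds rate: take the latest row of the San Francisco Fed's chart data CSV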
pagee = requests.get("https://www.frbsf.org/wp-content/uploads/sites/4/proxy-funds-rate-chart1-data.csv")
pagee = pagee.text
pagee = pagee.split()
number = 0
for i in pagee:
number += 1
if number <= 5:
continue
else:
pagee = i.split()[-1]
pagee = pagee.replace(',', ' , ')
PFR_new_time = pagee[0:10]
PFR_new_time = PFR_new_time.replace('-', '/')
PFR_value = pagee[-8:]
PFR_value = PFR_value.replace(' ', '')
date_string = PFR_new_time
format = '%Y/%m/%d'
PFR_new_time = datetime.strptime(date_string, format)
sql = "select * from InterestRate where name='PFR' and date='%s'" % (PFR_new_time)
cursor.execute(sql)
outcome = cursor.fetchall()
if not outcome:
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (PFR_new_time, 'PFR', PFR_value)
cursor.execute(sql)
db.commit()
else:
sql = "update InterestRate set _value='%s' where 'name'='PFR' and 'date' = '%s'" % (PFR_value, PFR_new_time)
cursor.execute(sql)
db.commit()
number=0
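# Repo rate: minimum bid rate of the most recent New York Fed repo operation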
pagee = requests.get("https://markets.newyorkfed.org/api/rp/repo/multiple/results/last/1.json")
pagee = pagee.json()
page=pagee['repo']['operations'][0]
page2=page['details'][0]
if page2.__contains__('minimumBidRate'):
RR_value=page2['minimumBidRate']
RR_new_time = page['operationDate']
RR_new_time = RR_new_time.replace('-', '/')
date_string = RR_new_time
format = '%Y/%m/%d'
RR_new_time = datetime.strptime(date_string, format)
sql = "select date from InterestRate where name='RR'"
cursor.execute(sql)
db.commit()
RR_old_time = cursor.fetchall()
RR_old_time = RR_old_time[-1][0]
if RR_old_time != RR_new_time:
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (RR_new_time, 'RR', RR_value)
cursor.execute(sql)
db.commit()
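# USD LIBOR fixings (1M/3M/6M) scraped from global-rates.com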
page = requests.get("https://www.global-rates.com/en/interest-rates/libor/american-dollar/american-dollar.aspx")
page = page.text
page = BeautifulSoup(page, 'html.parser')
data = page.find_all('div', class_="table-normal text-end")
LIBOR_new_time = data[0]
LIBOR1M_value = data[5]
LIBOR3M_value = data[10]
LIBOR6M_value = data[15]
LIBOR_new_time = remove_tags(str(LIBOR_new_time))
LIBOR1M_value = remove_tags(str(LIBOR1M_value))
LIBOR3M_value = remove_tags(str(LIBOR3M_value))
LIBOR6M_value = remove_tags(str(LIBOR6M_value))
LIBOR_new_time = LIBOR_new_time[6:10]+'-'+LIBOR_new_time[0:5]
LIBOR_new_time = LIBOR_new_time.replace("-", "/")
LIBOR1M_value = LIBOR1M_value.replace(' ', '')
LIBOR3M_value = LIBOR3M_value.replace(' ', '')
LIBOR6M_value = LIBOR6M_value.replace(' ', '')
format = '%Y/%m/%d'
LIBOR_new_time = datetime.strptime(LIBOR_new_time, format)
sql = "select date from InterestRate where name='LIBOR1M'"
cursor.execute(sql)
db.commit()
LIBOR_old_time = cursor.fetchall()
LIBOR_old_time = LIBOR_old_time[-1][0]
if LIBOR_new_time != LIBOR_old_time:
sql = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR1M', LIBOR1M_value)
sql1 = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR3M', LIBOR3M_value)
sql2 = "insert into InterestRate(date,name,_value)values('%s','%s','%s')" % (LIBOR_new_time, 'LIBOR6M', LIBOR6M_value)
cursor.execute(sql)
cursor.execute(sql1)
cursor.execute(sql2)
db.commit()
db.close()
time.sleep(7200)

102
lyq/Macroeconomic_Loan.py Normal file

@@ -0,0 +1,102 @@
import time
from full_fred.fred import Fred
import pymysql
from datetime import datetime
fred=Fred('example_key.txt')
fred.set_api_key_file('example_key.txt')
# Refresh the Loan table from the FRED series BUSLOANS (business loans, column PSI) and
# HBPIGDQ188S (federal debt held by private investors as a share of GDP, column FDHBPI_GDP).
while True:
    BUSLOANS = fred.get_series_df('BUSLOANS')
    HBPIGDQ188S = fred.get_series_df('HBPIGDQ188S')
    date1_all = BUSLOANS['date']
    value1_all = BUSLOANS['value']
    date2_all = HBPIGDQ188S['date']
    value2_all = HBPIGDQ188S['value']
    list_date1 = []
    list_value1 = []
    list_date2 = []
    list_value2 = []
    for i in date1_all:
        list_date1 += [i]
    for i in value1_all:
        list_value1 += [i]
    for i in date2_all:
        list_date2 += [i]
    for i in value2_all:
        list_value2 += [i]
    # Keep the latest observation plus the four before it (recent values may be revised)
    date1 = list_date1[-2]
    value1 = list_value1[-2]
    date2 = list_date1[-3]
    value2 = list_value1[-3]
    date3 = list_date1[-4]
    value3 = list_value1[-4]
    date4 = list_date1[-5]
    value4 = list_value1[-5]
    list_date1 = list_date1[-1]
    list_value1 = list_value1[-1]
    list_date2 = list_date2[-1]
    list_value2 = list_value2[-1]
    list_date1 = list_date1.replace('-', '/')
    date_string = list_date1
    format = '%Y/%m/%d'
    list_date1 = datetime.strptime(date_string, format)
    list_date2 = list_date2.replace('-', '/')
    date_string2 = list_date2
    format = '%Y/%m/%d'
    list_date2 = datetime.strptime(date_string2, format)
    db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
    cursor = db.cursor()
    sql = "select date from Loan order by date desc limit 1"
    cursor.execute(sql)
    db.commit()
    old_time = cursor.fetchall()
    old_time = old_time[0][0]
    date1 = date1.replace('-', '/')
    date_string = date1
    format = '%Y/%m/%d'
    date1 = datetime.strptime(date_string, format)
    date2 = date2.replace('-', '/')
    date_string = date2
    format = '%Y/%m/%d'
    date2 = datetime.strptime(date_string, format)
    date3 = date3.replace('-', '/')
    date_string = date3
    format = '%Y/%m/%d'
    date3 = datetime.strptime(date_string, format)
    date4 = date4.replace('-', '/')
    date_string = date4
    format = '%Y/%m/%d'
    date4 = datetime.strptime(date_string, format)
    # Refresh the four most recent historical values in case they were revised
    sql = "update Loan set PSI=%s where date='%s'" % (value1, date1)
    cursor.execute(sql)
    sql = "update Loan set PSI=%s where date='%s'" % (value2, date2)
    cursor.execute(sql)
    sql = "update Loan set PSI=%s where date='%s'" % (value3, date3)
    cursor.execute(sql)
    sql = "update Loan set PSI=%s where date='%s'" % (value4, date4)
    cursor.execute(sql)
    db.commit()
    if list_date1 == old_time:
        # Latest date already present: update both columns in place
        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
        cursor = db.cursor()
        sql = "update Loan set PSI= %s where date='%s'" % (list_value1, list_date1)
        cursor.execute(sql)
        sql1 = "update Loan set FDHBPI_GDP=%s where date='%s'" % (list_value2, list_date2)
        cursor.execute(sql1)
        db.commit()
        time.sleep(21600)
    else:
        # New date: insert a PSI row and update the GDP-share column
        db = pymysql.connect(host="127.0.0.1", user="root", password="2GS@bPYcgiMyL14A", database="Macroeconomics", port=4423)
        cursor = db.cursor()
        sql = "insert into Loan(date,PSI)values('%s','%s')" % (list_date1, list_value1)
        cursor.execute(sql)
        sql1 = "update Loan set FDHBPI_GDP=%s where date='%s'" % (list_value2, list_date2)
        cursor.execute(sql1)
        db.commit()


@@ -0,0 +1,216 @@
import pymysql
import time
import requests
from w3lib.html import remove_tags
from bs4 import BeautifulSoup
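# Scrape the Federal Reserve's H.6 (Money Stock Measures) release once a day and
# upsert its three tables into MySQL.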
condition=True
while condition:
import datetime
# Compute the seconds until 17:00:01 tomorrow; the script sleeps that long after each pass
now_time = datetime.datetime.now()
next_time = now_time + datetime.timedelta(days=+1)
next_year = next_time.date().year
next_month = next_time.date().month
next_day = next_time.date().day
next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 17:00:01","%Y-%m-%d %H:%M:%S")
timer_start_time = (next_time - now_time).total_seconds()
page =requests.get("https://www.federalreserve.gov/releases/h6/current/default.htm")
page=page.text
soup = BeautifulSoup(page, 'html.parser')
page_tbody = soup.find_all('tbody')
# The release has three tables: Money Stock Measures, Seasonally Adjusted components, and Not Seasonally Adjusted components
MoneyStockMeasures = page_tbody[0]
SeasonallyAdjusted = page_tbody[1]
NotSeasonallyAdjusted = page_tbody[2]
MoneyStockMeasures = remove_tags(str(MoneyStockMeasures))
SeasonallyAdjusted = remove_tags(str(SeasonallyAdjusted))
NotSeasonallyAdjusted = remove_tags(str(NotSeasonallyAdjusted))
# Convert month abbreviations to numbers and drop the 'e' (estimate) markers
MoneyStockMeasures=MoneyStockMeasures.replace('Jan.', '1')
MoneyStockMeasures = MoneyStockMeasures.replace('Feb.', '2')
MoneyStockMeasures = MoneyStockMeasures.replace('Mar.', '3')
MoneyStockMeasures = MoneyStockMeasures.replace('Apr.', '4')
MoneyStockMeasures = MoneyStockMeasures.replace('May', '5')
MoneyStockMeasures = MoneyStockMeasures.replace('June', '6')
MoneyStockMeasures = MoneyStockMeasures.replace('July', '7')
MoneyStockMeasures = MoneyStockMeasures.replace('Aug.', '8')
MoneyStockMeasures = MoneyStockMeasures.replace('Sept.', '9')
MoneyStockMeasures = MoneyStockMeasures.replace('Oct.', '10')
MoneyStockMeasures = MoneyStockMeasures.replace('Nov.', '11')
MoneyStockMeasures = MoneyStockMeasures.replace('Dec.', '12')
MoneyStockMeasures = MoneyStockMeasures.replace('e', '')
MoneyStockMeasures = MoneyStockMeasures.split()
SeasonallyAdjusted = SeasonallyAdjusted.replace('Jan.', '1')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Feb.', '2')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Mar.', '3')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Apr.', '4')
SeasonallyAdjusted = SeasonallyAdjusted.replace('May', '5')
SeasonallyAdjusted = SeasonallyAdjusted.replace('June', '6')
SeasonallyAdjusted = SeasonallyAdjusted.replace('July', '7')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Aug.', '8')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Sept.', '9')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Oct.', '10')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Nov.', '11')
SeasonallyAdjusted = SeasonallyAdjusted.replace('Dec.', '12')
SeasonallyAdjusted = SeasonallyAdjusted.replace('e', '')
SeasonallyAdjusted = SeasonallyAdjusted.split()
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Jan.', '1')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Feb.', '2')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Mar.', '3')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Apr.', '4')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('May', '5')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('June', '6')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('July', '7')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Aug.', '8')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Sept.', '9')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Oct.', '10')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Nov.', '11')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('Dec.', '12')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.replace('e', '')
NotSeasonallyAdjusted = NotSeasonallyAdjusted.split()
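# Money Stock Measures table: 12 tokens per monthly row (month, year, ten series values);
# the first 16 rows update existing records, the final row is inserted when its date is new.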
condition1=0
for i in range(17):
date1= MoneyStockMeasures[condition1 + 1] + '/' + MoneyStockMeasures[condition1 + 0] + '/1'
format = '%Y/%m/%d'
from datetime import datetime
# Parse the row's date and each series value
date1 = datetime.strptime(date1, format)
value11=MoneyStockMeasures[condition1 + 2]
value11 = value11.replace(",", "")
value12=MoneyStockMeasures[condition1 + 3]
value12 = value12.replace(",", "")
value13=MoneyStockMeasures[condition1 + 7]
value13 = value13.replace(",", "")
value14=MoneyStockMeasures[condition1 + 8]
value14 = value14.replace(",", "")
value15=MoneyStockMeasures[condition1 + 4]
value15 = value15.replace(",", "")
value16=MoneyStockMeasures[condition1 + 5]
value16 = value16.replace(",", "")
value17=MoneyStockMeasures[condition1 + 6]
value17 = value17.replace(",", "")
value18=MoneyStockMeasures[condition1 + 9]
value18 = value18.replace(",", "")
value19=MoneyStockMeasures[condition1 + 10]
value19 = value19.replace(",", "")
value20=MoneyStockMeasures[condition1 + 11]
value20 = value20.replace(",", "")
condition1+=12
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
sql = "select date from MoneyStockMeasures order by date desc limit 1"
cursor.execute(sql)
old_date = cursor.fetchall()
date2=old_date[0][0]
if i != 16:
sql="UPDATE MoneyStockMeasures SET adjustedM1=%s,adjustedM2=%s,notAdjustedM1=%s,notAdjustedM2=%s,currencyincirculation=%s,reserveBalances=%s,monetaryBase=%s,totalReserves=%s,totalMborrowings_M=%s,nonborrowedReserves=%s WHERE date= '%s'"%(value11, value12, value13, value14, value15, value16, value17, value18, value19, value20, date1)
cursor.execute(sql)
db.commit()
db.close()
else:
if date1 == date2:
sql = "UPDATE MoneyStockMeasures SET adjustedM1=%s,adjustedM2=%s,notAdjustedM1=%s,notAdjustedM2=%s,currencyincirculation=%s,reserveBalances=%s,monetaryBase=%s,totalReserves=%s,totalMborrowings_M=%s,nonborrowedReserves=%s WHERE date= '%s'" % (
value11, value12, value13, value14, value15, value16, value17, value18, value19, value20, date1)
cursor.execute(sql)
db.commit()
db.close()
else:
sql = "insert into MoneyStockMeasures(date,adjustedM1,adjustedM2,notAdjustedM1,notAdjustedM2,currencyincirculation,reserveBalances,monetaryBase,totalReserves,totalMborrowings_M,nonborrowedReserves)values('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
date1, value11, value12, value13, value14, value15, value16, value17, value18, value19, value20)
cursor.execute(sql)
db.commit()
db.close()
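# Seasonally Adjusted components table: 7 tokens per monthly row (month, year, five values)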
condition2=0
for i in range(17):
date3= SeasonallyAdjusted[condition2 + 1] + '/' + SeasonallyAdjusted[condition2 + 0] + '/1'
format = '%Y/%m/%d'
from datetime import datetime
date3 = datetime.strptime(date3, format)
value21=SeasonallyAdjusted[condition2 + 2]
value21 = value21.replace(",", "")
value22=SeasonallyAdjusted[condition2 + 3]
value22 = value22.replace(",", "")
value23=SeasonallyAdjusted[condition2 + 4]
value23 = value23.replace(",", "")
value24=SeasonallyAdjusted[condition2 + 5]
value24 = value24.replace(",", "")
value25=SeasonallyAdjusted[condition2 + 6]
value25 = value25.replace(",", "")
condition2+=7
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
sql = "select date from SeasonallyAdjusted order by date desc limit 1"
cursor.execute(sql)
old_date = cursor.fetchall()
date4=old_date[0][0]
if i != 16:
sql="UPDATE SeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s WHERE date= '%s'"%(value21, value22, value23, value24, value25, date3)
cursor.execute(sql)
db.commit()
db.close()
else:
if date3 == date4:
sql = "UPDATE SeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s WHERE date= '%s'" % (
value21, value22, value23, value24, value25, date3)
cursor.execute(sql)
db.commit()
db.close()
else:
sql = "insert into SeasonallyAdjusted(date,currencyM1,demandM1,otherLiquid,smallDenominationTimeNonM1M2,retailMoneyMarketFundsNonM1M2)values('%s','%s','%s','%s','%s','%s')" % (
date3, value21, value22, value23, value24, value25)
cursor.execute(sql)
db.commit()
db.close()
condition3=0
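# Each NotSeasonallyAdjusted record spans 10 consecutive cells: month, year, then the
# eight series written below (currencyM1 through total).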
for i in range(17):
date5= NotSeasonallyAdjusted[condition3 + 1] + '/' + NotSeasonallyAdjusted[condition3 + 0] + '/1'
format = '%Y/%m/%d'
from datetime import datetime
date5 = datetime.strptime(date5, format)
value31=NotSeasonallyAdjusted[condition3 + 2]
value31 = value31.replace(",", "")
value32=NotSeasonallyAdjusted[condition3 + 3]
value32 = value32.replace(",", "")
value33=NotSeasonallyAdjusted[condition3 + 4]
value33 = value33.replace(",", "")
value34=NotSeasonallyAdjusted[condition3 + 5]
value34 = value34.replace(",", "")
value35=NotSeasonallyAdjusted[condition3 + 6]
value35 = value35.replace(",", "")
value36 = NotSeasonallyAdjusted[condition3 + 7]
value36 = value36.replace(",", "")
value37 = NotSeasonallyAdjusted[condition3 + 8]
value37 = value37.replace(",", "")
value38 = NotSeasonallyAdjusted[condition3 + 9]
value38 = value38.replace(",", "")
condition3+=10
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
sql = "select date from NotSeasonallyAdjusted order by date desc limit 1"
cursor.execute(sql)
old_date = cursor.fetchall()
date6=old_date[0][0]
if i != 16:
sql="UPDATE NotSeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s,atDepositoryInstitutions=%s,atMoneyMarketFunds=%s,total=%s WHERE date= '%s'"%(value31, value32, value33, value34, value35, value36, value37, value38, date5)
cursor.execute(sql)
db.commit()
db.close()
else:
if date5 == date6:
sql = "UPDATE NotSeasonallyAdjusted SET currencyM1=%s,demandM1=%s,otherLiquid=%s,smallDenominationTimeNonM1M2=%s,retailMoneyMarketFundsNonM1M2=%s,atDepositoryInstitutions=%s,atMoneyMarketFunds=%s,total=%s WHERE date= '%s'" % (
value31, value32, value33, value34, value35, value36, value37, value38, date5)
cursor.execute(sql)
db.commit()
db.close()
else:
sql = "insert into NotSeasonallyAdjusted(date,currencyM1,demandM1,otherLiquid,smallDenominationTimeNonM1M2,retailMoneyMarketFundsNonM1M2,atDepositoryInstitutions,atMoneyMarketFunds,total)values('%s','%s','%s','%s','%s','%s','%s','%s','%s')" % (
date5, value31, value32, value33, value34, value35, value36, value37, value38)
cursor.execute(sql)
db.commit()
db.close()
time.sleep(timer_start_time)

117
lyq/Macroeconomic_PCE_v3.py Normal file
View File

@@ -0,0 +1,117 @@
import requests
import pymysql
from datetime import datetime
import time
BEA_USER_ID = "146B5757-D9E3-442C-B6AC-ADE9E6B71114"
YEARS = ["2023","2024","2025"] # 第一次运行抓全部年份
SLEEP_SECONDS = 21600 # 6小时
def get_bea_data(year):
"""抓取指定年份的季度数据"""
url = (
f'https://apps.bea.gov/api/data?UserID={BEA_USER_ID}'
f'&method=GetData&datasetname=NIPA&TableName=T10105&Frequency=Q'
f'&Year={year}&ResultFormat=JSON'
)
response = requests.get(url)
return response.json()['BEAAPI']['Results']['Data']
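# Each element of the returned list is a dict; update_database() reads its
# "TimePeriod" (e.g. "2024Q1"), "LineDescription", "LineNumber" and "DataValue" fields.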
def update_database(cursor, data):
"""整理并插入缺失季度数据"""
# 查询数据库已存在的季度
cursor.execute("SELECT times FROM PCE")
existing_pce = {row[0] for row in cursor.fetchall()}
cursor.execute("SELECT times FROM GPDI")
existing_gpdi = {row[0] for row in cursor.fetchall()}
cursor.execute("SELECT times FROM NETEXP")
existing_netexp = {row[0] for row in cursor.fetchall()}
# Organize the data
pce_data, gpdi_data, netexp_data = {}, {}, {}
for entry in data:
t = entry["TimePeriod"]
desc = entry["LineDescription"]
val = entry["DataValue"]
if desc == "Personal consumption expenditures":
pce_data.setdefault(t, {})["PCE"] = val
elif desc == "Durable goods":
pce_data.setdefault(t, {})["PCEDG"] = val
elif desc == "Nondurable goods":
pce_data.setdefault(t, {})["PCEND"] = val
elif desc == "Services" and entry["LineNumber"] == '6':
pce_data.setdefault(t, {})["PCES"] = val
elif desc == "Gross private domestic investment":
gpdi_data.setdefault(t, {})["GPDI"] = val
elif desc == "Fixed investment":
gpdi_data.setdefault(t, {})["FPI"] = val
elif desc == "Change in private inventories":
gpdi_data.setdefault(t, {})["CBI"] = val
elif desc == "Net exports of goods and services":
netexp_data.setdefault(t, {})["NETEXP"] = val
elif desc == "Imports":
netexp_data.setdefault(t, {})["IMPGS"] = val
elif desc == "Exports":
netexp_data.setdefault(t, {})["EXPGS"] = val
# Insert rows missing from the database
for t, vals in pce_data.items():
if t not in existing_pce:
cursor.execute(
"INSERT INTO PCE (times, PCE, PCEDG, PCEND, PCES) VALUES (%s,%s,%s,%s,%s)",
(t, vals.get("PCE"), vals.get("PCEDG"), vals.get("PCEND"), vals.get("PCES"))
)
for t, vals in gpdi_data.items():
if t not in existing_gpdi:
cursor.execute(
"INSERT INTO GPDI (times, GPDI, FPI, CBI) VALUES (%s,%s,%s,%s)",
(t, vals.get("GPDI"), vals.get("FPI"), vals.get("CBI"))
)
for t, vals in netexp_data.items():
if t not in existing_netexp:
cursor.execute(
"INSERT INTO NETEXP (times, NETEXP, IMPGS, EXPGS) VALUES (%s,%s,%s,%s)",
(t, vals.get("NETEXP"), vals.get("IMPGS"), vals.get("EXPGS"))
)
def run_job(first_run=False):
"""运行一次抓取和更新"""
print(f"[{datetime.now()}] 开始抓取 BEA 数据并更新数据库...")
try:
db = pymysql.connect(
host="127.0.0.1",
user="root",
password="2GS@bPYcgiMyL14A",
database="Macroeconomics",
port=4423
)
cursor = db.cursor()
years_to_fetch = YEARS if first_run else [YEARS[-1]] # fetch all years on the first run, otherwise only the latest year
for year in years_to_fetch:
data = get_bea_data(year)
update_database(cursor, data)
db.commit()
print(f"[{datetime.now()}] {year} 数据更新完成")
except pymysql.MySQLError as e:
print(f"[{datetime.now()}] 数据库错误: {e}")
except Exception as e:
print(f"[{datetime.now()}] 其他错误: {e}")
finally:
if 'cursor' in locals():
cursor.close()
if 'db' in locals():
db.close()
print(f"[{datetime.now()}] 本次任务完成。\n")
if __name__ == "__main__":
first_run = True
while True:
run_job(first_run)
first_run = False # subsequent loops only fetch the latest quarter
print(f"[{datetime.now()}] Sleeping {SLEEP_SECONDS} seconds (6 hours)...\n")
time.sleep(SLEEP_SECONDS)

File diff suppressed because it is too large

View File

@@ -0,0 +1,85 @@
import requests
import pymysql
from datetime import datetime
import time
old_transaction_mtd_amt=0
condition=True
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
while condition:
page = requests.get("https://api.fiscaldata.treasury.gov/services/api/fiscal_service/v1/accounting/dts/public_debt_transactions?fields=record_date,transaction_type,security_type,transaction_mtd_amt&sort=-record_date")
page = page.json()
page = page['data']
# Get the most recent record date published on the site
page_data = page[0:23]
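# The first 23 rows are assumed to cover the full transaction_type/security_type
# breakdown for the most recent record_date.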
for data in page_data:
sql = "select date from USTreasuriesSize order by date desc limit 1"
cursor.execute(sql)
old_date = cursor.fetchall()
old_date = str(old_date)
old_date = old_date[20:-11]
old_date = old_date.replace(' ', '')
old_date = old_date.replace(',', '/')
date_string = old_date
format = '%Y/%m/%d'
old_date = datetime.strptime(date_string, format)
record_date = data['record_date']
transaction_type = data['transaction_type']
security_type=data['security_type']
transaction_mtd_amt=data['transaction_mtd_amt']
record_date = record_date.replace('-', '/')
format = '%Y/%m/%d'
record_date = datetime.strptime(record_date, format)
# Compare the latest date in the database with the latest date from the site
if record_date!=old_date:
sql = "insert into USTreasuriesSize(date)values('%s')" % (record_date)
cursor.execute(sql)
db.commit()
sql = "select id from USTreasuriesSize order by id desc limit 1"
cursor.execute(sql)
id = cursor.fetchall()
id = id[0][0]
for data in page_data:
transaction_type = data['transaction_type']
security_type = data['security_type']
transaction_mtd_amt = data['transaction_mtd_amt']
if transaction_type == 'Issues':
if security_type == 'Bills':
transaction_mtd_amt= old_transaction_mtd_amt + int(transaction_mtd_amt)
sql1 = "update USTreasuriesSize set TBill=%s where id=%s" % (transaction_mtd_amt, id)
cursor.execute(sql1)
db.commit()
old_transaction_mtd_amt=transaction_mtd_amt
elif security_type == 'Notes':
sql2 = "update USTreasuriesSize set TNote=%s where id=%s" % (transaction_mtd_amt, id)
cursor.execute(sql2)
db.commit()
elif security_type == 'Bonds':
sql3 = "update USTreasuriesSize set TBond=%s where id=%s" % (transaction_mtd_amt, id)
cursor.execute(sql3)
db.commit()
elif security_type == 'Inflation-Protected Securities Increment':
sql4 = "update USTreasuriesSize set TIPS=%s where id=%s" % (transaction_mtd_amt, id)
cursor.execute(sql4)
db.commit()
else:
continue
else:
continue
else:
continue
old_transaction_mtd_amt=0
time.sleep(21600)

View File

@@ -0,0 +1,96 @@
import pymysql
import time
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
from w3lib.html import remove_tags
from bs4 import BeautifulSoup
from datetime import datetime
# Configure Selenium
chrome_options = Options()
chrome_options.add_argument("--headless")
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--disable-gpu')
chrome_options.add_argument('--disable-dev-shm-usage')
chrome_options.add_argument('blink-settings=imagesEnabled=false')
# In Selenium 3, the chrome_options parameter can be passed directly
browser = webdriver.Chrome(executable_path="chromedriver", options=chrome_options)
# Month mapping collected into a single dictionary
MONTH_MAPPING = {
"Jan": "/1/", "Feb": "/2/", "Mar": "/3/", "Apr": "/4/",
"May": "/5/", "Jun": "/6/", "Jul": "/7/", "Aug": "/8/",
"Sep": "/9/", "Oct": "/10/", "Nov": "/11/", "Dec": "/12/"
}
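# The mapping rewrites the month abbreviation in the scraped H.15 column header as
# "/<month number>/" so the cleaned string can be parsed with '%Y/%m/%d'.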
# Database connection configuration
DB_CONFIG = {
"host": "127.0.0.1",
"user": "root",
"password": "2GS@bPYcgiMyL14A",
"database": "Macroeconomics",
"port": 4423
}
def fetch_web_data():
"""抓取网页数据并解析日期和利率数据"""
browser.get("https://www.federalreserve.gov/releases/h15/")
soup = BeautifulSoup(browser.page_source, 'html.parser')
# Get the date
date_text = soup.find_all('th', class_="colhead sticky sticky-row-cell")[-1].get_text(strip=True)
for month, replacement in MONTH_MAPPING.items():
date_text = date_text.replace(month, replacement)
date = datetime.strptime(date_text.replace('*', ''), '%Y/%m/%d')
# Get the rate data
data = [remove_tags(str(td)).strip() for td in soup.find_all('td', class_="data")]
return date, data
def get_latest_db_date():
"""从数据库获取最新日期"""
with pymysql.connect(**DB_CONFIG) as conn:
with conn.cursor() as cursor:
cursor.execute("SELECT MAX(date) FROM USTreasuriesYields")
result = cursor.fetchone()
return result[0] if result[0] else None
def insert_data(date, rates, mprime, dpcredit):
"""插入数据到数据库"""
with pymysql.connect(**DB_CONFIG) as conn:
with conn.cursor() as cursor:
sql_treasuries = """
INSERT INTO USTreasuriesYields (date, 1_Mo, 3_Mo, 6_Mo, 1_Yr, 2_Yr, 5_Yr, 10_Yr, 20_Yr, 30_Yr)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
"""
sql_interest_rate = """
INSERT INTO InterestRate (date, name, _value)
VALUES (%s, %s, %s)
"""
cursor.execute(sql_treasuries, [date] + rates)
cursor.execute(sql_interest_rate, (date, 'BPL', mprime))
cursor.execute(sql_interest_rate, (date, 'DWPC', dpcredit))
conn.commit()
def main():
while True:
new_date, data = fetch_web_data()
old_date = get_latest_db_date()
if old_date and new_date <= old_date:
time.sleep(21600) # 6 hours
continue
# Extract the rate data
rates = [
data[i].replace('ND', 'NULL') if i < len(data) else 'NULL'
for i in [104, 109, 114, 119, 124, 134, 144, 149, 154]
]
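# The indices above are assumed fixed positions of the 1 Mo ... 30 Yr Treasury yield
# cells in the scraped "data" cells; 'ND' (no data) entries are stored as the string 'NULL'.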
mprime = data[54] if len(data) > 54 else 'NULL'
dpcredit = data[59] if len(data) > 59 else 'NULL'
insert_data(new_date, rates, mprime, dpcredit)
if __name__ == "__main__":
main()

105
lyq/Macroeconomic_WEI.py Normal file
View File

@@ -0,0 +1,105 @@
import time
from full_fred.fred import Fred
import pymysql
fred=Fred('example_key.txt')
fred.set_api_key_file('example_key.txt')
times="00:00:00"
while True:
import datetime
# now_time = datetime.datetime.now()
# next_time = now_time + datetime.timedelta(days=+1)
# next_year = next_time.date().year
# next_month = next_time.date().month
# next_day = next_time.date().day
# next_time = datetime.datetime.strptime(str(next_year) + "-" + str(next_month) + "-" + str(next_day) + " 15:00:00","%Y-%m-%d %H:%M:%S")
# timer_start_time = (next_time - now_time).total_seconds()
data=fred.get_series_df('WEI')
data_date=data['date']
data_value=data['value']
# Collect the data fetched from the source
list1=[]
list2=[]
for i in data_date:
list1+=[i]
for i2 in data_value:
list2+=[i2]
# Take the latest seven records
date1=list1[-1]
date1= date1 + ' ' + times
timeArray = time.strptime(date1, "%Y-%m-%d %H:%M:%S")
timestamp = time.mktime(timeArray)
date1 = int(timestamp * 1000) - 28800000
value1=list2[-1]
date2=list1[-2]
date2 = date2 + ' ' + times
timeArray = time.strptime(date2, "%Y-%m-%d %H:%M:%S")
timestamp = time.mktime(timeArray)
date2 = int(timestamp * 1000) - 28800000
value2=list2[-2]
date3=list1[-3]
date3 = date3 + ' ' + times
timeArray = time.strptime(date3, "%Y-%m-%d %H:%M:%S")
timestamp = time.mktime(timeArray)
date3 = int(timestamp * 1000) - 28800000
value3=list2[-3]
date4=list1[-4]
date4 = date4 + ' ' + times
timeArray = time.strptime(date4, "%Y-%m-%d %H:%M:%S")
timestamp = time.mktime(timeArray)
date4 = int(timestamp * 1000) - 28800000
value4=list2[-4]
date5=list1[-5]
date5 = date5 + ' ' + times
timeArray = time.strptime(date5, "%Y-%m-%d %H:%M:%S")
timestamp = time.mktime(timeArray)
date5 = int(timestamp * 1000) - 28800000
value5=list2[-5]
date6=list1[-6]
date6 = date6 + ' ' + times
timeArray = time.strptime(date6, "%Y-%m-%d %H:%M:%S")
timestamp = time.mktime(timeArray)
date6 = int(timestamp * 1000) - 28800000
value6=list2[-6]
date7=list1[-7]
date7 = date7 + ' ' + times
timeArray = time.strptime(date7, "%Y-%m-%d %H:%M:%S")
timestamp = time.mktime(timeArray)
date7 = int(timestamp * 1000) - 28800000
value7=list2[-7]
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
sql = "select times from WEI order by times desc limit 1"
cursor.execute(sql)
db.commit()
old_time = cursor.fetchall()
old_time = old_time[0][0]
sql = "update WEI set WEI=%s where times='%s'" % (value2, date2)
cursor.execute(sql)
sql = "update WEI set WEI=%s where times='%s'" % (value3, date3)
cursor.execute(sql)
sql = "update WEI set WEI=%s where times='%s'" % (value4, date4)
cursor.execute(sql)
sql = "update WEI set WEI=%s where times='%s'" % (value5, date5)
cursor.execute(sql)
sql = "update WEI set WEI=%s where times='%s'" % (value6, date6)
cursor.execute(sql)
sql = "update WEI set WEI=%s where times='%s'" % (value7, date7)
cursor.execute(sql)
db.commit()
if date1 == old_time:
time.sleep(21600)
# time.sleep(timer_start_time)
else:
db = pymysql.connect(host="127.0.0.1",user="root",password="2GS@bPYcgiMyL14A",database="Macroeconomics",port=4423)
cursor = db.cursor()
sql = "insert into WEI(times,WEI)values('%s','%s')" % (date1, value1)
cursor.execute(sql)
db.commit()

View File

@@ -1,5 +1,35 @@
Directory overview:
Related files:
Function overview:
arh999_lyq.py --BTC data script
arh999eth_lyq.py --ETH data script
Binance_fapi.py --BTC data script
btc_price_fetcher.py --BTC data script
btc_price.py --BTC data script
btc_stats_qt.py --BTC data script
btc_update.py --BTC data script
btc_utxos_lyq3.py --BTC data script
btc_utxos_update_lyq3.py --BTC data script
btc24h_db_if.py --BTC data script (MySQL database)
btc24h_redis_if.py --BTC data script (Redis database)
btc24h_stats.py --BTC real-time update script
check_order_lyq.py --BTCUSDT/ETHUSDT fetch script
check_zone_lyq.py --BTCUSDT/ETHUSDT fetch script
CoinmarKetCap.py --script fetching coins, market cap, trading volume and price change
db_if_qt.py --BTC data script (MySQL database)
exchangeRate_lyq.py --real-time exchange rate script
Macroeconomic_COVITGDP_v2.py --macroeconomic data script
Macroeconomic_CPI_NSA_v2.py --macroeconomic data script
Macroeconomic_FARBODI.py --macroeconomic data script
Macroeconomic_FBI_v2.py --macroeconomic data script
Macroeconomic_FER.py --macroeconomic data script
Macroeconomic_InterestRate.py --macroeconomic data script
Macroeconomic_Loan.py --macroeconomic data script
Macroeconomic_MoneyStockMeasures.py --macroeconomic data script
Macroeconomic_PCE_v3.py --macroeconomic data script
Macroeconomic_SAALOCBITUSS_ASSET.py --macroeconomic data script
Macroeconomic_USTreasuriesSize.py --macroeconomic data script
Macroeconomic_USTreasuriesYields_v2.py --macroeconomic data script
Macroeconomic_WEI.py --macroeconomic data script
nochain_eth_lyq.py --supply and supply-ratio script
nochain_lyq_utc08.py --supply and supply-ratio script
nochain_lyq_v2.py --supply and supply-ratio script
nochain_update_lyq.py --supply and supply-ratio script
redis_if_qt.py --BTC data script (Redis database)

1005
lyq/arh999_lyq.py Normal file

File diff suppressed because it is too large

504
lyq/arh999eth_lyq.py Normal file
View File

@@ -0,0 +1,504 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
from loguru import logger
import datetime
import pymysql
import math
import csv
g_prices = {}
g_dbif = None
g_lastts = 0
def get_day60_rise(day, prices):
total = 0
cnt = 0
for i in range(60):
if str(day) in prices:
cur_price = prices[str(day)]
day = str(day - 3600 * 24)
if day in prices:
prev_price = prices[day]
try:
#print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
total += (((cur_price-prev_price)/prev_price))
cnt += 1
except:
pass
# print(day, total, cnt)
day = int(day)
return total
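# get_days_rise: cumulative sum of day-over-day fractional price changes over the previous maxdays days.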
def get_days_rise(day, maxdays, prices):
total = 0
cnt = 0
for i in range(maxdays):
if str(day) in prices:
cur_price = prices[str(day)]
day = str(day - 3600 * 24)
if day in prices:
prev_price = prices[day]
try:
#print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
total += (((cur_price-prev_price)/prev_price))
cnt += 1
except:
pass
# print(day, total, cnt)
day = int(day)
return total
def append_jzr_day60(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
dbif.append_jzr60(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
def sync_jzr_day60(dbif, prices):
for day in prices:
print(day, prices[day])
day60_rise = get_days_rise(int(day), 60, prices)
day7_rise = get_days_rise(int(day), 7, prices)
day30_rise = get_days_rise(int(day), 30, prices)
day90_rise = get_days_rise(int(day), 90, prices)
print(day, day60_rise)
append_jzr_day60(dbif, day, prices[day], day60_rise, day7_rise, day30_rise, day90_rise)
def check_jzr60_sync(dbif):
return dbif.check_jzr60_sync()
def append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
dbif.append_jzr60_day(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
def append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise):
dbif.append_jzr60(dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
def clean_jzr60day(dbif, clean_day):
dbif.clean_jzr60_day(clean_day)
def handle_jzr_day60(dbif, day, dayutc, price, prices):
day60_rise = get_days_rise(dayutc, 60, prices)
day7_rise = get_days_rise(dayutc, 7, prices)
day30_rise = get_days_rise(dayutc, 30, prices)
day90_rise = get_days_rise(dayutc, 90, prices)
print(dayutc, price, day, day60_rise)
append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise)
append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
clean_day = dayutc - 3600 * 24 * 2
clean_jzr60day(dbif, clean_day)
class Arh99DbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init arh99 db suceess!")
def check_sync(self):
synced = False
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `arh99v3a`"
cursor.execute(sql_query)
result = cursor.fetchone()
print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
synced = True
self.conn.commit()
#print("synced", synced)
return synced
def append(self, day, price, arh99, arh99x):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `arh99v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, (int(day),))
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
sql_update = 'UPDATE arh99v3a SET `arh99`=%s, `arh99x`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
print(sql_update)
cursor.execute(sql_update, (arh99, arh99x, price, int(day), int(day)))
else:
sql_insert = "INSERT INTO `arh99v3a` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, arh99, arh99x))
self.conn.commit()
def append_day(self, day, price, arh99, arh99x):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `arh99v3aday` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, arh99, arh99x))
self.conn.commit()
def clean_day(self, day):
with self.conn.cursor() as cursor:
sql_clean = "DELETE from arh99v3aday where unixdt<FROM_UNIXTIME(%s)"
print(sql_clean)
cursor.execute(sql_clean, (int(day),))
self.conn.commit()
def check_jzr60_sync(self):
synced = False
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `jzr60v3a`"
cursor.execute(sql_query)
result = cursor.fetchone()
print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
synced = True
self.conn.commit()
#print("synced", synced)
return synced
def append_jzr60(self, day, price, jzr60, jzr7, jzr30, jzr90):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `jzr60v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, (int(day),))
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
sql_update = 'UPDATE jzr60v3a SET `jzr60`=%s,`jzr7`=%s,`jzr30`=%s,`jzr90`=%s,`price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
print(sql_update)
cursor.execute(sql_update, (jzr60, jzr7, jzr30, jzr90, price, int(day), int(day)))
else:
sql_insert = "INSERT INTO `jzr60v3a` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
self.conn.commit()
def append_jzr60_day(self, day, price, jzr60, jzr7, jzr30, jzr90):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `jzr60v3aday` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
self.conn.commit()
def clean_jzr60_day(self, day):
with self.conn.cursor() as cursor:
sql_clean = "DELETE from jzr60v3aday where unixdt<FROM_UNIXTIME(%s)"
print(sql_clean)
cursor.execute(sql_clean, (int(day),))
self.conn.commit()
def check_ma730_sync(self):
synced = False
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `ma730v3a`"
cursor.execute(sql_query)
result = cursor.fetchone()
print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
synced = True
self.conn.commit()
#print("synced", synced)
return synced
def append_ma730(self, day, price, ma730, ma365, ma200):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `ma730v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, (int(day),))
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
ma730x5 = ma730*5
if result["COUNT(id)"] > 0:
sql_update = 'UPDATE ma730v3a SET `ma730`=%s, `ma730x5`=%s, `ma365`=%s, `ma200`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
print(sql_update)
cursor.execute(sql_update, (ma730, ma730x5, ma365, ma200, price, int(day), int(day)))
else:
sql_insert = "INSERT INTO `ma730v3a` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
self.conn.commit()
def append_ma730_day(self, day, price, ma730, ma365, ma200):
with self.conn.cursor() as cursor:
ma730x5 = ma730*5
sql_insert = "INSERT INTO `ma730v3aday` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
self.conn.commit()
def clean_ma730_day(self, day):
with self.conn.cursor() as cursor:
sql_clean = "DELETE from ma730v3aday where unixdt<FROM_UNIXTIME(%s)"
print(sql_clean)
cursor.execute(sql_clean, (int(day),))
self.conn.commit()
def get_history_price(dbif):
global g_prices
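# eth_history_price.csv is expected to provide "Date" (%m/%d/%Y) and "Price" columns.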
with open("eth_history_price.csv", newline='') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
print(row)
daystr = row["Date"]
p = row["Price"]
dayutc = time.mktime(time.strptime(daystr, "%m/%d/%Y"))
g_prices[str(int(dayutc))] = float(p)
'''response_price = requests.get(
'https://data.nasdaq.com/api/v3/datasets/BCHAIN/MKPRU.json?api_key=FZqXog4sR-b7cYnXcRVV')
if response_price.status_code == 200:
#print(response_price.content)
priceweb = ujson.loads(response_price.content)
if "dataset" in priceweb:
priceset = priceweb["dataset"]
if "data" in priceset:
pricedata = priceset["data"]
for price in pricedata:
daystr = price[0]
p = price[1]
dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
g_prices[str(int(dayutc))] = float(p)
#print(price, int(dayutc), g_prices[str(int(dayutc))])
'''
return g_prices
def get_history_price2(dbif):
global g_prices
#pricedict = {}
dayt = time.gmtime()
daystr = time.strftime("%Y", dayt)
year = int(daystr)
end_year = year
while True:
url = ""
if end_year != year:
start_year = end_year
url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?start="
else:
url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?after=" + str(
year) + "-01-01&order=descending&interval=1d"
if end_year != year:
url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
header_set = {}
header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
# header_set["Content-Type"] = "application/json"
print(header_set, url)
response_price = requests.get(url, headers=header_set)
# print(response_price)
if response_price.status_code == 200:
# print(response_price.content)
priceweb = ujson.loads(response_price.content)
if "data" in priceweb:
priceset = priceweb["data"]
if "values" in priceset:
valueset = priceset["values"]
if valueset is not None:
for supply in valueset:
dayutc = int(supply[0] / 1000)
s = supply[1]
ret_time = time.gmtime(dayutc)
ret_daystr = time.strftime("%d %b %Y", ret_time)
ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
#self.pricedict[str(ret_dayutc)] = float(s)
g_prices[str(ret_dayutc)] = float(s)
# print(s, dayutc, pricedict[str(dayutc)])
# break
else:
break
else:
break
end_year -= 1
time.sleep(2)
#print(self.pricedict)
#return self.pricedict
get_history_price(dbif)
return g_prices
def get_coin_days(day):
birthday = time.mktime(time.strptime("2009-01-03", "%Y-%m-%d"))
days = (int(day) - birthday)/3600/24
#print(day, birthday, days)
return days
def get_coin_exp(days):
try:
temp = 5.84*math.log10(days)-17.01
#print("temp", temp, math.log10(days), days)
exp = math.pow(10,temp)
return exp
except:
return 0
def cal_day200_price(prices, day):
total = 0
cnt = 0
for i in range(200):
if day in prices:
total += prices[day]
cnt += 1
#print(day, total, cnt)
day = str(int(day) - 3600 * 24)
if cnt > 0:
return total/cnt
return 0
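# cal_arh99 computes an ahr999-style index: (price / 200-day average price) multiplied by
# (price / an exponential age-based estimate, 10 ** (5.84*log10(age_days) - 17.01)),
# with age_days counted from 2009-01-03 (see get_coin_days). arh99x is the
# reciprocal product scaled by 3.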
def cal_arh99(prices, day, price):
day200 = cal_day200_price(prices, day)
#print("day200", day200)
days = get_coin_days(day)
#print("days", days)
exp = get_coin_exp(days)
#print("exp", exp, price)
try:
arh99 = (float(price)/day200)*(float(price)/exp)
arh99x = (day200/float(price))*(exp/float(price))*3
except:
arh99 = 0
arh99x = 0
#print("arh99", arh99)
return arh99, arh99x
def check_sync(dbif):
return dbif.check_sync()
def append_arh99(dbif, day, price, arh99, arh99x):
dbif.append(day, price, arh99, arh99x)
def sync_arh99(dbif, prices):
for day in prices:
print(day, prices[day])
arh99, arh99x = cal_arh99(prices, int(day), prices[day])
print(day, arh99, arh99x)
append_arh99(dbif, day, prices[day], arh99, arh99x)
def append_arh99day(dbif, day, price, arh99, arh99x):
dbif.append_day(day, price, arh99, arh99x)
def clean_arh99day(dbif, day):
dbif.clean_day(day)
def arh99_handler(message):
global g_prices
global g_dbif
global g_lastts
coin_data = message["data"]
#coin_symbol = coin_data["s"]
coin_ts = int(coin_data["E"])
coin_price = float(coin_data["c"])
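# Throttle: only recompute when the tick falls on a minute boundary and at least
# 15 seconds have passed since the previous update.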
#print((coin_ts/1000), int((coin_ts/1000)%60))
if int((coin_ts/1000)%60) == 0:
#if coin_ts/1000/60 != g_lastts:
if coin_ts/1000 - g_lastts >= 15:
#print(coin_ts, coin_price)
coin_ts2 = time.gmtime(coin_ts/1000)
daystr = time.strftime("%d %b %Y", coin_ts2)
print(daystr)
dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
g_prices[str(dayutc)] = coin_price
arh99, arh99x = cal_arh99(g_prices, dayutc, coin_price)
print(dayutc, coin_price, arh99, arh99x)
append_arh99day(g_dbif, coin_ts/1000, coin_price, arh99, arh99x)
append_arh99(g_dbif, dayutc, coin_price, arh99, arh99x)
clean_day = dayutc - 3600*24*2
clean_arh99day(g_dbif, clean_day)
handle_jzr_day60(g_dbif, coin_ts/1000, dayutc, coin_price, g_prices)
handle_ma_day730(g_dbif, coin_ts / 1000, dayutc, coin_price, g_prices)
g_lastts = coin_ts/1000
def start_arh99(dbif, prices):
ws_client = WebsocketClient()
ws_client.start()
ws_client.instant_subscribe(
stream=['ethusdt@miniTicker'],
callback=arh99_handler,
)
def arh99():
global g_dbif
g_dbif = Arh99DbIf()
prices = get_history_price2(g_dbif)
#if not check_sync(g_dbif):
if True:
sync_arh99(g_dbif, prices)
#if not check_jzr60_sync(g_dbif):
if True:
sync_jzr_day60(g_dbif, prices)
#if not check_ma730_sync(g_dbif):
if True:
sync_ma_day730(g_dbif, prices)
start_arh99(g_dbif, prices)
#2-year ma multiplier
def get_day730_rise(day, prices):
total = 0
cnt = 0
for i in range(730):
if str(day) in prices:
cur_price = prices[str(day)]
total += cur_price
cnt += 1
day = str(day - 3600 * 24)
day = int(day)
if cnt > 0:
return total/cnt
return 0
def get_day365_rise(day, maxdays, prices):
total = 0
cnt = 0
for i in range(maxdays):
if str(day) in prices:
cur_price = prices[str(day)]
total += cur_price
cnt += 1
day = str(day - 3600 * 24)
day = int(day)
if cnt > 0:
return total/cnt
return 0
def append_ma_day730(dbif, day, price, day730_rise, day365_rise, day200_rise):
dbif.append_ma730(day, price, day730_rise, day365_rise, day200_rise)
def sync_ma_day730(dbif, prices):
for day in prices:
print(day, prices[day])
day730_rise = get_day730_rise(int(day), prices)
day365_rise = get_day365_rise(int(day), 365, prices)
day200_rise = get_day365_rise(int(day), 200, prices)
print(day, day730_rise)
append_ma_day730(dbif, day, prices[day], day730_rise, day365_rise, day200_rise)
def check_ma730_sync(dbif):
return dbif.check_ma730_sync()
def append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise):
dbif.append_ma730_day(day, price, day730_rise, day365_rise, day200_rise)
def append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise):
dbif.append_ma730(dayutc, price, day730_rise, day365_rise, day200_rise)
def clean_ma730day(dbif, clean_day):
dbif.clean_ma730_day(clean_day)
def handle_ma_day730(dbif, day, dayutc, price, prices):
day730_rise = get_day730_rise(dayutc, prices)
day365_rise = get_day365_rise(dayutc, 365, prices)
day200_rise = get_day365_rise(dayutc, 200, prices)
print(dayutc, price, day, day730_rise)
append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise)
append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise)
clean_day = dayutc - 3600 * 24 * 2
clean_ma730day(dbif, clean_day)
arh99()

600
lyq/btc24h_db_if.py Normal file
View File

@@ -0,0 +1,600 @@
# coding=utf-8
import datetime
import json
import requests
import pymysql
from loguru import logger
import time
class DbIf:
def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname,
cursorclass=pymysql.cursors.DictCursor)
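# Thin MySQL write helper for the real-time BTC daily-indicator tables
# (rt_dailyindsv3e1 / rt_dailyindsv3e2).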
def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price,
transferred_price, balanced_price, nvt_ratio, velocity):
with self.conn.cursor() as cursor:
print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price,
balanced_price, nvt_ratio, velocity)
# Call the message-subscription API to push the rt_dailyindsv2e2 real-time data into its topic
# url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1"
# headers = {"accept": "application/json"}
# data = {"unixdt":dt_utc,"height_begin":height_begin,"height_end":height_end,"lth_volume":lth_volume,"frm":frm,"cvdd":cvdd,"realized_price":realized_price,"transferred_price":transferred_price,"balanced_price":balanced_price,"nvt_ratio":nvt_ratio,"velocity":velocity}
# response = requests.post(url=url, data=json.dumps(data), headers=headers)
sql_insert = "REPLACE INTO rt_dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
cursor.execute(sql_insert, (
dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price,
balanced_price, nvt_ratio, velocity))
'''
def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)"
cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees))
'''
def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address,
new_address_volume, active_address,
send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
nupl):
with self.conn.cursor() as cursor:
# Call the message-subscription API to push the rt_dailyindsv2e1 real-time data into its topic
# Data structure: {dt_utc:'dt_utc'}
try:
url="https://coinbus.cc/api/v1/marketall/push/realtime/btc/dailyv2e1"
headers = {"accept": "application/json"}
data = {"unixdt":dt_utc,"height_begin":height_begin,"height_end":height_end,"profitrate":profitrate,
"fees":fees,"txs":txs,"new_address":new_address,"total_address":total_address,
"new_address_volume":new_address_volume,"active_address":active_address,"send_address":send_address,
"receive_address":receive_address,"volume":volume,"eavolume":eavolume,"sopr":sopr,"asopr":asopr,"easopr":easopr,
"lthsopr":lthsopr,"sthsopr":sthsopr,"asol":asol,"eaasol":eaasol,"dormancy":dormancy,
"adormancy":adormancy,"eadormancy":eadormancy,"cdd":cdd,"sacdd":sacdd,"eacdd":eacdd,"day1":day1,"day7":day7,
"day30": day30,"day60":day60,"day90":day90,"day180":day180,"day365":day365,"day730":day730,
"csupply":csupply,"mintusd":mintusd,"sumcsupply":sumcsupply,"sumcdd":sumcdd,"sumeacdd":sumeacdd,"liveliness":liveliness,
"ealiveliness":ealiveliness,"rprofit":rprofit,"rloss":rloss,"rplrate":rplrate,
"price":price,"marketcap":marketcap,"rcap":rcap,"earcap":earcap,"mvrv":mvrv,"nupl":nupl}
response = requests.post(url=url, data=json.dumps(data), headers=headers)
except:
print("api调用失败")
sql_insert = "REPLACE INTO rt_dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, nupl"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume,
active_address, send_address,
receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
price, marketcap, rcap, earcap, mvrv,
nupl))
self.conn.commit()
'''
def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address,
send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address,
receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv,
nupl))
self.conn.commit()
'''
'''
# daily daily on-chain volume
def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `dailyvolume`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
'''
# newaddrs
'''
def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt,
last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change,
last_vol):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, {dayutc, })
result = cursor.fetchone()
# print(dt_utc)
# print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
print("update")
sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, `volume=%s`,`txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
cursor.execute(sql_update, (
last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
last_rx_addr_cnt,
last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc))
else:
print("insert")
sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees,
last_profit_rate))
self.conn.commit()
'''
'''
def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
#print(sql_insert)
#print(datetime, txid, vout, voutalias, amount, height)
cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height))
self.conn.commit()
'''
'''
def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit,
days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid,
block_height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
# print(datetime, txid, vout, voutalias, amount, height)
cursor.execute(sql_insert, (
dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest,
days_largest, days_current, tx_buy_address, txid, block_height))
self.conn.commit()
'''
'''
def update_to_dailycdd(self, dt_utc, cdd):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, cdd))
self.conn.commit()
'''
'''
def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30,
day60, day90, day180, day365, day730):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180,
day365,
day730))
self.conn.commit()
'''
'''
def update_to_dailysopr(self, dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr))
self.conn.commit()
'''
'''
def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit,
rloss, rplrate, price, marketcap, rcap, earcap, mvrv):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss,
rplrate,
price, marketcap, rcap, earcap, mvrv))
self.conn.commit()
'''
# daily volume
'''
def update_to_dailyvolume(self, dt_utc, volume):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, volume))
self.conn.commit()
'''
'''with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, {dt_utc,})
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
print("update")
sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
cursor.execute(sql_update, (volume, dt_utc))
else:
print("insert")
sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, volume))
self.conn.commit()'''
'''
def update_to_dailyfees(self, dt_utc, fees):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, fees))
self.conn.commit()
'''
'''
def import_to_dailyvolume2(self, dt_utc, volume):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, volume))
self.conn.commit()
def delete_dailyvolume_data(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyvolume`"
cursor.execute(sql_query)
self.conn.commit()
# daily market cap
def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `dailyprice`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
#daily price
def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)"
#print(sql_insert)
cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply))
self.conn.commit()
def update_to_dailyprice(self, dt_utc, price, volume, change):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
#print(sql_insert)
cursor.execute(sql_insert, (dt_utc, price, volume, change))
self.conn.commit()
def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, {dt_utc,})
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
print("update")
sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc))
else:
print("insert")
sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply))
self.conn.commit()
def update_dailypricechange(self):
with self.conn.cursor() as cursor:
sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt"
cursor.execute(sql_query)
results = cursor.fetchall()
prevprice = -1
for result in results:
if prevprice < 0:
prevprice = result["price"]
else:
#print(result["unixdt"], result["price"], result["marketcap"])
try:
change = (result["price"]/prevprice - 1)*100
except:
change = 0
#print(csupply)
datestr = result["unixdt"]
logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change))
sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s'
cursor.execute(sql_update, (str(change), result["unixdt"]))
prevprice = result["price"]
self.conn.commit()
def delete_dailyprice_data(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyprice`"
cursor.execute(sql_query)
self.conn.commit()
def delete_failed_blockvolume(self, height):
with self.conn.cursor() as cursor:
sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s"
cursor.execute(sql_insert, (height,))
sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s"
cursor.execute(sql_insert, (height,))
sql_insert = "DELETE FROM `blockamount` WHERE height=%s"
cursor.execute(sql_insert, (height,))
self.conn.commit()
#block check --- big amount for vout
def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `bigamountvout`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
'''
def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout,
profit):
with self.conn.cursor() as cursor:
# url = "http://10.168.2.125:7101/marketall/push/realtime/btc/dailyindsv2e1"
# headers = {"accept": "application/json"}
# data = {"unixdt":datetime,"vout":vout,"voutn":voutn,"vouttype":vouttype,
# "amount":amount,"height":height,"txid":txid,"days":days,"buyin":buyin,
# "sellout":sellout,"profit":profit}
# response = requests.post(url=url, data=json.dumps(data), headers=headers)
sql_insert = "INSERT INTO `rt_bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
# print(datetime, txid, vout, voutalias, amount, height)
cursor.execute(sql_insert,
(datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit))
self.conn.commit()
'''
# block check --- big amount tx
def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `bigamounttx`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
def update_to_bigamounttx(self, datetime, txid, amount, height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
#print(sql_insert)
#print(datetime, txid, amount, height)
cursor.execute(sql_insert, (datetime, amount, height, txid))
self.conn.commit()
# block check --- per block amount
def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `blockamount`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
else:
if amount > 0:
sql_query = sql_query + "WHERE amount > " + str(amount)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
cursor.execute(sql_query)
return cursor.fetchall()
def update_to_blockamount(self, datetime, blockid, amount, height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
#print(sql_insert)
#print(datetime, blockid, amount, height)
cursor.execute(sql_insert, (datetime, amount, height, blockid))
self.conn.commit()
def delete_node_data(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `blockamount`"
cursor.execute(sql_query)
sql_query = "DELETE FROM `bigamountvout`"
cursor.execute(sql_query)
sql_query = "DELETE FROM `bigamounttx`"
cursor.execute(sql_query)
self.conn.commit()
def update_realize_cap(self, dayutc, last_rv):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)"
#print(sql_insert)
#print(datetime, blockid, amount, height)
cursor.execute(sql_insert, (dayutc, last_rv))
self.conn.commit()
# daily realize cap
def query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `dailyrcap`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
def update_daily_addr(self, dayutc, last_add_cnt):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)"
#print(sql_insert)
#print(datetime, blockid, amount, height)
cursor.execute(sql_insert, (dayutc, last_add_cnt))
self.conn.commit()
def delete_daily_addr(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyaddradd`"
cursor.execute(sql_query)
self.conn.commit()
def delete_daily_rv(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyrcap`"
cursor.execute(sql_query)
self.conn.commit()
'''
def __del__(self):
self.conn.close()

613
lyq/btc24h_redis_if.py Normal file
View File

@@ -0,0 +1,613 @@
import time
from walrus import *
from loguru import logger
class RedisIf:
def __init__(self, host="127.0.0.1", port=6379, password="", db=0):
self.db = Database(host=host, port=port, db=db)
self.zbalance = self.db.ZSet("rt_balancev2e")
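        # Sorted set "rt_balancev2e": member = address, score = running BTC balance,
        # maintained incrementally by save_addr() below.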
'''
#realize cap progress
self.rv = self.db.Hash("rv")
#address and balance progress
self.addr = self.db.Hash("addr")
#block volume progress
self.bv = self.db.Hash("bv")
#daily volume progress
self.dv = self.db.Hash("dv")
'''
'''
#stat tx progress
self.tx = self.db.Hash("tx")
#ETH daily contract progress
self.eth_dc = self.db.Hash("ethdc")
#btc stats fee
self.btc_stats = self.db.Hash("btcstats")
#btc stats volume
self.btc_volume = self.db.Hash("btcvolume")
# btc stats cdd
self.btc_cdd = self.db.Hash("btccdd")
# btc stats cdd days
self.btc_cdd_days = self.db.Hash("btccdddays")
'''
self.btc_block_time = self.db.Hash("rt_btcblocktimev2e")
'''
self.btc_sopr = self.db.Hash("btcsopr")
'''
self.btc_data = self.db.Hash("rt_btc_datav2e")
self.active_address = self.db.Set("rt_active_addressv2e")
self.send_address = self.db.Set("rt_send_addressv2e")
self.receive_address = self.db.Set("rt_receive_addressv2e")
def get_btc_data(self, key):
value = None
if self.btc_data[key] is not None:
value = self.btc_data[key]
return value
def set_btc_data(self, key, value):
self.btc_data[key] = value
def reset_btc_data(self):
self.btc_data.clear()
self.zbalance.clear()
# self.btc_block_time.clear()
'''
def get_last_btc_sopr(self):
last_sopr_buy = None
last_asopr_buy = None
last_easopr_buy = None
last_lth_sopr_buy = None
last_sth_sopr_buy = None
last_asol = None
last_eaasol = None
if self.btc_sopr["last_asol"] is not None:
last_asol = self.btc_sopr["last_asol"]
#last_asol = float(self.btc_sopr["last_asol"].decode("utf-8"))
if self.btc_sopr["last_eaasol"] is not None:
last_eaasol = self.btc_sopr["last_eaasol"]
#last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8"))
if self.btc_sopr["last_sopr_buy"] is not None:
last_sopr_buy = self.btc_sopr["last_sopr_buy"]
#last_sopr_buy = float(self.btc_sopr["last_sopr_buy"].decode("utf-8"))
if self.btc_sopr["last_asopr_buy"] is not None:
last_asopr_buy = self.btc_sopr["last_asopr_buy"]
#last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8"))
if self.btc_sopr["last_easopr_buy"] is not None:
last_easopr_buy = self.btc_sopr["last_easopr_buy"]
#last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8"))
if self.btc_sopr["last_lth_sopr_buy"] is not None:
last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"]
#last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8"))
if self.btc_sopr["last_sth_sopr_buy"] is not None:
last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"]
#last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8"))
last_sopr_sell = None
last_asopr_sell = None
last_easopr_sell = None
last_lth_sopr_sell = None
last_sth_sopr_sell = None
if self.btc_sopr["last_sopr_sell"] is not None:
last_sopr_sell = self.btc_sopr["last_sopr_sell"]
# last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8"))
if self.btc_sopr["last_asopr_sell"] is not None:
last_asopr_sell = self.btc_sopr["last_asopr_sell"]
# last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8"))
if self.btc_sopr["last_easopr_sell"] is not None:
last_easopr_sell = self.btc_sopr["last_easopr_sell"]
# last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8"))
if self.btc_sopr["last_lth_sopr_sell"] is not None:
last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"]
# last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8"))
if self.btc_sopr["last_sth_sopr_sell"] is not None:
last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"]
# last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8"))
return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell
def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell):
self.btc_sopr["last_asol"] = last_asol
self.btc_sopr["last_eaasol"] = last_eaasol
self.btc_sopr["last_sopr_buy"] = last_sopr_buy
self.btc_sopr["last_asopr_buy"] = last_asopr_buy
self.btc_sopr["last_easopr_buy"] = last_easopr_buy
self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy
self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy
self.btc_sopr["last_sopr_sell"] = last_sopr_sell
self.btc_sopr["last_asopr_sell"] = last_asopr_sell
self.btc_sopr["last_easopr_sell"] = last_easopr_sell
self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell
self.btc_sopr["last_sth_sopr_sell"] = last_sth_sopr_sell
'''
def get_block_time(self, height):
block_time = None
height_str = str(height)
if self.btc_block_time[height_str] is not None:
block_time = int(self.btc_block_time[height_str].decode("utf-8"))
# block_time = int(self.btc_block_time[height_str].decode("utf-8"))
return block_time
def set_block_time(self, height, ts):
height_str = str(height)
self.btc_block_time[height_str] = ts
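    # Illustrative usage (not from the original source): a height -> unix-timestamp cache, e.g.
    #   r.set_block_time(840000, 1713571767)
    #   r.get_block_time(840000)   # -> 1713571767, or None if the height is not cached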
'''
def get_last_btc_cdd_days(self):
last_cdd = None
last_acdd = None
last_eacdd = None
last_cdd_day1= None
last_cdd_day7 = None
last_cdd_day30 = None
last_cdd_day60 = None
last_cdd_day90 = None
last_cdd_day180 = None
last_cdd_day365 = None
last_cdd_day730 = None
last_date = None
last_height = None
last_date_str = None
if self.btc_cdd["last_cdd"] is not None:
last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
if self.btc_cdd["last_acdd"] is not None:
last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8"))
if self.btc_cdd["last_eacdd"] is not None:
last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day1"] is not None:
last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day7"] is not None:
last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day30"] is not None:
last_cdd_day30 = float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day60"] is not None:
last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day90"] is not None:
last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day180"] is not None:
last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day365"] is not None:
last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day730"] is not None:
last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8"))
if self.btc_cdd_days["last_date"] is not None:
last_date = int(self.btc_cdd_days["last_date"].decode("utf-8"))
if self.btc_cdd_days["last_height"] is not None:
last_height = int(self.btc_cdd_days["last_height"].decode("utf-8"))
if self.btc_cdd_days["last_date_str"] is not None:
last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8")
return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str
def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr):
self.btc_cdd["last_cdd"] = cdd
self.btc_cdd["last_acdd"] = acdd
self.btc_cdd["last_eacdd"] = eacdd
self.btc_cdd_days["last_cdd_day1"] = day1
self.btc_cdd_days["last_cdd_day7"] = day7
self.btc_cdd_days["last_cdd_day30"] = day30
self.btc_cdd_days["last_cdd_day60"] = day60
self.btc_cdd_days["last_cdd_day90"] = day90
self.btc_cdd_days["last_cdd_day180"] = day180
self.btc_cdd_days["last_cdd_day365"] = day365
self.btc_cdd_days["last_cdd_day730"] = day730
self.btc_cdd_days["last_date"] = dt
self.btc_cdd_days["last_height"] = height
self.btc_cdd_days["last_date_str"] = dtstr
'''
'''
def get_last_btc_cdd(self):
last_cdd = None
last_date = None
last_height = None
last_date_str = None
if self.btc_cdd["last_cdd"] is not None:
last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
if self.btc_cdd["last_date"] is not None:
last_date = int(self.btc_cdd["last_date"].decode("utf-8"))
if self.btc_cdd["last_height"] is not None:
last_height = int(self.btc_cdd["last_height"].decode("utf-8"))
if self.btc_cdd["last_date_str"] is not None:
last_date_str = self.btc_cdd["last_date_str"].decode("utf-8")
return last_cdd, last_date, last_height, last_date_str
def set_last_btc_cdd(self, cdd, dt, height, dtstr):
self.btc_cdd["last_cdd"] = cdd
self.btc_cdd["last_date"] = dt
self.btc_cdd["last_height"] = height
self.btc_cdd["last_date_str"] = dtstr
def get_last_btc_volume(self):
last_volume = None
last_date = None
last_height = None
last_date_str = None
if self.btc_volume["last_volume"] is not None:
last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
if self.btc_volume["last_date"] is not None:
last_date = int(self.btc_volume["last_date"].decode("utf-8"))
if self.btc_volume["last_height"] is not None:
last_height = int(self.btc_volume["last_height"].decode("utf-8"))
if self.btc_volume["last_date_str"] is not None:
last_date_str = self.btc_volume["last_date_str"].decode("utf-8")
return last_volume, last_date, last_height, last_date_str
def set_last_btc_volume(self, volume, dt, height, dtstr):
self.btc_volume["last_volume"] = volume
self.btc_volume["last_date"] = dt
self.btc_volume["last_height"] = height
self.btc_volume["last_date_str"] = dtstr
'''
'''
def get_last_btc_stats(self):
last_fees = None
last_date = None
last_height = None
last_date_str = None
last_volume = None
if self.btc_stats["last_fees"] is not None:
last_fees = float(self.btc_stats["last_fees"].decode("utf-8"))
if self.btc_volume["last_volume"] is not None:
last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
if self.btc_stats["last_date"] is not None:
last_date = int(self.btc_stats["last_date"].decode("utf-8"))
if self.btc_stats["last_height"] is not None:
last_height = int(self.btc_stats["last_height"].decode("utf-8"))
if self.btc_stats["last_date_str"] is not None:
last_date_str = self.btc_stats["last_date_str"].decode("utf-8")
return last_fees, last_volume, last_date, last_height, last_date_str
def set_last_btc_stats(self, fees, volume, dt, height, dtstr):
self.btc_stats["last_fees"] = fees
self.btc_volume["last_volume"] = volume
self.btc_stats["last_date"] = dt
self.btc_stats["last_height"] = height
self.btc_stats["last_date_str"] = dtstr
def get_last_eth_dc(self):
last_date = None
last_height = None
last_date_str = None
if self.eth_dc["last_date"] is not None:
last_date = int(self.eth_dc["last_date"].decode("utf-8"))
if self.eth_dc["last_height"] is not None:
last_height = int(self.eth_dc["last_height"].decode("utf-8"))
if self.eth_dc["last_date_str"] is not None:
last_date_str = self.eth_dc["last_date_str"].decode("utf-8")
return last_date, last_height, last_date_str
def set_last_eth_dc(self, dt, height, dtstr):
self.eth_dc["last_date"] = dt
self.eth_dc["last_height"] = height
self.eth_dc["last_date_str"] = dtstr
'''
'''
def get_last_dv(self):
last_dv = None
last_date = None
last_height = None
last_date_str = None
if self.dv["last_dv"] is not None:
last_dv = float(self.dv["last_dv"].decode("utf-8"))
if self.dv["last_date"] is not None:
last_date = int(self.dv["last_date"].decode("utf-8"))
if self.dv["last_height"] is not None:
last_height = int(self.dv["last_height"].decode("utf-8"))
if self.dv["last_date_str"] is not None:
last_date_str = self.dv["last_date_str"].decode("utf-8")
return last_dv, last_date, last_height, last_date_str
def set_last_dv(self, dv, dt, height, dtstr):
self.dv["last_dv"] = dv
self.dv["last_date"] = dt
self.dv["last_height"] = height
self.dv["last_date_str"] = dtstr
def get_last_bv(self):
last_height = None
if self.bv["last_height"] is not None:
last_height = int(self.bv["last_height"].decode("utf-8"))
return last_height
def set_last_bv(self, height):
self.bv["last_height"] = height
'''
'''
def get_last_ind(self):
last_csupply = None
last_mintusd = None
last_sumcsupply = None
last_sumcdd = None
last_sumeacdd = None
last_rprofit = None
last_rloss = None
last_marketcap = None
last_rcap = None
last_mvrv = None
last_earcap = None
if self.tx["last_csupply"] is not None:
last_csupply = float(self.tx["last_csupply"].decode("utf-8"))
if self.tx["last_mintusd"] is not None:
last_mintusd = float(self.tx["last_mintusd"].decode("utf-8"))
if self.tx["last_sumcsupply"] is not None:
last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8"))
if self.tx["last_sumcdd"] is not None:
last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8"))
if self.tx["last_sumeacdd"] is not None:
last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8"))
if self.tx["last_rprofit"] is not None:
last_rprofit = float(self.tx["last_rprofit"].decode("utf-8"))
if self.tx["last_rloss"] is not None:
last_rloss = float(self.tx["last_rloss"].decode("utf-8"))
if self.tx["last_marketcap"] is not None:
last_marketcap = float(self.tx["last_marketcap"].decode("utf-8"))
if self.tx["last_rcap"] is not None:
last_rcap = float(self.tx["last_rcap"].decode("utf-8"))
if self.tx["last_earcap"] is not None:
last_earcap = float(self.tx["last_earcap"].decode("utf-8"))
if self.tx["last_mvrv"] is not None:
last_mvrv = float(self.tx["last_mvrv"].decode("utf-8"))
return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv
def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv):
self.tx["last_csupply"] = last_csupply
self.tx["last_mintusd"] = last_mintusd
self.tx["last_sumcsupply"] = last_sumcsupply
self.tx["last_sumcdd"] = last_sumcdd
self.tx["last_sumeacdd"] = last_sumeacdd
self.tx["last_rprofit"] = last_rprofit
self.tx["last_rloss"] = last_rloss
self.tx["last_marketcap"] = last_marketcap
self.tx["last_rcap"] = last_rcap
self.tx["last_earcap"] = last_earcap
self.tx["last_mvrv"] = last_mvrv
def get_last_tx(self):
last_profit = None
last_fees = None
last_newaddr_cnt = None
last_newaddr_vol = None
last_active_addr_cnt = None
last_tx_addr_cnt = None
last_rx_addr_cnt = None
last_vol_change = None
last_vol = None
last_avol = None
last_date = None
last_height = None
last_date_str = None
last_txs = None
last_eatxs = None
if self.tx["last_profit_rate"] is not None:
last_profit = int(self.tx["last_profit"].decode("utf-8"))
if self.tx["last_fees"] is not None:
last_fees = int(self.tx["last_fees"].decode("utf-8"))
if self.tx["last_txs"] is not None:
last_txs = int(self.tx["last_txs"].decode("utf-8"))
if self.tx["last_eatxs"] is not None:
last_eatxs = int(self.tx["last_eatxs"].decode("utf-8"))
if self.tx["last_newaddr_cnt"] is not None:
last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8"))
if self.tx["last_newaddr_vol"] is not None:
last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8"))
if self.tx["last_active_addr_cnt"] is not None:
last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8"))
if self.tx["last_tx_addr_cnt"] is not None:
last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8"))
if self.tx["last_rx_addr_cnt"] is not None:
last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8"))
if self.tx["last_vol_change"] is not None:
last_vol_change = float(self.tx["last_vol_change"].decode("utf-8"))
if self.tx["last_vol"] is not None:
last_vol = float(self.tx["last_vol"].decode("utf-8"))
if self.tx["last_avol"] is not None:
last_avol = float(self.tx["last_avol"].decode("utf-8"))
if self.tx["last_date"] is not None:
last_date = int(self.tx["last_date"].decode("utf-8"))
if self.tx["last_height"] is not None:
last_height = int(self.tx["last_height"].decode("utf-8"))
if self.tx["last_date_str"] is not None:
last_date_str = self.tx["last_date_str"].decode("utf-8")
return last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, last_vol, last_avol, last_date, last_height, last_date_str
def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt, newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt, vol_change, vol, avol, dt, height, dtstr):
self.tx["last_profit"] = last_profit
self.tx["last_fees"] = last_fees
self.tx["last_txs"] = last_txs
self.tx["last_eatxs"] = last_eatxs
self.tx["last_newaddr_cnt"] = newaddr_cnt
self.tx["last_newaddr_vol"] = newaddr_vol
self.tx["last_active_addr_cnt"] = active_addr_cnt
self.tx["last_tx_addr_cnt"] = tx_addr_cnt
self.tx["last_rx_addr_cnt"] = rx_addr_cnt
self.tx["last_vol_change"] = vol_change
self.tx["last_vol"] = vol
self.tx["last_avol"] = avol
self.tx["last_date"] = dt
self.tx["last_height"] = height
self.tx["last_date_str"] = dtstr
'''
'''
def get_last_addr(self):
last_daily_cnt = None
last_date = None
last_height = None
last_date_str = None
if self.addr["last_daily_cnt"] is not None:
last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8"))
if self.addr["last_date"] is not None:
last_date = int(self.addr["last_date"].decode("utf-8"))
if self.addr["last_height"] is not None:
last_height = int(self.addr["last_height"].decode("utf-8"))
if self.addr["last_date_str"] is not None:
last_date_str = self.addr["last_date_str"].decode("utf-8")
return last_daily_cnt, last_date, last_height, last_date_str
def set_last_addr(self, daily_cnt, dt, height, dtstr):
self.addr["last_daily_cnt"] = daily_cnt
self.addr["last_date"] = dt
self.addr["last_height"] = height
self.addr["last_date_str"] = dtstr
'''
def is_active_address(self, address):
result = address in self.active_address
if not result:
self.active_address.add(address)
return result
def reset_active_address(self):
self.active_address.clear()
def get_active_address_cnt(self):
return len(self.active_address)
def is_send_address(self, address):
result = address in self.send_address
if not result:
self.send_address.add(address)
return result
def reset_send_address(self):
self.send_address.clear()
def get_send_address_cnt(self):
return len(self.send_address)
def is_receive_address(self, address):
result = address in self.receive_address
if not result:
self.receive_address.add(address)
return result
def reset_receive_address(self):
self.receive_address.clear()
def get_receive_address_cnt(self):
return len(self.receive_address)
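    # Design note: the three Redis sets above de-duplicate addresses seen in the current
    # window, so the *_cnt getters return exact unique counts; a HyperLogLog would trade
    # exactness for much lower memory if these sets grow very large.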
def save_addr(self, address, balance):
new_balance = balance
if address in self.zbalance:
new_balance = self.zbalance.score(address) + balance
# print("update", self.zbalance.score(address), balance, new_balance)
# time.sleep(10)
if new_balance < 0.01:
del self.zbalance[address]
# print("check exist", address, address in self.zbalance)
# time.sleep(10)
return
self.zbalance.add({address: new_balance})
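    # Illustrative behaviour (not from the original source): balances are applied as deltas
    # and dust entries are evicted, e.g.
    #   r.save_addr("bc1qexample...", 0.5)     # balance becomes 0.5
    #   r.save_addr("bc1qexample...", -0.495)  # 0.005 < 0.01, so the address is removed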
'''
def delete_addr(self, config):
self.addr.clear()
self.zbalance.clear()
'''
def is_in_addr(self, address):
return address in self.zbalance
def get_addr_cnt(self):
return len(self.zbalance)
'''
def delete_rv(self, config):
self.rv.clear()
def get_last_rv(self):
last_rv = None
last_date = None
last_height = None
last_date_str = None
if self.rv["last_rv"] is not None:
last_rv = float(self.rv["last_rv"].decode("utf-8"))
if self.rv["last_date"] is not None:
last_date = int(self.rv["last_date"].decode("utf-8"))
if self.rv["last_height"] is not None:
last_height = int(self.rv["last_height"].decode("utf-8"))
if self.rv["last_date_str"] is not None:
last_date_str = self.rv["last_date_str"].decode("utf-8")
return last_rv, last_date, last_height, last_date_str
def set_last_rv(self, rv, dt, height, dtstr):
self.rv["last_rv"] = rv
self.rv["last_date"] = dt
self.rv["last_height"] = height
self.rv["last_date_str"] = dtstr
'''
def get_all_address(self):
return self.zbalance.keys()
def delete_address_data(self, config):
self.zbalance.clear()
'''
def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0):
if len(address) > 0:
results = []
result = {}
result["address"] = address
balance = self.zbalance.score(address)
print(balance)
if balance is not None:
result["balance"] = balance
results.append(result)
return results
match_result = None
if start_balance > 0:
if end_balance > 0:
match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False)
else:
match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False)
else:
if end_balance > 0:
match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False)
results = []
if match_result is not None:
#print(match_result)
for addr, balance2 in match_result:
address = addr.decode('utf-8')
result = {}
result["address"] = address
result["balance"] = balance2
results.append(result)
if limit > 0 and len(results) >= limit:
break
return results
'''

1110
lyq/btc24h_stats.py Normal file

File diff suppressed because it is too large Load Diff

85
lyq/btc_price_fetcher.py Normal file
View File

@@ -0,0 +1,85 @@
import time
import requests
import pymysql
from datetime import datetime
# MySQL configuration
DB_CONFIG = {
"host": "127.0.0.1",
"user": "root",
"password": "2GS@bPYcgiMyL14A",
"database": "btcdb",
"port": 4423
}
# Return the current unix timestamp (seconds)
def get_current_timestamp():
return int(time.time())
# Fetch the BTC price from API 1 (Binance)
def get_binance_price():
url = "https://api.binance.com/api/v3/ticker/price?symbol=BTCUSDT"
resp = requests.get(url, timeout=5)
resp.raise_for_status()
data = resp.json()
return float(data["price"])
# Fetch the BTC price from API 2 (Coinbase)
def get_coinbase_price():
url = "https://api.coinbase.com/v2/prices/spot?currency=USD"
resp = requests.get(url, timeout=5)
resp.raise_for_status()
data = resp.json()
return float(data["data"]["amount"])
# Update or insert the latest price for a source
def upsert_price(source, price, timestamp):
connection = pymysql.connect(**DB_CONFIG)
try:
with connection.cursor() as cursor:
# Check whether this source already has a row
sql_check = "SELECT id FROM btc_realtime_prices WHERE source = %s"
cursor.execute(sql_check, (source,))
result = cursor.fetchone()
if result:
# Row exists: update it
sql_update = """
UPDATE btc_realtime_prices
SET price = %s, timestamp = %s
WHERE source = %s
"""
cursor.execute(sql_update, (price, timestamp, source))
else:
# Row does not exist: insert it
sql_insert = """
INSERT INTO btc_realtime_prices (timestamp, source, price)
VALUES (%s, %s, %s)
"""
cursor.execute(sql_insert, (timestamp, source, price))
connection.commit()
finally:
connection.close()
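# Note: the SELECT-then-UPDATE/INSERT above is not atomic. If btc_realtime_prices had a
# UNIQUE index on `source` (an assumption, not visible here), a single statement would do
# the same job, e.g.:
#   INSERT INTO btc_realtime_prices (timestamp, source, price)
#   VALUES (%s, %s, %s)
#   ON DUPLICATE KEY UPDATE price = VALUES(price), timestamp = VALUES(timestamp)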
def main():
while True:
now_ts = get_current_timestamp()
        try:
            binance_price = get_binance_price()
            print(f"Binance BTC Price: {binance_price}")
            upsert_price("binance", binance_price, now_ts)
        except Exception as e:
            print(f"Failed to fetch Binance price: {e}")
        try:
            coinbase_price = get_coinbase_price()
            print(f"Coinbase BTC Price: {coinbase_price}")
            upsert_price("coinbase", coinbase_price, now_ts)
        except Exception as e:
            print(f"Failed to fetch Coinbase price: {e}")
        time.sleep(60)  # run once per minute
if __name__ == "__main__":
main()

141
lyq/btc_prices.py Normal file
View File

@@ -0,0 +1,141 @@
import time
import requests
import pymysql
import ujson
from datetime import datetime, timedelta
# MySQL connection settings
DB_CONFIG = {
"host": "127.0.0.1",
"user": "root",
"password": "2GS@bPYcgiMyL14A",
"database": "btcdb",
"port": 4423
}
# Nasdaq API Key
NASDAQ_API_KEY = "FZqXog4sR-b7cYnXcRVV"
# Load the (timestamp, source) pairs that already exist in the table
def get_existing_timestamps():
connection = pymysql.connect(**DB_CONFIG)
existing_timestamps = set()
try:
with connection.cursor() as cursor:
cursor.execute("SELECT timestamp, source FROM btc_prices")
for row in cursor.fetchall():
existing_timestamps.add((row[0], row[1]))
finally:
connection.close()
return existing_timestamps
# Helper: snap any timestamp to 08:00 Beijing time of the same day (returned as a UTC timestamp)
def adjust_to_beijing_08am(timestamp):
    dt = datetime.utcfromtimestamp(timestamp) + timedelta(hours=8)
    dt_08am = datetime(dt.year, dt.month, dt.day, 8, 0, 0)
    return int((dt_08am - timedelta(hours=8)).timestamp())  # convert back to UTC for storage
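# Worked example: ts = 1609513200 (2021-01-01 15:00 UTC) -> 23:00 Beijing on Jan 1
# -> snapped to 08:00 Beijing -> returned as 1609459200 (2021-01-01 00:00 UTC).
# Note (assumption): the naive .timestamp() call is only exact if the process runs with
# the system timezone set to UTC; calendar.timegm(dt_08am.timetuple()) - 8 * 3600 would
# avoid that dependency.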
# Fetch historical BTC/USD prices from Nasdaq
def get_nasdaq_price():
prices = {}
url = f'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU&api_key={NASDAQ_API_KEY}'
response = requests.get(url)
if response.status_code == 200:
data = ujson.loads(response.content)
if "datatable" in data and "data" in data["datatable"]:
for item in data["datatable"]["data"]:
daystr = item[1]
price = item[2]
dt = datetime.strptime(daystr, "%Y-%m-%d")
dt_08am_bj = datetime(dt.year, dt.month, dt.day, 8, 0, 0)
dt_08am_utc = dt_08am_bj - timedelta(hours=8)
prices[int(dt_08am_utc.timestamp())] = float(price)
print(f"Nasdaq 获取数据量: {len(prices)}")
return prices
# Fetch daily BTC closing prices from CryptoCompare (timestamps normalized to 08:00 Beijing time)
def get_cryptocompare_price():
url = "https://min-api.cryptocompare.com/data/v2/histoday"
limit = 2000
to_ts = int(time.time())
prices = {}
while True:
params = {
"fsym": "BTC",
"tsym": "USD",
"limit": limit,
"toTs": to_ts
}
print(f"请求 CryptoCompare: {params}")
response = requests.get(url, params=params)
if response.status_code != 200:
print("请求失败:", response.status_code)
break
data = ujson.loads(response.content)
if data["Response"] != "Success":
print("API 返回错误:", data.get("Message"))
break
entries = data["Data"]["Data"]
if not entries:
break
for entry in entries:
raw_ts = entry["time"]
price = entry["close"]
adjusted_ts = adjust_to_beijing_08am(raw_ts)
prices[adjusted_ts] = price
earliest = entries[0]["time"]
        if earliest <= 1279300000:  # roughly July 2010
break
to_ts = earliest - 1
time.sleep(1)
print(f"CryptoCompare 获取数据量: {len(prices)}")
return prices
# 保存数据到数据库
def save_to_database(data, source):
existing_timestamps = get_existing_timestamps()
connection = pymysql.connect(**DB_CONFIG)
new_data_count = 0
try:
with connection.cursor() as cursor:
sql = """
INSERT INTO btc_prices (timestamp, price, source)
VALUES (%s, %s, %s)
"""
for timestamp, price in data.items():
if (timestamp, source) not in existing_timestamps:
try:
cursor.execute(sql, (timestamp, price, source))
new_data_count += 1
except pymysql.MySQLError as e:
print(f"插入错误: {e}")
continue
connection.commit()
print(f"成功存入 {new_data_count} 条新数据({source}")
finally:
connection.close()
# Scheduled task: fetch and store prices from all sources
def fetch_and_store_data():
    print("========== Start fetching BTC price data ==========")
# Nasdaq
nasdaq_prices = get_nasdaq_price()
save_to_database(nasdaq_prices, "Nasdaq")
# CryptoCompare
cc_prices = get_cryptocompare_price()
save_to_database(cc_prices, "CryptoCompare")
print("========== 数据存储完成 ==========")
if __name__ == "__main__":
while True:
fetch_and_store_data()
        time.sleep(14400)  # run every 4 hours

1219
lyq/btc_stats_qt.py Normal file

File diff suppressed because it is too large Load Diff

125
lyq/btc_update.py Normal file
View File

@@ -0,0 +1,125 @@
import pymysql
import json
import os
import time
from datetime import datetime, timedelta
# Database configuration
DB_CONFIG = {
"host": "192.168.194.240",
"user": "root",
"password": "2GS@bPYcgiMyL14A",
"database": "btcdb",
"port": 4423,
"connect_timeout": 60,
"read_timeout": 60,
"write_timeout": 60,
"charset": "utf8mb4"
}
# Path of the generated data file
DATA_FILE = "btc_historical_price.py"
# Interval between runs in seconds (e.g. 3600 = hourly; 28800 = every 8 hours)
INTERVAL = 28800
def get_new_prices(source, last_timestamp=None):
"""
从数据库获取 source 数据源的最新价格
仅每天北京时间 08:00:00 的数据减 8 小时存入文件
"""
conn = pymysql.connect(**DB_CONFIG)
prices = {}
try:
with conn.cursor() as cursor:
if last_timestamp:
sql = """
SELECT timestamp, price
FROM btc_prices
WHERE source = %s AND timestamp > %s
ORDER BY timestamp
"""
cursor.execute(sql, (source, last_timestamp))
else:
sql = """
SELECT timestamp, price
FROM btc_prices
WHERE source = %s
ORDER BY timestamp
"""
cursor.execute(sql, (source,))
rows = cursor.fetchall()
for timestamp, price in rows:
ts_int = int(timestamp)
                # Convert to Beijing time
                dt_beijing = datetime.utcfromtimestamp(ts_int) + timedelta(hours=8)
                # If the row is stamped exactly 08:00:00 Beijing time, shift it back 8 hours
if dt_beijing.hour == 8 and dt_beijing.minute == 0 and dt_beijing.second == 0:
ts_int -= 8 * 3600
prices[str(ts_int)] = float(price)
finally:
conn.close()
return prices
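# Worked example: a row stamped 1609459200 (08:00 Beijing / 00:00 UTC on 2021-01-01) is
# stored under key "1609430400" (2020-12-31 16:00 UTC), i.e. shifted back 8 hours;
# rows at any other hour keep their original timestamp.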
def load_existing_data():
"""加载历史价格数据"""
if not os.path.exists(DATA_FILE):
return {}, {}
try:
with open(DATA_FILE, "r", encoding="utf-8") as f:
ns = {}
exec(f.read(), ns)
return ns.get("prices_temp", {}), ns.get("prices", {})
except Exception:
return {}, {}
def save_prices(prices_temp, prices):
"""保存价格数据到文件"""
with open(DATA_FILE, "w", encoding="utf-8") as f:
f.write("# 自动生成的BTC历史价格数据文件\n")
f.write(f"# 更新时间: {datetime.now()}\n\n")
f.write("prices_temp = ")
f.write(json.dumps(prices_temp, indent=4, ensure_ascii=False))
f.write("\n\nprices = ")
f.write(json.dumps(prices, indent=4, ensure_ascii=False))
f.write("\n")
def get_last_timestamp(price_dict):
"""获取当前字典中最大的时间戳"""
if not price_dict:
return None
return max(int(ts) for ts in price_dict.keys())
def update_once():
"""执行一次更新流程"""
prices_temp, prices = load_existing_data()
last_nasdaq_ts = get_last_timestamp(prices_temp)
last_crypto_ts = get_last_timestamp(prices)
nasdaq_new = get_new_prices("Nasdaq", last_nasdaq_ts)
crypto_new = get_new_prices("CryptoCompare", last_crypto_ts)
prices_temp.update(nasdaq_new)
prices.update(crypto_new)
save_prices(prices_temp, prices)
def main():
"""主循环任务"""
while True:
try:
update_once()
except Exception:
pass
time.sleep(INTERVAL)
if __name__ == "__main__":
main()

1838
lyq/btc_utxos_lyq2.py Normal file

File diff suppressed because it is too large Load Diff

1838
lyq/btc_utxos_lyq3.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,51 @@
import time
import subprocess
from datetime import datetime, timedelta
def check_running_process():
"""检查是否已经有相同的进程在运行"""
command = "ps -ef | grep 'python3 btc_utxos_lyq2.py' | grep -v grep"
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
output, _ = process.communicate()
    return bool(output)  # non-empty output means the same process is already running
def run_script_for_date(target_date):
"""运行指定日期的脚本"""
command = f"python3 btc_utxos_lyq2.py {target_date}"
    result = subprocess.run(command, shell=True)
if result.returncode != 0:
raise RuntimeError(f"Script failed for date {target_date}")
def main():
    # Start from 2024-12-16
    start_date = datetime(2024, 12, 16)
    end_date = datetime.utcnow()  # today's date
current_date = start_date
    skip_date = datetime(2024, 12, 18)
    if current_date == skip_date:
        current_date += timedelta(days=1)
else:
while current_date <= end_date:
target_date_str = current_date.strftime('%Y-%m-%d')
            # Check whether an identical process is already running
            if check_running_process():
                print(f"An identical process is already running; waiting before running the task for {target_date_str}.")
                time.sleep(60)  # wait 60 seconds before checking again
                continue
            # Run the script for this date
            print(f"Starting the task for {target_date_str}.")
            try:
                run_script_for_date(target_date_str)
                print(f"Task for {target_date_str} finished.")
                # Move on to the next day
current_date += timedelta(days=1)
except RuntimeError as e:
print(f"Error occurred: {e}. Retrying {target_date_str}.")
time.sleep(60)
if __name__ == "__main__":
main()

184
lyq/check_order_lyq.py Normal file
View File

@@ -0,0 +1,184 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
from binance.spot import Spot
import time
import requests
import datetime
import pymysql
import math
import pymongo
g_spot_client = Spot()
class Pair:
def __init__(self):
pass
depth_u = 0
depth_U = 0
depth_ts = 0
bids = {}
asks = {}
g_btcusdt = None
g_ethusdt = None
def init_db():
mc = pymongo.MongoClient("mongodb://127.0.0.1:27020/")
mdb = mc["border2"]
return mc, mdb
def get_depth(client, pair):
new_pair = Pair()
d = client.depth(pair, limit=5000)
new_pair.bids = d["bids"]
new_pair.asks = d["asks"]
new_pair.depth_u = d["lastUpdateId"]
print(pair, ": get_depth: init", new_pair.depth_u)
#print(new_pair.bids)
return new_pair
def dict2number(dict_in):
dict_out = {}
#print("dict2number", dict_in)
for id in dict_in:
#print("dict2number", id)
#price = (int(float(id[0])) / 100) * 100
#price = float(id[0])
quantity = float(id[1])
#pricestr = str(price)
dict_out[id[0]] = quantity
return dict_out
def dict2save(mdb, pair, dict_in, ts):
mdbc = mdb[pair]
s_append = {}
s_append["unixdt"] = int(ts / 1000)
#cnt = 0
for id in dict_in:
# print(cnt, id)
#if cnt >= 50:
#break
# bids_append[id] = top_bids[id]
s_append[id[0]] = id[1]
#cnt += 1
print("dict2save", s_append)
mdbc.insert_one(s_append)
def classify_order(dict_in):
dict_out = {}
for id in dict_in:
price = int(int(float(id))/100)*100
pricestr = str(price)
if pricestr in dict_out:
dict_out[pricestr] = dict_out[pricestr]+dict_in[id]
else:
dict_out[pricestr] = dict_in[id]
return dict_out
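# Illustrative example (not from the original source): classify_order buckets price levels
# into $100 bins and sums their quantities, e.g.
#   classify_order({"43256.70": 1.5, "43290.00": 0.5, "43150.00": 2.0})
#   -> {"43200": 2.0, "43100": 2.0}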
def stat_order(pair, bids_in, asks_in, ts, old_ts):
print(pair, ": stat_order cmp", ts, old_ts)
if ts - old_ts < 1000 * 60 * 5:
return False
bids = dict2number(bids_in)
asks = dict2number(asks_in)
bids_classify = classify_order(bids)
asks_classify = classify_order(asks)
print("bids_classify", bids_classify)
top_bids = sorted(bids_classify.items(), key=lambda x: x[1], reverse=False)
top_asks = sorted(asks_classify.items(), key=lambda x: x[1], reverse=False)
print("top_bids", top_bids)
mc, mdb = init_db()
dict2save(mdb, pair + "_bids", top_bids, ts)
dict2save(mdb, pair + "_asks", top_asks, ts)
print(pair, ": stat_order OK at", ts)
return True
def merge_order(dst, src):
new_dst = []
for dst_item in dst:
found = False
for src_item in src:
#print("dst", dst_item, "src", src_item)
if dst_item[0] == src_item[0]:
new_dst.append(src_item)
found = True
break
if found is False:
#print("merge_order dst copy", dst_item)
new_dst.append(dst_item)
return new_dst
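# Note: merge_order only refreshes price levels already present in dst; it neither adds new
# levels from the diff nor drops levels whose updated quantity is "0". Binance's documented
# procedure for maintaining a local order book also inserts new levels and removes
# zero-quantity ones, so this is a simplification of that scheme.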
def handler_order(pair, pair_name, msg_in, client):
ts = msg_in["E"]
dU = msg_in["U"]
du = msg_in["u"]
need_reinit = False
if pair is not None:
if (dU == pair.depth_u + 1) or (
(du > pair.depth_u) and (pair.depth_ts == 0) and (pair.depth_u != 0)):
bids = msg_in["b"]
asks = msg_in["a"]
#print("merge_order dst", pair.bids)
#print("merge_order src", bids)
#print("handle", pair_name, ts, dU, du, pair.depth_u)
pair.bids = merge_order(pair.bids, bids)
pair.asks = merge_order(pair.asks, asks)
pair.depth_U = dU
pair.depth_u = du
if stat_order(pair_name, pair.bids, pair.asks, ts, pair.depth_ts):
pair.depth_ts = ts
print(pair_name, ": append", du)
else:
if (dU != pair.depth_u + 1) and (pair.depth_u != 0):
need_reinit = True
else:
pass
if need_reinit:
pair = get_depth(client, pair_name)
print(pair_name, ": reinit", pair.depth_u, dU, pair.depth_ts)
return pair
def order_handler(message):
#print(message)
global g_spot_client
global g_btcusdt
global g_ethusdt
if message["stream"] == "btcusdt@depth":
ddata = message["data"]
if ddata["e"] == "depthUpdate":
g_btcusdt = handler_order(g_btcusdt, "BTCUSDT", ddata, g_spot_client)
elif message["stream"] == "ethusdt@depth":
ddata = message["data"]
if ddata["e"] == "depthUpdate":
g_ethusdt = handler_order(g_ethusdt, "ETHUSDT", ddata, g_spot_client)
else:
pass
def check_order():
global g_spot_client
global g_btcusdt
global g_ethusdt
ws_client = WebsocketClient()
ws_client.start()
ws_client.instant_subscribe(
stream=['btcusdt@depth', 'ethusdt@depth'],
callback=order_handler,
)
g_btcusdt = get_depth(g_spot_client, "BTCUSDT")
g_ethusdt = get_depth(g_spot_client, "ETHUSDT")
check_order()

146
lyq/check_zone_lyq.py Normal file
View File

@@ -0,0 +1,146 @@
# coding=utf-8
import ujson
#from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
from binance.spot import Spot
import time
import requests
import datetime
import pymysql
import math
#import pymongo
g_btcusdt_prices = {}
g_ethusdt_prices = {}
class ZoneDbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init zone db suceess!")
def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
with self.conn.cursor() as cursor:
print(
dayutc, change_us, change_asia, change_eu)
sql_insert = "REPLACE INTO btczonechange3 (unixdt, change_us, change_asia, change_eu"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
cursor.execute(sql_insert, (
dayutc, change_us, change_asia, change_eu))
self.conn.commit()
class EthZoneDbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init zone db suceess!")
def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
with self.conn.cursor() as cursor:
print(
dayutc, change_us, change_asia, change_eu)
sql_insert = "REPLACE INTO ethzonechange3 (unixdt, change_us, change_asia, change_eu"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
cursor.execute(sql_insert, (
dayutc, change_us, change_asia, change_eu))
self.conn.commit()
def get_history_price(spot_client, pair_name):
result = spot_client.klines(pair_name, "1h", limit=1000)
prices_open = {}
prices_close = {}
for price in result:
prices_open[str(price[0])] = float(price[1])
prices_close[str(price[0])] = float(price[4])
open_out = sorted(prices_open.items(), reverse=True)
close_out = sorted(prices_close.items(), reverse=True)
return open_out, close_out, prices_open, prices_close
def get_last_price(spot_client, pair_name, cache_open, cache_close):
result = spot_client.klines(pair_name, "1h", limit=1)
for price in result:
cache_open[str(price[0])] = float(price[1])
cache_close[str(price[0])] = float(price[4])
open_out = sorted(cache_open.items(), reverse=True)
close_out = sorted(cache_close.items(), reverse=True)
return open_out, close_out, cache_open, cache_close
def calc_zone(prices_open, price_close, zone_start, zone_end):
zone_total = 30*24
zone_hours = 0
zones = {}
price_start = 0
price_end = 0
dt_start = None
item_idx = 0
for dt in prices_open:
tobj = time.gmtime(int(dt[0]) / 1000)
if tobj.tm_hour == zone_start:
price_start = dt[1]
dt_start = tobj
if zone_hours == 0 and tobj.tm_hour < zone_end:
zone_total = zone_total + tobj.tm_hour + 1
close_list = price_close[item_idx]
price_end = close_list[1]
else:
if tobj.tm_hour == zone_end:
close_list = price_close[item_idx]
price_end = close_list[1]
if price_start > 0 and price_end > 0:
#zones[dt_end] = (price_end-price_start)/price_start
daystr = time.strftime("%d %b %Y", dt_start)
dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
zones[str(dayutc)] = price_end - price_start
price_start = 0
price_end = 0
item_idx = item_idx + 1
zone_hours = zone_hours + 1
if zone_hours >= zone_total:
break
return zones
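# Illustrative reading of calc_zone (not from the original source): prices_open/price_close
# are hourly klines sorted newest-first, and zone_start/zone_end are UTC hours bounding a
# trading-session window (0-12 Asia, 6-18 Europe, 12-0 US, as used in check_zone below).
# The returned dict maps each session's day (unix timestamp derived via time.mktime from
# dt_start) to the close-minus-open price change, covering roughly the last 30 days.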
def check_zone():
dbif = ZoneDbIf()
ethdbif = EthZoneDbIf()
spot_client = Spot()
prices_open, prices_close, cache_open, cache_close = get_history_price(spot_client, "BTCUSDT")
prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_history_price(spot_client, "ETHUSDT")
prev_tm = time.gmtime(time.time())
print("update", prev_tm.tm_hour)
while True:
zone_asia = calc_zone(prices_open, prices_close, 0, 12)
zone_eu = calc_zone(prices_open, prices_close, 6, 18)
zone_us = calc_zone(prices_open, prices_close, 12, 0)
zone_asia_eth = calc_zone(prices_open_eth, prices_close_eth, 0, 12)
zone_eu_eth = calc_zone(prices_open_eth, prices_close_eth, 6, 18)
zone_us_eth = calc_zone(prices_open_eth, prices_close_eth, 12, 0)
#print(zone_asia)
#print(zone_eu)
#print(zone_us)
for dt in zone_asia:
change_us = 0
change_eu = 0
if dt in zone_us:
change_us = zone_us[dt]
if dt in zone_eu:
change_eu = zone_eu[dt]
dbif.save_zone_change(dt, change_us, zone_asia[dt], change_eu)
change_us_eth = 0
change_eu_eth = 0
if dt in zone_us_eth:
change_us_eth = zone_us_eth[dt]
if dt in zone_eu_eth:
change_eu_eth = zone_eu_eth[dt]
ethdbif.save_zone_change(dt, change_us_eth, zone_asia_eth[dt], change_eu_eth)
while True:
time.sleep(60)
cur_tm = time.gmtime(time.time())
if cur_tm.tm_hour != prev_tm.tm_hour:
prev_tm = cur_tm
time.sleep(60)
prices_open, prices_close, cache_open, cache_close = get_last_price(spot_client, "BTCUSDT", cache_open, cache_close)
prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_last_price(spot_client, "ETHUSDT", cache_open_eth,
cache_close_eth)
print("update", cur_tm.tm_hour)
break
check_zone()

562
lyq/db_if_qt.py Normal file
View File

@@ -0,0 +1,562 @@
# coding=utf-8
import datetime
import pymysql
from loguru import logger
import time
class DbIf:
def __init__(self, host="172.17.0.1", port=4419, user="root", password="IeQcJNnagkaFP1Or", dbname="btcdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname,cursorclass=pymysql.cursors.DictCursor)
def update_to_dailyindsv2(self, dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity):
with self.conn.cursor() as cursor:
print(dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity)
sql_insert = "REPLACE INTO dailyindsv3e2 (unixdt, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
cursor.execute(sql_insert, (
                dt_utc, height_begin, height_end, lth_volume, frm, cvdd, realized_price, transferred_price, balanced_price, nvt_ratio, velocity))
            self.conn.commit()
'''
def update_to_realtimeindsv2(self, dt_utc, mempool_volume, mempool_fees):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO realtimeindsv2b (unixdt, mempool_volume, mempool_fees)"
cursor.execute(sql_insert, (dt_utc, mempool_volume, mempool_fees))
'''
def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address,
send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
nupl,vdd):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO dailyindsv3e1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, nupl,vdd"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, total_address, new_address_volume, active_address, send_address,
receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
price, marketcap, rcap, earcap, mvrv,
nupl,vdd))
self.conn.commit()
'''
def update_to_dailyinds(self, dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address,
send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,
asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60,
day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd,
liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv,
lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO dailyindsv1 (unixdt, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address, receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr,"
sql_insert = sql_insert + " asol, eaasol, dormancy, adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, "
sql_insert = sql_insert + " ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv, nupl"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, height_begin, height_end, profitrate, fees, txs, new_address, new_address_volume, active_address, send_address,
receive_address, volume, eavolume, sopr, asopr, easopr, lthsopr, sthsopr, asol, eaasol, dormancy,
adormancy, eadormancy, cdd, sacdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730,
csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate,
price, marketcap, rcap, earcap, mvrv, lthmarketcap, lthrcap, sthmarketcap, sthrcap, lthmvrv, sthmvrv,
nupl))
self.conn.commit()
'''
'''
# daily daily on-chain volume
def query_from_dailyvolume(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `dailyvolume`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
'''
# newaddrs
'''
def update_to_newaddr(self, dayutc, last_profit_rate, last_fees, last_txs, last_eatxs, last_newaddr_cnt,
last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change,
last_vol):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `newaddrs` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, {dayutc, })
result = cursor.fetchone()
# print(dt_utc)
# print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
print("update")
sql_update = 'UPDATE newaddrs SET `total`=%s, `amount`=%s, `active`=%s, `tx`=%s, `rx`=%s, `volume_change`=%s, `volume=%s`,`txs`=%s, `eatxs`=%s, `fees`=%s, `last_profit_rate`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
cursor.execute(sql_update, (
last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
last_rx_addr_cnt,
last_vol_change, last_vol, last_txs, last_eatxs, last_fees, last_profit_rate, dayutc))
else:
print("insert")
sql_insert = "INSERT INTO `newaddrs` (`unixdt`, `total`, `amount`, `active`, `tx`, `rx`, `volume_change`, `volume`, `txs`, `eatxs`, `fees`, `last_profit_rate`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dayutc, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt,
last_rx_addr_cnt, last_vol_change, last_vol, last_txs, last_eatxs, last_fees,
last_profit_rate))
self.conn.commit()
'''
'''
def update_to_sellprofit(self, dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailybuysell` (`unixdt`, `price`, `buyvolume`, `sellvolume`, `sellprofit`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
#print(sql_insert)
#print(datetime, txid, vout, voutalias, amount, height)
cursor.execute(sql_insert, (dayutc, current_price, block_buy_volume, block_sell_volume, block_sell_profit, last_height))
self.conn.commit()
'''
'''
def update_to_bigsellprofit(self, dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit,
days_earliest, days_latest, days_largest, days_current, tx_buy_address, txid,
block_height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `bigsell` (`unixdt`, `buyprice`, `sellprice`, `amount`, `profit`, `days_earliest`, `days_latest`, `days_largest`, `days_current`, `address`, `txid`, `height`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
# print(datetime, txid, vout, voutalias, amount, height)
cursor.execute(sql_insert, (
dayutc, current_price, tx_sell_average, tx_sell_amount, tx_sell_profit, days_earliest, days_latest,
days_largest, days_current, tx_buy_address, txid, block_height))
self.conn.commit()
'''
'''
def update_to_dailycdd(self, dt_utc, cdd):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailycdd` (`unixdt`, `cdd`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, cdd))
self.conn.commit()
'''
'''
def update_to_dailycdddays(self, dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30,
day60, day90, day180, day365, day730):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailycdddays` (`unixdt`, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, `day1`, day7, day30, day60, day90, day180, day365, day730) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, dormancy, adormancy, eadormancy, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180,
day365,
day730))
self.conn.commit()
'''
'''
def update_to_dailysopr(self, dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailysopr` (`unixdt`, `sopr`, asopr, easopr, lth_sopr, sth_sopr) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, last_sopr, last_asopr, last_easopr, last_lth_sopr, last_sth_sopr))
self.conn.commit()
'''
'''
def update_to_inds(self, dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit,
rloss, rplrate, price, marketcap, rcap, earcap, mvrv):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `inds` (`unixdt`, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss, rplrate, price, marketcap, rcap, earcap, mvrv) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (
dt_utc, csupply, mintusd, sumcsupply, sumcdd, sumeacdd, liveliness, ealiveliness, rprofit, rloss,
rplrate,
price, marketcap, rcap, earcap, mvrv))
self.conn.commit()
'''
# daily volume
'''
def update_to_dailyvolume(self, dt_utc, volume):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, volume))
self.conn.commit()
'''
'''with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `dailyvolume` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, {dt_utc,})
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
print("update")
sql_update = 'UPDATE dailyvolume SET `volume`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
cursor.execute(sql_update, (volume, dt_utc))
else:
print("insert")
sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, volume))
self.conn.commit()'''
'''
def update_to_dailyfees(self, dt_utc, fees):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `dailyfees` (`unixdt`, `fees`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, fees))
self.conn.commit()
'''
'''
def import_to_dailyvolume2(self, dt_utc, volume):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyvolume` (`unixdt`, `volume`) VALUES (FROM_UNIXTIME(%s), %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, volume))
self.conn.commit()
def delete_dailyvolume_data(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyvolume`"
cursor.execute(sql_query)
self.conn.commit()
# daily market cap
def query_from_marketcap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `dailyprice`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
#daily price
def import_to_dailyprice(self, dt_utc, price, volume, marketcap, csupply):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s)"
#print(sql_insert)
cursor.execute(sql_insert, (dt_utc, price, volume, marketcap, csupply))
self.conn.commit()
def update_to_dailyprice(self, dt_utc, price, volume, change):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
#print(sql_insert)
cursor.execute(sql_insert, (dt_utc, price, volume, change))
self.conn.commit()
def update_to_dailyprice2(self, dt_utc, price, volume, change, marketcap, csupply):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `dailyprice` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, {dt_utc,})
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
print("update")
sql_update = 'UPDATE dailyprice SET `price`=%s, `marketcap`=%s, `csupply`=%s, `volume`=%s, `change`=%s WHERE unixdt=FROM_UNIXTIME(%s)'
cursor.execute(sql_update, (price, marketcap, csupply, volume, change, dt_utc))
else:
print("insert")
sql_insert = "INSERT INTO `dailyprice` (`unixdt`, `price`, `volume`, `change`, `marketcap`, `csupply`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (dt_utc, price, volume, change, marketcap, csupply))
self.conn.commit()
def update_dailypricechange(self):
with self.conn.cursor() as cursor:
sql_query = "SELECT unixdt,price FROM `dailyprice` order by unixdt"
cursor.execute(sql_query)
results = cursor.fetchall()
prevprice = -1
for result in results:
if prevprice < 0:
prevprice = result["price"]
else:
#print(result["unixdt"], result["price"], result["marketcap"])
try:
change = (result["price"]/prevprice - 1)*100
except:
change = 0
#print(csupply)
datestr = result["unixdt"]
logger.debug(datestr.__format__('%Y-%m-%d') + " " + str(change))
sql_update = 'UPDATE dailyprice SET `change`=%s WHERE unixdt=%s'
cursor.execute(sql_update, (str(change), result["unixdt"]))
prevprice = result["price"]
self.conn.commit()
def delete_dailyprice_data(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyprice`"
cursor.execute(sql_query)
self.conn.commit()
def delete_failed_blockvolume(self, height):
with self.conn.cursor() as cursor:
sql_insert = "DELETE FROM `bigamountvout` WHERE height=%s"
cursor.execute(sql_insert, (height,))
sql_insert = "DELETE FROM `bigamounttx` WHERE height=%s"
cursor.execute(sql_insert, (height,))
sql_insert = "DELETE FROM `blockamount` WHERE height=%s"
cursor.execute(sql_insert, (height,))
self.conn.commit()
#block check --- big amount for vout
def query_from_bigamountvout(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `bigamountvout`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
'''
def update_to_bigamountvout(self, datetime, txid, vout, voutn, vouttype, amount, height, days, buyin, sellout,
profit):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `bigamountvoutv3e` (`unixdt`, `vout`, `voutn`, `vouttype`, `amount`, `height`, `txid`, days, buyprice, sellprice, profit) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
# print(datetime, txid, vout, voutalias, amount, height)
cursor.execute(sql_insert,
(datetime, vout, voutn, vouttype, amount, height, txid, days, buyin, sellout, profit))
self.conn.commit()
'''
# block check --- big amount tx
def query_from_bigamounttx(self, start_id=0, end_id=0, start_time="", end_time="", address="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `bigamounttx`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
def update_to_bigamounttx(self, datetime, txid, amount, height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `bigamounttx` (`unixdt`, `amount`, `height`, `txid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
#print(sql_insert)
#print(datetime, txid, amount, height)
cursor.execute(sql_insert, (datetime, amount, height, txid))
self.conn.commit()
# block check --- per block amount
def query_from_blockamount(self, start_id=0, end_id=0, start_time="", end_time="", limit=0, amount=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `blockamount`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
else:
if amount > 0:
sql_query = sql_query + "WHERE amount > " + str(amount)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
if amount > 0:
sql_query = sql_query + " AND amount > " + str(amount)
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
cursor.execute(sql_query)
return cursor.fetchall()
def update_to_blockamount(self, datetime, blockid, amount, height):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `blockamount` (`unixdt`, `amount`, `height`, `blockid`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
#print(sql_insert)
#print(datetime, blockid, amount, height)
cursor.execute(sql_insert, (datetime, amount, height, blockid))
self.conn.commit()
def delete_node_data(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `blockamount`"
cursor.execute(sql_query)
sql_query = "DELETE FROM `bigamountvout`"
cursor.execute(sql_query)
sql_query = "DELETE FROM `bigamounttx`"
cursor.execute(sql_query)
self.conn.commit()
def update_realize_cap(self, dayutc, last_rv):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyrcap` (`unixdt`, `rcap`) VALUES (FROM_UNIXTIME(%s), %s)"
#print(sql_insert)
#print(datetime, blockid, amount, height)
cursor.execute(sql_insert, (dayutc, last_rv))
self.conn.commit()
# daily realize cap
def query_from_realizecap(self, start_id=0, end_id=0, start_time="", end_time="", limit=0):
with self.conn.cursor() as cursor:
sql_query = "SELECT * from `dailyrcap`"
if start_id > 0:
sql_query = sql_query + " WHERE id > " + str(start_id)
if end_id > 0:
sql_query = sql_query + " AND id < " + str(end_id)
else:
if end_id > 0:
sql_query = sql_query + " WHERE id < " + str(end_id)
if len(start_time) > 0:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(\'" + end_time + "\')"
else:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(\'" + start_time + "\') AND UNIX_TIMESTAMP(NOW())"
else:
if len(end_time) > 0:
sql_query = sql_query + " WHERE UNIX_TIMESTAMP(unixdt) BETWEEN \
UNIX_TIMESTAMP(NOW()) AND UNIX_TIMESTAMP(\'" + end_time + "\')"
sql_query = sql_query + " order by `unixdt` desc"
if limit > 0:
sql_query = sql_query + " LIMIT " + str(limit)
print(sql_query)
cursor.execute(sql_query)
return cursor.fetchall()
def update_daily_addr(self, dayutc, last_add_cnt):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `dailyaddradd` (`unixdt`, `addcnt`) VALUES (FROM_UNIXTIME(%s), %s)"
#print(sql_insert)
#print(datetime, blockid, amount, height)
cursor.execute(sql_insert, (dayutc, last_add_cnt))
self.conn.commit()
def delete_daily_addr(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyaddradd`"
cursor.execute(sql_query)
self.conn.commit()
def delete_daily_rv(self, config):
with self.conn.cursor() as cursor:
sql_query = "DELETE FROM `dailyrcap`"
cursor.execute(sql_query)
self.conn.commit()
'''
def __del__(self):
self.conn.close()

117
lyq/exchangeRate_lyq.py Normal file
View File

@@ -0,0 +1,117 @@
import requests
import pymysql
import time
from datetime import datetime
# Target currency list (RUB stays in this list so the logic can be handled uniformly)
symbols = ["EUR", "GBP", "JPY", "CAD", "SEK", "CHF", "CNY", "RUB"]
# Database configuration
db_config = {
"host": "127.0.0.1",
"user": "root",
"password": "2GS@bPYcgiMyL14A",
"database": "Macroeconomics",
"port": 4423
}
def fetch_rates_frankfurter():
base = "USD"
url = f"https://api.frankfurter.app/latest?from={base}&to=" + ",".join([s for s in symbols if s != "RUB"])
retries = 5
while retries > 0:
try:
response = requests.get(url, timeout=10)
response.raise_for_status()
data = response.json()
rates = data.get("rates", {})
if not rates:
raise ValueError("接口返回空数据")
return rates
except Exception as e:
retries -= 1
print(f"Frankfurter 请求失败,重试中... 剩余次数: {retries}, 错误: {e}")
time.sleep(1)
print("Frankfurter 多次重试后失败,返回空数据")
return {}
def fetch_rub():
try:
url = "https://open.er-api.com/v6/latest/USD"
response = requests.get(url, timeout=10)
data = response.json()
if data.get("result") == "success":
rub_rate = data["rates"].get("RUB")
if rub_rate:
return rub_rate
except Exception as e:
print(f"获取 RUB 失败: {e}")
return None
def calc_dxy(rates):
weights = {
"EUR": 0.576,
"JPY": 0.136,
"GBP": 0.119,
"CAD": 0.091,
"SEK": 0.042,
"CHF": 0.036
}
weighted_sum = 0
weight_total = 0
for ccy, w in weights.items():
rate = rates.get(ccy)
if rate:
weighted_sum += rate * w
weight_total += w
if weight_total > 0:
return weighted_sum / weight_total
return None
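# Hedged usage sketch (reviewer addition, not part of the original script): calc_dxy expects a dict of
# USD->currency rates; the weights above sum to 1.0, so the result is a weighted arithmetic mean of the
# component rates, an approximation rather than the official geometric DXY formula. The numbers below
# are illustrative only.
#   sample_rates = {"EUR": 0.92, "JPY": 149.0, "GBP": 0.79, "CAD": 1.36, "SEK": 10.4, "CHF": 0.88}
#   calc_dxy(sample_rates)  # -> the weighted average of the six component rates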
def save_to_db(rates, dxy):
current_time = datetime.now().replace(second=0, microsecond=0)
data = {}
for ccy in symbols:
rate = rates.get(ccy)
if rate is not None:
data[f"USD{ccy}"] = round(rate, 5)
if dxy is not None:
data["DXY"] = round(dxy, 5)
connection = pymysql.connect(**db_config)
try:
with connection.cursor() as cursor:
for symbol, value in data.items():
query = """INSERT INTO exchangeRate (date, symbol, _value) VALUES (%s, %s, %s)"""
cursor.execute(query, (current_time, symbol, value))
connection.commit()
print(f"{current_time} 数据写入数据库成功")
except Exception as e:
print(f"写入数据库失败: {e}")
finally:
connection.close()
if __name__ == "__main__":
while True:
rates = fetch_rates_frankfurter()
# Fetch the RUB rate
rub = fetch_rub()
if rub:
rates["RUB"] = rub
else:
print("未获取到 RUB 汇率")
if rates:
dxy = calc_dxy(rates)
print(f"汇率数据: {rates}")
if dxy:
print(f"美元指数近似值: {dxy:.5f}")
else:
print("美元指数近似值 无法计算")
save_to_db(rates, dxy)
else:
print("未获取到汇率数据")
time.sleep(1800)  # run every 30 minutes

191
lyq/nochain_eth_lyq.py Normal file
View File

@@ -0,0 +1,191 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
#from loguru import logger
import datetime
import pymysql
import math
class NochainDbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname,
cursorclass=pymysql.cursors.DictCursor)
print("init nochain db suceess!")
def save(self, day, price, ma350x2, ma111, ma350x1, ma350x1r6, ma350x3, ma350x5):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `nochainv3a` (`unixdt`, `price`, `ma350x2`, `ma111`, ma350x1, ma350x1r6, ma350x3, ma350x5) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s)"
# print(sql_insert)
cursor.execute(sql_insert, (day, price, ma350x2, ma111, ma350x1, ma350x1r6, ma350x3, ma350x5))
self.conn.commit()
def prepare_maxxx(prices, day, madays):
total = 0
cnt = 0
for i in range(madays):
if day in prices:
total += prices[day]
cnt += 1
# print(day, total, cnt)
day = str(int(day) - 3600 * 24)
if cnt > 0:
return total / cnt
return 0
def prepare_ma350(prices, day):
return prepare_maxxx(prices, day, 350)
def prepare_ma111(prices, day):
return prepare_maxxx(prices, day, 111)
def prepare_gold_ratio(prices):
ma350x1 = {}
ma350x1r6 = {}
ma350x2 = {}
ma350x3 = {}
ma350x5 = {}
for day in prices:
ma350x1[day] = prepare_maxxx(prices, day, 350)
ma350x1r6[day] = ma350x1[day] * 1.6
ma350x2[day] = ma350x1[day] * 2
ma350x3[day] = ma350x1[day] * 3
ma350x5[day] = ma350x1[day] * 5
return ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5
def calc_pi_cycle_top(dbif, prices):
ma350x2 = {}
ma111 = {}
for day in prices:
ma350x2[day] = prepare_ma350(prices, day) * 2
ma111[day] = prepare_ma111(prices, day)
return ma350x2, ma111
def get_current_utc():
curtime = time.gmtime(time.time())
daystr = time.strftime("%d %b %Y", curtime)
dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
return dayutc
def get_current_price():
url = "https://data.messari.io/api/v1/assets/eth/metrics/market-data&interval=1d"
header_set = {}
header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
response_price = requests.get(url, headers=header_set)
if response_price.status_code == 200:
priceweb = ujson.loads(response_price.content)
if "data" in priceweb:
priceset = priceweb["data"]
if "market_data" in priceset:
pricedata = priceset["market_data"]
if "price_usd" in pricedata:
price = pricedata["price_usd"]
return price
return None
def get_history_price(coin_id):
prices = {}
dayutc = get_current_utc()
price = get_current_price()
if price is not None:
prices[str(dayutc)] = price
print("start...", dayutc, price)
dayt = time.gmtime()
daystr = time.strftime("%Y", dayt)
year = int(daystr)
end_year = year
while True:
# if end_year < 2022:
# break
url = ""
if end_year != year:
start_year = end_year
url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/price/time-series?start="
else:
url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/price/time-series?after=" + str(
year) + "-01-01&order=descending&interval=1d"
# now_time = time.gmtime()
# daystr = time.strftime("%Y-%m-%d", now_time)
# url = url + daystr + "&order=desc&format=json"
if end_year != year:
url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&interval=1d&order=descending&interval=1d"
header_set = {}
header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
# header_set["Content-Type"] = "application/json"
print(header_set, url)
response_supply = requests.get(url, headers=header_set)
# print(response_supply)
if response_supply.status_code == 200:
#print(response_supply.content)
supplyweb = ujson.loads(response_supply.content)
if "data" in supplyweb:
supplyset = supplyweb["data"]
if "values" in supplyset:
valueset = supplyset["values"]
if valueset is not None:
for supply in valueset:
dayutc = int(supply[0] / 1000)
s = supply[1]
prices[str(dayutc)] = float(s)
# print(s, dayutc, supplys[str(dayutc)])
# break
else:
break
else:
break
end_year -= 1
time.sleep(2)
return prices
def get_eth_history_price():
return get_history_price("ethereum")
def nochain():
global dbif
dbif = NochainDbIf()
print("prepare...")
prices = get_eth_history_price()
#print(prices)
ma350x2, ma111 = calc_pi_cycle_top(dbif, prices)
print("calc_pi_cycle_top ok.")
ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5 = prepare_gold_ratio(prices)
print("prepare_gold_ratio ok.")
for day in prices:
#print(day)
ma350x21 = 0
if day in ma350x2:
ma350x21 = ma350x2[day]
ma1111 = 0
if day in ma111:
ma1111 = ma111[day]
ma350x11 = 0
if day in ma350x1:
ma350x11 = ma350x1[day]
ma350x1r61 = 0
if day in ma350x1r6:
ma350x1r61 = ma350x1r6[day]
ma350x31 = 0
if day in ma350x3:
ma350x31 = ma350x3[day]
ma350x51 = 0
if day in ma350x5:
ma350x51 = ma350x5[day]
# print(day, prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1)
dbif.save(int(day), prices[day], ma350x21, ma1111, ma350x11,
ma350x1r61, ma350x31, ma350x51)
#print("save ok.")
nochain()

27
lyq/nochain_lyq_utc08.py Normal file
View File

@@ -0,0 +1,27 @@
import subprocess
from datetime import datetime, timedelta
import time
while True:
# Get the current UTC time
now = datetime.utcnow()
# Compute the time remaining until the next 08:00 UTC
next_run = datetime(now.year, now.month, now.day, 8, 0)
if now >= next_run:
next_run += timedelta(days=1)
sleep_time = (next_run - now).total_seconds()
# Sleep until the next 08:00 UTC
time.sleep(sleep_time)
# Run nochain_lyq_v2.py
command1 = f"python3 nochain_lyq_v2.py"
subprocess.run(command1, shell=True)
# Run nochain_update_lyq.py
command2 = f"python3 nochain_update_lyq.py"
subprocess.run(command2, shell=True)
# Run nochain_eth_lyq.py
command3 = f"python3 nochain_eth_lyq.py"
subprocess.run(command3, shell=True)

736
lyq/nochain_lyq_v2.py Normal file
View File

@@ -0,0 +1,736 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
#from loguru import logger
from datetime import datetime
import pymysql
import math
from datetime import datetime
from stock_indicators import indicators
from stock_indicators.indicators.common.quote import Quote
class NochainDbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init nochain db suceess!")
def save(self, day, price, ma350x2, ma111, supply, flow, s2fratio, s2fdeflection, ma350x1, ma350x1r6, ma350x3, ma350x5):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `nochainv3c` (`unixdt`, `price`, `ma350x2`, `ma111`, `btcsupply`, `flow`, `s2fratio`, `s2fdeflection`, ma350x1, ma350x1r6, ma350x3, ma350x5) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
#print(sql_insert)
cursor.execute(sql_insert, (day, price, ma350x2, ma111, supply, flow, s2fratio, s2fdeflection, ma350x1, ma350x1r6, ma350x3, ma350x5))
self.conn.commit()
def save_ssr(self, day, price, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc):
with self.conn.cursor() as cursor:
sql_insert = "REPLACE INTO `nochainv3b` (`unixdt`, `price`, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s, %s, %s, %s, %s)"
#print(sql_insert)
cursor.execute(sql_insert, (day, price, marketcap, usdtsupply, usdcsupply, busdsupply, daisupply, stables_supply, ssr, ssrosc))
self.conn.commit()
# The NochainDbIf class above interacts with a MySQL database and saves data into two tables
# (nochainv3c and nochainv3b): save() stores price- and indicator-related metrics, while save_ssr()
# stores stablecoin-supply and SSR (Stablecoin Supply Ratio) metrics.
# Breakdown of each method:
# __init__: constructor that opens the database connection with the given host, port, user, password
# and database name, using the pymysql library.
# save(): inserts or updates a row in the nochainv3c table from the supplied price/supply metrics.
# save_ssr(): inserts or updates a row in the nochainv3b table from the supplied stablecoin/SSR metrics.
# Both methods use REPLACE INTO, which inserts a new row or, if a row with the same primary/unique key
# exists, replaces it. They also use the context manager `with self.conn.cursor() as cursor` so cursor
# resources are released properly, and call self.conn.commit() afterwards to persist the changes.
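# Illustrative sketch (reviewer addition, based on an assumption that `unixdt` is a unique key):
# REPLACE INTO deletes the conflicting row and inserts a fresh one, so any columns omitted from the
# statement fall back to their defaults. If only some columns should change while the rest are kept,
# an upsert preserves them:
#   sql_upsert = ("INSERT INTO `nochainv3c` (`unixdt`, `price`) VALUES (FROM_UNIXTIME(%s), %s) "
#                 "ON DUPLICATE KEY UPDATE `price` = VALUES(`price`)")
#   cursor.execute(sql_upsert, (day, price))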
def get_history_price():
prices = {}
response_price = requests.get(
'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=MKPRU&api_key=FZqXog4sR-b7cYnXcRVV')
if response_price.status_code == 200:
#print(response_price.content)
priceweb = ujson.loads(response_price.content)
if "datatable" in priceweb:
priceset = priceweb["datatable"]
if "data" in priceset:
pricedata = priceset["data"]
for price in pricedata:
daystr = price[1]
p = price[2]
dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
prices[str(int(dayutc))] = float(p)
#print(price, int(dayutc), prices[str(int(dayutc))])
return prices
# get_history_price retrieves historical BTC prices from the Nasdaq Data Link API: it initialises an
# empty `prices` dict, sends a GET request with requests.get(), and on a 200 response parses the JSON
# with ujson.loads(). It reads the "datatable" key and its "data" list, and for each price point
# converts the date string to a Unix timestamp with time.mktime(time.strptime(...)) and stores the
# price in `prices` keyed by that timestamp string. Finally it returns the `prices` dict.
def get_history_price2(pricedict):
#pricedict = {}
dayt = time.gmtime()
daystr = time.strftime("%Y", dayt)
year = int(daystr)
end_year = year
while True:
url = ""
if end_year != year:
start_year = end_year
url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start="
else:
url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/price/time-series?start=" + str(
year) + "-01-01&end="+str(year)+"-12-31&order=descending&interval=1d"
if end_year != year:
url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
header_set = {}
header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
# header_set["Content-Type"] = "application/json"
print(header_set, url)
response_price = requests.get(url, headers=header_set)
# print(response_price)
if response_price.status_code == 200:
# print(response_price.content)
priceweb = ujson.loads(response_price.content)
if "data" in priceweb:
priceset = priceweb["data"]
if "values" in priceset:
valueset = priceset["values"]
if valueset is not None:
for supply in valueset:
dayutc = int(supply[0] / 1000)
s = supply[1]
ret_time = time.gmtime(dayutc)
ret_daystr = time.strftime("%d %b %Y", ret_time)
ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
pricedict[str(ret_dayutc)] = float(s)
# print(s, dayutc, pricedict[str(dayutc)])
# break
else:
break
else:
break
end_year -= 1
time.sleep(2)
return pricedict
# get_history_price2 fetches historical BTC prices from the Messari API and stores them in a dict:
# it takes `pricedict` (the dict to fill), initialises the current year and an `end_year` counter, and
# loops until explicitly broken. In each iteration it builds the time-series URL for the year window,
# sets a header with the Messari API key, and sends the request with requests.get(). On a 200 response
# it parses the JSON, walks the "data"/"values" list, converts each timestamp from milliseconds to
# seconds, normalises it to a UTC day key, and stores the price in `pricedict` keyed by that
# Unix-timestamp string. It then decrements `end_year`, sleeps 2 seconds between requests, continues
# until no more data is returned, and finally returns `pricedict`.
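# Note (reviewer addition, hedged): time.mktime() interprets its argument as *local* time, so the
# "%d %b %Y" round-trip above only lands exactly on UTC midnight when the host runs in UTC. A sketch
# that stays in UTC regardless of the server timezone, assuming the same day-bucketing is wanted:
#   import calendar, time
#   def day_key_utc(ms_timestamp):
#       t = time.gmtime(ms_timestamp / 1000)
#       return str(calendar.timegm((t.tm_year, t.tm_mon, t.tm_mday, 0, 0, 0, 0, 0, 0)))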
def get_history_supply(supplys):
#supplys = {}
#issues = {}
response_supply = requests.get(
'https://data.nasdaq.com/api/v3/datatables/QDL/BCHAIN?code=TOTBC&api_key=FZqXog4sR-b7cYnXcRVV')
if response_supply.status_code == 200:
#print(response_supply.content)
supplyweb = ujson.loads(response_supply.content)
if "datatable" in supplyweb:
supplyset = supplyweb["datatable"]
# if "end_date" in supplyset:
# end_date = supplyset["end_date"]
if "data" in supplyset:
supplydata = supplyset["data"]
#previssue = 0
#prevday = 0
for supply in supplydata:
daystr = supply[1]
p = supply[2]
dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
supplys[str(int(dayutc))] = float(p)
#if supply[0] == end_date:
#previssue = float(p)
#prevday = dayutc
#else:
#issues[str(int(prevday))] = previssue - supplys[str(int(dayutc))]
#previssue = float(p)
#prevday = dayutc
#print(supply, int(dayutc), supply[str(int(dayutc))])
#return supplys, issues
print("get_history_supply", supplys)
return supplys
# get_history_supply retrieves the historical total bitcoin supply from the Nasdaq Data Link API.
# It takes a `supplys` dict to fill, sends a GET request, and on a 200 response parses the JSON with
# ujson.loads(). It reads the "datatable"/"data" list, and for each data point converts the date string
# to a Unix timestamp (time.mktime(time.strptime(...))) and stores the supply value in `supplys` keyed
# by that timestamp string, then prints and returns the dict. The commented-out parts relate to
# computing day-over-day supply differences into a separate `issues` dict, but they are currently
# inactive.
def get_history_supply2():
supplys = {}
dayt = time.gmtime()
daystr = time.strftime("%Y", dayt)
year = int(daystr)
end_year = year
while True:
url = ""
if end_year != year:
start_year = end_year
url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/sply-circ/time-series?start="
else:
url = "https://data.messari.io/api/v1/assets/bitcoin/metrics/sply-circ/time-series?start=" + str(
year) + "-01-01&end="+str(year)+"-12-31&order=descending&interval=1d"
if end_year != year:
url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
header_set = {}
header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
# header_set["Content-Type"] = "application/json"
print(header_set, url)
response_csupply = requests.get(url, headers=header_set)
# print(response_price)
if response_csupply.status_code == 200:
# print(response_price.content)
csweb = ujson.loads(response_csupply.content)
if "data" in csweb:
csset = csweb["data"]
if "values" in csset:
valueset = csset["values"]
if valueset is not None:
for supply in valueset:
dayutc = int(supply[0] / 1000)
s = supply[1]
ret_time = time.gmtime(dayutc)
ret_daystr = time.strftime("%d %b %Y", ret_time)
ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
supplys[str(ret_dayutc)] = float(s)
#print(s, dayutc, supplys[str(dayutc)])
#break
else:
break
else:
break
break
end_year -= 1
time.sleep(2)
return supplys
# get_history_supply2 fetches historical bitcoin circulating-supply data from the Messari API: it
# initialises an empty `supplys` dict, sets `end_year` to the current year, and loops, building the
# sply-circ time-series URL for the year window, adding the Messari API key header, and sending the
# request. On a 200 response it parses the JSON, converts each timestamp from milliseconds to seconds,
# normalises it to a UTC day key, and stores the supply in `supplys` keyed by that Unix-timestamp
# string, then returns the dict. Note the unconditional `break` after the first iteration, which ends
# the loop after fetching only the current year; this may be unintentional (in nochain() the
# Nasdaq-based get_history_supply later fills in the earlier history).
def calc_issues(supplys):
issues = {}
prevday = 0
previssue = 0
for supply_day in supplys:
if int(supply_day) > prevday:
prevday = int(supply_day)
previssue = supplys[supply_day]
print("calc_issues", prevday, previssue, supplys)
for dayutc in supplys:
issues[str(int(prevday))] = previssue - supplys[str(int(dayutc))]
previssue = float(supplys[dayutc])
prevday = dayutc
print(issues)
return issues
# calc_issues computes the difference in bitcoin supply between consecutive days from the provided
# historical data: it takes the `supplys` dict, initialises an empty `issues` dict plus `prevday` and
# `previssue` trackers, first scans for the latest day and its supply, then iterates over the days
# again, storing `previssue - supplys[day]` under the previous day's key before advancing the trackers,
# and finally prints and returns `issues`.
# There is a potential problem with this implementation: the pairing depends on the dict's iteration
# (insertion) order rather than on calendar order, so the computed "issuance" values may not correspond
# to differences between calendar-consecutive days.
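# Hedged sketch of an alternative (an assumption about the intent, not the author's code): if the goal
# is "daily issuance = supply[day] - supply[previous day]", sorting the keys makes the result
# independent of dict iteration order:
#   def calc_issues_sorted(supplys):
#       issues = {}
#       days = sorted(supplys, key=int)
#       for prev_day, day in zip(days, days[1:]):
#           issues[day] = supplys[day] - supplys[prev_day]
#       return issues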
def prepare_year_issues(issues):
issues_year = {}
temp_issues_year = {}
for day in issues:
dayt = time.gmtime(int(day))
daystr = time.strftime("%Y", dayt)
if daystr in temp_issues_year:
temp_issues_year[daystr] = temp_issues_year[daystr] + issues[day]
else:
temp_issues_year[daystr] = issues[day]
#print(temp_issues_year)
for day in issues:
dayt = time.gmtime(int(day))
daystr = time.strftime("%Y", dayt)
if daystr in temp_issues_year:
issues_year[day] = temp_issues_year[daystr]
#print(issues_year)
return issues_year
# prepare_year_issues aggregates daily issuance data into yearly totals. It takes the `issues` dict and
# uses two dicts: `issues_year` for the final result and `temp_issues_year` for the per-year running
# sums. In a first pass it extracts the year from each day's Unix timestamp and adds that day's issuance
# to the year's total; in a second pass it assigns each day the total issuance of its calendar year in
# `issues_year`. It returns `issues_year`, which makes yearly aggregates easier to analyse and plot.
# A worked example follows below.
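# Minimal illustration (reviewer addition, hypothetical numbers): two days in 2021 with issuance 900
# and 905 both map to the yearly total 1805, i.e. issues_year[day] == sum of issues for that day's
# calendar year.
#   prepare_year_issues({"1609459200": 900, "1609545600": 905})
#   # -> {"1609459200": 1805, "1609545600": 1805}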
def prepare_maxxx(prices, day, madays):
total = 0
cnt = 0
for i in range(madays):
if day in prices:
total += prices[day]
cnt += 1
# print(day, total, cnt)
day = str(int(day) - 3600 * 24)
if cnt > 0:
return total / cnt
return 0
# prepare_maxxx computes a moving average of prices over a window of `madays` days ending on `day`:
# - Arguments: `prices` (dict of prices keyed by Unix-timestamp strings), `day` (the end day of the
#   window) and `madays` (the window length in days).
# - It initialises `total` and `cnt`, then loops `madays` times; in each iteration it adds the price for
#   the current day to `total` (and increments `cnt`) if that day exists in `prices`, then steps `day`
#   back by 24 hours (3600 * 24 seconds).
# - After the loop it returns total / cnt if any prices were found, otherwise 0.
# In short, it is a simple moving average over the given number of days ending on the given day,
# skipping days with no data; a small usage sketch follows.
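# Usage sketch (reviewer addition, hypothetical prices; keys are UTC-midnight timestamps one day apart):
#   prices = {"86400": 10.0, "172800": 20.0}
#   prepare_maxxx(prices, "172800", 2)    # -> 15.0, the 2-day average ending on day "172800"
#   prepare_maxxx(prices, "172800", 350)  # -> 15.0 as well: missing days are skipped, not zero-filled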
def prepare_ma350(prices, day):
return prepare_maxxx(prices, day, 350)
# prepare_ma350 computes the 350-day moving average of prices by calling prepare_maxxx with madays=350;
# it simply wraps prepare_maxxx(prices, day, 350) behind a cleaner interface.
def prepare_ma111(prices, day):
return prepare_maxxx(prices, day, 111)
# prepare_ma111 is analogous to prepare_ma350 but computes the 111-day moving average, returning
# prepare_maxxx(prices, day, 111).
def prepare_ma200(ssr, day):
return prepare_maxxx(ssr, day, 200)
# prepare_ma200 computes a 200-day moving average, here applied to SSR (Stablecoin Supply Ratio) data:
# it takes the `ssr` dict (keyed by Unix timestamps) and the end `day`, and returns
# prepare_maxxx(ssr, day, 200).
def prepare_gold_ratio(prices):
ma350x1 = {}
ma350x1r6 = {}
ma350x2 = {}
ma350x3 = {}
ma350x5 = {}
for day in prices:
ma350x1[day] = prepare_maxxx(prices, day, 350)
ma350x1r6[day] = ma350x1[day]*1.6
ma350x2[day] = ma350x1[day] * 2
ma350x3[day] = ma350x1[day] * 3
ma350x5[day] = ma350x1[day] * 5
return ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5
# prepare_gold_ratio derives several multiples of the 350-day moving average from the given prices: it
# initialises the dicts ma350x1, ma350x1r6, ma350x2, ma350x3 and ma350x5, and for each day computes the
# 350-day MA via prepare_maxxx and stores it in ma350x1, then 1.6x, 2x, 3x and 5x of that value in the
# other dicts. It returns all five dicts, which serve as "golden ratio"-style comparison bands against
# the price.
def cal_ssr_osc(ssr):
ssr_osc = {}
for day in ssr:
ssr_ma = prepare_ma200(ssr, day)
ssr_osc[day] = ssr[day]/ssr_ma
return ssr_osc
# cal_ssr_osc computes an oscillator for the Stablecoin Supply Ratio (SSR): for each day in the `ssr`
# dict it computes the 200-day moving average with prepare_ma200, divides the day's SSR by that average,
# and stores the result in `ssr_osc` keyed by the day. It returns `ssr_osc`, i.e. the ratio of SSR to
# its 200-day MA, which can be used to analyse trends in stablecoin supply dynamics.
def calc_pi_cycle_top(dbif, prices):
ma350x2 = {}
ma111 = {}
for day in prices:
ma350x2[day] = prepare_ma350(prices, day)*2
ma111[day] = prepare_ma111(prices, day)
return ma350x2, ma111
# calc_pi_cycle_top computes the two moving averages used by the Pi Cycle Top indicator:
# - Arguments: `dbif` (unused here, presumably reserved for later use) and `prices` (dict keyed by Unix
#   timestamps).
# - For each day it computes ma350x2 (twice the 350-day MA, via prepare_ma350) and ma111 (the 111-day
#   MA, via prepare_ma111), and returns both dicts. These are the inputs for spotting potential cycle
#   peaks in the price; a crossover sketch follows below.
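# Hedged follow-up sketch (reviewer addition, not in the original): the Pi Cycle Top signal is commonly
# read as the 111-day MA crossing above 2x the 350-day MA. Given the two dicts returned above, crossover
# days could be flagged like this (day keys assumed to be daily UTC timestamps):
#   def pi_cycle_cross_days(ma350x2, ma111):
#       days = sorted(set(ma350x2) & set(ma111), key=int)
#       crossed = []
#       for prev, day in zip(days, days[1:]):
#           if ma111[prev] <= ma350x2[prev] and ma111[day] > ma350x2[day]:
#               crossed.append(day)
#       return crossed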
def calc_s2f_ratio(dbif, prices, supplys, issues_year):
s2f_ratio = {}
s2f_deflection = {}
cnt = 1
for day in supplys:
if day in issues_year:
s2f = 0
if int(day) >= 1672502400: #2023
s2f = supplys[day]/(900*365)
else:
s2f = supplys[day]/issues_year[day]
print(s2f,day,supplys[day],issues_year[day])
s2f_ratio[day] = 0.09*(math.pow(s2f, 3.3))
#print(supplys[day], issues_year[day], s2f, s2f_ratio[day])
s2f_deflection[day] = prices[day]/s2f_ratio[day]
#print(day, prices[day], s2f, s2f_ratio[day], s2f_deflection[day])
#cnt+=1
#if cnt > 10:
#break
return s2f_ratio, s2f_deflection
# calc_s2f_ratio computes two metrics related to Bitcoin's stock-to-flow (S2F) model:
# - Arguments: `dbif` (unused here), `prices`, `supplys` (supply keyed by Unix timestamps) and
#   `issues_year` (yearly aggregated issuance).
# - It initialises the `s2f_ratio` and `s2f_deflection` dicts and iterates over the days in `supplys`,
#   skipping days with no issuance data.
# - For each day it computes the stock-to-flow value: from 2023 onward (Unix timestamp 1672502400) it
#   uses a fixed issuance of 900 BTC/day (900 * 365 per year); before that it divides the supply by the
#   year's issuance.
# - The model value is 0.09 * s2f ** 3.3, stored in s2f_ratio[day], and the deflection is
#   prices[day] / s2f_ratio[day], i.e. how far the market price sits above or below the model value.
# - It returns both dicts, giving a view of how price relates to the stock-to-flow model over time; a
#   worked example follows below.
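# Worked example (reviewer addition, illustrative numbers only, following the formula in the code):
# with a circulating supply of 19,500,000 BTC and the fixed post-2023 issuance of 900 * 365 = 328,500
# BTC/year,
#   s2f        = 19_500_000 / 328_500      # ~ 59.4
#   model_usd  = 0.09 * (s2f ** 3.3)       # ~ 6.4e4, a model value around 64,000 USD
#   deflection = price / model_usd         # > 1 means the market price is above the model value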
def get_stable_coin_supply(coin_id):
supplys = {}
dayt = time.gmtime()
daystr = time.strftime("%Y", dayt)
year = int(daystr)
end_year = year
#split_2023 = False
while True:
#if end_year < 2022:
#break
url = ""
#if split_2023 and end_year != year:
if end_year != year:
start_year = end_year
url = "https://data.messari.io/api/v1/assets/"+coin_id+"/metrics/sply-circ/time-series?start="
else:
#if split_2023:
# url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?after=" + str(
# year) + "-06-01&order=descending"
#else:
# url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?start=2023-01-01&end=2023-06-01&order=descending"
url = "https://data.messari.io/api/v1/assets/" + coin_id + "/metrics/sply-circ/time-series?start=" + str(
year) + "-01-01&end="+str(year)+"-12-31&order=descending&interval=1d"
#now_time = time.gmtime()
#daystr = time.strftime("%Y-%m-%d", now_time)
#url = url + daystr + "&order=desc&format=json"
if end_year != year:
url = url + str(start_year) + "-01-01&end="+ str(end_year) + "-12-31&order=descending&interval=1d"
header_set = {}
header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
#header_set["Content-Type"] = "application/json"
print(header_set, url)
response_supply = requests.get(url, headers=header_set)
#print(response_supply)
if response_supply.status_code == 200:
#print(response_supply.content)
supplyweb = ujson.loads(response_supply.content)
if "data" in supplyweb:
supplyset = supplyweb["data"]
if "values" in supplyset:
valueset = supplyset["values"]
if valueset is not None:
for supply in valueset:
dayutc = int(supply[0]/1000)
s = supply[1]
supplys[str(dayutc)] = float(s)
#print(s, dayutc, supplys[str(dayutc)])
#break
else:
break
else:
break
#if split_2023:
end_year -= 1
#else:
# split_2023 = True
time.sleep(2)
return supplys
# get_stable_coin_supply retrieves historical circulating-supply data for a given stablecoin:
# - Argument: `coin_id`, the Messari identifier of the stablecoin.
# - It initialises an empty `supplys` dict, sets `end_year` to the current year, and loops until the
#   data is exhausted. In each iteration it builds the sply-circ time-series URL for the year window,
#   sends a GET request with the Messari API key header, and on a 200 response parses the JSON,
#   converting each timestamp from milliseconds to seconds and storing the supply in `supplys` keyed by
#   the Unix-timestamp string.
# - It decrements `end_year` to fetch the previous year on the next pass and sleeps 2 seconds between
#   requests to avoid hammering the server, finally returning `supplys`.
def get_usdt_supply():
return get_stable_coin_supply("tether")
# get_usdt_supply is a thin wrapper that calls get_stable_coin_supply("tether") and returns its result,
# so USDT supply data can be fetched without specifying the coin id each time.
def get_usdc_supply():
return get_stable_coin_supply("usd-coin")
# get_usdc_supply does the same for USD Coin (USDC): it calls get_stable_coin_supply("usd-coin") and
# returns the result.
def get_busd_supply():
return get_stable_coin_supply("binance-usd")
# get_busd_supply wraps get_stable_coin_supply("binance-usd") to fetch historical Binance USD (BUSD)
# supply data.
def get_dai_supply():
return get_stable_coin_supply("dai")
# get_dai_supply wraps get_stable_coin_supply("dai") to fetch historical Dai (DAI) supply data.
def cal_ssr(dbif, prices, supplys):
print("calc_ssr")
usdts = get_usdt_supply()
#print(usdts)
#return
usdcs = get_usdc_supply()
#print(usdcs)
busds = get_busd_supply()
dais = get_dai_supply()
print("calc_ssr start")
stables = {}
ssr = {}
marketcap = {}
for day in usdts:
stables[day] = usdts[day]
if day in usdcs:
stables[day] += usdcs[day]
if day in busds:
stables[day] += busds[day]
if day in dais:
stables[day] += dais[day]
#print(day, stables[day], usdts[day], usdcs[day], busds[day], dais[day])
#print(prices[day])
if day in prices:
#print(day, prices)
if day in supplys:
#print(day, supplys)
marketcap[day] = prices[day] * supplys[day]
if stables[day] > 0:
ssr[day] = marketcap[day]/stables[day]
else:
ssr[day] = 0
#break
print(ssr)
ssrosc = {}
quotes_list = []
for day in ssr:
#print(day)
dayt = time.gmtime(int(day))
#dayt = datetime.date.fromtimestamp(int(day))
daystr = time.strftime("%Y-%m-%d", dayt)
dtobj = datetime.strptime(daystr, "%Y-%m-%d")
print(dtobj)
quotes_list.append(Quote(dtobj, 0, 0, 0, ssr[day], 0))
print(quotes_list)
ssr_osc = indicators.get_bollinger_bands(quotes_list, 200, 2)
for item in ssr_osc:
if item.z_score is not None:
ssrosc[str(int(item.date.timestamp()))] = item.z_score
#ssrosc = cal_ssr_osc(ssr)
print(ssrosc)
for day in ssr:
price = 0
if day in prices:
price = prices[day]
marketcap1 = 0
if day in marketcap:
marketcap1 = marketcap[day]
usdt = 0
if day in usdts:
usdt = usdts[day]
usdc = 0
if day in usdcs:
usdc = usdcs[day]
busd = 0
if day in busds:
busd = busds[day]
dai = 0
if day in dais:
dai = dais[day]
stable = 0
if day in stables:
stable = stables[day]
ssr1 = 0
if day in ssr:
ssr1 = ssr[day]
ssrosc1 = 0
if day in ssrosc:
ssrosc1 = ssrosc[day]
dbif.save_ssr(day, price, marketcap1, usdt, usdc, busd, dai,
stable, ssr1, ssrosc1)
# cal_ssr computes the Stablecoin Supply Ratio (SSR) and its oscillator and saves them to the database:
# - Arguments: `dbif` (the database interface used for saving), `prices` (crypto prices keyed by Unix
#   timestamps) and `supplys` (historical BTC supply keyed by Unix timestamps).
# - It fetches historical supply for USDT, USDC, BUSD and DAI via the dedicated helpers, then for each
#   day sums them into a total stablecoin supply, computes the market cap as price * supply, and the
#   SSR as market cap / total stablecoin supply.
# - It builds a list of Quote objects carrying the SSR values, computes 200-day Bollinger Bands with a
#   standard deviation of 2, and uses the resulting z-scores as the SSR oscillator.
# - Finally it writes each day's price, market cap, individual and total stablecoin supplies, SSR and
#   SSR oscillator to the database via dbif.save_ssr(). A quick numeric illustration follows below.
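# Quick illustration of the ratio itself (reviewer addition, hypothetical numbers): with a BTC market
# cap of 1.2e12 USD and a combined stablecoin supply of 1.2e11 USD, SSR = 1.2e12 / 1.2e11 = 10. The
# "ssrosc" column then stores the z-score of SSR against its 200-day Bollinger Bands, i.e. roughly
# (SSR - 200-day SMA) / standard deviation.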
def nochain():
global dbif
dbif = NochainDbIf()
temp_prices = get_history_price()
prices = get_history_price2(temp_prices)
#supplys, issues = get_history_supply()
temp_supplys = get_history_supply2()
supplys = get_history_supply(temp_supplys)
issues = calc_issues(supplys)
'''
cnt = 0;
for day in supplys:
print(supplys[day])
if day in issues:
print(issues[day])
cnt+=1
if cnt > 5:
break
'''
issues_year = prepare_year_issues(issues)
#print(issues_year)
ma350x2, ma111 = calc_pi_cycle_top(dbif, prices)
#print(ma350x2)
#print(ma111)
s2f_ratio, s2f_deflection = calc_s2f_ratio(dbif, prices, supplys, issues_year)
#print(s2f_ratio)
#print(s2f_deflection)
ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5 = prepare_gold_ratio(prices)
for day in prices:
#print(day)
ma350x21 = 0
if day in ma350x2:
ma350x21 = ma350x2[day]
ma1111 = 0
if day in ma111:
ma1111 = ma111[day]
supply = 0
if day in supplys:
supply = supplys[day]
issue = 0
if day in issues:
issue = issues[day]
s2f_ratio1 = 0
if day in s2f_ratio:
s2f_ratio1 = s2f_ratio[day]
s2f_deflection1 = 0
if day in s2f_deflection:
s2f_deflection1 = s2f_deflection[day]
ma350x11 = 0
if day in ma350x1:
ma350x11 = ma350x1[day]
ma350x1r61 = 0
if day in ma350x1r6:
ma350x1r61 = ma350x1r6[day]
ma350x31 = 0
if day in ma350x3:
ma350x31 = ma350x3[day]
ma350x51 = 0
if day in ma350x5:
ma350x51 = ma350x5[day]
#print(day, prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1)
dbif.save(int(day), prices[day], ma350x21, ma1111, supply, issue, s2f_ratio1, s2f_deflection1, ma350x11, ma350x1r61, ma350x31, ma350x51)
cal_ssr(dbif, prices, supplys)
# The nochain function orchestrates data collection, processing and storage for the nochain pipeline:
# - Initialisation: it creates a global NochainDbIf instance `dbif` as the database interface.
# - Data retrieval: it pulls historical prices (get_history_price / get_history_price2) and supply
#   (get_history_supply2 / get_history_supply), computes issuance with calc_issues, aggregates it per
#   year with prepare_year_issues, computes the Pi Cycle Top inputs (ma350x2, ma111) with
#   calc_pi_cycle_top, the stock-to-flow ratio and deflection with calc_s2f_ratio, and the golden-ratio
#   bands (ma350x1, ma350x1r6, ma350x2, ma350x3, ma350x5) with prepare_gold_ratio.
# - Data storage: for each day in the price data it gathers the corresponding metrics and stores them
#   with NochainDbIf.save().
# - Stablecoin Supply Ratio: it then calls cal_ssr, which uses get_usdt_supply / get_usdc_supply /
#   get_busd_supply / get_dai_supply to compute the SSR and its oscillator and store them.
# Overall this is the main driver that collects, processes and stores the project's data for later
# analysis and visualisation.
nochain()

87
lyq/nochain_update_lyq.py Normal file
View File

@@ -0,0 +1,87 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
import datetime
import pymysql
import math
from stock_indicators import indicators
from stock_indicators.indicators.common.quote import Quote
class NochainDbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init nochain db suceess!")
# This class is used to interact with the database. __init__ is the constructor; it opens the MySQL
# connection with the pymysql library, using default values for any connection parameters that are not
# supplied. The cursorclass=pymysql.cursors.DictCursor argument makes results come back as dicts instead
# of tuples, which is more convenient to work with, and a message is printed once the connection has
# been initialised successfully.
def get_ssr(self):
with self.conn.cursor() as cursor:
sql_cmd = "SELECT unixdt,ssr FROM nochainv3b order by unixdt"
cursor.execute(sql_cmd)
self.conn.commit()
return cursor.fetchall()
# get_ssr retrieves data from the nochainv3b table: inside a `with` block (so the cursor is handled and
# closed properly) it prepares a SELECT of the unixdt and ssr columns ordered by unixdt, executes it,
# calls self.conn.commit() (only strictly needed when changes have to be persisted), and returns
# cursor.fetchall(), a list of dicts keyed by column name.
def update_ssr(self, dt, ssrosc):
with self.conn.cursor() as cursor:
sql_update = "UPDATE nochainv3b SET unixdt='%s', ssrosc='%s' WHERE unixdt='%s'" %(dt, ssrosc, dt)
print(sql_update)
cursor.execute(sql_update)
self.conn.commit()
# update_ssr updates rows in the nochainv3b table. It takes two parameters: dt (a timestamp) and ssrosc
# (an SSR-related value). It builds the UPDATE statement by %-formatting dt and ssrosc directly into the
# SQL string, which is risky because it opens the door to SQL injection; a parameterized query is safer
# (see the sketch below). The constructed SQL is printed for debugging, executed with cursor.execute,
# and the transaction is committed with self.conn.commit(). There is also a quirk in the statement:
# unixdt is set to the same value used in the WHERE clause, so effectively only ssrosc changes; if a
# different row or column is meant to be updated, the WHERE clause needs adjusting.
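# Hedged sketch of a parameterized variant (reviewer addition, an alternative rather than the author's
# code): letting the driver do the quoting avoids the injection risk and drops the redundant unixdt
# self-assignment noted above.
#   def update_ssr_param(self, dt, ssrosc):
#       with self.conn.cursor() as cursor:
#           cursor.execute("UPDATE nochainv3b SET ssrosc = %s WHERE unixdt = %s", (ssrosc, dt))
#           self.conn.commit()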
def rollback(self):
self.conn.rollback()
# rollback performs a rollback on the current database connection: it calls self.conn.rollback(), which
# reverts any uncommitted changes made during the current transaction. This is typically used when an
# error occurs mid-transaction, so earlier changes can be undone.
def nochain():
try:
dbif = NochainDbIf()
ssr_ma = dbif.get_ssr()
#print(ssr_ma)
quotes_list = []
for item in ssr_ma:
print(item["unixdt"])
quotes_list.append(Quote(item["unixdt"],0,0,0,item["ssr"],0))
#print(quotes_list)
ssr_osc = indicators.get_bollinger_bands(quotes_list, 200, 2)
for item in ssr_osc:
if item.z_score is not None:
#print(item.date, item.sma, item.percent_b, item.z_score, item.width)
#dbif.update_ssr(item.date, item.z_score)
break
print("ok")
except Exception as e:
#dbif.rollback();
print(e)
finally:
print("end")
nochain()
# This nochain function uses the NochainDbIf class to compute Bollinger Bands over the SSR (the
# Stablecoin Supply Ratio stored elsewhere in this repo) fetched from the MySQL database. Breakdown:
# - It creates a NochainDbIf instance to open the database connection.
# - It retrieves the SSR data with get_ssr and builds a list of Quote objects from it.
# - It computes Bollinger Bands with indicators.get_bollinger_bands and iterates over the results,
#   which would print information about each band.
# - The part that would write the computed bands back into the database (update_ssr) is currently
#   commented out.
# - Any exception raised during the process is caught and printed, and "end" is printed in the finally
#   block regardless of success or failure.

610
lyq/redis_if_qt.py Normal file
View File

@@ -0,0 +1,610 @@
import time
from walrus import *
from loguru import logger
class RedisIf:
def __init__(self, host="127.0.0.1", port=6379, password="", db=0):
self.db = Database(host=host, port=port, db=db)
self.zbalance = self.db.ZSet("balancev2d")
'''
#realize cap progress
self.rv = self.db.Hash("rv")
#address and balance progress
self.addr = self.db.Hash("addr")
#block volume progress
self.bv = self.db.Hash("bv")
#daily volume progress
self.dv = self.db.Hash("dv")
'''
'''
#stat tx progress
self.tx = self.db.Hash("tx")
#ETH daily contract progress
self.eth_dc = self.db.Hash("ethdc")
#btc stats fee
self.btc_stats = self.db.Hash("btcstats")
#btc stats volume
self.btc_volume = self.db.Hash("btcvolume")
# btc stats cdd
self.btc_cdd = self.db.Hash("btccdd")
# btc stats cdd days
self.btc_cdd_days = self.db.Hash("btccdddays")
'''
self.btc_block_time = self.db.Hash("btcblocktimev2d")
'''
self.btc_sopr = self.db.Hash("btcsopr")
'''
self.btc_data = self.db.Hash("btc_datav2d")
self.active_address = self.db.Set("active_addressv2d")
self.send_address = self.db.Set("send_addressv2d")
self.receive_address = self.db.Set("receive_addressv2d")
def get_btc_data(self, key):
value = None
if self.btc_data[key] is not None:
value = self.btc_data[key]
return value
def set_btc_data(self, key, value):
self.btc_data[key] = value
def reset_btc_data(self):
self.btc_data.clear()
self.zbalance.clear()
self.btc_block_time.clear()
'''
def get_last_btc_sopr(self):
last_sopr_buy = None
last_asopr_buy = None
last_easopr_buy = None
last_lth_sopr_buy = None
last_sth_sopr_buy = None
last_asol = None
last_eaasol = None
if self.btc_sopr["last_asol"] is not None:
last_asol = self.btc_sopr["last_asol"]
#last_asol = float(self.btc_sopr["last_asol"].decode("utf-8"))
if self.btc_sopr["last_eaasol"] is not None:
last_eaasol = self.btc_sopr["last_eaasol"]
#last_eaasol = float(self.btc_sopr["last_eaasol"].decode("utf-8"))
if self.btc_sopr["last_sopr_buy"] is not None:
last_sopr_buy = self.btc_sopr["last_sopr_buy"]
#last_sopr_buy = float(self.btc_sopr["last_sopr_buy"].decode("utf-8"))
if self.btc_sopr["last_asopr_buy"] is not None:
last_asopr_buy = self.btc_sopr["last_asopr_buy"]
#last_asopr_buy = float(self.btc_sopr["last_asopr_buy"].decode("utf-8"))
if self.btc_sopr["last_easopr_buy"] is not None:
last_easopr_buy = self.btc_sopr["last_easopr_buy"]
#last_easopr_buy = float(self.btc_sopr["last_easopr_buy"].decode("utf-8"))
if self.btc_sopr["last_lth_sopr_buy"] is not None:
last_lth_sopr_buy = self.btc_sopr["last_lth_sopr_buy"]
#last_lth_sopr_buy = float(self.btc_sopr["last_lth_sopr_buy"].decode("utf-8"))
if self.btc_sopr["last_sth_sopr_buy"] is not None:
last_sth_sopr_buy = self.btc_sopr["last_sth_sopr_buy"]
#last_sth_sopr_buy = float(self.btc_sopr["last_sth_sopr_buy"].decode("utf-8"))
last_sopr_sell = None
last_asopr_sell = None
last_easopr_sell = None
last_lth_sopr_sell = None
last_sth_sopr_sell = None
if self.btc_sopr["last_sopr_sell"] is not None:
last_sopr_sell = self.btc_sopr["last_sopr_sell"]
# last_sopr_sell = float(self.btc_sopr["last_sopr_sell"].decode("utf-8"))
if self.btc_sopr["last_asopr_sell"] is not None:
last_asopr_sell = self.btc_sopr["last_asopr_sell"]
# last_asopr = float(self.btc_sopr["last_asopr"].decode("utf-8"))
if self.btc_sopr["last_easopr_sell"] is not None:
last_easopr_sell = self.btc_sopr["last_easopr_sell"]
# last_easopr_sell = float(self.btc_sopr["last_easopr_sell"].decode("utf-8"))
if self.btc_sopr["last_lth_sopr_sell"] is not None:
last_lth_sopr_sell = self.btc_sopr["last_lth_sopr_sell"]
# last_lth_sopr_sell = float(self.btc_sopr["last_lth_sopr_sell"].decode("utf-8"))
if self.btc_sopr["last_sth_sopr_sell"] is not None:
last_sth_sopr_sell = self.btc_sopr["last_sth_sopr_sell"]
# last_sth_sopr_sell = float(self.btc_sopr["last_sth_sopr_sell"].decode("utf-8"))
return last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell
def set_last_btc_sopr(self, last_asol, last_eaasol, last_sopr_buy, last_asopr_buy, last_easopr_buy, last_lth_sopr_buy, last_sth_sopr_buy, last_sopr_sell, last_asopr_sell, last_easopr_sell, last_lth_sopr_sell, last_sth_sopr_sell):
self.btc_sopr["last_asol"] = last_asol
self.btc_sopr["last_eaasol"] = last_eaasol
self.btc_sopr["last_sopr_buy"] = last_sopr_buy
self.btc_sopr["last_asopr_buy"] = last_asopr_buy
self.btc_sopr["last_easopr_buy"] = last_easopr_buy
self.btc_sopr["last_lth_sopr_buy"] = last_lth_sopr_buy
self.btc_sopr["last_sth_sopr_buy"] = last_sth_sopr_buy
self.btc_sopr["last_sopr_sell"] = last_sopr_sell
self.btc_sopr["last_asopr_sell"] = last_asopr_sell
self.btc_sopr["last_easopr_sell"] = last_easopr_sell
self.btc_sopr["last_lth_sopr_sell"] = last_lth_sopr_sell
self.btc_sopr["last_sth_sopr_sell"] = last_sth_sopr_sell
'''
def get_block_time(self, height):
block_time = None
height_str = str(height)
if self.btc_block_time[height_str] is not None:
block_time = int(self.btc_block_time[height_str].decode("utf-8"))
#block_time = int(self.btc_block_time[height_str].decode("utf-8"))
return block_time
def set_block_time(self, height, ts):
height_str = str(height)
self.btc_block_time[height_str] = ts
'''
def get_last_btc_cdd_days(self):
last_cdd = None
last_acdd = None
last_eacdd = None
last_cdd_day1= None
last_cdd_day7 = None
last_cdd_day30 = None
last_cdd_day60 = None
last_cdd_day90 = None
last_cdd_day180 = None
last_cdd_day365 = None
last_cdd_day730 = None
last_date = None
last_height = None
last_date_str = None
if self.btc_cdd["last_cdd"] is not None:
last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
if self.btc_cdd["last_acdd"] is not None:
last_acdd = float(self.btc_cdd["last_acdd"].decode("utf-8"))
if self.btc_cdd["last_eacdd"] is not None:
last_eacdd = float(self.btc_cdd["last_eacdd"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day1"] is not None:
last_cdd_day1 = float(self.btc_cdd_days["last_cdd_day1"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day7"] is not None:
last_cdd_day7 = float(self.btc_cdd_days["last_cdd_day7"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day30"] is not None:
last_cdd_day30 = float(self.btc_cdd_days["last_cdd_day30"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day60"] is not None:
last_cdd_day60 = float(self.btc_cdd_days["last_cdd_day60"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day90"] is not None:
last_cdd_day90 = float(self.btc_cdd_days["last_cdd_day90"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day180"] is not None:
last_cdd_day180 = float(self.btc_cdd_days["last_cdd_day180"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day365"] is not None:
last_cdd_day365 = float(self.btc_cdd_days["last_cdd_day365"].decode("utf-8"))
if self.btc_cdd_days["last_cdd_day730"] is not None:
last_cdd_day730 = float(self.btc_cdd_days["last_cdd_day730"].decode("utf-8"))
if self.btc_cdd_days["last_date"] is not None:
last_date = int(self.btc_cdd_days["last_date"].decode("utf-8"))
if self.btc_cdd_days["last_height"] is not None:
last_height = int(self.btc_cdd_days["last_height"].decode("utf-8"))
if self.btc_cdd_days["last_date_str"] is not None:
last_date_str = self.btc_cdd_days["last_date_str"].decode("utf-8")
return last_cdd, last_acdd, last_eacdd, last_cdd_day1, last_cdd_day7, last_cdd_day30, last_cdd_day60, last_cdd_day90, last_cdd_day180, last_cdd_day365, last_cdd_day730, last_date, last_height, last_date_str
def set_last_btc_cdd_days(self, cdd, acdd, eacdd, day1, day7, day30, day60, day90, day180, day365, day730, dt, height, dtstr):
self.btc_cdd["last_cdd"] = cdd
self.btc_cdd["last_acdd"] = acdd
self.btc_cdd["last_eacdd"] = eacdd
self.btc_cdd_days["last_cdd_day1"] = day1
self.btc_cdd_days["last_cdd_day7"] = day7
self.btc_cdd_days["last_cdd_day30"] = day30
self.btc_cdd_days["last_cdd_day60"] = day60
self.btc_cdd_days["last_cdd_day90"] = day90
self.btc_cdd_days["last_cdd_day180"] = day180
self.btc_cdd_days["last_cdd_day365"] = day365
self.btc_cdd_days["last_cdd_day730"] = day730
self.btc_cdd_days["last_date"] = dt
self.btc_cdd_days["last_height"] = height
self.btc_cdd_days["last_date_str"] = dtstr
'''
'''
def get_last_btc_cdd(self):
last_cdd = None
last_date = None
last_height = None
last_date_str = None
if self.btc_cdd["last_cdd"] is not None:
last_cdd = float(self.btc_cdd["last_cdd"].decode("utf-8"))
if self.btc_cdd["last_date"] is not None:
last_date = int(self.btc_cdd["last_date"].decode("utf-8"))
if self.btc_cdd["last_height"] is not None:
last_height = int(self.btc_cdd["last_height"].decode("utf-8"))
if self.btc_cdd["last_date_str"] is not None:
last_date_str = self.btc_cdd["last_date_str"].decode("utf-8")
return last_cdd, last_date, last_height, last_date_str
def set_last_btc_cdd(self, cdd, dt, height, dtstr):
self.btc_cdd["last_cdd"] = cdd
self.btc_cdd["last_date"] = dt
self.btc_cdd["last_height"] = height
self.btc_cdd["last_date_str"] = dtstr
def get_last_btc_volume(self):
last_volume = None
last_date = None
last_height = None
last_date_str = None
if self.btc_volume["last_volume"] is not None:
last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
if self.btc_volume["last_date"] is not None:
last_date = int(self.btc_volume["last_date"].decode("utf-8"))
if self.btc_volume["last_height"] is not None:
last_height = int(self.btc_volume["last_height"].decode("utf-8"))
if self.btc_volume["last_date_str"] is not None:
last_date_str = self.btc_volume["last_date_str"].decode("utf-8")
return last_volume, last_date, last_height, last_date_str
def set_last_btc_volume(self, volume, dt, height, dtstr):
self.btc_volume["last_volume"] = volume
self.btc_volume["last_date"] = dt
self.btc_volume["last_height"] = height
self.btc_volume["last_date_str"] = dtstr
'''
'''
def get_last_btc_stats(self):
last_fees = None
last_date = None
last_height = None
last_date_str = None
last_volume = None
if self.btc_stats["last_fees"] is not None:
last_fees = float(self.btc_stats["last_fees"].decode("utf-8"))
if self.btc_volume["last_volume"] is not None:
last_volume = float(self.btc_volume["last_volume"].decode("utf-8"))
if self.btc_stats["last_date"] is not None:
last_date = int(self.btc_stats["last_date"].decode("utf-8"))
if self.btc_stats["last_height"] is not None:
last_height = int(self.btc_stats["last_height"].decode("utf-8"))
if self.btc_stats["last_date_str"] is not None:
last_date_str = self.btc_stats["last_date_str"].decode("utf-8")
return last_fees, last_volume, last_date, last_height, last_date_str
def set_last_btc_stats(self, fees, volume, dt, height, dtstr):
self.btc_stats["last_fees"] = fees
self.btc_volume["last_volume"] = volume
self.btc_stats["last_date"] = dt
self.btc_stats["last_height"] = height
self.btc_stats["last_date_str"] = dtstr
def get_last_eth_dc(self):
last_date = None
last_height = None
last_date_str = None
if self.eth_dc["last_date"] is not None:
last_date = int(self.eth_dc["last_date"].decode("utf-8"))
if self.eth_dc["last_height"] is not None:
last_height = int(self.eth_dc["last_height"].decode("utf-8"))
if self.eth_dc["last_date_str"] is not None:
last_date_str = self.eth_dc["last_date_str"].decode("utf-8")
return last_date, last_height, last_date_str
def set_last_eth_dc(self, dt, height, dtstr):
self.eth_dc["last_date"] = dt
self.eth_dc["last_height"] = height
self.eth_dc["last_date_str"] = dtstr
'''
'''
def get_last_dv(self):
last_dv = None
last_date = None
last_height = None
last_date_str = None
if self.dv["last_dv"] is not None:
last_dv = float(self.dv["last_dv"].decode("utf-8"))
if self.dv["last_date"] is not None:
last_date = int(self.dv["last_date"].decode("utf-8"))
if self.dv["last_height"] is not None:
last_height = int(self.dv["last_height"].decode("utf-8"))
if self.dv["last_date_str"] is not None:
last_date_str = self.dv["last_date_str"].decode("utf-8")
return last_dv, last_date, last_height, last_date_str
def set_last_dv(self, dv, dt, height, dtstr):
self.dv["last_dv"] = dv
self.dv["last_date"] = dt
self.dv["last_height"] = height
self.dv["last_date_str"] = dtstr
def get_last_bv(self):
last_height = None
if self.bv["last_height"] is not None:
last_height = int(self.bv["last_height"].decode("utf-8"))
return last_height
def set_last_bv(self, height):
self.bv["last_height"] = height
'''
'''
def get_last_ind(self):
last_csupply = None
last_mintusd = None
last_sumcsupply = None
last_sumcdd = None
last_sumeacdd = None
last_rprofit = None
last_rloss = None
last_marketcap = None
last_rcap = None
last_mvrv = None
last_earcap = None
if self.tx["last_csupply"] is not None:
last_csupply = float(self.tx["last_csupply"].decode("utf-8"))
if self.tx["last_mintusd"] is not None:
last_mintusd = float(self.tx["last_mintusd"].decode("utf-8"))
if self.tx["last_sumcsupply"] is not None:
last_sumcsupply = float(self.tx["last_sumcsupply"].decode("utf-8"))
if self.tx["last_sumcdd"] is not None:
last_sumcdd = float(self.tx["last_sumcdd"].decode("utf-8"))
if self.tx["last_sumeacdd"] is not None:
last_sumeacdd = float(self.tx["last_sumeacdd"].decode("utf-8"))
if self.tx["last_rprofit"] is not None:
last_rprofit = float(self.tx["last_rprofit"].decode("utf-8"))
if self.tx["last_rloss"] is not None:
last_rloss = float(self.tx["last_rloss"].decode("utf-8"))
if self.tx["last_marketcap"] is not None:
last_marketcap = float(self.tx["last_marketcap"].decode("utf-8"))
if self.tx["last_rcap"] is not None:
last_rcap = float(self.tx["last_rcap"].decode("utf-8"))
if self.tx["last_earcap"] is not None:
last_earcap = float(self.tx["last_earcap"].decode("utf-8"))
if self.tx["last_mvrv"] is not None:
last_mvrv = float(self.tx["last_mvrv"].decode("utf-8"))
return last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv
def set_last_ind(self, last_csupply, last_mintusd, last_sumcsupply, last_sumcdd, last_sumeacdd, last_rprofit, last_rloss, last_marketcap, last_rcap, last_earcap, last_mvrv):
self.tx["last_csupply"] = last_csupply
self.tx["last_mintusd"] = last_mintusd
self.tx["last_sumcsupply"] = last_sumcsupply
self.tx["last_sumcdd"] = last_sumcdd
self.tx["last_sumeacdd"] = last_sumeacdd
self.tx["last_rprofit"] = last_rprofit
self.tx["last_rloss"] = last_rloss
self.tx["last_marketcap"] = last_marketcap
self.tx["last_rcap"] = last_rcap
self.tx["last_earcap"] = last_earcap
self.tx["last_mvrv"] = last_mvrv
def get_last_tx(self):
last_profit = None
last_fees = None
last_newaddr_cnt = None
last_newaddr_vol = None
last_active_addr_cnt = None
last_tx_addr_cnt = None
last_rx_addr_cnt = None
last_vol_change = None
last_vol = None
last_avol = None
last_date = None
last_height = None
last_date_str = None
last_txs = None
last_eatxs = None
if self.tx["last_profit_rate"] is not None:
last_profit = int(self.tx["last_profit"].decode("utf-8"))
if self.tx["last_fees"] is not None:
last_fees = int(self.tx["last_fees"].decode("utf-8"))
if self.tx["last_txs"] is not None:
last_txs = int(self.tx["last_txs"].decode("utf-8"))
if self.tx["last_eatxs"] is not None:
last_eatxs = int(self.tx["last_eatxs"].decode("utf-8"))
if self.tx["last_newaddr_cnt"] is not None:
last_newaddr_cnt = int(self.tx["last_newaddr_cnt"].decode("utf-8"))
if self.tx["last_newaddr_vol"] is not None:
last_newaddr_vol = float(self.tx["last_newaddr_vol"].decode("utf-8"))
if self.tx["last_active_addr_cnt"] is not None:
last_active_addr_cnt = int(self.tx["last_active_addr_cnt"].decode("utf-8"))
if self.tx["last_tx_addr_cnt"] is not None:
last_tx_addr_cnt = int(self.tx["last_tx_addr_cnt"].decode("utf-8"))
if self.tx["last_rx_addr_cnt"] is not None:
last_rx_addr_cnt = int(self.tx["last_rx_addr_cnt"].decode("utf-8"))
if self.tx["last_vol_change"] is not None:
last_vol_change = float(self.tx["last_vol_change"].decode("utf-8"))
if self.tx["last_vol"] is not None:
last_vol = float(self.tx["last_vol"].decode("utf-8"))
if self.tx["last_avol"] is not None:
last_avol = float(self.tx["last_avol"].decode("utf-8"))
if self.tx["last_date"] is not None:
last_date = int(self.tx["last_date"].decode("utf-8"))
if self.tx["last_height"] is not None:
last_height = int(self.tx["last_height"].decode("utf-8"))
if self.tx["last_date_str"] is not None:
last_date_str = self.tx["last_date_str"].decode("utf-8")
return last_profit, last_fees, last_txs, last_eatxs, last_newaddr_cnt, last_newaddr_vol, last_active_addr_cnt, last_tx_addr_cnt, last_rx_addr_cnt, last_vol_change, last_vol, last_avol, last_date, last_height, last_date_str
def set_last_tx(self, last_profit, last_fees, last_txs, last_eatxs, newaddr_cnt, newaddr_vol, active_addr_cnt, tx_addr_cnt, rx_addr_cnt, vol_change, vol, avol, dt, height, dtstr):
self.tx["last_profit"] = last_profit
self.tx["last_fees"] = last_fees
self.tx["last_txs"] = last_txs
self.tx["last_eatxs"] = last_eatxs
self.tx["last_newaddr_cnt"] = newaddr_cnt
self.tx["last_newaddr_vol"] = newaddr_vol
self.tx["last_active_addr_cnt"] = active_addr_cnt
self.tx["last_tx_addr_cnt"] = tx_addr_cnt
self.tx["last_rx_addr_cnt"] = rx_addr_cnt
self.tx["last_vol_change"] = vol_change
self.tx["last_vol"] = vol
self.tx["last_avol"] = avol
self.tx["last_date"] = dt
self.tx["last_height"] = height
self.tx["last_date_str"] = dtstr
'''
'''
def get_last_addr(self):
last_daily_cnt = None
last_date = None
last_height = None
last_date_str = None
if self.addr["last_daily_cnt"] is not None:
last_daily_cnt = int(self.addr["last_daily_cnt"].decode("utf-8"))
if self.addr["last_date"] is not None:
last_date = int(self.addr["last_date"].decode("utf-8"))
if self.addr["last_height"] is not None:
last_height = int(self.addr["last_height"].decode("utf-8"))
if self.addr["last_date_str"] is not None:
last_date_str = self.addr["last_date_str"].decode("utf-8")
return last_daily_cnt, last_date, last_height, last_date_str
def set_last_addr(self, daily_cnt, dt, height, dtstr):
self.addr["last_daily_cnt"] = daily_cnt
self.addr["last_date"] = dt
self.addr["last_height"] = height
self.addr["last_date_str"] = dtstr
'''
def is_active_address(self, address):
result = address in self.active_address
if not result:
self.active_address.add(address)
return result
def reset_active_address(self):
self.active_address.clear()
def get_active_address_cnt(self):
return len(self.active_address)
def is_send_address(self, address):
result = address in self.send_address
if not result:
self.send_address.add(address)
return result
def reset_send_address(self):
self.send_address.clear()
def get_send_address_cnt(self):
return len(self.send_address)
def is_receive_address(self, address):
result = address in self.receive_address
if not result:
self.receive_address.add(address)
return result
def reset_receive_address(self):
self.receive_address.clear()
def get_receive_address_cnt(self):
return len(self.receive_address)
def save_addr(self, address, balance):
new_balance = balance
if address in self.zbalance:
new_balance = self.zbalance.score(address) + balance
#print("update", self.zbalance.score(address), balance, new_balance)
#time.sleep(10)
if new_balance < 0.01:
del self.zbalance[address]
#print("check exist", address, address in self.zbalance)
#time.sleep(10)
return
self.zbalance.add({address: new_balance})
'''
def delete_addr(self, config):
self.addr.clear()
self.zbalance.clear()
'''
def is_in_addr(self, address):
return address in self.zbalance
def get_addr_cnt(self):
return len(self.zbalance)
'''
def delete_rv(self, config):
self.rv.clear()
def get_last_rv(self):
last_rv = None
last_date = None
last_height = None
last_date_str = None
if self.rv["last_rv"] is not None:
last_rv = float(self.rv["last_rv"].decode("utf-8"))
if self.rv["last_date"] is not None:
last_date = int(self.rv["last_date"].decode("utf-8"))
if self.rv["last_height"] is not None:
last_height = int(self.rv["last_height"].decode("utf-8"))
if self.rv["last_date_str"] is not None:
last_date_str = self.rv["last_date_str"].decode("utf-8")
return last_rv, last_date, last_height, last_date_str
def set_last_rv(self, rv, dt, height, dtstr):
self.rv["last_rv"] = rv
self.rv["last_date"] = dt
self.rv["last_height"] = height
self.rv["last_date_str"] = dtstr
'''
def get_all_address(self):
return self.zbalance.keys()
def delete_address_data(self, config):
self.zbalance.clear()
'''
def query_from_address(self, start_balance=0, end_balance=0, address="", limit=0):
if len(address) > 0:
results = []
result = {}
result["address"] = address
balance = self.zbalance.score(address)
print(balance)
if balance is not None:
result["balance"] = balance
results.append(result)
return results
match_result = None
if start_balance > 0:
if end_balance > 0:
match_result = self.zbalance.range_by_score(start_balance, end_balance, 0, -1, True, False)
else:
match_result = self.zbalance.range_by_score(0, start_balance, 0, -1, True, False)
else:
if end_balance > 0:
match_result = self.zbalance.range_by_score(end_balance, 21000000, 0, -1, True, False)
results = []
if match_result is not None:
#print(match_result)
for addr, balance2 in match_result:
address = addr.decode('utf-8')
result = {}
result["address"] = address
result["balance"] = balance2
results.append(result)
if limit > 0 and len(results) >= limit:
break
return results
'''

3
yq/.browserslistrc Normal file
View File

@@ -0,0 +1,3 @@
> 1%
last 2 versions
not dead

8
yq/.env.test Normal file
View File

@@ -0,0 +1,8 @@
# Test environment
NODE_ENV = 'test'
# API prefix for the test environment
VUE_APP_BASE_API = ''
# Base URL for the test environment
VUE_APP_BASE_URL = 'http://10.168.2.125:7101'
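In a Vue CLI project only variables prefixed with VUE_APP_ (plus NODE_ENV) are exposed to the client bundle, and they are read from process.env at build time. A minimal sketch of how the base URL above might be consumed; the timeout and fallback value are assumptions, not taken from this repository:
// http-client sketch: binds axios to the environment-specific base URL.
import axios from 'axios'

const service = axios.create({
  // Filled from .env.test / .env.production depending on the --mode flag.
  baseURL: process.env.VUE_APP_BASE_URL || '',
  timeout: 10000 // assumed timeout, not taken from this project
})

export default service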

20
yq/.eslintrc.js Normal file
View File

@@ -0,0 +1,20 @@
module.exports = {
root: true,
env: {
node: true
},
'extends': [
'plugin:vue/essential',
'eslint:recommended'
],
parserOptions: {
parser: 'babel-eslint'
},
rules: {
'no-console': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
'no-debugger': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
'no-redeclare': process.env.NODE_ENV === 'production' ? 'warn' : 'off',
'no-unused-vars': process.env.NODE_ENV === 'production' ? 'warn' : 'off'
}
}

23
yq/.gitignore vendored Normal file
View File

@@ -0,0 +1,23 @@
.DS_Store
node_modules
/dist
# local env files
.env.local
.env.*.local
# Log files
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# Editor directories and files
.idea
.vscode
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

View File

@@ -1,5 +1,23 @@
Directory overview:
- src/: core project source (Vue 2)
- views/: business pages (many data-metric/chart pages, personal center, subscription, payment, help center, etc.)
- components/: shared components (header/sidebar, forms, message board, etc.)
- router/: routing config (history mode, duplicate-navigation handling, per-page permission info)
- store/: Vuex state management (user/tabs/large transactions, etc.)
- api/: API wrappers and request utilities
- util/: utility library (ECharts data processing, requests, encryption, formatting, etc.)
- lang/: i18n and copy (i18n, sidebar/page copy, SEO copy)
- public/: static assets and entry HTML (icons/images, etc.)
- vue.config.js / babel.config.js: build configuration
Related files:
- Entry and mounting: src/main.js, src/App.vue, public/index.html
- Routing and navigation: src/router/index.js, src/components/CommonAside.vue, src/util/menu.js
- API requests: src/api/*.js, src/util/request.js, src/api/http.js
- Charts and data processing: src/util/processingData.js, src/util/echarts.js, src/util/chartResize.js
- i18n / SEO copy: src/lang/index.js, src/lang/zh.js, src/lang/en.js, src/lang/seoText/*
Feature overview:
- Frontend of a blockchain/crypto data visualization platform: built on Vue 2 + Element UI + ECharts, rendering various metric charts (with time axis, zooming, moving averages, etc.).
- Users and permissions: route meta carries role-based access control (registered/premium/VIP/support/audit/finance/admin, etc.); see the sketch after this list.
- Business modules: personal center, help center/tickets, subscriptions and alerts, payment/deposit/withdrawal with admin backend, metric documentation pages, SEO landing pages, etc.
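A minimal sketch of the role-gated routing described above, using Vue Router 3 conventions; the route name, component path, role names and storage key are illustrative, not taken from this repository:
import Vue from 'vue'
import VueRouter from 'vue-router'

Vue.use(VueRouter)

// Hypothetical route record: meta.roles lists which roles may enter.
const routes = [
  {
    path: '/finance',
    name: 'FinanceAdmin',
    component: () => import('@/views/FinanceAdmin.vue'), // placeholder view
    meta: { roles: ['finance', 'admin'] }
  }
]

const router = new VueRouter({ mode: 'history', routes })

// Global guard: pages without meta.roles stay public.
router.beforeEach((to, from, next) => {
  const allowed = to.meta && to.meta.roles
  const currentRole = localStorage.getItem('role') // assumed storage key
  if (!allowed || allowed.includes(currentRole)) next()
  else next('/login')
})

export default router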

5
yq/babel.config.js Normal file
View File

@@ -0,0 +1,5 @@
module.exports = {
presets: [
'@vue/cli-plugin-babel/preset'
]
}

BIN
yq/dist.zip Normal file

Binary file not shown.

13420
yq/package-lock.json generated Normal file

File diff suppressed because it is too large

48
yq/package.json Normal file
View File

@@ -0,0 +1,48 @@
{
"name": "project",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "vue-cli-service serve",
"build": "vue-cli-service build",
"lint": "vue-cli-service lint",
"serve:test": "vue-cli-service serve --mode test",
"build:test": "vue-cli-service build --mode test"
},
"dependencies": {
"@dreysolano/prerender-spa-plugin": "^1.0.3",
"@wangeditor/editor": "^5.1.23",
"@wangeditor/editor-for-vue": "^1.0.2",
"axios": "^0.27.2",
"bignumber.js": "^9.1.2",
"core-js": "^3.25.5",
"echarts": "^5.3.2",
"element-resize-detector": "^1.2.4",
"element-ui": "^2.15.8",
"encryptlong": "^3.1.4",
"vue": "^2.6.11",
"vue-i18n": "^8.27.2",
"vue-meta-info": "^0.1.7",
"vue-quill-editor": "^3.0.6",
"vue-router": "^3.2.0",
"vuex": "^3.4.0",
"vuex-persistedstate": "^4.1.0",
"wangeditor": "^4.7.15"
},
"devDependencies": {
"@vue/cli-plugin-babel": "~4.5.17",
"@vue/cli-plugin-eslint": "~4.5.17",
"@vue/cli-plugin-router": "~4.5.17",
"@vue/cli-plugin-vuex": "~4.5.17",
"@vue/cli-service": "~4.5.17",
"babel-eslint": "^10.1.0",
"eslint": "^6.7.2",
"eslint-plugin-vue": "^6.2.2",
"jsencrypt": "^3.3.1",
"prerender-spa-plugin": "^3.4.0",
"sass": "^1.26.5",
"sass-loader": "^8.0.2",
"sitemap-webpack-plugin": "^1.1.1",
"vue-template-compiler": "^2.6.11"
}
}

BIN
yq/public/favicon.ico Normal file

Binary file not shown.


BIN
yq/public/img/btc.png Normal file

Binary file not shown.


BIN
yq/public/img/busd.png Normal file

Binary file not shown.


BIN
yq/public/img/eth.png Normal file

Binary file not shown.


BIN
yq/public/img/f1.png Normal file

Binary file not shown.


BIN
yq/public/img/f2.png Normal file

Binary file not shown.


BIN
yq/public/img/f3.png Normal file

Binary file not shown.


BIN
yq/public/img/favicon.ico Normal file

Binary file not shown.


BIN
yq/public/img/gusd.png Normal file

Binary file not shown.


BIN
yq/public/img/husd.png Normal file

Binary file not shown.


BIN
yq/public/img/ltc.png Normal file

Binary file not shown.


85
yq/public/index.html Normal file
View File

@@ -0,0 +1,85 @@
<!DOCTYPE html>
<html lang="zh">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!-- <meta name="viewport" content="width=device-width,initial-scale=1.0"> -->
<link rel="icon" type="image/x-icon" href="./favicon.ico">
<link rel="stylesheet" href="//at.alicdn.com/t/c/font_3157209_ua5c8rb0fwc.css">
<meta name="google-site-verification" content="tnPx29xrawMDQ-WEm9nzlcid1_ZK8o1T-WwpGMKwNuQ" />
<!-- SEO-related meta tag optimisation -->
<!--
- index: allow search engines to index this page
- follow: allow search engines to follow links on the page
- max-image-preview:large: allow large image previews in search results
- max-snippet:-1: allow text snippets of any length
- max-video-preview:-1: allow video previews of any length
-->
<meta name="robots" content="index, follow, max-image-preview:large, max-snippet:-1, max-video-preview:-1">
<!-- Allow Google and other crawlers to index and follow links -->
<meta name="googlebot" content="index, follow">
<meta name="googlebot-news" content="index, follow">
<meta name="bingbot" content="index, follow">
<link rel="canonical" href="https://coinbus.cc" />
<meta name="viewport"
content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=0,minimum-scale=1.0">
<title>coinbus - A professional blockchain data analysis platform</title>
<meta name="description" content="Coinbus data analysis platform, covering BTC ETH、 Blockchain data from various aspects such as stablecoins and macroeconomics. Provide precise analysis of on chain data, market indicators, exchange data, trading volume, monitoring of large transactions, liquidity, bonds, and many other sectors to help you gain insights into cryptocurrency market trends">
<meta name="keywords" content="coinbus,BTC,ETH,stablecoins,macroeconomics,blockchain data analysis,on chain data,market indicators,exchange data,trading volume,large transactions,liquidity,bonds,cryptocurrency market trends">
</head>
<style>
body {
margin: 0px;
padding: 0;
/* height: 0px; */
/* //隐藏滚动条 */
scrollbar-width: none;
-ms-overflow-style: none;
border-right: none;
}
html{
/* //隐藏滚动条 */
scrollbar-width: none;
-ms-overflow-style: none;
border-right: none;
margin: 0px;
padding: 0;
overflow: hidden;
}
ul {
padding: 0;
margin: 0;
list-style: none
}
/* .messageClass{
z-index: 99999;
color: blue !important;
background: rebeccapurple;
} */
</style>
<body>
<!-- Shown when JavaScript is disabled and the Vue app cannot load -->
<noscript>
<strong>We're sorry but <%= htmlWebpackPlugin.options.title %> doesn't work properly without JavaScript enabled.
Please enable it to continue.</strong>
</noscript>
<div id="app"></div>
<!-- built files will be auto injected -->
</body>
</html>

22
yq/public/robots.txt Normal file
View File

@@ -0,0 +1,22 @@
User-agent: * # applies to all search-engine crawlers
Allow: / # allow crawling of everything under the root
Disallow: /admin
Disallow: /api
# Sitemap location
Sitemap: https://coinbus.cc/sitemap.xml # site map
# SEO: allow crawling of static resource files
Allow: /*.js
Allow: /*.css
Allow: /*.png
Allow: /*.jpg
Allow: /*.gif
Allow: /*.svg
Allow: /*.ico

893
yq/src/App.vue Normal file
View File

@@ -0,0 +1,893 @@
<template >
<div id="app" class="capitalize">
<el-container style="height: 100vh">
<el-header class="headerTop" style="width: 100%; height: 60px">
<common-header></common-header>
</el-header>
<el-container style="overflow: auto">
<!-- 路由页面 v-if="routerPage == `IndexIntroduce`||routerPage == `noWeb` "-->
<div class="routePages" v-if="pageName !== `Web`">
<router-view class="capitalize"></router-view>
</div>
<!-- 组件页面 -->
<div class="mainBOOS" v-if="pageName == `Web`">
<div class="sidebar">
<common-aside></common-aside>
</div>
<div class="outer">
<el-container class="pageConcent">
<el-main class="el_main">
<router-view class="pages"></router-view>
</el-main>
</el-container>
</div>
</div>
</el-container>
</el-container>
<!-- 指标解释 -->
<el-col>
<div
v-show="pageName !== `IndexIntroduce`"
@click="handelIndexIntroduce"
class="i izhibiaoguanli indexIntroduce"
:data-text="$t(`home.introduction`)"
style="color: #6580a9; font-size: 50px"
></div>
</el-col>
</div>
</template>
<script>
import CommonAside from "./components/CommonAside.vue";
import CommonHeader from "./components/CommonHeader.vue";
import commonFooter from "./components/commonFooter.vue";
import * as echarts from "echarts";
export default {
components: {
CommonAside,
CommonHeader,
commonFooter,
},
data() {
return {
value: "1",
options: [
{
value: "1",
label: "Bitcoin (BTC)",
src: "https://studio.glassnode.com/images/crypto-icons/btc.png",
},
{
value: "2",
label: "Ethereum (ETH)",
src: "https://studio.glassnode.com/images/crypto-icons/eth.png",
},
{
value: "3",
label: "Litecoin (LTC)",
src: "https://studio.glassnode.com/images/crypto-icons/ltc.png",
},
{
value: "4",
label: "Binance USD (BUSD)",
src: "https://studio.glassnode.com/images/crypto-icons/busd.png",
},
{
value: "5",
label: "Gemini dollar (GUSD)",
src: "https://studio.glassnode.com/images/crypto-icons/gusd.png",
},
{
value: "6",
label: "HUSD (HUSD)",
src: "https://studio.glassnode.com/images/crypto-icons/husd.png",
},
],
// menuHeight: {
// height: "900px",
// },
bodyHeight: "",
mainComponent: "mainStartCom",
isShrink: false, //控制是否改变宽度
fatherWidth: "300",
pageWidth: 0,
chart: null,
routerPage: "web",
pageName: "",
};
},
created() {
//动态调整左侧菜单栏高度 document.documentElement.clientHeight
// var docHeight = document.body.clientHeight;
// if (!docHeight) {
// this.menuHeight.height = 900 +`px`
// }else{
// this.menuHeight.height = docHeight - 20 + "px";
// }
// this.menuHeight.height = docHeight - 20 + "px";
// this.menuHeight.height=localStorage.getItem("screenHight")
// 监听localstorage改变赋值
this.bodyHeight = localStorage.getItem("screenHight");
window.addEventListener("setItem", () => {
this.bodyHeight = localStorage.getItem("screenHight");
});
},
watch: {
menuHeight: {
handler(val) {
// console.log(val.height, 1234565);
},
deep: true,
immediate: true,
},
"$route.name"(newValue) {
this.pageName = newValue;
},
},
mounted() {
this.pageName = this.$route.name;
//监听localStorage
window.addEventListener("setItem", () => {
this.routerPage = JSON.parse(localStorage.getItem("routerPage"));
});
},
methods: {
//点击指标介绍 打开新窗口
handelIndexIntroduce() {
let url =
window.location.origin +
this.$router.resolve({ name: "IndexIntroduce" }).href;
let a = window.open(url, "_blank");
},
},
};
</script>
<style lang="scss" scoped>
// -----------顶部导航栏------------------------
* {
box-sizing: border-box;
}
#app{
//隐藏滚动条
scrollbar-width: none;
-ms-overflow-style: none;
border-right: none;
// overflow: hidden;
box-sizing: border-box;
overflow-x: hidden;
}
html{
//隐藏滚动条
scrollbar-width: none;
-ms-overflow-style: none;
border-right: none;
box-sizing: border-box;
overflow: hidden;
background: gold;
}
html,
body {
margin: 0px;
padding: 0px;
box-sizing: border-box;
overflow-x: hidden;
}
* {
box-sizing: border-box;
}
.el-container{
//隐藏滚动条
scrollbar-width: none;
-ms-overflow-style: none;
border-right: none;
}
.pageConcent {
// outline: 1px solid red;
// background: rgba(0,0,0, 0.1);
// background-image: linear-gradient(to bottom, rgba(231,248,255,0.9), rgba(0,0,0,0), rgba(0,0,0,0.1));
background-image: linear-gradient(
to bottom,
rgba(231, 248, 255, 0.6),
rgba(231, 248, 255, 0.6),
rgba(231, 248, 255, 0.2),
rgba(255, 255, 255, 0)
);
// background: #fff;
// color: var(--okd-color-text-amplifed);
// padding: 0px 1%;
// width: 100%;
min-height: 100%;
// width:calc(100vw - 330px);
// outline: 1px solid red;
// background: firebrick;
// background: #888888;
width: 100%;
.el_main {
overflow-x: hidden;
// width: 90%;
// width: 20%;
// width:calc(100vw - 330px);
min-height: 100%;
display: flex;
justify-content: center;
// background: green;
width: 100%;
// width: 74vw;
.pages {
width: 90%;
min-height: 85%;
margin: 80px 0px;
box-shadow: 2px 5px 15px -5px #888888;
padding: 30px 0px;
// background: #fff;
background: #fafdfe;
// background: #383d71;
}
}
}
#app {
// width: 100vw;
// height: 100vh;
box-sizing: border-box;
// width: 100%;
// height: 100%;
// outline: 1px solid red;
// background: honeydew;
background-image: linear-gradient(
to bottom,
rgba(231, 248, 255, 1),
rgba(231, 248, 255, 0.6),
rgba(231, 248, 255, 0.6),
#fff
);
// background: goldenrod;
overflow-x: hidden;
//loading 层级
::v-deep .el-loading-mask {
z-index: 1998 !important;
}
.indexIntroduce {
// outline: 1px solid red;
position: fixed;
top: 180px;
right: 50px;
width: 10px;
height: 80px;
border-radius: 80px;
display: flex;
align-items: center;
justify-content: center;
}
.indexIntroduce:hover {
color: #f7931a !important;
// background: #f5390e;
cursor: pointer;
}
.indexIntroduce:hover::after {
content: attr(data-text); /* 显示的文字内容 国际化 */
position: absolute;
top: 80%;
left: 50%;
transform: translateX(-50%); /* 水平居中 */
// padding: 5px; /* 内边距 */
color: #000; /* 文字颜色 */
font-size: 12px; /* 文字大小 */
width: 60px;
// outline: 1px solid red;
text-align: center;
color: #f7931a;
}
}
#app .header {
width: inherit;
height: 60px;
display: flex;
align-items: center;
justify-content: space-between;
position: fixed;
line-height: 60px;
color: #b3c0d1;
left: 0%;
top: 0%;
z-index: 2000; /* z-index must be an integer; keeps the header above the sidebar (1999) */
// outline: 1px solid red;
padding: 0px 20px;
// background: #383d71;
color: #fff;
}
.el-card__body,
.el-main {
padding: 2px;
}
.headList,
.headList2 {
height: 100%;
display: flex;
align-items: center;
font-size: 13px;
color: rgba(0, 0, 0, 0.5);
// outline: 1px solid red;
padding-inline-start: 0px;
margin-block-end: 0px;
margin-block-start: 0px;
}
.mainBOOS {
// outline: 1px solid red;
min-width: 99vw;
// height: calc(100vh - 60px);
display: flex;
height: 100vh;
}
// .mainBOOS ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// height: 0;
// }
// .mainBOOS ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// }
// .mainBOOS ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// height: 0;
// }
.sidebar {
// width: 320px;
height: calc(100vh - 60px);
// background: red;
background-image: linear-gradient(
to bottom,
rgba(231, 248, 255, 1),
rgba(231, 248, 255, 0.6),
rgba(231, 248, 255, 0.4)
);
box-shadow: 2px 5px 15px -5px #888888;
position: relative;
height: 100%;
overflow-y: auto;
overflow-x: hidden;
z-index: 1999;
}
.outer {
// height: calc(100vh - 60px);
// height: 100%;
overflow-y: auto;
// overflow-y: none;
// background: #383d71;
flex: 1;
// outline: 1px solid red;
// background: green;
width: 100%;
height: 100%;
}
// .outer > .pageConcent ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// height: 0;
// }
// ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// }
// ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// height: 0;
// }
/* 设置滚动条的基本样式 */
::-webkit-scrollbar {
width: 0px; /* 设置滚动条的宽度 */
}
/* 设置滚动条轨道样式 */
::-webkit-scrollbar-track {
background-color: #f5f5f5; /* 设置滚动条轨道的背景色 */
}
/* 设置滚动条滑块样式 */
::-webkit-scrollbar-thumb {
background-color: #ccc; /* 设置滚动条滑块的背景色 */
border-radius: 20px; /* 设置滚动条滑块的圆角 */
// background: rgba(247,147,26, 0.1);
}
/* 设置滚动条滑块在鼠标悬停时的样式 */
::-webkit-scrollbar-thumb:hover {
background-color: #888; /* 设置滚动条滑块在鼠标悬停时的背景色 */
}
li {
list-style: none;
height: 30px;
line-height: 30px;
display: flex;
align-items: center;
justify-content: center;
// outline: 1px solid red ;
}
li:hover {
color: #000;
}
.headList li {
margin-left: 15px;
// outline: 1px solid red ;
}
.headList2 li {
margin-left: 15px;
// outline: 1px solid red ;
}
#app .headList2 li {
// outline: 1px solid red ;
padding: 5px 5px;
}
#app .headList li {
// outline: 1px solid red ;
padding: 5px 5px;
}
.registered {
outline: 1px solid red;
text-align: center;
outline: 1px solid rgba(0, 0, 0, 0.2);
}
// -----------中间部分-------------------------
.middle {
width: inherit;
// height: 100px;
outline: 1px solid;
// background: green;
padding-top: 60px;
display: flex;
justify-content: space-between;
align-items: center;
}
.middle > .left {
width: 280px;
// height: 600px;
// display: flex;
// flex-direction: column;
background: paleturquoise;
padding-top: 60px;
}
.middle > .right {
display: flex;
flex: 1;
height: 600px;
background: khaki;
}
// -----------侧边栏----------------------------
.left .search {
width: inherit;
height: 60px;
outline: 1px solid red;
position: fixed;
top: 60px;
left: 0px;
background: pink;
}
.left .sidebar {
outline: 1px solid red;
// width: inherit;
}
// .sidebar {
// width: 25%;
// height: 100vh;
// outline: 1px solid red;
// background: honeydew;
// margin-top: 41px;
// padding: 1px;
// position: relative;
// display: flex;
// }
// 侧边导航
// .sidebarNav {
// width: 100%;
// display: flex;
// flex: 1;
// outline: 1px solid red;
// margin-top: 80px;
// }
// ---------------搜索下拉框样式-----------------------
// .search {
// width: 280px;
// height: 35px;
// line-height: 35px;
// outline: 1px solid red;
// // background: red;
// margin-top: 10px;
// position: fixed;
// top: 40px;
// left: 20px;
// z-index: 99;
// }
// .dark-select {
// border: 1px solid rgba(0, 0, 0, 0.5);
// width: 200px;
// border-radius: 5px;
// margin-top: 40px;
// display: inline-block;
// // margin-top: 10px;
// .el-select-dropdown__item {
// line-height: 30px;
// height: 30px;
// }
// .el-select .el-input .el-select__caret.el-icon-::before {
// content: "\e790";
// position: absolute;
// width: 100%;
// height: 100%;
// top: 50%;
// left: 50%;
// transform: translate(-50%, -50%);
// }
// // 修改input默认值颜色 兼容其它主流浏览器
// input::-webkit-input-placeholder {
// // color: rgba(255, 255, 255, 0.60);
// color: rgba(0, 0, 0, 0.4);
// }
// input::-moz-input-placeholder {
// // color: rgba(255, 255, 255, 0.60);
// color: rgba(0, 0, 0, 0.6);
// }
// input::-ms-input-placeholder {
// // color: rgba(255, 255, 255, 0.60);
// color: rgba(0, 0, 0, 0.6);
// }
// // input框
// .el-select,
// .el-input,
// .el-input__inner {
// // background-color: rgba(0, 0, 0, 0.3);
// background-color: rgba(255, 255, 255, 0.1);
// // background-color: #ccc;
// // color: rgba(255, 255, 255, 0.50);
// // color: #fff;
// // border: 1px solid #000;
// // outline: 1px solid #000;
// outline: none;
// border: none; // 去掉边框
// // border-color: "#000";// 默认边框的颜色
// // text-align: left;
// // border-radius: 4px;
// }
// // 选中时边框颜色
// // .el-input__inner:focus{
// // border-color: "#000"
// // }
// // 鼠标悬浮时 input框颜色
// // .el-input__inner:hover{
// // background-color: rgba(0, 0, 0, 0.3);
// // }
// // input框 右侧的箭头
// // .el-select .el-input .el-select__caret {
// // color: rgba(255, 255, 255, 0.50);
// // }
// // option选项 上面的箭头
// // .el-popper[x-placement^="bottom"] .popper__arrow::after {
// // // border-bottom-color: rgba(43, 45, 55, 0.80);
// // // border-bottom-color: rgba(0, 0, 0, 0.10);
// // // background: rgba(0, 0, 0, 0.2);
// // // z-index: 9999;
// // }
// // .popper__arrow {
// // border: none;
// // }
// // option选项 最外层
// .el-select-dropdown {
// // border: none !important;
// // background: rgba(43, 45, 55, 0.80) !important;
// // background: rgba(0, 0, 0, 0.2) !important;
// // background: #fff;
// border: 1px solid rgba(0, 0, 0, 0.2);
// z-index: 9;
// }
// // option选项 文字样式
// .el-select-dropdown__item {
// // color: rgba(255, 255, 255, 0.50) !important;
// // color: rgba(0, 0, 0, 0.50) !important;
// // color: #000;
// z-index: 9;
// font-size: 12px;
// }
// .el-select-dropdown__item.selected span {
// // color: rgba(255, 255, 255, 0.80) !important;
// // color: rgba(0, 0, 0, 0.50) !important;\ \
// font-size: 12px;
// z-index: 9;
// }
// // 移入option选项 样式调整
// // .el-select-dropdown__item {
// // height: 20px;
// // margin-top: 10px;
// // }
// // // .el-select-dropdown__item.hover {
// // // background-color: rgba(255, 255, 255, 0.06);
// // // background-color: #d2d6e8;
// // // color: rgba(255, 255, 255, 0.60) !important;
// // z-index: 9999;
// // }
// // 下拉框垂直滚动条宽度
// // .el-scrollbar__bar.is-vertical {
// // width: 10px;
// // top: 2px;
// // }
// // 下拉框最大高度
// // .el-select-dropdown__wrap {
// // max-height: 200px;
// // }
// }
.option {
display: flex;
align-items: center;
justify-content: left;
.label {
margin-left: 5px;
}
}
// ------------子导航加内容整体盒子-------------------------
// .contents {
// width: 79vw;
// position: absolute;
// top: 0px;
// right: 0px;
// border: 1px solid rgba(0, 0, 0, 0.3);
// background: res;
// }
// .el-card__body,
// .el-main {
// padding: 0px !important;
// }
// .el-header {
// // background-color: #b3c0d1;
// height: 40px !important;
// display: flex;
// align-items: center;
// color: #333;
// // background: #b3c0d1;
// border-bottom: 1px solid rgba(0, 0, 0, 0.2);
// }
// // -------子导航----第二横向导航栏-------------------------
// .sc-jGprRt {
// display: flex;
// align-items: center;
// // outline: 1px solid red;
// width: 300px;
// justify-content: space-around;
// }
// .sc-jGprRt button {
// display: flex;
// // outline: 1px solid red;
// border: 1px solid rgba(0, 0, 0, 0.2);
// color: rgba(0, 0, 0, 0.6);
// padding: 2px 5px;
// align-items: center;
// justify-content: space-around;
// transition: linear 0.2s;
// border-radius: 2px;
// }
// .sc-jGprRt button:hover {
// border: 1px solid rgba(0, 0, 0, 0.6);
// }
// // .el-aside {
// // color: #333;
// // // color: #ccc;
// // }
// ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// }
// ::-webkit-scrollbar {
// //隐藏滚动条
// width: 0 !important;
// height: 0;
// }
// // ----------侧边栏-------------------------------
// .optionItem span {
// margin-left: 10px;
// }
// .aside {
// outline: 1px slid red;
// }
// .el-submenu .el-submenu__title {
// height: 35px;
// line-height: 35px;
// display: flex;
// align-items: center;
// font-size: 12px;
// }
// .el-submenu .el-menu-item {
// height: 30px !important;
// // width: inherit;
// white-space: nowrap; //不换行
// text-overflow: ellipsis; //将文本溢出显示为(…)
// overflow: hidden; //溢出隐藏。
// line-height: 30px !important;
// margin: 0px;
// padding-left: 25px !important;
// font-size: 10px !important;
// // outline: 1px solid red;
// }
// //子选项
// .itemT1 span:nth-of-type(1) {
// color: rgb(104, 113, 152);
// display: inline-block;
// vertical-align: middle;
// // outline: 1px solid red;
// background: rgb(229, 231, 240);
// font-size: 0.75rem;
// font-weight: bold;
// text-transform: capitalize;
// border-radius: 4px;
// height: 20px;
// width: 20px;
// text-align: center;
// line-height: 20px;
// }
// .el-menu-item-group__title {
// padding-left: 20px !important;
// }
// .itemT2 span:nth-of-type(1) {
// display: inline-block;
// vertical-align: middle;
// width: 20px;
// height: 20px;
// line-height: 20px;
// font-size: 0.75rem;
// font-weight: bold;
// overflow: hidden;
// border-radius: 4px;
// text-align: center;
// letter-spacing: -1px;
// white-space: nowrap;
// text-transform: capitalize;
// background: rgb(230, 238, 255);
// color: rgb(35, 110, 254);
// }
// .itemT3 span:nth-of-type(1) {
// display: inline-block;
// vertical-align: middle;
// width: 20px;
// height: 20px;
// line-height: 20px;
// font-size: 0.75rem;
// font-weight: bold;
// overflow: hidden;
// border-radius: 4px;
// text-align: center;
// letter-spacing: -1px;
// white-space: nowrap;
// text-transform: capitalize;
// background: rgb(250, 233, 237);
// color: rgb(254, 109, 145);
// }
// .title2 {
// display: inline;
// margin-left: 10px;
// color: rgba(0, 0, 0, 0.6);
// }
//
//
// .el-container.is-vertical {
// flex-direction: row;
// }
// // .el-main{
// // background: red;
// // position: absolute;
// // top: 0px;
// // left: 0px;
// // }
.resize {
cursor: col-resize;
position: absolute;
right: 0;
height: 100%;
width: 5px;
}
.routePages {
min-width: 99vw;
height: 93.5vh;
// outline: 1px solid red;
// background: gold;
// padding: 20px;
// overflow-y: auto;
box-sizing: border-box;
// overflow: hidden;
.jumpHome {
// outline: 1px solid red;
width: 100%;
display: inline-block;
text-align: right;
height: 30px;
line-height: 50px;
color: rgba(0, 0, 0, 0.9);
span {
position: fixed;
right: 20px;
top: 60px;
font-size: 14px;
font-weight: 600;
}
span:hover {
color: #f7931a;
}
}
}
</style>
<style lang="scss">
/* 警告语层级 */
.messageClass {
z-index: 999999 !important;
}
//公式的公共样式
.formulas {
display: flex;
justify-content: center;
align-items: center;
font-weight: 550;
color: rgba(0, 0, 0, 0.6);
div:nth-of-type(2) {
display: flex;
flex-direction: column;
margin-left: 10px;
span {
text-align: center;
}
span:nth-of-type(1) {
border-bottom: 1px solid rgba(0, 0, 0, 1);
}
}
}
</style>
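The created()/mounted() hooks above listen for a custom "setItem" event because the native storage event only fires in other tabs. A helper such as the $addStorageEvent imported elsewhere in this project presumably dispatches that event whenever localStorage is written; a minimal sketch under that assumption:
// Same-tab localStorage notifier (assumed behaviour of $addStorageEvent).
export function $addStorageEvent(key, value) {
  localStorage.setItem(key, value)           // write first
  window.dispatchEvent(new Event('setItem')) // then notify listeners in this tab
}

// Usage: components re-read localStorage when the event fires.
// $addStorageEvent('screenHight', String(document.body.clientHeight))
// window.addEventListener('setItem', () => { /* re-read localStorage here */ })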

View File

@@ -0,0 +1,64 @@
// import { ref } from "vue";
// import axios from "axios";
// const keyword = ref("");
// let temp;
// const search = () => {
// temp?.("取消请求"); // 取消上一次请求
// axios({
// method: "post",
// url: "/api",
// data: { wd: keyword.value },
// cancelToken: new axios.CancelToken((cancel) => {
// // 接受一个 cancel 取消当前请求的方法
// temp = cancel;
// })
// }).then((res) => {
// // 成功
// }).catch((err) => {
// if (axios.isCancel(err)) {
// console.log(err.message); // 被取消时的参数
// } else {
// console.log(err); // 请求错误
// }
// });
// };
// import { AxiosRequestConfig } from "axios";
// export default {
// tokens: new Map(),
// getKey: (config: AxiosRequestConfig): string => [config.url,config.method].join("&"),
// add(config: AxiosRequestConfig) {
// const pendingKey = this.getKey(config);
// config.cancelToken =
// config.cancelToken ||
// new axios.CancelToken((cancel) => {
// if (!this.tokens.has(pendingKey)) {
// this.tokens.set(pendingKey, cancel);
// }
// });
// },
// remove(config: AxiosRequestConfig) {
// const pendingKey = this.getKey(config);
// if (this.tokens.has(pendingKey)) {
// const cancelToken = this.tokens.get(pendingKey);
// cancelToken(pendingKey);
// this.tokens.delete(pendingKey);
// }
// },
// removeAll() {
// this.tokens.forEach((value, key) => {
// const cancelToken = this.tokens.get(key);
// cancelToken(key);
// this.tokens.delete(key);
// });
// },
// };
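Both commented-out drafts above aim at cancelling duplicate in-flight requests. A compact, runnable version of the second draft in plain JavaScript, keyed by url & method (the default export matches how http.js imports CancelTokens; everything else follows the draft):
import axios from 'axios'

// One cancel function per "url&method" key that is still pending.
const pendingRequests = new Map()

export default {
  getKey (config) {
    return [config.url, config.method].join('&')
  },
  // Attach a cancel token to an outgoing request and remember its cancel().
  add (config) {
    const key = this.getKey(config)
    config.cancelToken = config.cancelToken ||
      new axios.CancelToken((cancel) => {
        if (!pendingRequests.has(key)) pendingRequests.set(key, cancel)
      })
  },
  // Cancel and forget a request that is still pending for this key.
  remove (config) {
    const key = this.getKey(config)
    if (pendingRequests.has(key)) {
      pendingRequests.get(key)(key)
      pendingRequests.delete(key)
    }
  },
  removeAll () {
    pendingRequests.forEach((cancel, key) => cancel(key))
    pendingRequests.clear()
  }
}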

685
yq/src/api/api.js Normal file
View File

@@ -0,0 +1,685 @@
import { get,POST } from './http'
export default class API {
//盈利地址数
static getDownloadFile(ids) {
return get(`/marketall/ticket/downloadFile?ids=${ids}`)
}
//盈利地址数
static getProfitAddress(req, radius, start, end) {
if (start) {
return get(`/marketall/${radius}/v2/${req}?start=${start}`)
} else {
return get(`/marketall/${radius}/v2/${req}`)
}
}
//
static getHolder(req, radius, start, end) {
if (start) {
return get(`/marketall/${radius}/${req}?start=${start}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
//验证邮箱是否注册过
static getcheckMail(email) {
return get(`/system/user/checkMail/${email}`)
}
//请求个人中心资料
static getpersonal() {
return get(`/system/user/profile`)
}
//请求重置密码验证码
static getpasswrodCode() {
return get(`/auth/restPwdCode`)
}
//请求注册弹窗验证码图片
static getCodeEmail(username, email) {
return get(`/auth/emailcode?userName=${username}&email=${email}`)
}
//请求验证码图片
static getVerification() {
return get(`/code`)
}
//请求erc20 token大额交易 前50的token
static getETHTokenAbbrs() {
return get(`/marketall/eth/ETHTokenAbbrs`)
}
//ETH移动平均线
static getEthSma(day, start, end,) {
if (start) {
return get(`/marketall/eth/sma/${day}?start=${start}&end=${end}`)
} else {
return get(`/marketall/eth/sma/${day}`)
}
}
//BTC移动平均线
static getSma(day, start, end,) {
if (start) {
return get(`/marketall/btc/sma/${day}?start=${start}&end=${end}`)
} else {
return get(`/marketall/btc/sma/${day}`)
}
}
//新合约地址
static getNewContract(req, radius, page, limit,total) {
if (total) {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}&total=${total}`)
}else{
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
}
}
//11月2号 更改dailyBuySell接口
static getDailyBuySell(req, radius, start, end, page, limit) {
if (start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
}
}
//大额交易搜索 分页 没有时间搜索 26修改接口
static getBigDeals3(req, radius, search, page, limit) {
if (search) {
return get(`/marketall/${radius}/${req}?total=${search}&page=${page}&limit=${limit}`)
} else if (!search) {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
} else {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
}
}
//新增csupply 流通供应量/ 累计币天销毁量(流通供应量)
static getMvrv(req, radius, start, end) {
if (radius !== "BTC" && start) {
return get(`/marketall/list/interface?req=market&radius=${radius}&start=${start}&end=${end}`)
} else if (radius == "BTC" && start && end) {
return get(`/marketall/btc/mvrv?start=${start}&end=${end}`)
} else if (radius == "BTC" && !start) {
return get(`/marketall/btc/mvrv`)
} else {
console.log("走着空间看到就", `${req, radius}`);
return get(`/marketall/list/interface?req=market&radius=${radius}`)
}
}
//新增csupply 流通供应量/ 累计币天销毁量(流通供应量)
static getCsupply(req, radius, start, end,sma) {
if (sma) {
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//新增soag 币天销毁量年龄分布
static getSoag(req, radius, start, end,sma) {
if (sma) {
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//新增dormancy 已花费输出休眠平均天数
static getDormancy(req, radius, start, end,sma) {
if (sma) {
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//新增asol已花费输出平均寿命
static getAsol(req, radius, start, end,sma) {
if (sma) {
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//新增sopr 已花费输出盈利比
static getSopr(req, radius, start, end,sma) {
if (sma) {
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//新增Profitrate 已实现盈利交易占比
static getProfitrate(req, radius, start, end,sma) {
if (sma) {
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//纳斯达克日K BTC 日K
static getNDAQBTCPrice(req, start, end,sma) {
if (sma) {
if (start) {
return get(`/marketall/market/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/market/${req}?sma=${sma}`)
}
}else{
if (start) {
return get(`/marketall/market/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/market/${req}`)
}
}
}
//合约主动买入量
static getTakerlongshortRatio(symbol, period, start, end, limit) {
// &limit=${limit}
if (start) {
return get(`/futures/data/takerlongshortRatio?symbol=${symbol}&period=${period}&startTime=${start}&endTime=${end}`)
} else {
return get(`/futures/data/takerlongshortRatio?symbol=${symbol}&period=${period}`)
}
}
//未平仓合约数
static getOpenInterest(symbol, start, end, limit) {
// &limit=${limit}
if (symbol && start) {
return get(`/fapi/v1/openInterest?symbol=${symbol}&startTime=${start}&endTime=${end}`)
} else if (!symbol && start) {
return get(`/fapi/v1/openInterest?startTime=${start}&endTime=${end}`)
} if (symbol && !start) {
return get(`/fapi/v1/openInterest?symbol=${symbol}`)
} else {
return get(`/fapi/v1/openInterest`)
}
}
//资金费率
static getFundingRate(symbol, start, end, limit) {
// &limit=${limit}
if (symbol && start) {
return get(`/fapi/v1/fundingRate?symbol=${symbol}&startTime=${start}&endTime=${end}`)
} else if (!symbol && start) {
return get(`/fapi/v1/fundingRate?startTime=${start}&endTime=${end}`)
} if (symbol && !start) {
return get(`/fapi/v1/fundingRate?symbol=${symbol}`)
} else {
return get(`/fapi/v1/fundingRate`)
}
}
//每日交易量、交易费
static getVolume(req, radius, start, end,sma) {
if (sma) {
if (start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
static getFees(req, radius, start, end,) {
if (start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
//ma370
static getMa370(req, radius, start, end,sma) {
if (sma) {
if (start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//每日币价K线 新增capReal
static getCapReal(start, end,) {
if (start) {
return get(`/marketall/btc/capReal?coin=BTC&start=${start}&end=${end}`)
} else {
return get(`/marketall/btc/capReal?coin=BTC`)
}
}
//请求BTC price价格
static getBtcPrice(start, end,) {
if (start) {
return get(`/marketall/list/interface?req=price&radius=btc&start=${start}&end=${end}`)
} else {
return get(`/marketall/list/interface?req=price&radius=btc`)
}
}
//请求BTC/ETH price价格
static getBtcPrice2(radius, start, end,) {
if (start) {
return get(`/marketall/list/interface?req=price&radius=${radius}&start=${start}&end=${end}`)
} else {
return get(`/marketall/list/interface?req=price&radius=${radius}`)
}
}
//币安 条件查询
static getPriceValueLine(start, end,) {
return get(`/marketall/bian/list?symbol=BTCUSDT&start=${start}&end=${end}`)
}
//以太坊巨鲸地址余额列表
static getTopAddrList(type, page, limit) {
if (type) {
return get(`/marketall/eth/ethTopAddrList?total=${type}&page=${page}&limit=${limit}`)
} else {
return get(`/marketall/eth/ethTopAddrList?page=${page}&limit=${limit}`)
}
}
//地址对应标记持有者名称关系列表
static getBtctags(req, radius, page, limit) {
if (req && page && !limit) {
return get(`/marketall/${radius}/${req}?page=${page}`)
} else if (req && page && limit) {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
} else if (req && !page && limit) {
return get(`/marketall/${radius}/${req}?limit=${limit}`)
}
else {
return get(`/marketall/${radius}/${req}`)
}
}
//天针
static getDaysNeedle(interval, limit) {
return get(`https://13.214.133.132:7101/marketall/bian/tz?interval=${interval}&limit=${limit}`)
}
//8/29新增币价当前市值排名
static getCoinmarketall() {
return get(`/marketall/list/coinmarket/all`)
}
//8/29新增24小时币价
static getCoinmarket(coin, start, end) {
if (coin) {
return get(`/marketall/list/coinmarket?coin=${coin}&start=${start}&end=${end}`)
} else {
return
}
}
//8/15新增币安价格
static getPriceValue(interval, start, end) {
return get(`/marketall/bian/priceValue?interval=${interval}&start=${start}&end=${end}`)
// return get(`/marketall/bian/priceValue?symbol=BTCUSDT&start=${start}&end=${end}`)
}
//8月5号新增BalanceData
static getBalanceData(req, radius, value, start, end) {
if (req && !start && !end) {
return get(`/marketall/${radius}/${req}/${value}`)
} else {
return get(`/marketall/${radius}/${req}/${value}?start=${start}&end=${end}`)
}
}
//8月2号新增getTopList
static getTopList(req, radius, type,) {
if (req && type) {
return get(`/marketall/${radius}/${req}?${type}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
//20号新增newAddress
static getNewAddress(req, radius, start, end,sma) {
if (sma) {
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
//币安 条件查询
static getBiAnController(req, radius, symbol, timeInterval, start, end, type,) {
if (req && start && end) {
return get(`/marketall/${radius}/${req}?symbol=${symbol}&timeInterval=${timeInterval}&start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}?symbol=${symbol}&timeInterval=${timeInterval}`)
}
// &start=${start}&end=${end}
}
//arh99请求格式不同
static getArh(req, radius, start, end) {
if (req && start && end) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
//大额交易搜索 不分页
static getBigDeals2(req, radius, search, start, end, page, limit) {
if (search && start && end) {
return get(`/marketall/${radius}/${req}?total=${search}&start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else if (req && search && !start) {
return get(`/marketall/${radius}/${req}?total=${search}&page=${page}&limit=${limit}`)
} else if (req && !search && !start) {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
} else if (req && !search && start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
}
}
//大额交易搜索 带分页
static getETHTokenBigtx(req, radius, search, start, end, page, limit, abbr) {
if (abbr) {
if (search && start && end) {
return get(`/marketall/${radius}/${req}?total=${search}&start=${start}&end=${end}&page=${page}&limit=${limit}&abbr=${abbr}`)
} else if (req && search && !start) {
return get(`/marketall/${radius}/${req}?total=${search}&page=${page}&limit=${limit}&abbr=${abbr}`)
} else if (req && !search && !start) {//默认渲染一天的数据 带时间
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}&abbr=${abbr}`)
} else if (req && !search && start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&page=${page}&limit=${limit}&abbr=${abbr}`)
} else {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}&abbr=${abbr}`)
}
} else {
if (search && start && end) {
return get(`/marketall/${radius}/${req}?total=${search}&start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else if (req && search && !start) {
return get(`/marketall/${radius}/${req}?total=${search}&page=${page}&limit=${limit}`)
} else if (req && !search && !start) {//默认渲染一天的数据 带时间
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
} else if (req && !search && start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
}
}
}
//大额交易搜索 带分页
static getBigDeals(req, radius, search, start, end, page, limit) {
if (search && start && end) {
return get(`/marketall/${radius}/${req}?total=${search}&start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else if (req && search && !start) {
return get(`/marketall/${radius}/${req}?total=${search}&page=${page}&limit=${limit}`)
} else if (req && !search && !start) {//默认渲染一天的数据 带时间
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
} else if (req && !search && start && page) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else if (req && !search && start) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&page=${page}&limit=${limit}`)
} else {
return get(`/marketall/${radius}/${req}?page=${page}&limit=${limit}`)
}
}
//大额tabel表格
static getBigDealsTabel(req, radius, address, txId) {
if (address) {
return get(`/marketall/${radius}/${req}?address=${address}`)
} else {
return get(`/marketall/${radius}/${req}?txId=${txId}`)
}
}
//大额交易数量查询
static getBigDealsCount(req, radius, start, end, type,sma) {
if (sma) {
if (req && start && end && type) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&type=${type}&sma=${sma}`)
} else if (req && start && end && !type) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/${radius}/${req}?sma=${sma}`)
}
}else{
if (req && start && end && type) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}&type=${type}`)
} else if (req && start && end && !type) {
return get(`/marketall/${radius}/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/${radius}/${req}`)
}
}
}
static getNdaqData(type, start, end) {//NAQA
if (start && end) {
return get(`${type}&start=${start}&end=${end}`)
} else {
return get(`${type}`)
}
}
//通用请求
static getActiveaddress(req, radius, start, end,sma) {//Activeaddress
if (sma) {
if (req && radius && start && end) {
return get(`/marketall/list/interface?req=${req}&radius=${radius}&start=${start}&end=${end}&sma=${sma}`)
} else if (req && radius && !start) {
return get(`/marketall/list/interface?req=${req}&radius=${radius}&sma=${sma}`)
} else if (req && !radius && start && end) {
return get(`/marketall/list/interface?req=${req}&start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/list/interface?req=${req}&sma=${sma}`)
}
}else{
if (req && radius && start && end) {
return get(`/marketall/list/interface?req=${req}&radius=${radius}&start=${start}&end=${end}`)
} else if (req && radius && !start) {
return get(`/marketall/list/interface?req=${req}&radius=${radius}`)
} else if (req && !radius && start && end) {
return get(`/marketall/list/interface?req=${req}&start=${start}&end=${end}`)
} else {
return get(`/marketall/list/interface?req=${req}`)
}
}
}
//请求价格
static getPriceOHLC(req, radius, start, end) {//Activeaddress
if (start && end) {
return get(`/marketall/list/interface?req=${req}&radius=${radius}&start=${start}&end=${end}`)
}
if (req && radius) {
return get(`/marketall/list/interface?req=${req}&radius=${radius}`)
} else if (req && !radius && start && end) {
return get(`/marketall/list/interface?req=${req}&start=${start}&end=${end}`)
} else {
return get(`/marketall/list/interface?req=${req}`)
}
}
//NDAQ
static getPriceNDAQ(req, start, end,sma) {//Activeaddress
if (sma) {
if (start && end) {
return get(`/marketall/market/${req}?start=${start}&end=${end}&sma=${sma}`)
} else {
return get(`/marketall/market/${req}?sma=${sma}`)
}
}else{
if (start && end) {
return get(`/marketall/market/${req}?start=${start}&end=${end}`)
} else {
return get(`/marketall/market/${req}`)
}
}
}
}
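Most helpers above repeat the same branching just to append optional start/end/sma parameters. A small query-string builder is one way to express that pattern once; this is a suggested sketch, not code that exists in the repository:
// Appends only the parameters that are actually set.
function buildQuery (params) {
  const qs = new URLSearchParams()
  Object.entries(params).forEach(([key, value]) => {
    if (value !== undefined && value !== null && value !== '') qs.append(key, value)
  })
  const s = qs.toString()
  return s ? `?${s}` : ''
}

// e.g. get(`/marketall/${radius}/${req}${buildQuery({ start, end, sma })}`)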

312
yq/src/api/apiPost.js Normal file
View File

@@ -0,0 +1,312 @@
import request from './http'
//盈利地址数
export function fetchProfitAddress(data) {
return request({
url: `/marketall/btc/v2/profitAddress`,
method: 'post',
data
})
}
//亏损地址数
export function fetchLossAddress(data) {
return request({
url: `/marketall/btc/v2/lossAddress`,
method: 'post',
data
})
}
//地址盈亏比
export function fetchProfitRatio(data) {
return request({
url: `/marketall/btc/v2/profitRatio`,
method: 'post',
data
})
}
//长期持有者供应量
export function fetchLthSupply(data) {
return request({
url: `/marketall/btc/v2/lthSupply`,
method: 'post',
data
})
}
//短期持有者供应量
export function fetchSthSupply(data) {
return request({
url: `/marketall/btc/v2/sthSupply`,
method: 'post',
data
})
}
// 已实现价格
export function fetchRealizedPrice(data) {
return request({
url: `/marketall/btc/v2/realizedPrice`,
method: 'post',
data
})
}
// 长/短期持有者供应量盈亏比
export function fetchRelativeLthSth(data) {
return request({
url: `/marketall/btc/v2/relativeLthSth`,
method: 'post',
data
})
}
// 长期持有者盈利总供应量
export function fetchLthProfitSupply(data) {
return request({
url: `/marketall/btc/v2/lthProfitSupply`,
method: 'post',
data
})
}
// 长期持有者亏损总供应量
export function fetchLthLossSupply(data) {
return request({
url: `/marketall/btc/v2/lthLossSupply`,
method: 'post',
data
})
}
// 短期持有者盈利总供应量
export function fetchSthProfitSupply(data) {
return request({
url: `/marketall/btc/v2/sthProfitSupply`,
method: 'post',
data
})
}
// 短期持有者亏损总供应量
export function fetchSthLossSupply(data) {
return request({
url: `/marketall/btc/v2/sthLossSupply`,
method: 'post',
data
})
}
// 短期持有者盈亏比
export function fetchSthProfitRatio(data) {
return request({
url: `/marketall/btc/v2/sthProfitRatio`,
method: 'post',
data
})
}
// 短期到长期实现价值比率
export function fetchSLRVRatio(data) {
return request({
url: `/marketall/btc/v2/SLRVRatio`,
method: 'post',
data
})
}
// 长期持有者盈亏比
export function fetchLthProfitRatio(data) {
return request({
url: `/marketall/btc/v2/lthProfitRatio`,
method: 'post',
data
})
}
//长期持有者每日交易量
export function fetchLthVolume(data) {
return request({
url: `/marketall/btc/v2/lthVolume`,
method: 'post',
data
})
}
//交易汇率乘数
export function fetchFrm(data) {
return request({
url: `/marketall/btc/v2/frm`,
method: 'post',
data
})
}
//累积价值-销毁天数比
export function fetchCvdd(data) {
return request({
url: `/marketall/btc/v2/cvdd`,
method: 'post',
data
})
}
//已平衡价格
export function fetchBalancedPrice(data) {
return request({
url: `/marketall/btc/v2/balancedPrice`,
method: 'post',
data
})
}
//市值/交易价值比
export function fetchNvtRatio(data) {
return request({
url: `/marketall/btc/v2/nvtRatio`,
method: 'post',
data
})
}
//流通速度
export function fetchVelocity(data) {
return request({
url: `/marketall/btc/v2/velocity`,
method: 'post',
data
})
}
//已实现流通市值
export function fetchRcap(data) {
return request({
url: `/marketall/btc/rcap`,
method: 'post',
data
})
}
//已实现流通市值(找零消除)
export function fetchEarcap(data) {
return request({
url: `/marketall/btc/earcap`,
method: 'post',
data
})
}
//时区购买力
export function getTzbp(data) {
return request({
url: `/marketall/exchange/v2/tzbp`,
method: 'post',
data
})
}
//买入大单挂单
export function getBobp(data) {
return request({
url: `/marketall/exchange/v2/bobp`,
method: 'post',
data
})
}
//卖出大单挂单
export function getBosp(data) {
return request({
url: `/marketall/exchange/v2/bosp`,
method: 'post',
data
})
}
//交易所余额
export function getExchange(data) {
return request({
url: `/marketall/btc/v2/ebalance`,
method: 'post',
data
})
}
//交易所余额 下拉框
export function getExchanges(data) {
return request({
url: `/marketall/btc/v2/exchanges`,
method: 'post',
data
})
}
//交易所余额ETH
export function getExchangeETH(data) {
return request({
url: `/marketall/eth/v2/ebalance`,
method: 'post',
data
})
}
//交易所余额 下拉框ETH
export function getExchangesETH(data) {
return request({
url: `/marketall/eth/v2/exchanges`,
method: 'post',
data
})
}
//待交易池详情
export function getMempool(data) {
return request({
url: `/marketall/btc/v2/mempool`,
method: 'post',
data
})
}
//NVT信号
export function getNvtSign(data) {
return request({
url: `/marketall/btc/v2/nvtSign`,
method: 'post',
data
})
}
//长/短期持有者供应量盈亏分布
export function DistributionLthSth(data) {
return request({
url: `/marketall/btc/v2/distributionLthSth`,
method: 'post',
data
})
}
// ETH price endpoint (used by the large-transaction charts)
export function ethPrice(data) {
return request({
url: `/marketall/eth/v2/price`,
method: 'post',
data
})
}
// Value days destroyed (VDD) multiple
export function getVDD(data) {
return request({
url: `/marketall/btc/vdd`,
method: 'post',
data
})
}
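Every function in this module follows the same pattern: POST a filter payload to one on-chain metric endpoint and return the promise produced by `request` from http.js. A minimal usage sketch from a chart component might look like the following; the import path, the `dateType` payload field and the `drawChart` callback are illustrative assumptions, not part of this module:

import { fetchRealizedPrice, fetchBalancedPrice } from '@/api/market' // path assumed

// Load two price models in parallel and hand the raw response bodies to a chart helper.
export function loadPriceModels(drawChart) {
  const payload = { dateType: '1y' } // hypothetical filter accepted by the backend
  return Promise.all([fetchRealizedPrice(payload), fetchBalancedPrice(payload)])
    .then(([realized, balanced]) => {
      // request() resolves with the axios response object, so the body is on .data
      drawChart(realized.data, balanced.data)
    })
    .catch(err => {
      // the interceptors in http.js already show a Message on failure; just log here
      console.error(err)
    })
}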

147
yq/src/api/help.js Normal file
View File

@@ -0,0 +1,147 @@
import request from './http'
// Get question categories
export function getQuestionType(data) {
return request({
url: `marketall/ticket/getQuestionType`,
method: 'post',
data
})
}
// Submit a ticket
export function getSubmitTicket(data) {
return request({
url: `marketall/ticket/submitTicket`,
method: 'post',
data
})
}
// Resubmit a ticket
export function getResubmitTicket(data) {
return request({
url: `marketall/ticket/resubmitTicket`,
method: 'post',
data
})
}
// Withdraw a ticket
export function getRetractTicket(data) {
return request({
url: `marketall/ticket/retractTicket`,
method: 'post',
data
})
}
// Personal ticket list
export function getPrivateTicket(data) {
return request({
url: `marketall/ticket/getPrivateTicket`,
method: 'post',
data
})
}
// Ticket details
export function getTicketDetails(data) {
return request({
url: `marketall/ticket/getTicketDetails`,
method: 'post',
data
})
}
// Submit a ticket for review
export function getSubmitAuditTicket(data) {
return request({
url: `marketall/ticket/submitAuditTicket`,
method: 'post',
data
})
}
// Personal unread ticket count
export function getUnreadCount(data) {
return request({
url: `marketall/ticket/getUnreadCount`,
method: 'post',
data
})
}
// Admin ticket list
export function getTicketList(data) {
return request({
url: `marketall/ticket/getTicketList`,
method: 'post',
data
})
}
// Daily ticket count
export function getDailyCount(data) {
return request({
url: `marketall/ticket/getDailyCount`,
method: 'post',
data
})
}
// Review a ticket
export function getAuditTicket(data) {
return request({
url: `marketall/ticket/auditTicket`,
method: 'post',
data
})
}
// Download a file
export function getDownloadFile(data) {
return request({
url: `marketall/ticket/downloadFile`,
method: 'post',
data
})
}
// Reply to a ticket
export function getResponTicket(data) {
return request({
url: `marketall/ticket/responTicket`,
method: 'post',
data
})
}
// Personal center: mark replied-but-unread tickets as read
export function getReadTicket(data) {
return request({
url: `marketall/ticket/readTicket`,
method: 'post',
data
})
}
// Close a ticket
export function getEndTicket(data) {
return request({
url: `marketall/ticket/endTicket`,
method: 'post',
data
})
}
// Query the user's current account balance
export function GetUserAccount(data) {
return request({
url: `/order/transfer/getUserAccount`,
method: 'post',
data
})
}
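A hedged sketch of how these ticket endpoints are typically chained from a support page: load the categories, submit a ticket, then refresh the personal list. The payload field names (`typeId`, `content`, `page`, `pageSize`) are assumptions; the real request contract is defined by the backend and not shown here.

import { getQuestionType, getSubmitTicket, getPrivateTicket } from '@/api/help'

// Assumed flow: pick a category, submit the ticket, then reload the user's ticket list.
async function submitSupportTicket(typeId, content) {
  const types = await getQuestionType({})                        // available categories
  if (!types.data || types.data.code !== 200) return             // http.js treats code 200 as success
  await getSubmitTicket({ typeId, content })                     // field names are illustrative
  const mine = await getPrivateTicket({ page: 1, pageSize: 10 }) // paging params assumed
  return mine.data
}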

418
yq/src/api/http.js Normal file
View File

@@ -0,0 +1,418 @@
import axios from 'axios'
import { Message } from 'element-ui'
import { Loading } from 'element-ui';
import CancelTokens from './CancelTokens'
const baseURL = '/api' // name of the proxied API prefix
import { $addStorageEvent } from '../util/processingData'
// Endpoints that can be accessed anonymously, without a token
const noTokenUrl = [
`/marketall/btc/address`,
`/marketall/list/interface?req=price&`,
`/marketall/BTC/csupply`,
`/marketall/btc/mintusd`,
`/marketall/btc/marketcap`,
`/marketall/BTC/mvrv`,
`/marketall/BTC/btcbigtxcount`,
`/marketall/BTC/bigamountvout`,
`/marketall/BTC/arh99`,
`/marketall/BTC/arh99day`,
`/marketall/BTC/jzr60`,
`/marketall/BTC/jzr60day`,
`/marketall/BTC/ma730`,
`/marketall/BTC/ma730day`,
`/marketall/ETH/csupply`,
`/marketall/ETH/reward`,
`/marketall/eth/ETHVolume`,
`/marketall/eth/ETHFees`,
`/marketall/eth/address`,
`/marketall/ETH/ethbigtxcount`,
`/marketall/ETH/ethbigtx`,
`/eth/ethTopAddrList`,
`/marketall/ETH/tronUsdtVolume`,
`/marketall/eth/usdtVolume`,
`/marketall/eth/usdcVolume`,
`/marketall/ETH/tronUsdcVolume`,
`/marketall/market/NDAQohlc`,
`/marketall/market/NDAQma`,
]
let overtime = "" //同页面多次请求超时只报错一次
window.addEventListener("setItem", () => {
overtime = localStorage.getItem("overtime");
});
Message.customClass = "messageClass"
let message = '' // show the "login expired" error only once
// development environment
if (process.env.NODE_ENV == 'development') {
// axios.defaults.baseURL = 'http://10.168.2.197:7001'
// axios.defaults.baseURL = 'https://13.214.133.132:11447'
// axios.defaults.baseURL = 'http://10.168.2.125:7101'
axios.defaults.baseURL = 'https://coinbus.cc/api/v1'
}
// production environment
if (process.env.NODE_ENV == 'production') {
axios.defaults.baseURL = 'https://coinbus.cc/api/v1'
// axios.defaults.baseURL = 'https://13.214.133.132:11447'
}
// staging environment
if (process.env.NODE_ENV === "staging") {
axios.defaults.baseURL = "https://coinbus.cc/api/v1"
}
// Create a dedicated axios instance
const service = axios.create({
baseURL: process.env.VUE_APP_BASE_API,
timeout: 10000, // base timeout; the request interceptor below adjusts it per endpoint
});
// Add a request interceptor on the service instance
service.interceptors.request.use( config =>{
if (config.url.includes('/fapi/v1/fundingRate') || config.url.includes('/fapi/v1/openInterest') || config.url.includes('/futures/data/takerlongshortRatio')) {
config.baseURL = baseURL
// config.baseURL = `https://fapi.binance.com`
// config.baseURL = 'https://binancezh.jp'
}
else if(config.url.includes('transfer/endOrder')){
config.timeout = 60000// the deposit-confirmation endpoint gets a dedicated 1-minute timeout
}else if(config.url.includes('levelUp')){
config.timeout = 30000// the level-up (upgrade purchase) endpoint uses 30 seconds
}else{
config.timeout = 10000
}
// token
if (!noTokenUrl.includes(config.url)) {
const token = localStorage.token;
if (token) {
config.headers.Authorization = `Bearer ${token}`
}
}
return config
},error=> {
// Handle request errors
return Promise.reject(error)
})
// Add a request interceptor on the global axios instance
// token injection
axios.interceptors.request.use(function (config) {
if (config.url.includes('/fapi/v1/fundingRate') || config.url.includes('/fapi/v1/openInterest') || config.url.includes('/futures/data/takerlongshortRatio')) {
config.baseURL = baseURL
// config.baseURL = `https://fapi.binance.com`
// config.baseURL = 'https://binancezh.jp'
}
// config.headers={
// appid:"565656"
// }
// token
if (!noTokenUrl.includes(config.url)) {
const token = localStorage.token;
if (token) {
config.headers.Authorization = `Bearer ${token}`
}
}
return config
}, (error) => {
// Handle request errors
return Promise.reject(error)
})
// Response timeout
axios.defaults.timeout = 10000;
service.defaults.timeout = 10000;
// Number of retries and delay between retries
axios.defaults.retry = 1;
axios.defaults.retryDelay = 1000;
var axiosResError = ""
// Add a response interceptor on the global axios instance
axios.interceptors.response.use(
res => {
// console.log(res,"响应数据111");
if (res.config.baseURL !== "https://fapi.binance.com") {
if (res.data.code == 421) {// login expired: remove the token (code 421)
// on expiry, remove the token and blank out the other two stored values
console.log("login expired");
localStorage.removeItem("token");
$addStorageEvent(1, "username", '')
$addStorageEvent(1, "loginTime", '')
localStorage.setItem("identity", JSON.stringify({}))
$addStorageEvent(1, "dialogLoginVisible",JSON.stringify(true) );//弹出登录
if(!message){
message = res.data.msg
Message({
message: res.data.msg,
type: 'warning',
customClass: "messageClass",
duration: 3000,
showClose: true,
});
}
}else if(res.data.code == 401){// this page requires login
if (!overtime) {
overtime ="login required"
Message({
message: window.vm.$i18n.t('login.viewPage'),
type: 'error',
customClass: "messageClass",
showClose: true,
});
}
}else if(res.data.code !== 200){
console.log(`!== 200`,res);
Message({
message: res.data.msg,
type: 'error',
customClass: "messageClass"
});
}
}
return res;
}, error => {
if (error.code == "ECONNABORTED") {
if (!overtime) {
overtime ="请求超时"
Message({//请求超时
showClose: true,
message: window.vm.$i18n.t('login.timeout'),
type: 'error',
customClass: "messageClass",
showClose: true,
duration: 4000,
});
}
}
console.log('err' + error)
let { message } = error;
if (message == "Network Error") {//后端接口连接异常,请刷新重试
message = window.vm.$i18n.t('login.linkError')
}
else if (message.includes("timeout")) {//超时
message = window.vm.$i18n.t('login.timeout')
}
else if (message.includes("Request failed with status code")) {//系统接口异常5...
message = window.vm.$i18n.t('login.system') + message.substr(message.length - 3);
}
if (!overtime) {
overtime=message
Message({
message: message,
type: 'error',
showClose: true,
duration: 5 * 1000
})
}
// Message({
// message: message,
// type: 'error',
// duration: 5 * 1000
// })
return Promise.reject(error)
}
);
var serviceResError=""
// Add a response interceptor on the service instance
service.interceptors.response.use(
res=>{
if (res.data.code == 421) {// login expired: remove the token (code 421)
// on expiry, remove the token and blank out the other two stored values
localStorage.removeItem("token");
$addStorageEvent(1, "username", '')
$addStorageEvent(1, "loginTime", '')
localStorage.setItem("identity", JSON.stringify({}))
$addStorageEvent(1, "dialogLoginVisible",JSON.stringify(true) );
console.log(message,"service 1 ");
if(!message){// login expired: show the error message only once
message = res.data.msg
Message({
message: res.data.msg,
type: 'error',
customClass: "messageClass",
duration: 3000,
showClose: true,
});
}
}else if(res.data.code == 401){// this page requires login
if (!overtime) {
overtime=`login required`
Message({
message: window.vm.$i18n.t('login.viewPage'),
type: 'error',
customClass: "messageClass",
showClose: true,
});
}
}else if(res.data.code !== 200){
console.log(`!== 200`,res);
Message({
message: res.data.msg,
type: 'error',
customClass: "messageClass",
showClose: true,
});
}
return res;
},
error=>{
if (error.code == "ECONNABORTED") {
if (!overtime) {
overtime ="请求超时"
Message({//请求超时
showClose: true,
message: window.vm.$i18n.t('login.timeout'),
type: 'error',
customClass: "messageClass",
showClose: true,
duration: 4000,
});
}
}
console.error(error)
let { message } = error;
if (message == "Network Error") {
message = window.vm.$i18n.t('login.linkError')
}
else if (message.includes("timeout")) {
message = window.vm.$i18n.t('login.timeout')
}
else if (message.includes("Request failed with status code")) {
message = window.vm.$i18n.t('login.system') + message.substr(message.length - 3);
}
if (!overtime) {
overtime=message
Message({
message: message,
type: 'error',
showClose: true,
duration: 5 * 1000
})
}
// Message({
// message: message,
// type: 'error',
// duration: 5 * 1000
// })
return Promise.reject(error)
}
)
// Wrapper for GET requests
export function get(url, params) {
return new Promise((resolve, reject) => {
axios.get(url, {
params: params
}).then(res => {
resolve(res)
}).catch(err => {
reject(err)
return
})
})
}
// Wrapper for POST requests
export function POST(url, data) {
return new Promise((resolve, reject) => {
axios.post(url, data)
.then(res => {
resolve(res)
})
.catch(err => {
reject(err)
})
})
}
// Wrapper for DELETE requests
export function Delete(url) {
return new Promise((resolve, reject) => {
axios.delete(url)
.then(res => {
resolve(res)
})
.catch(err => {
reject(err)
})
})
}
export default service;
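The module is consumed in two ways: the configured `service` instance (default export) that the API modules call as `request({ url, method, data })`, and the `get`/`POST`/`Delete` wrappers bound to the global axios instance. A brief sketch of both styles, assuming the usual `@` → `src` alias:

import request, { get, POST } from '@/api/http'

// Style 1: the service instance — token injection, per-endpoint timeouts and the
// code-421/401 handling from the interceptors above apply automatically.
function fetchMarketcap(data) {
  return request({ url: '/marketall/btc/marketcap', method: 'post', data })
}

// Style 2: the bare wrappers around the global axios instance; these only go
// through the global interceptors, not the service ones.
async function probe() {
  const kline = await POST('/marketall/bian/kline', { symbol: 'BTCUSDT' }) // payload assumed
  const price = await get('/marketall/list/interface?req=price&', {})      // anonymous endpoint
  return { kline: kline.data, price: price.data }
}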

View File

@@ -0,0 +1,73 @@
import request from './http'
// Fetch the default message-board data when the page loads
export function fetchMessageBoard(data) {
return request({
url: `/marketall/msgBorad/getPageMsg`,
method: 'post',
data
})
}
// Submit a message
export function submitMessage(data) {
return request({
url: `/marketall/msgBorad/submitMsg`,
method: 'post',
data
})
}
// Message-board admin: daily message count
export function msgBoradNum(data) {
return request({
url: `/marketall/msgBorad/getDailyMsgCount`,
method: 'post',
data
})
}
// Message-board admin: table data
export function msgBoradTabel(data) {
return request({
url: `/marketall/msgBorad/getAllMsg`,
method: 'post',
data
})
}
// Reply to a message
export function responMsg(data) {
return request({
url: `/marketall/msgBorad/responMsg`,
method: 'post',
data
})
}
// Personal center: unread messages
export function getPrivateMsg(data) {
return request({
url: `/marketall/msgBorad/getPrivateMsg`,
method: 'post',
data
})
}
// Query like/dislike counts
export function getPraise(data) {
return request({
url: `/marketall/msgBorad/getPageDetail`,
method: 'post',
data
})
}
// Like / dislike
export function getClickPraisePoor(data) {
return request({
url: `/marketall/msgBorad/comment`,
method: 'post',
data
})
}

View File

@@ -0,0 +1,25 @@
import request from './http'
// Subscribe
export function getSubscript(data) {
return request({
url: '/marketall/msgSub/subscript',
method: 'post',
data
})
}
// Subscription list
export function getSubscriptList(data) {
return request({
url: '/marketall/msgSub/list',
method: 'post',
data
})
}
// Unsubscribe
export function getSubscriptDelete(data) {
return request({
url: '/marketall/msgSub/cancel',
method: 'post',
data
})
}

544
yq/src/api/pages.js Normal file
View File

@@ -0,0 +1,544 @@
import request from './http'
//PI周期顶指标
export function takePiCycleTopIndicator(data) {
return request({
url: `/marketall/btc/v2/PiCycleTopIndicator`,
method: 'post',
data
})
}
//稳定币供应比
export function takeSsr(data) {
return request({
url: `/marketall/btc/v2/ssr`,
method: 'post',
data
})
}
//稳定币供应比振荡器
export function takeSsro(data) {
return request({
url: `/marketall/btc/v2/ssro`,
method: 'post',
data
})
}
//库存/新发行比
export function takeStfRatio(data) {
return request({
url: `/marketall/btc/v2/stfRatio`,
method: 'post',
data
})
}
//库存/新发行偏差
export function takeStfDeflection(data) {
return request({
url: `/marketall/btc/v2/stfDeflection`,
method: 'post',
data
})
}
//黄金比例乘数
export function takeGoldenRatioMult(data) {
return request({
url: `/marketall/btc/v2/goldenRatioMult`,
method: 'post',
data
})
}
//PI周期顶指标 eth
export function takepiCycleTopEth(data) {
return request({
url: `/marketall/eth/v2/PiCycleTopIndicator`,
method: 'post',
data
})
}
//黄金比例乘数 eth takegoldenRatioMultEth
export function takegoldenRatioMultEth(data) {
return request({
url: `/marketall/eth/v2/goldenRatioMult`,
method: 'post',
data
})
}
//币天销毁量 eth cdd
export function getCddEth(data) {
return request({
url: `/marketall/eth/v2/cdd`,
method: 'post',
data
})
}
//已花费输出休眠平均天数 eth dormancy
export function getDormancyEth(data) {
return request({
url: `/marketall/eth/v2/dormancy`,
method: 'post',
data
})
}
//已花费输出平均寿命eth asol
export function getAsolEth(data) {
return request({
url: `/marketall/eth/v2/asol`,
method: 'post',
data
})
}
//币天销毁量分布eth
export function getCddwaveEth(data) {
return request({
url: `/marketall/eth/v2/cddwave`,
method: 'post',
data
})
}
//按余额分段的地址数分布eth
export function getDistributionEth(data) {
return request({
url: `/marketall/eth/v2/address/distribution`,
method: 'post',
data
})
}
//持有者地址的供应分布eth
export function getSupplyDistributionEth(data) {
return request({
url: `/marketall/eth/v2/address/supplyDistribution`,
method: 'post',
data
})
}
//盈利地址百分比eth
export function getProfitPercentEth(data) {
return request({
url: `/marketall/eth/v2/address/profitPercent`,
method: 'post',
data
})
}
//盈利地址百分比eth
export function getProfitEth(data) {
return request({
url: `/marketall/eth/v2/address/profit`,
method: 'post',
data
})
}
//亏损地址数eth
export function getLossEth(data) {
return request({
url: `/marketall/eth/v2/address/loss`,
method: 'post',
data
})
}
//通货膨胀率eth
export function getInflactionRateEth(data) {
return request({
url: `/marketall/eth/v2/inflactionRate`,
method: 'post',
data
})
}
//通货膨胀率eth
export function getIssuanceEth(data) {
return request({
url: `/marketall/eth/v2/issuance`,
method: 'post',
data
})
}
//已花费输出盈利比eth
export function getSoprEth(data) {
return request({
url: `/marketall/eth/v2/sopr`,
method: 'post',
data
})
}
//流通速度eth
export function getVelocityEth(data) {
return request({
url: `/marketall/eth/v2/velocity`,
method: 'post',
data
})
}
//流通速度eth
export function getNvtRatioEth(data) {
return request({
url: `/marketall/eth/v2/nvtRatio`,
method: 'post',
data
})
}
//活跃度eth
export function getLivelinessEth(data) {
return request({
url: `/marketall/eth/v2/liveliness`,
method: 'post',
data
})
}
//交易费率乘数eth
export function getFrmEth(data) {
return request({
url: `/marketall/eth/v2/frm`,
method: 'post',
data
})
}
//盈利百分比eth
export function getSupplyProfitPercentEth(data) {
return request({
url: `/marketall/eth/v2/supply/profitPercent`,
method: 'post',
data
})
}
//盈利供应量eth
export function getSupplyProfitEth(data) {
return request({
url: `/marketall/eth/v2/supply/profit`,
method: 'post',
data
})
}
//盈利亏损量eth
export function getSupplyLossEth(data) {
return request({
url: `/marketall/eth/v2/supply/loss`,
method: 'post',
data
})
}
//未实现的净收益损失比eth
export function getNuplEth(data) {
return request({
url: `/marketall/eth/v2/nupl`,
method: 'post',
data
})
}
//未实现的净收益量 eth
export function getRelativeUProfitEth(data) {
return request({
url: `/marketall/eth/v2/relativeUProfit`,
method: 'post',
data
})
}
//未实现的净亏损量 eth
export function getRelativeULossEth(data) {
return request({
url: `/marketall/eth/v2/relativeULoss`,
method: 'post',
data
})
}
//长持者未实现的净收益量 eth
export function getLthNUPLEth(data) {
return request({
url: `/marketall/eth/v2/lthNUPL`,
method: 'post',
data
})
}
//短持者未实现的净亏损量 eth
export function getSthNUPLEth(data) {
return request({
url: `/marketall/eth/v2/sthNUPL`,
method: 'post',
data
})
}
//流通市值 eth
export function getMarketcapEth(data) {
return request({
url: `/marketall/eth/v2/marketcap`,
method: 'post',
data
})
}
//已实现流通市值 eth
export function getRcapEth(data) {
return request({
url: `/marketall/eth/v2/rcap`,
method: 'post',
data
})
}
//已实现价格 eth
export function getRpriceEth(data) {
return request({
url: `/marketall/eth/v2/rprice`,
method: 'post',
data
})
}
//流通市值-已实现流通市值比 eth
export function getMvrvRatioEth(data) {
return request({
url: `/marketall/eth/v2/mvrvRatio`,
method: 'post',
data
})
}
//燃烧量 eth
export function getBurnedSupplyEth(data) {
return request({
url: `/marketall/eth/v2/burnedSupply`,
method: 'post',
data
})
}
//燃烧量 eth
export function getMintedSupplyEth(data) {
return request({
url: `/marketall/eth/v2/mintedSupply`,
method: 'post',
data
})
}
//USDT铸造量
export function getMintedUSDT(data) {
return request({
url: `/marketall/eth/v2/usdt/minted`,
method: 'post',
data
})
}
//USDT铸造量 表格数据
export function getMintedUSDTTable(data) {
return request({
url: `/marketall/eth/v2/usdt/minted/list`,
method: 'post',
data
})
}
//USDC铸造量
export function getMintedUSDC(data) {
return request({
url: `/marketall/eth/v2/usdc/minted`,
method: 'post',
data
})
}
//USDC铸造量 表格数据
export function getMintedUSDCTable(data) {
return request({
url: `/marketall/eth/v2/usdc/minted/list`,
method: 'post',
data
})
}
//BUSD铸造量
export function getMintedBUSD(data) {
return request({
url: `/marketall/eth/v2/busd/minted`,
method: 'post',
data
})
}
//BUSD铸造量 表格数据
export function getMintedBUSDTable(data) {
return request({
url: `/marketall/eth/v2/busd/minted/list`,
method: 'post',
data
})
}
//资金费率
export function getFundingRate(data) {
return request({
url: `/marketall/exchange/fundingRate`,
method: 'post',
data
})
}
//未平仓合约数
export function getOpenInterest(data) {
return request({
url: `/marketall/exchange/openInterest`,
method: 'post',
data
})
}
//合约主动买入量
export function getTakerLongShortRatio(data) {
return request({
url: `/marketall/exchange/takerlongshortRatio`,
method: 'post',
data
})
}
//成交量分布图
export function getVolumeProfile(data) {
return request({
url: `/marketall/exchange/volumeProfile`,
method: 'post',
data
})
}
//成交量分布图 K线数据
export function getKLine(data) {
return request({
url: `/marketall/bian/kline`,
method: 'post',
data
})
}
//代币交易量
export function getErcVolume(data) {
return request({
url: `/marketall/eth/ETHTokenVolume`,
method: 'post',
data
})
}
//国债收益率
export function getUSTreasuriesYields(data) {
return request({
url: `/marketall/market/USTreasuriesYields`,
method: 'post',
data
})
}
//货币供应量
export function MoneyStockMeasures(data) {
return request({
url: `/marketall/market/MoneyStockMeasures`,
method: 'post',
data
})
}
//私营部门贷款
export function loanPsi(data) {
return request({
url: `/marketall/market/loan/psi`,
method: 'post',
data
})
}
//私人债务占GDP比重
export function loanGdp(data) {
return request({
url: `/marketall/market/loan/gdp`,
method: 'post',
data
})
}
//每周经济指数
export function wei(data) {
return request({
url: `/marketall/market/wei`,
method: 'post',
data
})
}
//MVRV Z-Score BTC 修改接口为POST
export function MVRVZ(data) {
return request({
url: `/marketall/btc/mvrvz`,
method: 'post',
data
})
}
//外汇
export function exchangeRate(data) {
return request({
url: `/marketall/market/exchangeRate`,
method: 'post',
data
})
}
//外汇曲线图
export function exchangeRateChart(data) {
return request({
url: `/marketall/market/exchangeRate/list`,
method: 'post',
data
})
}
//GDP 曲线图
export function getGDPChart(data) {
return request({
url: `/marketall/market/GDP/list`,
method: 'post',
data
})
}
//GDP
export function getGDP(data) {
return request({
url: `/marketall/market/GDP`,
method: 'post',
data
})
}

161
yq/src/api/pay.js Normal file
View File

@@ -0,0 +1,161 @@
import request from './http'
//提交充值
export function Recharge(data) {
return request({
url: `/order/transfer/createOrder`,
method: 'post',
data
})
}
//订单支付确认
export function EndOrder(data) {
return request({
url: `/order/transfer/endOrder`,
method: 'post',
data
})
}
//请求订单取消 关闭转账交易
export function CancelOrder(data) {
return request({
url: `/order/transfer/cancelOrder`,
method: 'post',
data
})
}
//根据txid查询订单
export function CheckOrderByTxid(data) {
return request({
url: `/order/transfer/checkOrderByTxid`,
method: 'post',
data
})
}
//提交升级订单
export function LevelUp(data) {
return request({
url: `/order/consume/levelUp`,
method: 'post',
data
})
}
//获取对应等级所需金额
export function GetNeedFunding(data) {
return request({
url: `/order/consume/getNeedFunding`,
method: 'post',
data
})
}
//发起提现申请
export function Apply(data) {
return request({
url: `/order/withdraw/apply`,
method: 'post',
data
})
}
//查看个人提现历史
export function Pravite(data) {
return request({
url: `/order/withdraw/history/pravite`,
method: 'post',
data
})
}
//查看个人提现历史
export function historyAll(data) {
return request({
url: `/order/withdraw/history/all`,
method: 'post',
data
})
}
//财务处理提现订单
export function Handle(data) {
return request({
url: `/order/withdraw/handle`,
method: 'post',
data
})
}
//查看提现订单详情
export function getDetail(data) {
return request({
url: `/order/withdraw/getDetail`,
method: 'post',
data
})
}
//查看个人充值记录
export function privateOrder(data) {
return request({
url: `order/transfer/privateOrder`,
method: 'post',
data
})
}
//查看所有充值记录
export function allAddCreditOrder(data) {
return request({
url: `order/transfer/allAddCreditOrder`,
method: 'post',
data
})
}
//获取邮箱验证码
export function getEmailCode(data) {
return request({
url: `order/withdraw/getEmailCode`,
method: 'post',
data
})
}
//查看个人消费历史 消费记录
export function PrivateConsume(data) {
return request({
url: `order/consume/privateConsume`,
method: 'post',
data
})
}
//新增用户可用地址
export function AddNewAddress(data) {
return request({
url: `order/transfer/addNewAddress`,
method: 'post',
data
})
}
//查询上次提交交易hash时间 倒计时
export function LastUseTxid(data) {
return request({
url: `order/transfer/lastUseTxid`,
method: 'post',
data
})
}
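Taken together with the 60-second timeout that http.js grants `transfer/endOrder`, these endpoints imply a deposit flow of create → transfer → confirm, with a cancel path on failure. A hedged sketch; the payload field names and the response shape are assumptions:

import { Recharge, EndOrder, CancelOrder } from '@/api/pay'

// Assumed deposit flow: create an order, then confirm it with the on-chain txid.
async function deposit(amount, txid) {
  const order = await Recharge({ amount })                                  // field name assumed
  const orderId = order.data && order.data.data && order.data.data.orderId  // response shape assumed
  try {
    // endOrder is given a 60s timeout in http.js because on-chain confirmation can be slow
    return await EndOrder({ orderId, txid })
  } catch (e) {
    await CancelOrder({ orderId })                                          // roll back the pending order
    throw e
  }
}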

View File

@@ -0,0 +1,93 @@
import request from './http'
//地址统计
export function getAddress(data) {
return request({
url: `/marketall/btc/rt/address`,
method: 'post',
data
})
}
//交易费
export function getBTCFees(data) {
return request({
url: `/marketall/btc/rt/BTCFees`,
method: 'post',
data
})
}
//交易量
export function getBTCVolume(data) {
return request({
url: `/marketall/btc/rt/BTCVolume`,
method: 'post',
data
})
}
//交易量(消除找零)
export function getEaVolume2(data) {
return request({
url: `/marketall/btc/rt/eaVolume`,
method: 'post',
data
})
}
//每日已实现盈利交易占比
export function getProfitRate2(data) {
return request({
url: `/marketall/btc/rt/profitRate`,
method: 'post',
data
})
}
//已花费输出盈利比
export function getSopr2(data) {
return request({
url: `/marketall/btc/rt/sopr`,
method: 'post',
data
})
}
//已花费输出平均寿命
export function getAsol2(data) {
return request({
url: `/marketall/btc/rt/asol`,
method: 'post',
data
})
}
//已花费输出休眠平均天数
export function getDormancy2(data) {
return request({
url: `/marketall/btc/rt/dormancy`,
method: 'post',
data
})
}
//币天销毁量
export function getCdd2(data) {
return request({
url: `/marketall/btc/rt/cdd`,
method: 'post',
data
})
}
//币天销毁量年龄分布
export function getSoag2(data) {
return request({
url: `/marketall/btc/rt/soag`,
method: 'post',
data
})
}

9
yq/src/api/sharing.js Normal file
View File

@@ -0,0 +1,9 @@
import server from "../util/request"
export function fetchActiveaddress(params){
return server({
method:"GET",
url:"/marketall/activeaddress/list?coin=btc",
params
})
}
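Unlike the other modules, this one goes through `../util/request` and issues a GET, so extra filters are passed as query parameters and merged with the hard-coded `?coin=btc`. A small usage sketch; the `page` and `size` parameter names are illustrative, not confirmed by the API:

import { fetchActiveaddress } from '@/api/sharing'

// Expected request (assuming util/request wraps axios):
// GET /marketall/activeaddress/list?coin=btc&page=1&size=30
fetchActiveaddress({ page: 1, size: 30 })
  .then(res => console.log(res.data))
  .catch(err => console.error(err))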

Binary file not shown.

After

Width:  |  Height:  |  Size: 96 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.7 KiB

539
yq/src/assets/icon/demo.css Normal file
View File

@@ -0,0 +1,539 @@
/* Logo 字体 */
@font-face {
font-family: "iconfont logo";
src: url('https://at.alicdn.com/t/font_985780_km7mi63cihi.eot?t=1545807318834');
src: url('https://at.alicdn.com/t/font_985780_km7mi63cihi.eot?t=1545807318834#iefix') format('embedded-opentype'),
url('https://at.alicdn.com/t/font_985780_km7mi63cihi.woff?t=1545807318834') format('woff'),
url('https://at.alicdn.com/t/font_985780_km7mi63cihi.ttf?t=1545807318834') format('truetype'),
url('https://at.alicdn.com/t/font_985780_km7mi63cihi.svg?t=1545807318834#iconfont') format('svg');
}
.logo {
font-family: "iconfont logo";
font-size: 160px;
font-style: normal;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
/* tabs */
.nav-tabs {
position: relative;
}
.nav-tabs .nav-more {
position: absolute;
right: 0;
bottom: 0;
height: 42px;
line-height: 42px;
color: #666;
}
#tabs {
border-bottom: 1px solid #eee;
}
#tabs li {
cursor: pointer;
width: 100px;
height: 40px;
line-height: 40px;
text-align: center;
font-size: 16px;
border-bottom: 2px solid transparent;
position: relative;
z-index: 1;
margin-bottom: -1px;
color: #666;
}
#tabs .active {
border-bottom-color: #f00;
color: #222;
}
.tab-container .content {
display: none;
}
/* 页面布局 */
.main {
padding: 30px 100px;
width: 960px;
margin: 0 auto;
}
.main .logo {
color: #333;
text-align: left;
margin-bottom: 30px;
line-height: 1;
height: 110px;
margin-top: -50px;
overflow: hidden;
*zoom: 1;
}
.main .logo a {
font-size: 160px;
color: #333;
}
.helps {
margin-top: 40px;
}
.helps pre {
padding: 20px;
margin: 10px 0;
border: solid 1px #e7e1cd;
background-color: #fffdef;
overflow: auto;
}
.icon_lists {
width: 100% !important;
overflow: hidden;
*zoom: 1;
}
.icon_lists li {
width: 100px;
margin-bottom: 10px;
margin-right: 20px;
text-align: center;
list-style: none !important;
cursor: default;
}
.icon_lists li .code-name {
line-height: 1.2;
}
.icon_lists .icon {
display: block;
height: 100px;
line-height: 100px;
font-size: 42px;
margin: 10px auto;
color: #333;
-webkit-transition: font-size 0.25s linear, width 0.25s linear;
-moz-transition: font-size 0.25s linear, width 0.25s linear;
transition: font-size 0.25s linear, width 0.25s linear;
}
.icon_lists .icon:hover {
font-size: 100px;
}
.icon_lists .svg-icon {
/* 通过设置 font-size 来改变图标大小 */
width: 1em;
/* 图标和文字相邻时,垂直对齐 */
vertical-align: -0.15em;
/* 通过设置 color 来改变 SVG 的颜色/fill */
fill: currentColor;
/* path 和 stroke 溢出 viewBox 部分在 IE 下会显示
normalize.css 中也包含这行 */
overflow: hidden;
}
.icon_lists li .name,
.icon_lists li .code-name {
color: #666;
}
/* markdown 样式 */
.markdown {
color: #666;
font-size: 14px;
line-height: 1.8;
}
.highlight {
line-height: 1.5;
}
.markdown img {
vertical-align: middle;
max-width: 100%;
}
.markdown h1 {
color: #404040;
font-weight: 500;
line-height: 40px;
margin-bottom: 24px;
}
.markdown h2,
.markdown h3,
.markdown h4,
.markdown h5,
.markdown h6 {
color: #404040;
margin: 1.6em 0 0.6em 0;
font-weight: 500;
clear: both;
}
.markdown h1 {
font-size: 28px;
}
.markdown h2 {
font-size: 22px;
}
.markdown h3 {
font-size: 16px;
}
.markdown h4 {
font-size: 14px;
}
.markdown h5 {
font-size: 12px;
}
.markdown h6 {
font-size: 12px;
}
.markdown hr {
height: 1px;
border: 0;
background: #e9e9e9;
margin: 16px 0;
clear: both;
}
.markdown p {
margin: 1em 0;
}
.markdown>p,
.markdown>blockquote,
.markdown>.highlight,
.markdown>ol,
.markdown>ul {
width: 80%;
}
.markdown ul>li {
list-style: circle;
}
.markdown>ul li,
.markdown blockquote ul>li {
margin-left: 20px;
padding-left: 4px;
}
.markdown>ul li p,
.markdown>ol li p {
margin: 0.6em 0;
}
.markdown ol>li {
list-style: decimal;
}
.markdown>ol li,
.markdown blockquote ol>li {
margin-left: 20px;
padding-left: 4px;
}
.markdown code {
margin: 0 3px;
padding: 0 5px;
background: #eee;
border-radius: 3px;
}
.markdown strong,
.markdown b {
font-weight: 600;
}
.markdown>table {
border-collapse: collapse;
border-spacing: 0px;
empty-cells: show;
border: 1px solid #e9e9e9;
width: 95%;
margin-bottom: 24px;
}
.markdown>table th {
white-space: nowrap;
color: #333;
font-weight: 600;
}
.markdown>table th,
.markdown>table td {
border: 1px solid #e9e9e9;
padding: 8px 16px;
text-align: left;
}
.markdown>table th {
background: #F7F7F7;
}
.markdown blockquote {
font-size: 90%;
color: #999;
border-left: 4px solid #e9e9e9;
padding-left: 0.8em;
margin: 1em 0;
}
.markdown blockquote p {
margin: 0;
}
.markdown .anchor {
opacity: 0;
transition: opacity 0.3s ease;
margin-left: 8px;
}
.markdown .waiting {
color: #ccc;
}
.markdown h1:hover .anchor,
.markdown h2:hover .anchor,
.markdown h3:hover .anchor,
.markdown h4:hover .anchor,
.markdown h5:hover .anchor,
.markdown h6:hover .anchor {
opacity: 1;
display: inline-block;
}
.markdown>br,
.markdown>p>br {
clear: both;
}
.hljs {
display: block;
background: white;
padding: 0.5em;
color: #333333;
overflow-x: auto;
}
.hljs-comment,
.hljs-meta {
color: #969896;
}
.hljs-string,
.hljs-variable,
.hljs-template-variable,
.hljs-strong,
.hljs-emphasis,
.hljs-quote {
color: #df5000;
}
.hljs-keyword,
.hljs-selector-tag,
.hljs-type {
color: #a71d5d;
}
.hljs-literal,
.hljs-symbol,
.hljs-bullet,
.hljs-attribute {
color: #0086b3;
}
.hljs-section,
.hljs-name {
color: #63a35c;
}
.hljs-tag {
color: #333333;
}
.hljs-title,
.hljs-attr,
.hljs-selector-id,
.hljs-selector-class,
.hljs-selector-attr,
.hljs-selector-pseudo {
color: #795da3;
}
.hljs-addition {
color: #55a532;
background-color: #eaffea;
}
.hljs-deletion {
color: #bd2c00;
background-color: #ffecec;
}
.hljs-link {
text-decoration: underline;
}
/* 代码高亮 */
/* PrismJS 1.15.0
https://prismjs.com/download.html#themes=prism&languages=markup+css+clike+javascript */
/**
* prism.js default theme for JavaScript, CSS and HTML
* Based on dabblet (http://dabblet.com)
* @author Lea Verou
*/
code[class*="language-"],
pre[class*="language-"] {
color: black;
background: none;
text-shadow: 0 1px white;
font-family: Consolas, Monaco, 'Andale Mono', 'Ubuntu Mono', monospace;
text-align: left;
white-space: pre;
word-spacing: normal;
word-break: normal;
word-wrap: normal;
line-height: 1.5;
-moz-tab-size: 4;
-o-tab-size: 4;
tab-size: 4;
-webkit-hyphens: none;
-moz-hyphens: none;
-ms-hyphens: none;
hyphens: none;
}
pre[class*="language-"]::-moz-selection,
pre[class*="language-"] ::-moz-selection,
code[class*="language-"]::-moz-selection,
code[class*="language-"] ::-moz-selection {
text-shadow: none;
background: #b3d4fc;
}
pre[class*="language-"]::selection,
pre[class*="language-"] ::selection,
code[class*="language-"]::selection,
code[class*="language-"] ::selection {
text-shadow: none;
background: #b3d4fc;
}
@media print {
code[class*="language-"],
pre[class*="language-"] {
text-shadow: none;
}
}
/* Code blocks */
pre[class*="language-"] {
padding: 1em;
margin: .5em 0;
overflow: auto;
}
:not(pre)>code[class*="language-"],
pre[class*="language-"] {
background: #f5f2f0;
}
/* Inline code */
:not(pre)>code[class*="language-"] {
padding: .1em;
border-radius: .3em;
white-space: normal;
}
.token.comment,
.token.prolog,
.token.doctype,
.token.cdata {
color: slategray;
}
.token.punctuation {
color: #999;
}
.namespace {
opacity: .7;
}
.token.property,
.token.tag,
.token.boolean,
.token.number,
.token.constant,
.token.symbol,
.token.deleted {
color: #905;
}
.token.selector,
.token.attr-name,
.token.string,
.token.char,
.token.builtin,
.token.inserted {
color: #690;
}
.token.operator,
.token.entity,
.token.url,
.language-css .token.string,
.style .token.string {
color: #9a6e3a;
background: hsla(0, 0%, 100%, .5);
}
.token.atrule,
.token.attr-value,
.token.keyword {
color: #07a;
}
.token.function,
.token.class-name {
color: #DD4A68;
}
.token.regex,
.token.important,
.token.variable {
color: #e90;
}
.token.important,
.token.bold {
font-weight: bold;
}
.token.italic {
font-style: italic;
}
.token.entity {
cursor: help;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,571 @@
@font-face {
font-family: "i"; /* Project id 3157209 */
src: url('iconfont.woff2?t=1693193677535') format('woff2'),
url('iconfont.woff?t=1693193677535') format('woff'),
url('iconfont.ttf?t=1693193677535') format('truetype');
}
.i {
font-family: "i" !important;
font-size: 16px;
font-style: normal;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.ijingshigantanhao2:before {
content: "\e7f4";
}
.ishuaxin:before {
content: "\e6e0";
}
.ishuaxin1:before {
content: "\e63b";
}
.ishuaxin2:before {
content: "\e796";
}
.izixun:before {
content: "\e64e";
}
.iguzhi:before {
content: "\e65e";
}
.izixun1:before {
content: "\e645";
}
.izixun2:before {
content: "\e771";
}
.iGDP:before {
content: "\e884";
}
.imeiguzhibiao:before {
content: "\e6dc";
}
.izixun3:before {
content: "\e638";
}
.igdp:before {
content: "\e63f";
}
.iguzhi1:before {
content: "\e639";
}
.iicon_jinrongfuwu:before {
content: "\e649";
}
.iGDP1:before {
content: "\e658";
}
.iGDP2:before {
content: "\e648";
}
.ichanpin-huobijinrongfuwu:before {
content: "\e63a";
}
.ichanpin-huobijinrongfuwu1:before {
content: "\e663";
}
.izhaiquan-cijizhaiquan:before {
content: "\e671";
}
.iguzhiqiquan:before {
content: "\e7a2";
}
.iGDP3:before {
content: "\e692";
}
.izhaiquan:before {
content: "\e68e";
}
.izhaiquan1:before {
content: "\100f2";
}
.ihuidaodingbu:before {
content: "\e73c";
}
.ihuidaodingbu1:before {
content: "\e637";
}
.ixiangyouzhankai2:before {
content: "\e624";
}
.ixiangyouzhankai3:before {
content: "\e7d3";
}
.iarrow-right-fill-03:before {
content: "\e739";
}
.ixiangyouzhankai4:before {
content: "\e635";
}
.iyouzhankai-moren:before {
content: "\e636";
}
.izuoyouzhankai:before {
content: "\e65c";
}
.izuoyou:before {
content: "\e634";
}
.ijiantou_zuoyouqiehuan1:before {
content: "\eb0d";
}
.izuoyoutuo:before {
content: "\e673";
}
.izuoyoulashen:before {
content: "\e652";
}
.i10x6xiangyouzhankaisanjiao:before {
content: "\e63e";
}
.iyouzhankai:before {
content: "\e684";
}
.ixiangyouzhankai:before {
content: "\e651";
}
.ixiangyouzhankai1:before {
content: "\e693";
}
.izuoyouzhankai1:before {
content: "\ec3a";
}
.iicon-rgb_zhibiaozhinengguanli:before {
content: "\e6fa";
}
.izhibiaoguanli:before {
content: "\e72a";
}
.ixiaoshouzhibiao:before {
content: "\e664";
}
.ishoucang:before {
content: "\e630";
}
.ilevel1:before {
content: "\e631";
}
.ilevel2:before {
content: "\e632";
}
.ilevel3:before {
content: "\e633";
}
.ixingxing5:before {
content: "\e80d";
}
.ixingxing6:before {
content: "\efeb";
}
.i24gl-starEmpty:before {
content: "\e8c4";
}
.ixingxing7:before {
content: "\e75b";
}
.iicon-test1:before {
content: "\e625";
}
.iicon-test2:before {
content: "\e629";
}
.ixingxing:before {
content: "\e607";
}
.iiconzhengli-:before {
content: "\e61f";
}
.ixingxing1:before {
content: "\e870";
}
.istart:before {
content: "\e608";
}
.istar:before {
content: "\e65d";
}
.ixingxing2:before {
content: "\e621";
}
.ixingxing3:before {
content: "\e622";
}
.istart1:before {
content: "\e623";
}
.ixingxing4:before {
content: "\e62f";
}
.ishengji:before {
content: "\e62c";
}
.ishengban-shengji:before {
content: "\e643";
}
.ibanbenshengji-05:before {
content: "\e62a";
}
.iebr_shengji:before {
content: "\e61e";
}
.ifacebookfacebook52:before {
content: "\e606";
}
.itwitter-fill:before {
content: "\e63d";
}
.ireddit:before {
content: "\ea03";
}
.idiscord:before {
content: "\e617";
}
.itelegram:before {
content: "\e618";
}
.icai4:before {
content: "\e691";
}
.icai1:before {
content: "\e602";
}
.icai2:before {
content: "\e68f";
}
.idianzan:before {
content: "\e616";
}
.idianzan1:before {
content: "\e619";
}
.ifenxiang:before {
content: "\e601";
}
.icai:before {
content: "\e62e";
}
.iz-like:before {
content: "\e6bf";
}
.ixiaosanjiaodown:before {
content: "\e614";
}
.ixiaosanjiaoup:before {
content: "\e615";
}
.isousuotianchong:before {
content: "\e694";
}
.isousuo:before {
content: "\e60e";
}
.itongzhi:before {
content: "\e612";
}
.igl-folderOpen:before {
content: "\eabe";
}
.iiccandleline:before {
content: "\e613";
}
.igeren9:before {
content: "\e641";
}
.imeiyuan6:before {
content: "\e738";
}
.imeiyuan2:before {
content: "\e64b";
}
.imeiyuan3:before {
content: "\eb1a";
}
.imeiyuan_o:before {
content: "\eba1";
}
.i24gl-obelus:before {
content: "\e88f";
}
.i24gl-plusMinus:before {
content: "\e890";
}
.ishuben:before {
content: "\e60d";
}
.i-zhuzhuangtu:before {
content: "\e628";
}
.imeiyuan4:before {
content: "\e90d";
}
.ijiantou_zuoyouqiehuan:before {
content: "\e6eb";
}
.i9meiyuan:before {
content: "\e626";
}
.icode-branch:before {
content: "\e993";
}
.ishuben-book2:before {
content: "\e644";
}
.izhengfu:before {
content: "\e73d";
}
.iqianbao-copy:before {
content: "\ebb4";
}
.ixiebao:before {
content: "\e600";
}
.i30:before {
content: "\e620";
}
.ijinbi:before {
content: "\e6c8";
}
.iiconset0294:before {
content: "\e6c1";
}
.imeiyuan1:before {
content: "\e733";
}
.izhexiantu-xianxing:before {
content: "\e904";
}
.iqianbao:before {
content: "\e646";
}
.ishandian:before {
content: "\e799";
}
.ichuizi:before {
content: "\e61b";
}
.izhexiantu_o:before {
content: "\ebb3";
}
.iicon-test:before {
content: "\e603";
}
.iqianbao1:before {
content: "\e61c";
}
.itubiao:before {
content: "\e610";
}
.ithin-_home_flat:before {
content: "\e611";
}
.ibuilding:before {
content: "\e64a";
}
.itubiao1:before {
content: "\e61d";
}
.iwenjianjia:before {
content: "\e74a";
}
.ishizhong:before {
content: "\e60c";
}
.iyonghuguanli_huaban:before {
content: "\e62d";
}
.itubiao2:before {
content: "\e659";
}
.ifangzi:before {
content: "\e64f";
}
.ihammer:before {
content: "\e87c";
}
.imeiyuan:before {
content: "\e73f";
}
.irili:before {
content: "\e71a";
}
.idianchi:before {
content: "\e62b";
}
.ichangyongtubiao_dianyuanguanlixuanzhong:before {
content: "\e605";
}
.ichangyongtubiao-fuben-33:before {
content: "\e60a";
}
.ichangyongtubiao-fuben-41:before {
content: "\e60b";
}
.ichangyongtubiao-fuben-60:before {
content: "\e60f";
}
.ichangyong_faxian:before {
content: "\e6b0";
}
.ia-changyong_faxianfuben:before {
content: "\e6ba";
}
.ia-changyong_lianjiefuben:before {
content: "\e6bb";
}
.ixiaoxi:before {
content: "\e67c";
}
.iwangluo:before {
content: "\e627";
}
.ixiangji:before {
content: "\e609";
}
.ishouji:before {
content: "\e61a";
}
.idingwei:before {
content: "\e604";
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,982 @@
{
"id": "3157209",
"name": "music",
"font_family": "i",
"css_prefix_text": "i",
"description": "",
"glyphs": [
{
"icon_id": "689256",
"name": "警示 感叹号 2",
"font_class": "jingshigantanhao2",
"unicode": "e7f4",
"unicode_decimal": 59380
},
{
"icon_id": "1781545",
"name": "刷新",
"font_class": "shuaxin",
"unicode": "e6e0",
"unicode_decimal": 59104
},
{
"icon_id": "6376283",
"name": "刷新",
"font_class": "shuaxin1",
"unicode": "e63b",
"unicode_decimal": 58939
},
{
"icon_id": "28944169",
"name": "刷新",
"font_class": "shuaxin2",
"unicode": "e796",
"unicode_decimal": 59286
},
{
"icon_id": "1115097",
"name": "资讯",
"font_class": "zixun",
"unicode": "e64e",
"unicode_decimal": 58958
},
{
"icon_id": "1279431",
"name": "股指",
"font_class": "guzhi",
"unicode": "e65e",
"unicode_decimal": 58974
},
{
"icon_id": "2506223",
"name": "资讯",
"font_class": "zixun1",
"unicode": "e645",
"unicode_decimal": 58949
},
{
"icon_id": "4933454",
"name": "资讯",
"font_class": "zixun2",
"unicode": "e771",
"unicode_decimal": 59249
},
{
"icon_id": "5719208",
"name": "GDP",
"font_class": "GDP",
"unicode": "e884",
"unicode_decimal": 59524
},
{
"icon_id": "6695200",
"name": "每股指标",
"font_class": "meiguzhibiao",
"unicode": "e6dc",
"unicode_decimal": 59100
},
{
"icon_id": "7708016",
"name": "资讯",
"font_class": "zixun3",
"unicode": "e638",
"unicode_decimal": 58936
},
{
"icon_id": "11086601",
"name": "gdp",
"font_class": "gdp",
"unicode": "e63f",
"unicode_decimal": 58943
},
{
"icon_id": "12084717",
"name": "股指",
"font_class": "guzhi1",
"unicode": "e639",
"unicode_decimal": 58937
},
{
"icon_id": "12319736",
"name": "货币金融服务",
"font_class": "icon_jinrongfuwu",
"unicode": "e649",
"unicode_decimal": 58953
},
{
"icon_id": "13759988",
"name": "GDP",
"font_class": "GDP1",
"unicode": "e658",
"unicode_decimal": 58968
},
{
"icon_id": "15216920",
"name": "GDP",
"font_class": "GDP2",
"unicode": "e648",
"unicode_decimal": 58952
},
{
"icon_id": "22992379",
"name": "产品-货币金融服务",
"font_class": "chanpin-huobijinrongfuwu",
"unicode": "e63a",
"unicode_decimal": 58938
},
{
"icon_id": "23093526",
"name": "产品-货币金融服务",
"font_class": "chanpin-huobijinrongfuwu1",
"unicode": "e663",
"unicode_decimal": 58979
},
{
"icon_id": "23524296",
"name": "债券-次级债券",
"font_class": "zhaiquan-cijizhaiquan",
"unicode": "e671",
"unicode_decimal": 58993
},
{
"icon_id": "27125111",
"name": "股指期权",
"font_class": "guzhiqiquan",
"unicode": "e7a2",
"unicode_decimal": 59298
},
{
"icon_id": "29689001",
"name": "GDP",
"font_class": "GDP3",
"unicode": "e692",
"unicode_decimal": 59026
},
{
"icon_id": "32334086",
"name": "债券",
"font_class": "zhaiquan",
"unicode": "e68e",
"unicode_decimal": 59022
},
{
"icon_id": "33096080",
"name": "债券",
"font_class": "zhaiquan1",
"unicode": "100f2",
"unicode_decimal": 65778
},
{
"icon_id": "7482196",
"name": "回到顶部",
"font_class": "huidaodingbu",
"unicode": "e73c",
"unicode_decimal": 59196
},
{
"icon_id": "8644758",
"name": "回到顶部",
"font_class": "huidaodingbu1",
"unicode": "e637",
"unicode_decimal": 58935
},
{
"icon_id": "2272804",
"name": "向右展开",
"font_class": "xiangyouzhankai2",
"unicode": "e624",
"unicode_decimal": 58916
},
{
"icon_id": "4039964",
"name": "向右展开3",
"font_class": "xiangyouzhankai3",
"unicode": "e7d3",
"unicode_decimal": 59347
},
{
"icon_id": "6136815",
"name": "右展开",
"font_class": "arrow-right-fill-03",
"unicode": "e739",
"unicode_decimal": 59193
},
{
"icon_id": "8209295",
"name": "向右展开",
"font_class": "xiangyouzhankai4",
"unicode": "e635",
"unicode_decimal": 58933
},
{
"icon_id": "20120590",
"name": "右展开-默认",
"font_class": "youzhankai-moren",
"unicode": "e636",
"unicode_decimal": 58934
},
{
"icon_id": "1615178",
"name": "左右展开",
"font_class": "zuoyouzhankai",
"unicode": "e65c",
"unicode_decimal": 58972
},
{
"icon_id": "2702787",
"name": "左右",
"font_class": "zuoyou",
"unicode": "e634",
"unicode_decimal": 58932
},
{
"icon_id": "5387596",
"name": "箭头_左右切换",
"font_class": "jiantou_zuoyouqiehuan1",
"unicode": "eb0d",
"unicode_decimal": 60173
},
{
"icon_id": "7717061",
"name": "左右拖",
"font_class": "zuoyoutuo",
"unicode": "e673",
"unicode_decimal": 58995
},
{
"icon_id": "9925313",
"name": "左右拉伸",
"font_class": "zuoyoulashen",
"unicode": "e652",
"unicode_decimal": 58962
},
{
"icon_id": "15160919",
"name": "10x6向右展开三角",
"font_class": "10x6xiangyouzhankaisanjiao",
"unicode": "e63e",
"unicode_decimal": 58942
},
{
"icon_id": "16324782",
"name": "右展开",
"font_class": "youzhankai",
"unicode": "e684",
"unicode_decimal": 59012
},
{
"icon_id": "18451417",
"name": "向右展开",
"font_class": "xiangyouzhankai",
"unicode": "e651",
"unicode_decimal": 58961
},
{
"icon_id": "33475546",
"name": "向右展开",
"font_class": "xiangyouzhankai1",
"unicode": "e693",
"unicode_decimal": 59027
},
{
"icon_id": "35915355",
"name": "左右展开",
"font_class": "zuoyouzhankai1",
"unicode": "ec3a",
"unicode_decimal": 60474
},
{
"icon_id": "5950487",
"name": "icon-rgb_指标智能管理",
"font_class": "icon-rgb_zhibiaozhinengguanli",
"unicode": "e6fa",
"unicode_decimal": 59130
},
{
"icon_id": "12594593",
"name": "指标管理",
"font_class": "zhibiaoguanli",
"unicode": "e72a",
"unicode_decimal": 59178
},
{
"icon_id": "22169360",
"name": "销售指标",
"font_class": "xiaoshouzhibiao",
"unicode": "e664",
"unicode_decimal": 58980
},
{
"icon_id": "4393",
"name": "收藏",
"font_class": "shoucang",
"unicode": "e630",
"unicode_decimal": 58928
},
{
"icon_id": "1127408",
"name": "等级一",
"font_class": "level1",
"unicode": "e631",
"unicode_decimal": 58929
},
{
"icon_id": "1127409",
"name": "等级二",
"font_class": "level2",
"unicode": "e632",
"unicode_decimal": 58930
},
{
"icon_id": "1127410",
"name": "等级三",
"font_class": "level3",
"unicode": "e633",
"unicode_decimal": 58931
},
{
"icon_id": "2932864",
"name": "星星",
"font_class": "xingxing5",
"unicode": "e80d",
"unicode_decimal": 59405
},
{
"icon_id": "7296812",
"name": "星星",
"font_class": "xingxing6",
"unicode": "efeb",
"unicode_decimal": 61419
},
{
"icon_id": "7553794",
"name": "24gl-starEmpty",
"font_class": "24gl-starEmpty",
"unicode": "e8c4",
"unicode_decimal": 59588
},
{
"icon_id": "11222358",
"name": "星星",
"font_class": "xingxing7",
"unicode": "e75b",
"unicode_decimal": 59227
},
{
"icon_id": "14007852",
"name": "lv2",
"font_class": "icon-test1",
"unicode": "e625",
"unicode_decimal": 58917
},
{
"icon_id": "14007854",
"name": "lv1",
"font_class": "icon-test2",
"unicode": "e629",
"unicode_decimal": 58921
},
{
"icon_id": "1671793",
"name": "星星",
"font_class": "xingxing",
"unicode": "e607",
"unicode_decimal": 58887
},
{
"icon_id": "4608572",
"name": "活动,五角星,星",
"font_class": "iconzhengli-",
"unicode": "e61f",
"unicode_decimal": 58911
},
{
"icon_id": "8289033",
"name": "星星",
"font_class": "xingxing1",
"unicode": "e870",
"unicode_decimal": 59504
},
{
"icon_id": "8406048",
"name": "五角星",
"font_class": "start",
"unicode": "e608",
"unicode_decimal": 58888
},
{
"icon_id": "19031627",
"name": "星星",
"font_class": "star",
"unicode": "e65d",
"unicode_decimal": 58973
},
{
"icon_id": "20188729",
"name": "星星",
"font_class": "xingxing2",
"unicode": "e621",
"unicode_decimal": 58913
},
{
"icon_id": "22402157",
"name": "星星",
"font_class": "xingxing3",
"unicode": "e622",
"unicode_decimal": 58914
},
{
"icon_id": "24106208",
"name": "五角星",
"font_class": "start1",
"unicode": "e623",
"unicode_decimal": 58915
},
{
"icon_id": "33438709",
"name": "星星",
"font_class": "xingxing4",
"unicode": "e62f",
"unicode_decimal": 58927
},
{
"icon_id": "3777786",
"name": "升级",
"font_class": "shengji",
"unicode": "e62c",
"unicode_decimal": 58924
},
{
"icon_id": "8802621",
"name": "升版-升级",
"font_class": "shengban-shengji",
"unicode": "e643",
"unicode_decimal": 58947
},
{
"icon_id": "14915034",
"name": "版本升级",
"font_class": "banbenshengji-05",
"unicode": "e62a",
"unicode_decimal": 58922
},
{
"icon_id": "16120758",
"name": "ebr_升级",
"font_class": "ebr_shengji",
"unicode": "e61e",
"unicode_decimal": 58910
},
{
"icon_id": "1188726",
"name": "facebook_facebook52",
"font_class": "facebookfacebook52",
"unicode": "e606",
"unicode_decimal": 58886
},
{
"icon_id": "9512669",
"name": "twitter",
"font_class": "twitter-fill",
"unicode": "e63d",
"unicode_decimal": 58941
},
{
"icon_id": "11983504",
"name": "reddit",
"font_class": "reddit",
"unicode": "ea03",
"unicode_decimal": 59907
},
{
"icon_id": "26267561",
"name": "discord",
"font_class": "discord",
"unicode": "e617",
"unicode_decimal": 58903
},
{
"icon_id": "26267569",
"name": "telegram",
"font_class": "telegram",
"unicode": "e618",
"unicode_decimal": 58904
},
{
"icon_id": "8358884",
"name": "踩2",
"font_class": "cai4",
"unicode": "e691",
"unicode_decimal": 59025
},
{
"icon_id": "4485651",
"name": "踩",
"font_class": "cai1",
"unicode": "e602",
"unicode_decimal": 58882
},
{
"icon_id": "8358883",
"name": "踩1",
"font_class": "cai2",
"unicode": "e68f",
"unicode_decimal": 59023
},
{
"icon_id": "9149167",
"name": "点赞",
"font_class": "dianzan",
"unicode": "e616",
"unicode_decimal": 58902
},
{
"icon_id": "9686817",
"name": "点赞",
"font_class": "dianzan1",
"unicode": "e619",
"unicode_decimal": 58905
},
{
"icon_id": "68268",
"name": "分享",
"font_class": "fenxiang",
"unicode": "e601",
"unicode_decimal": 58881
},
{
"icon_id": "848624",
"name": "cai",
"font_class": "cai",
"unicode": "e62e",
"unicode_decimal": 58926
},
{
"icon_id": "849179",
"name": "点赞",
"font_class": "z-like",
"unicode": "e6bf",
"unicode_decimal": 59071
},
{
"icon_id": "166176",
"name": "小三角down",
"font_class": "xiaosanjiaodown",
"unicode": "e614",
"unicode_decimal": 58900
},
{
"icon_id": "166177",
"name": "小三角up",
"font_class": "xiaosanjiaoup",
"unicode": "e615",
"unicode_decimal": 58901
},
{
"icon_id": "145727",
"name": "搜索_填充",
"font_class": "sousuotianchong",
"unicode": "e694",
"unicode_decimal": 59028
},
{
"icon_id": "5034919",
"name": "搜索",
"font_class": "sousuo",
"unicode": "e60e",
"unicode_decimal": 58894
},
{
"icon_id": "9826654",
"name": "通知",
"font_class": "tongzhi",
"unicode": "e612",
"unicode_decimal": 58898
},
{
"icon_id": "7594806",
"name": "24gl-folderOpen",
"font_class": "gl-folderOpen",
"unicode": "eabe",
"unicode_decimal": 60094
},
{
"icon_id": "659106",
"name": "ic_candleline",
"font_class": "iccandleline",
"unicode": "e613",
"unicode_decimal": 58899
},
{
"icon_id": "741931",
"name": "个人 (9)",
"font_class": "geren9",
"unicode": "e641",
"unicode_decimal": 58945
},
{
"icon_id": "844642",
"name": "美元6",
"font_class": "meiyuan6",
"unicode": "e738",
"unicode_decimal": 59192
},
{
"icon_id": "3762533",
"name": "美元",
"font_class": "meiyuan2",
"unicode": "e64b",
"unicode_decimal": 58955
},
{
"icon_id": "5387627",
"name": "美元",
"font_class": "meiyuan3",
"unicode": "eb1a",
"unicode_decimal": 60186
},
{
"icon_id": "5387968",
"name": "美元_o",
"font_class": "meiyuan_o",
"unicode": "eba1",
"unicode_decimal": 60321
},
{
"icon_id": "7542786",
"name": "24gl-obelus",
"font_class": "24gl-obelus",
"unicode": "e88f",
"unicode_decimal": 59535
},
{
"icon_id": "7542792",
"name": "24gl-plusMinus",
"font_class": "24gl-plusMinus",
"unicode": "e890",
"unicode_decimal": 59536
},
{
"icon_id": "8922368",
"name": "书本",
"font_class": "shuben",
"unicode": "e60d",
"unicode_decimal": 58893
},
{
"icon_id": "11227027",
"name": "4-柱状图",
"font_class": "-zhuzhuangtu",
"unicode": "e628",
"unicode_decimal": 58920
},
{
"icon_id": "12387153",
"name": "美元",
"font_class": "meiyuan4",
"unicode": "e90d",
"unicode_decimal": 59661
},
{
"icon_id": "15685587",
"name": "箭头_左右切换",
"font_class": "jiantou_zuoyouqiehuan",
"unicode": "e6eb",
"unicode_decimal": 59115
},
{
"icon_id": "16589891",
"name": "美元",
"font_class": "9meiyuan",
"unicode": "e626",
"unicode_decimal": 58918
},
{
"icon_id": "18170243",
"name": "代码,分支,分流,分叉",
"font_class": "code-branch",
"unicode": "e993",
"unicode_decimal": 59795
},
{
"icon_id": "19009843",
"name": "书本-book2",
"font_class": "shuben-book2",
"unicode": "e644",
"unicode_decimal": 58948
},
{
"icon_id": "21732462",
"name": "正负",
"font_class": "zhengfu",
"unicode": "e73d",
"unicode_decimal": 59197
},
{
"icon_id": "29890537",
"name": "钱包",
"font_class": "qianbao-copy",
"unicode": "ebb4",
"unicode_decimal": 60340
},
{
"icon_id": "5136",
"name": "鞋/包",
"font_class": "xiebao",
"unicode": "e600",
"unicode_decimal": 58880
},
{
"icon_id": "49730",
"name": "美元",
"font_class": "30",
"unicode": "e620",
"unicode_decimal": 58912
},
{
"icon_id": "313854",
"name": "金币",
"font_class": "jinbi",
"unicode": "e6c8",
"unicode_decimal": 59080
},
{
"icon_id": "554343",
"name": "美元",
"font_class": "iconset0294",
"unicode": "e6c1",
"unicode_decimal": 59073
},
{
"icon_id": "844637",
"name": "美元1",
"font_class": "meiyuan1",
"unicode": "e733",
"unicode_decimal": 59187
},
{
"icon_id": "1727596",
"name": "409折线图-线性",
"font_class": "zhexiantu-xianxing",
"unicode": "e904",
"unicode_decimal": 59652
},
{
"icon_id": "2668424",
"name": "钱包",
"font_class": "qianbao",
"unicode": "e646",
"unicode_decimal": 58950
},
{
"icon_id": "4294114",
"name": "闪电",
"font_class": "shandian",
"unicode": "e799",
"unicode_decimal": 59289
},
{
"icon_id": "5048147",
"name": "锤子",
"font_class": "chuizi",
"unicode": "e61b",
"unicode_decimal": 58907
},
{
"icon_id": "5388007",
"name": "折线图_o",
"font_class": "zhexiantu_o",
"unicode": "ebb3",
"unicode_decimal": 60339
},
{
"icon_id": "5650485",
"name": "房子",
"font_class": "icon-test",
"unicode": "e603",
"unicode_decimal": 58883
},
{
"icon_id": "5969564",
"name": "钱包",
"font_class": "qianbao1",
"unicode": "e61c",
"unicode_decimal": 58908
},
{
"icon_id": "6264220",
"name": "图标44",
"font_class": "tubiao",
"unicode": "e610",
"unicode_decimal": 58896
},
{
"icon_id": "6953121",
"name": "房子01",
"font_class": "thin-_home_flat",
"unicode": "e611",
"unicode_decimal": 58897
},
{
"icon_id": "9525397",
"name": "建筑",
"font_class": "building",
"unicode": "e64a",
"unicode_decimal": 58954
},
{
"icon_id": "10352335",
"name": "图表",
"font_class": "tubiao1",
"unicode": "e61d",
"unicode_decimal": 58909
},
{
"icon_id": "11520058",
"name": "文件夹",
"font_class": "wenjianjia",
"unicode": "e74a",
"unicode_decimal": 59210
},
{
"icon_id": "11855585",
"name": "时钟",
"font_class": "shizhong",
"unicode": "e60c",
"unicode_decimal": 58892
},
{
"icon_id": "12753449",
"name": "用户管理",
"font_class": "yonghuguanli_huaban",
"unicode": "e62d",
"unicode_decimal": 58925
},
{
"icon_id": "13638712",
"name": "图表",
"font_class": "tubiao2",
"unicode": "e659",
"unicode_decimal": 58969
},
{
"icon_id": "15819520",
"name": "房子",
"font_class": "fangzi",
"unicode": "e64f",
"unicode_decimal": 58959
},
{
"icon_id": "18165178",
"name": "槌子,锤子,敲打,工具",
"font_class": "hammer",
"unicode": "e87c",
"unicode_decimal": 59516
},
{
"icon_id": "21732485",
"name": "美元",
"font_class": "meiyuan",
"unicode": "e73f",
"unicode_decimal": 59199
},
{
"icon_id": "5483674",
"name": "日历",
"font_class": "rili",
"unicode": "e71a",
"unicode_decimal": 59162
},
{
"icon_id": "9982141",
"name": "电池",
"font_class": "dianchi",
"unicode": "e62b",
"unicode_decimal": 58923
},
{
"icon_id": "10283803",
"name": "电源管理",
"font_class": "changyongtubiao_dianyuanguanlixuanzhong",
"unicode": "e605",
"unicode_decimal": 58885
},
{
"icon_id": "20733427",
"name": "验证码",
"font_class": "changyongtubiao-fuben-33",
"unicode": "e60a",
"unicode_decimal": 58890
},
{
"icon_id": "20733429",
"name": "分类",
"font_class": "changyongtubiao-fuben-41",
"unicode": "e60b",
"unicode_decimal": 58891
},
{
"icon_id": "20733455",
"name": "券",
"font_class": "changyongtubiao-fuben-60",
"unicode": "e60f",
"unicode_decimal": 58895
},
{
"icon_id": "24330605",
"name": "常用_发现",
"font_class": "changyong_faxian",
"unicode": "e6b0",
"unicode_decimal": 59056
},
{
"icon_id": "24333891",
"name": "常用_发现 副本",
"font_class": "a-changyong_faxianfuben",
"unicode": "e6ba",
"unicode_decimal": 59066
},
{
"icon_id": "24333904",
"name": "常用_链接 副本",
"font_class": "a-changyong_lianjiefuben",
"unicode": "e6bb",
"unicode_decimal": 59067
},
{
"icon_id": "1212315",
"name": "消息",
"font_class": "xiaoxi",
"unicode": "e67c",
"unicode_decimal": 59004
},
{
"icon_id": "2378431",
"name": "网络",
"font_class": "wangluo",
"unicode": "e627",
"unicode_decimal": 58919
},
{
"icon_id": "5093354",
"name": "相机",
"font_class": "xiangji",
"unicode": "e609",
"unicode_decimal": 58889
},
{
"icon_id": "6570225",
"name": "手机",
"font_class": "shouji",
"unicode": "e61a",
"unicode_decimal": 58906
},
{
"icon_id": "7127376",
"name": "定位",
"font_class": "dingwei",
"unicode": "e604",
"unicode_decimal": 58884
}
]
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.9 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 530 KiB

BIN
yq/src/assets/imgs/why.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.7 KiB

BIN
yq/src/assets/logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

5
yq/src/assets/style.css Normal file
View File

@@ -0,0 +1,5 @@
/* 全局样式 */
/* 首字母大写 */
.capitalize {
text-transform: capitalize;
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,60 @@
<template>
<div class="hello">
<h1>{{ msg }}</h1>
<p>
For a guide and recipes on how to configure / customize this project,<br>
check out the
<a href="https://cli.vuejs.org" target="_blank" rel="noopener">vue-cli documentation</a>.
</p>
<h3>Installed CLI Plugins</h3>
<ul>
<li><a href="https://github.com/vuejs/vue-cli/tree/dev/packages/%40vue/cli-plugin-babel" target="_blank" rel="noopener">babel</a></li>
<li><a href="https://github.com/vuejs/vue-cli/tree/dev/packages/%40vue/cli-plugin-router" target="_blank" rel="noopener">router</a></li>
<li><a href="https://github.com/vuejs/vue-cli/tree/dev/packages/%40vue/cli-plugin-vuex" target="_blank" rel="noopener">vuex</a></li>
<li><a href="https://github.com/vuejs/vue-cli/tree/dev/packages/%40vue/cli-plugin-eslint" target="_blank" rel="noopener">eslint</a></li>
</ul>
<h3>Essential Links</h3>
<ul>
<li><a href="https://vuejs.org" target="_blank" rel="noopener">Core Docs</a></li>
<li><a href="https://forum.vuejs.org" target="_blank" rel="noopener">Forum</a></li>
<li><a href="https://chat.vuejs.org" target="_blank" rel="noopener">Community Chat</a></li>
<li><a href="https://twitter.com/vuejs" target="_blank" rel="noopener">Twitter</a></li>
<li><a href="https://news.vuejs.org" target="_blank" rel="noopener">News</a></li>
</ul>
<h3>Ecosystem</h3>
<ul>
<li><a href="https://router.vuejs.org" target="_blank" rel="noopener">vue-router</a></li>
<li><a href="https://vuex.vuejs.org" target="_blank" rel="noopener">vuex</a></li>
<li><a href="https://github.com/vuejs/vue-devtools#vue-devtools" target="_blank" rel="noopener">vue-devtools</a></li>
<li><a href="https://vue-loader.vuejs.org" target="_blank" rel="noopener">vue-loader</a></li>
<li><a href="https://github.com/vuejs/awesome-vue" target="_blank" rel="noopener">awesome-vue</a></li>
</ul>
</div>
</template>
<script>
export default {
name: 'HelloWorld',
props: {
msg: String
}
}
</script>
<!-- Add "scoped" attribute to limit CSS to this component only -->
<style scoped lang="scss">
h3 {
margin: 40px 0 0;
}
ul {
list-style-type: none;
padding: 0;
}
li {
display: inline-block;
margin: 0 10px;
}
a {
color: #42b983;
}
</style>

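HelloWorld.vue is the untouched vue-cli scaffold component; the only API it exposes is the msg prop. A minimal sketch of mounting it and passing that prop, assuming Vue 2 (which the Element UI usage elsewhere in this diff suggests) and the default vue-cli file layout:

import Vue from "vue";
import HelloWorld from "./components/HelloWorld.vue"; // assumed default vue-cli path

// Render the scaffold component and feed it the `msg` prop it declares.
new Vue({
  render: (h) => h(HelloWorld, { props: { msg: "Welcome to Your Vue.js App" } }),
}).$mount("#app");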
View File

@@ -0,0 +1,613 @@
<template>
<div class="leaveMessage" >
<h3>{{ $t(`message.messageBoard`) }}</h3>
<span class="wantMessage" @click="handelWantMessage" v-show="WantShow">
{{ $t(`message.message`) }}</span
>
    <!-- No message posted yet: input box -->
<div class="inputContent" v-show="leaveShow">
<el-input
type="textarea"
:autosize="{ minRows: 2, maxRows: 5 }"
:placeholder="$t(`message.said`)"
v-model="textarea"
resize="none"
>
</el-input>
<el-button @click="handelPublish" class="publish">{{
$t(`message.publish`)
}}</el-button>
</div>
    <!-- Posted messages -->
<div class="leaveContent" v-show="messageContentShow">
      <!-- Username -->
<el-row>
<el-col class="user" v-for="item in messageContent" :key="item.id">
<div class="picture">
<img
src="https://hbimg.huabanimg.com/94d26a8255b902cd27b98da5e4518217ebc437892616-RZJfHt_fw658"
alt=""
/>
</div>
<div class="name">
            <!-- First level of the message -->
<div class="firstLayer">
<span>{{ item.userName }}</span>
<span>{{ item.id }} {{ $t(`message.building`) }}</span>
              <!-- Message content -->
<p>{{ item.userMsg }}</p>
              <!-- Reply -->
<div class="reply">
<span>{{handelTime(item.userDate)}}</span>
<span @click="handelReply(item)">{{
$t(`message.reply`)
}}</span>
</div>
</div>
            <!-- Reply layer -->
<div v-if="item.responList[0]">
<div v-for="replyItem in getData(item.responList)" :key="replyItem.id">
<div class="replyMessage" v-for="replySubItem in replyItem" :key="replySubItem.responid">
<div>
<img
src="https://hbimg.huabanimg.com/94d26a8255b902cd27b98da5e4518217ebc437892616-RZJfHt_fw658"
alt=""
/>
</div>
<div class="replyContent" >
<span>{{replySubItem.responser}}</span>
<span>{{replySubItem.respon}}</span>
<p class="time">
{{ handelTime(replySubItem.responDate) }}
<span @click="handelReply(item)">{{
$t(`message.reply`)
}}</span>
</p>
</div>
</div>
</div>
</div>
            <!-- Input box shown below a message being replied to -->
<div class="replyInput" v-if="replyId == item.id">
<el-input
v-model="reply"
:placeholder="$t(`message.said1`)"
></el-input>
<el-button class="publish" @click="handelDefiniteReply">{{
$t(`message.determine`)
}}</el-button>
<el-button class="replyCancel" @click="handelCancelReply">{{
$t(`message.cancel`)
}}</el-button>
</div>
</div>
</el-col>
</el-row>
</div>
    <!-- Like / share bar -->
<div class="praise">
<span>{{ $t(`message.help`) }} </span>
<span @click="handelPraise"
><i class="i iz-like"></i>{{ praiseNum }}</span
>
<span><i class="i icai" @click="handelPoor"></i> {{ poorNum }}</span>
<i class="i itwitter-fill bird" @click="handelTwitter"></i>
<i class="i ifacebookfacebook52 facebook" @click="handelFacebook"></i>
<i class="i itelegram bird" @click="handelTelegram"></i>
<i class="i ireddit Reddit" @click="handelReddit"></i>
<!-- <span @click="handelShare"> <i class="i ifenxiang"></i></span> -->
</div>
    <el-dialog title="Choose how to share" :visible.sync="dialogVisibleShare" width="30%">
      <div class="shareBox">
        <p @click="handelTwitter">Share this page to Twitter</p>
        <p @click="handelFacebook">Share this page to Facebook</p>
        <p @click="handelTelegram">Share this page to Telegram</p>
        <p @click="handelReddit">Share this page to Reddit</p>
        <p>Share this page to Discord</p>
      </div>
<!-- <div class="shareBtn">
<el-button @click="dialogVisibleShare = false"> </el-button>
<el-button type="primary" @click="dialogVisibleShare = false"
> </el-button
>
</div> -->
</el-dialog>
</div>
</template>
<script>
import {
fetchMessageBoard,
submitMessage,
responMsg,
getPraise,
getClickPraisePoor,
} from "../api/messageBoard";
export default {
data() {
return {
leaveShow: true,
textarea: "",
messageContent: [
// {
// id: 1,
// userMsg: "我是第一条留言",
// userName: "留言人",
// msgDate: "2009-01-09T08:00:00.000+00:00",
// responList: {
// responId: 3,
// respon: "管理员回复此留言的内容",
// responser: "管理员",
// toUser: "留言人",
// responDate: "2009-01-09T08:01:00.000+00:00",
// responList: {
// responId: 4,
// respon: "用户回复管理员答复的内容",
// responser: "留言人",
// toUser: "留言人",
// responDate: "2009-01-09T08:02:00.000+00:00",
// responList: null,
// },
// },
// },
// {
// id: 2,
// userMsg: "我是第二条留言",
// userName: "留言人",
// msgDate: "2009-01-09T09:00:00.000+00:00",
// responList: null,
// },
],
reply: "",
WantShow: false,
messageContentShow: false,
messageBoard: {
        // request parameters for fetching message history
page: "",
coinType: "",
pageNum: 1,
pagesize: 10,
},
pagelabel: "",
submitMsg: {
        // parameters for posting a message
message: "",
page: "",
coinType: "",
},
replyId: "",
responMsg: {
        // parameters for replying to a message
message: "",
msg_id: "",
},
praiseNum: 0,
praiseFlag: false,
poorNum: 0,
poorFlag: false,
dialogVisibleShare: false,
twitterURL:"",
pageUrl:"",//网站当前页面地址
telegramURL:"",
};
},
watch: {
    pagelabel() { // when the user navigates to a different page, request that page's data
this.fetchDefaultMessage(this.messageBoard);
this.fetchPraise({page:this.messageBoard.page,coinType:this.messageBoard.coinType})
},
},
mounted() {
    this.messageBoard.page = JSON.parse(localStorage.getItem("params")).label; // page the user is currently on
    this.messageBoard.coinType = localStorage.getItem("radius"); // current coin type
    this.fetchDefaultMessage(this.messageBoard);
    this.fetchPraise({ page: this.messageBoard.page, coinType: this.messageBoard.coinType }); // fetch historical like / dislike counts
this.pageUrl = window.location.href
    // listen for localStorage changes and re-assign the values
window.addEventListener("setItem", (e) => {
this.messageBoard.page =JSON.parse(localStorage.getItem("params")).label
this.messageBoard.coinType = localStorage.getItem("radius");
this.pagelabel = JSON.parse(localStorage.getItem("params")).label
});
},
  methods: {
    // fetch the message history
    async fetchDefaultMessage(data) {
      const list = await fetchMessageBoard(data);
      this.messageContent = list.data.data.data;
      if (this.messageContent[0]) { // if there are existing messages, hide the default input box
this.messageContentShow = true;
this.WantShow = true;
this.leaveShow = false;
}else{
this.leaveShow = true;
this.WantShow = false;
this.messageContentShow = false;
}
},
    // post a new message
    async fetchSubmitMessage(data) {
      const list = await submitMessage(data);
      // after posting, re-fetch all messages
      this.fetchDefaultMessage(this.messageBoard);
    },
    // reply to a message
    async fetchResponMsg(data) {
      const list = await responMsg(data);
      // after replying, re-fetch all messages so they re-render
      this.fetchDefaultMessage(this.messageBoard);
    },
    // fetch the historical like / dislike counts
async fetchPraise(data){
const list = await getPraise(data)
this.praiseNum = list.data.data.likeNum
this.poorNum = list.data.data.choose
},
    // send data to the backend after a like / dislike click
async fetchClickNum(data){
const list = await getClickPraisePoor(data)
},
    // click the "I want to leave a message" link at the top
    handelWantMessage() {
      this.leaveShow = true;
    },
    // click to publish a message
handelPublish() {
// let id = 1;
// let obj = {
// id: id++,
// message: this.textarea,
// };
// id = obj.id;
// this.messageContent.push(obj);
this.leaveShow = false;
this.WantShow = true;
// -------------------
this.submitMsg.message = this.textarea;
this.submitMsg.page = this.messageBoard.page;
this.submitMsg.coinType = this.messageBoard.coinType;
this.fetchSubmitMessage(this.submitMsg);
this.textarea = "";
},
    // click to reply to a message
    handelReply(data) {
      this.replyId = data.id; // the id decides which reply input box is shown
    },
    // cancel replying
handelCancelReply() {
this.replyId = "";
},
    // confirm the reply
handelDefiniteReply() {
this.responMsg.message = this.reply;
this.responMsg.msg_id = this.replyId;
this.fetchResponMsg(this.responMsg);
this.reply =""
},
    // like
handelPraise() {
this.praiseFlag = !this.praiseFlag;
if (this.praiseFlag) {
this.praiseNum++;
        this.fetchClickNum({ page: this.messageBoard.page, coinType: this.messageBoard.coinType, choose: 1 }); // send the like / dislike choice to the backend
} else {
this.fetchClickNum({page:this.messageBoard.page,coinType:this.messageBoard.coinType,choose:0})
this.praiseNum--;
}
      // re-fetch the historical like count
      this.fetchPraise({ page: this.messageBoard.page, coinType: this.messageBoard.coinType });
    },
    // dislike
handelPoor() {
this.poorFlag = !this.poorFlag;
if (this.poorFlag) {
this.poorNum++;
this.fetchClickNum({page:this.messageBoard.page,coinType:this.messageBoard.coinType,choose:2})
} else {
this.poorNum--;
this.fetchClickNum({page:this.messageBoard.page,coinType:this.messageBoard.coinType,choose:0})
}
      // re-fetch the historical like / dislike counts
      this.fetchPraise({ page: this.messageBoard.page, coinType: this.messageBoard.coinType });
    },
    // click share
    handelShare() {
      this.dialogVisibleShare = true;
    },
    // share to Twitter
handelTwitter(){
this.twitterURL=`http://twitter.com/share?text=${this.$t(this.messageBoard.page)}&url=${this.pageUrl}`
window.open(this.twitterURL)
},
    // share to Facebook
handelFacebook(){
this.facebookURL=`https://www.facebook.com/sharer.php?u=${this.pageUrl}`
window.open(this.facebookURL)
},
    // share to Telegram
handelTelegram(){
this.telegramURL= `https://t.me/share?url=${this.pageUrl}&text=${this.$t(this.messageBoard.page)}`
window.open(this.telegramURL)
},
    // share to Reddit
handelReddit(){
this.redditURL= `http://www.reddit.com/submit?url=${this.pageUrl}&title=${this.$t(this.messageBoard.page)}`
window.open(this.redditURL)
},
    // flatten the nested reply list of a message
getData(data){
let newData =[]
this.handelArrList(data,newData)
return newData
},
    // walk the nested responList chain recursively
handelArrList(data,newData) {
let newObj ={}
Object.keys(data).forEach(item=>{
newObj[item]=data[item]
})
newData.push(newObj)
if (!newObj.responList) return
this.handelArrList(newObj.responList,newData)
},
    // format the timestamp string
handelTime(time){
return `${time.split("T")[0]} ${time.split("T")[1].split(".")[0]}`
}
},
};
</script>
<style lang="scss" scoped>
* {
margin: 0px;
padding: 0px;
box-sizing: border-box;
}
.leaveMessage {
margin-top: 5px;
position: relative;
width: 100%;
min-height: 200px;
// height: 500px;
background: #e5e5e5;
padding: 0px 5px;
padding-bottom: 50px;
// background: rgba(247,147,26,0.1);
border: 1px solid rgba(0, 0, 0, 0.1);
// background: red;
// outline: 1px solid red;
h3 {
margin: 0px;
display: inline-block;
// border-bottom: 1px solid rgba(0, 0, 0, 0.2);
}
.wantMessage {
margin-left: 20px;
font-size: 14px;
color: #0098d8;
cursor: pointer;
}
.leaveContent {
width: 100%;
padding: 10px 0px;
// background: papayawhip;
margin-top: 10px;
border-top: 1px solid rgba(0, 0, 0, 0.1);
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
.user {
// outline: 1px solid rgba(0, 0, 0, 0.1);
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
padding: 10px 0px;
display: flex;
justify-content: space-around;
margin-top: 5px;
.picture {
padding: 10px;
img {
width: 40px;
height: 40px;
}
}
}
.name {
flex: 1;
// outline: 1px solid red;
font-size: 13px;
padding: 0px 20px;
.firstLayer{
// outline: 1px solid red;
border-bottom: 1px solid rgba(0, 0, 0, 0.1);
padding: 10px 0px;
}
span:nth-of-type(1) {
color: #0098d8;
}
span:nth-of-type(2) {
color: rgba(0, 0, 0, 0.6);
margin-left: 20px;
font-size: 13px;
}
p {
min-height: 20px; // outline: 1px solid red;
font-size: 13px;
margin-top: 10px;
}
.reply {
font-size: 12px;
span:nth-of-type(1) {
color: rgba(0, 0, 0, 0.6);
}
span:nth-of-type(2) {
color: #0098d8;
cursor: pointer;
}
}
}
}
.praise {
position: absolute;
bottom: 10px;
padding: 0px 10px;
color: rgba(0, 0, 0, 0.6);
// outline: 1px solid red;
span {
display: inline-block;
line-height: 30px;
font-weight: 600;
font-size: 15px;
// outline: 1px solid red;
}
i {
margin-left: 20px;
font-size: 18px;
cursor: pointer;
}
}
  // default state (no messages yet)
.inputContent {
// outline: 1px solid red;
// border: 1PX solid #ccc;
margin-top: 5px;
.publish {
padding: 5px 20px;
margin-top: 10px;
background: #7b8c9e;
color: #fff;
}
}
  // replies to a message
.replyMessage {
// outline: 1px solid red;
// height: 70px;
padding: 5px;
display: flex;
img {
width: 40px;
height: 40px;
}
.replyContent {
padding: 0px 10px;
span:nth-of-type(2) {
color: rgba(0, 0, 0, 1);
font-size: 14px;
}
.time {
color: rgba(0, 0, 0, 0.6);
height: 20px;
// margin: 0;
span {
cursor: pointer;
}
}
}
}
// .replyMessageBox{
// }
  // input box for replying to a message
.replyInput {
margin-top: 10px;
width: 60%;
.publish {
padding: 5px 10px;
margin-top: 10px;
background: #7b8c9e;
color: #fff;
}
.replyCancel {
padding: 5px 10px;
margin-top: 10px;
background: #7b8c9e;
color: #fff;
}
}
}
.passwordBox {
/* background: #b3c0d1; */
width: 70%;
margin: 0 auto;
z-index: 99998 !important;
}
// share picker dialog
.shareBox{
// border: 1px solid red;
display: flex;
flex-direction: column;
p{
width: 100%;
border: 1px solid rgba(0, 0, 0, 0.1);
height: 40px;
margin-top: 5px;
text-align: center;
line-height: 40px;
cursor: pointer;
}
p:hover{
color:cadetblue;
background: floralwhite;
}
}
.shareBtn{
// border: 1px solid red;
margin-top: 20px;
display: flex;
height: 30px;
justify-content: right;
.el-button{
// border: 1px solid red;
width: 80px;
}
}
.bird{
// border:1px solid red;
color:#1d9bf0;
font-size: 20px !important;
}
.facebook{
color:#1d9bf0;
font-size: 20px !important;
}
.Reddit{
color:#ff4500;
font-size: 20px !important;
}
</style>

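The message-board component above imports fetchMessageBoard, submitMessage, responMsg, getPraise and getClickPraisePoor from ../api/messageBoard, whose implementation is not visible in this view. A minimal sketch of what such a module could look like, assuming an axios client; the base URL and endpoint paths are placeholders, not the project's real routes:

// Hypothetical sketch of ../api/messageBoard; paths below are illustrative only.
import axios from "axios";

const request = axios.create({ baseURL: "/api", timeout: 10000 });

// Fetch paged message history for a page / coin type.
export const fetchMessageBoard = (params) => request.get("/message/list", { params });

// Post a new message.
export const submitMessage = (data) => request.post("/message/submit", data);

// Reply to an existing message; msg_id identifies the parent message.
export const responMsg = (data) => request.post("/message/respond", data);

// Read the historical like / dislike counts for a page / coin type.
export const getPraise = (params) => request.get("/message/praise", { params });

// Report a like (choose: 1), dislike (choose: 2) or reset (choose: 0).
export const getClickPraisePoor = (params) => request.get("/message/praise/click", { params });

The component reads responses as list.data.data..., that is, the axios response body is expected to wrap its payload in a data field; whatever the real module does, it has to return the raw axios promise for that access pattern to work.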
View File

@@ -0,0 +1,126 @@
<template>
<div class="footerMain">
<el-row>
<el-col >
<button class="navButton" v-for="item in menuItem" :key="item.value">{{item.label}}</button>
</el-col>
</el-row>
<el-row>
<el-col>
<img class="footerImg" src="../../public/img/f2.png" alt="">
<img class="footerImg" src="../../public/img/f1.png" alt="">
<img class="footerImg" src="../../public/img/f3.png" alt="">
</el-col>
</el-row>
<el-row>
<el-col>
<span class="version">©2022</span>
</el-col>
</el-row>
</div>
</template>
<script>
export default {
data(){
return{
menuItem:[
{
value:1,
label:"Home"
},
{
value:2,
label:"Pricing"
},
{
value:3,
label:"Catalog"
},
{
value:4,
label:"Insights"
},
{
value:5,
label:"Forum"
},
{
value:6,
label:"Documentation"
},
{
value:7,
label:"Academy"
},
{
value:8,
label:"Status"
},
{
value:9,
label:"Changelog"
},
{
value:10,
label:"Careers"
},
{
value:11,
label:"Contact"
},
{
value:12,
label:"Privacy Notice"
},
{
value:13,
label:"Terms & Conditions"
},
{
value:14,
label:"Impressum"
},
]
}
}
};
</script>
<style lang="scss" scoped>
.footerMain {
width: 100%;
height: 100%;
// outline: 1px solid red;
display: flex;
flex-direction: column;
justify-content: space-evenly;
}
.el-col{
// outline: 1px solid red;
display: flex;
align-items: center;
height: 28px;
}
.navButton{
font-size: 12px;
margin-left: 10px;
background: #333333;
color: #fff;
border: none;
padding: 0px;
cursor: pointer;
}
.navButton:hover{
border-bottom: 1px solid #fff;
}
.footerImg{
width: 15px;
margin-left: 10px;
cursor: pointer;
}
.version{
font-size: 12px;
margin-left: 10px;
}
</style>

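The footer buttons above only render their labels; no click handler is attached. A small sketch of how they could navigate, under the assumption that each menuItem entry gains a hypothetical path field and that vue-router is available (it is listed among the installed CLI plugins earlier in this diff):

// Hypothetical: give each entry a path, e.g. { value: 1, label: "Home", path: "/" },
// then navigate on click. this.$router assumes vue-router is installed.
export default {
  methods: {
    handleFooterClick(item) {
      if (!item.path) return; // entries without a path stay inert
      if (this.$route.path !== item.path) {
        this.$router.push(item.path); // skip redundant navigation to the current route
      }
    },
  },
};

The template hook would then be @click="handleFooterClick(item)" on the existing navButton element.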
View File

@@ -0,0 +1,179 @@
<template>
<div class="dropdownBox">
<div
style="
height: 100%;
border-right: 1px solid #ccc;
display: flex;
align-items: center;
padding-right: 10px;
flex-direction: column;
"
>
<span
style="
font-size: 12px;
color: rgba(0, 0, 0, 0.8);
margin-bottom: 3px;
display: inline-block;
width: 100px;
padding-left: 10px;
box-sizing: border-box;
"
>Resolution</span
>
<el-dropdown @command="handleCommand" style="width: 120px">
<span
class="el-dropdown-link"
style="font-size: 12px; display: inline-block;margin-left: 18px;"
>
{{ $t(activeValue) }}
<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown">
<!-- :disabled="item.grade >= grade" -->
<el-dropdown-item
:disabled="item.disabled"
v-for="item in dataSelection" :key="item.label" :command="item.value" >{{ $t(item.label) }}</el-dropdown-item>
</el-dropdown-menu>
</el-dropdown>
</div>
</div>
</template>
<script>
export default {
props: {
dataSelection: {
type: Array,
required: true,
},
onSelect: {
type: Function,
required: true,
},
    realTimeData: {
      type: Boolean,
      default: true,
    },
},
data() {
return {
activeValue: "home.day1", //1天
roleKey: "",
grade: "",
};
},
watch: {
realTimeData(val) {
if (!val) {
this.activeValue = "home.day1";
}
},
roleKey(){
switch (this.roleKey) {
case "guest"|| undefined:
this.grade = 1;
break;
case "registered":
this.grade = 2;
break;
case "advance":
this.grade = 3;
break;
case "vip":
this.grade = 4;
break;
case "admin":
this.grade = 10;
break;
default:
break;
}
this.dataSelection.forEach(item => {
if (item.grade>this.grade) {
item.disabled =true
}else{
item.disabled =false
}
});
}
},
mounted() {
if (localStorage.getItem("identity")) {
this.roleKey = JSON.parse(localStorage.getItem("identity")).roleKey;
}
window.addEventListener("setItem", () => {
if (localStorage.getItem("identity")) {
this.roleKey = JSON.parse(localStorage.getItem("identity")).roleKey;
}
});
switch (this.roleKey) {
case "guest"|| undefined:
this.grade = 1;
break;
case "registered":
this.grade = 2;
break;
case "advance":
this.grade = 3;
break;
case "vip":
this.grade = 4;
break;
case "admin":
this.grade = 10;
break;
default:
break;
}
this.dataSelection.forEach(item => {
if (item.grade>this.grade) {
item.disabled =true
}else{
item.disabled =false
}
});
},
methods: {
handleCommand(command) {
this.onSelect(command);
this.activeValue = this.dataSelection.find(
(item) => item.value === command ).label;
this.$addStorageEvent(1, "sma", "");
if (command=="1d") {
this.$addStorageEvent(1, "timeDisable", false);
}else{
this.$addStorageEvent(1, "timeDisable", true);
}
},
},
};
</script>
<style lang="scss" scoped>
.dropdownBox {
outline: 1px solid rgba(0, 0, 0, 0.1);
width: 100%;
height: 50px;
display: flex;
align-items: center;
box-sizing: border-box;
// margin-left: -30px;
margin-top: 5px;
padding: 5px 50px;
}
</style>

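In the dropdown component above, the roleKey-to-grade switch and the pass that disables dataSelection items are written out twice, once in the roleKey watcher and once in mounted. A small refactor sketch that centralizes the same logic; ROLE_GRADES is a name introduced here for illustration, everything else reuses names from the component:

// Hypothetical refactor sketch: one lookup table instead of two identical switch statements.
const ROLE_GRADES = { guest: 1, registered: 2, advance: 3, vip: 4, admin: 10 };

export default {
  // ...props as in the component above; roleKey stays in data, grade moves from data to a computed...
  computed: {
    // Unknown or missing roles fall back to the lowest grade.
    grade() {
      return ROLE_GRADES[this.roleKey] || 1;
    },
  },
  watch: {
    // Re-apply the restriction whenever the computed grade changes (and once on creation).
    grade: {
      immediate: true,
      handler() {
        this.dataSelection.forEach((item) => {
          item.disabled = item.grade > this.grade;
        });
      },
    },
  },
};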
File diff suppressed because it is too large

View File

@@ -0,0 +1,650 @@
<template>
<div>
    <!-- Header for regular users  identityLabel !== `admin` && -->
<div
v-show="
identityLabel !== `support` &&
identityLabel !== `verifier` &&
identityLabel !== `admin` &&
identityLabel !== `accounting`
"
>
<ul class="headList2">
<!-- 首页 -->
<li @click="handelJump(`/`)">{{ $t(`menu.homePage`) }}</li>
<!-- 账户升级 -->
<li @click="handelJump(`/AccountUpgrade`)">
{{ $t("menu.upgrade") }}
</li>
<!-- 退出 -->
<li @click="handelExit">{{ $t("login.exit") }}</li>
<!-- 联系客服 -->
<li @click="handelHelp">{{ $t("login.customerService") }}</li>
<!-- 用户名 退出 -->
<li v-show="!loginstatus">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<!-- style="text-transform: none;" 用户名取消首字母大写 -->
<span class="el-dropdown-link" style="text-transform: none">
{{ userName }}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<!-- 退出 -->
<!-- <el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelExit">
{{ $t("login.exit") }}</el-dropdown-item
>
</el-dropdown-menu> -->
<!-- 个人资料 -->
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelPeople">
{{ $t(`login.personalData`) }}</el-dropdown-item
>
<!-- 订阅中心 -->
<!-- <el-dropdown-item @click.native="handelJump(`/subscribe`)">
{{ $t(`login.subscribeCenter`) }}</el-dropdown-item
> -->
<!-- 支付历史改为转账记录 -->
<el-dropdown-item @click.native="handelJump(`/payHistories`)">
{{ $t(`login.payHistories`) }}</el-dropdown-item
>
<!-- 提现记录 -->
<el-dropdown-item
@click.native="handelJump(`/withdrawalRecords`)"
>
{{ $t(`pay.withdrawalRecords`) }}</el-dropdown-item
>
<!-- 消费记录 -->
<el-dropdown-item
@click.native="handelJump(`/ConsumptionRecords`)"
>
{{ $t(`pay.consumption`) }}</el-dropdown-item
>
<!-- 充值 -->
<el-dropdown-item
@click.native="handelJump(`/Recharge`)"
style="font-family: Arial, sans-serif; font-size: 13px"
>
{{ $t(`pay.Recharge1`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li>
<!-- 余额 -->
<!-- <li>{{ $t(`login.balance`) }} :<span>***</span> </li> -->
<!--充值 -->
<!-- <li>{{ $t(`login.Recharge`) }}</li> -->
<!-- 提现 -->
<!-- <li>{{ $t(`login.Withdrawal`) }} </li> -->
<!-- 用户名 退出 -->
<!-- <li v-show="!loginstatus">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ userName }}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelExit">
{{ $t("login.exit") }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li> -->
<!-- 当前等级-->
<!-- <li>{{ $t(`login.currentLevel`) }} : L3<span></span> </li> -->
<!-- 个人中心 -->
<!-- <li>
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ $t("login.personal")
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelPeople">
{{ $t(`login.personalData`) }}</el-dropdown-item
>
<el-dropdown-item @click.native="handelJump(`/subscribe`)">
{{ $t(`login.subscribeCenter`) }}</el-dropdown-item
>
<el-dropdown-item @click.native="handelJump(`/payHistories`)">
{{ $t(`login.payHistories`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li> -->
<!-- 帮助 修改为联系客服-->
<!-- <li class="help">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ $t("login.customerService")
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelHelp">
{{ $t(`login.feedback`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li> -->
<!-- 后台页面 -->
<!-- <li class="help">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ $t("login.backgroundPage")
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item
v-show="
identityLabel == `support` ||
identityLabel == `verifier` ||
identityLabel == `admin`
"
@click.native="handelJump(`/HelpBackstage`)"
>
{{ $t(`login.backstage`) }}</el-dropdown-item
>
<el-dropdown-item
v-show="identityLabel == `verifier` || identityLabel == `admin`"
@click.native="handelJump(`/subscribeBoard`)"
>
{{ $t(`login.subscribeBoard`) }}</el-dropdown-item
>
<el-dropdown-item
v-show="
identityLabel == `accounting` || identityLabel == `admin`
"
@click.native="handelJump(`/payManagement`)"
>
{{ $t(`login.payManagement`) }}</el-dropdown-item
>
<el-dropdown-item @click.native="handelJump(`/BackendWithdrawal`)">
{{ $t(`login.backendWithdrawal`) }}</el-dropdown-item
>
<el-dropdown-item v-show=" identityLabel == `support`||identityLabel == `admin` " @click.native="handelJump(`/DataMonitoring`)"> {{ $t(`menu.dataMonitoring`) }}</el-dropdown-item
>
<el-dropdown-item
@click.native="handelJump(`/BackgroundAddress`)"
>
{{ $t(`login.backgroundAddress`) }}</el-dropdown-item
>
<el-dropdown-item @click.native="handelJump(`/BackendPayment`)">
{{ $t(`login.backendPayment`) }}</el-dropdown-item
>
<el-dropdown-item @click.native="handelJump(`/BackendRefund`)">
{{ $t(`login.backendRefund`) }}</el-dropdown-item
>
<el-dropdown-item @click.native="handelJump(`/messageBoard`)">
{{ $t("message.backstage") }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li> -->
<!-- 留言板后台 -->
<!-- <li
v-show="
identityLabel == `support` ||
identityLabel == `verifier` ||
identityLabel == `admin`
"
@click="handelJump(`/messageBoard`)"
>
{{ $t("message.backstage") }}
</li> -->
<!-- 语言 -->
<li class="langs" @click="handelSwitching">{{ langs }}</li>
<!-- <li >
<el-dropdown trigger="click" szie="small" class="loginMenu">
<span class="el-dropdown-link">
切换中英文<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelExit">
中文
</el-dropdown-item
>
<el-dropdown-item @click.native="handelExit">
中文
</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li> -->
</ul>
</div>
    <!-- Header shown after technical staff log in: support, verifier -->
<div v-show="identityLabel == `support` || identityLabel == `verifier`">
<ul class="headList2">
<!-- 首页 -->
<li @click="handelJump(`/`)">{{ $t(`menu.homePage`) }}</li>
<!-- 退出 -->
<li @click="handelExit">{{ $t("login.exit") }}</li>
<!-- 联系客服 -->
<li @click="handelHelp">{{ $t("login.customerService") }}</li>
<!-- 用户名 退出 -->
<li v-show="!loginstatus">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link" style="text-transform: none">
{{ userName }}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<!-- 个人资料 -->
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelPeople">
{{ $t(`login.personalData`) }}</el-dropdown-item
>
<!-- 订阅中心 -->
<!-- <el-dropdown-item @click.native="handelJump(`/subscribe`)">
{{ $t(`login.subscribeCenter`) }}</el-dropdown-item
> -->
<!-- 支付历史 -->
<el-dropdown-item @click.native="handelJump(`/payHistories`)">
{{ $t(`login.payHistories`) }}</el-dropdown-item
>
<!-- 提现记录 -->
<el-dropdown-item
@click.native="handelJump(`/withdrawalRecords`)"
>
{{ $t(`pay.withdrawalRecords`) }}</el-dropdown-item
>
<!-- 消费记录 -->
<el-dropdown-item
@click.native="handelJump(`/ConsumptionRecords`)"
>
{{ $t(`pay.consumption`) }}</el-dropdown-item
>
<!-- 充值 -->
<el-dropdown-item
@click.native="handelJump(`/Recharge`)"
style="font-family: Arial, sans-serif; font-size: 13px"
>
{{ $t(`pay.Recharge1`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li>
<!-- 后台页面 后台管理-->
<li class="help">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ $t("login.backgroundPage")
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<!-- 数据监控 改为数据统计后台 访问统计-->
<!-- <el-dropdown-item
@click.native="handelJump(`/DataMonitoring`)"
>
{{ $t(`menu.dataMonitoring`) }}</el-dropdown-item
> -->
<!-- 订阅管理后台 改为订阅后台页面 消息订阅-->
<!-- <el-dropdown-item
v-show="identityLabel == `verifier` || identityLabel == `admin`"
@click.native="handelJump(`/subscribeBoard`)"
>
{{ $t(`login.subscribeBoard`) }}</el-dropdown-item
> -->
<!-- 帮助中心后台改为客服后台 客服中心-->
<el-dropdown-item
v-show="
identityLabel == `support` ||
identityLabel == `verifier` ||
identityLabel == `admin`
"
@click.native="handelJump(`/HelpBackstage`)"
>
{{ $t(`login.backstage`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li>
<!-- 语言 -->
<li class="langs" @click="handelSwitching">{{ langs }}</li>
</ul>
</div>
    <!-- Header for finance users: accounting -->
<div v-show="identityLabel == `accounting`">
<ul class="headList2">
<!-- 首页 -->
<li @click="handelJump(`/`)">{{ $t(`menu.homePage`) }}</li>
<!-- 退出 -->
<li @click="handelExit">{{ $t("login.exit") }}</li>
<!-- 联系客服 -->
<li @click="handelHelp">{{ $t("login.customerService") }}</li>
<!-- 用户名 个人资料 -->
<li v-show="!loginstatus">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link" style="text-transform: none">
{{ userName }}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<!-- 个人资料 -->
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelPeople">
{{ $t(`login.personalData`) }}</el-dropdown-item
>
<!-- 订阅中心 -->
<!-- <el-dropdown-item @click.native="handelJump(`/subscribe`)">
{{ $t(`login.subscribeCenter`) }}</el-dropdown-item
> -->
<!-- 支付历史 -->
<el-dropdown-item @click.native="handelJump(`/payHistories`)">
{{ $t(`login.payHistories`) }}</el-dropdown-item
>
<!-- 提现记录 -->
<el-dropdown-item
@click.native="handelJump(`/withdrawalRecords`)"
>
{{ $t(`pay.withdrawalRecords`) }}</el-dropdown-item
>
<!-- 消费记录 -->
<el-dropdown-item
@click.native="handelJump(`/ConsumptionRecords`)"
>
{{ $t(`pay.consumption`) }}</el-dropdown-item
>
<!-- 充值 -->
<el-dropdown-item
@click.native="handelJump(`/Recharge`)"
style="font-family: Arial, sans-serif; font-size: 13px"
>
{{ $t(`pay.Recharge1`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown>
</li>
<!-- 财务管理 -->
<li class="help">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ $t("login.finance")
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro">
<!-- 支付管理后台 改为充值后台页面 数据中心-->
<!-- <el-dropdown-item
@click.native="handelJump(`/payManagement`)"
>
{{ $t(`login.paymentBackend`) }}</el-dropdown-item
> -->
<!-- 后台提现 改为提现后台页面 提现订单-->
<el-dropdown-item
@click.native="handelJump(`/BackendWithdrawal`)"
>
{{ $t(`login.backendWithdrawal`) }}</el-dropdown-item
>
<!-- 后台确认支付 转账确认 -->
<!-- <el-dropdown-item @click.native="handelJump(`/BackendPayment`)">
{{ $t(`login.backendPayment`) }}</el-dropdown-item
> -->
<!-- 后台退款 退款确认-->
<!-- <el-dropdown-item @click.native="handelJump(`/BackendRefund`)">
{{ $t(`login.backendRefund`) }}</el-dropdown-item
> -->
<!-- 后台地址管理 收款地址 -->
<el-dropdown-item
@click.native="handelJump(`/BackgroundAddress`)"
>
{{ $t(`login.backgroundAddress`) }}</el-dropdown-item
>
<!--充值记录 -->
<el-dropdown-item @click.native="handelJump(`/RechargeRecord`)">
{{ $t(`login.RechargeRecord`) }}</el-dropdown-item
>
</el-dropdown-menu>
<!-- -->
</el-dropdown>
</li>
<!-- 语言 -->
<li class="langs" @click="handelSwitching">{{ langs }}</li>
</ul>
</div>
    <!-- Header for admin login -->
<div v-show="identityLabel == `admin`">
<ul class="headList2">
<!-- 首页 -->
<!-- <li @click="handelJump(`/`)">{{ $t(`menu.homePage`) }}</li> -->
<!-- 账户升级 -->
<!-- <li @click="handelJump(`/AccountUpgrade`)">
{{ $t("menu.upgrade") }}
</li> -->
<!-- 退出 -->
<!-- <li @click="handelExit">{{ $t("login.exit") }}</li> -->
<!-- 联系客服 -->
<!-- <li @click="handelHelp">{{ $t("login.customerService") }}</li> -->
<!-- 用户名 退出 -->
<li v-show="!loginstatus">
<!-- <el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link" style="text-transform: none">
{{ userName
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span> -->
<!-- 退出 -->
<!-- <el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelExit">
{{ $t("login.exit") }}</el-dropdown-item
>
</el-dropdown-menu> -->
<!-- 个人资料 -->
<!-- <el-dropdown-menu slot="dropdown" class="header-new-dro">
<el-dropdown-item @click.native="handelPeople">
{{ $t(`login.personalData`) }}</el-dropdown-item
> -->
<!-- 站内信 -->
<!-- <el-dropdown-item @click.native="handelJump(`/subscriptionLetter`)">
{{ $t(`login.subscriptionLetter`) }}</el-dropdown-item
> -->
<!-- 订阅与告警 -->
<!-- <el-dropdown-item @click.native="handelJump(`/Alarm`)">
{{ $t(`login.Alarm`) }}</el-dropdown-item
> -->
<!-- 订阅中心 -->
<!-- <el-dropdown-item @click.native="handelJump(`/subscribe`)">
{{ $t(`login.subscribeCenter`) }}</el-dropdown-item
> -->
<!-- 支付历史 -->
<!-- <el-dropdown-item @click.native="handelJump(`/payHistories`)">
{{ $t(`login.payHistories`) }}</el-dropdown-item
> -->
<!-- 提现记录 -->
<!-- <el-dropdown-item
@click.native="handelJump(`/withdrawalRecords`)"
>
{{ $t(`pay.withdrawalRecords`) }}</el-dropdown-item
> -->
<!-- 消费记录 -->
<!-- <el-dropdown-item
@click.native="handelJump(`/ConsumptionRecords`)"
>
{{ $t(`pay.consumption`) }}</el-dropdown-item
> -->
<!-- 充值 -->
<!-- <el-dropdown-item
@click.native="handelJump(`/Recharge`)"
style="font-family: Arial, sans-serif; font-size: 13px"
>
{{ $t(`pay.Recharge1`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown> -->
</li>
<!-- 后台管理 -->
<li class="help">
<!-- <el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ $t("login.backgroundPage")
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro"> -->
<!-- 数据监控 改为数据统计后台 访问统计-->
<!-- <el-dropdown-item
@click.native="handelJump(`/DataMonitoring`)"
>
{{ $t(`menu.dataMonitoring`) }}</el-dropdown-item
> -->
<!-- 发布观察报告-->
<!-- <el-dropdown-item
@click.native="handelJump(`/publishObservations`)"
>
{{ $t(`login.publishObservations`) }}</el-dropdown-item
> -->
<!-- 订阅管理后台 改为订阅后台页面 消息订阅-->
<!-- <el-dropdown-item
v-show="identityLabel == `verifier` || identityLabel == `admin`"
@click.native="handelJump(`/subscribeBoard`)"
>
{{ $t(`login.subscribeBoard`) }}</el-dropdown-item
> -->
<!-- 帮助中心后台改为客服后台 客服中心-->
<!-- <el-dropdown-item
v-show="
identityLabel == `support` ||
identityLabel == `verifier` ||
identityLabel == `admin`
"
@click.native="handelJump(`/HelpBackstage`)"
>
{{ $t(`login.backstage`) }}</el-dropdown-item
>
</el-dropdown-menu>
</el-dropdown> -->
</li>
<!-- 财务管理 -->
<!-- <li class="help">
<el-dropdown
trigger="click"
:modal-append-to-body="true"
szie="small"
class="loginMenu"
>
<span class="el-dropdown-link">
{{ $t("login.finance")
}}<i class="el-icon-arrow-down el-icon--right"></i>
</span>
<el-dropdown-menu slot="dropdown" class="header-new-dro"> -->
<!-- 支付管理后台 改为充值后台页面 充值确认 数据中心 -->
<!-- <el-dropdown-item
@click.native="handelJump(`/payManagement`)"
>
{{ $t(`login.paymentBackend`) }}</el-dropdown-item
> -->
<!-- 后台提现 改为提现后台页面 提现确认-->
<!-- <el-dropdown-item
@click.native="handelJump(`/BackendWithdrawal`)"
>
{{ $t(`login.backendWithdrawal`) }}</el-dropdown-item
> -->
<!-- 后台确认支付 转账确认 -->
<!-- <el-dropdown-item @click.native="handelJump(`/BackendPayment`)">
{{ $t(`login.backendPayment`) }}</el-dropdown-item
> -->
<!-- 后台退款 退款确认-->
<!-- <el-dropdown-item @click.native="handelJump(`/BackendRefund`)">
{{ $t(`login.backendRefund`) }}</el-dropdown-item
> -->
<!-- 后台地址管理 收款地址 -->
<!-- <el-dropdown-item
@click.native="handelJump(`/BackgroundAddress`)"
>
{{ $t(`login.backgroundAddress`) }}</el-dropdown-item
> -->
<!--充值记录 -->
<!-- <el-dropdown-item
@click.native="handelJump(`/RechargeRecord`)"
>
{{ $t(`login.RechargeRecord`) }}</el-dropdown-item
> -->
<!-- </el-dropdown-menu>
</el-dropdown> -->
<!-- </li> -->
<!-- 语言 -->
<!-- <li class="langs" @click="handelSwitching">{{ langs }}</li> -->
</ul>
</div>
</div>
</template>
<script>
export default {};
</script>
<style>
</style>

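As committed, the header component's script block is an empty export default {}, while its template refers to identityLabel, loginstatus, userName, langs and several handel* methods. A minimal sketch of the options the template appears to expect; only the names come from the template above, the bodies are placeholders:

export default {
  data() {
    return {
      identityLabel: "", // role key, e.g. support / verifier / accounting / admin as tested in the template
      loginstatus: false, // the username dropdown uses v-show="!loginstatus", so falsy means "show it"
      userName: "",
      langs: "English", // label shown on the language-switch item (placeholder value)
    };
  },
  methods: {
    handelJump(path) {
      // placeholder: navigate with vue-router, skipping the current route
      if (this.$route.path !== path) this.$router.push(path);
    },
    handelExit() { /* placeholder: clear credentials and return to the login page */ },
    handelHelp() { /* placeholder: open the customer-service / feedback entry */ },
    handelPeople() { /* placeholder: open the personal-data page */ },
    handelSwitching() { /* placeholder: toggle the i18n locale and update langs */ },
  },
};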
Some files were not shown because too many files have changed in this diff.