coinbus code update

fengche 2025-08-22 15:18:56 +08:00
parent 7e68033117
commit 5964a7fb8b
7 changed files with 2190 additions and 0 deletions

119
coinbus/Binance_fapi.py Normal file

@@ -0,0 +1,119 @@
import pymysql
import requests
import time
import schedule
from datetime import datetime, timedelta

# MySQL connection helper
def connect_to_db():
    return pymysql.connect(
        host="127.0.0.1",             # database host
        user="root",                  # database user
        password="2GS@bPYcgiMyL14A",  # database password
        database="binance_api",       # database name
        port=4423                     # database port
    )

# Execute a single SQL statement and commit
def execute_query(query, params=None):
    conn = connect_to_db()              # open a connection
    with conn.cursor() as cursor:
        cursor.execute(query, params)   # run the SQL statement
    conn.commit()                       # commit the transaction
    conn.close()                        # close the connection

# Convert a millisecond timestamp to Beijing time (UTC+8)
def bj_time(timestamp):
    return (datetime.utcfromtimestamp(timestamp / 1000) + timedelta(hours=8)).strftime('%Y-%m-%d %H:%M:%S')

# Binance futures API client
class BinanceAPI:
    base_url = "https://fapi.binance.com"  # Binance futures base URL

    @staticmethod
    def get(endpoint, params=None):
        # Send a GET request to the Binance API
        response = requests.get(f"{BinanceAPI.base_url}{endpoint}", params=params)
        return response.json()  # return the JSON response

# Task 1: fetch funding rates and insert them into the database
def funding_rate():
    # Fetch funding-rate data for BTC and ETH
    btc_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "BTCUSDT"})
    eth_data = BinanceAPI.get("/fapi/v1/fundingRate", {"symbol": "ETHUSDT"})
    # Prepare the SQL insert statements
    btc_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
                 VALUES ("BTCUSDT", %s, %s)"""
    eth_sql = """INSERT INTO fundingrate(symbol, ts, fundingRate)
                 VALUES ("ETHUSDT", %s, %s)"""
    # Execute the inserts with the most recent record
    execute_query(btc_sql, (btc_data[-1]['fundingTime'], btc_data[-1]['fundingRate']))
    execute_query(eth_sql, (eth_data[-1]['fundingTime'], eth_data[-1]['fundingRate']))

# Task 2: fetch open interest and insert it into the database
def open_interest():
    # Fetch open-interest data for BTC and ETH
    btc_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "BTCUSDT"})
    eth_data = BinanceAPI.get("/fapi/v1/openInterest", {"symbol": "ETHUSDT"})
    # Prepare the SQL insert statements
    btc_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
                 VALUES ("BTCUSDT", %s, %s)"""
    eth_sql = """INSERT INTO openInterest(symbol, ts, openInterest)
                 VALUES ("ETHUSDT", %s, %s)"""
    # Execute the inserts
    execute_query(btc_sql, (btc_data['time'], btc_data['openInterest']))
    execute_query(eth_sql, (eth_data['time'], eth_data['openInterest']))

# Task 3: fetch the taker long/short ratio and insert it into the database
def long_short_ratio(interval):
    # Fetch long/short ratio data for BTC and ETH
    btc_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
        "symbol": "BTCUSDT", "period": interval
    })
    eth_data = BinanceAPI.get("/futures/data/takerlongshortRatio", {
        "symbol": "ETHUSDT", "period": interval
    })
    # Prepare the SQL insert statements (one table per interval)
    btc_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
                  VALUES ("BTCUSDT", %s, %s, %s, %s)"""
    eth_sql = f"""INSERT INTO longshortratio{interval}(symbol, ts, buyVol, sellVol, buySellRatio)
                  VALUES ("ETHUSDT", %s, %s, %s, %s)"""
    # Execute the inserts with the most recent record
    execute_query(btc_sql, (btc_data[-1]['timestamp'], btc_data[-1]['buyVol'], btc_data[-1]['sellVol'], btc_data[-1]['buySellRatio']))
    execute_query(eth_sql, (eth_data[-1]['timestamp'], eth_data[-1]['buyVol'], eth_data[-1]['sellVol'], eth_data[-1]['buySellRatio']))

# Register the recurring jobs
def schedule_jobs():
    # Run the funding-rate task daily at 00:01, 08:01 and 16:01
    schedule.every().day.at("00:01").do(funding_rate)
    schedule.every().day.at("08:01").do(funding_rate)
    schedule.every().day.at("16:01").do(funding_rate)
    # Run the open-interest task at seconds 15, 25, 35, 45 and 55 of every minute
    schedule.every().minute.at(":15").do(open_interest)
    schedule.every().minute.at(":25").do(open_interest)
    schedule.every().minute.at(":35").do(open_interest)
    schedule.every().minute.at(":45").do(open_interest)
    schedule.every().minute.at(":55").do(open_interest)
    # Run the long/short ratio task at second 15 of every minute for each period (5m, 15m, 30m, ...)
    intervals = ["5m", "15m", "30m", "1h", "2h", "4h", "6h", "12h", "1d"]
    for interval in intervals:
        schedule.every().minute.at(":15").do(long_short_ratio, interval=interval)

# Start the scheduler loop
def run():
    schedule_jobs()              # register the scheduled jobs
    while True:
        schedule.run_pending()   # run any job that is due
        time.sleep(1)            # poll once per second

if __name__ == "__main__":
    run()  # start the scheduler
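
Binance_fapi.py assumes the fundingrate, openInterest and longshortratio{interval} tables already exist in the binance_api database. A minimal bootstrap sketch is given below; the column names follow the INSERT statements above, while the column types and key are assumptions and are not part of this commit.

# Hypothetical bootstrap script: creates the tables Binance_fapi.py writes to.
# Column names come from the INSERT statements above; types/keys are assumptions.
import pymysql

DDL = [
    """CREATE TABLE IF NOT EXISTS fundingrate (
           id INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
           symbol CHAR(15) NOT NULL,
           ts BIGINT NOT NULL,
           fundingRate DOUBLE NOT NULL
       )""",
    """CREATE TABLE IF NOT EXISTS openInterest (
           id INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
           symbol CHAR(15) NOT NULL,
           ts BIGINT NOT NULL,
           openInterest DOUBLE NOT NULL
       )""",
]
# One long/short table per period, matching the f-string table names above.
for interval in ["5m", "15m", "30m", "1h", "2h", "4h", "6h", "12h", "1d"]:
    DDL.append(f"""CREATE TABLE IF NOT EXISTS longshortratio{interval} (
           id INT NOT NULL PRIMARY KEY AUTO_INCREMENT,
           symbol CHAR(15) NOT NULL,
           ts BIGINT NOT NULL,
           buyVol DOUBLE NOT NULL,
           sellVol DOUBLE NOT NULL,
           buySellRatio DOUBLE NOT NULL
       )""")

if __name__ == "__main__":
    conn = pymysql.connect(host="127.0.0.1", user="root",
                           password="2GS@bPYcgiMyL14A",
                           database="binance_api", port=4423)
    with conn.cursor() as cursor:
        for stmt in DDL:
            cursor.execute(stmt)
    conn.commit()
    conn.close()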

115
coinbus/CoinmarketCap.py Normal file

@@ -0,0 +1,115 @@
import requests
import pymysql
import time
from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime, timedelta

# API key and request headers
API_KEY = "83bf85c1-1bd8-426a-a043-6b67dad8bda5"
headers = {"X-CMC_PRO_API_KEY": API_KEY}
base_url = "https://pro-api.coinmarketcap.com"
url = f"{base_url}/v1/cryptocurrency/listings/latest"

# MySQL connection settings
db_config = {
    'host': '127.0.0.1',              # database host
    'user': 'root',                   # database user
    'password': '2GS@bPYcgiMyL14A',   # database password
    'database': 'coinmarketcap',      # database name
    'port': 4423                      # database port
}

# Create the table if it does not exist yet
def create_table():
    connection = pymysql.connect(**db_config)  # connect to the database
    cursor = connection.cursor()               # create a cursor
    # SQL statement that creates the table
    create_table_query = """
    CREATE TABLE IF NOT EXISTS marketInfo (
        id INT NOT NULL PRIMARY KEY AUTO_INCREMENT, # auto-increment id
        update_time DATETIME NOT NULL,              # update time
        symbol CHAR(15) NOT NULL,                   # coin symbol
        ranks INT NOT NULL,                         # CMC rank
        price DOUBLE NOT NULL,                      # current price
        market_cap DOUBLE NOT NULL,                 # market capitalization
        volume_24h DOUBLE NOT NULL,                 # 24h trading volume
        volume_change_24h DOUBLE NOT NULL,          # 24h volume change
        percent_change_1h DOUBLE NOT NULL,          # 1h price change
        percent_change_24h DOUBLE NOT NULL,         # 24h price change
        percent_change_7d DOUBLE NOT NULL,          # 7d price change
        percent_change_30d DOUBLE NOT NULL,         # 30d price change
        percent_change_60d DOUBLE NOT NULL,         # 60d price change
        percent_change_90d DOUBLE NOT NULL          # 90d price change
    );
    """
    cursor.execute(create_table_query)  # execute the CREATE TABLE statement
    connection.commit()                 # commit the transaction
    cursor.close()                      # close the cursor
    connection.close()                  # close the connection

# Convert a UTC timestamp string to Beijing time
def bj_time(utc_time):
    """Convert a UTC timestamp string to Beijing time (UTC+8)."""
    utc_time = datetime.strptime(utc_time, '%Y-%m-%dT%H:%M:%S.%fZ')  # parse the UTC string
    beijing_time = utc_time + timedelta(hours=8)                     # Beijing time is UTC+8
    return beijing_time.strftime('%Y-%m-%d %H:%M:%S')                # format as a string

# Fetch market data and insert it into the database
def marketcap():
    try:
        # Request the latest cryptocurrency listings from the CoinMarketCap API
        response = requests.get(url, headers=headers, params={"limit": 200})
        response.raise_for_status()  # raise on HTTP errors
    except requests.RequestException:
        time.sleep(60)               # wait one minute and retry once
        response = requests.get(url, headers=headers, params={"limit": 200})
    data = response.json()           # parse the JSON payload
    for item in data['data']:        # iterate over the returned coins
        quote = item['quote']['USD']                        # USD quote block
        update_time = bj_time(quote['last_updated'])        # convert the update time to Beijing time
        symbol = item['symbol']                             # coin symbol
        ranks = item['cmc_rank']                            # CMC rank
        price = quote['price']                              # price
        market_cap = quote['market_cap']                    # market capitalization
        volume_24h = quote['volume_24h']                    # 24h trading volume
        volume_change_24h = quote['volume_change_24h']      # 24h volume change
        percent_change_1h = quote['percent_change_1h']      # 1h price change
        percent_change_24h = quote['percent_change_24h']    # 24h price change
        percent_change_7d = quote['percent_change_7d']      # 7d price change
        percent_change_30d = quote['percent_change_30d']    # 30d price change
        percent_change_60d = quote['percent_change_60d']    # 60d price change
        percent_change_90d = quote['percent_change_90d']    # 90d price change
        # Insert the row into MySQL
        connection = pymysql.connect(**db_config)  # connect to the database
        cursor = connection.cursor()               # create a cursor
        insert_query = """
        INSERT INTO marketInfo (
            update_time, symbol, ranks, price, market_cap, volume_24h,
            volume_change_24h, percent_change_1h, percent_change_24h,
            percent_change_7d, percent_change_30d, percent_change_60d,
            percent_change_90d
        ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
        """
        # Execute the insert
        cursor.execute(insert_query, (
            update_time, symbol, ranks, price, market_cap, volume_24h,
            volume_change_24h, percent_change_1h, percent_change_24h,
            percent_change_7d, percent_change_30d, percent_change_60d,
            percent_change_90d
        ))
        connection.commit()   # commit the transaction
        cursor.close()        # close the cursor
        connection.close()    # close the connection

# Scheduled job: run marketcap every 5 minutes
def schedule_job():
    scheduler = BlockingScheduler()  # create a blocking scheduler
    scheduler.add_job(marketcap, 'cron', minute='0,5,10,15,20,25,30,35,40,45,50,55')  # every 5 minutes
    scheduler.start()                # start the scheduler

if __name__ == "__main__":
    create_table()   # make sure the table exists before the first run
    schedule_job()   # start the periodic fetch-and-insert job
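
The marketInfo table is written append-only, one row per coin per run, so consumers need to filter for the newest update_time. A minimal read-back sketch follows; the query pattern is an assumption about how the table is meant to be consumed and is not part of this commit.

# Hypothetical read-back helper: latest stored snapshot for one symbol.
import pymysql

def latest_row(symbol="BTC"):
    connection = pymysql.connect(host='127.0.0.1', user='root',
                                 password='2GS@bPYcgiMyL14A',
                                 database='coinmarketcap', port=4423)
    try:
        with connection.cursor() as cursor:
            cursor.execute(
                "SELECT update_time, ranks, price, market_cap FROM marketInfo "
                "WHERE symbol=%s ORDER BY update_time DESC LIMIT 1",
                (symbol,))
            return cursor.fetchone()
    finally:
        connection.close()

# Example: print(latest_row("ETH"))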

1005
coinbus/arh999_lyq.py Normal file

File diff suppressed because it is too large.

504
coinbus/arh999eth_lyq.py Normal file

@@ -0,0 +1,504 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
import time
import requests
from loguru import logger
import datetime
import pymysql
import math
import csv
g_prices = {}
g_dbif = None
g_lastts = 0
def get_day60_rise(day, prices):
total = 0
cnt = 0
for i in range(60):
if str(day) in prices:
cur_price = prices[str(day)]
day = str(day - 3600 * 24)
if day in prices:
prev_price = prices[day]
try:
#print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
total += (((cur_price-prev_price)/prev_price))
cnt += 1
except:
pass
# print(day, total, cnt)
day = int(day)
return total
def get_days_rise(day, maxdays, prices):
total = 0
cnt = 0
for i in range(maxdays):
if str(day) in prices:
cur_price = prices[str(day)]
day = str(day - 3600 * 24)
if day in prices:
prev_price = prices[day]
try:
#print(((cur_price-prev_price)/prev_price), day, cur_price, prev_price)
total += (((cur_price-prev_price)/prev_price))
cnt += 1
except:
pass
# print(day, total, cnt)
day = int(day)
return total
def append_jzr_day60(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
dbif.append_jzr60(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
def sync_jzr_day60(dbif, prices):
for day in prices:
print(day, prices[day])
day60_rise = get_days_rise(int(day), 60, prices)
day7_rise = get_days_rise(int(day), 7, prices)
day30_rise = get_days_rise(int(day), 30, prices)
day90_rise = get_days_rise(int(day), 90, prices)
print(day, day60_rise)
append_jzr_day60(dbif, day, prices[day], day60_rise, day7_rise, day30_rise, day90_rise)
def check_jzr60_sync(dbif):
return dbif.check_jzr60_sync()
def append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise):
dbif.append_jzr60_day(day, price, day60_rise, day7_rise, day30_rise, day90_rise)
def append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise):
dbif.append_jzr60(dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
def clean_jzr60day(dbif, clean_day):
dbif.clean_jzr60_day(clean_day)
def handle_jzr_day60(dbif, day, dayutc, price, prices):
day60_rise = get_days_rise(dayutc, 60, prices)
day7_rise = get_days_rise(dayutc, 7, prices)
day30_rise = get_days_rise(dayutc, 30, prices)
day90_rise = get_days_rise(dayutc, 90, prices)
print(dayutc, price, day, day60_rise)
append_jzr60day(dbif, day, price, day60_rise, day7_rise, day30_rise, day90_rise)
append_jzr60(dbif, dayutc, price, day60_rise, day7_rise, day30_rise, day90_rise)
clean_day = dayutc - 3600 * 24 * 2
clean_jzr60day(dbif, clean_day)
class Arh99DbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init arh99 db suceess!")
def check_sync(self):
synced = False
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `arh99v3a`"
cursor.execute(sql_query)
result = cursor.fetchone()
print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
synced = True
self.conn.commit()
#print("synced", synced)
return synced
def append(self, day, price, arh99, arh99x):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `arh99v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, (int(day),))
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
sql_update = 'UPDATE arh99v3a SET `arh99`=%s, `arh99x`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
print(sql_update)
cursor.execute(sql_update, (arh99, arh99x, price, int(day), int(day)))
else:
sql_insert = "INSERT INTO `arh99v3a` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, arh99, arh99x))
self.conn.commit()
def append_day(self, day, price, arh99, arh99x):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `arh99v3aday` (`unixdt`, `price`, `arh99`, `arh99x`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, arh99, arh99x))
self.conn.commit()
def clean_day(self, day):
with self.conn.cursor() as cursor:
sql_clean = "DELETE from arh99v3aday where unixdt<FROM_UNIXTIME(%s)"
print(sql_clean)
cursor.execute(sql_clean, (int(day),))
self.conn.commit()
def check_jzr60_sync(self):
synced = False
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `jzr60v3a`"
cursor.execute(sql_query)
result = cursor.fetchone()
print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
synced = True
self.conn.commit()
#print("synced", synced)
return synced
def append_jzr60(self, day, price, jzr60, jzr7, jzr30, jzr90):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `jzr60v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, (int(day),))
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
sql_update = 'UPDATE jzr60v3a SET `jzr60`=%s,`jzr7`=%s,`jzr30`=%s,`jzr90`=%s,`price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
print(sql_update)
cursor.execute(sql_update, (jzr60, jzr7, jzr30, jzr90, price, int(day), int(day)))
else:
sql_insert = "INSERT INTO `jzr60v3a` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
self.conn.commit()
def append_jzr60_day(self, day, price, jzr60, jzr7, jzr30, jzr90):
with self.conn.cursor() as cursor:
sql_insert = "INSERT INTO `jzr60v3aday` (`unixdt`, `price`, `jzr60`, `jzr7`, `jzr30`, `jzr90`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, jzr60, jzr7, jzr30, jzr90))
self.conn.commit()
def clean_jzr60_day(self, day):
with self.conn.cursor() as cursor:
sql_clean = "DELETE from jzr60v3aday where unixdt<FROM_UNIXTIME(%s)"
print(sql_clean)
cursor.execute(sql_clean, (int(day),))
self.conn.commit()
def check_ma730_sync(self):
synced = False
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `ma730v3a`"
cursor.execute(sql_query)
result = cursor.fetchone()
print(result)
if result is not None:
if "COUNT(id)" in result:
if result["COUNT(id)"] > 0:
synced = True
self.conn.commit()
#print("synced", synced)
return synced
def append_ma730(self, day, price, ma730, ma365, ma200):
with self.conn.cursor() as cursor:
sql_query = "SELECT COUNT(id) FROM `ma730v3a` WHERE unixdt=FROM_UNIXTIME(%s)"
cursor.execute(sql_query, (int(day),))
result = cursor.fetchone()
#print(dt_utc)
#print(result)
if result is not None:
if "COUNT(id)" in result:
ma730x5 = ma730*5
if result["COUNT(id)"] > 0:
sql_update = 'UPDATE ma730v3a SET `ma730`=%s, `ma730x5`=%s, `ma365`=%s, `ma200`=%s, `price`=%s, `unixdt`=FROM_UNIXTIME(%s) WHERE unixdt=FROM_UNIXTIME(%s)'
print(sql_update)
cursor.execute(sql_update, (ma730, ma730x5, ma365, ma200, price, int(day), int(day)))
else:
sql_insert = "INSERT INTO `ma730v3a` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
self.conn.commit()
def append_ma730_day(self, day, price, ma730, ma365, ma200):
with self.conn.cursor() as cursor:
ma730x5 = ma730*5
sql_insert = "INSERT INTO `ma730v3aday` (`unixdt`, `price`, `ma730`, `ma730x5`, `ma365`, `ma200`) VALUES (FROM_UNIXTIME(%s), %s, %s, %s, %s, %s)"
print(sql_insert)
cursor.execute(sql_insert, (day, price, ma730, ma730x5, ma365, ma200))
self.conn.commit()
def clean_ma730_day(self, day):
with self.conn.cursor() as cursor:
sql_clean = "DELETE from ma730v3aday where unixdt<FROM_UNIXTIME(%s)"
print(sql_clean)
cursor.execute(sql_clean, (int(day),))
self.conn.commit()
def get_history_price(dbif):
global g_prices
with open("eth_history_price.csv", newline='') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
print(row)
daystr = row["Date"]
p = row["Price"]
dayutc = time.mktime(time.strptime(daystr, "%m/%d/%Y"))
g_prices[str(int(dayutc))] = float(p)
'''response_price = requests.get(
'https://data.nasdaq.com/api/v3/datasets/BCHAIN/MKPRU.json?api_key=FZqXog4sR-b7cYnXcRVV')
if response_price.status_code == 200:
#print(response_price.content)
priceweb = ujson.loads(response_price.content)
if "dataset" in priceweb:
priceset = priceweb["dataset"]
if "data" in priceset:
pricedata = priceset["data"]
for price in pricedata:
daystr = price[0]
p = price[1]
dayutc = time.mktime(time.strptime(daystr, "%Y-%m-%d"))
g_prices[str(int(dayutc))] = float(p)
#print(price, int(dayutc), g_prices[str(int(dayutc))])
'''
return g_prices
def get_history_price2(dbif):
global g_prices
#pricedict = {}
dayt = time.gmtime()
daystr = time.strftime("%Y", dayt)
year = int(daystr)
end_year = year
while True:
url = ""
if end_year != year:
start_year = end_year
url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?start="
else:
url = "https://data.messari.io/api/v1/assets/ethereum/metrics/price/time-series?after=" + str(
year) + "-01-01&order=descending&interval=1d"
if end_year != year:
url = url + str(start_year) + "-01-01&end=" + str(end_year) + "-12-31&order=descending&interval=1d"
header_set = {}
header_set["x-messari-api-key"] = "aH2pyj5i4QGo1k1gLxXEbIJ5RJr+FYKLEWk6cRT6RuSc6lRY"
# header_set["Content-Type"] = "application/json"
print(header_set, url)
response_price = requests.get(url, headers=header_set)
# print(response_price)
if response_price.status_code == 200:
# print(response_price.content)
priceweb = ujson.loads(response_price.content)
if "data" in priceweb:
priceset = priceweb["data"]
if "values" in priceset:
valueset = priceset["values"]
if valueset is not None:
for supply in valueset:
dayutc = int(supply[0] / 1000)
s = supply[1]
ret_time = time.gmtime(dayutc)
ret_daystr = time.strftime("%d %b %Y", ret_time)
ret_dayutc = int(time.mktime(time.strptime(ret_daystr, "%d %b %Y")))
#self.pricedict[str(ret_dayutc)] = float(s)
g_prices[str(ret_dayutc)] = float(s)
# print(s, dayutc, pricedict[str(dayutc)])
# break
else:
break
else:
break
end_year -= 1
time.sleep(2)
#print(self.pricedict)
#return self.pricedict
get_history_price(dbif)
return g_prices
def get_coin_days(day):
birthday = time.mktime(time.strptime("2009-01-03", "%Y-%m-%d"))
days = (int(day) - birthday)/3600/24
#print(day, birthday, days)
return days
def get_coin_exp(days):
try:
temp = 5.84*math.log10(days)-17.01
#print("temp", temp, math.log10(days), days)
exp = math.pow(10,temp)
return exp
except:
return 0
def cal_day200_price(prices, day):
total = 0
cnt = 0
for i in range(200):
if day in prices:
total += prices[day]
cnt += 1
#print(day, total, cnt)
day = str(int(day) - 3600 * 24)
if cnt > 0:
return total/cnt
return 0
def cal_arh99(prices, day, price):
day200 = cal_day200_price(prices, day)
#print("day200", day200)
days = get_coin_days(day)
#print("days", days)
exp = get_coin_exp(days)
#print("exp", exp, price)
try:
arh99 = (float(price)/day200)*(float(price)/exp)
arh99x = (day200/float(price))*(exp/float(price))*3
except:
arh99 = 0
arh99x = 0
#print("arh99", arh99)
return arh99, arh99x
def check_sync(dbif):
return dbif.check_sync()
def append_arh99(dbif, day, price, arh99, arh99x):
dbif.append(day, price, arh99, arh99x)
def sync_arh99(dbif, prices):
for day in prices:
print(day, prices[day])
arh99, arh99x = cal_arh99(prices, int(day), prices[day])
print(day, arh99, arh99x)
append_arh99(dbif, day, prices[day], arh99, arh99x)
def append_arh99day(dbif, day, price, arh99, arh99x):
dbif.append_day(day, price, arh99, arh99x)
def clean_arh99day(dbif, day):
dbif.clean_day(day)
def arh99_handler(message):
global g_prices
global g_dbif
global g_lastts
coin_data = message["data"]
#coin_symbol = coin_data["s"]
coin_ts = int(coin_data["E"])
coin_price = float(coin_data["c"])
#print((coin_ts/1000), int((coin_ts/1000)%60))
if int((coin_ts/1000)%60) == 0:
#if coin_ts/1000/60 != g_lastts:
if coin_ts/1000 - g_lastts >= 15:
#print(coin_ts, coin_price)
coin_ts2 = time.gmtime(coin_ts/1000)
daystr = time.strftime("%d %b %Y", coin_ts2)
print(daystr)
dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
g_prices[str(dayutc)] = coin_price
arh99, arh99x = cal_arh99(g_prices, dayutc, coin_price)
print(dayutc, coin_price, arh99, arh99x)
append_arh99day(g_dbif, coin_ts/1000, coin_price, arh99, arh99x)
append_arh99(g_dbif, dayutc, coin_price, arh99, arh99x)
clean_day = dayutc - 3600*24*2
clean_arh99day(g_dbif, clean_day)
handle_jzr_day60(g_dbif, coin_ts/1000, dayutc, coin_price, g_prices)
handle_ma_day730(g_dbif, coin_ts / 1000, dayutc, coin_price, g_prices)
g_lastts = coin_ts/1000
def start_arh99(dbif, prices):
ws_client = WebsocketClient()
ws_client.start()
ws_client.instant_subscribe(
stream=['ethusdt@miniTicker'],
callback=arh99_handler,
)
def arh99():
global g_dbif
g_dbif = Arh99DbIf()
prices = get_history_price2(g_dbif)
#if not check_sync(g_dbif):
if True:
sync_arh99(g_dbif, prices)
#if not check_jzr60_sync(g_dbif):
if True:
sync_jzr_day60(g_dbif, prices)
#if not check_ma730_sync(g_dbif):
if True:
sync_ma_day730(g_dbif, prices)
start_arh99(g_dbif, prices)
#2-year ma multiplier
def get_day730_rise(day, prices):
total = 0
cnt = 0
for i in range(730):
if str(day) in prices:
cur_price = prices[str(day)]
total += cur_price
cnt += 1
day = str(day - 3600 * 24)
day = int(day)
if cnt > 0:
return total/cnt
return 0
def get_day365_rise(day, maxdays, prices):
total = 0
cnt = 0
for i in range(maxdays):
if str(day) in prices:
cur_price = prices[str(day)]
total += cur_price
cnt += 1
day = str(day - 3600 * 24)
day = int(day)
if cnt > 0:
return total/cnt
return 0
def append_ma_day730(dbif, day, price, day730_rise, day365_rise, day200_rise):
dbif.append_ma730(day, price, day730_rise, day365_rise, day200_rise)
def sync_ma_day730(dbif, prices):
for day in prices:
print(day, prices[day])
day730_rise = get_day730_rise(int(day), prices)
day365_rise = get_day365_rise(int(day), 365, prices)
day200_rise = get_day365_rise(int(day), 200, prices)
print(day, day730_rise)
append_ma_day730(dbif, day, prices[day], day730_rise, day365_rise, day200_rise)
def check_ma730_sync(dbif):
return dbif.check_ma730_sync()
def append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise):
dbif.append_ma730_day(day, price, day730_rise, day365_rise, day200_rise)
def append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise):
dbif.append_ma730(dayutc, price, day730_rise, day365_rise, day200_rise)
def clean_ma730day(dbif, clean_day):
dbif.clean_ma730_day(clean_day)
def handle_ma_day730(dbif, day, dayutc, price, prices):
day730_rise = get_day730_rise(dayutc, prices)
day365_rise = get_day365_rise(dayutc, 365, prices)
day200_rise = get_day365_rise(dayutc, 200, prices)
print(dayutc, price, day, day730_rise)
append_ma730day(dbif, day, price, day730_rise, day365_rise, day200_rise)
append_ma730(dbif, dayutc, price, day730_rise, day365_rise, day200_rise)
clean_day = dayutc - 3600 * 24 * 2
clean_ma730day(dbif, clean_day)
arh99()
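
The core of the file above is cal_arh99: the index is the product of two valuation ratios, price over its 200-day average (cal_day200_price) and price over an exponential age-based growth estimate (get_coin_exp). A minimal standalone restatement with made-up inputs is sketched below; the numbers are illustrative only and are not part of this commit.

# Standalone sketch of the quantity cal_arh99 computes (illustrative inputs only).
import math

def growth_estimate(age_days):
    # exponential growth valuation used by get_coin_exp: 10 ** (5.84*log10(days) - 17.01)
    return math.pow(10, 5.84 * math.log10(age_days) - 17.01)

def arh999(price, ma200, age_days):
    exp = growth_estimate(age_days)
    arh99 = (price / ma200) * (price / exp)       # low values: historically "cheap"
    arh99x = (ma200 / price) * (exp / price) * 3  # inverse variant stored alongside it
    return arh99, arh99x

# Hypothetical inputs: price 2000, 200-day average 1800, coin age 5000 days.
print(arh999(2000.0, 1800.0, 5000))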

184
coinbus/check_order_lyq.py Normal file

@@ -0,0 +1,184 @@
# coding=utf-8
import ujson
from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
from binance.spot import Spot
import time
import requests
import datetime
import pymysql
import math
import pymongo
g_spot_client = Spot()
class Pair:
def __init__(self):
pass
depth_u = 0
depth_U = 0
depth_ts = 0
bids = {}
asks = {}
g_btcusdt = None
g_ethusdt = None
def init_db():
mc = pymongo.MongoClient("mongodb://127.0.0.1:27020/")
mdb = mc["border2"]
return mc, mdb
def get_depth(client, pair):
new_pair = Pair()
d = client.depth(pair, limit=5000)
new_pair.bids = d["bids"]
new_pair.asks = d["asks"]
new_pair.depth_u = d["lastUpdateId"]
print(pair, ": get_depth: init", new_pair.depth_u)
#print(new_pair.bids)
return new_pair
def dict2number(dict_in):
dict_out = {}
#print("dict2number", dict_in)
for id in dict_in:
#print("dict2number", id)
#price = (int(float(id[0])) / 100) * 100
#price = float(id[0])
quantity = float(id[1])
#pricestr = str(price)
dict_out[id[0]] = quantity
return dict_out
def dict2save(mdb, pair, dict_in, ts):
mdbc = mdb[pair]
s_append = {}
s_append["unixdt"] = int(ts / 1000)
#cnt = 0
for id in dict_in:
# print(cnt, id)
#if cnt >= 50:
#break
# bids_append[id] = top_bids[id]
s_append[id[0]] = id[1]
#cnt += 1
print("dict2save", s_append)
mdbc.insert_one(s_append)
def classify_order(dict_in):
dict_out = {}
for id in dict_in:
price = int(int(float(id))/100)*100
pricestr = str(price)
if pricestr in dict_out:
dict_out[pricestr] = dict_out[pricestr]+dict_in[id]
else:
dict_out[pricestr] = dict_in[id]
return dict_out
def stat_order(pair, bids_in, asks_in, ts, old_ts):
print(pair, ": stat_order cmp", ts, old_ts)
if ts - old_ts < 1000 * 60 * 5:
return False
bids = dict2number(bids_in)
asks = dict2number(asks_in)
bids_classify = classify_order(bids)
asks_classify = classify_order(asks)
print("bids_classify", bids_classify)
top_bids = sorted(bids_classify.items(), key=lambda x: x[1], reverse=False)
top_asks = sorted(asks_classify.items(), key=lambda x: x[1], reverse=False)
print("top_bids", top_bids)
mc, mdb = init_db()
dict2save(mdb, pair + "_bids", top_bids, ts)
dict2save(mdb, pair + "_asks", top_asks, ts)
print(pair, ": stat_order OK at", ts)
return True
def merge_order(dst, src):
new_dst = []
for dst_item in dst:
found = False
for src_item in src:
#print("dst", dst_item, "src", src_item)
if dst_item[0] == src_item[0]:
new_dst.append(src_item)
found = True
break
if found is False:
#print("merge_order dst copy", dst_item)
new_dst.append(dst_item)
return new_dst
def handler_order(pair, pair_name, msg_in, client):
ts = msg_in["E"]
dU = msg_in["U"]
du = msg_in["u"]
need_reinit = False
if pair is not None:
if (dU == pair.depth_u + 1) or (
(du > pair.depth_u) and (pair.depth_ts == 0) and (pair.depth_u != 0)):
bids = msg_in["b"]
asks = msg_in["a"]
#print("merge_order dst", pair.bids)
#print("merge_order src", bids)
#print("handle", pair_name, ts, dU, du, pair.depth_u)
pair.bids = merge_order(pair.bids, bids)
pair.asks = merge_order(pair.asks, asks)
pair.depth_U = dU
pair.depth_u = du
if stat_order(pair_name, pair.bids, pair.asks, ts, pair.depth_ts):
pair.depth_ts = ts
print(pair_name, ": append", du)
else:
if (dU != pair.depth_u + 1) and (pair.depth_u != 0):
need_reinit = True
else:
pass
if need_reinit:
pair = get_depth(client, pair_name)
print(pair_name, ": reinit", pair.depth_u, dU, pair.depth_ts)
return pair
def order_handler(message):
#print(message)
global g_spot_client
global g_btcusdt
global g_ethusdt
if message["stream"] == "btcusdt@depth":
ddata = message["data"]
if ddata["e"] == "depthUpdate":
g_btcusdt = handler_order(g_btcusdt, "BTCUSDT", ddata, g_spot_client)
elif message["stream"] == "ethusdt@depth":
ddata = message["data"]
if ddata["e"] == "depthUpdate":
g_ethusdt = handler_order(g_ethusdt, "ETHUSDT", ddata, g_spot_client)
else:
pass
def check_order():
global g_spot_client
global g_btcusdt
global g_ethusdt
ws_client = WebsocketClient()
ws_client.start()
ws_client.instant_subscribe(
stream=['btcusdt@depth', 'ethusdt@depth'],
callback=order_handler,
)
g_btcusdt = get_depth(g_spot_client, "BTCUSDT")
g_ethusdt = get_depth(g_spot_client, "ETHUSDT")
check_order()
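
Before saving, check_order_lyq.py compresses the raw depth levels: classify_order sums quantities into 100-USDT price buckets, and stat_order persists the sorted result at most once every five minutes. A standalone illustration of the bucketing, with hypothetical levels, is sketched below.

# Minimal standalone illustration of classify_order above:
# order-book levels are bucketed into 100-USDT price bins and their quantities summed.
def classify_order(levels):
    out = {}
    for price_str, qty in levels.items():
        bucket = str(int(int(float(price_str)) / 100) * 100)  # floor price to its 100-USDT bin
        out[bucket] = out.get(bucket, 0) + qty
    return out

# Hypothetical levels: price -> quantity (the same shape dict2number produces).
print(classify_order({"27123.5": 0.5, "27180.0": 1.25, "27250.0": 0.75}))
# -> {'27100': 1.75, '27200': 0.75}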

146
coinbus/check_zone_lyq.py Normal file

@@ -0,0 +1,146 @@
# coding=utf-8
import ujson
#from binance.websocket.spot.websocket_client import SpotWebsocketClient as WebsocketClient
from binance.spot import Spot
import time
import requests
import datetime
import pymysql
import math
#import pymongo
g_btcusdt_prices = {}
g_ethusdt_prices = {}
class ZoneDbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="btcdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init zone db suceess!")
def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
with self.conn.cursor() as cursor:
print(
dayutc, change_us, change_asia, change_eu)
sql_insert = "REPLACE INTO btczonechange3 (unixdt, change_us, change_asia, change_eu"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
cursor.execute(sql_insert, (
dayutc, change_us, change_asia, change_eu))
self.conn.commit()
class EthZoneDbIf:
def __init__(self, host="172.17.0.1", port=4423, user="root", password="2GS@bPYcgiMyL14A", dbname="ethdb"):
self.conn = pymysql.connect(host=host, port=port, user=user, password=password, database=dbname, cursorclass=pymysql.cursors.DictCursor)
print("init zone db suceess!")
def save_zone_change(self, dayutc, change_us, change_asia, change_eu):
with self.conn.cursor() as cursor:
print(
dayutc, change_us, change_asia, change_eu)
sql_insert = "REPLACE INTO ethzonechange3 (unixdt, change_us, change_asia, change_eu"
sql_insert = sql_insert + ") VALUES (FROM_UNIXTIME(%s), %s, %s, %s)"
cursor.execute(sql_insert, (
dayutc, change_us, change_asia, change_eu))
self.conn.commit()
def get_history_price(spot_client, pair_name):
result = spot_client.klines(pair_name, "1h", limit=1000)
prices_open = {}
prices_close = {}
for price in result:
prices_open[str(price[0])] = float(price[1])
prices_close[str(price[0])] = float(price[4])
open_out = sorted(prices_open.items(), reverse=True)
close_out = sorted(prices_close.items(), reverse=True)
return open_out, close_out, prices_open, prices_close
def get_last_price(spot_client, pair_name, cache_open, cache_close):
result = spot_client.klines(pair_name, "1h", limit=1)
for price in result:
cache_open[str(price[0])] = float(price[1])
cache_close[str(price[0])] = float(price[4])
open_out = sorted(cache_open.items(), reverse=True)
close_out = sorted(cache_close.items(), reverse=True)
return open_out, close_out, cache_open, cache_close
def calc_zone(prices_open, price_close, zone_start, zone_end):
zone_total = 30*24
zone_hours = 0
zones = {}
price_start = 0
price_end = 0
dt_start = None
item_idx = 0
for dt in prices_open:
tobj = time.gmtime(int(dt[0]) / 1000)
if tobj.tm_hour == zone_start:
price_start = dt[1]
dt_start = tobj
if zone_hours == 0 and tobj.tm_hour < zone_end:
zone_total = zone_total + tobj.tm_hour + 1
close_list = price_close[item_idx]
price_end = close_list[1]
else:
if tobj.tm_hour == zone_end:
close_list = price_close[item_idx]
price_end = close_list[1]
if price_start > 0 and price_end > 0:
#zones[dt_end] = (price_end-price_start)/price_start
daystr = time.strftime("%d %b %Y", dt_start)
dayutc = int(time.mktime(time.strptime(daystr, "%d %b %Y")))
zones[str(dayutc)] = price_end - price_start
price_start = 0
price_end = 0
item_idx = item_idx + 1
zone_hours = zone_hours + 1
if zone_hours >= zone_total:
break
return zones
def check_zone():
dbif = ZoneDbIf()
ethdbif = EthZoneDbIf()
spot_client = Spot()
prices_open, prices_close, cache_open, cache_close = get_history_price(spot_client, "BTCUSDT")
prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_history_price(spot_client, "ETHUSDT")
prev_tm = time.gmtime(time.time())
print("update", prev_tm.tm_hour)
while True:
zone_asia = calc_zone(prices_open, prices_close, 0, 12)
zone_eu = calc_zone(prices_open, prices_close, 6, 18)
zone_us = calc_zone(prices_open, prices_close, 12, 0)
zone_asia_eth = calc_zone(prices_open_eth, prices_close_eth, 0, 12)
zone_eu_eth = calc_zone(prices_open_eth, prices_close_eth, 6, 18)
zone_us_eth = calc_zone(prices_open_eth, prices_close_eth, 12, 0)
#print(zone_asia)
#print(zone_eu)
#print(zone_us)
for dt in zone_asia:
change_us = 0
change_eu = 0
if dt in zone_us:
change_us = zone_us[dt]
if dt in zone_eu:
change_eu = zone_eu[dt]
dbif.save_zone_change(dt, change_us, zone_asia[dt], change_eu)
change_us_eth = 0
change_eu_eth = 0
if dt in zone_us_eth:
change_us_eth = zone_us_eth[dt]
if dt in zone_eu_eth:
change_eu_eth = zone_eu_eth[dt]
ethdbif.save_zone_change(dt, change_us_eth, zone_asia_eth[dt], change_eu_eth)
while True:
time.sleep(60)
cur_tm = time.gmtime(time.time())
if cur_tm.tm_hour != prev_tm.tm_hour:
prev_tm = cur_tm
time.sleep(60)
prices_open, prices_close, cache_open, cache_close = get_last_price(spot_client, "BTCUSDT", cache_open, cache_close)
prices_open_eth, prices_close_eth, cache_open_eth, cache_close_eth = get_last_price(spot_client, "ETHUSDT", cache_open_eth,
cache_close_eth)
print("update", cur_tm.tm_hour)
break
check_zone()
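
check_zone splits each day into three overlapping UTC sessions, Asia 00:00 to 12:00, Europe 06:00 to 18:00 and US 12:00 to 00:00, and calc_zone stores, per day, the close-minus-open price change of each window. The simplified standalone sketch below uses hypothetical hourly candles for a single day; the real function walks backwards over roughly 30 days of klines and also handles the window that crosses midnight.

# Simplified sketch of the per-day quantity one calc_zone window produces.
def zone_change(hourly_open, hourly_close, start_hour, end_hour):
    # hourly_open/hourly_close: dicts keyed by UTC hour of a single day (0-23)
    return hourly_close[end_hour] - hourly_open[start_hour]

# Hypothetical candles for one day (hour -> price); the US window (12 -> 00 next day)
# is omitted here because it spans two calendar days.
opens  = {0: 27000.0, 6: 27100.0, 12: 27200.0}
closes = {12: 27250.0, 18: 27300.0, 23: 27350.0}
print("asia:", zone_change(opens, closes, 0, 12))   # 250.0
print("eu:  ", zone_change(opens, closes, 6, 18))   # 200.0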

117
coinbus/exchangeRate_lyq.py Normal file

@@ -0,0 +1,117 @@
import requests
import pymysql
import time
from datetime import datetime

# Target currencies (RUB stays in this list so the handling is uniform)
symbols = ["EUR", "GBP", "JPY", "CAD", "SEK", "CHF", "CNY", "RUB"]

# Database settings
db_config = {
    "host": "127.0.0.1",
    "user": "root",
    "password": "2GS@bPYcgiMyL14A",
    "database": "Macroeconomics",
    "port": 4423
}

def fetch_rates_frankfurter():
    base = "USD"
    url = f"https://api.frankfurter.app/latest?from={base}&to=" + ",".join([s for s in symbols if s != "RUB"])
    retries = 5
    while retries > 0:
        try:
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            data = response.json()
            rates = data.get("rates", {})
            if not rates:
                raise ValueError("API returned no data")
            return rates
        except Exception as e:
            retries -= 1
            print(f"Frankfurter request failed, retrying... attempts left: {retries}, error: {e}")
            time.sleep(1)
    print("Frankfurter failed after repeated retries, returning empty data")
    return {}

def fetch_rub():
    try:
        url = "https://open.er-api.com/v6/latest/USD"
        response = requests.get(url, timeout=10)
        data = response.json()
        if data.get("result") == "success":
            rub_rate = data["rates"].get("RUB")
            if rub_rate:
                return rub_rate
    except Exception as e:
        print(f"Failed to fetch RUB: {e}")
    return None

def calc_dxy(rates):
    weights = {
        "EUR": 0.576,
        "JPY": 0.136,
        "GBP": 0.119,
        "CAD": 0.091,
        "SEK": 0.042,
        "CHF": 0.036
    }
    weighted_sum = 0
    weight_total = 0
    for ccy, w in weights.items():
        rate = rates.get(ccy)
        if rate:
            weighted_sum += rate * w
            weight_total += w
    if weight_total > 0:
        return weighted_sum / weight_total
    return None

def save_to_db(rates, dxy):
    current_time = datetime.now().replace(second=0, microsecond=0)
    data = {}
    for ccy in symbols:
        rate = rates.get(ccy)
        if rate is not None:
            data[f"USD{ccy}"] = round(rate, 5)
    if dxy is not None:
        data["DXY"] = round(dxy, 5)
    connection = pymysql.connect(**db_config)
    try:
        with connection.cursor() as cursor:
            for symbol, value in data.items():
                query = """INSERT INTO exchangeRate (date, symbol, _value) VALUES (%s, %s, %s)"""
                cursor.execute(query, (current_time, symbol, value))
        connection.commit()
        print(f"{current_time} data written to the database")
    except Exception as e:
        print(f"Failed to write to the database: {e}")
    finally:
        connection.close()

if __name__ == "__main__":
    while True:
        rates = fetch_rates_frankfurter()
        # Fetch the RUB rate separately
        rub = fetch_rub()
        if rub:
            rates["RUB"] = rub
        else:
            print("No RUB rate received")
        if rates:
            dxy = calc_dxy(rates)
            print(f"Exchange rates: {rates}")
            if dxy:
                print(f"Approximate US dollar index: {dxy:.5f}")
            else:
                print("The approximate US dollar index could not be calculated")
            save_to_db(rates, dxy)
        else:
            print("No exchange-rate data received")
        time.sleep(1800)  # run every 30 minutes
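
calc_dxy above approximates the dollar index with an arithmetic weighted average of USD exchange rates. The index as usually quoted is a geometric weighted product; a sketch of that commonly cited formula is below for comparison. The constant and exponents are the widely published values, not something defined in this commit, so treat the sketch as a reference point rather than a drop-in replacement.

# Sketch of the commonly cited geometric DXY formula (weights match calc_dxy above).
# rates are USD->CCY quotes, the same shape fetch_rates_frankfurter returns.
def dxy_geometric(rates):
    eurusd = 1.0 / rates["EUR"]   # invert USD->EUR into EUR->USD
    gbpusd = 1.0 / rates["GBP"]   # invert USD->GBP into GBP->USD
    return (50.14348112
            * eurusd ** -0.576
            * rates["JPY"] ** 0.136
            * gbpusd ** -0.119
            * rates["CAD"] ** 0.091
            * rates["SEK"] ** 0.042
            * rates["CHF"] ** 0.036)

# Hypothetical usage:
# print(dxy_geometric({"EUR": 0.92, "JPY": 150.0, "GBP": 0.79, "CAD": 1.36, "SEK": 10.5, "CHF": 0.88}))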