宏观经济部分代码更新
This commit is contained in:
parent
5964a7fb8b
commit
7fb7b07815
|
@ -0,0 +1,83 @@
|
||||||
|
import requests
|
||||||
|
import pymysql
|
||||||
|
from datetime import datetime
|
||||||
|
import time
|
||||||
|
|
||||||
|
def get_bea_data(year):
    """Fetch quarterly GDP-by-industry records from the BEA API for *year*.

    Returns the list of data records from the JSON response.
    Raises requests.HTTPError on a non-2xx response and
    requests.Timeout if the API does not answer within 60 s.
    """
    # NOTE(review): the UserID (API key) is hardcoded in source — move it to
    # configuration / an environment variable.
    url = (
        "https://apps.bea.gov/api/data?&UserID=146B5757-D9E3-442C-B6AC-ADE9E6B71114"
        "&method=GetData&DataSetName=GDPbyIndustry&Year=%s&Industry=ALL"
        "&tableID=15&Frequency=Q&ResultFormat=JSON" % year
    )
    # Timeout so a stalled connection cannot hang the daily loop forever;
    # raise_for_status so HTTP errors surface instead of producing bad JSON.
    response = requests.get(url, timeout=60)
    response.raise_for_status()
    return response.json()['BEAAPI']['Results'][0]['Data']
|
||||||
|
|
||||||
|
def update_database(cursor, data):
    """Upsert BEA GDP-by-industry records into the COVITGDP table.

    Parameters
    ----------
    cursor : DB-API cursor used for all statements (the caller commits).
    data : iterable of BEA records, each a mapping with keys
        "Year", "Quarter", "IndustrYDescription" (BEA's actual, oddly
        cased field name) and "DataValue".

    Column names come only from the fixed whitelist below, never from the
    API payload, so the f-string SQL here is not injectable; all values go
    through parameter placeholders.
    """
    industry_map = {
        'Agriculture, forestry, fishing, and hunting': 'VAPGDPAFH',
        'Mining': 'VAPGDPM',
        'Construction': 'VAPGDPC',
        'Manufacturing': 'VAPGDPMA',
        'Retail trade': 'VAPGDPR',
        'Wholesale trade': 'VAPGDPW',
        'Utilities': 'VAPGDPU',
        'Transportation and warehousing': 'VAPGDPT',
        'Information': 'VAPGDPI',
        'Finance, insurance, real estate, rental, and leasing': 'VAPGDPFIRL',
        'Professional and business services': 'VAPGDPPBS',
        'Educational services, health care, and social assistance': 'VAPGDPHCSA',
        'Arts, entertainment, recreation, accommodation, and food services': 'VAPGDPAF',
        'Other services, except government': 'CPGDPOSEG',
        'Government': 'Federation',
        'State and local': 'State_local'
    }

    for entry in data:
        new_time = f"{entry['Year']}Q{entry['Quarter']}"
        industry = entry["IndustrYDescription"]
        value = entry["DataValue"]

        if industry not in industry_map:
            continue  # industries outside the whitelist are ignored
        column = industry_map[industry]

        # One query both tests row existence and fetches the stored value
        # (the original issued two SELECTs per record).
        cursor.execute(f"SELECT {column} FROM COVITGDP WHERE quarterly = %s", (new_time,))
        row = cursor.fetchone()

        if row is None:
            # No row for this quarter yet; ON DUPLICATE KEY makes the insert
            # safe against races with a concurrent writer (the original
            # special-cased VAPGDPAFH with a plain INSERT for no benefit).
            cursor.execute(
                f"INSERT INTO COVITGDP (quarterly, {column}) VALUES (%s, %s) "
                f"ON DUPLICATE KEY UPDATE {column} = VALUES({column})",
                (new_time, value),
            )
        elif row[0] != value:
            cursor.execute(
                f"UPDATE COVITGDP SET {column} = %s WHERE quarterly = %s",
                (value, new_time),
            )
        else:
            print(f"No update needed for {column} for {new_time}")
|
||||||
|
|
||||||
|
def main():
    """Daily loop: fetch the current year's BEA data and sync it to MySQL.

    Exits the loop permanently on a database error; any other error is
    logged and retried on the next daily cycle.
    """
    while True:
        # Track the calendar year each cycle (was hardcoded to 2025 and
        # would silently go stale at year end).
        years = datetime.now().year
        try:
            # NOTE(review): credentials are hardcoded in source — move to config.
            db = pymysql.connect(host="127.0.0.1", user="root",
                                 password="2GS@bPYcgiMyL14A",
                                 database="Macroeconomics", port=4423)
            cursor = db.cursor()

            data = get_bea_data(years)
            update_database(cursor, data)
            db.commit()
        except pymysql.MySQLError as e:
            print(f"Database connection error: {e}")
            break
        except Exception as e:
            print(f"An error occurred: {e}")
        finally:
            # cursor/db may not exist if connect() itself failed.
            if 'cursor' in locals():
                cursor.close()
            if 'db' in locals():
                db.close()

        time.sleep(86400)  # run once a day
|
||||||
|
|
||||||
|
# Script entry point: start the daily BEA update loop.
if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,101 @@
|
||||||
|
import time
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import pymysql
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
# Function to fetch data from BLS API
|
||||||
|
def fetch_data(series_ids, start_year="2024", end_year="2024"):
    """POST a BLS v2 timeseries request; return the parsed JSON, or None on failure.

    Parameters
    ----------
    series_ids : list of BLS series ID strings.
    start_year, end_year : requested year range as strings; defaults match
        the previously hardcoded values, so existing callers are unaffected.
    """
    headers = {'Content-type': 'application/json'}
    payload = json.dumps({"seriesid": series_ids,
                          "startyear": start_year, "endyear": end_year})
    try:
        # Timeout so a stalled API cannot hang the daily loop.
        response = requests.post('https://api.bls.gov/publicAPI/v2/timeseries/data/',
                                 data=payload, headers=headers, timeout=60)
        response.raise_for_status()  # raise on HTTP errors
        return json.loads(response.text)
    except requests.exceptions.RequestException as e:
        # Was silently swallowed; at least leave a trace before returning None
        # (callers already handle the None case).
        print(f"BLS request failed: {e}")
        return None
|
||||||
|
|
||||||
|
# Function to convert BLS period format to datetime
|
||||||
|
def convert_date(year, period):
    """Turn a BLS year plus period code (e.g. 'M05') into a datetime.

    The period's leading 'M' is dropped and the result is pinned to the
    first day of that month.
    """
    month = period.replace('M', '')
    return datetime.strptime(f"{year}/{month}/01", '%Y/%m/%d')
|
||||||
|
|
||||||
|
# Function to insert data into MySQL database
|
||||||
|
def insert_data(cursor, table_name, date, name, value):
    """Insert a single (date, name, value) row into *table_name*.

    The table name is interpolated (callers pass fixed internal names);
    all values go through parameter placeholders. The caller commits.
    """
    sql = f"INSERT INTO {table_name}(date, name, value) VALUES (%s, %s, %s)"
    cursor.execute(sql, (date, name, value))
|
||||||
|
|
||||||
|
# Function to process series data and insert into the database
|
||||||
|
def process_series_data(json_data, table_name, names):
    """Walk BLS series results and insert any missing (date, name) rows.

    Parameters
    ----------
    json_data : merged BLS payload shaped {'Results': {'series': [...]}}.
    table_name : destination table (fixed internal name, e.g. 'CPI_NSA').
    names : labels positionally aligned with the series list; indexes past
        the end fall back to "Unknown <i>".
    """
    # NOTE(review): credentials are hardcoded in source — move to config.
    db = pymysql.connect(host="127.0.0.1", user="root",
                         password="2GS@bPYcgiMyL14A",
                         database="Macroeconomics", port=4423)
    try:
        cursor = db.cursor()
        for i, series in enumerate(json_data['Results']['series']):
            # Process observations in chronological order.
            for data_point in sorted(series['data'], key=lambda x: (x['year'], x['period'])):
                date = convert_date(data_point['year'], data_point['period'])
                name = names[i] if i < len(names) else f"Unknown {i}"

                # Insert only observations not already stored.
                cursor.execute(
                    f"SELECT COUNT(*) FROM {table_name} WHERE date = %s AND name = %s",
                    (date, name),
                )
                if cursor.fetchone()[0] == 0:
                    insert_data(cursor, table_name, date, name, data_point['value'])
                    db.commit()
        cursor.close()
    finally:
        # The original leaked the connection when any exception escaped
        # mid-loop; always release it.
        db.close()
|
||||||
|
|
||||||
|
# Function to merge JSON data
|
||||||
|
def merge_json_data(json_data_list):
    """Combine several BLS responses into one {'Results': {'series': [...]}}.

    Entries that are None or lack the expected structure (e.g. failed
    fetches) are skipped; series order follows the input order.
    """
    combined = {'Results': {'series': []}}
    for payload in json_data_list:
        if payload and 'Results' in payload and 'series' in payload['Results']:
            combined['Results']['series'].extend(payload['Results']['series'])
    return combined
|
||||||
|
|
||||||
|
# Main script logic
|
||||||
|
# Main script logic: refresh the CPI_NSA / CPI_SA tables once a day, forever.
while True:
    # BLS caps the number of series per request, so the CUUR
    # (not-seasonally-adjusted) series IDs are split across two calls.
    series_ids1 = [
        'CUUR0000SA0', 'CUUR0000SAF1', 'CUUR0000SAF11', 'CUUR0000SAF111', 'CUUR0000SAF112', 'CUUR0000SEFJ',
        'CUUR0000SAF113', 'CUUR0000SAF114', 'CUUR0000SEFV', 'CUUR0000SA0E', 'CUUR0000SACE', 'CUUR0000SEHE01',
        'CUUR0000SETB', 'CUUR0000SETB01', 'CUUR0000SEHF', 'CUUR0000SEHF01', 'CUUR0000SEHF02'
    ]
    series_ids2 = [
        'CUUR0000SA0L1E', 'CUUR0000SACL1E', 'CUUR0000SAA', 'CUUR0000SETA01', 'CUUR0000SETA02', 'CUUR0000SAM1',
        'CUUR0000SAF116', 'CUUR0000SEGA', 'CUUR0000SASLE', 'CUUR0000SAH1', 'CUUR0000SEHA', 'CUUR0000SEHC',
        'CUUR0000SAM2', 'CUUR0000SEMC01', 'CUUR0000SEMD01', 'CUUR0000SAS4', 'CUUR0000SETD', 'CUUR0000SETE',
        'CUUR0000SETG01'
    ]
    # CUSR-prefixed IDs are the seasonally adjusted counterparts of the same series.
    series_ids3 = [s.replace('CUUR', 'CUSR') for s in series_ids1]
    series_ids4 = [s.replace('CUUR', 'CUSR') for s in series_ids2]

    json_data1 = fetch_data(series_ids1)
    json_data2 = fetch_data(series_ids2)
    json_data3 = fetch_data(series_ids3)
    json_data4 = fetch_data(series_ids4)

    # Recombine the split responses per adjustment type.
    combined_json_data_NSA = merge_json_data([json_data1, json_data2])
    combined_json_data_SA = merge_json_data([json_data3, json_data4])

    # Human-readable labels, positionally aligned with series_ids1 + series_ids2
    # (and therefore with the CUSR lists as well).
    names = [
        'All items', 'Food', 'Food at home', 'Cereals and bakery products', 'Meats, poultry, fish, and eggs',
        'Dairy and related products', 'Fruits and vegetables', 'Nonalcoholic beverages and beverage materials',
        'Food away from home', 'Energy', 'Energy commodities', 'Fuel oil', 'Motor fuel', 'Gasoline (all types)',
        'Energy services', 'Electricity', 'Utility (piped) gas service', 'All items less food and energy',
        'Commodities less food and energy commodities', 'Apparel', 'New vehicles', 'Used cars and trucks',
        'Medical care commodities', 'Alcoholic beverages', 'Tobacco and smoking products',
        'Services less energy services', 'Shelter', 'Rent of primary residence', "Owners equivalent rent of residences",
        'Medical care services', "Physicians services", 'Hospital services', 'Transportation services',
        'Motor vehicle maintenance and repair', 'Motor vehicle insurance', 'Airline fares'
    ]

    # fetch_data returns None on failure, so guard before touching the payload.
    if combined_json_data_NSA and 'Results' in combined_json_data_NSA and 'series' in combined_json_data_NSA['Results']:
        process_series_data(combined_json_data_NSA, 'CPI_NSA', names)

    if combined_json_data_SA and 'Results' in combined_json_data_SA and 'series' in combined_json_data_SA['Results']:
        process_series_data(combined_json_data_SA, 'CPI_SA', names)

    time.sleep(86400)  # once per day
|
|
@ -0,0 +1,98 @@
|
||||||
|
import requests
|
||||||
|
import pymysql
|
||||||
|
from datetime import datetime
|
||||||
|
from w3lib.html import remove_tags
|
||||||
|
import pandas as pd
|
||||||
|
import time
|
||||||
|
|
||||||
|
def parse_treasury_data(data):
    """Parse tokenized Treasury TIC table text into a DataFrame.

    *data* is the page text split on whitespace. The header row is located
    at the "Country" token followed by 13 month columns; each subsequent
    record is one or more non-numeric name tokens followed by 13 numeric
    strings. Trailing records with fewer than 13 values are dropped.
    Returns a DataFrame whose first column is the (cleaned) country name.
    """
    start = data.index("Country")
    columns = data[start:start + 14]   # "Country" + 13 month columns
    tokens = data[start + 14:]

    def _is_number(tok):
        # Numeric token test: digits with at most one decimal point.
        return tok.replace('.', '', 1).isdigit()

    records = []
    pos = 0
    total = len(tokens)
    while pos < total:
        # Multi-word country names span several tokens until a number appears.
        name_parts = []
        while pos < total and not _is_number(tokens[pos]):
            name_parts.append(tokens[pos])
            pos += 1
        country = " ".join(name_parts).replace(",", "")

        # Consume the 13 monthly values for this country.
        values = tokens[pos:pos + 13]
        pos += 13
        if len(values) == 13:
            records.append([country] + values)

    df = pd.DataFrame(records, columns=columns)

    # Normalize the "Of Which:" sub-category labels.
    rename_map = {
        "Of Which: Foreign Official": "Foreign Official",
        "Of Which: Foreign Official Treasury Bills": "Treasury Bills",
        "Of Which: Foreign Official T-Bonds & Notes": "T-Bonds & Notes",
    }
    df["Country"] = df["Country"].replace(rename_map)

    return df
|
||||||
|
|
||||||
|
|
||||||
|
def run_job():
    """Scrape Treasury TIC table 5 and append any months missing from the FBI table."""
    print("=== 开始爬取并更新数据库 ===")

    # Fetch the page, strip all HTML tags, and split into whitespace tokens
    # for parse_treasury_data.
    page = requests.get("https://ticdata.treasury.gov/resource-center/data-chart-center/tic/Documents/slt_table5.html")
    page = remove_tags(str(page.text))
    page = page.split()

    df = parse_treasury_data(page)

    # Connect to the database.
    # NOTE(review): credentials are hardcoded in source — move to config.
    db = pymysql.connect(
        host="127.0.0.1",
        user="root",
        password="2GS@bPYcgiMyL14A",
        database="Macroeconomics",
        port=4423
    )
    cursor = db.cursor()

    # Latest date already stored (a datetime, or None when the table is empty).
    cursor.execute("SELECT date FROM FBI ORDER BY date DESC LIMIT 1")
    result = cursor.fetchone()
    latest_date_in_db = result[0] if result else None

    # Back-fill: insert every month column newer than what the DB holds.
    for col in df.columns[1:]:  # all month columns
        # assumes column headers are 'YYYY-MM' strings — TODO confirm against the page
        col_date = datetime.strptime(col, "%Y-%m")

        # Month already present in the database — skip it.
        if latest_date_in_db and col_date <= latest_date_in_db:
            continue

        print(f"正在插入 {col} 的数据...")
        insert_sql = "INSERT INTO FBI (date, name, value) VALUES (%s, %s, %s)"
        for _, row in df.iterrows():
            country = row["Country"]
            value = row[col]
            # Dates are normalized to the first of the month.
            cursor.execute(insert_sql, (col_date.strftime("%Y-%m-01"), country, value))

        db.commit()  # commit per month so a crash loses at most one month
        print(f"{col} 插入完成")

    cursor.close()
    db.close()
    print("=== 本次任务完成 ===\n")
|
||||||
|
|
||||||
|
|
||||||
|
# =================== 循环执行 =====================
|
||||||
|
# Entry point: scrape and update forever, once every 6 hours.
if __name__ == "__main__":
    while True:
        run_job()
        print("休眠 21600 秒(6 小时)...\n")
        time.sleep(21600)  # 6 hours
|
|
@ -0,0 +1,89 @@
|
||||||
|
import time
|
||||||
|
import requests
|
||||||
|
import pymysql
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from w3lib.html import remove_tags
|
||||||
|
import datetime
|
||||||
|
"""Scrape the Fed's international summary page and maintain the FER table.

Every 6 hours: refresh the previous month's (possibly revised) figure and
insert the newest month once, marked preliminary with a trailing '*'.
"""
from datetime import datetime

# Month-abbreviation -> '/M/01' fragment, replacing 24 duplicated
# .replace(...) chains in the original.
_MONTHS = {"Jan": "1", "Feb": "2", "Mar": "3", "Apr": "4", "May": "5",
           "Jun": "6", "Jul": "7", "Aug": "8", "Sep": "9", "Oct": "10",
           "Nov": "11", "Dec": "12"}


def _clean_value(cell):
    """Extract a numeric string from a table cell tag (strip tags, commas, spaces)."""
    return remove_tags(str(cell)).replace(",", "").replace(" ", "")


def _parse_fed_date(cell):
    """Parse a header cell like 'Jan 2024/p' into a first-of-month datetime.

    '/r' (revised) and '/p' (preliminary) markers are stripped; the text is
    rearranged from 'Mon ... YYYY' into 'YYYY/M/01' before parsing.
    """
    raw = remove_tags(str(cell)).replace(" ", "").replace("/r", "").replace("/p", "")
    raw = raw[-4:] + raw[0:3]  # -> 'YYYYMon'
    for abbr, num in _MONTHS.items():
        raw = raw.replace(abbr, "/%s/01" % num)
    return datetime.strptime(raw, '%Y/%m/%d')


while True:
    try:
        page = requests.get("https://www.federalreserve.gov/data/intlsumm/current.htm")
        soup = BeautifulSoup(page.text, 'html.parser')
        headers = soup.find_all('th', class_="colorrev")
        cells = soup.find_all('td', class_="shadedata1")

        value1 = _clean_value(cells[-1])       # most recent observation
        date1 = _parse_fed_date(headers[-1])
        value2 = _clean_value(cells[-2])       # previous (now-final) observation
        date2 = _parse_fed_date(headers[-2])

        # NOTE(review): credentials are hardcoded in source — move to config.
        db = pymysql.connect(host="127.0.0.1", user="root",
                             password="2GS@bPYcgiMyL14A",
                             database="Macroeconomics", port=4423)
        cursor = db.cursor()
        cursor.execute("select date from FER order by date desc limit 1")
        latest = cursor.fetchall()[0][0]

        # Refresh the previous month's figure (parameterized; the original
        # built SQL with % interpolation and hand-rolled quoting).
        cursor.execute("update FER set FER=%s where date=%s", (value2, date2))
        db.commit()

        # Insert the newest month once; '*' marks it preliminary.
        if date1 != latest:
            cursor.execute("insert into FER(date,FER)values(%s,%s)",
                           (date1, value1 + '*'))
            db.commit()
        db.close()
        time.sleep(21600)  # 6 hours
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; back off briefly and retry.
        time.sleep(30)
        continue
|
||||||
|
|
|
@ -0,0 +1,252 @@
|
||||||
|
import time
|
||||||
|
from full_fred.fred import Fred
|
||||||
|
import pymysql
|
||||||
|
import requests
|
||||||
|
from datetime import datetime
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
from w3lib.html import remove_tags
|
||||||
|
# Every 2 hours, mirror the latest value of several interest-rate series
# (FRED, SF Fed PFR, NY Fed repo, USD LIBOR) into the InterestRate table.
from datetime import datetime

# The six FRED series handled identically below (replaces six copy-pasted
# near-identical code blocks in the original).
_FRED_SERIES = ['DFEDTARU', 'DFEDTARL', 'FEDFUNDS', 'IORB', 'RRPONTSYAWARD', 'SOFR']

while True:
    fred = Fred('example_key.txt')
    fred.set_api_key_file('example_key.txt')

    # NOTE(review): credentials are hardcoded in source — move to config.
    db = pymysql.connect(host="127.0.0.1", user="root",
                         password="2GS@bPYcgiMyL14A",
                         database="Macroeconomics", port=4423)
    cursor = db.cursor()

    # --- Latest observation of each FRED series ----------------------------
    for series_name in _FRED_SERIES:
        frame = fred.get_series_df(series_name)
        latest_value = list(frame['value'])[-1]
        latest_date = datetime.strptime(
            list(frame['date'])[-1].replace('-', '/'), '%Y/%m/%d')

        cursor.execute("select date from InterestRate where name=%s", (series_name,))
        rows = cursor.fetchall()
        # Insert only when the newest FRED date differs from the last stored one.
        if rows[-1][0] != latest_date:
            cursor.execute(
                "insert into InterestRate(date,name,_value)values(%s,%s,%s)",
                (latest_date, series_name, latest_value))
            db.commit()

    # --- SF Fed proxy funds rate (PFR) from the published CSV --------------
    csv_tokens = requests.get(
        "https://www.frbsf.org/wp-content/uploads/sites/4/proxy-funds-rate-chart1-data.csv"
    ).text.split()
    for number, token in enumerate(csv_tokens, start=1):
        if number <= 5:
            continue  # skip header tokens
        row = token.replace(',', ' , ')
        # Token layout assumed 'YYYY-MM-DD,<rate>' — first 10 chars are the
        # date, last chars (spaces removed) the value. TODO confirm format.
        pfr_date = datetime.strptime(row[0:10].replace('-', '/'), '%Y/%m/%d')
        pfr_value = row[-8:].replace(' ', '')

        cursor.execute("select * from InterestRate where name='PFR' and date=%s",
                       (pfr_date,))
        if not cursor.fetchall():
            cursor.execute(
                "insert into InterestRate(date,name,_value)values(%s,%s,%s)",
                (pfr_date, 'PFR', pfr_value))
        else:
            # BUG FIX: the original wrote `where 'name'='PFR' and 'date'='...'`,
            # comparing string literals (always false), so revisions were
            # never applied. Use real column identifiers.
            cursor.execute(
                "update InterestRate set _value=%s where name='PFR' and date=%s",
                (pfr_value, pfr_date))
        db.commit()

    # --- NY Fed overnight repo operation rate (RR) -------------------------
    ops = requests.get(
        "https://markets.newyorkfed.org/api/rp/repo/multiple/results/last/1.json"
    ).json()
    operation = ops['repo']['operations'][0]
    detail = operation['details'][0]
    if 'minimumBidRate' in detail:
        rr_value = detail['minimumBidRate']
        rr_date = datetime.strptime(
            operation['operationDate'].replace('-', '/'), '%Y/%m/%d')
        cursor.execute("select date from InterestRate where name='RR'")
        rows = cursor.fetchall()
        if rows[-1][0] != rr_date:
            cursor.execute(
                "insert into InterestRate(date,name,_value)values(%s,%s,%s)",
                (rr_date, 'RR', rr_value))
            db.commit()

    # --- USD LIBOR fixings scraped from global-rates.com -------------------
    soup = BeautifulSoup(
        requests.get("https://www.global-rates.com/en/interest-rates/libor/american-dollar/american-dollar.aspx").text,
        'html.parser')
    cells = soup.find_all('div', class_="table-normal text-end")
    libor_values = {
        'LIBOR1M': remove_tags(str(cells[5])).replace(' ', ''),
        'LIBOR3M': remove_tags(str(cells[10])).replace(' ', ''),
        'LIBOR6M': remove_tags(str(cells[15])).replace(' ', ''),
    }
    raw_date = remove_tags(str(cells[0]))
    # Rearrange the scraped date text into 'YYYY/MM/DD' (same slicing as the
    # original; depends on the site's date cell layout).
    raw_date = (raw_date[6:10] + '-' + raw_date[0:5]).replace("-", "/")
    libor_date = datetime.strptime(raw_date, '%Y/%m/%d')

    cursor.execute("select date from InterestRate where name='LIBOR1M'")
    rows = cursor.fetchall()
    if libor_date != rows[-1][0]:
        for rate_name, rate_value in libor_values.items():
            cursor.execute(
                "insert into InterestRate(date,name,_value)values(%s,%s,%s)",
                (libor_date, rate_name, rate_value))
        db.commit()

    db.close()
    time.sleep(7200)  # 2 hours
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,102 @@
|
||||||
|
import time
|
||||||
|
from full_fred.fred import Fred
|
||||||
|
import pymysql
|
||||||
|
from datetime import datetime
|
||||||
|
# Every 6 hours, mirror FRED's BUSLOANS (-> Loan.PSI) and HBPIGDQ188S
# (-> Loan.FDHBPI_GDP) series into the Loan table.
fred = Fred('example_key.txt')
fred.set_api_key_file('example_key.txt')


def _to_date(raw):
    """Parse a FRED 'YYYY-MM-DD' date string into a datetime."""
    return datetime.strptime(raw.replace('-', '/'), '%Y/%m/%d')


while True:
    BUSLOANS = fred.get_series_df('BUSLOANS')
    HBPIGDQ188S = fred.get_series_df('HBPIGDQ188S')

    psi_dates = list(BUSLOANS['date'])
    psi_values = list(BUSLOANS['value'])
    debt_dates = list(HBPIGDQ188S['date'])
    debt_values = list(HBPIGDQ188S['value'])

    # NOTE(review): credentials are hardcoded in source — move to config.
    # (The original also reconnected redundantly inside each branch below.)
    db = pymysql.connect(host="127.0.0.1", user="root",
                         password="2GS@bPYcgiMyL14A",
                         database="Macroeconomics", port=4423)
    cursor = db.cursor()
    cursor.execute("select date from Loan order by date desc limit 1")
    old_time = cursor.fetchall()[0][0]

    # Refresh the four observations preceding the latest — FRED revises them
    # (parameterized; the original interpolated values into the SQL text).
    for offset in (-2, -3, -4, -5):
        cursor.execute("update Loan set PSI=%s where date=%s",
                       (psi_values[offset], _to_date(psi_dates[offset])))
    db.commit()

    latest_psi_date = _to_date(psi_dates[-1])
    latest_psi_value = psi_values[-1]
    latest_debt_date = _to_date(debt_dates[-1])
    latest_debt_value = debt_values[-1]

    if latest_psi_date == old_time:
        # Newest observation already present — just refresh its value.
        cursor.execute("update Loan set PSI=%s where date=%s",
                       (latest_psi_value, latest_psi_date))
    else:
        cursor.execute("insert into Loan(date,PSI)values(%s,%s)",
                       (latest_psi_date, latest_psi_value))
    cursor.execute("update Loan set FDHBPI_GDP=%s where date=%s",
                   (latest_debt_value, latest_debt_date))
    db.commit()

    # BUG FIX: the original only slept on the "already present" branch, so a
    # fresh insert triggered an immediate tight re-loop against FRED.
    time.sleep(21600)  # 6 hours
|
|
@ -0,0 +1,216 @@
|
||||||
|
import pymysql
|
||||||
|
import time
|
||||||
|
import requests
|
||||||
|
from w3lib.html import remove_tags
|
||||||
|
from bs4 import BeautifulSoup
|
||||||
|
condition = True
while condition:
    import datetime

    # --- Scheduling: compute seconds until 17:00:01 tomorrow; the loop
    # sleeps that long at the end so the job runs once per day.
    now_time = datetime.datetime.now()
    tomorrow = (now_time + datetime.timedelta(days=1)).date()
    next_run = datetime.datetime(tomorrow.year, tomorrow.month, tomorrow.day, 17, 0, 1)
    timer_start_time = (next_run - now_time).total_seconds()

    # --- Fetch the current Fed H.6 "Money Stock Measures" release and take
    # its three data tables, in page order: MoneyStockMeasures,
    # SeasonallyAdjusted, NotSeasonallyAdjusted.
    page = requests.get(
        "https://www.federalreserve.gov/releases/h6/current/default.htm",
        timeout=60,  # a hung connection must not stall the daily loop forever
    )
    soup = BeautifulSoup(page.text, 'html.parser')
    tbodies = soup.find_all('tbody')

    # Month abbreviations exactly as printed in the release, mapped to the
    # numeric month used to build a YYYY/M/1 date for each row.
    month_digits = [
        ('Jan.', '1'), ('Feb.', '2'), ('Mar.', '3'), ('Apr.', '4'),
        ('May', '5'), ('June', '6'), ('July', '7'), ('Aug.', '8'),
        ('Sept.', '9'), ('Oct.', '10'), ('Nov.', '11'), ('Dec.', '12'),
    ]

    def table_tokens(tbody):
        """Flatten one <tbody>: strip tags, map month names to digits, drop
        the 'e' (estimated-value) footnote letter, split on whitespace."""
        text = remove_tags(str(tbody))
        for month_name, digit in month_digits:
            text = text.replace(month_name, digit)
        # NOTE(review): this removes EVERY 'e', not only the footnote marker;
        # kept as-is because the numeric token positions consumed below are
        # unaffected (tokens are mangled, not removed).
        text = text.replace('e', '')
        return text.split()

    def sync_table(table, columns, tokens, offsets, stride):
        """Upsert the 17 monthly rows of one release table.

        Args:
            table: Destination MySQL table name (trusted constant).
            columns: Column names, in insert/update order (trusted constants).
            tokens: Flattened cell tokens; each row occupies `stride` tokens
                and starts with (month, year).
            offsets: Per-column token offsets within a row, matching `columns`.
            stride: Number of tokens per table row.

        The first 16 rows are assumed to exist already and are UPDATEd; the
        last (newest) row is UPDATEd when its date equals the latest date in
        the table and INSERTed otherwise.
        """
        set_clause = ",".join("%s=%%s" % col for col in columns)
        update_sql = "UPDATE %s SET %s WHERE date=%%s" % (table, set_clause)
        insert_sql = "insert into %s(date,%s)values(%s)" % (
            table, ",".join(columns), ",".join(["%s"] * (len(columns) + 1)))

        pos = 0
        for i in range(17):
            # Row date: year token then month token -> first of that month.
            row_date = datetime.datetime.strptime(
                tokens[pos + 1] + '/' + tokens[pos] + '/1', '%Y/%m/%d')
            # Numeric cells with thousands separators removed.
            values = [tokens[pos + off].replace(',', '') for off in offsets]
            pos += stride

            db = pymysql.connect(host="127.0.0.1", user="root",
                                 password="2GS@bPYcgiMyL14A",
                                 database="Macroeconomics", port=4423)
            try:
                cursor = db.cursor()
                cursor.execute("select date from %s order by date desc limit 1" % table)
                latest_date = cursor.fetchall()[0][0]
                # NOTE(review): if the `date` column is DATE (not DATETIME),
                # pymysql returns datetime.date here and the equality below
                # never holds against a datetime - confirm the column type.
                # Parameterized queries: scraped values never reach SQL text.
                if i != 16 or row_date == latest_date:
                    cursor.execute(update_sql, values + [row_date])
                else:
                    cursor.execute(insert_sql, [row_date] + values)
                db.commit()
            finally:
                db.close()

    sync_table(
        "MoneyStockMeasures",
        ["adjustedM1", "adjustedM2", "notAdjustedM1", "notAdjustedM2",
         "currencyincirculation", "reserveBalances", "monetaryBase",
         "totalReserves", "totalMborrowings_M", "nonborrowedReserves"],
        table_tokens(tbodies[0]),
        [2, 3, 7, 8, 4, 5, 6, 9, 10, 11],
        12,
    )
    sync_table(
        "SeasonallyAdjusted",
        ["currencyM1", "demandM1", "otherLiquid",
         "smallDenominationTimeNonM1M2", "retailMoneyMarketFundsNonM1M2"],
        table_tokens(tbodies[1]),
        [2, 3, 4, 5, 6],
        7,
    )
    sync_table(
        "NotSeasonallyAdjusted",
        ["currencyM1", "demandM1", "otherLiquid",
         "smallDenominationTimeNonM1M2", "retailMoneyMarketFundsNonM1M2",
         "atDepositoryInstitutions", "atMoneyMarketFunds", "total"],
        table_tokens(tbodies[2]),
        [2, 3, 4, 5, 6, 7, 8, 9],
        10,
    )

    time.sleep(timer_start_time)
|
|
@ -0,0 +1,117 @@
|
||||||
|
import requests
|
||||||
|
import pymysql
|
||||||
|
from datetime import datetime
|
||||||
|
import time
|
||||||
|
|
||||||
|
# BEA (Bureau of Economic Analysis) API registration key used for all requests.
BEA_USER_ID = "146B5757-D9E3-442C-B6AC-ADE9E6B71114"

# Years to backfill on the first run; subsequent runs fetch only YEARS[-1].
YEARS = ["2023","2024","2025"]

# Pause between polling cycles: 6 hours.
SLEEP_SECONDS = 21600
|
||||||
|
|
||||||
|
def get_bea_data(year):
    """Fetch one year of quarterly NIPA table T10105 rows from the BEA API.

    Args:
        year: Year to request, e.g. "2024".

    Returns:
        The list of row dicts under ['BEAAPI']['Results']['Data'] in the
        JSON payload.

    Raises:
        requests.HTTPError: If the API responds with an HTTP error status.
        requests.Timeout: If the API does not answer within 60 seconds.
        KeyError: If the payload lacks the expected structure (e.g. the API
            returned an error document instead of data).
    """
    url = (
        f'https://apps.bea.gov/api/data?UserID={BEA_USER_ID}'
        f'&method=GetData&datasetname=NIPA&TableName=T10105&Frequency=Q'
        f'&Year={year}&ResultFormat=JSON'
    )
    # Timeout added: a hung connection would otherwise stall the polling
    # loop in run_job() forever; raise_for_status surfaces HTTP failures
    # instead of failing later with an opaque KeyError.
    response = requests.get(url, timeout=60)
    response.raise_for_status()
    return response.json()['BEAAPI']['Results']['Data']
|
||||||
|
|
||||||
|
def update_database(cursor, data):
    """Insert any BEA quarters not yet present in PCE, GPDI and NETEXP.

    Reads the quarters already stored in each table, buckets the incoming
    rows by their line description, and INSERTs only the missing quarters.
    Does not commit; the caller owns the transaction.

    Args:
        cursor: An open DB-API cursor on the Macroeconomics database.
        data: Iterable of BEA row dicts with keys "TimePeriod",
            "LineDescription", "LineNumber" and "DataValue".
    """
    def _existing(select_sql):
        # One column of quarter labels -> a set for O(1) membership tests.
        cursor.execute(select_sql)
        return {row[0] for row in cursor.fetchall()}

    existing = {
        "PCE": _existing("SELECT times FROM PCE"),
        "GPDI": _existing("SELECT times FROM GPDI"),
        "NETEXP": _existing("SELECT times FROM NETEXP"),
    }

    # LineDescription -> (destination table, destination column).
    routing = {
        "Personal consumption expenditures": ("PCE", "PCE"),
        "Durable goods": ("PCE", "PCEDG"),
        "Nondurable goods": ("PCE", "PCEND"),
        "Gross private domestic investment": ("GPDI", "GPDI"),
        "Fixed investment": ("GPDI", "FPI"),
        "Change in private inventories": ("GPDI", "CBI"),
        "Net exports of goods and services": ("NETEXP", "NETEXP"),
        "Imports": ("NETEXP", "IMPGS"),
        "Exports": ("NETEXP", "EXPGS"),
    }

    buckets = {"PCE": {}, "GPDI": {}, "NETEXP": {}}
    for entry in data:
        period = entry["TimePeriod"]
        desc = entry["LineDescription"]
        if desc == "Services":
            # "Services" appears on several report lines; only line 6 is the
            # PCE services component.
            if entry["LineNumber"] == '6':
                buckets["PCE"].setdefault(period, {})["PCES"] = entry["DataValue"]
            continue
        route = routing.get(desc)
        if route is not None:
            table, column = route
            buckets[table].setdefault(period, {})[column] = entry["DataValue"]

    # INSERT statement and value-column order per table (missing components
    # become NULL via dict.get).
    inserts = {
        "PCE": ("INSERT INTO PCE (times, PCE, PCEDG, PCEND, PCES) VALUES (%s,%s,%s,%s,%s)",
                ("PCE", "PCEDG", "PCEND", "PCES")),
        "GPDI": ("INSERT INTO GPDI (times, GPDI, FPI, CBI) VALUES (%s,%s,%s,%s)",
                 ("GPDI", "FPI", "CBI")),
        "NETEXP": ("INSERT INTO NETEXP (times, NETEXP, IMPGS, EXPGS) VALUES (%s,%s,%s,%s)",
                   ("NETEXP", "IMPGS", "EXPGS")),
    }
    for table, (sql, value_cols) in inserts.items():
        for period, vals in buckets[table].items():
            if period not in existing[table]:
                cursor.execute(sql, (period,) + tuple(vals.get(c) for c in value_cols))
|
||||||
|
|
||||||
|
def run_job(first_run=False):
    """Run one fetch-and-store cycle against the BEA API.

    Args:
        first_run: When True, fetch every year in YEARS (full backfill);
            otherwise only the most recent year.

    The connection and cursor are always closed in the `finally` block;
    database and other errors are reported to stdout rather than propagated.
    """
    print(f"[{datetime.now()}] 开始抓取 BEA 数据并更新数据库...")
    conn = None
    cur = None
    try:
        conn = pymysql.connect(
            host="127.0.0.1",
            user="root",
            password="2GS@bPYcgiMyL14A",
            database="Macroeconomics",
            port=4423,
        )
        cur = conn.cursor()
        # Full backfill on the first pass, incremental afterwards.
        for year in (YEARS if first_run else YEARS[-1:]):
            update_database(cur, get_bea_data(year))
            conn.commit()
            print(f"[{datetime.now()}] {year} 数据更新完成")
    except pymysql.MySQLError as e:
        print(f"[{datetime.now()}] 数据库错误: {e}")
    except Exception as e:
        print(f"[{datetime.now()}] 其他错误: {e}")
    finally:
        if cur is not None:
            cur.close()
        if conn is not None:
            conn.close()
        print(f"[{datetime.now()}] 本次任务完成。\n")
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # First pass backfills every configured year; every later pass only
    # refreshes the newest year, then the process sleeps 6 hours.
    initial_pass = True
    while True:
        run_job(initial_pass)
        initial_pass = False
        print(f"[{datetime.now()}] 休眠 {SLEEP_SECONDS} 秒(6小时)...\n")
        time.sleep(SLEEP_SECONDS)
|
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue