TerenceLiu98 2021-06-07 12:53:37 +08:00
parent f0dfae60e2
commit f18255ca24
7 changed files with 137 additions and 99 deletions

View File

@ -18,7 +18,7 @@ def gdp_quarterly():
YoY: year on year growth
Data source: http://data.eastmoney.com/cjsj/gdp.html
"""
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -66,7 +66,7 @@ def ppi_monthly():
Accum: Accumulation
Data source: http://data.eastmoney.com/cjsj/ppi.html
"""
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -105,7 +105,7 @@ def cpi_monthly():
Data source: http://data.eastmoney.com/cjsj/cpi.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -170,7 +170,7 @@ def pmi_monthly():
Data Source: http://data.eastmoney.com/cjsj/pmi.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -209,7 +209,7 @@ def fai_monthly(): # fix asset investment
Data Source: http://data.eastmoney.com/cjsj/gdzctz.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -248,7 +248,7 @@ def hi_old_monthly(): # house index old version (2008-2010)
Data Source: http://data.eastmoney.com/cjsj/house.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -295,7 +295,7 @@ def hi_new_monthly(city1: str, city2: str):
Data Source: http://data.eastmoney.com/cjsj/newhouse.html
"""
tmp_url = "http://data.eastmoney.com/dataapi/cjsj/getnewhousechartdata?"
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params_nbch_MoM = {
"mkt": "1",
@ -379,7 +379,7 @@ def ci_eei_monthly(): # Climate Index & Entrepreneur Expectation Index
Data Source: http://data.eastmoney.com/cjsj/qyjqzs.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -421,7 +421,7 @@ def ig_monthly(): # Industry Growth
Data Source: http://data.eastmoney.com/cjsj/gyzjz.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -456,7 +456,7 @@ def cgpi_monthly(): # Corporate Goods Price Index
Data Source: http://data.eastmoney.com/cjsj/qyspjg.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -517,7 +517,7 @@ def cci_csi_cei_monthly(): # Consumer Confidence Index & Consumer Satisfaction
Data Source: http://data.eastmoney.com/cjsj/xfzxx.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -565,7 +565,7 @@ def trscg_monthly(): # Total Retail Sales of Consumer Goods
Data Source: http://data.eastmoney.com/cjsj/xfp.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -606,7 +606,7 @@ def ms_monthly(): # monetary Supply
Data Source: http://data.eastmoney.com/cjsj/hbgyl.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -650,7 +650,7 @@ def ie_monthly(): # Import & Export
Data Source: http://data.eastmoney.com/cjsj/hgjck.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -704,7 +704,7 @@ def stock_monthly(): # Import & Export
Data Source: http://data.eastmoney.com/cjsj/gpjytj.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -747,7 +747,7 @@ def fgr_monthly(): # Forex and Gold Reserve
Data Source: http://data.eastmoney.com/cjsj/gpjytj.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -790,7 +790,7 @@ def ctsf_monthly(): # Client Transaction Settlement Funds
http://data.eastmoney.com/cjsj/banktransfer.html
"""
tmp_url = "http://data.eastmoney.com/dataapi/cjsj/getbanktransferdata?"
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"p": "1",
@ -814,7 +814,7 @@ def sao_monthly(): # Stock Account Overview
http://data.eastmoney.com/cjsj/gpkhsj.html
"""
tmp_url = "http://dcfm.eastmoney.com/em_mutisvcexpandinterface/api/js/get?"
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"callback": "datatable4006236",
@ -858,7 +858,7 @@ def fdi_monthly(): # Foreign Direct Investment
http://data.eastmoney.com/cjsj/fdi.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -897,7 +897,7 @@ def gr_monthly(): # Government Revenue
http://data.eastmoney.com/cjsj/czsr.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -935,7 +935,7 @@ def ti_monthly(): # Tax Income
http://data.eastmoney.com/cjsj/qgsssr.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -971,7 +971,7 @@ def nl_monthly(): # New Loan
http://data.eastmoney.com/cjsj/xzxd.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -1009,7 +1009,7 @@ def dfclc_monthly(): # Deposit of Foreign Currency and Local Currency
http://data.eastmoney.com/cjsj/wbck.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -1046,7 +1046,7 @@ def fl_monthly(): # Forex Loan
http://data.eastmoney.com/cjsj/whxd.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -1083,7 +1083,7 @@ def drr_monthly(): # Deposit Reserve Ratio
http://data.eastmoney.com/cjsj/ckzbj.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -1136,7 +1136,7 @@ def interest_monthly(): # Interest
http://data.eastmoney.com/cjsj/yhll.html
"""
tmp_url = url["eastmoney"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["eastmoney"]
request_params = {
@ -1188,7 +1188,7 @@ def gdc_daily(): # gasoline, Diesel and Crude Oil
http://data.eastmoney.com/cjsj/oil_default.html
"""
tmp_url = "http://datacenter-web.eastmoney.com/api/data/get?"
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"callback": "jQuery112302601302322321093_1622082348721",

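Every fetcher in this file builds the same request: a random User-Agent from fake_useragent plus a GET against the Eastmoney data API. The only change in this commit is constructing UserAgent with verify_ssl=False, so the library can still download its cached browser list when SSL verification of that download fails. A minimal sketch of the shared pattern, with a placeholder endpoint (the real url["eastmoney"] value is not shown in this diff) and illustrative parameters:

import requests
from fake_useragent import UserAgent

EASTMONEY_API = "http://datainterface.eastmoney.com/EM_DataCenter/JS.aspx"  # hypothetical stand-in for url["eastmoney"]

def fetch_eastmoney(params):
    ua = UserAgent(verify_ssl=False)            # skip SSL verification when fake_useragent refreshes its browser list
    request_header = {"User-Agent": ua.random}  # fresh randomized User-Agent per call
    r = requests.get(EASTMONEY_API, params=params, headers=request_header)
    r.raise_for_status()
    return r.text                               # each wrapper parses the JSON/JSONP payload itself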
View File

@ -5,7 +5,6 @@ import demjson
import requests
from fake_useragent import UserAgent
url = {
"eurostat": "http://ec.europa.eu/eurostat/wdds/rest/data/v2.1/json/en/",
"ecb": "https://sdw-wsrest.ecb.europa.eu/service/data/"
@ -26,7 +25,7 @@ class ecb_data(object):
"""
"""
tmp_url = self.url + "{}/".format(datacode) + "{}".format(key)
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random, 'Accept': 'text/csv'}
request_params = {
"startPeriod": "{}".format(startdate),
@ -53,7 +52,7 @@ class eurostat_data(object):
"""
"""
tmp_url = self.url + "{}".format(datasetcode)
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random, 'Accept': 'text/csv'}
request_params = {
"precision": "{}".format(precision),

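The European wrappers get the same one-line change inside ecb_data and eurostat_data. For context, a hedged sketch of how the ECB call appears to work based on the visible parameters (the function name, the endPeriod parameter, and everything past the truncated hunk are assumptions):

import io
import pandas as pd
import requests
from fake_useragent import UserAgent

ECB_SDW = "https://sdw-wsrest.ecb.europa.eu/service/data/"  # same base URL as the module's url["ecb"]

def ecb_series(datacode, key, startdate, enddate):
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random, "Accept": "text/csv"}   # ask SDW for CSV output
    request_params = {"startPeriod": startdate, "endPeriod": enddate}  # endPeriod is assumed
    r = requests.get(ECB_SDW + "{}/{}".format(datacode, key), params=request_params, headers=request_header)
    return pd.read_csv(io.StringIO(r.content.decode("utf-8")))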
View File

@ -20,7 +20,7 @@ def gdp_quarterly(startdate="1947-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "GDP",
@ -39,7 +39,7 @@ def gdpc1_quarterly(startdate="1947-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "GDPC1",
@ -58,7 +58,7 @@ def oecd_gdp_monthly(startdate="1947-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USALORSGPNOSTSAM",
@ -77,7 +77,7 @@ def payems_monthly(startdate="1939-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "PAYEMS",
@ -96,7 +96,7 @@ def unrate(startdate="1948-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "LRUN64TTUSM156S",
@ -107,7 +107,7 @@ def unrate(startdate="1948-01-01", enddate="2021-01-01"):
data_text = r.content
df_monthly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_monthly["DATE"] = pd.to_datetime(df_monthly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "LRUN64TTUSQ156S",
@ -118,7 +118,7 @@ def unrate(startdate="1948-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "LRUN64TTUSA156S",
@ -141,7 +141,7 @@ def erate(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "LREM25TTUSM156S",
@ -152,7 +152,7 @@ def erate(startdate="1955-01-01", enddate="2021-01-01"):
data_text = r.content
df_monthly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_monthly["DATE"] = pd.to_datetime(df_monthly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "LREM25TTUSQ156S",
@ -163,7 +163,7 @@ def erate(startdate="1955-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "LREM25TTUSA156S",
@ -185,7 +185,7 @@ def pce_monthly(startdate="1959-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "PCE",
@ -204,7 +204,7 @@ def cpi(startdate="1960-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "CPALTT01USM661S",
@ -215,7 +215,7 @@ def cpi(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_monthly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_monthly["DATE"] = pd.to_datetime(df_monthly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "CPALTT01USQ661S",
@ -226,7 +226,7 @@ def cpi(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "CPALTT01USA661S",
@ -249,7 +249,7 @@ def m1(startdate="1960-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "WM1NS",
@ -260,7 +260,7 @@ def m1(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_weekly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_weekly["DATE"] = pd.to_datetime(df_weekly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "MANMM101USM657S",
@ -271,7 +271,7 @@ def m1(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_monthly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_monthly["DATE"] = pd.to_datetime(df_monthly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "MANMM101USQ657S",
@ -282,7 +282,7 @@ def m1(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "MANMM101USA657S",
@ -306,7 +306,7 @@ def m2(startdate="1960-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "WM2NS",
@ -317,7 +317,7 @@ def m2(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_weekly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_weekly["DATE"] = pd.to_datetime(df_weekly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "M2SL",
@ -338,7 +338,7 @@ def m3(startdate="1960-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "MABMM301USM657S",
@ -349,7 +349,7 @@ def m3(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_monthly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_monthly["DATE"] = pd.to_datetime(df_monthly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "MABMM301USQ657S",
@ -360,7 +360,7 @@ def m3(startdate="1960-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "MABMM301USA657S",
@ -383,7 +383,7 @@ def ltgby(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "IRLTLT01USM156N",
@ -394,7 +394,7 @@ def ltgby(startdate="1955-01-01", enddate="2021-01-01"):
data_text = r.content
df_monthly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_monthly["DATE"] = pd.to_datetime(df_monthly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "IRLTLT01USQ156N",
@ -405,7 +405,7 @@ def ltgby(startdate="1955-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "IRLTLT01USA156N",
@ -428,7 +428,7 @@ def gdp_ipd(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USAGDPDEFQISMEI",
@ -439,7 +439,7 @@ def gdp_ipd(startdate="1955-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USAGDPDEFAISMEI",
@ -461,7 +461,7 @@ def cci(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "CSCICP03USM665S",
@ -481,7 +481,7 @@ def bci(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "BSCICP03USM665S",
@ -502,7 +502,7 @@ def ibr_3(startdate="1965-01-01", enddate="2021-01-01"):
Description: Percent, Not Seasonally Adjusted, Monthly and Quarterly
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "IR3TIB01USM156N",
@ -513,7 +513,7 @@ def ibr_3(startdate="1965-01-01", enddate="2021-01-01"):
data_text = r.content
df_monthly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_monthly["DATE"] = pd.to_datetime(df_monthly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "IR3TIB01USQ156N",
@ -533,7 +533,7 @@ def gfcf_3(startdate="1965-01-01", enddate="2021-01-01"):
Description: United States Dollars,Not Seasonally Adjusted, Quarterly and Annually
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USAGFCFQDSMEI",
@ -544,7 +544,7 @@ def gfcf_3(startdate="1965-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USAGFCFADSMEI",
@ -566,7 +566,7 @@ def pfce(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USAPFCEQDSMEI",
@ -577,7 +577,7 @@ def pfce(startdate="1955-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USAPFCEADSMEI",
@ -598,7 +598,7 @@ def tlp(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "ULQELP01USQ657S",
@ -609,7 +609,7 @@ def tlp(startdate="1955-01-01", enddate="2021-01-01"):
data_text = r.content
df_quarterly = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
df_quarterly["DATE"] = pd.to_datetime(df_quarterly["DATE"], format="%Y-%m-%d")
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "ULQELP01USQ659S",
@ -630,7 +630,7 @@ def rt(startdate="1955-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "USASARTMISMEI",
@ -662,7 +662,7 @@ def bir(startdate="2003-01-01", enddate="2021-01-01"):
Return: pd.DataFrame
"""
tmp_url = url["fred_econ"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"id": "T5YIE",
@ -691,7 +691,7 @@ def adsbci():
"""
An index designed to track real business conditions at high observation frequency
"""
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = url["philfed"] + "ads"
r = requests.get(tmp_url, headers = request_header)
@ -719,7 +719,7 @@ def inflation_noewcasting():
"""
"""
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
tmp_url = "https://www.clevelandfed.org/~/media/files/charting/%20nowcast_quarter.json"
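All of the US series in this file share one download pattern built around url["fred_econ"]. A sketch of the common helper they could share, assuming url["fred_econ"] points at FRED's fredgraph.csv endpoint and that the start/end date parameters are named cosd and coed (neither is confirmed by the truncated hunks):

import io
import pandas as pd
import requests
from fake_useragent import UserAgent

FRED_CSV = "https://fred.stlouisfed.org/graph/fredgraph.csv"  # assumed value of url["fred_econ"]

def fred_series(series_id, startdate, enddate):
    ua = UserAgent(verify_ssl=False)
    request_header = {"User-Agent": ua.random}
    request_params = {"id": series_id, "cosd": startdate, "coed": enddate}  # cosd/coed names are an assumption
    r = requests.get(FRED_CSV, params=request_params, headers=request_header)
    df = pd.read_csv(io.StringIO(r.content.decode("utf-8")))
    df["DATE"] = pd.to_datetime(df["DATE"], format="%Y-%m-%d")
    return df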

View File

@ -1,4 +1,4 @@
# -*- coding: utf-8 -*-
# time: 05/29/2021 UTC+8
# author: terencelau
# email: t_lau@uicstat.com
# email: t_lau@uicstat.com

View File

@ -13,7 +13,6 @@ url = {
"moneywatch": "https://www.marketwatch.com/investing/"
}
def dukascopy(
instrument: str,
startdate: str,
@ -23,7 +22,7 @@ def dukascopy(
volume: bool,
flat: bool):
tmp_url = url["dukascopy"]
ua = UserAgent()
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = {
"instrument": "{}".format(instrument),
@ -50,30 +49,66 @@ def dukascopy(
]
return df
def currency_list(instrument = "eurusd", startdate="01/01/2020", enddate = "01/01/2021"):
def FX(instrument = "eurusd", startdate="01/01/2020", enddate = "01/01/2021", long=False):
"""
https://www.marketwatch.com/investing/
"""
tmp_url = url["moneywatch"] + "currency/{}/downloaddatapartial".format(instrument)
ua = UserAgent()
request_header = {"User-Agent": ua.random}
request_params = urlencode({
"startdate": r"{}".format(startdate),
"enddate": r"{}".format(enddate),
"daterange": "d30",
"frequency": "p1d",
"csvdownload": "true",
"downloadpartial": "false",
"newdates": "false"}, quote_via= quote)
r = requests.get(tmp_url, params=request_params, headers=request_header)
data_text = r.content
df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
Date = []
for i in range(0, len(df)):
Date.append(datetime.strptime(df["Date"][i], "%m/%d/%Y"))
df["Date"] = Date
return df
if long == False:
tmp_url = url["moneywatch"] + "currency/{}/downloaddatapartial".format(instrument)
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
request_params = urlencode({
"startdate": r"{}".format(startdate),
"enddate": r"{}".format(enddate),
"daterange": "d30",
"frequency": "p1d",
"csvdownload": "true",
"downloadpartial": "false",
"newdates": "false"}, quote_via= quote)
r = requests.get(tmp_url, params=request_params.replace("%2F", "/").replace("%20", " ").replace("%3A", ":"), headers=request_header)
data_text = r.content
df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
Date = []
for i in range(0, len(df)):
Date.append(datetime.strptime(df["Date"][i], "%m/%d/%Y"))
df["Date"] = Date
return df
else:
tmp_url = url["moneywatch"] + "currency/{}/downloaddatapartial".format(instrument)
ua = UserAgent(verify_ssl=False)
request_header = {"User-Agent": ua.random}
df = pd.DataFrame()
for i in range(int(startdate[6:10]), int(enddate[6:10])):
tmp_startdate = "01/01/" + str(i) + " 00:00:00"
if i+1 == int(enddate[6:10]):
tmp_enddate = enddate[0:6] + str((i+1)) + " 00:00:00"
else:
tmp_enddate = "01/01/" + str(i + 1) + " 00:00:00"
request_params = urlencode({
"startdate": r"{}".format(tmp_startdate),
"enddate": r"{}".format(tmp_enddate),
"daterange": "d30",
"frequency": "p1d",
"csvdownload": "true",
"downloadpartial": "false",
"newdates": "false"}, quote_via= quote)
r = requests.get(tmp_url, params=request_params.replace("%2F", "/").replace("%20", " ").replace("%3A", ":"), headers=request_header)
data_text = r.content
tmp_df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
Date = []
for j in range(0, len(tmp_df)):  # use j so the outer year counter i is not overwritten
Date.append(datetime.strptime(tmp_df["Date"][j], "%m/%d/%Y"))
tmp_df["Date"] = Date
if i == int(startdate[6:10]):
df = tmp_df
else:
df = pd.concat([tmp_df, df], axis=0)
df.reset_index(drop=True, inplace = True)
return df
if __name__ == "__main__":
@ -84,3 +119,6 @@ if __name__ == "__main__":
pricetype="bid",
volume=True,
flat=True)
#https://www.marketwatch.com/investing/currency/eurusd/downloaddatapartial?startdate=01/04/1971 00:00:00&enddate=06/04/2021 00:00:00&daterange=d30&frequency=p1d&csvdownload=true&downloadpartial=false&newdates=false
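Besides the User-Agent change, this file renames currency_list to FX and adds a long mode that downloads MarketWatch history one calendar year at a time and concatenates the pieces, presumably because a single downloaddatapartial call only returns a limited window. A usage sketch (the import path is a guess; the diff does not show the package layout):

from CEDA import FX  # hypothetical import path

# one request covering the whole range
df_short = FX(instrument="eurusd", startdate="01/01/2020", enddate="01/01/2021")

# long=True walks the range year by year and concatenates the yearly frames
df_long = FX(instrument="eurusd", startdate="01/01/2000", enddate="01/01/2021", long=True)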

View File

@ -1 +1 @@
from CEDA import *
from CEDA import *

View File

@ -2,7 +2,7 @@ from setuptools import setup, find_packages
import os
setup(
name = "CEDApy",
version = "1.0.6",
version = "1.0.8",
keywords = "quantitative economic data",
long_description = open(
os.path.join(
@ -23,7 +23,8 @@ setup(
"html5lib>=1.0.1",
"xlrd==1.2.0",
"bs4",
"urllib3>=1.26.5"
"urllib3>=1.26.5",
"fake-useragent"
],
license = "MIT",
classifiers=[