update moneywatch

TerenceLiu98 2021-06-03 22:09:23 +08:00
parent 3867bf4045
commit a319c33b4d
2 changed files with 35 additions and 3 deletions

View File

@@ -1,14 +1,20 @@
+import re
+import io
 import requests
 import demjson
 import pandas as pd
+from bs4 import BeautifulSoup
+from datetime import datetime
+from urllib.parse import quote, urlencode
 from fake_useragent import UserAgent
 
 url = {
-    "dukascopy": "http://data.uicstat.com/api_1.0"
+    "dukascopy": "http://data.uicstat.com/api_1.0",
+    "moneywatch": "https://www.marketwatch.com/investing/"
 }
 
 
-def market_data(
+def dukascopy(
     instrument: str,
     startdate: str,
     enddate: str,
@@ -44,9 +50,34 @@ def market_data(
     ]
     return df
 
 
+def currency_list(instrument = "eurusd", startdate="01/01/2020", enddate = "01/01/2021"):
+    """
+    https://www.marketwatch.com/investing/
+    """
+    tmp_url = url["moneywatch"] + "currency/{}/downloaddatapartial".format(instrument)
+    ua = UserAgent()
+    request_header = {"User-Agent": ua.random}
+    request_params = urlencode({
+        "startdate": r"{}".format(startdate),
+        "enddate": r"{}".format(enddate),
+        "daterange": "d30",
+        "frequency": "p1d",
+        "csvdownload": "true",
+        "downloadpartial": "false",
+        "newdates": "false"}, quote_via=quote)
+    r = requests.get(tmp_url, params=request_params, headers=request_header)
+    data_text = r.content
+    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
+    Date = []
+    for i in range(0, len(df)):
+        Date.append(datetime.strptime(df["Date"][i], "%m/%d/%Y"))
+    df["Date"] = Date
+    return df
+
+
 if __name__ == "__main__":
-    data = market_data(instrument="eurusd",
+    data = dukascopy(instrument="eurusd",
                        startdate="2020-01-01",
                        enddate="2021-01-01",
                        timeframe="d1",
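
For reference, a minimal standalone sketch of the request the new currency_list() issues, built from the same MarketWatch endpoint and query parameters shown in the hunk above. The instrument "eurusd" and the date range are just the function's defaults; whether the endpoint actually returns data depends on MarketWatch accepting the request.

import io
import requests
import pandas as pd
from urllib.parse import quote, urlencode
from fake_useragent import UserAgent

# Same query string that currency_list() builds; quote_via=quote percent-encodes
# the values, e.g. the "/" in the dates becomes %2F.
params = urlencode({
    "startdate": "01/01/2020",
    "enddate": "01/01/2021",
    "daterange": "d30",
    "frequency": "p1d",
    "csvdownload": "true",
    "downloadpartial": "false",
    "newdates": "false"}, quote_via=quote)

# Same endpoint pattern as url["moneywatch"] + "currency/{}/downloaddatapartial".
r = requests.get(
    "https://www.marketwatch.com/investing/currency/eurusd/downloaddatapartial",
    params=params,
    headers={"User-Agent": UserAgent().random})

# The endpoint returns CSV; parse it into a DataFrame as the function does.
df = pd.read_csv(io.StringIO(r.content.decode("utf-8")))
print(df.head())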

View File

@@ -22,6 +22,7 @@ setup(
         "demjson>=2.2.4",
         "html5lib>=1.0.1",
         "xlrd==1.2.0",
+        "bs4"
     ],
     license = "MIT",
     classifiers=[
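
A quick sanity check, as a hedged sketch, that the newly declared "bs4" requirement resolves after installing the package: on PyPI, bs4 is a thin wrapper that pulls in beautifulsoup4, matching the new "from bs4 import BeautifulSoup" import in the first hunk.

# Minimal import/parse check for the new dependency.
from bs4 import BeautifulSoup

soup = BeautifulSoup("<p>ok</p>", "html.parser")
print(soup.p.text)  # prints "ok" if bs4/beautifulsoup4 installed correctly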