diff --git a/CEDA/Market/market.py b/CEDA/Market/market.py
index 9370def..ae146bb 100644
--- a/CEDA/Market/market.py
+++ b/CEDA/Market/market.py
@@ -1,14 +1,20 @@
+import re
+import io
 import requests
 import demjson
 import pandas as pd
+from bs4 import BeautifulSoup
+from datetime import datetime
+from urllib.parse import quote, urlencode
 from fake_useragent import UserAgent
 
 url = {
-    "dukascopy": "http://data.uicstat.com/api_1.0"
+    "dukascopy": "http://data.uicstat.com/api_1.0",
+    "moneywatch": "https://www.marketwatch.com/investing/"
 }
 
 
-def market_data(
+def dukascopy(
     instrument: str,
     startdate: str,
     enddate: str,
@@ -44,9 +50,34 @@ def market_data(
     ]
     return df
 
+def currency_list(instrument="eurusd", startdate="01/01/2020", enddate="01/01/2021"):
+    """
+    https://www.marketwatch.com/investing/
+    """
+    tmp_url = url["moneywatch"] + "currency/{}/downloaddatapartial".format(instrument)
+    ua = UserAgent()
+    request_header = {"User-Agent": ua.random}
+    request_params = urlencode({
+        "startdate": r"{}".format(startdate),
+        "enddate": r"{}".format(enddate),
+        "daterange": "d30",
+        "frequency": "p1d",
+        "csvdownload": "true",
+        "downloadpartial": "false",
+        "newdates": "false"}, quote_via=quote)
+    r = requests.get(tmp_url, params=request_params, headers=request_header)
+    data_text = r.content
+    df = pd.read_csv(io.StringIO(data_text.decode('utf-8')))
+    Date = []
+    for i in range(0, len(df)):
+        Date.append(datetime.strptime(df["Date"][i], "%m/%d/%Y"))
+
+    df["Date"] = Date
+    return df
+
 
 if __name__ == "__main__":
-    data = market_data(instrument="eurusd",
+    data = dukascopy(instrument="eurusd",
                        startdate="2020-01-01",
                        enddate="2021-01-01",
                        timeframe="d1",
diff --git a/setup.py b/setup.py
index 66d2994..3b3437a 100644
--- a/setup.py
+++ b/setup.py
@@ -22,6 +22,7 @@ setup(
         "demjson>=2.2.4",
         "html5lib>=1.0.1",
         "xlrd==1.2.0",
+        "bs4"
     ],
     license = "MIT",
     classifiers=[
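
Usage sketch for the new MarketWatch helper. This is a minimal example, not part of the diff: it assumes the repo layout above makes the module importable as CEDA.Market.market, that the MarketWatch download endpoint used by currency_list() is reachable, and it only exercises the parameters whose defaults appear in the hunk.

# Minimal usage sketch (illustrative only, not part of the diff).
# Assumes CEDA/Market/market.py is importable as CEDA.Market.market
# and that network access to marketwatch.com is available.
from CEDA.Market.market import currency_list

# Daily EUR/USD history; dates use the US-style mm/dd/yyyy format that the
# function's defaults and its strptime("%m/%d/%Y") parsing expect.
eurusd = currency_list(instrument="eurusd",
                       startdate="01/01/2020",
                       enddate="01/01/2021")

print(eurusd.head())    # the "Date" column is already parsed to datetime
print(eurusd.dtypes)

Note that currency_list() pre-encodes the query string with urlencode(..., quote_via=quote) and hands that string to requests via params, so the query is sent exactly as encoded rather than re-encoded by requests.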