fetchFlashHistory.py
import datetime
import os

import pandas as pd
import requests
from dateutil.relativedelta import relativedelta
from dotenv import load_dotenv


def getFlashHistory():
    # Load the API key, ticker, and other settings from the .env file
    load_dotenv()
    api_key = os.getenv('API_KEY')
    ticker = os.getenv('TICKER')
    timeunit = os.getenv('TIMEUNIT')
    timespan = os.getenv('TIMESPAN')
    timeinterval = int(os.getenv('TIMEINTERVAL'))
    intervaltype = os.getenv('INTERVALTYPE')
    timeamount = int(os.getenv('TIMEAMOUNT'))

    today = datetime.date.today()

    # The flash-history window is fixed at the last 5 days; TIMEUNIT and
    # TIMEAMOUNT are loaded above but not used for this window.
    window = today - relativedelta(days=5)

    # Polygon.io aggregates (bars) endpoint URL template
    url_template = ("https://api.polygon.io/v2/aggs/ticker/{ticker}/range/"
                    "{multiplier}/{timespan}/{from_date}/{to_date}?apiKey={api_key}")

    # Bar size: TIMEINTERVAL units of INTERVALTYPE (INTERVALTYPE replaces the
    # TIMESPAN value loaded above)
    multiplier = timeinterval
    timespan = intervaltype

    # Fetch aggregate bars for a specific date range
    def fetch_data(from_date, to_date):
        url = url_template.format(ticker=ticker, multiplier=multiplier, timespan=timespan,
                                  from_date=from_date, to_date=to_date, api_key=api_key)
        response = requests.get(url)
        data = response.json()
        return data.get('results', [])

    # Fetch data in smaller chunks (at most 30 days at a time)
    all_data = []
    start_date = window
    total_chunks = max(1, -(-(today - window).days // 30))
    current_chunk = 1
    while start_date < today:
        end_date = min(start_date + relativedelta(days=30), today)
        # print(f"Fetching data for {ticker} from {start_date} to {end_date} ({current_chunk}/{total_chunks})")
        chunk_data = fetch_data(start_date, end_date)
        all_data.extend(chunk_data)
        start_date = end_date
        current_chunk += 1

    # Convert the data to a pandas DataFrame and save it to a CSV file
    df = pd.DataFrame(all_data)
    output_file = "data/flash_historical_data.csv"
    os.makedirs(os.path.dirname(output_file), exist_ok=True)
    df.to_csv(output_file, index=False)
    # print(f"Data for {ticker} saved to {output_file}")