-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathget_data_daily.py
65 lines (46 loc) · 1.75 KB
/
get_data_daily.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
from getdata import YahooFinance
from getdata import SQLrepo
import sqlite3
import multiprocessing
import time
# --- Database setup -------------------------------------------------------
db_name = "stocks.sqlite"
# check_same_thread=False allows use from helper threads; worker *processes*
# still open their own connection in process_stock, since sqlite connections
# are not safe to share across processes.
connection = sqlite3.connect(database=db_name, check_same_thread=False)
repo = SQLrepo(connection=connection)

# Guard the destructive reset: with the "spawn" start method (the default on
# Windows/macOS) every worker process re-imports this module, and an
# unguarded delete_all_tables() would wipe the tables while sibling workers
# are inserting.
if __name__ == '__main__':
    repo.delete_all_tables()
    print("All tables deleted from the database")

# Important: the table name must match the ticker used for the API requests,
# otherwise the code will not work. The table name is the company's common
# name to make naming easy in the front end.
ls_tick = {
    "Apple": "AAPL",
    "IBM": "IBM",
    "Microsoft": "MSFT",
    "Google": "GOOG",
    "Tesla": "TSLA",
    "Amazon": "AMZN",
    "META": "META",
}
# Using multiprocessing to download data for all stocks at the same time
# for faster execution.
def process_stock(ticker, table_name):
    """Download daily data for *ticker* and store it in table *table_name*.

    Runs inside a worker process, so it opens its own sqlite connection:
    sharing the module-level connection across processes is unsafe.
    """
    conn = sqlite3.connect(database=db_name, check_same_thread=False)
    try:
        local_repo = SQLrepo(connection=conn)
        data = YahooFinance(ticker)
        print(f"Trying Downloading data for {table_name} from Yahoo Finance API")
        try:
            data.get_data()
        except Exception as exc:
            # A network/API failure in one worker should not crash it silently;
            # report and let the other tickers proceed.
            print(f"Data for {table_name} not available, try again")
            print(f"Download error: {exc}")
            return
        if len(data.data) < 10:
            # Fewer than 10 rows is treated as a failed/empty download.
            print(f"Data for {table_name} not available, try again")
        else:
            print(f"Data for {table_name} downloaded !!!!")
            local_repo.insert_table(table_name=table_name, records=data.data)
    finally:
        conn.close()
if __name__ == '__main__':
    # Fan out one worker process per ticker so all downloads run concurrently.
    started = time.time()
    workers = [
        multiprocessing.Process(target=process_stock, args=(symbol, company))
        for company, symbol in ls_tick.items()
    ]
    for worker in workers:
        worker.start()
    # Block until every download has finished before reporting the timing.
    for worker in workers:
        worker.join()
    # Elapsed wall-clock time for the whole fan-out, in seconds.
    elapsed = time.time() - started
    print(f"Total execution time: {elapsed} seconds")