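"""runNas100.py

Runs the Nasdaq 100 signal pipeline: fetches the latest symbol list and OHLC
data, produces Ichimoku Cloud, TK Cross, Kijun and (optionally) Kicker data
for each symbol, aggregates the latest signals into spreadsheets, and merges
the multi-timeframe results.
"""
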
from datetime import datetime

from pytz import timezone

from src.mvc.controllers import (
    GetIchimokuCloudDataNAS100,
    GetIchimokuCloudDataNAS100Aggregator,
    GetIchimokuCloudDataNAS100MultiTFMerger,
    GetIchimokuTKxDataNas100,
    GetIchimokuTKxDataNas100Aggregator,
    GetSymbolNAS100,
    GetDataNas100,
    GetIchimokuKijunDataNas100,
    GetIchimokuKijunDataNas100Aggregator,
    GetKickerDataNas100,
    GetKickerDataNas100Aggregator,
    GetIchimokuTKxDataNas100MultiTFMerger,
    GetIchimokuSumCloudTKxDataNas100MultiTFMerger,
)
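
# Run-configuration flags: toggle which pipeline stages and timeframes are
# fetched and processed on this run.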
fetch_symbols_latest_Nas100 = True
fetch_Nas100_1H = True
fetch_Nas100_D = True
fetch_Nas100_W = True
fetch_Nas100_M = True
run_Multi_TimeFrame_Merger_Nas100 = True
fetch_kijun_analysis = False
fetch_kicker = False
# Use "Datetime" for Yahoo intraday data,
# "Date" for D, W, M data.
# Use "Datetime" for all Oanda data.
fetch_Kicker_use_datetime_format = False


def main(
    fetch_symbols_latest_Nas100=fetch_symbols_latest_Nas100,
    fetch_Nas100_1H=fetch_Nas100_1H,
    fetch_Nas100_D=fetch_Nas100_D,
    fetch_Nas100_W=fetch_Nas100_W,
    fetch_Nas100_M=fetch_Nas100_M,
    fetch_kijun_analysis=fetch_kijun_analysis,
    fetch_Kicker_use_datetime_format=fetch_Kicker_use_datetime_format,
    run_Multi_TimeFrame_Merger_Nas100=run_Multi_TimeFrame_Merger_Nas100,
    fetch_kicker=fetch_kicker,
):
    # Uncomment the return below to stop this script being auto-run by Replit or Gitpod
    # return

    london_tz_start = timezone("Europe/London")
    time_start = datetime.now(london_tz_start)
    time_start_formatted = time_start.strftime("%Y-%m-%d %H:%M:%S")
    print(f"Nasdaq 100 task begins at: {time_start_formatted} [UK]")

    # ---------------- Nasdaq 100 ----------------
    # 1. Grab latest symbols
    _getSymbolNAS100 = GetSymbolNAS100
    _getSymbolNAS100.main(fetch_symbols_latest_Nas100)

    # 2. Download latest OHLC data for each symbol
    _getDataNas100 = GetDataNas100
    _getDataNas100.main(
        fetch_Nas100_1H, fetch_Nas100_D, fetch_Nas100_W, fetch_Nas100_M
    )

    # 3. Produce Ichimoku Cloud data for each symbol
    _getIchimokuCloudDataNas100 = GetIchimokuCloudDataNAS100
    _getIchimokuCloudDataNas100.main(
        fetch_Nas100_1H, fetch_Nas100_D, fetch_Nas100_W, fetch_Nas100_M
    )

    # 3.1 Combine latest Cloud signals of all symbols into one spreadsheet
    _getIchimokuCloudDataNas100Aggregator = (
        GetIchimokuCloudDataNAS100Aggregator
    )
    _getIchimokuCloudDataNas100Aggregator.main(
        fetch_Nas100_1H, fetch_Nas100_D, fetch_Nas100_W, fetch_Nas100_M
    )

    # 3.2 Merge Multi Time Frame Cloud signals
    _getIchimokuCloudDataNAS100MultiTFMerger = (
        GetIchimokuCloudDataNAS100MultiTFMerger
    )
    _getIchimokuCloudDataNAS100MultiTFMerger.main(
        run_Multi_TimeFrame_Merger_Nas100
    )

    # 3.3 Produce Ichimoku TK Cross data
    _getIchimokuTKxDataNas100 = GetIchimokuTKxDataNas100
    _getIchimokuTKxDataNas100.main(
        fetch_Nas100_1H, fetch_Nas100_D, fetch_Nas100_W, fetch_Nas100_M
    )

    # 3.4 Combine latest TK Cross signals from all symbols into one spreadsheet
    _getIchimokuTKxDataNas100Aggregator = GetIchimokuTKxDataNas100Aggregator
    _getIchimokuTKxDataNas100Aggregator.main(
        fetch_Nas100_1H, fetch_Nas100_D, fetch_Nas100_W, fetch_Nas100_M
    )

    # 3.5 Merge Multi Time Frame TKx signals
    _getIchimokuTKxDataNas100MultiTFMerger = (
        GetIchimokuTKxDataNas100MultiTFMerger
    )
    _getIchimokuTKxDataNas100MultiTFMerger.main(
        run_Multi_TimeFrame_Merger_Nas100
    )

    # 3.6 Merge Multi Time Frame Cloud and TKx Sum signals
    _getIchimokuSumCloudTKxDataNas100MultiTFMerger = (
        GetIchimokuSumCloudTKxDataNas100MultiTFMerger
    )
    _getIchimokuSumCloudTKxDataNas100MultiTFMerger.main(
        run_Multi_TimeFrame_Merger_Nas100
    )

    # 4. Produce Kijun data
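    # The same fetch_kijun_analysis flag is passed four times, presumably once
    # per timeframe (1H, D, W, M), mirroring the calls above.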
    _getIchimokuKijunDataNas100 = GetIchimokuKijunDataNas100
    _getIchimokuKijunDataNas100.main(
        fetch_kijun_analysis,
        fetch_kijun_analysis,
        fetch_kijun_analysis,
        fetch_kijun_analysis,
    )

    # 4.1 Combine latest Kijun signals from all symbols into one spreadsheet
    _getIchimokuKijunDataNas100Aggregator = (
        GetIchimokuKijunDataNas100Aggregator
    )
    _getIchimokuKijunDataNas100Aggregator.main(
        fetch_kijun_analysis,
        fetch_kijun_analysis,
        fetch_kijun_analysis,
        fetch_kijun_analysis,
    )

    if fetch_kicker:
        # 5. Produce Kicker data
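        # Per the note next to fetch_Kicker_use_datetime_format above, the flag
        # selects the index column: "Datetime" (Yahoo intraday / Oanda) or
        # "Date" (D, W, M data).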
        _getKickerDataNas100 = GetKickerDataNas100
        _getKickerDataNas100.main(
            fetch_Kicker_use_datetime_format,
            fetch_Nas100_D,
            fetch_Nas100_W,
            fetch_Nas100_M,
        )

        # 5.1 Combine latest Kicker signals from all symbols into one spreadsheet
        _getKickerDataNas100Aggregator = GetKickerDataNas100Aggregator
        _getKickerDataNas100Aggregator.main(
            fetch_Kicker_use_datetime_format,
            fetch_Nas100_D,
            fetch_Nas100_W,
            fetch_Nas100_M,
        )

    # calculate time elapsed
    london_tz_finish = timezone("Europe/London")
    time_finish = datetime.now(london_tz_finish)
    time_elapsed = time_finish - time_start
    time_finish_formatted = time_finish.strftime("%Y-%m-%d %H:%M:%S")
    print(
        f"\nNasdaq 100 tasks completed at {time_finish_formatted} [UK] (Time elapsed: {time_elapsed})",
    )
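

# Arguments are passed positionally, so their order must match main()'s
# parameter list above.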
if __name__ == "__main__":
    main(
        fetch_symbols_latest_Nas100,
        fetch_Nas100_1H,
        fetch_Nas100_D,
        fetch_Nas100_W,
        fetch_Nas100_M,
        fetch_kijun_analysis,
        fetch_Kicker_use_datetime_format,
        run_Multi_TimeFrame_Merger_Nas100,
        fetch_kicker,
    )