RNN for predicting Renewable dta for 2022
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 19 09:42:00 2022
@author: sunny
"""
#Libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
#Training Set
dataset_train = pd.read_csv('/Users/sunny/Desktop/Gen_Data_Train.csv')
training_set = dataset_train.iloc[:, 6:7].values
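#Column index 6 is assumed to be the 'all solar thousand megawatthours' series
#referenced below; iloc[:, 6:7] keeps it as a 2-D column, which MinMaxScaler expects.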
#Feature Scaling
from sklearn.preprocessing import MinMaxScaler
sc = MinMaxScaler(feature_range= (0, 1))
training_set_scaled = sc.fit_transform(training_set)
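#The scaler is fitted on the training data only; the same fitted scaler is
#reused later to transform the test inputs and to invert the predictions.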
#Creating a data structure with several timesteps (a 2-step lookback) and 1 output
x_train = []
y_train = []
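#Sliding window: each sample holds the 2 previous scaled values (x) and the
#value that follows them (y). range(2, 84) assumes the training series has at
#least 84 rows.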
for i in range(2, 84):
    x_train.append(training_set_scaled[i-2:i, 0])
    y_train.append(training_set_scaled[i, 0])
x_train, y_train = np.array(x_train), np.array(y_train)
#Reshaping
x_train = np.reshape(x_train,(x_train.shape[0], x_train.shape[1], 1))
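#Keras LSTM layers expect 3-D input of shape (samples, timesteps, features);
#here each sample is a 2-step window with a single feature.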
#Keras and Packages
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
#RNN
regressor = Sequential()
#LSTM layers with Dropout regularisation
#1st Layer of LSTM
regressor.add(LSTM(units = 40, return_sequences = True, input_shape = (x_train.shape[1], 1)))
regressor.add(Dropout(0.01))
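#return_sequences=True passes the full hidden-state sequence to the next LSTM
#layer; only the final LSTM layer omits it. Dropout(0.01) randomly zeroes 1% of
#the outputs during training as light regularisation.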
#2nd Layer of LSTM
regressor.add(LSTM(units = 40, return_sequences = True))
regressor.add(Dropout(0.01))
#3rd Layer of LSTM
regressor.add(LSTM(units = 40, return_sequences = True))
regressor.add(Dropout(0.01))
#4th Layer of LSTM
regressor.add(LSTM(units = 40))
regressor.add(Dropout(0.01))
#Output Layer
regressor.add(Dense(units = 1))
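#A single Dense unit outputs one scaled value: the next step of the series.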
#Compile the RNN
regressor.compile(optimizer = 'adam', loss = 'mean_squared_error')
#Fitting the RNN to the Training Set
regressor.fit(x_train, y_train, epochs = 100, batch_size = 12)
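#Adam optimiser with mean-squared-error loss, suited to this regression target;
#training runs for 100 epochs in mini-batches of 12 windows.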
#Making the predictions and visualising the results
#Real Gen Data
dataset_test = pd.read_csv('Gen_Data_Train.csv')
real_energy = dataset_test.iloc[:, 6:7].values
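#Note: this reads the same CSV used for training, so the 'real' series below is
#the training series itself.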
#Predict the Gen Data
dataset_total= pd.concat((dataset_train['all solar thousand megawatthours'], dataset_test['all solar thousand megawatthours']), axis = 0)
inputs = dataset_total[len(dataset_total) - len(dataset_test) - 2:].values
inputs = inputs.reshape(-1, 1)
inputs = sc.transform(inputs)
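#The slice keeps the last 2 values before the test period so the first test
#window has a full 2-step history; transform (not fit_transform) reuses the
#scaling fitted on the training data.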
x_test = []
for i in range(2, 86):
    x_test.append(inputs[i-2:i, 0])
x_test = np.array(x_test)
x_test = np.reshape(x_test,(x_test.shape[0], x_test.shape[1], 1))
predicted_energy = regressor.predict(x_test)
predicted_energy = sc.inverse_transform(predicted_energy)
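#inverse_transform maps the scaled predictions back to thousand megawatthours.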
#Visualising the results
plt.plot(real_energy, color = 'red', label = 'Real all solar thousand megawatthours')
plt.plot(predicted_energy, color = 'blue', label = 'Predicted all solar thousand megawatthours')
plt.title('all solar thousand megawatthours')
plt.xlabel('Time')
plt.ylabel('Solar 1000 Megawatthours')
plt.legend()
plt.show()
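#Optional sanity check (a minimal sketch, assuming real_energy and
#predicted_energy cover the same period): root-mean-square error of the
#predictions, in thousand megawatthours. Both arrays are truncated to the
#shorter length before comparing.
n = min(len(real_energy), len(predicted_energy))
rmse = float(np.sqrt(np.mean((real_energy[:n] - predicted_energy[:n]) ** 2)))
print('RMSE (thousand megawatthours):', round(rmse, 3))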