Commit 2d957a89 authored by szymon sadkowski

removed private file

parent 2c213d57
from FCRdataLoader.fcrdataloader.dataset import FCRdatasetFactory
from torch.utils.data import DataLoader
from torch.optim.lr_scheduler import ReduceLROnPlateau
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from .LitFCRtestBase import BaseTestEncoder
import torch.nn.functional as F
import torch.nn as nn
import torch
'''
Don't touch: great performance
'''
HIDDEN_SIZE = 30
BATCH_SIZE = 128
SEQ_LEN = 20
HORIZON = 5
LSTM_LAYERS = 3
LR = 0.1
FEATURES = 3
OUTPUT = 6
class Encoder(BaseTestEncoder):
def __init__(
self,
features=FEATURES,
output=OUTPUT,
lr=LR,
batch_size=BATCH_SIZE,
seq_len=SEQ_LEN,
horizon=HORIZON,
hidden_size=HIDDEN_SIZE,
lstm_layers=LSTM_LAYERS
):
super(Encoder, self).__init__()
self.seq_len = seq_len
self.horizon = horizon
self.batch_size = batch_size
self.lstm_layers = lstm_layers
self.criterion = nn.MSELoss()
self.lr = lr
self.lstm = nn.LSTM(features, hidden_size, num_layers=self.lstm_layers,
bidirectional=True, batch_first=True)
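# a bidirectional LSTM concatenates forward and backward hidden states,
# so the per-step output width is hidden_size * 2 (matched by fc1 below)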
self.fc1 = nn.Linear(hidden_size * 2, output)
# data transformation
self.data_set_factory = FCRdatasetFactory(seq_len, horizon)
def forward(self, x):
out, _ = self.lstm(x)
# out: (batch, seq_len, hidden_size * directions)
out = out[:, -1, :]
# out: (batch, hidden_size * directions)
out = self.fc1(out)
return out
def training_step(self, batch, batch_idx):
x, y = batch
prediction = self(x)
#print(f"x = {x[0]}")
#print(f"pred = {torch.round(prediction[0])}")
#print(f"y = {y[0]}")
loss = self.criterion(prediction, y)
self.log('train_loss', loss, on_step=False, on_epoch=True)
return loss
def val_dataloader(self):
return self.test_dataloader()
def train_dataloader(self):
return DataLoader(
self.data_set_factory.get_train_dataset(),
batch_size=self.batch_size,
num_workers=4,
sampler=self.data_set_factory.get_uniform_dist_y_sampler()
)
def test_dataloader(self):
loader = DataLoader(
self.data_set_factory.get_test_dataset(),
batch_size=self.batch_size,
num_workers=4
)
return loader
def configure_optimizers(self):
optimizer = torch.optim.Adam(self.parameters(), lr=self.lr)
scheduler = ReduceLROnPlateau(
optimizer, 'min', patience=20, verbose=True)
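# Lightning steps ReduceLROnPlateau against the metric named under 'monitor'
# ('train_loss' is logged per epoch in training_step)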
return {
'optimizer': optimizer,
'lr_scheduler': scheduler,
'monitor': 'train_loss'
}
def __get_scaler(self, train_dataset):
scaler_loader = DataLoader(
train_dataset,
batch_size=len(train_dataset)
)
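# a single batch spanning the whole training set, so the scaler is fit on the full value range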
scaler = MinMaxScaler(feature_range=(-10, 10))
batch = next(iter(scaler_loader))[0].reshape(-1, 3)  # reshape to (samples, features); 3 is hard-coded for FCR data
scaler.fit(batch)
return scaler
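For reference, a minimal training sketch, assuming BaseTestEncoder ultimately subclasses pytorch_lightning.LightningModule (which the training_step/configure_optimizers hooks suggest); the max_epochs value is illustrative:
import pytorch_lightning as pl
model = Encoder()
trainer = pl.Trainer(max_epochs=100)
# the module provides its own train/val dataloaders, so fit() needs no separate datamodule
trainer.fit(model)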