I created my own dataset with NumPy inside the prepare_data() method of a PyTorch Lightning DataModule. Now I want to pass that data into the setup() method so I can split it into training and validation sets.
import numpy as np
import pytorch_lightning as pl
from torch.utils.data import random_split, DataLoader, TensorDataset
import torch
from torch.autograd import Variable
from torchvision import transforms

np.random.seed(42)
device = 'cuda' if torch.cuda.is_available() else 'cpu'

class DataModuleClass(pl.LightningDataModule):
    def __init__(self):
        super().__init__()
        self.constant = 2
        self.batch_size = 10

    def prepare_data(self):
        a = np.random.uniform(0, 500, 500)
        b = np.random.normal(0, self.constant, len(a))
        c = a + b
        X = np.transpose(np.array([a, b]))

        # Converting numpy array to Tensor
        self.x_train_tensor = torch.from_numpy(X).float().to(device)
        self.y_train_tensor = torch.from_numpy(c).float().to(device)

        training_dataset = TensorDataset(self.x_train_tensor, self.y_train_tensor)
        return training_dataset

    def setup(self):
        data = # What I have to write to get the data from prepare_data()
        self.train_data, self.val_data = random_split(data, [400, 100])

    def train_dataloader(self):
        training_dataloader = setup()  # Need to get the training data
        return DataLoader(self.training_dataloader)

    def val_dataloader(self):
        validation_dataloader = prepare_data()  # Need to get the validation data
        return DataLoader(self.validation_dataloader)

obj = DataModuleClass()
print(obj.prepare_data())
The same idea as the answer to your previous question: instead of returning the dataset from prepare_data(), store it on the instance (self.training_dataset) so that setup() can read it back and split it.
    def prepare_data(self):
        a = np.random.uniform(0, 500, 500)
        b = np.random.normal(0, self.constant, len(a))
        c = a + b
        X = np.transpose(np.array([a, b]))

        # Converting numpy arrays to Tensors
        self.x_train_tensor = torch.from_numpy(X).float().to(device)
        self.y_train_tensor = torch.from_numpy(c).float().to(device)

        # Keep the dataset on the instance so setup() can access it
        training_dataset = TensorDataset(self.x_train_tensor, self.y_train_tensor)
        self.training_dataset = training_dataset

    def setup(self, stage=None):
        # Lightning calls setup(stage=...), so accept the stage argument
        data = self.training_dataset
        self.train_data, self.val_data = random_split(data, [400, 100])

    def train_dataloader(self):
        return DataLoader(self.train_data)

    def val_dataloader(self):
        return DataLoader(self.val_data)
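As a quick sanity check, here is a minimal sketch (assuming the corrected class above) that calls the hooks by hand, the same calls the Lightning Trainer would otherwise make for you during fit:

# Manual smoke test of the DataModule, outside of a Trainer.
obj = DataModuleClass()
obj.prepare_data()        # builds self.training_dataset (500 samples)
obj.setup(stage='fit')    # splits it into 400 train / 100 val samples

train_loader = obj.train_dataloader()
val_loader = obj.val_dataloader()

x, y = next(iter(train_loader))
print(x.shape, y.shape)                          # torch.Size([1, 2]) torch.Size([1]) with the default batch size
print(len(obj.train_data), len(obj.val_data))    # 400 100

When you pass the DataModule to a Trainer instead, prepare_data() and setup() are invoked automatically, so you only need trainer.fit(model, datamodule=obj).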