Original code:
torch_dataset = Data.TensorDataset(data_tensor=x, target_tensor=y)
loader = Data.DataLoader(
    dataset=torch_dataset,    # torch TensorDataset format
    batch_size=BATCH_SIZE,    # mini batch size
    shuffle=True,             # random shuffle for training
    num_workers=2,            # subprocesses for loading data
)
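For reference, the snippet above presupposes a few definitions that are not shown. A minimal sketch of the assumed setup (Data as the usual alias for torch.utils.data, with placeholder values for x, y and BATCH_SIZE) might be:

import torch
import torch.utils.data as Data   # assumed alias used in the snippet above

BATCH_SIZE = 5                     # placeholder mini-batch size
x = torch.linspace(1, 10, 10)      # dummy input tensor
y = torch.linspace(10, 1, 10)      # dummy target tensor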
Running it raises the following error:
TypeError Traceback (most recent call last)
<ipython-input-19-5bb67537d9eb> in <module>
----> 1 torch_dataset = Data.TensorDataset(data_tensor=x, target_tensor=y)
      2 # Modify the code as follows; see https://blog.csdn.net/idwtwt/article/details/86767634
      3 # torch_dataset = Data.TensorDataset(x, y)
      4 loader = Data.DataLoader(
      5     dataset=torch_dataset, # torch TensorDataset format

TypeError: __init__() got an unexpected keyword argument 'data_tensor'
The cause: newer versions of PyTorch removed the data_tensor and target_tensor keyword arguments from TensorDataset. Its constructor now takes a variable number of positional arguments, i.e. the familiar *args:
class TensorDataset(Dataset):
    """Dataset wrapping tensors.

    Each sample will be retrieved by indexing tensors along the first dimension.

    Arguments:
        *tensors (Tensor): tensors that have the same size of the first dimension.
    """

    def __init__(self, *tensors):
        assert all(tensors[0].size(0) == tensor.size(0) for tensor in tensors)
        self.tensors = tensors

    def __getitem__(self, index):
        return tuple(tensor[index] for tensor in self.tensors)

    def __len__(self):
        return self.tensors[0].size(0)
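To see the variadic behaviour concretely, here is a small sketch (with made-up tensors) showing that any number of tensors with a matching first dimension can be passed, that len() returns the size of that dimension, and that indexing returns a tuple with one element per tensor:

import torch
from torch.utils.data import TensorDataset

x = torch.arange(6).reshape(3, 2).float()   # 3 samples, 2 features each
y = torch.tensor([0., 1., 2.])              # 3 targets

dataset = TensorDataset(x, y)   # any number of tensors, same first dimension
print(len(dataset))             # 3
print(dataset[0])               # (tensor([0., 1.]), tensor(0.))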
So with the new API, you simply pass the tensors in positionally:
# Old usage
torch_dataset = Data.TensorDataset(data_tensor=x, target_tensor=y)

# New usage
torch_dataset = Data.TensorDataset(x, y)
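Putting it all together, a minimal end-to-end sketch under the new API (with placeholder tensors and batch size) could look like this:

import torch
import torch.utils.data as Data

BATCH_SIZE = 5
x = torch.linspace(1, 10, 10)   # dummy inputs: 1, 2, ..., 10
y = torch.linspace(10, 1, 10)   # dummy targets: 10, 9, ..., 1

torch_dataset = Data.TensorDataset(x, y)   # tensors are now passed positionally
loader = Data.DataLoader(
    dataset=torch_dataset,   # torch TensorDataset format
    batch_size=BATCH_SIZE,   # mini batch size
    shuffle=True,            # random shuffle for training
    num_workers=2,           # subprocesses for loading data
)

if __name__ == '__main__':   # guard needed when num_workers > 0 on platforms that spawn workers (e.g. Windows)
    for step, (batch_x, batch_y) in enumerate(loader):
        print('step:', step, '| batch_x:', batch_x, '| batch_y:', batch_y)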