I am trying to run a code for medical image registration, but I am facing this problem
"AttributeError: 'int' object has no attribute 'dim'". The part of the code where this error is occurring is given below.
'''''
def forward_part1(self, x, xa, u):
    """Window-based cross-attention between two feature maps.

    Parameters
    ----------
    x : Tensor of shape (B, D, H, W, C) — primary (query) feature map.
    xa : Tensor of shape (B, D, H, W, C) — auxiliary feature map the
        windows of `x` attend to.
        NOTE(review): the reported "AttributeError: 'int' object has no
        attribute 'dim'" means the *caller* passed a plain int here, not a
        tensor — the explicit check below turns that into a clear error.
    u : opaque argument forwarded to window_partition / cross_attn
        (semantics defined elsewhere in this file — not inspected here).

    Returns
    -------
    Tensor of shape (B, D, H, W, C), same spatial size as the input `x`.
    """
    # F.pad calls input.dim(); if `xa` is not tensor-like the original code
    # died with an opaque AttributeError. Fail early with a clear message.
    if not hasattr(xa, "dim"):
        raise TypeError(
            f"forward_part1 expected a tensor for `xa`, got "
            f"{type(xa).__name__}; fix the call site that builds `xa`"
        )

    B, D, H, W, C = x.shape
    window_size = get_window_size((D, H, W), self.window_size)
    x = self.norm1(x)

    # Pad D/H/W up to multiples of the window size. F.pad pads trailing
    # dimensions first, so for a (B, D, H, W, C) layout the order is
    # (C_front, C_back, W_left, W_right, H_top, H_bottom, D_front, D_back).
    pad_l = pad_t = pad_d0 = 0
    pad_d1 = (window_size[0] - D % window_size[0]) % window_size[0]
    pad_b = (window_size[1] - H % window_size[1]) % window_size[1]
    pad_r = (window_size[2] - W % window_size[2]) % window_size[2]
    pad_tuple = (0, 0, pad_l, pad_r, pad_t, pad_b, pad_d0, pad_d1)
    x = F.pad(x, pad_tuple)
    # BUG FIX: the original padded `xa` with the offsets shifted by one
    # dimension — (pad_l, pad_r, pad_t, pad_b, pad_d0, pad_d1, 0, 0) padded
    # the channel dim with pad_r, W with pad_b, H with pad_d1, and left D
    # unpadded. Use the SAME tuple as `x` so both stay spatially aligned.
    pad_value = 0
    xa = F.pad(xa, pad=pad_tuple, mode='constant', value=pad_value)
    _, Dp, Hp, Wp, _ = x.shape

    # Partition into non-overlapping windows: (B*nW, Wd*Wh*Ww, C).
    x_windows = window_partition(x, window_size, u)
    x_area_windows = window_area_partition(xa, window_size)

    # Cross-attention within each window (W-MSA / SW-MSA analogue).
    attn_windows = self.cross_attn(x_windows, x_area_windows, u)

    # Merge windows back to the padded map, then crop away the padding.
    attn_windows = attn_windows.view(-1, *(window_size + (C,)))
    x = window_reverse(attn_windows, window_size, B, Dp, Hp, Wp)  # B D' H' W' C
    if pad_d1 > 0 or pad_r > 0 or pad_b > 0:
        x = x[:, :D, :H, :W, :].contiguous()
    return x
''''' The details of the error I am facing are given below — how can I solve this error? '''''
Traceback (most recent call last):
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/Unsup_train.py", line 149, in <module>
RSTNet.train()
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/Unsup_train.py", line 117, in train
output = self.train_iterator(loss_weights, mi=mi, fi=fi, ml=ml)
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/Unsup_train.py", line 94, in train_iterator
w_m_to_f, w_f_to_m, w_label_m_to_f, w_label_f_to_m, flow, time = self.Reger(mi, fi, ml, fl)
File "/home/ubuntu/anaconda3/envs/Anwar/lib/python3.9/site-
packages/torch/nn/modules/module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/models/XMorpher.py", line 1124, in forward
x = self.swin(fixed, moving)
File "/home/ubuntu/anaconda3/envs/Anwar/lib/python3.9/site-
packages/torch/nn/modules/module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/models/XMorpher.py", line 1029, in forward
moving_out, fixed_out, moving, fixed = layer(moving.contiguous(), fixed.contiguous(), u)
File "/home/ubuntu/anaconda3/envs/Anwar/lib/python3.9/site-
packages/torch/nn/modules/module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/models/XMorpher.py", line 665, in forward
x = blk (x, u)
File "/home/ubuntu/anaconda3/envs/Anwar/lib/python3.9/site-
packages/torch/nn/modules/module.py", line 1102, in _call_impl
return forward_call(*input, **kwargs)
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/models/XMorpher.py", line 538, in forward
x = self.forward_part1(x, xa, u)
File "/home/ubuntu/Anwar/halfimagesize/changing
crossattention,XMorpher/XMorpher/models/XMorpher.py", line 497, in forward_part1
xa = F.pad(xa, pad=pad_tuple, mode='constant', value=pad_value)
File "/home/ubuntu/anaconda3/envs/Anwar/lib/python3.9/site-packages/torch/nn/functional.py",
line 4172, in _pad
assert len(pad) // 2 <= input.dim(), "Padding length too large"
AttributeError: 'int' object has no attribute 'dim'
'''''
The error is not caused by the `pad` argument: `pad_tuple` is already a valid tuple of ints. The traceback shows the failure at `assert len(pad) // 2 <= input.dim()` inside `F.pad`, i.e. it is `input.dim()` that fails — so the first argument `xa` is a plain Python `int`, not a tensor, at the moment `F.pad` is called. Trace the call chain (`forward` → `forward_part1`) and make sure the value bound to `xa` is the auxiliary feature tensor, not an integer (for example a window count or index passed by mistake).
Separately, note that the pad ordering for `xa` should match the one used for `x` (F.pad pads trailing dimensions first). Here is a corrected version of the padding code:
# Pad `xa` exactly like `x`: for a (B, D, H, W, C) tensor, F.pad pads the
# trailing dims first, so the order is (C, C, W_l, W_r, H_t, H_b, D_f, D_b).
# (This only works once the caller passes a real tensor for `xa`.)
pad_tuple = (0, 0, pad_l, pad_r, pad_t, pad_b, pad_d0, pad_d1)
xa = F.pad(xa, pad=pad_tuple, mode='constant', value=pad_value)
Let me know if this helps.