mirror of
https://github.com/facebookresearch/sam2.git
synced 2025-09-18 04:32:48 +08:00
Update hieradet.py
Remove unused attributes: `head_dim = dim_out // num_heads` and `self.scale = head_dim**-0.5` are never used, because `F.scaled_dot_product_attention` applies the scaling factor automatically.
This commit is contained in:
parent
511199d7a9
commit
6ec8560436
@ -46,11 +46,8 @@ class MultiScaleAttention(nn.Module):
|
||||
|
||||
self.dim = dim
|
||||
self.dim_out = dim_out
|
||||
|
||||
self.num_heads = num_heads
|
||||
head_dim = dim_out // num_heads
|
||||
self.scale = head_dim**-0.5
|
||||
|
||||
|
||||
self.q_pool = q_pool
|
||||
self.qkv = nn.Linear(dim, dim_out * 3)
|
||||
self.proj = nn.Linear(dim_out, dim_out)
|
||||
|
Loading…
x
Reference in New Issue
Block a user