update training config as well to be consistent

This commit is contained in:
Chay Ryali 2024-12-11 07:07:23 +00:00
parent beacd9a521
commit a5dc1d5924

View File

@@ -97,7 +97,7 @@ trainer:
self_attention:
_target_: sam2.modeling.sam.transformer.RoPEAttention
rope_theta: 10000.0
-feat_sizes: [32, 32]
+feat_sizes: [64, 64]
embedding_dim: 256
num_heads: 1
downsample_rate: 1
@@ -108,7 +108,7 @@ trainer:
cross_attention:
_target_: sam2.modeling.sam.transformer.RoPEAttention
rope_theta: 10000.0
-feat_sizes: [32, 32]
+feat_sizes: [64, 64]
rope_k_repeat: True
embedding_dim: 256
num_heads: 1