diff --git a/sam2/configs/sam2.1_training/sam2.1_hiera_b+_MOSE_finetune.yaml b/sam2/configs/sam2.1_training/sam2.1_hiera_b+_MOSE_finetune.yaml
index 2046791..9b6faa7 100644
--- a/sam2/configs/sam2.1_training/sam2.1_hiera_b+_MOSE_finetune.yaml
+++ b/sam2/configs/sam2.1_training/sam2.1_hiera_b+_MOSE_finetune.yaml
@@ -97,7 +97,7 @@ trainer:
         self_attention:
           _target_: sam2.modeling.sam.transformer.RoPEAttention
           rope_theta: 10000.0
-          feat_sizes: [32, 32]
+          feat_sizes: [64, 64]
           embedding_dim: 256
           num_heads: 1
           downsample_rate: 1
@@ -108,7 +108,7 @@ trainer:
         cross_attention:
           _target_: sam2.modeling.sam.transformer.RoPEAttention
           rope_theta: 10000.0
-          feat_sizes: [32, 32]
+          feat_sizes: [64, 64]
           rope_k_repeat: True
           embedding_dim: 256
           num_heads: 1