200 changes: 89 additions & 111 deletions src/transformers/modeling_rope_utils.py

Large diffs are not rendered by default.
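Since the `modeling_rope_utils.py` diff is collapsed above, the sketch below is a reading aid only: a guess at what the merged helper plausibly does, inferred from its call sites in the config diffs that follow. Internal names such as `ROPE_VALIDATION_FUNCTIONS` and the placeholder validator are assumptions, not the contents of the collapsed diff.

```python
# Hypothetical sketch of the merged helper, inferred from its call sites below.

def _validate_default_rope(config):
    # Placeholder validator; the real module dispatches per rope_type
    # (default, linear, dynamic, yarn, longrope, llama3, ...).
    theta = config.rope_parameters.get("rope_theta")
    if not isinstance(theta, (int, float)) or theta <= 0:
        raise ValueError(f"`rope_theta` must be a positive number, got {theta}")


ROPE_VALIDATION_FUNCTIONS = {"default": _validate_default_rope}  # assumed structure


def rope_config_standardize_and_validate(config):
    """Standardize `config.rope_parameters` in place, then validate it."""
    rope_parameters = getattr(config, "rope_parameters", None) or {}

    # Legacy `rope_scaling` dicts used the key "type"; the standardized schema uses "rope_type".
    if "type" in rope_parameters and "rope_type" not in rope_parameters:
        rope_parameters["rope_type"] = rope_parameters.pop("type")
    rope_parameters.setdefault("rope_type", "default")

    # `rope_theta` is usually placed in the dict by the configs below, but may still live
    # as a top-level attribute on older configs.
    if "rope_theta" not in rope_parameters and hasattr(config, "rope_theta"):
        rope_parameters["rope_theta"] = config.rope_theta

    config.rope_parameters = rope_parameters

    # Run the validator registered for this rope type, if any.
    validator = ROPE_VALIDATION_FUNCTIONS.get(rope_parameters["rope_type"])
    if validator is not None:
        validator(config)
```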

11 changes: 6 additions & 5 deletions src/transformers/models/apertus/configuration_apertus.py
@@ -22,7 +22,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate


 class ApertusConfig(PreTrainedConfig):
@@ -160,14 +160,15 @@ def __init__(
         self.use_cache = use_cache
         self.attention_bias = attention_bias
         self.attention_dropout = attention_dropout
+
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 12000000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        rope_parameters["rope_theta"] = kwargs.get("rope_theta", 12000000.0)
+        rope_config_standardize_and_validate(self)

         super().__init__(
             pad_token_id=pad_token_id,

7 changes: 3 additions & 4 deletions src/transformers/models/apertus/modular_apertus.py
@@ -20,7 +20,7 @@
 from torch import nn

 from ...cache_utils import Cache
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate
 from ...modeling_utils import ALL_ATTENTION_FUNCTIONS
 from ...processing_utils import Unpack
 from ...utils import TransformersKwargs, logging
@@ -180,9 +180,8 @@ def __init__(
         del self.head_dim

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 12000000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 12000000.0)
+        rope_config_standardize_and_validate(self)


 class ApertusMLP(NemotronMLP):

10 changes: 5 additions & 5 deletions src/transformers/models/arcee/configuration_arcee.py
@@ -22,7 +22,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate


 class ArceeConfig(PreTrainedConfig):
@@ -165,12 +165,12 @@ def __init__(
         self.head_dim = head_dim if head_dim is not None else self.hidden_size // self.num_attention_heads
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 10000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 10000.0)
+        rope_config_standardize_and_validate(self)

         super().__init__(
             pad_token_id=pad_token_id,

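The same back-compat pattern repeats in every config below, so here is one rough usage sketch of what it amounts to for a user, with `ArceeConfig` as the example. Values are illustrative, and the snippet assumes the constructor keeps the signature shown in the diff above; the expected outputs in the comments follow from that diff rather than from running the code.

```python
from transformers import ArceeConfig

# New-style: pass the standardized dict directly via `rope_parameters`.
config = ArceeConfig(rope_parameters={"rope_type": "default", "rope_theta": 10000.0})

# Legacy-style: an old `rope_scaling` dict (plus a loose `rope_theta` kwarg) is still
# accepted; per the diff above it takes precedence, then gets standardized and validated.
legacy = ArceeConfig(rope_scaling={"rope_type": "linear", "factor": 2.0}, rope_theta=10000.0)

print(config.rope_parameters["rope_theta"])  # expected: 10000.0
print(legacy.rope_parameters["factor"])      # expected: 2.0
```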
10 changes: 5 additions & 5 deletions src/transformers/models/aria/configuration_aria.py
@@ -21,7 +21,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate
 from ..auto import CONFIG_MAPPING, AutoConfig


@@ -170,12 +170,12 @@ def __init__(
         self.head_dim = head_dim if head_dim is not None else self.hidden_size // self.num_attention_heads
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 10000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 10000.0)
+        rope_config_standardize_and_validate(self)

         super().__init__(
             pad_token_id=pad_token_id,

13 changes: 7 additions & 6 deletions src/transformers/models/bamba/configuration_bamba.py
@@ -17,7 +17,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate
 from ...utils import logging


@@ -171,15 +171,16 @@ def __init__(
         self.num_logits_to_keep = num_logits_to_keep

         self.attn_layer_indices = attn_layer_indices
+
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
-        self.partial_rotary_factor = 0.5
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}
+        self.rope_parameters["partial_rotary_factor"] = 0.5

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 10000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 10000.0)
+        rope_config_standardize_and_validate(self)

         mamba_intermediate = mamba_expand * hidden_size

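One detail specific to Bamba above: `partial_rotary_factor` is no longer a plain config attribute but an entry in the standardized rope dict. Below is a small sketch of how downstream code would read it after this change; attribute names not visible in the diff (e.g. `num_attention_heads`) are assumptions.

```python
from transformers import BambaConfig

config = BambaConfig()

# Before this change the factor was a top-level attribute (config.partial_rotary_factor == 0.5);
# per the diff above it now travels inside the standardized rope dict.
fraction = config.rope_parameters.get("partial_rotary_factor", 1.0)

# Illustrative downstream use: only a fraction of each head's dimensions get rotary embeddings.
head_dim = config.hidden_size // config.num_attention_heads  # assumed attributes
rotary_dim = int(head_dim * fraction)
print(fraction, rotary_dim)
```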
10 changes: 5 additions & 5 deletions src/transformers/models/bitnet/configuration_bitnet.py
@@ -16,7 +16,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate
 from ...utils import logging


@@ -140,12 +140,12 @@ def __init__(
         self.attention_dropout = attention_dropout
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 500000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 500000.0)
+        rope_config_standardize_and_validate(self)

         super().__init__(
             pad_token_id=pad_token_id,

39 changes: 21 additions & 18 deletions src/transformers/models/blt/configuration_blt.py
@@ -17,7 +17,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate
 from ...utils import logging


@@ -67,12 +67,12 @@ def __init__(
         self.initializer_range = initializer_range
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 500000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 500000.0)
+        rope_config_standardize_and_validate(self)

         # Remove tie_word_embeddings from kwargs to avoid duplicate parameter error
         kwargs.pop("tie_word_embeddings", None)
@@ -122,12 +122,12 @@ def __init__(
         self.initializer_range = initializer_range
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 500000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 500000.0)
+        rope_config_standardize_and_validate(self)

         # Remove tie_word_embeddings from kwargs to avoid duplicate parameter error
         kwargs.pop("tie_word_embeddings", None)
@@ -169,11 +169,12 @@ def __init__(
         self.initializer_range = initializer_range
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 500000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 500000.0)
+        rope_config_standardize_and_validate(self)

         # Remove tie_word_embeddings from kwargs to avoid duplicate parameter error
         kwargs.pop("tie_word_embeddings", None)
@@ -249,11 +250,12 @@ def __init__(
         self.initializer_range = initializer_range
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 10000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 10000.0)
+        rope_config_standardize_and_validate(self)

         # Remove tie_word_embeddings from kwargs to avoid duplicate parameter error
         kwargs.pop("tie_word_embeddings", None)
@@ -377,11 +379,12 @@ def __init__(
         self.monotonicity = kwargs.get("monotonicity", False)
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 500000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 500000.0)
+        rope_config_standardize_and_validate(self)

         # Cross attention configurations
         self.cross_attn_k = cross_attn_k

9 changes: 5 additions & 4 deletions src/transformers/models/chameleon/configuration_chameleon.py
@@ -17,7 +17,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate
 from ...utils import logging


@@ -232,11 +232,12 @@ def __init__(
         self.swin_norm = swin_norm
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 10000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 10000.0)
+        rope_config_standardize_and_validate(self)

         if vq_config is None:
             vq_config = {}

9 changes: 5 additions & 4 deletions src/transformers/models/cohere/configuration_cohere.py
@@ -22,7 +22,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig
-from ...modeling_rope_utils import RopeParameters, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate
 from ...utils import logging


@@ -167,11 +167,12 @@ def __init__(
         self.use_qk_norm = use_qk_norm
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 500000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 500000.0)
+        rope_config_standardize_and_validate(self)

         super().__init__(
             pad_token_id=pad_token_id,

10 changes: 5 additions & 5 deletions src/transformers/models/cohere2/configuration_cohere2.py
@@ -22,7 +22,7 @@
 from typing import Optional

 from ...configuration_utils import PreTrainedConfig, layer_type_validation
-from ...modeling_rope_utils import RopeParameters, rope_config_validation, standardize_rope_params
+from ...modeling_rope_utils import RopeParameters, rope_config_standardize_and_validate


 class Cohere2Config(PreTrainedConfig):
@@ -168,7 +168,8 @@ def __init__(
         self.layer_types = layer_types
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}
         # Need to specify head_dim in the config so it can be used in the attention forward functions
         self.head_dim = hidden_size // num_attention_heads

@@ -193,9 +194,8 @@ def __init__(
         layer_type_validation(self.layer_types, self.num_hidden_layers)

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 10000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 10000.0)
+        rope_config_standardize_and_validate(self)


 __all__ = ["Cohere2Config"]

11 changes: 5 additions & 6 deletions src/transformers/models/cohere2/modular_cohere2.py
@@ -27,8 +27,7 @@
 from ...modeling_rope_utils import (
     RopeParameters,
     dynamic_rope_update,
-    rope_config_validation,
-    standardize_rope_params,
+    rope_config_standardize_and_validate,
 )
 from ...modeling_utils import ALL_ATTENTION_FUNCTIONS
 from ...processing_utils import Unpack
@@ -192,7 +191,8 @@ def __init__(
         self.layer_types = layer_types
         # Try to set `rope_scaling` if available, otherwise use `rope_parameters`
         rope_scaling = kwargs.pop("rope_scaling", None)
-        self.rope_parameters = rope_scaling or rope_parameters
+        rope_parameters = rope_scaling or rope_parameters
+        self.rope_parameters = rope_parameters if rope_parameters is not None else {}
         # Need to specify head_dim in the config so it can be used in the attention forward functions
         self.head_dim = hidden_size // num_attention_heads

@@ -217,9 +217,8 @@ def __init__(
         layer_type_validation(self.layer_types, self.num_hidden_layers)

         # Validate the correctness of rotary position embeddings parameters
-        rope_theta = kwargs.get("rope_theta", 10000.0)
-        standardize_rope_params(self, rope_theta=rope_theta)
-        rope_config_validation(self)
+        self.rope_parameters["rope_theta"] = kwargs.get("rope_theta", 10000.0)
+        rope_config_standardize_and_validate(self)


 class Cohere2RotaryEmbedding(CohereRotaryEmbedding):