We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 23e8ff5 commit 124758e — Copy full SHA for 124758e
examples/models/llama/config/llm_config.py
@@ -227,9 +227,9 @@ class ExportConfig:
227
export_only: bool = False
228
229
def __post_init__(self):
230
- if self.max_context_length > self.max_seq_length:
+ if self.max_context_length < self.max_seq_length:
231
raise ValueError(
232
- f"max_context_length of {self.max_context_length} cannot be greater than max_seq_length of {self.max_seq_length}"
+ f"max_context_length of {self.max_context_length} cannot be shorter than max_seq_length of {self.max_seq_length}"
233
)
234
235
0 commit comments