Skip to content
This repository has been archived by the owner on Sep 24, 2024. It is now read-only.

Commit

Permalink
Reverting trust_remote_code changes, to be addressed in a distinct PR
Browse files Browse the repository at this point in the history
  • Loading branch information
binaryaaron authored and aittalam committed Aug 6, 2024
1 parent 462caf0 commit 53b5ba2
Show file tree
Hide file tree
Showing 3 changed files with 2 additions and 3 deletions.
2 changes: 1 addition & 1 deletion src/lm_buddy/configs/huggingface.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,7 @@ def validate_adapter_args(cls, config: "AdapterConfig"):
# Filter fields to those found on the PeftConfig
extra_fields = config.model_fields_set.difference(allowed_fields)
if extra_fields:
raise ValueError(f"Unknowon arguments for {peft_type} adapter: {extra_fields}")
raise ValueError(f"Unknown arguments for {peft_type} adapter: {extra_fields}")

return config

Expand Down
1 change: 0 additions & 1 deletion src/lm_buddy/configs/jobs/lm_harness.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@ class LMHarnessJobConfig(JobConfig):
model: AutoModelConfig | LocalChatCompletionsConfig
evaluation: LMHarnessEvaluationConfig
quantization: QuantizationConfig | None = None
trust_remote_code: bool = False

def asset_paths(self) -> list[AssetPath]:
match self.model:
Expand Down
2 changes: 1 addition & 1 deletion src/lm_buddy/jobs/evaluation/lm_harness.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,7 @@ def load_harness_model(config: LMHarnessJobConfig) -> HFLM | OpenaiCompletionsLM
tokenizer=model_path,
peft=peft_path,
device="cuda" if torch.cuda.device_count() > 0 else "cpu",
trust_remote_code=config.trust_remote_code,
trust_remote_code=model_config.trust_remote_code,
dtype=config.model.torch_dtype if config.model.torch_dtype else "auto",
**quantization_kwargs,
)
Expand Down

0 comments on commit 53b5ba2

Please sign in to comment.