Commit f46b995: fix num layers

markus583 committed May 21, 2024
Parent: 5a99038
Showing 2 changed files with 3 additions and 2 deletions.

configs/peft/lora_xlmr.json (2 additions, 1 deletion)
@@ -1,8 +1,9 @@
 {
     "model_name_or_path": "xlm-roberta-base",
-    "output_dir": "xlmr-base-3_lora-v2_ep30_s10k",
+    "output_dir": "xlmr-base-3l_lora-v2_ep30_s10k",
     "block_size": 256,
     "eval_stride": 128,
+    "num_hidden_layers": 3,
     "do_train": true,
     "do_eval": true,
     "per_device_train_batch_size": 64,
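
For context: XLM-R base normally has 12 transformer layers, and a `num_hidden_layers` key in a HuggingFace-style config truncates the encoder at load time, which matches the `3l` (3 layers) now encoded in `output_dir`. Below is a minimal sketch of how such an override is typically consumed, assuming the standard `transformers` config-override path; wtpsplit's training script may wire this key up differently.

    from transformers import AutoConfig, AutoModel

    # Override num_hidden_layers when building the config; checkpoint
    # weights for the remaining 9 layers are simply not loaded.
    config = AutoConfig.from_pretrained("xlm-roberta-base", num_hidden_layers=3)
    model = AutoModel.from_pretrained("xlm-roberta-base", config=config)

    print(model.config.num_hidden_layers)  # 3
    print(len(model.encoder.layer))        # 3 transformer blocks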

wtpsplit/train/train_adapter.py (1 addition, 1 deletion)

@@ -57,7 +57,7 @@ class Args:
     adapter_warmup_steps: int = 0
     adapter_lr_multiplier: float = 1.0
     text_column: str = "text"
-    num_hidden_layers = None
+    num_hidden_layers: int = 0
 
     # NEW PARAMS
     use_subwords: bool = False
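
The Python change matters beyond the default value: assuming `Args` is a dataclass parsed by machinery such as `HfArgumentParser`, an attribute without a type annotation is never registered as a dataclass field, so the old `num_hidden_layers = None` could not be set from a config file or the command line at all. A small self-contained demonstration of that behavior (the `Args`/`FixedArgs` split here is illustrative):

    from dataclasses import dataclass, fields

    @dataclass
    class Args:
        text_column: str = "text"
        num_hidden_layers = None  # no annotation: plain class attribute, NOT a field

    # Only annotated attributes become fields, so the un-annotated one
    # is invisible to any argument-parsing machinery built on dataclasses.
    print([f.name for f in fields(Args)])  # ['text_column']

    @dataclass
    class FixedArgs:
        text_column: str = "text"
        num_hidden_layers: int = 0  # annotated: now a real, parseable field

    print([f.name for f in fields(FixedArgs)])  # ['text_column', 'num_hidden_layers']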
