
Commit fa7fe64

sutaakar authored and openshift-merge-bot[bot] committed
KFTO: Remove tokenizer_name_or_path parameters in tests
1 parent ba6fd4c · commit fa7fe64

13 files changed: +8 -21 lines changed
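The removed tokenizer_name_or_path values mirror what appear to be the model checkpoints themselves (e.g. /tmp/model/bloom-560m, meta-llama/Llama-2-13b-chat-hf), so dropping the key presumably lets the training stack fall back to loading the tokenizer from the model path. A minimal sketch of that fallback, assuming standard Hugging Face Transformers behaviour; the model path below is a placeholder borrowed from tests/kfto/core/config.json, not something this commit adds:

# Hypothetical illustration, not part of this commit: when no separate
# tokenizer path is configured, the tokenizer can be loaded from the same
# checkpoint as the model, which is why the extra config key is redundant here.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_name_or_path = "/tmp/model/bloom-560m"  # placeholder; assumed to match the model under test

tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)  # tokenizer taken from the model checkpoint
model = AutoModelForCausalLM.from_pretrained(model_name_or_path)

If a test ever needs a tokenizer that differs from its model checkpoint, the key can simply be reintroduced in that config.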

tests/kfto/core/config.json

Lines changed: 1 addition & 2 deletions
@@ -13,6 +13,5 @@
     "include_tokens_per_second": true,
     "response_template": "\n### Label:",
     "dataset_text_field": "output",
-    "use_flash_attn": false,
-    "tokenizer_name_or_path": "/tmp/model/bloom-560m"
+    "use_flash_attn": false
 }

tests/kfto/core/config_granite_20b_code_instruct.json

Lines changed: 1 addition & 2 deletions
@@ -13,6 +13,5 @@
     "include_tokens_per_second": true,
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
-    "use_flash_attn": false,
-    "tokenizer_name_or_path": "ibm-granite/granite-20b-code-instruct"
+    "use_flash_attn": false
 }

tests/kfto/core/config_granite_34b_code_instruct_lora.json

Lines changed: 0 additions & 1 deletion
@@ -14,7 +14,6 @@
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
     "use_flash_attn": false,
-    "tokenizer_name_or_path": "ibm-granite/granite-34b-code-instruct",
     "peft_method": "lora",
     "target_modules": ["all-linear"]
 }

tests/kfto/core/config_llama2_13b_chat_hf.json

Lines changed: 1 addition & 2 deletions
@@ -13,6 +13,5 @@
     "include_tokens_per_second": true,
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
-    "use_flash_attn": false,
-    "tokenizer_name_or_path": "meta-llama/Llama-2-13b-chat-hf"
+    "use_flash_attn": false
 }

tests/kfto/core/config_llama2_13b_chat_hf_lora.json

Lines changed: 0 additions & 1 deletion
@@ -14,6 +14,5 @@
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
     "use_flash_attn": false,
-    "tokenizer_name_or_path": "meta-llama/Llama-2-13b-chat-hf",
     "peft_method": "lora"
 }

tests/kfto/core/config_lora.json

Lines changed: 0 additions & 1 deletion
@@ -14,7 +14,6 @@
     "response_template": "\n### Label:",
     "dataset_text_field": "output",
     "use_flash_attn": false,
-    "tokenizer_name_or_path": "/tmp/model/bloom-560m",
     "peft_method": "lora",
     "target_modules": ["all-linear"]
 }

tests/kfto/core/config_merlinite_7b.json

Lines changed: 1 addition & 2 deletions
@@ -13,6 +13,5 @@
     "include_tokens_per_second": true,
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
-    "use_flash_attn": false,
-    "tokenizer_name_or_path": "ibm/merlinite-7b"
+    "use_flash_attn": false
 }

tests/kfto/core/config_meta_llama3_1_70b_lora.json

Lines changed: 0 additions & 1 deletion
@@ -14,6 +14,5 @@
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
     "use_flash_attn": false,
-    "tokenizer_name_or_path": "meta-llama/Meta-Llama-3.1-70B",
     "peft_method": "lora"
 }

tests/kfto/core/config_meta_llama3_1_8b.json

Lines changed: 1 addition & 2 deletions
@@ -13,6 +13,5 @@
     "include_tokens_per_second": true,
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
-    "use_flash_attn": false,
-    "tokenizer_name_or_path": "meta-llama/Meta-Llama-3.1-8B"
+    "use_flash_attn": false
 }

tests/kfto/core/config_meta_llama3_70b_instruct_lora.json

Lines changed: 0 additions & 1 deletion
@@ -14,6 +14,5 @@
     "response_template": "\n### Response:",
     "dataset_text_field": "output",
     "use_flash_attn": false,
-    "tokenizer_name_or_path": "meta-llama/Meta-Llama-3-70B-Instruct",
     "peft_method": "lora"
 }
