Luigi committed on
Commit 16d38dd · 1 Parent(s): 0f55482

do not add generation prompt at the end of the example

Files changed (1): train_with_unsloth.py +1 -1
train_with_unsloth.py CHANGED
@@ -105,7 +105,7 @@ def load_fitting_samples(dataset_id, tokenizer, max_len, n_samples, seed=3407):
         text = tokenizer.apply_chat_template(
             example["messages"],
             tokenize=False,
-            add_generation_prompt=True,
+            add_generation_prompt=False,
         )
         # 3) Quick length check on token IDs
         tokens = tokenizer(text, add_special_tokens=False)["input_ids"]
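
Context (not part of the commit): a minimal sketch of why the flag flips for training data. With add_generation_prompt=True, the chat template appends an empty assistant header after the example, which is only wanted at inference time when the model is about to generate; a training example that already ends with the assistant's reply should be rendered without it. The model name below is illustrative; any tokenizer with a chat template behaves the same way.

# Sketch only; model name is an assumption, not from the commit.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Qwen/Qwen2.5-0.5B-Instruct")

messages = [
    {"role": "user", "content": "What is 2 + 2?"},
    {"role": "assistant", "content": "4"},
]

# Training example: it already ends with the assistant's reply,
# so no empty assistant header is appended after it.
train_text = tokenizer.apply_chat_template(
    messages, tokenize=False, add_generation_prompt=False
)

# Inference prompt: ends after the user turn, and the generation
# prompt marks where the model should start writing its answer.
infer_text = tokenizer.apply_chat_template(
    messages[:-1], tokenize=False, add_generation_prompt=True
)

print(train_text)
print(infer_text)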