Skip to content

Commit 340d4ed

Browse files
committed
Fixed token id
1 parent 53700a1 commit 340d4ed

File tree

1 file changed

+8
-1
lines changed

1 file changed

+8
-1
lines changed

llm_exl2_dynamic_gen.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -201,6 +201,13 @@ def display(self):
201201
if self.console_line is not None:
202202
print(term.move_xy(0, self.console_line) + self.display_text)
203203

204+
def get_stop_conditions(prompt_format, tokenizer):
    """Return the list of stop conditions for a given prompt format.

    Args:
        prompt_format: Name of the chat/prompt template ("llama", "llama3",
            "granite", ...).
        tokenizer: Tokenizer exposing `eos_token_id` and `single_id(piece)`.

    Returns:
        A list of stop conditions (token ids and/or literal strings) suitable
        for ExLlamaV2DynamicJob's `stop_conditions` argument.
    """
    if prompt_format == "llama":
        return [tokenizer.eos_token_id]
    elif prompt_format == "llama3":
        # Llama 3 ends each turn with the dedicated <|eot_id|> token,
        # not the generic EOS token.
        return [tokenizer.single_id("<|eot_id|>")]
    elif prompt_format == "granite":
        # Granite stops on EOS or when it starts a new "Question:" turn.
        return [tokenizer.eos_token_id, "\n\nQuestion:"]
    else:
        # Fallback for unknown formats: previously this path implicitly
        # returned None, which would be passed straight into
        # `stop_conditions` downstream. Default to stopping on EOS.
        return [tokenizer.eos_token_id]
204211

205212
config = configparser.ConfigParser()
206213
config.read('config.ini')
@@ -462,7 +469,7 @@ def process_prompts():
462469
job = ExLlamaV2DynamicJob(
463470
input_ids = ids,
464471
max_new_tokens = max_tokens,
465-
stop_conditions = [tokenizer.eos_token_id] if stop_at is None else [tokenizer.eos_token_id, stop_at],
472+
stop_conditions = get_stop_conditions(prompt_format, tokenizer) if stop_at is None else [*get_stop_conditions(prompt_format, tokenizer), stop_at],
466473
gen_settings = ExLlamaV2Sampler.Settings(),
467474
filters = filters,
468475
token_healing = healing

0 commit comments

Comments
 (0)