Phi-2
Last updated
def generate_answer(question, options, context, abbreviations, model, tokenizer):
    """Generate a short answer for a multiple-choice question with a causal LM.

    Parameters
    ----------
    question, options, context, abbreviations :
        Passed through unchanged to ``create_prompt`` to build the model prompt.
    model :
        A HuggingFace causal LM already placed on CUDA (the inputs are moved
        to ``"cuda"`` explicitly below).
    tokenizer :
        The tokenizer matching ``model``.

    Returns
    -------
    str
        The decoded continuation only (at most 10 new tokens), with special
        tokens stripped.
    """
    prompt = create_prompt(question, options, context, abbreviations)
    input_ids = tokenizer.encode(prompt, return_tensors="pt").to("cuda")
    # NOTE(review): many tokenizers (Phi-2 included) ship without a pad token,
    # so pad_token_id may be None — tensor.ne(None) would raise TypeError.
    # Fall back to EOS, which is also what generate() is told to pad with.
    pad_id = tokenizer.pad_token_id
    if pad_id is None:
        pad_id = tokenizer.eos_token_id
    attention_mask = input_ids.ne(pad_id).long().to("cuda")
    outputs = model.generate(
        input_ids,
        attention_mask=attention_mask,
        max_new_tokens=10,
        pad_token_id=tokenizer.eos_token_id,
        num_beams=1,
        # early_stopping only affects beam search; harmless with num_beams=1.
        early_stopping=True,
    )
    # Decode only the newly generated tokens, skipping the prompt prefix.
    answer = tokenizer.decode(
        outputs[0][input_ids.shape[1]:], skip_special_tokens=True
    )
    return answer


def parse_answer(response):
    """Extract an ``"Option N"`` label from a model response string.

    Tries an explicit ``"Answer: Option N"`` pattern first (case-insensitive);
    failing that, falls back to the first run of digits anywhere in the
    response. Returns ``"Error"`` when no digit is found at all.
    """
    match = re.search(r"Answer:\s*Option\s*(\d+)", response, re.IGNORECASE)
    if match:
        return f"Option {match.group(1)}"
    # Fallback: accept any bare number, e.g. a response of just "2".
    match = re.search(r"(\d+)", response)
    if match:
        return f"Option {match.group(1)}"
    return "Error"