Mirror of https://github.com/deepseek-ai/DeepSeek-VL2.git (synced 2025-02-23 06:09:04 -05:00)
Parent: 7caa51a05c
Commit: 2ebcda008d
@@ -156,6 +156,7 @@ inputs_embeds = vl_gpt.prepare_inputs_embeds(**prepare_inputs)
 
 # run the model to get the response
 outputs = vl_gpt.language.generate(
+    input_ids = prepare_inputs["input_ids"].to(vl_gpt.device),
     inputs_embeds=inputs_embeds,
     attention_mask=prepare_inputs.attention_mask,
     pad_token_id=tokenizer.eos_token_id,
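This first hunk appears to update the README's inference example so that input_ids is passed to generate() explicitly, moved onto the model's device. A minimal sketch of how the surrounding call might read after this change follows, using the variable names visible in the hunk (vl_gpt, tokenizer, prepare_inputs); the arguments past pad_token_id and the decode step are illustrative assumptions, not part of the diff.

# Sketch only: README-style generation step after this change (assumed surrounding code).
inputs_embeds = vl_gpt.prepare_inputs_embeds(**prepare_inputs)

outputs = vl_gpt.language.generate(
    input_ids=prepare_inputs["input_ids"].to(vl_gpt.device),  # the line this commit adds
    inputs_embeds=inputs_embeds,
    attention_mask=prepare_inputs.attention_mask,
    pad_token_id=tokenizer.eos_token_id,
    max_new_tokens=512,   # assumed for illustration
    do_sample=False,      # assumed for illustration
    use_cache=True,       # assumed for illustration
)

answer = tokenizer.decode(outputs[0].cpu().tolist(), skip_special_tokens=True)
print(answer)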
@@ -1753,13 +1753,16 @@ class DeepseekV2ForCausalLM(DeepseekV2PreTrainedModel):
             output = (logits,) + outputs[1:]
             return (loss,) + output if loss is not None else output
 
-        return CausalLMOutputWithPast(
+        device = input_ids.device if input_ids is not None else inputs_embeds.device
+        output = CausalLMOutputWithPast(
             loss=loss,
             logits=logits,
             past_key_values=outputs.past_key_values,
             hidden_states=outputs.hidden_states,
             attentions=outputs.attentions,
         )
+        output['logits'] = output['logits'].to(device)
+        return output
 
     def prepare_inputs_for_generation(
         self,
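The second hunk changes DeepseekV2ForCausalLM.forward() to record the device of the incoming input_ids (or, failing that, inputs_embeds) and to move the returned logits onto that device. This presumably matters when the model is sharded across several GPUs (for example with device_map="auto"), where the lm_head that produces the logits can live on a different GPU than the inputs. Note that CausalLMOutputWithPast is a transformers ModelOutput, which accepts item-style assignment, so output['logits'] = ... updates the returned logits in place. Below is a minimal standalone sketch of the same guard; finalize_logits is a hypothetical helper, not code from the repository.

# Minimal sketch (hypothetical helper, not repo code) of the device guard added above.
import torch

def finalize_logits(logits, input_ids=None, inputs_embeds=None):
    # Same guard as the patched forward(): prefer the device of input_ids,
    # otherwise fall back to the device of inputs_embeds.
    device = input_ids.device if input_ids is not None else inputs_embeds.device
    # No-op when the devices already match; a real cross-device copy when they differ.
    return logits.to(device)

input_ids = torch.tensor([[1, 2, 3]])   # stays on CPU in this sketch
logits = torch.randn(1, 3, 32000)       # stand-in for the lm_head output
print(finalize_logits(logits, input_ids).device)  # -> cpu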