#!/usr/bin/env python3

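# Minimal inference example using Hugging Face Transformers.
# Assumes the `transformers` package is installed; `accelerate` is typically
# also required when using device_map="auto".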
from transformers import AutoModelForCausalLM, AutoTokenizer

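# Load the tokenizer and model from the current directory.
# trust_remote_code=True allows custom modeling code shipped with the checkpoint to run.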
model_path = "./"
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_path,
    device_map="auto",
    trust_remote_code=True,
)

prompt = "山东省最高的山是"

print("=================== input ===================")
print(prompt)
print("=================== output ==================")

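# Tokenize the prompt, move it to the model's device, and generate up to 50 new tokens.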
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(
    **inputs,
    max_new_tokens=50,
)
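# Decode the full sequence (prompt plus completion), dropping special tokens.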
result = tokenizer.decode(outputs[0], skip_special_tokens=True)

print(result)