# Model download
from modelscope import snapshot_download
from transformers import AutoModelForCausalLM, AutoTokenizer

# Inference function
def generate_txet(prompt):
    # Download the model snapshot from ModelScope, then load it from the local directory
    model_dir = snapshot_download('Qwen/Qwen3-0.6B', cache_dir="./")
    tokenizer = AutoTokenizer.from_pretrained(model_dir)
    model = AutoModelForCausalLM.from_pretrained(model_dir)
    # 1. Build the chat input
    messages = [
        {"role": "user", "content": prompt}
    ]
    text = tokenizer.apply_chat_template(
        messages,
        tokenizer=False,
        add_generation_promot=True,
        enable_thinking=True
    )
    # Tokenize the rendered prompt
    model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
    # Generate the output
    generated_ids = model.generate(
        **model_inputs,
        max_new_tokens=32768
    )
    # Drop the prompt tokens, keeping only the newly generated ids
    output_ids = generated_ids[0][len(model_inputs.input_ids[0]):].tolist()
    try:
        # rindex: find the last occurrence of token 151668 (</think>) to split
        # the thinking part from the final answer
        index = len(output_ids) - output_ids[::-1].index(151668)
    except ValueError:
        # No </think> token found: treat everything as the final answer
        index = 0
    thinking_content = tokenizer.decode(output_ids[:index], skip_special_tokens=True).strip("\n")
    content = tokenizer.decode(output_ids[index:], skip_special_tokens=True).strip("\n")
    '''
    print("thinking content:", thinking_content)
    print("content:", content)
    '''
    return thinking_content + content
if __name__ == "__main__":
    generate_txet("人工智能对社会的影响是什么")
Error output:
root@I2515370b0e0040194b:~/chensq/course_code/chapter3# /bin/python3.11 /root/chensq/course_code/chapter3/chensq_test.py
Downloading Model from https://www.modelscope.cn to directory: ./Qwen/Qwen3-0.6B
2025-11-19 18:44:45,366 - modelscope - INFO - Target directory already exists, skipping creation.
Traceback (most recent call last):
  File "/root/chensq/course_code/chapter3/chensq_test.py", line 45, in <module>
    generate_txet("人工智能对社会的影响是什么")
  File "/root/chensq/course_code/chapter3/chensq_test.py", line 21, in generate_txet
    model_inputs = tokenizer([text], return_tensors="pt").to(model.device)
                   ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/usr/local/lib/python3.11/dist-packages/transformers/tokenization_utils_base.py", line 2938, in __call__
    encodings = self._call_one(text=text, text_pair=text_pair, **all_kwargs)
                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/usr/local/lib/python3.11/dist-packages/transformers/tokenization_utils_base.py", line 2998, in _call_one
    raise ValueError(
ValueError: text input must be of type str (single example), list[str] (batch or single pretokenized example) or list[list[str]]
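The ValueError is raised by the tokenizer call, but it most likely originates one step earlier, in apply_chat_template. Two keyword arguments appear misspelled: tokenizer=False (should be tokenize=False) and add_generation_promot=True (should be add_generation_prompt=True). Unknown keywords are just passed through to the chat template and ignored, so the default tokenize=True runs and the method returns a list of token ids instead of a prompt string; wrapping that list as [text] and feeding it back into tokenizer(...) then trips the type check shown above. A minimal sketch of the corrected step, assuming the intent was to render the template to a string first (only these lines change; the rest of generate_txet stays as posted):

    # 1. Build the chat input (corrected)
    text = tokenizer.apply_chat_template(
        messages,
        tokenize=False,               # was: tokenizer=False (typo), so the default tokenize=True ran
        add_generation_prompt=True,   # was: add_generation_promot=True (typo, silently ignored)
        enable_thinking=True
    )
    # text is now a str, so this batch-of-one tokenizer call is valid
    model_inputs = tokenizer([text], return_tensors="pt").to(model.device)

Alternatively, apply_chat_template(..., tokenize=True, add_generation_prompt=True, return_dict=True, return_tensors="pt") could produce model-ready inputs directly, but the change above is the smallest fix to the posted code.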