import os
from transformers import AutoTokenizer, AutoModelForQuestionAnswering, pipeline

# NOTE(review): "HF_MODELS_HOME" is not an environment variable read by
# huggingface_hub/transformers (the recognized ones are HF_HOME /
# TRANSFORMERS_CACHE / HF_HUB_CACHE) — confirm intent. It is harmless here
# because the model is loaded from an explicit local path below.
os.environ["HF_MODELS_HOME"] = "E:\\data\\ai_model\\"
# Silence the "duplicate OpenMP runtime" abort that torch can trigger on Windows.
os.environ["KMP_DUPLICATE_LIB_OK"] = "TRUE"
# Directory that holds the locally downloaded model folders.
local_model_path = "E:\\data\\ai_model\\"


def pipeline_question1():
    """Run an extractive question-answering demo with a local DistilBERT-SQuAD model.

    Loads the tokenizer and model from ``local_model_path`` in offline mode
    (``local_files_only=True``, no network access), builds a Hugging Face
    ``question-answering`` pipeline, answers a fixed sample question about the
    Apollo program, prints the question/answer pair, and returns the raw
    pipeline result so callers can inspect it programmatically.

    Returns:
        dict: pipeline output with keys ``score``, ``start``, ``end``
        and ``answer``.
    """
    # os.path.join handles the trailing path separator correctly instead of
    # relying on brittle string concatenation.
    model_name = os.path.join(local_model_path, "distilbert-base-uncased-distilled-squad")

    # local_files_only=True forces loading from the local directory above and
    # fails fast instead of attempting a Hub download.
    tokenizer = AutoTokenizer.from_pretrained(model_name, local_files_only=True)
    model = AutoModelForQuestionAnswering.from_pretrained(model_name, local_files_only=True)

    # Build the extractive QA pipeline from the pre-loaded model/tokenizer.
    qa_pipeline = pipeline("question-answering", model=model, tokenizer=tokenizer)

    # Fixed demo context and question.
    context = "The Apollo program was the third United States human spaceflight program carried out by the National Aeronautics and Space Administration (NASA), which succeeded in landing the first humans on the Moon from 1969 to 1972."
    question = "What was the Apollo program?"

    # Run inference; the pipeline extracts the answer span from the context.
    result = qa_pipeline(question=question, context=context)

    print(f"Question: {question}")
    print(f"Answer: {result['answer']}")

    # Return the full result (answer text plus score and character span)
    # so the function is usable beyond its print side effects.
    return result


if __name__ == '__main__':
    # Script entry point: run the offline question-answering demo.
    pipeline_question1()

执行结果:
Question: What was the Apollo program?
Answer: the third United States human spaceflight program

Logo

AtomGit 是由开放原子开源基金会联合 CSDN 等生态伙伴共同推出的新一代开源与人工智能协作平台。平台坚持“开放、中立、公益”的理念,把代码托管、模型共享、数据集托管、智能体开发体验和算力服务整合在一起,为开发者提供从开发、训练到部署的一站式体验。

更多推荐