当前位置: 首页 > news > 正文

python调用多平台deepseek等大模型api

deepseek官方平台、阿里云百炼、百度千帆、腾讯Ti(除deepseek外其它模型也适用)

非流式输出

import os, time
import tiktoken
from datetime import datetime
from openai import OpenAI
from markitdown import MarkItDown

def file_analysis(file_path):
    """Convert a document to plain text and estimate its token count.

    Uses MarkItDown to extract text from the file at *file_path*, then
    counts tokens with the GPT-4 tiktoken encoding.
    Returns a (text_content, token_count) tuple.
    """
    converter = MarkItDown()
    content = converter.convert(file_path).text_content
    enc = tiktoken.encoding_for_model("gpt-4")
    return content, len(enc.encode(content))

class deepseek():
    """Provider config for the official DeepSeek API (OpenAI-compatible)."""

    def __init__(self):
        self.name = "deepseek"          # provider label used in log records
        self.model_chat = "deepseek-chat"
        self.model_reason = "deepseek-reasoner"
        self.api_key = "xxx"            # placeholder — supply your own key
        self.base_url = "https://api.deepseek.com"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

class aliyun():
    """Provider config for Alibaba Cloud Bailian (DashScope compatible mode)."""

    def __init__(self):
        self.name = "aliyun"            # provider label used in log records
        self.model_chat = "deepseek-v3"
        self.model_reason = "deepseek-r1"
        self.api_key = "xxx"            # placeholder — supply your own key
        self.base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

class tencent():
    """Provider config for Tencent LKE (OpenAI-compatible endpoint)."""

    def __init__(self):
        self.name = "tencent"           # provider label used in log records
        self.model_chat = "deepseek-v3"
        self.model_reason = "deepseek-r1"
        self.api_key = "xxx"            # placeholder — supply your own key
        self.base_url = "https://api.lkeap.cloud.tencent.com/v1"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

class baidu():
    """Provider config for Baidu Qianfan (OpenAI-compatible endpoint)."""

    def __init__(self):
        self.name = "baidu"             # provider label used in log records
        self.model_chat = "deepseek-v3"
        self.model_reason = "deepseek-r1"
        # SECURITY FIX: the original hard-coded a live-looking bearer token
        # here. Never commit credentials to source; read the key from the
        # environment and fall back to a placeholder.
        self.api_key = os.environ.get("QIANFAN_API_KEY", "xxx")
        self.base_url = "https://qianfan.baidubce.com/v2"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

# Select exactly one provider; leave the others commented out.
# client = deepseek()
# client = aliyun()
# client = tencent()
client = baidu()

start_time = time.time()  # send time; kept as float so sub-second calls work
selected_model = client.model_reason

question = "中国首都是哪里?"
completion = client.client.chat.completions.create(
    model=selected_model,
    messages=[
        {'role': 'user', 'content': question},
    ],
    stream=False,
    temperature=0.6,
)

response = completion.choices[0].message.content
# Reasoning models expose their chain-of-thought via the non-standard
# `reasoning_content` extra field; wrap it in <think> tags when present.
extra = completion.choices[0].message.model_extra
if 'reasoning_content' in extra:
    response = f"<think>\n{extra['reasoning_content']}\n</think>\n\n" + response
print(f'response:{response}')

tokens_input = completion.usage.prompt_tokens
tokens_output = completion.usage.completion_tokens

time_diff = int(time.time() - start_time)  # elapsed whole seconds
# BUGFIX: the original divided by time_diff directly, which raises
# ZeroDivisionError when the round trip takes under one second.
speed = round((tokens_input + tokens_output) / max(time_diff, 1), 2)

record = f"{datetime.now().strftime('%Y/%m/%d %H:%M:%S')} Provider: {client.name} Model: {selected_model}.\ninput-tokens: {tokens_input}, output-tokens: {tokens_output}, time={time_diff}s.\nspeed: {speed} token/s.\nUser:{question}\nResponse:{response}\n{100*'*'}\n\n"

# BUGFIX: force UTF-8 so the Chinese record text writes correctly
# regardless of the platform's default locale encoding.
with open("record.txt", "a", encoding="utf-8") as f:
    f.write(record)

流式输出

import os, time
import tiktoken
from datetime import datetime
from openai import OpenAI
from markitdown import MarkItDown

def file_analysis(file_path):
    """Extract a file's text with MarkItDown and count its GPT-4 tokens.

    Returns a (text_content, token_count) tuple.
    """
    extracted = MarkItDown().convert(file_path).text_content
    tokenizer = tiktoken.encoding_for_model("gpt-4")
    token_count = len(tokenizer.encode(extracted))
    return extracted, token_count

class deepseek():
    """Official DeepSeek endpoint settings plus a ready-made OpenAI client."""

    def __init__(self):
        # Provider label used in log records.
        self.name = "deepseek"
        self.model_chat = "deepseek-chat"
        self.model_reason = "deepseek-reasoner"
        self.api_key = "xxx"  # placeholder — supply your own key
        self.base_url = "https://api.deepseek.com"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

class aliyun():
    """Alibaba Cloud Bailian settings plus a ready-made OpenAI client."""

    def __init__(self):
        # Provider label used in log records.
        self.name = "aliyun"
        self.model_chat = "deepseek-v3"
        self.model_reason = "deepseek-r1"
        self.api_key = "xxx"  # placeholder — supply your own key
        self.base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

class tencent():
    """Tencent LKE settings plus a ready-made OpenAI client."""

    def __init__(self):
        # Provider label used in log records.
        self.name = "tencent"
        self.model_chat = "deepseek-v3"
        self.model_reason = "deepseek-r1"
        self.api_key = "xxx"  # placeholder — supply your own key
        self.base_url = "https://api.lkeap.cloud.tencent.com/v1"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

class baidu():
    """Baidu Qianfan settings plus a ready-made OpenAI client."""

    def __init__(self):
        # Provider label used in log records.
        self.name = "baidu"
        self.model_chat = "deepseek-v3"
        self.model_reason = "deepseek-r1"
        self.api_key = "xxx"  # placeholder — supply your own key
        self.base_url = "https://qianfan.baidubce.com/v2"
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)

# Select exactly one provider; leave the others commented out.
client = deepseek()
# client = aliyun()
# client = tencent()
# client = baidu()

selected_model = client.model_reason

start_time = time.time()  # send time; kept as float so sub-second calls work
question = "中国首都是哪里?只回答城市名,不要分析。"

response = client.client.chat.completions.create(
    model=selected_model,
    messages=[
        {'role': 'user', 'content': question},
    ],
    stream=True,  # enable streaming output
    temperature=0.6,
)

text = ""
think_opened = False  # True once "<think>" has been emitted
think_closed = False  # True once "</think>" has been emitted
msg = None            # BUGFIX: defined up front so an empty stream can't
                      # leave it unbound for the usage check below
for msg in response:
    delta = msg.choices[0].delta
    # Reasoning models stream chain-of-thought in the non-standard
    # `reasoning_content` extra field (model_extra may be None).
    reasoning = (delta.model_extra or {}).get('reasoning_content')
    if reasoning is not None:
        chunk = reasoning
        if not think_opened:
            chunk = "<think>\n" + chunk  # open the think block once
            think_opened = True
        print(chunk, end='')
        text = text + chunk
    if delta.content:
        chunk = delta.content
        # BUGFIX: close the think block BEFORE the first answer chunk;
        # the original appended "</think>" after it, producing
        # "...answer</think>" instead of "...</think>answer".
        if think_opened and not think_closed:
            chunk = "</think>\n" + chunk
            think_closed = True
        print(chunk, end='')
        text = text + chunk
print()

if msg is None or msg.usage is None:
    # The provider omitted usage on the final chunk (or nothing streamed);
    # fall back to a local tiktoken estimate.
    encoding = tiktoken.encoding_for_model("gpt-4")
    tokens_input = len(encoding.encode(question))
    tokens_output = len(encoding.encode(text))
else:
    tokens_input = msg.usage.prompt_tokens
    tokens_output = msg.usage.completion_tokens

time_diff = int(time.time() - start_time)  # elapsed whole seconds
# BUGFIX: guard against ZeroDivisionError for sub-second round trips.
speed = round((tokens_input + tokens_output) / max(time_diff, 1), 2)

record = f"{datetime.now().strftime('%Y/%m/%d %H:%M:%S')} Provider: {client.name} Model: {selected_model}.\ninput-tokens: {tokens_input}, output-tokens: {tokens_output}, time={time_diff}s.\nspeed: {speed} token/s.\nUser:{question}\nResponse:{text}\n{100*'*'}\n\n"

# BUGFIX: force UTF-8 so the Chinese record text writes correctly
# regardless of the platform's default locale encoding.
with open("record.txt", "a", encoding="utf-8") as f:
    f.write(record)

相关文章:

  • 求助文心一言帮我用antv x6开发一个直线审批流程设计页面Vue2.0
  • int* a = new int(3);delete a;后会调用析构函数吗?
  • ClickHouse的前世今生
  • Training for Computer Use
  • 【实战AI】利用deepseek 在mac本地部署搭建个人知识库
  • 堆栈欺骗技术
  • leetcode 2684. 矩阵中移动的最大次数
  • DeepSeek介绍本地部署保姆级教程
  • 三角测量——用相机运动估计特征点的空间位置
  • MySQL与Oracle对比及区别
  • #渗透测试#批量漏洞挖掘#致远互联AnalyticsCloud 分析云 任意文件读取
  • Maven 构建优化技巧
  • Grafana-使用Button修改MySQL数据库
  • Proxmox 更新软件包数据库(TASK ERROR: command ‘apt-get update‘ failed: exit code 100)
  • 青少年编程与数学 02-009 Django 5 Web 编程 12课题、表单处理
  • HTTP请求响应分析:HTTP/1.1→HTTP/2
  • Linux 查看磁盘中的大文件
  • 第一章:认识Tailwind CSS - 第四节 - Tailwind CSS 与其他 CSS 方案的对比
  • sib报错:com.*.xctrunner is not in your device!
  • 排序算法——人无完人
  • 百度信息流/百度seo引流
  • wordpress后台极慢/深圳优化网站
  • 上海企业建站工具/百度推广网站一年多少钱
  • 时网站建设公司管理/seo公司资源
  • 公司做网站提供产品加盟费/看颜色应该搜索哪些词汇
  • 网站微信客服代码/网页设计和网站制作