Python 调用多平台 DeepSeek 等大模型 API 的示例
适用平台:DeepSeek 官方、阿里云百炼、百度千帆、腾讯 TI(除 DeepSeek 外的其它模型同样适用)
第一部分:非流式输出
import os, time
import tiktoken
from datetime import datetime
from openai import OpenAI
from markitdown import MarkItDown
def file_analysis(file_path):
    """Convert a file to Markdown text and count its tokens.

    Returns a tuple of (markdown text, token count), where the count is
    measured with the gpt-4 tiktoken encoding.
    """
    converted = MarkItDown().convert(file_path)
    text = converted.text_content
    token_count = len(tiktoken.encoding_for_model("gpt-4").encode(text))
    return text, token_count
class deepseek():
    """Provider configuration for the official DeepSeek API."""

    def __init__(self):
        self.name = "deepseek"
        self.model_chat = "deepseek-chat"        # plain chat model
        self.model_reason = "deepseek-reasoner"  # reasoning model
        self.api_key = "xxx"
        self.base_url = "https://api.deepseek.com"
        # OpenAI-compatible client pointed at the DeepSeek endpoint.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
class aliyun():
    """Provider configuration for Aliyun Bailian (DashScope compatible mode)."""

    def __init__(self):
        self.name = "aliyun"
        self.model_chat = "deepseek-v3"   # plain chat model
        self.model_reason = "deepseek-r1" # reasoning model
        self.api_key = "xxx"
        self.base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
        # OpenAI-compatible client pointed at the DashScope endpoint.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
class tencent():
    """Provider configuration for Tencent LKE (OpenAI-compatible endpoint)."""

    def __init__(self):
        self.name = "tencent"
        self.model_chat = "deepseek-v3"   # plain chat model
        self.model_reason = "deepseek-r1" # reasoning model
        self.api_key = "xxx"
        self.base_url = "https://api.lkeap.cloud.tencent.com/v1"
        # OpenAI-compatible client pointed at the Tencent endpoint.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
class baidu():
    """Provider configuration for Baidu Qianfan (OpenAI-compatible endpoint).

    SECURITY FIX: the original source hard-coded a live-looking Qianfan API
    key. It is now read from the BAIDU_API_KEY environment variable (with
    the same "xxx" placeholder the other providers use as the fallback).
    The leaked key should be rotated.
    """

    def __init__(self):
        self.name = "baidu"
        self.model_chat = "deepseek-v3"   # plain chat model
        self.model_reason = "deepseek-r1" # reasoning model
        # Never commit credentials — pull the key from the environment.
        self.api_key = os.environ.get("BAIDU_API_KEY", "xxx")
        self.base_url = "https://qianfan.baidubce.com/v2"
        self.client = OpenAI(
            api_key=self.api_key,
            base_url=self.base_url,
        )
# Pick exactly one provider.
# client = deepseek()
# client = aliyun()
# client = tencent()
client = baidu()

selected_model = client.model_reason
question = "中国首都是哪里?"

start_time = time.time()  # moment the request is sent
completion = client.client.chat.completions.create(
    model=selected_model,
    messages=[
        {'role': 'user', 'content': question},
    ],
    stream=False,
    temperature=0.6,
)
response = completion.choices[0].message.content
# Reasoning models expose their chain-of-thought via `reasoning_content`;
# wrap it in <think> tags ahead of the final answer.
message = completion.choices[0].message
if 'reasoning_content' in message.model_extra:
    response = f"<think>\n{message.model_extra['reasoning_content']}\n</think>\n\n" + response
print(f'response:{response}')

tokens_input = completion.usage.prompt_tokens
tokens_output = completion.usage.completion_tokens
elapsed = time.time() - start_time  # round-trip time in seconds
# BUGFIX: the original used int() timestamps, so a sub-second response
# made `time_diff` 0 and the speed computation raised ZeroDivisionError.
speed = round((tokens_input + tokens_output) / max(elapsed, 1e-6), 2)
record = (
    f"{datetime.now().strftime('%Y/%m/%d %H:%M:%S')} Provider: {client.name} Model: {selected_model}.\n"
    f"input-tokens: {tokens_input}, output-tokens: {tokens_output}, time={round(elapsed)}s.\n"
    f"speed: {speed} token/s.\nUser:{question}\nResponse:{response}\n{100*'*'}\n\n"
)
# utf-8 explicitly: the record contains Chinese text and the platform
# default encoding is not guaranteed to handle it.
with open("record.txt", "a", encoding="utf-8") as f:
    f.write(record)
第二部分:流式输出
import os, time
import tiktoken
from datetime import datetime
from openai import OpenAI
from markitdown import MarkItDown
def file_analysis(file_path):
    """Parse *file_path* into Markdown text and report its token length.

    Returns (text_content, token_count); tokens are counted with the
    gpt-4 tiktoken encoding.
    """
    markdown = MarkItDown()
    content = markdown.convert(file_path).text_content
    enc = tiktoken.encoding_for_model("gpt-4")
    return content, len(enc.encode(content))
class deepseek():
    """Provider configuration for the official DeepSeek API."""

    def __init__(self):
        self.name = "deepseek"
        self.model_chat = "deepseek-chat"        # plain chat model
        self.model_reason = "deepseek-reasoner"  # reasoning model
        self.api_key = "xxx"
        self.base_url = "https://api.deepseek.com"
        # OpenAI-compatible client pointed at the DeepSeek endpoint.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
class aliyun():
    """Provider configuration for Aliyun Bailian (DashScope compatible mode)."""

    def __init__(self):
        self.name = "aliyun"
        self.model_chat = "deepseek-v3"   # plain chat model
        self.model_reason = "deepseek-r1" # reasoning model
        self.api_key = "xxx"
        self.base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1"
        # OpenAI-compatible client pointed at the DashScope endpoint.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
class tencent():
    """Provider configuration for Tencent LKE (OpenAI-compatible endpoint)."""

    def __init__(self):
        self.name = "tencent"
        self.model_chat = "deepseek-v3"   # plain chat model
        self.model_reason = "deepseek-r1" # reasoning model
        self.api_key = "xxx"
        self.base_url = "https://api.lkeap.cloud.tencent.com/v1"
        # OpenAI-compatible client pointed at the Tencent endpoint.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
class baidu():
    """Provider configuration for Baidu Qianfan (OpenAI-compatible endpoint)."""

    def __init__(self):
        self.name = "baidu"
        self.model_chat = "deepseek-v3"   # plain chat model
        self.model_reason = "deepseek-r1" # reasoning model
        self.api_key = "xxx"
        self.base_url = "https://qianfan.baidubce.com/v2"
        # OpenAI-compatible client pointed at the Qianfan endpoint.
        self.client = OpenAI(api_key=self.api_key, base_url=self.base_url)
# Pick exactly one provider.
client = deepseek()
# client = aliyun()
# client = tencent()
# client = baidu()

selected_model = client.model_reason
question = "中国首都是哪里?只回答城市名,不要分析。"

start_time = time.time()  # moment the request is sent
response = client.client.chat.completions.create(
    model=selected_model,
    messages=[
        {'role': 'user', 'content': question},
    ],
    stream=True,  # enable streaming output
    temperature=0.6,
)

text = ""
think_opened = False  # has "<think>" been emitted yet?
think_closed = False  # has "</think>" been emitted yet?
last_chunk = None     # final stream chunk, needed for usage stats after the loop
for chunk in response:
    last_chunk = chunk
    delta = chunk.choices[0].delta
    # Reasoning models stream their chain-of-thought via `reasoning_content`
    # (guard against model_extra being absent/None on some providers).
    reasoning = (delta.model_extra or {}).get('reasoning_content')
    if reasoning is not None:
        if not think_opened:
            reasoning = "<think>\n" + reasoning  # open the think block once
            think_opened = True
        print(reasoning, end='')
        text += reasoning
    if delta.content:
        piece = delta.content
        # BUGFIX: close the think block BEFORE the first answer chunk —
        # the original appended "</think>" after it, mangling the output.
        if think_opened and not think_closed:
            piece = "</think>\n" + piece
            think_closed = True
        print(piece, end='')
        text += piece
print()

# Some providers omit usage in streaming mode; fall back to counting with
# tiktoken. BUGFIX: the original referenced the loop variable after the
# loop, which raises NameError on an empty stream — `last_chunk` guards that.
if last_chunk is None or last_chunk.usage is None:
    encoding = tiktoken.encoding_for_model("gpt-4")
    tokens_input = len(encoding.encode(question))
    tokens_output = len(encoding.encode(text))
else:
    tokens_input = last_chunk.usage.prompt_tokens
    tokens_output = last_chunk.usage.completion_tokens

elapsed = time.time() - start_time  # seconds until the stream finished
# BUGFIX: the original used int() timestamps, so a sub-second response
# made the divisor 0 and raised ZeroDivisionError.
speed = round((tokens_input + tokens_output) / max(elapsed, 1e-6), 2)
record = (
    f"{datetime.now().strftime('%Y/%m/%d %H:%M:%S')} Provider: {client.name} Model: {selected_model}.\n"
    f"input-tokens: {tokens_input}, output-tokens: {tokens_output}, time={round(elapsed)}s.\n"
    f"speed: {speed} token/s.\nUser:{question}\nResponse:{text}\n{100*'*'}\n\n"
)
# utf-8 explicitly: the record contains Chinese text and the platform
# default encoding is not guaranteed to handle it.
with open("record.txt", "a", encoding="utf-8") as f:
    f.write(record)