import { type DocEntry } from "./openai";
/**
 * Built-in documentation entries for the DeepSeek API.
 *
 * Each entry conforms to the shared DocEntry shape declared in ./openai
 * (stable `id`, display `title`, search `keywords`, markdown `content`).
 * The markdown bodies live in template literals, so code fences are
 * written with escaped backticks (\`\`\`) and every content line starts
 * at column 0 to avoid leaking indentation into the rendered markdown.
 */
export const deepseekDocs: DocEntry[] = [
  // Chat-completions endpoint reference (OpenAI-compatible surface).
  {
    id: "deepseek-chat",
    title: "DeepSeek Chat API",
    category: "chat",
    provider: "deepseek",
    keywords: ["deepseek", "chat", "completions", "reasoning", "coder"],
    content: `# DeepSeek Chat API
## Endpoint
POST https://api.deepseek.com/chat/completions
## Headers
- Authorization: Bearer YOUR_API_KEY
- Content-Type: application/json
## Request Body
\`\`\`json
{
"model": "deepseek-chat" | "deepseek-reasoner",
"messages": [
{
"role": "system" | "user" | "assistant" | "tool",
"content": "string"
}
],
"max_tokens": number,
"temperature": number (0-2, default 1),
"top_p": number (0-1),
"stream": boolean,
"stop": string[],
"presence_penalty": number (-2 to 2),
"frequency_penalty": number (-2 to 2),
"logprobs": boolean,
"top_logprobs": number (0-20),
"tools": [
{
"type": "function",
"function": {
"name": "string",
"description": "string",
"parameters": {JSON Schema}
}
}
],
"tool_choice": "auto" | "none" | "required",
"response_format": {"type": "text" | "json_object"}
}
\`\`\`
## Response
\`\`\`json
{
"id": "xxx",
"object": "chat.completion",
"created": 1234567890,
"model": "deepseek-chat",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "string",
"tool_calls": [...]
},
"finish_reason": "stop" | "length" | "tool_calls"
}
],
"usage": {
"prompt_tokens": 0,
"completion_tokens": 0,
"total_tokens": 0,
"prompt_cache_hit_tokens": 0,
"prompt_cache_miss_tokens": 0
}
}
\`\`\`
## Available Models
- deepseek-chat: General purpose, strong coding & math (DeepSeek-V3)
- deepseek-reasoner: Chain-of-thought reasoning model
## Python SDK Example
\`\`\`python
from openai import OpenAI
client = OpenAI(
base_url="https://api.deepseek.com",
api_key="sk-..."
)
response = client.chat.completions.create(
model="deepseek-chat",
messages=[
{"role": "user", "content": "Hello!"}
],
max_tokens=1024,
stream=False
)
print(response.choices[0].message.content)
\`\`\`
## Reasoning Model
\`\`\`python
response = client.chat.completions.create(
model="deepseek-reasoner",
messages=[
{"role": "user", "content": "Solve this math problem..."}
]
)
# The chain-of-thought is returned in a separate reasoning_content field;
# message.content holds only the final answer
print(response.choices[0].message.reasoning_content)  # thinking process
print(response.choices[0].message.content)  # final answer
\`\`\`
## Node.js Example
\`\`\`javascript
import OpenAI from "openai";
const client = new OpenAI({
baseURL: "https://api.deepseek.com",
apiKey: "sk-..."
});
const response = await client.chat.completions.create({
model: "deepseek-chat",
messages: [{ role: "user", content: "Hello!" }],
max_tokens: 1024
});
console.log(response.choices[0].message.content);
\`\`\`
## cURL Example
\`\`\`bash
curl https://api.deepseek.com/chat/completions \\
-H "Authorization: Bearer $DEEPSEEK_API_KEY" \\
-H "Content-Type: application/json" \\
-d '{
"model": "deepseek-chat",
"messages": [{"role": "user", "content": "Hello!"}],
"max_tokens": 1024
}'
\`\`\``
  },
  // Pricing, context-window limits, and error codes.
  {
    id: "deepseek-rate-limits",
    title: "DeepSeek Rate Limits & Pricing",
    category: "limits",
    provider: "deepseek",
    keywords: ["deepseek", "rate limit", "pricing", "cost", "cheap", "limits"],
    content: `# DeepSeek Rate Limits & Pricing
## Rate Limits
- Default: Varies by account tier
- Contact support for higher limits
## Pricing (per 1M tokens)
### deepseek-chat (DeepSeek-V3)
- Input (cache hit): $0.07
- Input (cache miss): $0.27
- Output: $1.10
### deepseek-reasoner
- Input (cache hit): $0.14
- Input (cache miss): $0.55
- Output: $2.19
## Context Windows
- deepseek-chat: 64K tokens (input), 8K tokens (output)
- deepseek-reasoner: 64K tokens (input), 8K tokens (output)
## Key Features
- Very competitive pricing
- Prompt caching (reduced price for repeated prefixes)
- OpenAI-compatible API
- Strong coding capabilities
## Error Codes
- 400: Bad Request
- 401: Invalid API key
- 402: Insufficient balance
- 429: Rate limit exceeded
- 500: Internal error
- 503: Server overloaded`
  },
  // Tool / function-calling guide (OpenAI-compatible tool schema).
  {
    id: "deepseek-function-calling",
    title: "DeepSeek Function Calling",
    category: "tools",
    provider: "deepseek",
    keywords: ["deepseek", "function", "tools", "tool calling"],
    content: `# DeepSeek Function Calling
DeepSeek supports OpenAI-compatible function calling.
## Supported Models
- deepseek-chat: Full function calling support
## Tool Definition
Same format as OpenAI:
\`\`\`json
{
"type": "function",
"function": {
"name": "get_weather",
"description": "Get weather",
"parameters": {
"type": "object",
"properties": {
"location": {"type": "string"}
},
"required": ["location"]
}
}
}
\`\`\`
## Python Example
\`\`\`python
from openai import OpenAI
client = OpenAI(
base_url="https://api.deepseek.com",
api_key="sk-..."
)
tools = [
{
"type": "function",
"function": {
"name": "get_weather",
"description": "Get weather",
"parameters": {
"type": "object",
"properties": {
"location": {"type": "string"}
},
"required": ["location"]
}
}
}
]
response = client.chat.completions.create(
model="deepseek-chat",
messages=[{"role": "user", "content": "Weather in Beijing?"}],
tools=tools
)
# Handle tool calls same as OpenAI
\`\`\``
  }
];