
Migration Guide

Learn how to migrate from other AI providers to the DeepSeek API with minimal code changes and maximum compatibility.

Overview

The DeepSeek API is designed to be compatible with OpenAI's API format, making migration straightforward. This guide covers:

  • OpenAI to DeepSeek: Direct migration with minimal changes
  • Other providers: Migration strategies for various AI services
  • Code examples: Practical migration examples
  • Best practices: Ensuring smooth transition
  • Testing strategies: Validating your migration

OpenAI to DeepSeek Migration

Quick Migration

The simplest migration requires only changing the base URL and API key:

python
# Before (OpenAI)
from openai import OpenAI

client = OpenAI(
    api_key="sk-your-openai-key"
)

response = client.chat.completions.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "user", "content": "Hello, world!"}
    ]
)

# After (DeepSeek)
from openai import OpenAI

client = OpenAI(
    api_key="sk-your-deepseek-key",
    base_url="https://api.deepseek.com/v1"
)

response = client.chat.completions.create(
    model="deepseek-chat",  # Changed model name
    messages=[
        {"role": "user", "content": "Hello, world!"}
    ]
)
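
Before changing application code, it can be worth a quick smoke test that the new key and base URL actually resolve. A minimal sketch, assuming the OpenAI-compatible model-listing endpoint:

python
# Connectivity check against the DeepSeek endpoint
from openai import OpenAI

client = OpenAI(
    api_key="sk-your-deepseek-key",
    base_url="https://api.deepseek.com/v1"
)

try:
    models = client.models.list()
    print("Available models:", [m.id for m in models.data])
except Exception as e:
    print(f"Connectivity check failed: {e}")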

Environment Variable Migration

bash
# Before (OpenAI)
export OPENAI_API_KEY="sk-your-openai-key"

# After (DeepSeek)
export DEEPSEEK_API_KEY="sk-your-deepseek-key"
export DEEPSEEK_BASE_URL="https://api.deepseek.com/v1"

python
import os
from openai import OpenAI

# Environment-based configuration
client = OpenAI(
    api_key=os.getenv("DEEPSEEK_API_KEY"),
    base_url=os.getenv("DEEPSEEK_BASE_URL", "https://api.deepseek.com/v1")
)

Configuration File Migration

python
# config.py
import os

class APIConfig:
    """Centralized API configuration"""
    
    def __init__(self, provider: str = "deepseek"):
        self.provider = provider.lower()
        
        if self.provider == "openai":
            self.api_key = os.getenv("OPENAI_API_KEY")
            self.base_url = None
            self.default_model = "gpt-3.5-turbo"
        
        elif self.provider == "deepseek":
            self.api_key = os.getenv("DEEPSEEK_API_KEY")
            self.base_url = "https://api.deepseek.com/v1"
            self.default_model = "deepseek-chat"
        
        else:
            raise ValueError(f"Unsupported provider: {provider}")
    
    def get_client(self):
        """Get configured OpenAI client"""
        from openai import OpenAI
        
        kwargs = {"api_key": self.api_key}
        if self.base_url:
            kwargs["base_url"] = self.base_url
        
        return OpenAI(**kwargs)

# Usage
config = APIConfig("deepseek")  # Change this to switch providers
client = config.get_client()

response = client.chat.completions.create(
    model=config.default_model,
    messages=[{"role": "user", "content": "Hello!"}]
)

Model Mapping

Direct Model Equivalents

python
# Model mapping for migration
MODEL_MAPPING = {
    # OpenAI -> DeepSeek
    "gpt-3.5-turbo": "deepseek-chat",
    "gpt-4": "deepseek-chat",
    "gpt-4-turbo": "deepseek-chat",
    "gpt-4o": "deepseek-chat",
    
    # Code-specific models
    "gpt-3.5-turbo-instruct": "deepseek-coder",
    "code-davinci-002": "deepseek-coder",
    
    # Math-heavy tasks: consider "deepseek-math" instead of the default
    # mapping (a second "gpt-4" key here would silently overwrite the one above)
}

def migrate_model_name(openai_model: str) -> str:
    """Convert OpenAI model name to DeepSeek equivalent"""
    return MODEL_MAPPING.get(openai_model, "deepseek-chat")

# Usage
openai_model = "gpt-3.5-turbo"
deepseek_model = migrate_model_name(openai_model)
print(f"Migrating from {openai_model} to {deepseek_model}")

Model Selection Guide

python
from typing import Optional

class ModelSelector:
    """Help select appropriate DeepSeek model based on use case"""
    
    @staticmethod
    def recommend_model(use_case: str, original_model: Optional[str] = None) -> str:
        """Recommend DeepSeek model based on use case"""
        
        use_case = use_case.lower()
        
        if any(keyword in use_case for keyword in ["code", "programming", "debug", "refactor"]):
            return "deepseek-coder"
        
        elif any(keyword in use_case for keyword in ["math", "calculation", "equation", "formula"]):
            return "deepseek-math"
        
        else:
            return "deepseek-chat"
    
    @staticmethod
    def get_model_capabilities(model: str) -> dict:
        """Get capabilities of DeepSeek models"""
        
        capabilities = {
            "deepseek-chat": {
                "context_length": 128000,
                "strengths": ["General conversation", "Text analysis", "Creative writing"],
                "best_for": "General-purpose AI tasks"
            },
            "deepseek-coder": {
                "context_length": 128000,
                "strengths": ["Code generation", "Debugging", "Code explanation"],
                "best_for": "Programming and development tasks"
            },
            "deepseek-math": {
                "context_length": 128000,
                "strengths": ["Mathematical reasoning", "Problem solving", "Calculations"],
                "best_for": "Mathematical and analytical tasks"
            }
        }
        
        return capabilities.get(model, {})

# Usage
selector = ModelSelector()

# Get recommendation
recommended = selector.recommend_model("Generate Python code for data analysis")
print(f"Recommended model: {recommended}")

# Get capabilities
capabilities = selector.get_model_capabilities("deepseek-coder")
print(f"Model capabilities: {capabilities}")

Parameter Migration

Supported Parameters

python
# Parameters that work the same way
COMPATIBLE_PARAMETERS = [
    "messages",
    "model",
    "max_tokens",
    "temperature",
    "top_p",
    "stream",
    "stop",
    "presence_penalty",
    "frequency_penalty",
    "user"
]

# Parameters that need adjustment
PARAMETER_ADJUSTMENTS = {
    "n": "Not supported - use multiple requests instead",
    "logit_bias": "Not supported",
    "logprobs": "Not supported",
    "top_logprobs": "Not supported"
}

def validate_parameters(params: dict) -> dict:
    """Validate and adjust parameters for DeepSeek API"""
    
    validated = {}
    warnings = []
    
    for key, value in params.items():
        if key in COMPATIBLE_PARAMETERS:
            validated[key] = value
        elif key in PARAMETER_ADJUSTMENTS:
            warnings.append(f"Parameter '{key}' {PARAMETER_ADJUSTMENTS[key]}")
        else:
            warnings.append(f"Unknown parameter '{key}' - removing")
    
    if warnings:
        print("Migration warnings:")
        for warning in warnings:
            print(f"  - {warning}")
    
    return validated

# Example usage
openai_params = {
    "model": "gpt-3.5-turbo",
    "messages": [{"role": "user", "content": "Hello"}],
    "temperature": 0.7,
    "n": 2,  # Not supported
    "logprobs": True  # Not supported
}

deepseek_params = validate_parameters(openai_params)
deepseek_params["model"] = "deepseek-chat"  # Update model name

print("Validated parameters:", deepseek_params)

Function Calling Migration

python
# OpenAI function calling format (compatible with DeepSeek)
def migrate_function_calling():
    """Example of function calling migration"""
    
    # Function definition (same format)
    functions = [
        {
            "name": "get_weather",
            "description": "Get current weather information",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {
                        "type": "string",
                        "description": "City name"
                    },
                    "unit": {
                        "type": "string",
                        "enum": ["celsius", "fahrenheit"],
                        "description": "Temperature unit"
                    }
                },
                "required": ["location"]
            }
        }
    ]
    
    # Usage (same format; assumes a `client` configured as in Quick Migration)
    response = client.chat.completions.create(
        model="deepseek-chat",
        messages=[
            {"role": "user", "content": "What's the weather in New York?"}
        ],
        functions=functions,
        function_call="auto"
    )
    
    return response

# Tools format (newer OpenAI format, also supported)
def migrate_tools_format():
    """Example of tools format migration"""
    
    tools = [
        {
            "type": "function",
            "function": {
                "name": "get_weather",
                "description": "Get current weather information",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "location": {"type": "string", "description": "City name"}
                    },
                    "required": ["location"]
                }
            }
        }
    ]
    
    response = client.chat.completions.create(
        model="deepseek-chat",
        messages=[
            {"role": "user", "content": "What's the weather in New York?"}
        ],
        tools=tools,
        tool_choice="auto"
    )
    
    return response
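
When the model decides to call a tool, the reply carries `tool_calls` rather than plain text: the arguments arrive as a JSON string, and the result is sent back as a `tool` message. A minimal sketch of the round trip, where `get_weather` stands in for a hypothetical local implementation:

python
import json

# `response` comes from migrate_tools_format() above
message = response.choices[0].message

if message.tool_calls:
    call = message.tool_calls[0]
    args = json.loads(call.function.arguments)  # arguments arrive as a JSON string
    result = get_weather(**args)  # hypothetical local implementation

    # Send the tool result back to get a final natural-language answer
    follow_up = client.chat.completions.create(
        model="deepseek-chat",
        messages=[
            {"role": "user", "content": "What's the weather in New York?"},
            message,  # the assistant message containing the tool call
            {"role": "tool", "tool_call_id": call.id, "content": str(result)}
        ]
    )
    print(follow_up.choices[0].message.content)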

Framework-Specific Migration

LangChain Migration

python
# Before (OpenAI with LangChain)
from langchain.llms import OpenAI
from langchain.chat_models import ChatOpenAI

# Chat model
chat_model = ChatOpenAI(
    model_name="gpt-3.5-turbo",
    openai_api_key="sk-your-openai-key"
)

# After (DeepSeek with LangChain)
from langchain.chat_models import ChatOpenAI

chat_model = ChatOpenAI(
    model_name="deepseek-chat",
    openai_api_key="sk-your-deepseek-key",
    openai_api_base="https://api.deepseek.com/v1"
)

# Usage remains the same
from langchain.schema import HumanMessage

response = chat_model([HumanMessage(content="Hello, world!")])
print(response.content)

LlamaIndex Migration

python
# Before (OpenAI with LlamaIndex)
from llama_index.llms import OpenAI
from llama_index import ServiceContext, VectorStoreIndex

llm = OpenAI(
    model="gpt-3.5-turbo",
    api_key="sk-your-openai-key"
)

# After (DeepSeek with LlamaIndex)
from llama_index.llms import OpenAI
from llama_index import ServiceContext, VectorStoreIndex

llm = OpenAI(
    model="deepseek-chat",
    api_key="sk-your-deepseek-key",
    api_base="https://api.deepseek.com/v1"
)

# Create service context
service_context = ServiceContext.from_defaults(llm=llm)

# Usage remains the same (`documents` is your loaded document collection)
index = VectorStoreIndex.from_documents(documents, service_context=service_context)
query_engine = index.as_query_engine()
response = query_engine.query("Your question here")

Streamlit Migration

python
# streamlit_app.py
import streamlit as st
from openai import OpenAI

# Configuration
st.set_page_config(page_title="AI Chat App")

# Sidebar for API configuration
with st.sidebar:
    st.title("API Configuration")
    
    provider = st.selectbox(
        "Choose AI Provider",
        ["DeepSeek", "OpenAI"]
    )
    
    if provider == "DeepSeek":
        api_key = st.text_input("DeepSeek API Key", type="password")
        base_url = "https://api.deepseek.com/v1"
        model = st.selectbox("Model", ["deepseek-chat", "deepseek-coder", "deepseek-math"])
    else:
        api_key = st.text_input("OpenAI API Key", type="password")
        base_url = None
        model = st.selectbox("Model", ["gpt-3.5-turbo", "gpt-4"])

# Initialize client
if api_key:
    client_kwargs = {"api_key": api_key}
    if base_url:
        client_kwargs["base_url"] = base_url
    
    client = OpenAI(**client_kwargs)
    
    # Chat interface
    st.title("AI Chat")
    
    if "messages" not in st.session_state:
        st.session_state.messages = []
    
    # Display chat history
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])
    
    # Chat input
    if prompt := st.chat_input("What would you like to know?"):
        # Add user message
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)
        
        # Generate response
        with st.chat_message("assistant"):
            with st.spinner("Thinking..."):
                response = client.chat.completions.create(
                    model=model,
                    messages=st.session_state.messages
                )
                
                assistant_response = response.choices[0].message.content
                st.markdown(assistant_response)
                
                # Add assistant message
                st.session_state.messages.append({
                    "role": "assistant", 
                    "content": assistant_response
                })

Migration Utilities

Automated Migration Tool

python
import re
import os
from typing import List, Dict

class MigrationTool:
    """Automated migration tool for code files"""
    
    def __init__(self):
        self.patterns = {
            # API key patterns
            r'openai\.api_key\s*=\s*["\']([^"\']+)["\']': r'client = OpenAI(api_key="\1", base_url="https://api.deepseek.com/v1")',
            r'OPENAI_API_KEY': 'DEEPSEEK_API_KEY',
            
            # Model name patterns
            r'"gpt-3\.5-turbo"': '"deepseek-chat"',
            r'"gpt-4"': '"deepseek-chat"',
            r'"gpt-4-turbo"': '"deepseek-chat"',
            r'model="gpt-3\.5-turbo"': 'model="deepseek-chat"',
            r'model="gpt-4"': 'model="deepseek-chat"',
            
            # Import patterns
            r'import openai': 'from openai import OpenAI',
            r'from openai import openai': 'from openai import OpenAI',
        }
    
    def scan_file(self, file_path: str) -> List[Dict[str, str]]:
        """Scan file for migration opportunities"""
        
        with open(file_path, 'r', encoding='utf-8') as f:
            content = f.read()
        
        findings = []
        
        for pattern, replacement in self.patterns.items():
            matches = re.finditer(pattern, content)
            for match in matches:
                findings.append({
                    "file": file_path,
                    "line": content[:match.start()].count('\n') + 1,
                    "pattern": pattern,
                    "match": match.group(),
                    "suggested_replacement": replacement,
                    "context": self._get_context(content, match.start(), match.end())
                })
        
        return findings
    
    def _get_context(self, content: str, start: int, end: int, context_lines: int = 2) -> str:
        """Get context around a match"""
        
        lines = content.split('\n')
        match_line = content[:start].count('\n')
        
        start_line = max(0, match_line - context_lines)
        end_line = min(len(lines), match_line + context_lines + 1)
        
        context_lines_list = lines[start_line:end_line]
        
        # Highlight the match line
        if match_line - start_line < len(context_lines_list):
            context_lines_list[match_line - start_line] = f">>> {context_lines_list[match_line - start_line]}"
        
        return '\n'.join(context_lines_list)
    
    def scan_directory(self, directory: str, extensions: List[str] = None) -> List[Dict[str, str]]:
        """Scan directory for Python files that need migration"""
        
        if extensions is None:
            extensions = ['.py', '.ipynb']
        
        all_findings = []
        
        for root, dirs, files in os.walk(directory):
            for file in files:
                if any(file.endswith(ext) for ext in extensions):
                    file_path = os.path.join(root, file)
                    findings = self.scan_file(file_path)
                    all_findings.extend(findings)
        
        return all_findings
    
    def generate_migration_report(self, findings: List[Dict[str, str]]) -> str:
        """Generate a migration report"""
        
        if not findings:
            return "No migration opportunities found."
        
        report = f"Migration Report\n{'=' * 50}\n\n"
        report += f"Found {len(findings)} migration opportunities:\n\n"
        
        grouped_by_file = {}
        for finding in findings:
            file_path = finding["file"]
            if file_path not in grouped_by_file:
                grouped_by_file[file_path] = []
            grouped_by_file[file_path].append(finding)
        
        for file_path, file_findings in grouped_by_file.items():
            report += f"File: {file_path}\n"
            report += "-" * len(f"File: {file_path}") + "\n"
            
            for finding in file_findings:
                report += f"  Line {finding['line']}: {finding['match']}\n"
                report += f"  Suggested: {finding['suggested_replacement']}\n"
                report += f"  Context:\n"
                for line in finding['context'].split('\n'):
                    report += f"    {line}\n"
                report += "\n"
        
        return report
    
    def apply_automatic_migrations(self, file_path: str, backup: bool = True) -> bool:
        """Apply automatic migrations to a file"""
        
        if backup:
            backup_path = f"{file_path}.backup"
            with open(file_path, 'r', encoding='utf-8') as original, open(backup_path, 'w', encoding='utf-8') as backup_file:
                backup_file.write(original.read())
        
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()
            
            modified_content = content
            
            for pattern, replacement in self.patterns.items():
                modified_content = re.sub(pattern, replacement, modified_content)
            
            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(modified_content)
            
            return True
        
        except Exception as e:
            print(f"Error applying migrations to {file_path}: {e}")
            return False

# Usage example
migration_tool = MigrationTool()

# Scan current directory
findings = migration_tool.scan_directory("./src")

# Generate report
report = migration_tool.generate_migration_report(findings)
print(report)

# Apply automatic migrations (with backup), once per affected file
for file_path in sorted({finding["file"] for finding in findings}):
    success = migration_tool.apply_automatic_migrations(file_path, backup=True)
    print(f"Migration {'successful' if success else 'failed'} for {file_path}")

Testing Migration

python
from typing import Any, Dict

class MigrationTester:
    """Test migration compatibility"""
    
    def __init__(self, deepseek_client, openai_client=None):
        self.deepseek_client = deepseek_client
        self.openai_client = openai_client
    
    def test_basic_completion(self) -> Dict[str, Any]:
        """Test basic completion functionality"""
        
        test_message = "Hello, how are you?"
        
        try:
            response = self.deepseek_client.chat.completions.create(
                model="deepseek-chat",
                messages=[{"role": "user", "content": test_message}]
            )
            
            return {
                "success": True,
                "response_length": len(response.choices[0].message.content),
                "has_content": bool(response.choices[0].message.content.strip())
            }
        
        except Exception as e:
            return {
                "success": False,
                "error": str(e)
            }
    
    def test_streaming(self) -> Dict[str, Any]:
        """Test streaming functionality"""
        
        try:
            response = self.deepseek_client.chat.completions.create(
                model="deepseek-chat",
                messages=[{"role": "user", "content": "Count from 1 to 5"}],
                stream=True
            )
            
            chunks = []
            for chunk in response:
                if chunk.choices[0].delta.content:
                    chunks.append(chunk.choices[0].delta.content)
            
            return {
                "success": True,
                "chunk_count": len(chunks),
                "has_chunks": len(chunks) > 0
            }
        
        except Exception as e:
            return {
                "success": False,
                "error": str(e)
            }
    
    def test_function_calling(self) -> Dict[str, Any]:
        """Test function calling functionality"""
        
        functions = [
            {
                "name": "test_function",
                "description": "A test function",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "input": {"type": "string"}
                    },
                    "required": ["input"]
                }
            }
        ]
        
        try:
            response = self.deepseek_client.chat.completions.create(
                model="deepseek-chat",
                messages=[{"role": "user", "content": "Call the test function with input 'hello'"}],
                functions=functions,
                function_call="auto"
            )
            
            has_function_call = (
                response.choices[0].message.function_call is not None
                if hasattr(response.choices[0].message, 'function_call')
                else False
            )
            
            return {
                "success": True,
                "has_function_call": has_function_call
            }
        
        except Exception as e:
            return {
                "success": False,
                "error": str(e)
            }
    
    def run_all_tests(self) -> Dict[str, Dict[str, Any]]:
        """Run all migration tests"""
        
        tests = {
            "basic_completion": self.test_basic_completion,
            "streaming": self.test_streaming,
            "function_calling": self.test_function_calling
        }
        
        results = {}
        
        for test_name, test_func in tests.items():
            print(f"Running {test_name}...")
            results[test_name] = test_func()
        
        return results
    
    def compare_with_openai(self, test_prompt: str) -> Dict[str, Any]:
        """Compare responses between OpenAI and DeepSeek"""
        
        if not self.openai_client:
            return {"error": "OpenAI client not provided"}
        
        try:
            # DeepSeek response
            deepseek_response = self.deepseek_client.chat.completions.create(
                model="deepseek-chat",
                messages=[{"role": "user", "content": test_prompt}],
                temperature=0.7
            )
            
            # OpenAI response
            openai_response = self.openai_client.chat.completions.create(
                model="gpt-3.5-turbo",
                messages=[{"role": "user", "content": test_prompt}],
                temperature=0.7
            )
            
            return {
                "deepseek_response": deepseek_response.choices[0].message.content,
                "openai_response": openai_response.choices[0].message.content,
                "deepseek_length": len(deepseek_response.choices[0].message.content),
                "openai_length": len(openai_response.choices[0].message.content)
            }
        
        except Exception as e:
            return {"error": str(e)}

# Usage
from openai import OpenAI

# Initialize clients
deepseek_client = OpenAI(
    api_key="sk-your-deepseek-key",
    base_url="https://api.deepseek.com/v1"
)

# Optional: OpenAI client for comparison
# openai_client = OpenAI(api_key="sk-your-openai-key")

# Run tests
tester = MigrationTester(deepseek_client)
test_results = tester.run_all_tests()

print("Migration Test Results:")
for test_name, result in test_results.items():
    status = "✅ PASS" if result.get("success", False) else "❌ FAIL"
    print(f"{test_name}: {status}")
    if not result.get("success", False):
        print(f"  Error: {result.get('error', 'Unknown error')}")

Best Practices

Migration Checklist

python
MIGRATION_CHECKLIST = [
    "✅ Update API endpoint to https://api.deepseek.com/v1",
    "✅ Replace API key with DeepSeek API key",
    "✅ Update model names (gpt-3.5-turbo → deepseek-chat)",
    "✅ Remove unsupported parameters (n, logprobs, etc.)",
    "✅ Test basic completion functionality",
    "✅ Test streaming if used",
    "✅ Test function calling if used",
    "✅ Update environment variables",
    "✅ Update configuration files",
    "✅ Test error handling",
    "✅ Validate response format compatibility",
    "✅ Update documentation and comments",
    "✅ Run comprehensive tests",
    "✅ Monitor performance and costs"
]

def print_migration_checklist():
    """Print migration checklist"""
    print("DeepSeek Migration Checklist")
    print("=" * 30)
    for item in MIGRATION_CHECKLIST:
        print(item)

print_migration_checklist()

Gradual Migration Strategy

python
import hashlib
import random
from typing import Dict, List

class GradualMigration:
    """Implement gradual migration with A/B testing"""
    
    def __init__(self, deepseek_client, openai_client, migration_percentage: float = 0.1):
        self.deepseek_client = deepseek_client
        self.openai_client = openai_client
        self.migration_percentage = migration_percentage
    
    def should_use_deepseek(self, user_id: str = None) -> bool:
        """Determine if request should use DeepSeek based on migration percentage"""
        
        if user_id:
            # Consistent routing based on a hash of the user ID
            hash_value = int(hashlib.md5(user_id.encode()).hexdigest(), 16)
            return (hash_value % 100) < (self.migration_percentage * 100)
        else:
            # Random routing
            return random.random() < self.migration_percentage
    
    def chat_completion(self, messages: List[Dict], user_id: str = None, **kwargs):
        """Route chat completion to appropriate provider"""
        
        if self.should_use_deepseek(user_id):
            # Use DeepSeek: map the full model name via MODEL_MAPPING (from the
            # Model Mapping section); chained str.replace would mangle names
            # like "gpt-4-turbo" into "deepseek-chat-turbo"
            kwargs["model"] = MODEL_MAPPING.get(kwargs.get("model", "gpt-3.5-turbo"), "deepseek-chat")
            
            return self.deepseek_client.chat.completions.create(
                messages=messages,
                **kwargs
            )
        else:
            # Use OpenAI
            return self.openai_client.chat.completions.create(
                messages=messages,
                **kwargs
            )
    
    def increase_migration_percentage(self, new_percentage: float):
        """Gradually increase migration percentage"""
        self.migration_percentage = min(1.0, new_percentage)
        print(f"Migration percentage updated to {self.migration_percentage * 100}%")

# Usage (assumes both a DeepSeek and an OpenAI client are initialized)
gradual_migration = GradualMigration(
    deepseek_client=deepseek_client,
    openai_client=openai_client,
    migration_percentage=0.1  # Start with 10%
)

# Use in your application
response = gradual_migration.chat_completion(
    messages=[{"role": "user", "content": "Hello!"}],
    user_id="user123"
)

# Gradually increase migration
gradual_migration.increase_migration_percentage(0.25)  # 25%
gradual_migration.increase_migration_percentage(0.50)  # 50%
gradual_migration.increase_migration_percentage(1.0)   # 100%
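
The checklist's last item, monitoring performance and costs, is easiest when both providers are measured the same way during the rollout. A minimal sketch that wraps chat_completion with latency and token-usage logging (the usage fields follow the OpenAI response shape):

python
import time

def logged_completion(router: GradualMigration, messages, user_id=None, **kwargs):
    """Route a request and record latency plus token usage for comparison."""
    start = time.perf_counter()
    response = router.chat_completion(messages, user_id=user_id, **kwargs)
    latency = time.perf_counter() - start

    usage = response.usage  # prompt_tokens / completion_tokens / total_tokens
    print(f"model={response.model} latency={latency:.2f}s "
          f"prompt={usage.prompt_tokens} completion={usage.completion_tokens}")
    return response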

Troubleshooting

Common Migration Issues

  1. Authentication errors: Verify API key and base URL
  2. Model not found: Update model names to DeepSeek equivalents
  3. Parameter errors: Remove unsupported parameters
  4. Response format differences: Validate response parsing (see the sketch below)
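
For the response-format issue in particular, defensive parsing catches surprises early. A minimal sketch that validates the fields this guide relies on before using them:

python
def extract_content(response) -> str:
    """Defensively extract the text content from a chat completion response."""
    if not getattr(response, "choices", None):
        raise ValueError("Response has no choices")
    message = response.choices[0].message
    content = getattr(message, "content", None)
    if content is None:
        # e.g. the model returned a tool/function call instead of text
        raise ValueError("Response message has no text content")
    return content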

Debug Migration

python
def debug_migration_issue(error_message: str, request_data: dict):
    """Debug common migration issues"""
    
    print(f"Migration Error: {error_message}")
    print(f"Request Data: {request_data}")
    
    # Check common issues
    if "model" in request_data:
        model = request_data["model"]
        if model.startswith("gpt-"):
            print(f"❌ Issue: Using OpenAI model '{model}' with DeepSeek API")
            print(f"✅ Solution: Change to 'deepseek-chat', 'deepseek-coder', or 'deepseek-math'")
    
    if "n" in request_data:
        print(f"❌ Issue: Parameter 'n' is not supported by DeepSeek")
        print(f"✅ Solution: Remove 'n' parameter and make multiple requests if needed")
    
    if "logprobs" in request_data:
        print(f"❌ Issue: Parameter 'logprobs' is not supported by DeepSeek")
        print(f"✅ Solution: Remove 'logprobs' parameter")
    
    if "401" in error_message or "authentication" in error_message.lower():
        print(f"❌ Issue: Authentication failed")
        print(f"✅ Solution: Check API key and base URL configuration")

# Usage
try:
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",  # Wrong model
        messages=[{"role": "user", "content": "Hello"}],
        n=2  # Unsupported parameter
    )
except Exception as e:
    debug_migration_issue(str(e), {
        "model": "gpt-3.5-turbo",
        "n": 2
    })
