Files
lijiaoqiao/llm-gateway-competitors/litellm-wheel-src/litellm/llms/deepseek/cost_calculator.py
2026-03-26 20:06:14 +08:00

22 lines
587 B
Python

"""
Cost calculator for DeepSeek Chat models.
Handles prompt caching scenario.
"""
from typing import Tuple
from litellm.litellm_core_utils.llm_cost_calc.utils import generic_cost_per_token
from litellm.types.utils import Usage
def cost_per_token(model: str, usage: Usage) -> Tuple[float, float]:
    """
    Return the (prompt_cost, completion_cost) pair for a DeepSeek chat model.

    Delegates to the shared generic per-token cost calculator with the
    "deepseek" provider, which applies the same prompt-caching pricing
    logic used for Anthropic models.

    Args:
        model: The DeepSeek model name to price.
        usage: Token usage record for the request/response.

    Returns:
        Tuple of (prompt cost in USD, completion cost in USD).
    """
    provider = "deepseek"
    return generic_cost_per_token(
        model=model,
        usage=usage,
        custom_llm_provider=provider,
    )