chore: initial public snapshot for github upload
@@ -0,0 +1,113 @@
"""
Support for OpenAI's `/v1/chat/completions` endpoint.

Calls are made in OpenAI/openai.py, since the Vercel AI Gateway is OpenAI-compatible.

Docs: https://vercel.com/docs/ai-gateway
"""

from typing import List, Optional, Tuple, Union

import httpx

import litellm
from litellm.llms.base_llm.chat.transformation import BaseLLMException
from litellm.secret_managers.main import get_secret_str
from litellm.types.llms.openai import AllMessageValues

from ...openai.chat.gpt_transformation import OpenAIGPTConfig
from ..common_utils import VercelAIGatewayException


class VercelAIGatewayConfig(OpenAIGPTConfig):
    @property
    def custom_llm_provider(self) -> Optional[str]:
        return "vercel_ai_gateway"

    def get_supported_openai_params(self, model: str) -> list:
        base_params = super().get_supported_openai_params(model)
        if "extra_body" not in base_params:
            base_params.append("extra_body")
        return base_params

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        api_base = (
            api_base
            or get_secret_str("VERCEL_AI_GATEWAY_API_BASE")
            or "https://ai-gateway.vercel.sh/v1"
        )
        user_api_key = (
            api_key
            or get_secret_str("VERCEL_AI_GATEWAY_API_KEY")
            or get_secret_str("VERCEL_OIDC_TOKEN")
        )
        return api_base, user_api_key
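
    # A minimal sketch (hypothetical call) of the credential fallback order:
    # an explicit argument wins, then the environment, then the public default.
    #
    #     api_base, key = VercelAIGatewayConfig()._get_openai_compatible_provider_info(
    #         api_base=None, api_key=None
    #     )
    #     # api_base == "https://ai-gateway.vercel.sh/v1" when no env vars are set;
    #     # key comes from VERCEL_AI_GATEWAY_API_KEY, falling back to VERCEL_OIDC_TOKEN.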

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        mapped_openai_params = super().map_openai_params(
            non_default_params, optional_params, model, drop_params
        )

        # Vercel AI Gateway-only parameters
        extra_body = {}
        provider_options = non_default_params.pop("providerOptions", None)
        if provider_options is not None:
            extra_body["providerOptions"] = provider_options

        # the openai client supports an `extra_body` param
        mapped_openai_params["extra_body"] = extra_body
        return mapped_openai_params
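
    # A minimal sketch (hypothetical values) of how `providerOptions` lands in
    # `extra_body`:
    #
    #     params = VercelAIGatewayConfig().map_openai_params(
    #         non_default_params={"providerOptions": {"order": ["bedrock", "vertex"]}},
    #         optional_params={},
    #         model="anthropic/claude-sonnet-4",
    #         drop_params=False,
    #     )
    #     assert params["extra_body"] == {"providerOptions": {"order": ["bedrock", "vertex"]}}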

    def transform_request(
        self,
        model: str,
        messages: List[AllMessageValues],
        optional_params: dict,
        litellm_params: dict,
        headers: dict,
    ) -> dict:
        """
        Transform the overall request to be sent to the API.

        Returns:
            dict: The transformed request. Sent as the body of the API call.
        """
        return super().transform_request(
            model, messages, optional_params, litellm_params, headers
        )

    def get_error_class(
        self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers]
    ) -> BaseLLMException:
        return VercelAIGatewayException(
            message=error_message,
            status_code=status_code,
            headers=headers,
        )

    def get_models(
        self, api_key: Optional[str] = None, api_base: Optional[str] = None
    ) -> List[str]:
        api_base, _ = self._get_openai_compatible_provider_info(api_base, api_key)

        if api_base is None:
            api_base = "https://ai-gateway.vercel.sh/v1"

        models_url = f"{api_base}/models"
        response = litellm.module_level_client.get(url=models_url)

        if response.status_code != 200:
            raise Exception(f"Failed to get models: {response.text}")

        models = response.json()["data"]
        return [model["id"] for model in models]
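
# A minimal usage sketch, assuming the gateway's `creator/model` slug format and a
# `VERCEL_AI_GATEWAY_API_KEY` environment variable (the model name is illustrative):
#
#     import litellm
#
#     response = litellm.completion(
#         model="vercel_ai_gateway/openai/gpt-4o-mini",
#         messages=[{"role": "user", "content": "Hello!"}],
#     )
#     print(response.choices[0].message.content)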
@@ -0,0 +1,5 @@
from litellm.llms.base_llm.chat.transformation import BaseLLMException


class VercelAIGatewayException(BaseLLMException):
    pass
@@ -0,0 +1,176 @@
"""
Vercel AI Gateway Embedding API Configuration.

This module provides the configuration for Vercel AI Gateway's Embedding API.
Vercel AI Gateway is OpenAI-compatible and supports embeddings via the `/v1/embeddings` endpoint.

Docs: https://vercel.com/docs/ai-gateway/openai-compat/embeddings
"""

from typing import TYPE_CHECKING, Any, Optional, Union

import httpx

from litellm.llms.base_llm.chat.transformation import BaseLLMException
from litellm.llms.base_llm.embedding.transformation import BaseEmbeddingConfig
from litellm.secret_managers.main import get_secret_str
from litellm.types.llms.openai import AllEmbeddingInputValues
from litellm.types.utils import EmbeddingResponse
from litellm.utils import convert_to_model_response_object

from ..common_utils import VercelAIGatewayException

if TYPE_CHECKING:
    from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj

    LiteLLMLoggingObj = _LiteLLMLoggingObj
else:
    LiteLLMLoggingObj = Any


class VercelAIGatewayEmbeddingConfig(BaseEmbeddingConfig):
    """
    Configuration for Vercel AI Gateway's Embedding API.

    Reference: https://vercel.com/docs/ai-gateway/openai-compat/embeddings
    """

    def validate_environment(
        self,
        headers: dict,
        model: str,
        messages: list,
        optional_params: dict,
        litellm_params: dict,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
    ) -> dict:
        """
        Validate the environment and set up headers for the Vercel AI Gateway API.

        Vercel AI Gateway requires:
        - an Authorization header with a Bearer token (API key or OIDC token)
        """
        vercel_headers = {
            "Content-Type": "application/json",
        }

        # Add the Authorization header if an api_key is provided
        if api_key:
            vercel_headers["Authorization"] = f"Bearer {api_key}"

        # Merge with existing headers (the user's extra_headers take priority)
        merged_headers = {**vercel_headers, **headers}

        return merged_headers
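
    # For illustration (hypothetical values): api_key="sk-proj-123" and
    # headers={"X-Custom": "1"} merge into
    #     {"Content-Type": "application/json",
    #      "Authorization": "Bearer sk-proj-123",
    #      "X-Custom": "1"}
    # with caller-supplied keys overriding the defaults on collision.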

    def get_complete_url(
        self,
        api_base: Optional[str],
        api_key: Optional[str],
        model: str,
        optional_params: dict,
        litellm_params: dict,
        stream: Optional[bool] = None,
    ) -> str:
        """
        Get the complete URL for the Vercel AI Gateway Embedding API endpoint.
        """
        if api_base:
            api_base = api_base.rstrip("/")
        else:
            api_base = (
                get_secret_str("VERCEL_AI_GATEWAY_API_BASE")
                or "https://ai-gateway.vercel.sh/v1"
            )

        return f"{api_base}/embeddings"
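
    # For illustration: with no api_base argument and no env override, this
    # resolves to "https://ai-gateway.vercel.sh/v1/embeddings"; a custom base
    # such as "https://my-gateway.example.com/v1/" (hypothetical) is normalized
    # to "https://my-gateway.example.com/v1/embeddings".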

    def transform_embedding_request(
        self,
        model: str,
        input: AllEmbeddingInputValues,
        optional_params: dict,
        headers: dict,
    ) -> dict:
        """
        Transform the embedding request into Vercel AI Gateway format (OpenAI-compatible).
        """
        # Ensure the input is a list
        if isinstance(input, str):
            input = [input]

        # Strip the 'vercel_ai_gateway/' prefix if present
        if model.startswith("vercel_ai_gateway/"):
            model = model.replace("vercel_ai_gateway/", "", 1)

        return {
            "model": model,
            "input": input,
            **optional_params,
        }
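
    # For illustration (hypothetical slug): model="vercel_ai_gateway/openai/text-embedding-3-small",
    # input="hello", optional_params={"dimensions": 256} produces the body
    #     {"model": "openai/text-embedding-3-small", "input": ["hello"], "dimensions": 256}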

    def transform_embedding_response(
        self,
        model: str,
        raw_response: httpx.Response,
        model_response: EmbeddingResponse,
        logging_obj: LiteLLMLoggingObj,
        api_key: Optional[str],
        request_data: dict,
        optional_params: dict,
        litellm_params: dict,
    ) -> EmbeddingResponse:
        """
        Transform the embedding response from Vercel AI Gateway format (OpenAI-compatible).
        """
        logging_obj.post_call(original_response=raw_response.text)

        # Vercel AI Gateway returns a standard OpenAI-compatible embedding response
        response_json = raw_response.json()

        return convert_to_model_response_object(
            response_object=response_json,
            model_response_object=model_response,
            response_type="embedding",
        )

    def get_supported_openai_params(self, model: str) -> list:
        """
        Get the list of supported OpenAI parameters for Vercel AI Gateway embeddings.

        Vercel AI Gateway supports the standard OpenAI embeddings parameters
        and auto-maps 'dimensions' to each provider's expected field.
        """
        return [
            "timeout",
            "dimensions",
            "encoding_format",
            "user",
        ]

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        """
        Map OpenAI parameters to Vercel AI Gateway format.
        """
        supported_params = self.get_supported_openai_params(model)
        for param, value in non_default_params.items():
            if param in supported_params:
                optional_params[param] = value
        return optional_params

    def get_error_class(
        self, error_message: str, status_code: int, headers: Union[dict, httpx.Headers]
    ) -> BaseLLMException:
        """
        Get the error class for Vercel AI Gateway errors.
        """
        return VercelAIGatewayException(
            message=error_message,
            status_code=status_code,
            headers=headers,
        )
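
# A minimal usage sketch, assuming the gateway's `creator/model` slug format and a
# `VERCEL_AI_GATEWAY_API_KEY` environment variable (the model name is illustrative):
#
#     import litellm
#
#     response = litellm.embedding(
#         model="vercel_ai_gateway/openai/text-embedding-3-small",
#         input=["hello world"],
#     )
#     print(len(response.data[0]["embedding"]))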