Fix billing error for Grok models by adding x-ai model aliases and pricing

- Added the 'x-ai/grok-4' alias to the xai/grok-4 model configuration
- Added OpenRouter x-ai model pricing support in constants generation
- Resolves LiteLLM 'LLM Provider NOT provided' errors in the billing system
- Ensures correct token cost calculation for all Grok model variants
marko-kraemer 2025-07-25 20:17:02 +02:00
parent 293e97fa69
commit 39cf6c13a5
1 changed file with 5 additions and 1 deletion

@@ -51,7 +51,7 @@ MODELS = {
        "tier_availability": ["free", "paid"]
    },
    "xai/grok-4": {
-       "aliases": ["grok-4"],
+       "aliases": ["grok-4", "x-ai/grok-4"],
        "pricing": {
            "input_cost_per_million_tokens": 5.00,
            "output_cost_per_million_tokens": 15.00
@@ -152,6 +152,10 @@ def _generate_model_structures():
        # Add anthropic/claude-sonnet-4 alias for claude-sonnet-4-20250514
        if "claude-sonnet-4-20250514" in model_name:
            pricing["anthropic/claude-sonnet-4"] = config["pricing"]
+       elif model_name.startswith("xai/"):
+           # Add pricing for OpenRouter x-ai models
+           openrouter_name = model_name.replace("xai/", "openrouter/x-ai/")
+           pricing[openrouter_name] = config["pricing"]
    return free_models, paid_models, aliases, pricing
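
For context, a minimal sketch of why the new pricing entry matters for billing. It assumes the pricing map generated above is keyed by model name with per-million-token rates, as shown in the diff; the PRICING literal and the calculate_token_cost helper below are illustrative names, not the repository's actual billing code.

# Hypothetical sketch of the cost lookup path, assuming the per-million-token
# rates from the diff above. PRICING and calculate_token_cost are illustrative,
# not the project's real billing API.
PRICING = {
    "xai/grok-4": {
        "input_cost_per_million_tokens": 5.00,
        "output_cost_per_million_tokens": 15.00,
    },
    # Entry produced by the new elif branch for the OpenRouter model name:
    "openrouter/x-ai/grok-4": {
        "input_cost_per_million_tokens": 5.00,
        "output_cost_per_million_tokens": 15.00,
    },
}

def calculate_token_cost(model: str, input_tokens: int, output_tokens: int) -> float:
    # Scale the per-million-token rates to the actual usage.
    rates = PRICING[model]
    return (
        input_tokens * rates["input_cost_per_million_tokens"]
        + output_tokens * rates["output_cost_per_million_tokens"]
    ) / 1_000_000

# Without the "openrouter/x-ai/grok-4" entry the lookup misses and, per the
# commit message, the request falls through to LiteLLM, which raises
# "LLM Provider NOT provided". With it:
# (1000 * 5.00 + 500 * 15.00) / 1_000_000 = 0.0125 USD.
print(calculate_token_cost("openrouter/x-ai/grok-4", 1_000, 500))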