fix(codex): model_catalog_json with correct Codex internal format

Previous attempt used OpenAI API format (bare array of {id, object}).
Codex expects ModelsResponse format: {"models": [{slug, display_name,
visibility, shell_type, supported_reasoning_levels, ...}]}.

Format reverse-engineered from codex-rs/core/models.json in official repo.
All 4 models (gpt-5.4, gpt-5.3-codex-spark, gpt-5.3-codex, gpt-5.2-codex)
now appear in interactive model picker.

Cleanup logic detects old bare-array format and replaces automatically.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
delta-cloud-208e
2026-03-08 08:58:33 +00:00
parent 60703fc7db
commit 0e5564732b
3 changed files with 181 additions and 26 deletions

View File

@@ -44,11 +44,56 @@ RESET = "\033[0m"
# Top-level config.toml keys owned by this tool: we rewrite these on every
# run and leave all other user keys untouched.
MANAGED_TOP_KEYS = {
    "model",
    "model_reasoning_effort",
    "model_provider",
    "model_catalog_json",
    "approval_policy",
    "sandbox_mode",
    "check_for_update_on_startup",
    "forced_login_method",
}
# TOML sections whose entire contents we own and regenerate.
MANAGED_SECTIONS = {"analytics", "model_providers"}
# Model catalog entry template (Codex internal format; field set mirrors
# codex-rs/core/models.json per the commit message -- confirm against the
# installed Codex version if fields drift).
MODEL_TEMPLATE = {
    "prefer_websockets": False,
    "support_verbosity": True,
    "default_verbosity": "low",
    "apply_patch_tool_type": "freeform",
    "input_modalities": ["text", "image"],
    "supports_image_detail_original": True,
    "truncation_policy": {"mode": "tokens", "limit": 10000},
    "supports_parallel_tool_calls": True,
    "context_window": 272000,
    "default_reasoning_summary": "none",
    "shell_type": "shell_command",
    "visibility": "list",  # "list" makes the model show up in the picker
    "supported_in_api": True,
    "availability_nux": None,
    "upgrade": None,
    "priority": 0,  # overwritten per entry below
    "base_instructions": "",
    "model_messages": None,
    "experimental_supported_tools": [],
    "supports_reasoning_summaries": True,
    "supported_reasoning_levels": [
        {"effort": "low", "description": "Fast responses with lighter reasoning"},
        {"effort": "medium", "description": "Balances speed and reasoning depth"},
        {"effort": "high", "description": "Greater reasoning depth for complex problems"},
        {"effort": "xhigh", "description": "Extra high reasoning depth"},
    ],
    "default_reasoning_level": "medium",
}


def generate_model_catalog(models):
    """Generate a model catalog dict in Codex's internal ModelsResponse format.

    Args:
        models: iterable of model slug strings. Order matters: the index
            becomes the entry's ``priority`` (picker sort order).

    Returns:
        ``{"models": [entry, ...]}`` where each entry is a deep copy of
        ``MODEL_TEMPLATE`` with ``slug``, ``display_name``, ``description``
        and ``priority`` filled in. An empty ``models`` yields
        ``{"models": []}``.
    """
    from copy import deepcopy

    entries = []
    for priority, slug in enumerate(models):
        # deepcopy (not dict()) so nested mutables such as
        # supported_reasoning_levels and truncation_policy are independent
        # per entry -- a shallow copy would alias them across all models,
        # so a later per-model tweak would silently mutate every entry.
        entry = deepcopy(MODEL_TEMPLATE)
        entry["slug"] = slug
        entry["display_name"] = slug
        entry["description"] = f"Model {slug}"
        entry["priority"] = priority
        entries.append(entry)
    return {"models": entries}
# ─── Config Loading ─────────────────────────────────────────────────────
@@ -163,6 +208,13 @@ def generate_config_toml(existing, config):
lines.append(f'model = "{config["model"]}"')
lines.append(f'model_reasoning_effort = "{config.get("model_reasoning_effort", "high")}"')
lines.append('model_provider = "custom"')
# Model catalog path (for model picker)
codex_dir_path = os.path.join(os.path.expanduser("~"), ".codex")
catalog_path = os.path.join(codex_dir_path, "model_catalog.json")
catalog_path_toml = catalog_path.replace("\\", "/")
lines.append(f'model_catalog_json = "{catalog_path_toml}"')
lines.append(f'approval_policy = "{config.get("approval_policy", "never")}"')
lines.append(f'sandbox_mode = "{config.get("sandbox_mode", "danger-full-access")}"')
lines.append(f'check_for_update_on_startup = {toml_value(config.get("check_for_update", False))}')
@@ -458,20 +510,17 @@ def apply_all_patches(config, home_dir=None):
print(f" Proxy: {config['base_url']}")
print()
# Clean up stale model_catalog.json from previous broken installs
stale_catalog = os.path.join(codex_dir, "model_catalog.json")
if os.path.isfile(stale_catalog):
os.remove(stale_catalog)
print(f" {YELLOW}Removed stale model_catalog.json{RESET}")
# Generate model catalog JSON for model picker (Codex internal format)
catalog_path = os.path.join(codex_dir, "model_catalog.json")
models = config.get("models", [config["model"]])
catalog = generate_model_catalog(models)
with open(catalog_path, "w", encoding="utf-8") as f:
json.dump(catalog, f, indent=2)
print(f" {'[OK]':>8} Catalog: {catalog_path} ({len(models)} models)")
# Read existing config
existing = read_toml(config_path)
# Remove model_catalog_json if present (wrong format crashes Codex)
if "model_catalog_json" in existing:
del existing["model_catalog_json"]
print(f" {YELLOW}Removed model_catalog_json from config (unsupported format){RESET}")
# Backup before any changes
backup_file(config_path)
@@ -581,6 +630,12 @@ def patch_user(user_home, config):
codex_dir = os.path.join(user_home, ".codex")
os.makedirs(codex_dir, exist_ok=True)
# Generate model catalog
catalog_path = os.path.join(codex_dir, "model_catalog.json")
models = config.get("models", [config["model"]])
with open(catalog_path, "w", encoding="utf-8") as f:
json.dump(generate_model_catalog(models), f, indent=2)
config_path = os.path.join(codex_dir, "config.toml")
existing = read_toml(config_path)
backup_file(config_path)

View File

@@ -162,11 +162,7 @@ if (Test-Path $codexConfigFile) {
$needsCleanup = $true
}
# Check 2: model_catalog_json with wrong format (crashes Codex on startup)
if ($existingContent -match "model_catalog_json") {
Write-Host " Detected model_catalog_json (unsupported, removing)" -ForegroundColor Yellow
$needsCleanup = $true
}
# Check 2: config will be regenerated anyway by patcher below
if ($needsCleanup) {
Write-Host " Removing broken config.toml..." -ForegroundColor Yellow
@@ -174,11 +170,14 @@ if (Test-Path $codexConfigFile) {
}
}
# Clean up stale model_catalog.json if it exists
# Clean up old-format model_catalog.json (bare array instead of {models:[...]})
$staleCatalog = "$codexConfigDir\model_catalog.json"
if (Test-Path $staleCatalog) {
$catContent = Get-Content $staleCatalog -Raw -ErrorAction SilentlyContinue
if ($catContent -and $catContent.TrimStart().StartsWith("[")) {
Remove-Item $staleCatalog -Force -ErrorAction SilentlyContinue
Write-Host " Removed stale model_catalog.json" -ForegroundColor Yellow
Write-Host " Removed old-format model_catalog.json (wrong structure)" -ForegroundColor Yellow
}
}
# ---- Apply patches ----
@@ -235,10 +234,61 @@ if (-not $pyCmd) {
Remove-Item $configToml -Force -ErrorAction SilentlyContinue
}
# Generate model catalog (Codex internal format)
# The catalog file lives next to config.toml; the path written into the TOML
# uses forward slashes so it parses cleanly as a TOML string on Windows.
$catalogFile = Join-Path $configDir "model_catalog.json"
$catalogPath = $catalogFile -replace '\\', '/'
# Shared defaults for every catalog entry. Field set mirrors
# codex-rs/core/models.json per the commit message -- TODO confirm against
# the Codex version actually installed.
$modelTemplate = @{
prefer_websockets = $false
support_verbosity = $true
default_verbosity = "low"
apply_patch_tool_type = "freeform"
input_modalities = @("text", "image")
supports_image_detail_original = $true
truncation_policy = @{ mode = "tokens"; limit = 10000 }
supports_parallel_tool_calls = $true
context_window = 272000
default_reasoning_summary = "none"
shell_type = "shell_command"
visibility = "list"
supported_in_api = $true
availability_nux = $null
upgrade = $null
base_instructions = ""
model_messages = $null
experimental_supported_tools = @()
supports_reasoning_summaries = $true
supported_reasoning_levels = @(
@{ effort = "low"; description = "Fast responses with lighter reasoning" }
@{ effort = "medium"; description = "Balances speed and reasoning depth" }
@{ effort = "high"; description = "Greater reasoning depth for complex problems" }
@{ effort = "xhigh"; description = "Extra high reasoning depth" }
)
default_reasoning_level = "medium"
}
$modelSlugs = @("gpt-5.4", "gpt-5.3-codex-spark", "gpt-5.3-codex", "gpt-5.2-codex")
$catalogModels = @()
$pri = 0
foreach ($slug in $modelSlugs) {
# NOTE(review): Hashtable.Clone() is a SHALLOW copy -- all entries share the
# same nested supported_reasoning_levels array and truncation_policy table.
# Harmless today because nothing mutates them per entry, but deep-copy
# before adding any per-model customization of nested fields.
$entry = $modelTemplate.Clone()
$entry["slug"] = $slug
$entry["display_name"] = $slug
$entry["description"] = "Model $slug"
$entry["priority"] = $pri
$catalogModels += $entry
$pri++
}
$catalog = @{ models = $catalogModels }
# -Depth 5 is exactly the nesting depth of the reasoning-level objects
# (catalog > models > entry > supported_reasoning_levels > level props);
# deepening the structure without raising -Depth would silently stringify
# the innermost values.
$catalogJsonStr = $catalog | ConvertTo-Json -Depth 5 -Compress
[System.IO.File]::WriteAllText($catalogFile, $catalogJsonStr)
Write-Host " model_catalog.json created ($($modelSlugs.Count) models)" -ForegroundColor Green
$tomlContent = @"
model = "gpt-5.4"
model_reasoning_effort = "high"
model_provider = "custom"
model_catalog_json = "$catalogPath"
approval_policy = "never"
sandbox_mode = "danger-full-access"
check_for_update_on_startup = false

View File

@@ -107,11 +107,7 @@ if (Test-Path $codexConfigFile) {
$needsCleanup = $true
}
# Check 2: model_catalog_json with wrong format (crashes Codex)
if ($existingContent -match "model_catalog_json") {
Write-Host " Detected model_catalog_json (unsupported, removing)" -ForegroundColor Yellow
$needsCleanup = $true
}
# Check 2: config will be regenerated anyway by patcher below
if ($needsCleanup) {
Write-Host " Removing broken config.toml..." -ForegroundColor Yellow
@@ -119,11 +115,14 @@ if (Test-Path $codexConfigFile) {
}
}
# Clean up stale model_catalog.json
# Clean up old-format model_catalog.json (bare array instead of {models:[...]})
$staleCatalog = "$codexConfigDir\model_catalog.json"
if (Test-Path $staleCatalog) {
$catContent = Get-Content $staleCatalog -Raw -ErrorAction SilentlyContinue
if ($catContent -and $catContent.TrimStart().StartsWith("[")) {
Remove-Item $staleCatalog -Force -ErrorAction SilentlyContinue
Write-Host " Removed stale model_catalog.json" -ForegroundColor Yellow
Write-Host " Removed old-format model_catalog.json (wrong structure)" -ForegroundColor Yellow
}
}
# ---- Download and apply patches ----
@@ -171,10 +170,61 @@ if (-not $pyCmd) {
$configToml = Join-Path $configDir "config.toml"
# Remove old broken config
if (Test-Path $configToml) { Remove-Item $configToml -Force -ErrorAction SilentlyContinue }
# Generate model catalog (Codex internal format)
# Catalog sits beside config.toml; forward slashes keep the TOML path valid
# on Windows.
$catalogFile = Join-Path $configDir "model_catalog.json"
$catalogPath = $catalogFile -replace '\\', '/'
# Per-entry defaults; field set mirrors codex-rs/core/models.json per the
# commit message -- TODO confirm against the installed Codex version.
$modelTemplate = @{
prefer_websockets = $false
support_verbosity = $true
default_verbosity = "low"
apply_patch_tool_type = "freeform"
input_modalities = @("text", "image")
supports_image_detail_original = $true
truncation_policy = @{ mode = "tokens"; limit = 10000 }
supports_parallel_tool_calls = $true
context_window = 272000
default_reasoning_summary = "none"
shell_type = "shell_command"
visibility = "list"
supported_in_api = $true
availability_nux = $null
upgrade = $null
base_instructions = ""
model_messages = $null
experimental_supported_tools = @()
supports_reasoning_summaries = $true
supported_reasoning_levels = @(
@{ effort = "low"; description = "Fast responses with lighter reasoning" }
@{ effort = "medium"; description = "Balances speed and reasoning depth" }
@{ effort = "high"; description = "Greater reasoning depth for complex problems" }
@{ effort = "xhigh"; description = "Extra high reasoning depth" }
)
default_reasoning_level = "medium"
}
$modelSlugs = @("gpt-5.4", "gpt-5.3-codex-spark", "gpt-5.3-codex", "gpt-5.2-codex")
$catalogModels = @()
$pri = 0
foreach ($slug in $modelSlugs) {
# NOTE(review): Hashtable.Clone() is shallow; nested arrays/tables are
# shared across entries. Safe only while nothing mutates them per model.
$entry = $modelTemplate.Clone()
$entry["slug"] = $slug
$entry["display_name"] = $slug
$entry["description"] = "Model $slug"
$entry["priority"] = $pri
$catalogModels += $entry
$pri++
}
$catalog = @{ models = $catalogModels }
# -Depth 5 is the minimum that fully serializes the reasoning-level objects;
# raise it if the structure ever nests deeper.
$catalogJsonStr = $catalog | ConvertTo-Json -Depth 5 -Compress
[System.IO.File]::WriteAllText($catalogFile, $catalogJsonStr)
$tomlContent = @"
model = "gpt-5.4"
model_reasoning_effort = "high"
model_provider = "custom"
model_catalog_json = "$catalogPath"
approval_policy = "never"
sandbox_mode = "danger-full-access"
check_for_update_on_startup = false