Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
111 changes: 78 additions & 33 deletions registry/coder-labs/modules/codex/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ display_name: Codex CLI
icon: ../../../../.icons/openai.svg
description: Run Codex CLI in your workspace with AgentAPI integration
verified: true
tags: [agent, codex, ai, openai, tasks]
tags: [agent, codex, ai, openai, tasks, aibridge]
---

# Codex CLI
Expand All @@ -13,7 +13,7 @@ Run Codex CLI in your workspace to access OpenAI's models through the Codex inte
```tf
module "codex" {
source = "registry.coder.com/coder-labs/codex/coder"
version = "4.0.0"
version = "4.1.0"
agent_id = coder_agent.example.id
openai_api_key = var.openai_api_key
workdir = "/home/coder/project"
Expand All @@ -32,15 +32,57 @@ module "codex" {
module "codex" {
count = data.coder_workspace.me.start_count
source = "registry.coder.com/coder-labs/codex/coder"
version = "4.0.0"
version = "4.1.0"
agent_id = coder_agent.example.id
openai_api_key = "..."
workdir = "/home/coder/project"
report_tasks = false
}
```

### Tasks integration
### Usage with AI Bridge

[AI Bridge](https://coder.com/docs/ai-coder/ai-bridge) is a Premium Coder feature that provides centralized LLM proxy management. To use AI Bridge, set `enable_aibridge = true`; this requires Coder version 2.30 or later.

For tasks integration with AI Bridge, add `enable_aibridge = true` to the [Usage with Tasks](#usage-with-tasks) example below.

#### Standalone usage with AI Bridge

```tf
module "codex" {
source = "registry.coder.com/coder-labs/codex/coder"
version = "4.1.0"
agent_id = coder_agent.example.id
workdir = "/home/coder/project"
enable_aibridge = true
}
```

When `enable_aibridge = true`, the module:

- Configures Codex to use the AI Bridge profile with `base_url` pointing to `${data.coder_workspace.me.access_url}/api/v2/aibridge/openai/v1` and `env_key` pointing to the workspace owner's session token

```toml
[model_providers.aibridge]
name = "AI Bridge"
base_url = "https://example.coder.com/api/v2/aibridge/openai/v1"
env_key = "CODER_AIBRIDGE_SESSION_TOKEN"
wire_api = "responses"

[profiles.aibridge]
model_provider = "aibridge"
model = "<model>" # as configured in the module input
model_reasoning_effort = "<model_reasoning_effort>" # as configured in the module input
```

Codex then runs with `--profile aibridge`.

This allows Codex to route API requests through Coder's AI Bridge instead of directly to OpenAI's API.
The template build will fail if `openai_api_key` is provided alongside `enable_aibridge = true`, since the two authentication methods are mutually exclusive.

### Usage with Tasks

This example shows how to configure Codex with Coder tasks.

```tf
resource "coder_ai_task" "task" {
Expand All @@ -52,17 +94,46 @@ data "coder_task" "me" {}

module "codex" {
source = "registry.coder.com/coder-labs/codex/coder"
version = "4.0.0"
version = "4.1.0"
agent_id = coder_agent.example.id
openai_api_key = "..."
ai_prompt = data.coder_task.me.prompt
workdir = "/home/coder/project"

# Custom configuration for full auto mode
# Optional: route through AI Bridge (Premium feature)
# enable_aibridge = true
}
```

### Advanced Configuration

This example shows additional configuration options for custom models, MCP servers, and base configuration.

```tf
module "codex" {
source = "registry.coder.com/coder-labs/codex/coder"
version = "4.1.0"
agent_id = coder_agent.example.id
openai_api_key = "..."
workdir = "/home/coder/project"

codex_version = "0.1.0" # Pin to a specific version
codex_model = "gpt-4o" # Custom model

# Override default configuration
base_config_toml = <<-EOT
sandbox_mode = "danger-full-access"
approval_policy = "never"
preferred_auth_method = "apikey"
EOT

# Add extra MCP servers
additional_mcp_servers = <<-EOT
[mcp_servers.GitHub]
command = "npx"
args = ["-y", "@modelcontextprotocol/server-github"]
type = "stdio"
EOT
}
```

Expand Down Expand Up @@ -92,33 +163,6 @@ preferred_auth_method = "apikey"
network_access = true
```

### Custom Configuration

For custom Codex configuration, use `base_config_toml` and/or `additional_mcp_servers`:

```tf
module "codex" {
source = "registry.coder.com/coder-labs/codex/coder"
version = "4.0.0"
# ... other variables ...

# Override default configuration
base_config_toml = <<-EOT
sandbox_mode = "danger-full-access"
approval_policy = "never"
preferred_auth_method = "apikey"
EOT

# Add extra MCP servers
additional_mcp_servers = <<-EOT
[mcp_servers.GitHub]
command = "npx"
args = ["-y", "@modelcontextprotocol/server-github"]
type = "stdio"
EOT
}
```

> [!NOTE]
> If no custom configuration is provided, the module uses secure defaults. The Coder MCP server is always included automatically. For containerized workspaces (Docker/Kubernetes), you may need `sandbox_mode = "danger-full-access"` to avoid permission issues. For advanced options, see [Codex config docs](https://github.com/openai/codex/blob/main/codex-rs/config.md).

Expand All @@ -137,3 +181,4 @@ module "codex" {
- [Codex CLI Documentation](https://github.com/openai/codex)
- [AgentAPI Documentation](https://github.com/coder/agentapi)
- [Coder AI Agents Guide](https://coder.com/docs/tutorials/ai-agents)
- [AI Bridge](https://coder.com/docs/ai-coder/ai-bridge)
36 changes: 32 additions & 4 deletions registry/coder-labs/modules/codex/main.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ describe("codex", async () => {
sandbox_mode = "danger-full-access"
approval_policy = "never"
preferred_auth_method = "apikey"
[custom_section]
new_feature = true
`.trim();
Expand Down Expand Up @@ -189,7 +189,7 @@ describe("codex", async () => {
args = ["-y", "@modelcontextprotocol/server-github"]
type = "stdio"
description = "GitHub integration"
[mcp_servers.FileSystem]
command = "npx"
args = ["-y", "@modelcontextprotocol/server-filesystem", "/workspace"]
Expand All @@ -215,7 +215,7 @@ describe("codex", async () => {
approval_policy = "untrusted"
preferred_auth_method = "chatgpt"
custom_setting = "test-value"
[advanced_settings]
timeout = 30000
debug = true
Expand All @@ -228,7 +228,7 @@ describe("codex", async () => {
args = ["--serve", "--port", "8080"]
type = "stdio"
description = "Custom development tool"
[mcp_servers.DatabaseMCP]
command = "python"
args = ["-m", "database_mcp_server"]
Expand Down Expand Up @@ -454,4 +454,32 @@ describe("codex", async () => {
);
expect(startLog.stdout).not.toContain("test prompt");
});

test("codex-with-aibridge", async () => {
  // Provision the module with AI Bridge routing enabled and a non-default
  // reasoning effort, then run the install/start scripts in the container.
  const { id } = await setup({
    moduleVariables: {
      enable_aibridge: "true",
      model_reasoning_effort: "none",
    },
  });
  await execModuleScript(id);

  const agentLog = await readFileContainer(
    id,
    "/home/coder/.codex-module/agentapi-start.log",
  );
  const codexConfig = await readFileContainer(
    id,
    "/home/coder/.codex/config.toml",
  );

  // The start script must both announce AI Bridge mode and actually pass
  // the aibridge profile to Codex.
  const expectedLogLines = [
    "AI Bridge is enabled, using profile aibridge",
    "Starting Codex with arguments: --profile aibridge",
  ];
  for (const line of expectedLogLines) {
    expect(agentLog).toContain(line);
  }

  // config.toml must declare the aibridge profile backed by the aibridge
  // model provider.
  expect(codexConfig).toContain(
    "[profiles.aibridge]\n" + 'model_provider = "aibridge"',
  );
});
});
49 changes: 46 additions & 3 deletions registry/coder-labs/modules/codex/main.tf
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
terraform {
required_version = ">= 1.0"
required_version = ">= 1.9"

required_providers {
coder = {
Expand Down Expand Up @@ -71,6 +71,27 @@ variable "cli_app_display_name" {
default = "Codex CLI"
}

# Route Codex API traffic through Coder's AI Bridge (a Premium feature)
# instead of calling OpenAI directly. When enabled, the module writes an
# "aibridge" model provider/profile into config.toml and authenticates via
# the workspace owner's session token rather than an OpenAI API key.
variable "enable_aibridge" {
  type        = bool
  description = "Use AI Bridge for Codex. https://coder.com/docs/ai-coder/ai-bridge"
  default     = false

  # AI Bridge and a direct OpenAI key are mutually exclusive; fail the plan
  # early if both are set. NOTE: this validation references another variable
  # (var.openai_api_key), which is why the module requires Terraform >= 1.9.
  validation {
    condition     = !(var.enable_aibridge && length(var.openai_api_key) > 0)
    error_message = "openai_api_key cannot be provided when enable_aibridge is true. AI Bridge automatically authenticates the client using Coder credentials."
  }
}

# Reasoning effort written into the aibridge profile's
# `model_reasoning_effort` key in config.toml (only used when
# enable_aibridge = true).
variable "model_reasoning_effort" {
  type        = string
  description = "The reasoning effort for the AI Bridge model. One of: none, low, medium, high. https://platform.openai.com/docs/guides/latest-model#lower-reasoning-effort"
  default     = "medium"
  # Restrict to the values OpenAI's reasoning-effort setting accepts.
  validation {
    condition     = contains(["none", "low", "medium", "high"], var.model_reasoning_effort)
    error_message = "model_reasoning_effort must be one of: none, low, medium, high."
  }
}

variable "install_codex" {
type = bool
description = "Whether to install Codex."
Expand Down Expand Up @@ -115,8 +136,8 @@ variable "agentapi_version" {

variable "codex_model" {
type = string
description = "The model for Codex to use. Defaults to gpt-5.1-codex-max."
default = ""
description = "The model for Codex to use. Defaults to gpt-5.2-codex."
default = "gpt-5.2-codex"
}

variable "pre_install_script" {
Expand Down Expand Up @@ -155,12 +176,31 @@ resource "coder_env" "openai_api_key" {
value = var.openai_api_key
}

# When AI Bridge is enabled, expose the workspace owner's session token to
# the agent as CODER_AIBRIDGE_SESSION_TOKEN. Codex reads this variable via
# the provider's `env_key` to authenticate requests to the AI Bridge
# endpoint (no OpenAI API key is involved).
resource "coder_env" "coder_aibridge_session_token" {
  count    = var.enable_aibridge ? 1 : 0
  agent_id = var.agent_id
  name     = "CODER_AIBRIDGE_SESSION_TOKEN"
  value    = data.coder_workspace_owner.me.session_token
}

locals {
  workdir         = trimsuffix(var.workdir, "/") # normalize: no trailing slash
  app_slug        = "codex"
  install_script  = file("${path.module}/scripts/install.sh")
  start_script    = file("${path.module}/scripts/start.sh")
  module_dir_name = ".codex-module"
  # TOML fragment appended to ~/.codex/config.toml by install.sh when
  # enable_aibridge = true: a model provider pointing at this deployment's
  # AI Bridge OpenAI-compatible endpoint (authenticated through the
  # CODER_AIBRIDGE_SESSION_TOKEN env var) plus a profile selecting it,
  # which the start script activates with `--profile aibridge`.
  aibridge_config = <<-EOF
    [model_providers.aibridge]
    name = "AI Bridge"
    base_url = "${data.coder_workspace.me.access_url}/api/v2/aibridge/openai/v1"
    env_key = "CODER_AIBRIDGE_SESSION_TOKEN"
    wire_api = "responses"

    [profiles.aibridge]
    model_provider = "aibridge"
    model = "${var.codex_model}"
    model_reasoning_effort = "${var.model_reasoning_effort}"
  EOF
}

module "agentapi" {
Expand Down Expand Up @@ -196,6 +236,7 @@ module "agentapi" {
ARG_CODEX_START_DIRECTORY='${local.workdir}' \
ARG_CODEX_TASK_PROMPT='${base64encode(var.ai_prompt)}' \
ARG_CONTINUE='${var.continue}' \
ARG_ENABLE_AIBRIDGE='${var.enable_aibridge}' \
/tmp/start.sh
EOT

Expand All @@ -211,6 +252,8 @@ module "agentapi" {
ARG_INSTALL='${var.install_codex}' \
ARG_CODEX_VERSION='${var.codex_version}' \
ARG_BASE_CONFIG_TOML='${base64encode(var.base_config_toml)}' \
ARG_ENABLE_AIBRIDGE='${var.enable_aibridge}' \
ARG_AIBRIDGE_CONFIG='${base64encode(var.enable_aibridge ? local.aibridge_config : "")}' \
ARG_ADDITIONAL_MCP_SERVERS='${base64encode(var.additional_mcp_servers)}' \
ARG_CODER_MCP_APP_STATUS_SLUG='${local.app_slug}' \
ARG_CODEX_START_DIRECTORY='${local.workdir}' \
Expand Down
22 changes: 21 additions & 1 deletion registry/coder-labs/modules/codex/scripts/install.sh
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,8 @@ set -o nounset
ARG_BASE_CONFIG_TOML=$(echo -n "$ARG_BASE_CONFIG_TOML" | base64 -d)
ARG_ADDITIONAL_MCP_SERVERS=$(echo -n "$ARG_ADDITIONAL_MCP_SERVERS" | base64 -d)
ARG_CODEX_INSTRUCTION_PROMPT=$(echo -n "$ARG_CODEX_INSTRUCTION_PROMPT" | base64 -d)
ARG_ENABLE_AIBRIDGE=${ARG_ENABLE_AIBRIDGE:-false}
ARG_AIBRIDGE_CONFIG=$(echo -n "$ARG_AIBRIDGE_CONFIG" | base64 -d)

echo "=== Codex Module Configuration ==="
printf "Install Codex: %s\n" "$ARG_INSTALL"
Expand All @@ -24,6 +26,7 @@ printf "Has Additional MCP: %s\n" "$([ -n "$ARG_ADDITIONAL_MCP_SERVERS" ] && ech
printf "Has System Prompt: %s\n" "$([ -n "$ARG_CODEX_INSTRUCTION_PROMPT" ] && echo "Yes" || echo "No")"
printf "OpenAI API Key: %s\n" "$([ -n "$ARG_OPENAI_API_KEY" ] && echo "Provided" || echo "Not provided")"
printf "Report Tasks: %s\n" "$ARG_REPORT_TASKS"
printf "Enable Coder AI Bridge: %s\n" "$ARG_ENABLE_AIBRIDGE"
echo "======================================"

set +o nounset
Expand Down Expand Up @@ -127,6 +130,15 @@ EOF
fi
}

#######################################
# Append the AI Bridge TOML section to the Codex config file.
# Globals:   ARG_AIBRIDGE_CONFIG (read) - decoded TOML; empty disables append
# Arguments: $1 - path to the config.toml to append to
# Outputs:   progress message to stdout; config section appended to $1
#######################################
append_aibridge_config_section() {
  local config_path="$1"

  if [ -n "$ARG_AIBRIDGE_CONFIG" ]; then
    printf "Adding AI Bridge configuration\n"
    # printf instead of `echo -e`: portable, and it does not re-interpret
    # backslash escapes that may legitimately appear inside the TOML payload.
    printf '\n# AI Bridge Configuration\n%s\n' "$ARG_AIBRIDGE_CONFIG" >> "$config_path"
  fi
}

function populate_config_toml() {
CONFIG_PATH="$HOME/.codex/config.toml"
mkdir -p "$(dirname "$CONFIG_PATH")"
Expand All @@ -140,6 +152,11 @@ function populate_config_toml() {
fi

append_mcp_servers_section "$CONFIG_PATH"

if [ "$ARG_ENABLE_AIBRIDGE" = "true" ]; then
printf "AI Bridge is enabled\n"
append_aibridge_config_section "$CONFIG_PATH"
fi
}

function add_instruction_prompt_if_exists() {
Expand Down Expand Up @@ -185,4 +202,7 @@ install_codex
codex --version
populate_config_toml
add_instruction_prompt_if_exists
add_auth_json

if [ "$ARG_ENABLE_AIBRIDGE" = "false" ]; then
add_auth_json
fi
Loading