diff --git a/CLAUDE.md b/CLAUDE.md index a459860e2..7a3a4ce9b 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -426,6 +426,12 @@ Langfuse supports OpenTelemetry as of 2025: - REQUIRED: Always use `GetK8sClientsForRequest(c)` to get user-scoped K8s clients - REQUIRED: Return `401 Unauthorized` if user token is missing or invalid - Exception: Backend service account ONLY for CR writes and token minting (handlers/sessions.go:227, handlers/sessions.go:449) + - Exception: Operator service account for startup migrations (operator/internal/handlers/migration.go:29-47) + - V1→V2 repo format migration runs once at operator startup + - Updates existing CRs the operator already has RBAC access to + - Only modifies data structure format, not repository content + - Active sessions (Running/Creating) are skipped to avoid interference + - See ADR-0002 for detailed security model documentation 2. **Never Panic in Production Code** - FORBIDDEN: `panic()` in handlers, reconcilers, or any production path diff --git a/Makefile b/Makefile index 13fa26ca6..c33d4e934 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: help setup build-all build-frontend build-backend build-operator build-runner deploy clean +.PHONY: help setup build-all build-frontend build-backend build-operator build-runner deploy clean test-runner test-runner-autopush .PHONY: local-up local-down local-clean local-status local-rebuild local-reload-backend local-reload-frontend local-reload-operator local-sync-version .PHONY: local-dev-token .PHONY: local-logs local-logs-backend local-logs-frontend local-logs-operator local-shell local-shell-frontend @@ -47,7 +47,8 @@ GIT_VERSION := $(shell git describe --tags --always --dirty 2>/dev/null || echo BUILD_DATE := $(shell date -u +"%Y-%m-%dT%H:%M:%SZ") BUILD_USER := $(shell whoami)@$(shell hostname) -# Colors for output +# Colors for output (use printf to properly interpret escape sequences) +# Use like: @printf "$(COLOR_BLUE)Text here$(COLOR_RESET)\n" COLOR_RESET := \033[0m COLOR_BOLD := \033[1m COLOR_GREEN := \033[32m @@ -55,6 +56,11 @@ COLOR_YELLOW := \033[33m COLOR_BLUE := \033[34m COLOR_RED := \033[31m +# Helper to echo with colors (use this instead of echo) +define echo_color + @echo "$(1)" +endef + # Platform flag ifneq ($(PLATFORM),) PLATFORM_FLAG := --platform=$(PLATFORM) @@ -64,30 +70,41 @@ endif ##@ General +test-colors: ## Test color output rendering + @echo "$(COLOR_BOLD)Testing Color Output$(COLOR_RESET)" + @echo "" + @echo "$(COLOR_RED)✗ Red text (errors)$(COLOR_RESET)" + @echo "$(COLOR_GREEN)✓ Green text (success)$(COLOR_RESET)" + @echo "$(COLOR_BLUE)▶ Blue text (info)$(COLOR_RESET)" + @echo "$(COLOR_YELLOW)⚠ Yellow text (warnings)$(COLOR_RESET)" + @echo "$(COLOR_BOLD)Bold text$(COLOR_RESET)" + @echo "" + @echo "If you see color codes like \033[1m instead of colors, your terminal may not support ANSI colors" + help: ## Display this help message - @echo '$(COLOR_BOLD)Ambient Code Platform - Development Makefile$(COLOR_RESET)' - @echo '' - @echo '$(COLOR_BOLD)Quick Start:$(COLOR_RESET)' - @echo ' $(COLOR_GREEN)make local-up$(COLOR_RESET) Start local development environment' - @echo ' $(COLOR_GREEN)make local-status$(COLOR_RESET) Check status of local environment' - @echo ' $(COLOR_GREEN)make local-logs$(COLOR_RESET) View logs from all components' - @echo ' $(COLOR_GREEN)make local-down$(COLOR_RESET) Stop local environment' - @echo '' - @echo '$(COLOR_BOLD)Quality Assurance:$(COLOR_RESET)' - @echo ' $(COLOR_GREEN)make validate-makefile$(COLOR_RESET) Validate Makefile quality (runs 
in CI)' - @echo ' $(COLOR_GREEN)make makefile-health$(COLOR_RESET) Run comprehensive health check' - @echo '' + @echo "$(COLOR_BOLD)Ambient Code Platform - Development Makefile$(COLOR_RESET)" + @echo "" + @echo "$(COLOR_BOLD)Quick Start:$(COLOR_RESET)" + @echo " $(COLOR_GREEN)make local-up$(COLOR_RESET) Start local development environment" + @echo " $(COLOR_GREEN)make local-status$(COLOR_RESET) Check status of local environment" + @echo " $(COLOR_GREEN)make local-logs$(COLOR_RESET) View logs from all components" + @echo " $(COLOR_GREEN)make local-down$(COLOR_RESET) Stop local environment" + @echo "" + @echo "$(COLOR_BOLD)Quality Assurance:$(COLOR_RESET)" + @echo " $(COLOR_GREEN)make validate-makefile$(COLOR_RESET) Validate Makefile quality (runs in CI)" + @echo " $(COLOR_GREEN)make makefile-health$(COLOR_RESET) Run comprehensive health check" + @echo "" @awk 'BEGIN {FS = ":.*##"; printf "$(COLOR_BOLD)Available Targets:$(COLOR_RESET)\n"} /^[a-zA-Z_-]+:.*?##/ { printf " $(COLOR_BLUE)%-20s$(COLOR_RESET) %s\n", $$1, $$2 } /^##@/ { printf "\n$(COLOR_BOLD)%s$(COLOR_RESET)\n", substr($$0, 5) } ' $(MAKEFILE_LIST) - @echo '' - @echo '$(COLOR_BOLD)Configuration Variables:$(COLOR_RESET)' - @echo ' CONTAINER_ENGINE=$(CONTAINER_ENGINE) (docker or podman)' - @echo ' NAMESPACE=$(NAMESPACE)' - @echo ' PLATFORM=$(PLATFORM)' - @echo '' - @echo '$(COLOR_BOLD)Examples:$(COLOR_RESET)' - @echo ' make local-up CONTAINER_ENGINE=docker' - @echo ' make local-reload-backend' - @echo ' make build-all PLATFORM=linux/arm64' + @echo "" + @echo "$(COLOR_BOLD)Configuration Variables:$(COLOR_RESET)" + @echo " CONTAINER_ENGINE=$(CONTAINER_ENGINE) (docker or podman)" + @echo " NAMESPACE=$(NAMESPACE)" + @echo " PLATFORM=$(PLATFORM)" + @echo "" + @echo "$(COLOR_BOLD)Examples:$(COLOR_RESET)" + @echo " make local-up CONTAINER_ENGINE=docker" + @echo " make local-reload-backend" + @echo " make build-all PLATFORM=linux/arm64" ##@ Building @@ -145,6 +162,24 @@ build-runner: ## Build Claude Code runner image -t $(RUNNER_IMAGE) -f claude-code-runner/Dockerfile . @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Runner built: $(RUNNER_IMAGE)" +test-runner: build-runner ## Run runner tests in container + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Running runner tests in container..." + @$(CONTAINER_ENGINE) run --rm \ + -v $(PWD)/components/runners/claude-code-runner:/app/test-runner:Z \ + -w /app/test-runner \ + $(RUNNER_IMAGE) \ + bash -c "pip install pytest pytest-asyncio && python -m pytest tests/ -v" + @echo "$(COLOR_GREEN)✓$(COLOR_RESET) Runner tests passed" + +test-runner-autopush: build-runner ## Run autoPush tests in container + @echo "$(COLOR_BLUE)▶$(COLOR_RESET) Running autoPush tests in container..." 
+ @$(CONTAINER_ENGINE) run --rm \ + -v $(PWD)/components/runners/claude-code-runner:/app/test-runner:Z \ + -w /app/test-runner \ + $(RUNNER_IMAGE) \ + bash -c "pip install pytest pytest-asyncio && python -m pytest tests/test_repo_autopush.py -v" + @echo "$(COLOR_GREEN)✓$(COLOR_RESET) autoPush tests passed" + ##@ Git Hooks setup-hooks: ## Install git hooks for branch protection diff --git a/components/backend/handlers/content.go b/components/backend/handlers/content.go index 0732cc67d..504d9e406 100644 --- a/components/backend/handlers/content.go +++ b/components/backend/handlers/content.go @@ -14,6 +14,7 @@ import ( "ambient-code-backend/git" "ambient-code-backend/pathutil" + "ambient-code-backend/types" "github.com/gin-gonic/gin" ) @@ -270,7 +271,7 @@ func ContentGitConfigureRemote(c *gin.Context) { // This is best-effort - don't fail if fetch fails branch := body.Branch if branch == "" { - branch = "main" + branch = types.DefaultBranch } cmd := exec.CommandContext(c.Request.Context(), "git", "fetch", "origin", branch) cmd.Dir = abs @@ -684,7 +685,7 @@ func ContentGitMergeStatus(c *gin.Context) { } if branch == "" { - branch = "main" + branch = types.DefaultBranch } // Check if git repo exists @@ -734,7 +735,7 @@ func ContentGitPull(c *gin.Context) { } if body.Branch == "" { - body.Branch = "main" + body.Branch = types.DefaultBranch } if err := GitPullRepo(c.Request.Context(), abs, body.Branch); err != nil { @@ -769,7 +770,7 @@ func ContentGitPushToBranch(c *gin.Context) { } if body.Branch == "" { - body.Branch = "main" + body.Branch = types.DefaultBranch } if body.Message == "" { diff --git a/components/backend/handlers/helpers.go b/components/backend/handlers/helpers.go index c251e2504..757b15507 100644 --- a/components/backend/handlers/helpers.go +++ b/components/backend/handlers/helpers.go @@ -1,10 +1,12 @@ package handlers import ( + "ambient-code-backend/types" "context" "fmt" "log" "math" + "strings" "time" authv1 "k8s.io/api/authorization/v1" @@ -74,3 +76,77 @@ func ValidateSecretAccess(ctx context.Context, k8sClient kubernetes.Interface, n return nil } + +// ParseRepoMap parses a repository map (from CR spec.repos[]) into a SimpleRepo struct. +// This helper is exported for testing purposes. +// Supports both legacy format (url/branch) and new format (input/output/autoPush). 
+func ParseRepoMap(m map[string]interface{}) (types.SimpleRepo, error) { + r := types.SimpleRepo{} + + // Check for new format (input/output/autoPush) + if inputMap, hasInput := m["input"].(map[string]interface{}); hasInput { + // New format + input := &types.RepoLocation{} + if url, ok := inputMap["url"].(string); ok { + input.URL = url + } + if branch, ok := inputMap["branch"].(string); ok && strings.TrimSpace(branch) != "" { + input.Branch = types.StringPtr(branch) + } + r.Input = input + + // Parse output if present + if outputMap, hasOutput := m["output"].(map[string]interface{}); hasOutput { + output := &types.RepoLocation{} + if url, ok := outputMap["url"].(string); ok { + output.URL = url + } + if branch, ok := outputMap["branch"].(string); ok && strings.TrimSpace(branch) != "" { + output.Branch = types.StringPtr(branch) + } + r.Output = output + } + + // Parse autoPush if present + if autoPush, ok := m["autoPush"].(bool); ok { + r.AutoPush = types.BoolPtr(autoPush) + } + + if strings.TrimSpace(r.Input.URL) == "" { + return r, fmt.Errorf("input.url is required") + } + + // Validate that output differs from input (if output is specified) + if r.Output != nil { + inputURL := strings.TrimSpace(r.Input.URL) + outputURL := strings.TrimSpace(r.Output.URL) + inputBranch := "" + outputBranch := "" + if r.Input.Branch != nil { + inputBranch = strings.TrimSpace(*r.Input.Branch) + } + if r.Output.Branch != nil { + outputBranch = strings.TrimSpace(*r.Output.Branch) + } + + // Output must differ from input in either URL or branch + if inputURL == outputURL && inputBranch == outputBranch { + return r, fmt.Errorf("output repository must differ from input (different URL or branch required)") + } + } + } else { + // Legacy format + if url, ok := m["url"].(string); ok { + r.URL = url + } + if branch, ok := m["branch"].(string); ok && strings.TrimSpace(branch) != "" { + r.Branch = types.StringPtr(branch) + } + + if strings.TrimSpace(r.URL) == "" { + return r, fmt.Errorf("url is required") + } + } + + return r, nil +} diff --git a/components/backend/handlers/helpers_test.go b/components/backend/handlers/helpers_test.go new file mode 100644 index 000000000..6513ed24c --- /dev/null +++ b/components/backend/handlers/helpers_test.go @@ -0,0 +1,244 @@ +package handlers_test + +import ( + "testing" + + "ambient-code-backend/handlers" + "ambient-code-backend/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestParseRepoMap_NewFormat verifies parsing of new format repos from CR maps +func TestParseRepoMap_NewFormat(t *testing.T) { + tests := []struct { + name string + repoMap map[string]interface{} + expectError bool + validate func(t *testing.T, repo types.SimpleRepo) + }{ + { + name: "new format with input and output", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + "branch": "main", + }, + "output": map[string]interface{}{ + "url": "https://github.com/user/fork", + "branch": "feature", + }, + "autoPush": true, + }, + expectError: false, + validate: func(t *testing.T, repo types.SimpleRepo) { + require.NotNil(t, repo.Input) + assert.Equal(t, "https://github.com/org/repo", repo.Input.URL) + assert.Equal(t, "main", *repo.Input.Branch) + + require.NotNil(t, repo.Output) + assert.Equal(t, "https://github.com/user/fork", repo.Output.URL) + assert.Equal(t, "feature", *repo.Output.Branch) + + require.NotNil(t, repo.AutoPush) + assert.True(t, *repo.AutoPush) + }, + }, + { + name: "new format input only", 
+ repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + "branch": "develop", + }, + "autoPush": false, + }, + expectError: false, + validate: func(t *testing.T, repo types.SimpleRepo) { + require.NotNil(t, repo.Input) + assert.Equal(t, "https://github.com/org/repo", repo.Input.URL) + assert.Equal(t, "develop", *repo.Input.Branch) + + assert.Nil(t, repo.Output) + + require.NotNil(t, repo.AutoPush) + assert.False(t, *repo.AutoPush) + }, + }, + { + name: "new format without branches", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + }, + "autoPush": true, + }, + expectError: false, + validate: func(t *testing.T, repo types.SimpleRepo) { + require.NotNil(t, repo.Input) + assert.Equal(t, "https://github.com/org/repo", repo.Input.URL) + assert.Nil(t, repo.Input.Branch) + }, + }, + { + name: "new format without autoPush field", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + }, + }, + expectError: false, + validate: func(t *testing.T, repo types.SimpleRepo) { + require.NotNil(t, repo.Input) + assert.Nil(t, repo.AutoPush, "AutoPush should be nil when not specified") + }, + }, + { + name: "new format with empty input URL", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "", + }, + }, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + repo, err := handlers.ParseRepoMap(tt.repoMap) + + if tt.expectError { + assert.Error(t, err) + } else { + require.NoError(t, err) + tt.validate(t, repo) + } + }) + } +} + +// TestParseRepoMap_LegacyFormat verifies parsing of legacy format repos from CR maps +func TestParseRepoMap_LegacyFormat(t *testing.T) { + tests := []struct { + name string + repoMap map[string]interface{} + expectError bool + validate func(t *testing.T, repo types.SimpleRepo) + }{ + { + name: "legacy format with branch", + repoMap: map[string]interface{}{ + "url": "https://github.com/org/repo", + "branch": "main", + }, + expectError: false, + validate: func(t *testing.T, repo types.SimpleRepo) { + assert.Equal(t, "https://github.com/org/repo", repo.URL) + require.NotNil(t, repo.Branch) + assert.Equal(t, "main", *repo.Branch) + + // New format fields should be nil + assert.Nil(t, repo.Input) + assert.Nil(t, repo.Output) + assert.Nil(t, repo.AutoPush) + }, + }, + { + name: "legacy format without branch", + repoMap: map[string]interface{}{ + "url": "https://github.com/org/repo", + }, + expectError: false, + validate: func(t *testing.T, repo types.SimpleRepo) { + assert.Equal(t, "https://github.com/org/repo", repo.URL) + assert.Nil(t, repo.Branch) + + // New format fields should be nil + assert.Nil(t, repo.Input) + }, + }, + { + name: "legacy format with empty URL", + repoMap: map[string]interface{}{ + "url": "", + }, + expectError: true, + }, + { + name: "legacy format with whitespace-only branch", + repoMap: map[string]interface{}{ + "url": "https://github.com/org/repo", + "branch": " ", + }, + expectError: false, + validate: func(t *testing.T, repo types.SimpleRepo) { + assert.Equal(t, "https://github.com/org/repo", repo.URL) + assert.Nil(t, repo.Branch, "Whitespace-only branch should result in nil") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + repo, err := handlers.ParseRepoMap(tt.repoMap) + + if tt.expectError { + assert.Error(t, err) + } else { + require.NoError(t, err) + tt.validate(t, repo) + } 
+ }) + } +} + +// TestParseRepoMap_RoundTrip verifies ToMapForCR and ParseRepoMap are inverses +func TestParseRepoMap_RoundTrip(t *testing.T) { + tests := []struct { + name string + repo types.SimpleRepo + }{ + { + name: "new format with all fields", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + }, + Output: &types.RepoLocation{ + URL: "https://github.com/user/fork", + Branch: types.StringPtr("feature"), + }, + AutoPush: types.BoolPtr(true), + }, + }, + { + name: "new format input only", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("develop"), + }, + AutoPush: types.BoolPtr(false), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Convert to map + m := tt.repo.ToMapForCR() + + // Parse back + parsed, err := handlers.ParseRepoMap(m) + require.NoError(t, err) + + // Verify they match + assert.Equal(t, tt.repo.Input, parsed.Input) + assert.Equal(t, tt.repo.Output, parsed.Output) + assert.Equal(t, tt.repo.AutoPush, parsed.AutoPush) + }) + } +} diff --git a/components/backend/handlers/repo_seed.go b/components/backend/handlers/repo_seed.go index 90b608b03..f67745f8b 100644 --- a/components/backend/handlers/repo_seed.go +++ b/components/backend/handlers/repo_seed.go @@ -312,7 +312,7 @@ func SeedRepositoryEndpoint(c *gin.Context) { } if req.Branch == "" { - req.Branch = "main" + req.Branch = types.DefaultBranch } userID, _ := c.Get("userID") diff --git a/components/backend/handlers/sessions.go b/components/backend/handlers/sessions.go index 0e359f930..090e2f98a 100644 --- a/components/backend/handlers/sessions.go +++ b/components/backend/handlers/sessions.go @@ -153,7 +153,7 @@ func parseSpec(spec map[string]interface{}) types.AgenticSessionSpec { result.UserContext = uc } - // Multi-repo parsing (simplified format) + // Multi-repo parsing (supports both legacy and new formats) if arr, ok := spec["repos"].([]interface{}); ok { repos := make([]types.SimpleRepo, 0, len(arr)) for _, it := range arr { @@ -161,16 +161,14 @@ func parseSpec(spec map[string]interface{}) types.AgenticSessionSpec { if !ok { continue } - r := types.SimpleRepo{} - if url, ok := m["url"].(string); ok { - r.URL = url - } - if branch, ok := m["branch"].(string); ok && strings.TrimSpace(branch) != "" { - r.Branch = types.StringPtr(branch) - } - if strings.TrimSpace(r.URL) != "" { - repos = append(repos, r) + + // Use ParseRepoMap helper to avoid code duplication + r, err := ParseRepoMap(m) + if err != nil { + log.Printf("Skipping invalid repo in spec: %v", err) + continue } + repos = append(repos, r) } result.Repos = repos } @@ -620,17 +618,27 @@ func CreateSession(c *gin.Context) { session["spec"].(map[string]interface{})["autoPushOnComplete"] = *req.AutoPushOnComplete } - // Set multi-repo configuration on spec (simplified format) + // Set multi-repo configuration on spec with new input/output/autoPush structure { spec := session["spec"].(map[string]interface{}) if len(req.Repos) > 0 { + // Get session-level autoPush default (false if not set) + sessionAutoPush := false + if req.AutoPushOnComplete != nil { + sessionAutoPush = *req.AutoPushOnComplete + } + arr := make([]map[string]interface{}, 0, len(req.Repos)) for _, r := range req.Repos { - m := map[string]interface{}{"url": r.URL} - if r.Branch != nil { - m["branch"] = *r.Branch + // Normalize legacy format to new format + normalized, err := r.NormalizeRepo(sessionAutoPush) + if 
err != nil { + log.Printf("Failed to normalize repo: %v", err) + c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("Invalid repository configuration: %v", err)}) + return } - arr = append(arr, m) + // Convert to map for CR storage + arr = append(arr, normalized.ToMapForCR()) } spec["repos"] = arr } @@ -1405,20 +1413,14 @@ func AddRepo(c *gin.Context) { return } - var req struct { - URL string `json:"url" binding:"required"` - Branch string `json:"branch"` - } + // Request body supports both legacy and new repo formats + var req types.SimpleRepo if err := c.ShouldBindJSON(&req); err != nil { c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()}) return } - if req.Branch == "" { - req.Branch = "main" - } - gvr := GetAgenticSessionV1Alpha1Resource() item, err := k8sDyn.Resource(gvr).Namespace(project).Get(context.TODO(), sessionName, v1.GetOptions{}) if err != nil { @@ -1447,10 +1449,21 @@ func AddRepo(c *gin.Context) { repos = []interface{}{} } - newRepo := map[string]interface{}{ - "url": req.URL, - "branch": req.Branch, + // Get session-level autoPush default + sessionAutoPush := false + if v, ok := spec["autoPushOnComplete"].(bool); ok { + sessionAutoPush = v + } + + // Normalize and convert to CR format + normalized, err := req.NormalizeRepo(sessionAutoPush) + if err != nil { + log.Printf("Failed to normalize repo: %v", err) + c.JSON(http.StatusBadRequest, gin.H{"error": fmt.Sprintf("Invalid repository configuration: %v", err)}) + return } + newRepo := normalized.ToMapForCR() + repos = append(repos, newRepo) spec["repos"] = repos @@ -1474,7 +1487,14 @@ func AddRepo(c *gin.Context) { session.Status = parseStatus(statusMap) } - log.Printf("Added repository %s to session %s in project %s", req.URL, sessionName, project) + // Log the added repo URL (from either new or legacy format) + repoURL := "" + if req.Input != nil { + repoURL = req.Input.URL + } else { + repoURL = req.URL + } + log.Printf("Added repository %s to session %s in project %s", repoURL, sessionName, project) c.JSON(http.StatusOK, gin.H{"message": "Repository added", "session": session}) } @@ -1518,8 +1538,26 @@ func RemoveRepo(c *gin.Context) { filteredRepos := []interface{}{} found := false for _, r := range repos { - rm, _ := r.(map[string]interface{}) - url, _ := rm["url"].(string) + rm, ok := r.(map[string]interface{}) + if !ok { + log.Printf("Warning: repo entry is not a map, skipping") + continue + } + + // Get URL from either new format (input.url) or legacy format (url) + url := "" + if inputMap, hasInput := rm["input"].(map[string]interface{}); hasInput { + if urlStr, ok := inputMap["url"].(string); ok { + url = urlStr + } else { + log.Printf("Warning: input.url is not a string in repo map") + } + } else if urlStr, ok := rm["url"].(string); ok { + url = urlStr + } else { + log.Printf("Warning: url is not a string in repo map") + } + if DeriveRepoFolderFromURL(url) != repoName { filteredRepos = append(filteredRepos, r) } else { diff --git a/components/backend/routes.go b/components/backend/routes.go index a0231ac39..c01b71797 100644 --- a/components/backend/routes.go +++ b/components/backend/routes.go @@ -35,6 +35,7 @@ func registerRoutes(r *gin.Engine) { api.POST("/projects/:projectName/agentic-sessions/:sessionName/github/token", handlers.MintSessionGitHubToken) + projectGroup := api.Group("/projects/:projectName", handlers.ValidateProjectContext()) { projectGroup.GET("/access", handlers.AccessCheck) diff --git a/components/backend/types/common.go b/components/backend/types/common.go index 
13745df0b..3c830b92c 100644 --- a/components/backend/types/common.go +++ b/components/backend/types/common.go @@ -120,6 +120,9 @@ const DefaultPaginationLimit = 20 // MaxPaginationLimit is the maximum allowed items per page const MaxPaginationLimit = 100 +// DefaultBranch is the default Git branch name used when no branch is specified +const DefaultBranch = "main" + // NormalizePaginationParams ensures pagination params are within valid bounds func NormalizePaginationParams(params *PaginationParams) { if params.Limit <= 0 { diff --git a/components/backend/types/session.go b/components/backend/types/session.go index 1ee23676b..8b345f684 100644 --- a/components/backend/types/session.go +++ b/components/backend/types/session.go @@ -1,5 +1,10 @@ package types +import ( + "fmt" + "strings" +) + // AgenticSession represents the structure of our custom resource type AgenticSession struct { APIVersion string `json:"apiVersion"` @@ -26,8 +31,21 @@ type AgenticSessionSpec struct { ActiveWorkflow *WorkflowSelection `json:"activeWorkflow,omitempty"` } -// SimpleRepo represents a simplified repository configuration +// SimpleRepo represents a repository configuration with support for both +// legacy (url/branch) and new (input/output/autoPush) formats type SimpleRepo struct { + // New structure (preferred) + Input *RepoLocation `json:"input,omitempty"` + Output *RepoLocation `json:"output,omitempty"` + AutoPush *bool `json:"autoPush,omitempty"` + + // Legacy structure (deprecated, for backwards compatibility) + URL string `json:"url,omitempty"` + Branch *string `json:"branch,omitempty"` +} + +// RepoLocation represents a git repository location (input source or output target) +type RepoLocation struct { URL string `json:"url"` Branch *string `json:"branch,omitempty"` } @@ -113,3 +131,96 @@ type Condition struct { LastTransitionTime string `json:"lastTransitionTime,omitempty"` ObservedGeneration int64 `json:"observedGeneration,omitempty"` } + +// NormalizeRepo converts a legacy repo format to the new input/output structure. +// If the repo already uses the new format, it returns the repo as-is. +// Legacy: {url: "...", branch: "..."} -> New: {input: {url: "...", branch: "..."}, autoPush: sessionDefaultAutoPush} +// The autoPush field is set to the session's default value (sessionDefaultAutoPush parameter). +// Returns an error if the repo has an empty URL. 
+func (r *SimpleRepo) NormalizeRepo(sessionDefaultAutoPush bool) (SimpleRepo, error) { + // If already using new format, validate and return as-is + if r.Input != nil { + if strings.TrimSpace(r.Input.URL) == "" { + return SimpleRepo{}, fmt.Errorf("cannot normalize repo with empty input.url") + } + + // Validate that output differs from input (if output is specified) + if r.Output != nil { + inputURL := strings.TrimSpace(r.Input.URL) + outputURL := strings.TrimSpace(r.Output.URL) + inputBranch := "" + outputBranch := "" + if r.Input.Branch != nil { + inputBranch = strings.TrimSpace(*r.Input.Branch) + } + if r.Output.Branch != nil { + outputBranch = strings.TrimSpace(*r.Output.Branch) + } + + // Output must differ from input in either URL or branch + if inputURL == outputURL && inputBranch == outputBranch { + return SimpleRepo{}, fmt.Errorf("output repository must differ from input (different URL or branch required)") + } + } + + return *r, nil + } + + // Validate legacy format before normalizing + if strings.TrimSpace(r.URL) == "" { + return SimpleRepo{}, fmt.Errorf("cannot normalize repo with empty url") + } + + // Convert legacy format to new format + normalized := SimpleRepo{ + Input: &RepoLocation{ + URL: r.URL, + Branch: r.Branch, + }, + AutoPush: BoolPtr(sessionDefaultAutoPush), + } + + return normalized, nil +} + +// ToMapForCR converts SimpleRepo to a map suitable for CustomResource spec.repos[] +func (r *SimpleRepo) ToMapForCR() map[string]interface{} { + m := make(map[string]interface{}) + + // Use new format if Input is defined + if r.Input != nil { + inputMap := map[string]interface{}{ + "url": r.Input.URL, + } + if r.Input.Branch != nil { + inputMap["branch"] = *r.Input.Branch + } + m["input"] = inputMap + + // Add output if defined + if r.Output != nil { + outputMap := map[string]interface{}{ + "url": r.Output.URL, + } + if r.Output.Branch != nil { + outputMap["branch"] = *r.Output.Branch + } + m["output"] = outputMap + } + + // Add autoPush flag + if r.AutoPush != nil { + m["autoPush"] = *r.AutoPush + } + } else { + // Legacy format - preserve for backward compatibility with un-normalized repos + // This path should only be reached for repos that haven't been normalized yet + // (e.g., when reading existing CRs created before the new format was introduced) + m["url"] = r.URL + if r.Branch != nil { + m["branch"] = *r.Branch + } + } + + return m +} diff --git a/components/backend/types/session_test.go b/components/backend/types/session_test.go new file mode 100644 index 000000000..444f3e4c8 --- /dev/null +++ b/components/backend/types/session_test.go @@ -0,0 +1,340 @@ +package types_test + +import ( + "testing" + + "ambient-code-backend/types" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestNormalizeRepo_LegacyFormat verifies that legacy format repos are converted to new format +func TestNormalizeRepo_LegacyFormat(t *testing.T) { + tests := []struct { + name string + repo types.SimpleRepo + sessionDefault bool + wantInputURL string + wantInputBr *string + wantAutoPush bool + }{ + { + name: "legacy with branch, session autoPush=false", + repo: types.SimpleRepo{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + }, + sessionDefault: false, + wantInputURL: "https://github.com/org/repo", + wantInputBr: types.StringPtr("main"), + wantAutoPush: false, + }, + { + name: "legacy with branch, session autoPush=true", + repo: types.SimpleRepo{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("develop"), + 
}, + sessionDefault: true, + wantInputURL: "https://github.com/org/repo", + wantInputBr: types.StringPtr("develop"), + wantAutoPush: true, + }, + { + name: "legacy without branch", + repo: types.SimpleRepo{ + URL: "https://github.com/org/repo", + }, + sessionDefault: false, + wantInputURL: "https://github.com/org/repo", + wantInputBr: nil, + wantAutoPush: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + normalized, err := tt.repo.NormalizeRepo(tt.sessionDefault) + require.NoError(t, err, "NormalizeRepo should not return error for valid repos") + + // Verify input structure was created + require.NotNil(t, normalized.Input, "Input should not be nil") + assert.Equal(t, tt.wantInputURL, normalized.Input.URL) + if tt.wantInputBr != nil { + require.NotNil(t, normalized.Input.Branch) + assert.Equal(t, *tt.wantInputBr, *normalized.Input.Branch) + } else { + assert.Nil(t, normalized.Input.Branch) + } + + // Verify autoPush was set from session default + require.NotNil(t, normalized.AutoPush) + assert.Equal(t, tt.wantAutoPush, *normalized.AutoPush) + + // Verify output is nil (legacy repos don't specify output) + assert.Nil(t, normalized.Output) + + // Verify normalized struct uses new format fields only (not legacy fields) + assert.Empty(t, normalized.URL, "normalized struct should not use legacy URL field") + assert.Nil(t, normalized.Branch, "normalized struct should not use legacy Branch field") + }) + } +} + +// TestNormalizeRepo_NewFormat verifies that new format repos are returned unchanged +func TestNormalizeRepo_NewFormat(t *testing.T) { + tests := []struct { + name string + repo types.SimpleRepo + }{ + { + name: "new format with input only", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + }, + AutoPush: types.BoolPtr(true), + }, + }, + { + name: "new format with input and output", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + }, + Output: &types.RepoLocation{ + URL: "https://github.com/user/fork", + Branch: types.StringPtr("feature"), + }, + AutoPush: types.BoolPtr(false), + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Session default should be ignored for repos already in new format + normalized, err := tt.repo.NormalizeRepo(true) + require.NoError(t, err, "NormalizeRepo should not return error for valid repos") + + // Should be identical to original + assert.Equal(t, tt.repo.Input, normalized.Input) + assert.Equal(t, tt.repo.Output, normalized.Output) + assert.Equal(t, tt.repo.AutoPush, normalized.AutoPush) + }) + } +} + +// TestToMapForCR_NewFormat verifies conversion to CR map for new format repos +func TestToMapForCR_NewFormat(t *testing.T) { + tests := []struct { + name string + repo types.SimpleRepo + validate func(t *testing.T, m map[string]interface{}) + }{ + { + name: "new format with all fields", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + }, + Output: &types.RepoLocation{ + URL: "https://github.com/user/fork", + Branch: types.StringPtr("feature"), + }, + AutoPush: types.BoolPtr(true), + }, + validate: func(t *testing.T, m map[string]interface{}) { + input := m["input"].(map[string]interface{}) + assert.Equal(t, "https://github.com/org/repo", input["url"]) + assert.Equal(t, "main", input["branch"]) + + output := m["output"].(map[string]interface{}) + 
assert.Equal(t, "https://github.com/user/fork", output["url"]) + assert.Equal(t, "feature", output["branch"]) + + assert.Equal(t, true, m["autoPush"]) + }, + }, + { + name: "new format input only, no branches", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + }, + AutoPush: types.BoolPtr(false), + }, + validate: func(t *testing.T, m map[string]interface{}) { + input := m["input"].(map[string]interface{}) + assert.Equal(t, "https://github.com/org/repo", input["url"]) + _, hasBranch := input["branch"] + assert.False(t, hasBranch, "branch should not be present when nil") + + _, hasOutput := m["output"] + assert.False(t, hasOutput, "output should not be present when nil") + + assert.Equal(t, false, m["autoPush"]) + }, + }, + { + name: "new format with nil autoPush", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + }, + }, + validate: func(t *testing.T, m map[string]interface{}) { + _, hasAutoPush := m["autoPush"] + assert.False(t, hasAutoPush, "autoPush should not be present when nil") + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + m := tt.repo.ToMapForCR() + tt.validate(t, m) + }) + } +} + +// TestToMapForCR_LegacyFormat verifies conversion preserves legacy format +func TestToMapForCR_LegacyFormat(t *testing.T) { + repo := types.SimpleRepo{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + } + + m := repo.ToMapForCR() + + assert.Equal(t, "https://github.com/org/repo", m["url"]) + assert.Equal(t, "main", m["branch"]) + + // New format fields should not be present + _, hasInput := m["input"] + assert.False(t, hasInput, "input should not be present for legacy format") +} + +// TestNormalizeAndConvert_RoundTrip verifies normalize + convert workflow +func TestNormalizeAndConvert_RoundTrip(t *testing.T) { + // Start with legacy format + legacy := types.SimpleRepo{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + } + + // Normalize with session autoPush=true + normalized, err := legacy.NormalizeRepo(true) + require.NoError(t, err, "NormalizeRepo should not return error for valid repo") + + // Convert to CR map + m := normalized.ToMapForCR() + + // Verify map has new format structure + input := m["input"].(map[string]interface{}) + assert.Equal(t, "https://github.com/org/repo", input["url"]) + assert.Equal(t, "main", input["branch"]) + assert.Equal(t, true, m["autoPush"]) + + // Legacy fields should not be in the map + _, hasURL := m["url"] + assert.False(t, hasURL, "legacy url should not be present") +} + +// TestBackwardCompatibility_LegacyRepoFields ensures legacy fields still accessible +func TestBackwardCompatibility_LegacyRepoFields(t *testing.T) { + repo := types.SimpleRepo{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("develop"), + } + + // Legacy fields should still be accessible + assert.Equal(t, "https://github.com/org/repo", repo.URL) + require.NotNil(t, repo.Branch) + assert.Equal(t, "develop", *repo.Branch) + + // New fields should be nil for legacy repos + assert.Nil(t, repo.Input) + assert.Nil(t, repo.Output) + assert.Nil(t, repo.AutoPush) +} + +// TestNormalizeRepo_ErrorCases verifies that NormalizeRepo returns errors for invalid repos +func TestNormalizeRepo_ErrorCases(t *testing.T) { + tests := []struct { + name string + repo types.SimpleRepo + expectedError string + }{ + { + name: "legacy format with empty URL", + repo: types.SimpleRepo{ + URL: "", + }, + expectedError: "cannot 
normalize repo with empty url", + }, + { + name: "legacy format with whitespace-only URL", + repo: types.SimpleRepo{ + URL: " ", + }, + expectedError: "cannot normalize repo with empty url", + }, + { + name: "new format with empty input URL", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "", + }, + }, + expectedError: "cannot normalize repo with empty input.url", + }, + { + name: "new format with whitespace-only input URL", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: " ", + }, + }, + expectedError: "cannot normalize repo with empty input.url", + }, + { + name: "output same as input (same URL and branch)", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + }, + Output: &types.RepoLocation{ + URL: "https://github.com/org/repo", + Branch: types.StringPtr("main"), + }, + }, + expectedError: "output repository must differ from input", + }, + { + name: "output same URL as input (both nil branches)", + repo: types.SimpleRepo{ + Input: &types.RepoLocation{ + URL: "https://github.com/org/repo", + }, + Output: &types.RepoLocation{ + URL: "https://github.com/org/repo", + }, + }, + expectedError: "output repository must differ from input", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := tt.repo.NormalizeRepo(false) + require.Error(t, err) + assert.Contains(t, err.Error(), tt.expectedError) + }) + } +} diff --git a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx index 5724a08a7..5b7baa9ec 100644 --- a/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx +++ b/components/frontend/src/app/projects/[name]/sessions/[sessionName]/components/accordions/repositories-accordion.tsx @@ -5,11 +5,8 @@ import { GitBranch, X, Link, Loader2, CloudUpload } from "lucide-react"; import { AccordionItem, AccordionTrigger, AccordionContent } from "@/components/ui/accordion"; import { Badge } from "@/components/ui/badge"; import { Button } from "@/components/ui/button"; - -type Repository = { - url: string; - branch?: string; -}; +import type { SessionRepo } from "@/types/agentic-session"; +import { getRepoDisplayName } from "@/utils/repo"; type UploadedFile = { name: string; @@ -18,7 +15,7 @@ type UploadedFile = { }; type RepositoriesAccordionProps = { - repositories?: Repository[]; + repositories?: SessionRepo[]; uploadedFiles?: UploadedFile[]; onAddRepository: () => void; onRemoveRepository: (repoName: string) => void; @@ -78,7 +75,7 @@ export function RepositoriesAccordion({

Add additional context to improve AI responses.

- + {/* Context Items List (Repos + Uploaded Files) */} {totalContextItems === 0 ? (
@@ -95,7 +92,8 @@ export function RepositoriesAccordion({
            {/* Repositories */}
            {repositories.map((repo, idx) => {
-              const repoName = repo.url.split('/').pop()?.replace('.git', '') || `repo-${idx}`;
+              const repoName = getRepoDisplayName(repo, idx);
+              const repoUrl = repo.input.url;
               const isRemoving = removingRepo === repoName;
               return (
@@ -103,7 +101,7 @@ export function RepositoriesAccordion({
{repoName}
-
{repo.url}
+
{repoUrl}
-
- - + }}> + Cancel + + +
diff --git a/components/frontend/src/app/projects/[name]/sessions/new/repository-list.tsx b/components/frontend/src/app/projects/[name]/sessions/new/repository-list.tsx index 7e4a18452..ea7d104f3 100644 --- a/components/frontend/src/app/projects/[name]/sessions/new/repository-list.tsx +++ b/components/frontend/src/app/projects/[name]/sessions/new/repository-list.tsx @@ -2,17 +2,13 @@ import { Button } from "@/components/ui/button"; import { Badge } from "@/components/ui/badge"; -import { Edit2, Plus, Trash2 } from "lucide-react"; +import { Edit2, Plus, Trash2, GitBranch } from "lucide-react"; import { EmptyState } from "@/components/empty-state"; import { FolderGit2 } from "lucide-react"; - -type Repo = { - url: string; - branch?: string; -}; +import type { SessionRepo } from "@/types/agentic-session"; type RepositoryListProps = { - repos: Repo[]; + repos: SessionRepo[]; onAddRepo: () => void; onEditRepo: (index: number) => void; onRemoveRepo: (index: number) => void; @@ -57,42 +53,65 @@ export function RepositoryList({
- {repos.map((repo, idx) => ( -
-
-
-
- {repo.url} - {repo.branch && ( - - {repo.branch} - - )} - {idx === 0 && ( - Working Directory + {repos.map((repo, idx) => { + const inputUrl = repo.input?.url || ""; + const inputBranch = repo.input?.branch; + const hasOutput = !!repo.output?.url; + const autoPush = repo.autoPush ?? false; + + return ( +
+
+
+
+ {inputUrl} + {inputBranch && ( + + + {inputBranch} + + )} + {idx === 0 && ( + Working Directory + )} + {autoPush && ( + + Auto-push + + )} +
+ {hasOutput && ( +
+ → Push to: + {repo.output?.url} + {repo.output?.branch && ( + + {repo.output.branch} + + )} +
)}
-
-
- - +
+ + +
-
- ))} + ); + })}

-            The first repo ({repos[0]?.url || "selected"}) is Claude's working directory. Other
+            The first repo ({repos[0]?.input?.url || "selected"}) is Claude's working directory. Other
             repos are available as add_dirs.
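(Not part of the patch.) For context, a minimal sketch of the structured repo values this list now renders, assuming the SessionRepo/RepoLocation types added in the agentic-session.ts hunk below; URLs are placeholders mirroring the backend tests:

import type { SessionRepo } from "@/types/agentic-session";

// Fork workflow: clone the upstream repo, push results to a fork on a feature branch.
// autoPush renders as the "Auto-push" badge; output renders as the "→ Push to:" row.
const forkFlow: SessionRepo = {
  input: { url: "https://github.com/org/repo", branch: "main" },
  output: { url: "https://github.com/user/fork", branch: "feature" },
  autoPush: true,
};

// Input-only repo: no output target; the list treats a missing autoPush as false.
const readOnly: SessionRepo = {
  input: { url: "https://github.com/org/repo" },
};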

diff --git a/components/frontend/src/types/agentic-session.ts b/components/frontend/src/types/agentic-session.ts index 4deb326cb..8a76fad27 100644 --- a/components/frontend/src/types/agentic-session.ts +++ b/components/frontend/src/types/agentic-session.ts @@ -12,12 +12,20 @@ export type GitRepository = { branch?: string; }; -// Simplified multi-repo session mapping -export type SessionRepo = { +// Repository input/output configuration for new structured format +export type RepoLocation = { url: string; branch?: string; }; +// Multi-repo session mapping - structured format with input/output/autoPush +export type SessionRepo = { + name?: string; + input: RepoLocation; + output?: RepoLocation; + autoPush?: boolean; +}; + export type AgenticSessionSpec = { initialPrompt?: string; llmSettings: LLMSettings; @@ -41,6 +49,7 @@ export type ReconciledRepo = { name?: string; status?: "Cloning" | "Ready" | "Failed"; clonedAt?: string; + pushed?: boolean; // Whether autoPush successfully pushed changes }; export type ReconciledWorkflow = { @@ -184,6 +193,10 @@ export type CreateAgenticSessionRequest = { interactive?: boolean; // Multi-repo support repos?: SessionRepo[]; + /** + * @deprecated Use per-repo autoPush flags in SessionRepo instead. + * This global flag is kept for backward compatibility only. + */ autoPushOnComplete?: boolean; labels?: Record; annotations?: Record; diff --git a/components/frontend/src/types/api/sessions.ts b/components/frontend/src/types/api/sessions.ts index 2140165d5..6692f581b 100644 --- a/components/frontend/src/types/api/sessions.ts +++ b/components/frontend/src/types/api/sessions.ts @@ -3,6 +3,22 @@ * These types align with the backend Go structs and Kubernetes CRD */ +// Import shared types from agentic-session to avoid duplication +import type { + SessionRepo, + RepoLocation, + AgenticSessionPhase, + LLMSettings, +} from '../agentic-session'; + +// Re-export for convenience +export type { + SessionRepo, + RepoLocation, + AgenticSessionPhase, + LLMSettings, +}; + export type UserContext = { userId: string; displayName: string; @@ -20,26 +36,6 @@ export type ResourceOverrides = { priorityClass?: string; }; -export type AgenticSessionPhase = - | 'Pending' - | 'Creating' - | 'Running' - | 'Stopping' - | 'Stopped' - | 'Completed' - | 'Failed'; - -export type LLMSettings = { - model: string; - temperature: number; - maxTokens: number; -}; - -export type SessionRepo = { - url: string; - branch?: string; -}; - export type AgenticSessionSpec = { initialPrompt?: string; llmSettings: LLMSettings; @@ -62,6 +58,7 @@ export type ReconciledRepo = { name?: string; status?: 'Cloning' | 'Ready' | 'Failed'; clonedAt?: string; + pushed?: boolean; // Whether autoPush successfully pushed changes }; export type ReconciledWorkflow = { @@ -117,6 +114,10 @@ export type CreateAgenticSessionRequest = { environmentVariables?: Record; interactive?: boolean; repos?: SessionRepo[]; + /** + * @deprecated Use per-repo autoPush flags in SessionRepo instead. + * This global flag is kept for backward compatibility only. 
+ */ autoPushOnComplete?: boolean; userContext?: UserContext; labels?: Record; diff --git a/components/frontend/src/utils/repo.ts b/components/frontend/src/utils/repo.ts new file mode 100644 index 000000000..eb84d3084 --- /dev/null +++ b/components/frontend/src/utils/repo.ts @@ -0,0 +1,73 @@ +import type { SessionRepo } from '@/types/agentic-session'; + +/** + * Default branch name used when not specified + */ +export const DEFAULT_BRANCH = 'main'; + +/** + * Extract a display-friendly name from a SessionRepo. + * Uses repo.name if available, otherwise derives from repo.input.url. + * + * @param repo - The repository configuration + * @param fallbackIndex - Index to use if name cannot be derived + * @returns Display name for the repository + */ +export function getRepoDisplayName(repo: SessionRepo, fallbackIndex: number): string { + if (repo.name) { + return repo.name; + } + + try { + // Extract repository name from URL (e.g., "https://github.com/org/repo.git" -> "repo") + const match = repo.input.url.match(/\/([^/]+?)(\.git)?$/); + return match ? match[1] : `repo-${fallbackIndex}`; + } catch { + return `repo-${fallbackIndex}`; + } +} + +/** + * Check if a repository has a valid output configuration that differs from input. + * Used to determine if push functionality should be available. + * + * @param repo - The repository configuration + * @returns True if repo has a different output URL or branch + */ +export function hasValidOutputConfig(repo: SessionRepo): boolean { + if (!repo.output?.url) { + return false; + } + + // Output is valid if URL is different OR branch is different + return ( + repo.output.url !== repo.input.url || + (repo.output.branch || DEFAULT_BRANCH) !== (repo.input.branch || DEFAULT_BRANCH) + ); +} + +/** + * Sanitize a repository URL for display by redacting credentials. + * Protects against token exposure in UI by replacing username/password with asterisks. + * + * Examples: + * - "https://token@github.com/org/repo" -> "https://***@github.com/org/repo" + * - "https://user:pass@gitlab.com/repo" -> "https://***:***@gitlab.com/repo" + * - "https://github.com/org/repo" -> "https://github.com/org/repo" (unchanged) + * + * @param url - The repository URL to sanitize + * @returns URL with credentials redacted, or original URL if parsing fails + */ +export function sanitizeUrlForDisplay(url: string): string { + try { + const parsed = new URL(url); + if (parsed.username || parsed.password) { + parsed.username = '***'; + parsed.password = '***'; + } + return parsed.toString(); + } catch { + // If URL parsing fails, return as-is (not a valid URL) + return url; + } +} diff --git a/components/manifests/base/crds/agenticsessions-crd.yaml b/components/manifests/base/crds/agenticsessions-crd.yaml index d0c38ed4f..452207faf 100644 --- a/components/manifests/base/crds/agenticsessions-crd.yaml +++ b/components/manifests/base/crds/agenticsessions-crd.yaml @@ -2,6 +2,11 @@ apiVersion: apiextensions.k8s.io/v1 kind: CustomResourceDefinition metadata: name: agenticsessions.vteam.ambient-code + # Migration Annotations: + # - ambient-code.io/repos-migrated: "v2" - Marks sessions migrated from legacy repo format. + # Added by operator during startup migration. Prevents re-migration on subsequent restarts. + # - ambient-code.io/migration-skipped-active: "Running|Creating" - Marks active sessions skipped during migration. + # Allows tracking which sessions need migration once they complete. Value indicates phase at skip time. 
spec: group: vteam.ambient-code versions: @@ -23,16 +28,40 @@ spec: description: "List of Git repositories to clone and work with" items: type: object - required: - - url properties: + # New structure (preferred) + input: + type: object + description: "Input repository configuration (source to clone)" + properties: + url: + type: string + description: "Git repository URL" + branch: + type: string + description: "Branch to checkout" + default: "main" + output: + type: object + description: "Output repository configuration (target for push). If not specified, pushes to input repo." + properties: + url: + type: string + description: "Git repository URL (typically a fork)" + branch: + type: string + description: "Branch to push to" + autoPush: + type: boolean + default: false + description: "When true, automatically commit and push changes to this repository after session completion. Overrides session-level autoPushOnComplete for this repo." + # Legacy structure (deprecated, for backwards compatibility) url: type: string - description: "Git repository URL" + description: "[DEPRECATED] Git repository URL. Use 'input.url' instead." branch: type: string - description: "Branch to checkout" - default: "main" + description: "[DEPRECATED] Branch to checkout. Use 'input.branch' instead." interactive: type: boolean description: "When true, run session in interactive chat mode using inbox/outbox files" diff --git a/components/operator/internal/handlers/migration.go b/components/operator/internal/handlers/migration.go new file mode 100644 index 000000000..3e148fa30 --- /dev/null +++ b/components/operator/internal/handlers/migration.go @@ -0,0 +1,395 @@ +// Package handlers provides migration functions for upgrading AgenticSession resources. +package handlers + +import ( + "context" + "fmt" + "log" + + "ambient-code-operator/internal/config" + "ambient-code-operator/internal/types" + + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" +) + +const ( + // MigrationAnnotation marks sessions that have been migrated to v2 repo format + MigrationAnnotation = "ambient-code.io/repos-migrated" + // MigrationVersion is the current migration version + MigrationVersion = "v2" + + // Event reasons for migration tracking + eventReasonMigrationStarted = "MigrationStarted" + eventReasonMigrationCompleted = "MigrationCompleted" + eventReasonMigrationFailed = "MigrationFailed" +) + +// MigrateAllSessions migrates all AgenticSessions from legacy repo format to v2 format. +// This function is idempotent and safe to run multiple times. +// +// Legacy format: repos[].{url, branch} +// New format: repos[].{input: {url, branch}, autoPush: false} +// +// SECURITY MODEL: This migration runs at operator startup using the operator's service +// account privileges. This is intentional and safe because: +// 1. Migration occurs before any user requests are processed (operator startup only) +// 2. The operator is reconciling existing CRs it already has RBAC access to +// 3. No new user data is being created or accessed +// 4. Migration only updates the repo structure format, not repository content +// 5. Active sessions (Running/Creating) are skipped to avoid interfering with in-flight work +// +// Returns nil even if individual migrations fail (failures are logged). +// This allows operator startup to continue and retry failed sessions on next restart. 
+func MigrateAllSessions() error { + ctx := context.Background() + gvr := types.GetAgenticSessionResource() + + // List all AgenticSessions across all namespaces + log.Println("========================================") + log.Println("Starting AgenticSession v2 migration...") + log.Println("========================================") + sessionList, err := config.DynamicClient.Resource(gvr).List(ctx, metav1.ListOptions{}) + if err != nil { + return fmt.Errorf("failed to list AgenticSessions: %w", err) + } + + totalSessions := len(sessionList.Items) + migratedCount := 0 + skippedCount := 0 + errorCount := 0 + activeSkippedCount := 0 + + log.Printf("Found %d AgenticSessions to process", totalSessions) + + for _, session := range sessionList.Items { + name := session.GetName() + namespace := session.GetNamespace() + + // Check if session is currently active + status, found, _ := unstructured.NestedMap(session.Object, "status") + if found && status != nil { + phase, found, _ := unstructured.NestedString(status, "phase") + if found && (phase == "Running" || phase == "Creating") { + // Add annotation to track that migration was skipped due to active status + // This allows us to identify sessions that need migration once they complete + annotations := session.GetAnnotations() + if annotations == nil { + annotations = make(map[string]string) + } + // Only add if not already present + if _, exists := annotations["ambient-code.io/migration-skipped-active"]; !exists { + annotations["ambient-code.io/migration-skipped-active"] = phase + session.SetAnnotations(annotations) + + gvr := types.GetAgenticSessionResource() + if _, err := config.DynamicClient.Resource(gvr).Namespace(namespace).Update(context.TODO(), &session, metav1.UpdateOptions{}); err != nil { + log.Printf("Warning: failed to add skip annotation to active session %s/%s: %v", namespace, name, err) + } else { + log.Printf("Marked active session %s/%s (phase: %s) as skipped for migration", namespace, name, phase) + } + } + activeSkippedCount++ + continue + } + } + + // Check if already migrated + annotations := session.GetAnnotations() + if annotations != nil { + if version, exists := annotations[MigrationAnnotation]; exists { + if version == MigrationVersion { + skippedCount++ + continue + } + } + } + + // Check if session has repos that need migration + if needsMigration, err := sessionNeedsMigration(&session); err != nil { + log.Printf("ERROR: Failed to check migration status for session %s/%s: %v", namespace, name, err) + recordMigrationEvent(&session, corev1.EventTypeWarning, eventReasonMigrationFailed, + fmt.Sprintf("Failed to check migration status: %v", err)) + errorCount++ + continue + } else if !needsMigration { + // Session already has new format (no migration annotation but already using v2 format) + if err := addMigrationAnnotation(&session, namespace, name); err != nil { + log.Printf("ERROR: Failed to add migration annotation to session %s/%s: %v", namespace, name, err) + errorCount++ + } else { + skippedCount++ + } + continue + } + + // Migrate the session + if err := migrateSession(&session, namespace, name); err != nil { + log.Printf("ERROR: Failed to migrate session %s/%s: %v", namespace, name, err) + recordMigrationEvent(&session, corev1.EventTypeWarning, eventReasonMigrationFailed, + fmt.Sprintf("Failed to migrate to v2 repo format: %v", err)) + errorCount++ + continue + } + + migratedCount++ + log.Printf("Successfully migrated session %s/%s to v2 repo format", namespace, name) + recordMigrationEvent(&session, 
corev1.EventTypeNormal, eventReasonMigrationCompleted, + "Successfully migrated to v2 repo format") + } + + log.Println("========================================") + log.Println("Migration Summary:") + log.Printf(" Total sessions processed: %d", totalSessions) + log.Printf(" Successfully migrated: %d", migratedCount) + log.Printf(" Already migrated (skipped): %d", skippedCount) + log.Printf(" Active sessions (skipped): %d", activeSkippedCount) + log.Printf(" Errors: %d", errorCount) + + if errorCount > 0 { + log.Printf("⚠️ Migration completed with %d errors - failed sessions will retry on next restart", errorCount) + } else { + log.Println("✅ Migration completed successfully") + } + log.Println("========================================") + + // Note: We return nil even with errors to allow operator startup to continue. + // Individual session errors are logged above and don't prevent other sessions + // from being migrated. Failed sessions will be retried on next operator restart. + return nil +} + +// sessionNeedsMigration checks if a session has repos in legacy format. +// Returns true if migration is needed, false otherwise. +// NOTE: Active sessions are filtered out before this function is called. +func sessionNeedsMigration(session *unstructured.Unstructured) (bool, error) { + spec, found, err := unstructured.NestedMap(session.Object, "spec") + if err != nil || !found { + return false, nil // No spec or error reading it - nothing to migrate + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil { + return false, fmt.Errorf("failed to read repos: %w", err) + } + if !found || len(repos) == 0 { + return false, nil // No repos - nothing to migrate + } + + // Defensive type checking: validate first element type before processing + // Catches type errors early with clearer error messages + if len(repos) > 0 { + if _, ok := repos[0].(map[string]interface{}); !ok { + return false, fmt.Errorf("repos[0]: invalid type %T (expected map)", repos[0]) + } + } + + // Check all repos to detect if any are in legacy format + // We check all repos (not just the first) to handle edge cases where + // someone manually edited a CR and created mixed formats + for i, repoInterface := range repos { + repo, ok := repoInterface.(map[string]interface{}) + if !ok { + return false, fmt.Errorf("repo[%d]: invalid format (not a map)", i) + } + + // Legacy format has "url" field directly + // New format has "input" object with "url" field + _, hasURL := repo["url"] + _, hasInput := repo["input"] + + if hasURL && !hasInput { + return true, nil // Found at least one legacy repo + } else if hasInput { + // New format - continue checking other repos + continue + } else { + // Neither url nor input - invalid repo + return false, fmt.Errorf("repo[%d]: missing both 'url' and 'input' fields", i) + } + } + + return false, nil // All repos are in new format +} + +// migrateSession converts a session's repos from legacy to v2 format and updates the CR. 
+func migrateSession(session *unstructured.Unstructured, namespace, name string) error { + ctx := context.Background() + gvr := types.GetAgenticSessionResource() + + spec, found, err := unstructured.NestedMap(session.Object, "spec") + if err != nil || !found { + return fmt.Errorf("failed to read spec: %w", err) + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil { + return fmt.Errorf("failed to read repos: %w", err) + } + if !found || len(repos) == 0 { + return nil // No repos to migrate + } + + // Read session-level autoPushOnComplete to use as default for per-repo autoPush + // This preserves existing behavior when migrating from v1 to v2 format. + // Defaults to false if the field is missing or has an invalid type (safe default). + defaultAutoPush := false + if autoPushOnComplete, found, err := unstructured.NestedBool(spec, "autoPushOnComplete"); err == nil && found { + defaultAutoPush = autoPushOnComplete + } + + // Convert each repo from legacy to new format + // Handle mixed v1/v2 format (edge case from manual CR editing) + migratedRepos := make([]interface{}, 0, len(repos)) + for i, repoInterface := range repos { + repo, ok := repoInterface.(map[string]interface{}) + if !ok { + return fmt.Errorf("repo[%d] is not a map", i) + } + + // Check if repo is already in v2 format + _, hasInput := repo["input"] + if hasInput { + // Already v2 format, preserve as-is + migratedRepos = append(migratedRepos, repo) + continue + } + + // Migrate from v1 format + url, hasURL := repo["url"].(string) + if !hasURL { + return fmt.Errorf("repo[%d] missing url field", i) + } + + branch, _ := repo["branch"].(string) // Optional field + + // Create new format + newRepo := map[string]interface{}{ + "input": map[string]interface{}{ + "url": url, + }, + "autoPush": defaultAutoPush, // Use session-level autoPushOnComplete as default + } + + if branch != "" { + inputMap, ok := newRepo["input"].(map[string]interface{}) + if !ok { + return fmt.Errorf("migration failed: input field at repo[%d] is not a map", i) + } + inputMap["branch"] = branch + } + + // Preserve name if present + if repoName, hasName := repo["name"].(string); hasName { + newRepo["name"] = repoName + } + + migratedRepos = append(migratedRepos, newRepo) + } + + // Validate migrated data before updating + if len(migratedRepos) == 0 { + return fmt.Errorf("migration produced empty repos list") + } + + for i, repoInterface := range migratedRepos { + repo, ok := repoInterface.(map[string]interface{}) + if !ok { + return fmt.Errorf("validation failed: migrated repo[%d] is not a map", i) + } + + input, ok := repo["input"].(map[string]interface{}) + if !ok { + return fmt.Errorf("validation failed: migrated repo[%d] missing input object", i) + } + + if _, ok := input["url"].(string); !ok { + return fmt.Errorf("validation failed: migrated repo[%d] missing input.url", i) + } + + if _, ok := repo["autoPush"].(bool); !ok { + return fmt.Errorf("validation failed: migrated repo[%d] missing autoPush field", i) + } + } + + // Update spec with migrated repos + if err := unstructured.SetNestedSlice(spec, migratedRepos, "repos"); err != nil { + return fmt.Errorf("failed to set migrated repos: %w", err) + } + + if err := unstructured.SetNestedMap(session.Object, spec, "spec"); err != nil { + return fmt.Errorf("failed to set spec: %w", err) + } + + // Add migration annotation + annotations := session.GetAnnotations() + if annotations == nil { + annotations = make(map[string]string) + } + annotations[MigrationAnnotation] = 
MigrationVersion + session.SetAnnotations(annotations) + + // Update the CR + _, err = config.DynamicClient.Resource(gvr).Namespace(namespace).Update(ctx, session, metav1.UpdateOptions{}) + if err != nil { + return fmt.Errorf("failed to update CR: %w", err) + } + + return nil +} + +// addMigrationAnnotation adds the migration annotation to a session that's already in v2 format. +func addMigrationAnnotation(session *unstructured.Unstructured, namespace, name string) error { + ctx := context.Background() + gvr := types.GetAgenticSessionResource() + + annotations := session.GetAnnotations() + if annotations == nil { + annotations = make(map[string]string) + } + annotations[MigrationAnnotation] = MigrationVersion + session.SetAnnotations(annotations) + + _, err := config.DynamicClient.Resource(gvr).Namespace(namespace).Update(ctx, session, metav1.UpdateOptions{}) + if err != nil { + return fmt.Errorf("failed to add annotation: %w", err) + } + + return nil +} + +// recordMigrationEvent records a Kubernetes Event for migration tracking. +// Events appear in `kubectl describe agenticsession` output. +func recordMigrationEvent(session *unstructured.Unstructured, eventType, reason, message string) { + ctx := context.Background() + + event := &corev1.Event{ + ObjectMeta: metav1.ObjectMeta{ + Name: fmt.Sprintf("%s.%s", session.GetName(), metav1.Now().Format("20060102150405")), + Namespace: session.GetNamespace(), + }, + InvolvedObject: corev1.ObjectReference{ + Kind: session.GetKind(), + Namespace: session.GetNamespace(), + Name: session.GetName(), + UID: session.GetUID(), + APIVersion: session.GetAPIVersion(), + }, + Reason: reason, + Message: message, + Type: eventType, + Source: corev1.EventSource{ + Component: "agentic-operator", + }, + FirstTimestamp: metav1.Now(), + LastTimestamp: metav1.Now(), + Count: 1, + } + + _, err := config.K8sClient.CoreV1().Events(session.GetNamespace()).Create(ctx, event, metav1.CreateOptions{}) + if err != nil { + // Don't fail migration if event recording fails + log.Printf("Warning: failed to record migration event for %s/%s: %v", session.GetNamespace(), session.GetName(), err) + } +} diff --git a/components/operator/internal/handlers/migration_test.go b/components/operator/internal/handlers/migration_test.go new file mode 100644 index 000000000..a9c86a512 --- /dev/null +++ b/components/operator/internal/handlers/migration_test.go @@ -0,0 +1,1016 @@ +package handlers_test + +import ( + "context" + "testing" + + "ambient-code-operator/internal/config" + "ambient-code-operator/internal/handlers" + "ambient-code-operator/internal/types" + + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/apimachinery/pkg/runtime/schema" + "k8s.io/client-go/dynamic/fake" + k8sfake "k8s.io/client-go/kubernetes/fake" +) + +// setupTestDynamicClient initializes a fake dynamic client for testing with custom resource support +func setupTestDynamicClient(objects ...runtime.Object) { + scheme := runtime.NewScheme() + + // Register the AgenticSession resource type + // We need to use NewSimpleDynamicClientWithCustomListKinds to support List operations + gvrToListKind := map[schema.GroupVersionResource]string{ + types.GetAgenticSessionResource(): "AgenticSessionList", + } + + config.DynamicClient = fake.NewSimpleDynamicClientWithCustomListKinds(scheme, gvrToListKind, objects...) 
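	// For reference, types.GetAgenticSessionResource() is assumed to resolve to the
	// GroupVersionResource matching the apiVersion used by the fixtures below.
	// A minimal sketch only; the resource plural is an assumption:
	//
	//	func GetAgenticSessionResource() schema.GroupVersionResource {
	//		return schema.GroupVersionResource{
	//			Group:    "vteam.ambient-code",
	//			Version:  "v1alpha1",
	//			Resource: "agenticsessions", // plural assumed
	//		}
	//	}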
+ + // Initialize fake K8sClient for Event recording + config.K8sClient = k8sfake.NewSimpleClientset() +} + +// createLegacySession creates an AgenticSession with legacy repo format +func createLegacySession(name, namespace string) *unstructured.Unstructured { + return &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": name, + "namespace": namespace, + }, + "spec": map[string]interface{}{ + "repos": []interface{}{ + map[string]interface{}{ + "url": "https://github.com/org/repo1.git", + "branch": "main", + }, + map[string]interface{}{ + "url": "https://github.com/org/repo2.git", + "branch": "develop", + "name": "secondary-repo", + }, + }, + }, + }, + } +} + +// createV2Session creates an AgenticSession with v2 repo format +func createV2Session(name, namespace string, withAnnotation bool) *unstructured.Unstructured { + session := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": name, + "namespace": namespace, + }, + "spec": map[string]interface{}{ + "repos": []interface{}{ + map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo.git", + "branch": "main", + }, + "autoPush": true, + }, + }, + }, + }, + } + + if withAnnotation { + session.SetAnnotations(map[string]string{ + handlers.MigrationAnnotation: handlers.MigrationVersion, + }) + } + + return session +} + +// createSessionWithoutRepos creates an AgenticSession with no repos +func createSessionWithoutRepos(name, namespace string) *unstructured.Unstructured { + return &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": name, + "namespace": namespace, + }, + "spec": map[string]interface{}{}, + }, + } +} + +func TestMigrateAllSessions_NoSessions(t *testing.T) { + setupTestDynamicClient() + + err := handlers.MigrateAllSessions() + if err != nil { + t.Errorf("MigrateAllSessions() with no sessions should not error, got: %v", err) + } +} + +func TestMigrateAllSessions_SingleLegacySession(t *testing.T) { + session := createLegacySession("test-session", "default") + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify the session was migrated + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "test-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get updated session: %v", err) + } + + // Check migration annotation + annotations := updated.GetAnnotations() + if annotations == nil { + t.Fatal("Expected annotations to be set") + } + if annotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Errorf("Expected migration annotation %s, got: %s", handlers.MigrationVersion, annotations[handlers.MigrationAnnotation]) + } + + // Check repo format + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if err != nil || !found { + t.Fatal("Failed to get spec") + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil || !found { + t.Fatal("Failed to get repos") + } + + if len(repos) != 2 { + t.Fatalf("Expected 2 repos, got %d", len(repos)) + } + 
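	// The field-by-field checks below assume each migrated entry now looks roughly
	// like this (illustrative sketch, not compared verbatim):
	//
	//	map[string]interface{}{
	//		"input":    map[string]interface{}{"url": "…", "branch": "…"},
	//		"autoPush": false,            // no session-level autoPushOnComplete, so the safe default
	//		"name":     "secondary-repo", // only where the legacy repo carried a name
	//	}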
+ // Verify first repo + repo1, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + input1, found, err := unstructured.NestedMap(repo1, "input") + if err != nil || !found { + t.Fatal("Expected repo to have 'input' field") + } + + url1, found, err := unstructured.NestedString(input1, "url") + if err != nil || !found { + t.Fatal("Expected input to have 'url' field") + } + if url1 != "https://github.com/org/repo1.git" { + t.Errorf("Expected url 'https://github.com/org/repo1.git', got: %s", url1) + } + + branch1, found, err := unstructured.NestedString(input1, "branch") + if err != nil || !found { + t.Fatal("Expected input to have 'branch' field") + } + if branch1 != "main" { + t.Errorf("Expected branch 'main', got: %s", branch1) + } + + autoPush1, found, err := unstructured.NestedBool(repo1, "autoPush") + if err != nil || !found { + t.Fatal("Expected repo to have 'autoPush' field") + } + if autoPush1 { + t.Error("Expected autoPush to be false by default") + } + + // Verify second repo (has name field) + repo2, ok := repos[1].(map[string]interface{}) + if !ok { + t.Fatal("Repo 2 is not a map") + } + + name2, found, err := unstructured.NestedString(repo2, "name") + if err != nil || !found { + t.Fatal("Expected repo 2 to have 'name' field") + } + if name2 != "secondary-repo" { + t.Errorf("Expected name 'secondary-repo', got: %s", name2) + } +} + +func TestMigrateAllSessions_AlreadyMigrated(t *testing.T) { + session := createV2Session("test-session", "default", true) + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify session was not modified (still has annotation) + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "test-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session: %v", err) + } + + annotations := updated.GetAnnotations() + if annotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Migration annotation should be preserved") + } + + // Verify repo format is still v2 + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if err != nil || !found { + t.Fatal("Failed to get spec") + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil || !found { + t.Fatal("Failed to get repos") + } + + repo, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + // Should still have input field + _, found, err = unstructured.NestedMap(repo, "input") + if err != nil || !found { + t.Error("Expected repo to still have 'input' field") + } +} + +func TestMigrateAllSessions_V2WithoutAnnotation(t *testing.T) { + // Session already has v2 format but no annotation + session := createV2Session("test-session", "default", false) + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify annotation was added without modifying repos + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "test-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session: %v", err) + } + + annotations := updated.GetAnnotations() + if annotations == nil { + t.Fatal("Expected annotations to be set") + } + if annotations[handlers.MigrationAnnotation] != 
handlers.MigrationVersion { + t.Errorf("Expected migration annotation to be added") + } + + // Verify autoPush value was preserved (not reset to false) + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if err != nil || !found { + t.Fatal("Failed to get spec") + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil || !found { + t.Fatal("Failed to get repos") + } + + repo, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + autoPush, found, err := unstructured.NestedBool(repo, "autoPush") + if err != nil || !found { + t.Fatal("Expected repo to have 'autoPush' field") + } + if autoPush != true { + t.Error("Expected autoPush to be preserved as true") + } +} + +func TestMigrateAllSessions_SessionWithoutRepos(t *testing.T) { + session := createSessionWithoutRepos("test-session", "default") + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() with session without repos should not error, got: %v", err) + } + + // Session should be marked as checked (annotation added) even with no repos + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "test-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session: %v", err) + } + + // Annotation should be added to mark it as checked (even with no repos) + annotations := updated.GetAnnotations() + if annotations == nil || annotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Should add migration annotation to sessions without repos to mark as checked") + } +} + +func TestMigrateAllSessions_MultipleNamespaces(t *testing.T) { + session1 := createLegacySession("session-1", "namespace-1") + session2 := createLegacySession("session-2", "namespace-2") + session3 := createV2Session("session-3", "namespace-3", true) + + setupTestDynamicClient(session1, session2, session3) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + gvr := types.GetAgenticSessionResource() + + // Verify session-1 was migrated + updated1, err := config.DynamicClient.Resource(gvr).Namespace("namespace-1").Get(context.Background(), "session-1", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session-1: %v", err) + } + annotations1 := updated1.GetAnnotations() + if annotations1 == nil || annotations1[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Session-1 should have migration annotation") + } + + // Verify session-2 was migrated + updated2, err := config.DynamicClient.Resource(gvr).Namespace("namespace-2").Get(context.Background(), "session-2", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session-2: %v", err) + } + annotations2 := updated2.GetAnnotations() + if annotations2 == nil || annotations2[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Session-2 should have migration annotation") + } + + // Verify session-3 was skipped (already migrated) + updated3, err := config.DynamicClient.Resource(gvr).Namespace("namespace-3").Get(context.Background(), "session-3", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session-3: %v", err) + } + annotations3 := updated3.GetAnnotations() + if annotations3[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Session-3 should still have migration annotation") + } +} + +func 
TestMigrateAllSessions_Idempotency(t *testing.T) { + session := createLegacySession("test-session", "default") + setupTestDynamicClient(session) + + // Run migration first time + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("First migration failed: %v", err) + } + + // Get the migrated session + gvr := types.GetAgenticSessionResource() + firstMigration, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "test-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session after first migration: %v", err) + } + + // Run migration second time (should be idempotent) + err = handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("Second migration failed: %v", err) + } + + // Get the session again + secondMigration, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "test-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session after second migration: %v", err) + } + + // Verify the session was not modified on second run + firstSpec, _, _ := unstructured.NestedMap(firstMigration.Object, "spec") + secondSpec, _, _ := unstructured.NestedMap(secondMigration.Object, "spec") + + firstRepos, _, _ := unstructured.NestedSlice(firstSpec, "repos") + secondRepos, _, _ := unstructured.NestedSlice(secondSpec, "repos") + + if len(firstRepos) != len(secondRepos) { + t.Error("Idempotency check failed: repo count changed on second migration") + } + + // Verify annotations are the same + firstAnnotations := firstMigration.GetAnnotations() + secondAnnotations := secondMigration.GetAnnotations() + + if firstAnnotations[handlers.MigrationAnnotation] != secondAnnotations[handlers.MigrationAnnotation] { + t.Error("Idempotency check failed: annotation changed on second migration") + } +} + +func TestMigrateAllSessions_InvalidRepoFormat(t *testing.T) { + // Session with invalid repo format (string instead of map) + session := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": "invalid-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + "repos": []interface{}{ + "invalid-string-instead-of-map", + }, + }, + }, + } + + setupTestDynamicClient(session) + + // Should not panic, but should log error and continue + err := handlers.MigrateAllSessions() + if err != nil { + t.Errorf("MigrateAllSessions() should return nil even with invalid sessions, got: %v", err) + } + + // Session should not have migration annotation (failed validation) + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "invalid-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session: %v", err) + } + + annotations := updated.GetAnnotations() + if annotations != nil && annotations[handlers.MigrationAnnotation] == handlers.MigrationVersion { + t.Error("Invalid session should not have migration annotation") + } +} + +func TestMigrateAllSessions_MissingURLField(t *testing.T) { + // Session with repo missing required url field + session := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": "missing-url-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + "repos": []interface{}{ + 
map[string]interface{}{ + "branch": "main", + // Missing "url" field + }, + }, + }, + }, + } + + setupTestDynamicClient(session) + + // Should handle gracefully + err := handlers.MigrateAllSessions() + if err != nil { + t.Errorf("MigrateAllSessions() should return nil even with errors, got: %v", err) + } +} + +func TestMigrateAllSessions_PartialFailure(t *testing.T) { + // Create mix of valid and invalid sessions + validSession := createLegacySession("valid-session", "default") + invalidSession := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": "invalid-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + "repos": []interface{}{ + "not-a-map", + }, + }, + }, + } + + setupTestDynamicClient(validSession, invalidSession) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Errorf("MigrateAllSessions() should return nil even with partial failures, got: %v", err) + } + + gvr := types.GetAgenticSessionResource() + + // Verify valid session was migrated successfully + validUpdated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "valid-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get valid session: %v", err) + } + + validAnnotations := validUpdated.GetAnnotations() + if validAnnotations == nil || validAnnotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Valid session should have been migrated successfully") + } + + // Verify invalid session was not migrated + invalidUpdated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "invalid-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get invalid session: %v", err) + } + + invalidAnnotations := invalidUpdated.GetAnnotations() + if invalidAnnotations != nil && invalidAnnotations[handlers.MigrationAnnotation] == handlers.MigrationVersion { + t.Error("Invalid session should not have migration annotation") + } +} + +func TestMigrateAllSessions_ActiveSession(t *testing.T) { + // Session with Running status should be skipped + session := createLegacySession("running-session", "default") + // Add Running status + status := map[string]interface{}{ + "phase": "Running", + } + session.Object["status"] = status + + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify session was skipped (no annotation added) + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "running-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get session: %v", err) + } + + annotations := updated.GetAnnotations() + if annotations != nil && annotations[handlers.MigrationAnnotation] == handlers.MigrationVersion { + t.Error("Running session should not have been migrated") + } + + // Verify repos are still in legacy format (not migrated) + spec, found, _ := unstructured.NestedMap(updated.Object, "spec") + if !found { + t.Fatal("Failed to get spec") + } + + repos, found, _ := unstructured.NestedSlice(spec, "repos") + if !found { + t.Fatal("Failed to get repos") + } + + repo, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + // Should still have legacy "url" field (not migrated) + if _, hasURL := repo["url"]; 
!hasURL { + t.Error("Running session should retain legacy format") + } +} + +func TestMigrateAllSessions_MixedFormats(t *testing.T) { + legacySession := createLegacySession("legacy-session", "default") + v2Session := createV2Session("v2-session", "default", false) + v2MigratedSession := createV2Session("v2-migrated-session", "default", true) + noReposSession := createSessionWithoutRepos("no-repos-session", "default") + + setupTestDynamicClient(legacySession, v2Session, v2MigratedSession, noReposSession) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + gvr := types.GetAgenticSessionResource() + + // Verify legacy session was migrated + legacyUpdated, _ := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "legacy-session", metav1.GetOptions{}) + legacyAnnotations := legacyUpdated.GetAnnotations() + if legacyAnnotations == nil || legacyAnnotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Legacy session should have migration annotation") + } + + // Verify v2 session got annotation added + v2Updated, _ := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "v2-session", metav1.GetOptions{}) + v2Annotations := v2Updated.GetAnnotations() + if v2Annotations == nil || v2Annotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("V2 session should have migration annotation added") + } + + // Verify already-migrated session was skipped + v2MigratedUpdated, _ := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "v2-migrated-session", metav1.GetOptions{}) + v2MigratedAnnotations := v2MigratedUpdated.GetAnnotations() + if v2MigratedAnnotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Already-migrated session annotation should be preserved") + } + + // Verify no-repos session was marked as checked + noReposUpdated, _ := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "no-repos-session", metav1.GetOptions{}) + noReposAnnotations := noReposUpdated.GetAnnotations() + if noReposAnnotations == nil || noReposAnnotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("No-repos session should have migration annotation to mark as checked") + } +} + +// TestMigrateAllSessions_SingleSessionMixedV1V2Repos tests migration of a session +// with some repos in v1 format and some in v2 format (edge case from manual editing) +func TestMigrateAllSessions_SingleSessionMixedV1V2Repos(t *testing.T) { + // Create a session with mixed repo formats (could happen from manual CR editing) + mixedSession := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": "mixed-repos-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + "repos": []interface{}{ + // v1 format repo + map[string]interface{}{ + "url": "https://github.com/org/legacy-repo.git", + "branch": "main", + }, + // v2 format repo + map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/new-repo.git", + "branch": "develop", + }, + "autoPush": false, + }, + // v1 format repo without branch + map[string]interface{}{ + "url": "https://github.com/org/another-legacy.git", + }, + }, + }, + }, + } + + setupTestDynamicClient(mixedSession) + + err := handlers.MigrateAllSessions() + if err != nil 
{ + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + gvr := types.GetAgenticSessionResource() + + // Verify session was migrated + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "mixed-repos-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get updated session: %v", err) + } + + // Check migration annotation + annotations := updated.GetAnnotations() + if annotations == nil || annotations[handlers.MigrationAnnotation] != handlers.MigrationVersion { + t.Error("Session with mixed repo formats should have migration annotation") + } + + // Verify all repos are now in v2 format + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if !found || err != nil { + t.Fatalf("Failed to get spec: %v", err) + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if !found || err != nil { + t.Fatalf("Failed to get repos: %v", err) + } + + if len(repos) != 3 { + t.Fatalf("Expected 3 repos, got %d", len(repos)) + } + + // Verify each repo is in v2 format + for i, repoInterface := range repos { + repo, ok := repoInterface.(map[string]interface{}) + if !ok { + t.Errorf("Repo %d is not a map", i) + continue + } + + // All repos should have "input" field + input, hasInput := repo["input"] + if !hasInput { + t.Errorf("Repo %d missing input field after migration", i) + continue + } + + inputMap, ok := input.(map[string]interface{}) + if !ok { + t.Errorf("Repo %d input is not a map", i) + continue + } + + // Check URL exists + if _, hasURL := inputMap["url"]; !hasURL { + t.Errorf("Repo %d input missing url field", i) + } + + // All repos should have autoPush field + if _, hasAutoPush := repo["autoPush"]; !hasAutoPush { + t.Errorf("Repo %d missing autoPush field after migration", i) + } + + // Legacy fields should not be present + if _, hasLegacyURL := repo["url"]; hasLegacyURL { + t.Errorf("Repo %d should not have legacy url field after migration", i) + } + } + + // Verify that migration events were recorded + events, err := config.K8sClient.CoreV1().Events("default").List(context.Background(), metav1.ListOptions{}) + if err != nil { + t.Fatalf("Failed to list events: %v", err) + } + + // Should have one event for the successful migration + foundMigrationEvent := false + for _, event := range events.Items { + if event.InvolvedObject.Name == "mixed-repos-session" && event.Reason == "MigrationCompleted" { + foundMigrationEvent = true + if event.Type != "Normal" { + t.Errorf("Expected event type 'Normal', got '%s'", event.Type) + } + if event.Message != "Successfully migrated to v2 repo format" { + t.Errorf("Unexpected event message: %s", event.Message) + } + } + } + + if !foundMigrationEvent { + t.Error("Expected MigrationCompleted event to be recorded") + } +} + +// TestMigrateAllSessions_PreservesAutoPushOnCompleteTrue tests that migration preserves +// autoPushOnComplete: true at session level by setting autoPush: true on migrated repos +func TestMigrateAllSessions_PreservesAutoPushOnCompleteTrue(t *testing.T) { + session := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": "autopush-enabled-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + "autoPushOnComplete": true, + "repos": []interface{}{ + map[string]interface{}{ + "url": "https://github.com/org/repo.git", + "branch": "main", + }, + }, + }, + }, + } + + setupTestDynamicClient(session) + + 
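	// Before/after mapping exercised by this test (sketch):
	//
	//	v1: spec.autoPushOnComplete: true
	//	    spec.repos[0]: {url, branch}
	//	v2: spec.repos[0]: {input: {url, branch}, autoPush: true}
	//
	// The session-level flag is folded into each migrated repo's autoPush field.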
err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify migrated repo inherited autoPushOnComplete value + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "autopush-enabled-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get updated session: %v", err) + } + + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if err != nil || !found { + t.Fatal("Failed to get spec") + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil || !found { + t.Fatal("Failed to get repos") + } + + repo, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + autoPush, found, err := unstructured.NestedBool(repo, "autoPush") + if err != nil || !found { + t.Fatal("Expected repo to have 'autoPush' field") + } + if !autoPush { + t.Error("Expected autoPush to inherit autoPushOnComplete=true, got false") + } + + // Verify migration annotation was added + annotations := updated.GetAnnotations() + if annotations == nil || annotations["ambient-code.io/repos-migrated"] != "v2" { + t.Error("Expected migration annotation to be set") + } +} + +// TestMigrateAllSessions_PreservesAutoPushOnCompleteFalse tests that migration preserves +// autoPushOnComplete: false at session level by setting autoPush: false on migrated repos +func TestMigrateAllSessions_PreservesAutoPushOnCompleteFalse(t *testing.T) { + session := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": "autopush-disabled-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + "autoPushOnComplete": false, + "repos": []interface{}{ + map[string]interface{}{ + "url": "https://github.com/org/repo.git", + "branch": "develop", + }, + }, + }, + }, + } + + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify migrated repo has autoPush: false + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "autopush-disabled-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get updated session: %v", err) + } + + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if err != nil || !found { + t.Fatal("Failed to get spec") + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil || !found { + t.Fatal("Failed to get repos") + } + + repo, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + autoPush, found, err := unstructured.NestedBool(repo, "autoPush") + if err != nil || !found { + t.Fatal("Expected repo to have 'autoPush' field") + } + if autoPush { + t.Error("Expected autoPush to inherit autoPushOnComplete=false, got true") + } +} + +// TestMigrateAllSessions_DefaultsAutoPushWhenFieldMissing tests that migration defaults to +// autoPush: false when autoPushOnComplete field is missing (safe default) +func TestMigrateAllSessions_DefaultsAutoPushWhenFieldMissing(t *testing.T) { + session := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": 
"no-autopush-field-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + // Note: autoPushOnComplete field intentionally omitted + "repos": []interface{}{ + map[string]interface{}{ + "url": "https://github.com/org/repo.git", + "branch": "main", + }, + }, + }, + }, + } + + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify migrated repo defaults to autoPush: false + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "no-autopush-field-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get updated session: %v", err) + } + + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if err != nil || !found { + t.Fatal("Failed to get spec") + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil || !found { + t.Fatal("Failed to get repos") + } + + repo, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + autoPush, found, err := unstructured.NestedBool(repo, "autoPush") + if err != nil || !found { + t.Fatal("Expected repo to have 'autoPush' field") + } + if autoPush { + t.Error("Expected autoPush to default to false when autoPushOnComplete is missing, got true") + } +} + +// TestMigrateAllSessions_HandlesInvalidAutoPushOnCompleteType tests that migration handles +// gracefully when autoPushOnComplete has an invalid type (not a boolean) +func TestMigrateAllSessions_HandlesInvalidAutoPushOnCompleteType(t *testing.T) { + session := &unstructured.Unstructured{ + Object: map[string]interface{}{ + "apiVersion": "vteam.ambient-code/v1alpha1", + "kind": "AgenticSession", + "metadata": map[string]interface{}{ + "name": "invalid-autopush-type-session", + "namespace": "default", + }, + "spec": map[string]interface{}{ + "autoPushOnComplete": "not-a-bool", // Invalid type (string instead of bool) + "repos": []interface{}{ + map[string]interface{}{ + "url": "https://github.com/org/repo.git", + "branch": "main", + }, + }, + }, + }, + } + + setupTestDynamicClient(session) + + err := handlers.MigrateAllSessions() + if err != nil { + t.Fatalf("MigrateAllSessions() failed: %v", err) + } + + // Verify migration succeeded and defaulted to autoPush: false (safe default) + gvr := types.GetAgenticSessionResource() + updated, err := config.DynamicClient.Resource(gvr).Namespace("default").Get(context.Background(), "invalid-autopush-type-session", metav1.GetOptions{}) + if err != nil { + t.Fatalf("Failed to get updated session: %v", err) + } + + spec, found, err := unstructured.NestedMap(updated.Object, "spec") + if err != nil || !found { + t.Fatal("Failed to get spec") + } + + repos, found, err := unstructured.NestedSlice(spec, "repos") + if err != nil || !found { + t.Fatal("Failed to get repos") + } + + repo, ok := repos[0].(map[string]interface{}) + if !ok { + t.Fatal("Repo is not a map") + } + + autoPush, found, err := unstructured.NestedBool(repo, "autoPush") + if err != nil || !found { + t.Fatal("Expected repo to have 'autoPush' field") + } + if autoPush { + t.Error("Expected autoPush to default to false when autoPushOnComplete has invalid type, got true") + } +} diff --git a/components/operator/internal/handlers/sessions.go b/components/operator/internal/handlers/sessions.go index 862d82027..432f94dd6 100644 --- a/components/operator/internal/handlers/sessions.go +++ 
b/components/operator/internal/handlers/sessions.go @@ -36,6 +36,94 @@ var ( monitoredJobsMu sync.Mutex ) +const ( + // defaultBranch is the default git branch when none is specified + defaultBranch = "main" +) + +// repoLocation represents a git repository location (input source or output target) +type repoLocation struct { + URL string `json:"url"` + Branch string `json:"branch,omitempty"` +} + +// repoConfig represents repository configuration supporting both legacy and new formats +type repoConfig struct { + // New format (preferred) + Input *repoLocation `json:"input,omitempty"` + Output *repoLocation `json:"output,omitempty"` + AutoPush bool `json:"autoPush,omitempty"` + + // Legacy format (deprecated, for backwards compatibility) + URL string `json:"url,omitempty"` + Branch string `json:"branch,omitempty"` +} + +// parseRepoConfig parses a repository configuration map supporting both legacy and new formats. +// New format (preferred): {input: {url, branch}, output: {url, branch}, autoPush: bool} +// Legacy format: {url: string, branch: string} +// Returns error if URL is empty or invalid. +func parseRepoConfig(repoMap map[string]interface{}, namespace, sessionName string) (repoConfig, error) { + repo := repoConfig{} + + // Check for new format (input/output/autoPush) + // New format takes precedence if both formats are present + if inputMap, hasInput := repoMap["input"].(map[string]interface{}); hasInput { + // New format + input := &repoLocation{} + if url, ok := inputMap["url"].(string); ok { + input.URL = url + } + if branch, ok := inputMap["branch"].(string); ok { + input.Branch = branch + } else { + input.Branch = defaultBranch + } + repo.Input = input + + // Parse output if present + if outputMap, hasOutput := repoMap["output"].(map[string]interface{}); hasOutput { + output := &repoLocation{} + if url, ok := outputMap["url"].(string); ok { + output.URL = url + } + if branch, ok := outputMap["branch"].(string); ok { + output.Branch = branch + } + repo.Output = output + } + + // Parse autoPush if present + if autoPush, ok := repoMap["autoPush"].(bool); ok { + repo.AutoPush = autoPush + } + + // Validate input URL + if strings.TrimSpace(repo.Input.URL) == "" { + return repoConfig{}, fmt.Errorf("repo input.url is empty in session %s/%s", namespace, sessionName) + } + + return repo, nil + } + + // Legacy format + if url, ok := repoMap["url"].(string); ok { + repo.URL = url + } + if branch, ok := repoMap["branch"].(string); ok { + repo.Branch = branch + } else { + repo.Branch = defaultBranch + } + + // Validate legacy URL + if strings.TrimSpace(repo.URL) == "" { + return repoConfig{}, fmt.Errorf("repo url is empty in session %s/%s", namespace, sessionName) + } + + return repo, nil +} + // WatchAgenticSessions watches for AgenticSession custom resources and creates jobs func WatchAgenticSessions() { gvr := types.GetAgenticSessionResource() @@ -919,32 +1007,28 @@ func handleAgenticSessionEvent(obj *unstructured.Unstructured) error { }) } - // Extract repos configuration (simplified format: url and branch) - type RepoConfig struct { - URL string - Branch string - } + // Extract repos configuration (supports both legacy and new formats) + var repos []repoConfig - var repos []RepoConfig - - // Read simplified repos[] array format + // Read repos[] array format (supports both legacy and new formats) if reposArr, found, _ := unstructured.NestedSlice(spec, "repos"); found && len(reposArr) > 0 { - repos = make([]RepoConfig, 0, len(reposArr)) - for _, repoItem := range reposArr { - if 
repoMap, ok := repoItem.(map[string]interface{}); ok { - repo := RepoConfig{} - if url, ok := repoMap["url"].(string); ok { - repo.URL = url - } - if branch, ok := repoMap["branch"].(string); ok { - repo.Branch = branch - } else { - repo.Branch = "main" - } - if repo.URL != "" { - repos = append(repos, repo) - } + repos = make([]repoConfig, 0, len(reposArr)) + for i, repoItem := range reposArr { + repoMap, ok := repoItem.(map[string]interface{}) + if !ok { + log.Printf("Warning: Invalid repo item type at index %d in session %s/%s (expected map, got %T)", + i, sessionNamespace, name, repoItem) + continue + } + + repo, err := parseRepoConfig(repoMap, sessionNamespace, name) + if err != nil { + log.Printf("Warning: Skipping invalid repo at index %d in session %s/%s: %v", + i, sessionNamespace, name, err) + continue } + + repos = append(repos, repo) } } else { // Fallback to old format for backward compatibility (input/output structure) @@ -958,9 +1042,9 @@ func handleAgenticSessionEvent(obj *unstructured.Unstructured) error { } if inputRepo != "" { if inputBranch == "" { - inputBranch = "main" + inputBranch = defaultBranch } - repos = []RepoConfig{{ + repos = []repoConfig{{ URL: inputRepo, Branch: inputBranch, }} @@ -970,10 +1054,26 @@ func handleAgenticSessionEvent(obj *unstructured.Unstructured) error { // Get first repo for backward compatibility env vars (first repo is always main repo) var inputRepo, inputBranch, outputRepo, outputBranch string if len(repos) > 0 { - inputRepo = repos[0].URL - inputBranch = repos[0].Branch - outputRepo = repos[0].URL // Output same as input in simplified format - outputBranch = repos[0].Branch + firstRepo := repos[0] + // Extract from new format if available, otherwise use legacy format + if firstRepo.Input != nil { + inputRepo = firstRepo.Input.URL + inputBranch = firstRepo.Input.Branch + // Use output if specified, otherwise fall back to input + if firstRepo.Output != nil { + outputRepo = firstRepo.Output.URL + outputBranch = firstRepo.Output.Branch + } else { + outputRepo = firstRepo.Input.URL + outputBranch = firstRepo.Input.Branch + } + } else { + // Legacy format + inputRepo = firstRepo.URL + inputBranch = firstRepo.Branch + outputRepo = firstRepo.URL + outputBranch = firstRepo.Branch + } } // Read autoPushOnComplete flag @@ -1249,10 +1349,10 @@ func handleAgenticSessionEvent(obj *unstructured.Unstructured) error { }) // Add CR-provided envs last (override base when same key) if spec, ok := currentObj.Object["spec"].(map[string]interface{}); ok { - // Inject REPOS_JSON and MAIN_REPO_NAME from spec.repos and spec.mainRepoName if present - if repos, ok := spec["repos"].([]interface{}); ok && len(repos) > 0 { - // Use a minimal JSON serialization via fmt (we'll rely on client to pass REPOS_JSON too) - // This ensures runner gets repos even if env vars weren't passed from frontend + // Inject REPOS_JSON from parsed repos array (includes autoPush flags and normalized structure) + if len(repos) > 0 { + // Serialize parsed repos array with autoPush flags and normalized structure + // This ensures runner gets the fully-parsed repo configuration b, _ := json.Marshal(repos) base = append(base, corev1.EnvVar{Name: "REPOS_JSON", Value: string(b)}) } @@ -1553,7 +1653,7 @@ func reconcileSpecReposWithPatch(sessionNamespace, sessionName string, spec map[ if strings.TrimSpace(url) == "" { continue } - branch := "main" + branch := defaultBranch if b, ok := repoMap["branch"].(string); ok && strings.TrimSpace(b) != "" { branch = b } @@ -1717,7 +1817,7 @@ func 
reconcileActiveWorkflowWithPatch(sessionNamespace, sessionName string, spec } gitURL, _ := workflow["gitUrl"].(string) - branch := "main" + branch := defaultBranch if b, ok := workflow["branch"].(string); ok && strings.TrimSpace(b) != "" { branch = b } @@ -1879,6 +1979,24 @@ func monitorJob(jobName, sessionName, sessionNamespace string) { return } + // Check Job conditions for failures (e.g., DeadlineExceeded) + for _, condition := range job.Status.Conditions { + if condition.Type == batchv1.JobFailed && condition.Status == corev1.ConditionTrue { + statusPatch.SetField("phase", "Failed") + statusPatch.SetField("completionTime", time.Now().UTC().Format(time.RFC3339)) + statusPatch.AddCondition(conditionUpdate{ + Type: conditionReady, + Status: "False", + Reason: condition.Reason, + Message: condition.Message, + }) + _ = statusPatch.Apply() + _ = ensureSessionIsInteractive(sessionNamespace, sessionName) + _ = deleteJobAndPerJobService(sessionNamespace, jobName, sessionName) + return + } + } + if len(pods.Items) == 0 { if job.Status.Active == 0 && job.Status.Succeeded == 0 && job.Status.Failed == 0 { statusPatch.SetField("phase", "Failed") diff --git a/components/operator/internal/handlers/sessions_test.go b/components/operator/internal/handlers/sessions_test.go index 75f3688dc..340dafd62 100644 --- a/components/operator/internal/handlers/sessions_test.go +++ b/components/operator/internal/handlers/sessions_test.go @@ -2,22 +2,24 @@ package handlers import ( "context" + "strings" "testing" "ambient-code-operator/internal/config" "ambient-code-operator/internal/types" + batchv1 "k8s.io/api/batch/v1" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/apis/meta/v1/unstructured" "k8s.io/apimachinery/pkg/runtime" k8stypes "k8s.io/apimachinery/pkg/types" - "k8s.io/client-go/kubernetes/fake" + k8sfake "k8s.io/client-go/kubernetes/fake" ) // setupTestClient initializes a fake Kubernetes client for testing func setupTestClient(objects ...runtime.Object) { - config.K8sClient = fake.NewSimpleClientset(objects...) + config.K8sClient = k8sfake.NewSimpleClientset(objects...) 
} // TestCopySecretToNamespace_NoSharedDataMutation verifies that we don't mutate cached secret objects @@ -596,3 +598,629 @@ func TestDeleteAmbientVertexSecret_NilAnnotations(t *testing.T) { t.Error("Secret should still exist") } } + +// TestJobConditionHandling_DeadlineExceeded tests detection of DeadlineExceeded Job condition +func TestJobConditionHandling_DeadlineExceeded(t *testing.T) { + // Create a Job with DeadlineExceeded condition + now := metav1.Now() + job := &batchv1.Job{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-session-job", + Namespace: "test-ns", + }, + Status: batchv1.JobStatus{ + Conditions: []batchv1.JobCondition{ + { + Type: batchv1.JobFailed, + Status: corev1.ConditionTrue, + LastTransitionTime: now, + Reason: "DeadlineExceeded", + Message: "Job was active longer than specified deadline", + }, + }, + Failed: 1, + }, + } + + // Expected behavior: Job should be detected as failed with DeadlineExceeded reason + if len(job.Status.Conditions) == 0 { + t.Fatal("Job should have at least one condition") + } + + foundDeadlineExceeded := false + for _, condition := range job.Status.Conditions { + if condition.Type == batchv1.JobFailed && condition.Status == corev1.ConditionTrue { + if condition.Reason == "DeadlineExceeded" { + foundDeadlineExceeded = true + } + } + } + + if !foundDeadlineExceeded { + t.Error("DeadlineExceeded condition not found in Job status") + } +} + +// TestJobConditionHandling_OtherFailure tests detection of non-deadline Job failures +func TestJobConditionHandling_OtherFailure(t *testing.T) { + // Create a Job with a different failure reason + now := metav1.Now() + job := &batchv1.Job{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-session-job", + Namespace: "test-ns", + }, + Status: batchv1.JobStatus{ + Conditions: []batchv1.JobCondition{ + { + Type: batchv1.JobFailed, + Status: corev1.ConditionTrue, + LastTransitionTime: now, + Reason: "BackoffLimitExceeded", + Message: "Job has reached the specified backoff limit", + }, + }, + Failed: 3, + }, + } + + // Verify the condition is present + foundFailure := false + for _, condition := range job.Status.Conditions { + if condition.Type == batchv1.JobFailed && condition.Status == corev1.ConditionTrue { + foundFailure = true + if condition.Reason != "BackoffLimitExceeded" { + t.Errorf("Expected reason 'BackoffLimitExceeded', got '%s'", condition.Reason) + } + } + } + + if !foundFailure { + t.Error("Job failure condition not found") + } +} + +// TestJobConditionHandling_NoFailure tests Job without failure conditions +func TestJobConditionHandling_NoFailure(t *testing.T) { + // Create a Job with no failure conditions (running or succeeded) + job := &batchv1.Job{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-session-job", + Namespace: "test-ns", + }, + Status: batchv1.JobStatus{ + Active: 1, + }, + } + + // Verify no failure conditions + for _, condition := range job.Status.Conditions { + if condition.Type == batchv1.JobFailed && condition.Status == corev1.ConditionTrue { + t.Error("Job should not have JobFailed condition") + } + } +} + +// TestJobConditionHandling_MultipleConditions tests Job with multiple conditions +func TestJobConditionHandling_MultipleConditions(t *testing.T) { + // Create a Job with multiple conditions, including DeadlineExceeded + now := metav1.Now() + job := &batchv1.Job{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-session-job", + Namespace: "test-ns", + }, + Status: batchv1.JobStatus{ + Conditions: []batchv1.JobCondition{ + { + Type: batchv1.JobComplete, + Status: 
corev1.ConditionFalse, + LastTransitionTime: now, + Reason: "NotComplete", + Message: "Job is not complete", + }, + { + Type: batchv1.JobFailed, + Status: corev1.ConditionTrue, + LastTransitionTime: now, + Reason: "DeadlineExceeded", + Message: "Job was active longer than specified deadline", + }, + }, + Failed: 1, + }, + } + + // Should find DeadlineExceeded among multiple conditions + foundDeadlineExceeded := false + for _, condition := range job.Status.Conditions { + if condition.Type == batchv1.JobFailed && condition.Status == corev1.ConditionTrue { + if condition.Reason == "DeadlineExceeded" { + foundDeadlineExceeded = true + if condition.Message != "Job was active longer than specified deadline" { + t.Errorf("Unexpected message: %s", condition.Message) + } + } + } + } + + if !foundDeadlineExceeded { + t.Error("DeadlineExceeded condition not found among multiple conditions") + } +} + +// TestJobConditionHandling_FailedButNotTrue tests Job with Failed condition but status False +func TestJobConditionHandling_FailedButNotTrue(t *testing.T) { + // Create a Job with JobFailed condition but Status=False (cleared failure) + now := metav1.Now() + job := &batchv1.Job{ + ObjectMeta: metav1.ObjectMeta{ + Name: "test-session-job", + Namespace: "test-ns", + }, + Status: batchv1.JobStatus{ + Conditions: []batchv1.JobCondition{ + { + Type: batchv1.JobFailed, + Status: corev1.ConditionFalse, + LastTransitionTime: now, + Reason: "PreviouslyFailed", + Message: "Job was previously failed but is now retrying", + }, + }, + Active: 1, + }, + } + + // Should NOT detect as failed (Status must be True) + for _, condition := range job.Status.Conditions { + if condition.Type == batchv1.JobFailed && condition.Status == corev1.ConditionTrue { + t.Error("Job should not be detected as failed when Status is False") + } + } +} + +// TestParseRepos_NewFormat tests parsing repos in new format (input/output/autoPush) +func TestParseRepos_NewFormat(t *testing.T) { + tests := []struct { + name string + reposMap []interface{} + validate func(t *testing.T, repos []repoConfig) + }{ + { + name: "new format with input and output", + reposMap: []interface{}{ + map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + "branch": "main", + }, + "output": map[string]interface{}{ + "url": "https://github.com/user/fork", + "branch": "feature", + }, + "autoPush": true, + }, + }, + validate: func(t *testing.T, repos []repoConfig) { + if len(repos) != 1 { + t.Fatalf("Expected 1 repo, got %d", len(repos)) + } + + repo := repos[0] + if repo.Input == nil { + t.Fatal("Input should not be nil") + } + if repo.Input.URL != "https://github.com/org/repo" { + t.Errorf("Expected input URL 'https://github.com/org/repo', got '%s'", repo.Input.URL) + } + if repo.Input.Branch != "main" { + t.Errorf("Expected input branch 'main', got '%s'", repo.Input.Branch) + } + + if repo.Output == nil { + t.Fatal("Output should not be nil") + } + if repo.Output.URL != "https://github.com/user/fork" { + t.Errorf("Expected output URL 'https://github.com/user/fork', got '%s'", repo.Output.URL) + } + if repo.Output.Branch != "feature" { + t.Errorf("Expected output branch 'feature', got '%s'", repo.Output.Branch) + } + + if !repo.AutoPush { + t.Error("Expected autoPush to be true") + } + }, + }, + { + name: "new format with input only", + reposMap: []interface{}{ + map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + "branch": "develop", + }, + "autoPush": false, + }, + }, + 
validate: func(t *testing.T, repos []repoConfig) { + if len(repos) != 1 { + t.Fatalf("Expected 1 repo, got %d", len(repos)) + } + + repo := repos[0] + if repo.Input == nil { + t.Fatal("Input should not be nil") + } + if repo.Input.URL != "https://github.com/org/repo" { + t.Errorf("Expected input URL 'https://github.com/org/repo', got '%s'", repo.Input.URL) + } + if repo.Input.Branch != "develop" { + t.Errorf("Expected input branch 'develop', got '%s'", repo.Input.Branch) + } + + if repo.Output != nil { + t.Error("Output should be nil when not specified") + } + + if repo.AutoPush { + t.Error("Expected autoPush to be false") + } + }, + }, + { + name: "new format without branch defaults to main", + reposMap: []interface{}{ + map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + }, + }, + }, + validate: func(t *testing.T, repos []repoConfig) { + if len(repos) != 1 { + t.Fatalf("Expected 1 repo, got %d", len(repos)) + } + + repo := repos[0] + if repo.Input.Branch != "main" { + t.Errorf("Expected default branch 'main', got '%s'", repo.Input.Branch) + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Call production parseRepoConfig() function instead of duplicating logic + repos := make([]repoConfig, 0, len(tt.reposMap)) + for _, repoItem := range tt.reposMap { + if repoMap, ok := repoItem.(map[string]interface{}); ok { + repo, err := parseRepoConfig(repoMap, "test-ns", "test-session") + if err != nil { + t.Fatalf("parseRepoConfig failed: %v", err) + } + repos = append(repos, repo) + } + } + + tt.validate(t, repos) + }) + } +} + +// TestParseRepos_LegacyFormat tests parsing repos in legacy format (url/branch) +func TestParseRepos_LegacyFormat(t *testing.T) { + tests := []struct { + name string + reposMap []interface{} + validate func(t *testing.T, repos []repoConfig) + }{ + { + name: "legacy format with branch", + reposMap: []interface{}{ + map[string]interface{}{ + "url": "https://github.com/org/legacy", + "branch": "master", + }, + }, + validate: func(t *testing.T, repos []repoConfig) { + if len(repos) != 1 { + t.Fatalf("Expected 1 repo, got %d", len(repos)) + } + + repo := repos[0] + if repo.URL != "https://github.com/org/legacy" { + t.Errorf("Expected URL 'https://github.com/org/legacy', got '%s'", repo.URL) + } + if repo.Branch != "master" { + t.Errorf("Expected branch 'master', got '%s'", repo.Branch) + } + + // New format fields should be nil for legacy repos + if repo.Input != nil { + t.Error("Input should be nil for legacy format") + } + if repo.Output != nil { + t.Error("Output should be nil for legacy format") + } + }, + }, + { + name: "legacy format without branch defaults to main", + reposMap: []interface{}{ + map[string]interface{}{ + "url": "https://github.com/org/legacy", + }, + }, + validate: func(t *testing.T, repos []repoConfig) { + if len(repos) != 1 { + t.Fatalf("Expected 1 repo, got %d", len(repos)) + } + + repo := repos[0] + if repo.Branch != "main" { + t.Errorf("Expected default branch 'main', got '%s'", repo.Branch) + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Call production parseRepoConfig() function instead of duplicating logic + repos := make([]repoConfig, 0, len(tt.reposMap)) + for _, repoItem := range tt.reposMap { + if repoMap, ok := repoItem.(map[string]interface{}); ok { + repo, err := parseRepoConfig(repoMap, "test-ns", "test-session") + if err != nil { + t.Fatalf("parseRepoConfig failed: %v", err) + } + repos = append(repos, 
repo) + } + } + + tt.validate(t, repos) + }) + } +} + +// TestParseRepos_EdgeCases tests edge cases and error handling +func TestParseRepos_EdgeCases(t *testing.T) { + tests := []struct { + name string + repoMap map[string]interface{} + expectError bool + validate func(t *testing.T, repo repoConfig, err error) + }{ + { + name: "empty URL in new format", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "", + "branch": "main", + }, + }, + expectError: true, + validate: func(t *testing.T, repo repoConfig, err error) { + if err == nil { + t.Error("Expected error for empty input URL") + } + if err != nil && !strings.Contains(err.Error(), "empty") { + t.Errorf("Expected 'empty' in error message, got: %v", err) + } + }, + }, + { + name: "empty URL in legacy format", + repoMap: map[string]interface{}{ + "url": "", + "branch": "main", + }, + expectError: true, + validate: func(t *testing.T, repo repoConfig, err error) { + if err == nil { + t.Error("Expected error for empty URL") + } + }, + }, + { + name: "whitespace-only URL in new format", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": " ", + "branch": "main", + }, + }, + expectError: true, + validate: func(t *testing.T, repo repoConfig, err error) { + if err == nil { + t.Error("Expected error for whitespace-only URL") + } + }, + }, + { + name: "both new and legacy formats present (new should win)", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/new/format", + "branch": "new-branch", + }, + "url": "https://github.com/legacy/format", + "branch": "legacy-branch", + }, + expectError: false, + validate: func(t *testing.T, repo repoConfig, err error) { + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + // New format should take precedence + if repo.Input == nil { + t.Fatal("Input should not be nil when new format present") + } + if repo.Input.URL != "https://github.com/new/format" { + t.Errorf("Expected new format URL, got '%s'", repo.Input.URL) + } + if repo.Input.Branch != "new-branch" { + t.Errorf("Expected new format branch, got '%s'", repo.Input.Branch) + } + }, + }, + { + name: "new format without branch defaults to main", + repoMap: map[string]interface{}{ + "input": map[string]interface{}{ + "url": "https://github.com/org/repo", + }, + }, + expectError: false, + validate: func(t *testing.T, repo repoConfig, err error) { + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + if repo.Input == nil { + t.Fatal("Input should not be nil") + } + if repo.Input.Branch != "main" { + t.Errorf("Expected default branch 'main', got '%s'", repo.Input.Branch) + } + }, + }, + { + name: "legacy format without branch defaults to main", + repoMap: map[string]interface{}{ + "url": "https://github.com/org/repo", + }, + expectError: false, + validate: func(t *testing.T, repo repoConfig, err error) { + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + if repo.Branch != "main" { + t.Errorf("Expected default branch 'main', got '%s'", repo.Branch) + } + }, + }, + { + name: "invalid type for input (not a map)", + repoMap: map[string]interface{}{ + "input": "not-a-map", + }, + expectError: true, + validate: func(t *testing.T, repo repoConfig, err error) { + if err == nil { + t.Error("Expected error for invalid input type") + } + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + repo, err := parseRepoConfig(tt.repoMap, "test-ns", "test-session") + tt.validate(t, repo, err) + }) + } +} + +// 
TestBackwardCompatEnvVars tests extraction of backward compat env vars from repos +func TestBackwardCompatEnvVars(t *testing.T) { + tests := []struct { + name string + repos []repoConfig + expectedInput string + expectedInBranch string + expectedOutput string + expectedOutBranch string + }{ + { + name: "new format with output", + repos: []repoConfig{ + { + Input: &repoLocation{ + URL: "https://github.com/org/repo", + Branch: "main", + }, + Output: &repoLocation{ + URL: "https://github.com/user/fork", + Branch: "feature", + }, + AutoPush: true, + }, + }, + expectedInput: "https://github.com/org/repo", + expectedInBranch: "main", + expectedOutput: "https://github.com/user/fork", + expectedOutBranch: "feature", + }, + { + name: "new format without output", + repos: []repoConfig{ + { + Input: &repoLocation{ + URL: "https://github.com/org/repo", + Branch: "develop", + }, + AutoPush: false, + }, + }, + expectedInput: "https://github.com/org/repo", + expectedInBranch: "develop", + expectedOutput: "https://github.com/org/repo", + expectedOutBranch: "develop", + }, + { + name: "legacy format", + repos: []repoConfig{ + { + URL: "https://github.com/org/legacy", + Branch: "master", + }, + }, + expectedInput: "https://github.com/org/legacy", + expectedInBranch: "master", + expectedOutput: "https://github.com/org/legacy", + expectedOutBranch: "master", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Extract backward compat env vars using the same logic as operator + var inputRepo, inputBranch, outputRepo, outputBranch string + if len(tt.repos) > 0 { + firstRepo := tt.repos[0] + if firstRepo.Input != nil { + inputRepo = firstRepo.Input.URL + inputBranch = firstRepo.Input.Branch + if firstRepo.Output != nil { + outputRepo = firstRepo.Output.URL + outputBranch = firstRepo.Output.Branch + } else { + outputRepo = firstRepo.Input.URL + outputBranch = firstRepo.Input.Branch + } + } else { + inputRepo = firstRepo.URL + inputBranch = firstRepo.Branch + outputRepo = firstRepo.URL + outputBranch = firstRepo.Branch + } + } + + if inputRepo != tt.expectedInput { + t.Errorf("Expected inputRepo '%s', got '%s'", tt.expectedInput, inputRepo) + } + if inputBranch != tt.expectedInBranch { + t.Errorf("Expected inputBranch '%s', got '%s'", tt.expectedInBranch, inputBranch) + } + if outputRepo != tt.expectedOutput { + t.Errorf("Expected outputRepo '%s', got '%s'", tt.expectedOutput, outputRepo) + } + if outputBranch != tt.expectedOutBranch { + t.Errorf("Expected outputBranch '%s', got '%s'", tt.expectedOutBranch, outputBranch) + } + }) + } +} diff --git a/components/operator/main.go b/components/operator/main.go index df9c31821..6bc836758 100644 --- a/components/operator/main.go +++ b/components/operator/main.go @@ -60,6 +60,11 @@ func main() { } } + // Run v2 repo format migration before starting watchers + log.Println("Running v2 repo format migration...") + handlers.MigrateAllSessions() + log.Println("Migration check complete - see logs above for details") + // Start watching AgenticSession resources go handlers.WatchAgenticSessions() diff --git a/components/runners/claude-code-runner/adapter.py b/components/runners/claude-code-runner/adapter.py index 15800ba49..1636abc7e 100644 --- a/components/runners/claude-code-runner/adapter.py +++ b/components/runners/claude-code-runner/adapter.py @@ -213,14 +213,26 @@ async def process_run(self, input_data: RunAgentInput) -> AsyncIterator[BaseEven async for event in self._run_claude_agent_sdk(user_message, thread_id, run_id): yield event 
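+            # When AUTO_PUSH_ON_COMPLETE is enabled (checked below), results are pushed after the SDK
+            # stream finishes and before RUN_FINISHED is emitted, so push progress events are part of
+            # the run's event stream.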
logger.info(f"Claude SDK processing completed for run {run_id}") - + + # Check if AUTO_PUSH_ON_COMPLETE is enabled + try: + auto_push_on_complete = str(os.getenv('AUTO_PUSH_ON_COMPLETE', 'false')).strip().lower() in ('1', 'true', 'yes') + except Exception: + auto_push_on_complete = False + + if auto_push_on_complete: + logger.info("AUTO_PUSH_ON_COMPLETE enabled, pushing results...") + async for event in self._push_results_if_any(): + yield event + logger.info("Auto-push completed") + # Emit RUN_FINISHED yield RunFinishedEvent( type=EventType.RUN_FINISHED, thread_id=thread_id, run_id=run_id, ) - + self.last_exit_code = 0 except PrerequisiteError as e: @@ -1337,6 +1349,283 @@ def _parse_owner_repo(self, url: str) -> tuple[str, str, str]: return "", "", host return "", "", host + async def _create_pull_request(self, upstream_repo: str, fork_repo: str, + head_branch: str, base_branch: str) -> str: + """Create a pull request (NOT YET IMPLEMENTED in adapter.py). + + Args: + upstream_repo: URL of the upstream repository + fork_repo: URL of the fork repository + head_branch: Branch name with changes + base_branch: Target branch for PR + + Returns: + PR URL if created, empty string otherwise + """ + logger.warning("PR creation not yet implemented in adapter.py - skipping") + # TODO: Port PR creation logic from wrapper.py or implement via GitHub API + return "" + + async def _push_results_if_any(self) -> AsyncIterator[BaseEvent]: + """Commit and push changes to output repo/branch if configured (respects per-repo autoPush flag).""" + # Get GitHub token once for all repos + token = os.getenv("GITHUB_TOKEN") or await self._fetch_github_token() + if token: + logger.info("GitHub token obtained for push operations") + else: + logger.warning("No GitHub token available - push may fail for private repos") + + repos_cfg = self._get_repos_config() + if repos_cfg: + # Multi-repo flow + try: + for r in repos_cfg: + name = (r.get('name') or '').strip() + if not name: + continue + + # Check per-repo autoPush flag FIRST (cheapest check) + auto_push = r.get('autoPush', False) + if not auto_push: + logger.info(f"autoPush disabled for {name}, skipping push") + continue + + # Check output URL configured SECOND (before expensive git operations) + out = r.get('output') or {} + out_url_raw = (out.get('url') or '').strip() + if not out_url_raw: + logger.warning(f"No output URL configured for {name}, skipping push") + continue + + # Verify repository directory exists + repo_dir = Path(self.context.workspace_path) / name + if not repo_dir.exists() or not repo_dir.is_dir(): + logger.warning(f"Repository directory not found: {repo_dir}, skipping push") + continue + + # Check for changes LAST (most expensive operation) + status = await self._run_cmd(["git", "status", "--porcelain"], cwd=str(repo_dir), capture_stdout=True) + if not status.strip(): + logger.info(f"No changes detected for {name}, skipping push") + continue + + # Add token to output URL + out_url = self._url_with_token(out_url_raw, token) if token else out_url_raw + + in_ = r.get('input') or {} + in_branch = (in_.get('branch') or '').strip() + out_branch = (out.get('branch') or '').strip() or f"sessions/{self.context.session_id}" + + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"📤 Pushing changes for {name}..."} + ) + logger.info(f"Configuring output remote with authentication for {name}") + + # Reconfigure output remote with 
token before push + await self._run_cmd(["git", "remote", "remove", "output"], cwd=str(repo_dir), ignore_errors=True) + await self._run_cmd(["git", "remote", "add", "output", out_url], cwd=str(repo_dir)) + + logger.info(f"Checking out branch {out_branch} for {name}") + await self._run_cmd(["git", "checkout", "-B", out_branch], cwd=str(repo_dir)) + + logger.info(f"Staging all changes for {name}") + await self._run_cmd(["git", "add", "-A"], cwd=str(repo_dir)) + + logger.info(f"Committing changes for {name}") + try: + await self._run_cmd(["git", "commit", "-m", f"Session {self.context.session_id}: update"], cwd=str(repo_dir)) + except RuntimeError as e: + if "nothing to commit" in str(e).lower(): + logger.info(f"No changes to commit for {name}") + continue + else: + logger.error(f"Commit failed for {name}: {e}") + raise + + # Verify we have a valid output remote + logger.info(f"Verifying output remote for {name}") + remotes_output = await self._run_cmd(["git", "remote", "-v"], cwd=str(repo_dir), capture_stdout=True) + logger.info(f"Git remotes for {name}:\n{self._redact_secrets(remotes_output)}") + + if "output" not in remotes_output: + raise RuntimeError(f"Output remote not configured for {name}") + + logger.info(f"Pushing to output remote: {out_branch} for {name}") + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"Pushing {name} to {out_branch}..."} + ) + # Note: We use "output" remote (configured above) not "origin" (which Claude uses for manual pushes) + await self._run_cmd(["git", "push", "-u", "output", f"HEAD:{out_branch}"], cwd=str(repo_dir)) + + logger.info(f"Push completed for {name}") + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"✓ Push completed for {name}"} + ) + + # PR creation (if configured) + create_pr_flag = (os.getenv("CREATE_PR", "").strip().lower() == "true") + if create_pr_flag and in_branch and out_branch and out_branch != in_branch and out_url: + upstream_url = (in_.get('url') or '').strip() or out_url + target_branch = os.getenv("PR_TARGET_BRANCH", "").strip() or in_branch + try: + pr_url = await self._create_pull_request(upstream_repo=upstream_url, fork_repo=out_url, head_branch=out_branch, base_branch=target_branch) + if pr_url: + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"Pull request created for {name}: {pr_url}"} + ) + except Exception as e: + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"⚠️ PR creation failed for {name}: {e}"} + ) + # NOTE: Push failures are non-fatal - session completes successfully even if git push fails. + # This allows users to manually push via workspace access if automatic push encounters errors. 
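+            # A git failure on one repo aborts the remaining repos in this loop: the handler below logs
+            # the error, emits a warning event, and returns; the session itself still completes successfully.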
+ except Exception as e: + logger.error(f"Failed to push results: {e}") + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"⚠️ Push failed: {e}"} + ) + return + + # Single-repo legacy flow + # Note: Legacy flow does not support per-repo autoPush flags - honors global AUTO_PUSH_ON_COMPLETE only + output_repo_raw = os.getenv("OUTPUT_REPO_URL", "").strip() + if not output_repo_raw: + logger.info("No OUTPUT_REPO_URL configured, skipping legacy single-repo push") + return + + # Add token to output URL + output_repo = self._url_with_token(output_repo_raw, token) if token else output_repo_raw + + output_branch = os.getenv("OUTPUT_BRANCH", "").strip() or f"sessions/{self.context.session_id}" + input_repo = os.getenv("INPUT_REPO_URL", "").strip() + input_branch = os.getenv("INPUT_BRANCH", "").strip() + workspace = Path(self.context.workspace_path) + + # Verify workspace directory exists + if not workspace.exists() or not workspace.is_dir(): + logger.warning(f"Workspace directory not found: {workspace}, skipping push") + return + + try: + status = await self._run_cmd(["git", "status", "--porcelain"], cwd=str(workspace), capture_stdout=True) + if not status.strip(): + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": "No changes to push"} + ) + return + + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": "📤 Committing and pushing changes..."} + ) + logger.info("Configuring output remote with authentication") + + # Reconfigure output remote with token before push + await self._run_cmd(["git", "remote", "remove", "output"], cwd=str(workspace), ignore_errors=True) + await self._run_cmd(["git", "remote", "add", "output", output_repo], cwd=str(workspace)) + + logger.info(f"Checking out branch {output_branch}") + await self._run_cmd(["git", "checkout", "-B", output_branch], cwd=str(workspace)) + + logger.info("Staging all changes") + await self._run_cmd(["git", "add", "-A"], cwd=str(workspace)) + + logger.info("Committing changes") + try: + await self._run_cmd(["git", "commit", "-m", f"Session {self.context.session_id}: update"], cwd=str(workspace)) + except RuntimeError as e: + if "nothing to commit" in str(e).lower(): + logger.info("No changes to commit") + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": "No new changes to commit"} + ) + return + else: + logger.error(f"Commit failed: {e}") + raise + + # Verify we have a valid output remote + logger.info("Verifying output remote") + remotes_output = await self._run_cmd(["git", "remote", "-v"], cwd=str(workspace), capture_stdout=True) + logger.info(f"Git remotes:\n{self._redact_secrets(remotes_output)}") + + if "output" not in remotes_output: + raise RuntimeError("Output remote not configured") + + logger.info(f"Pushing to output remote: {output_branch}") + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"Pushing to {output_branch}..."} + ) + await self._run_cmd(["git", "push", "-u", 
"output", f"HEAD:{output_branch}"], cwd=str(workspace)) + + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": "✓ Push completed"} + ) + + # PR creation (if configured) + create_pr_flag = (os.getenv("CREATE_PR", "").strip().lower() == "true") + if create_pr_flag and input_repo and input_branch and output_branch != input_branch: + try: + target_branch = os.getenv("PR_TARGET_BRANCH", "").strip() or input_branch + pr_url = await self._create_pull_request(upstream_repo=input_repo, fork_repo=output_repo, head_branch=output_branch, base_branch=target_branch) + if pr_url: + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"Pull request created: {pr_url}"} + ) + except Exception as e: + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"⚠️ PR creation failed: {e}"} + ) + + # NOTE: Push failures are non-fatal - session completes successfully even if git push fails. + # This allows users to manually push via workspace access if automatic push encounters errors. + except Exception as e: + logger.error(f"Failed to push results: {e}") + yield RawEvent( + type=EventType.RAW, + thread_id=self._current_thread_id or self.context.session_id, + run_id=self._current_run_id or "push", + event={"type": "system_log", "message": f"⚠️ Push failed: {e}"} + ) + def _get_repos_config(self) -> list[dict]: """Read repos mapping from REPOS_JSON env if present.""" try: @@ -1352,6 +1641,7 @@ def _get_repos_config(self) -> list[dict]: name = str(it.get('name') or '').strip() input_obj = it.get('input') or {} output_obj = it.get('output') or None + auto_push = it.get('autoPush', False) url = str((input_obj or {}).get('url') or '').strip() if not name and url: try: @@ -1366,7 +1656,7 @@ def _get_repos_config(self) -> list[dict]: except Exception: name = '' if name and isinstance(input_obj, dict) and url: - out.append({'name': name, 'input': input_obj, 'output': output_obj}) + out.append({'name': name, 'input': input_obj, 'output': output_obj, 'autoPush': auto_push}) return out except Exception: return [] @@ -1449,7 +1739,27 @@ def _build_workspace_context_prompt(self, repos_cfg, workflow_name, artifacts_pa for i, repo in enumerate(repos_cfg): name = repo.get('name', f'repo-{i}') prompt += f"- {name}/\n" - prompt += "\nThese repositories contain source code you can read or modify.\n\n" + prompt += "\nThese repositories contain source code you can read or modify.\n" + prompt += "Each has its own git configuration and remote.\n\n" + + # Add git push instructions if any repo has autoPush enabled + auto_push_repos = [] + for repo in repos_cfg: + if repo.get('autoPush', False): + auto_push_repos.append(repo.get('name', 'unknown')) + + if auto_push_repos: + prompt += "## Git Operations - IMPORTANT\n" + prompt += "When you complete your work:\n" + prompt += "1. Commit your changes with a clear, descriptive message\n" + prompt += "2. Push changes to the configured output branch\n" + prompt += "3. 
Confirm the push succeeded\n\n" + prompt += "Git push best practices:\n" + prompt += "- Use: git push -u origin \n" + prompt += "- Only retry on network errors (up to 4 times with backoff)\n" + prompt += "- Do not force push unless explicitly requested by the user\n" + prompt += "- Check git status before and after pushing\n\n" + prompt += f"Repositories configured for auto-push: {', '.join(auto_push_repos)}\n\n" if ambient_config.get("systemPrompt"): prompt += f"## Workflow Instructions\n{ambient_config['systemPrompt']}\n\n" diff --git a/components/runners/claude-code-runner/tests/test_repo_autopush.py b/components/runners/claude-code-runner/tests/test_repo_autopush.py new file mode 100644 index 000000000..293d5d6c0 --- /dev/null +++ b/components/runners/claude-code-runner/tests/test_repo_autopush.py @@ -0,0 +1,528 @@ +""" +Test cases for repository autoPush flag extraction and handling. + +This module tests the _get_repos_config() method's extraction of per-repo +autoPush flags from the REPOS_JSON environment variable. +""" + +import json +import os +import pytest +from pathlib import Path +import sys +from unittest.mock import Mock, patch + +# Add parent directory to path for importing adapter module +adapter_dir = Path(__file__).parent.parent +if str(adapter_dir) not in sys.path: + sys.path.insert(0, str(adapter_dir)) + +from adapter import ClaudeCodeAdapter # type: ignore[import] + + +class TestGetReposConfig: + """Test suite for _get_repos_config method's autoPush extraction""" + + def test_extract_autopush_true(self): + """Test extraction of autoPush=true from repos config""" + repos_json = json.dumps([ + { + "name": "repo1", + "input": {"url": "https://github.com/org/repo1", "branch": "main"}, + "output": {"url": "https://github.com/user/fork1", "branch": "feature"}, + "autoPush": True + } + ]) + + with patch.dict(os.environ, {'REPOS_JSON': repos_json}): + # Create mock context with required attributes + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session-123" + mock_context.get_env = lambda k, d=None: os.getenv(k, d) + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + result = adapter._get_repos_config() + + assert len(result) == 1 + assert result[0]['name'] == 'repo1' + assert result[0]['autoPush'] is True + + def test_extract_autopush_false(self): + """Test extraction of autoPush=false from repos config""" + repos_json = json.dumps([ + { + "name": "repo2", + "input": {"url": "https://github.com/org/repo2", "branch": "develop"}, + "autoPush": False + } + ]) + + with patch.dict(os.environ, {'REPOS_JSON': repos_json}): + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session-456" + mock_context.get_env = lambda k, d=None: os.getenv(k, d) + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + result = adapter._get_repos_config() + + assert len(result) == 1 + assert result[0]['name'] == 'repo2' + assert result[0]['autoPush'] is False + + def test_default_autopush_false_when_missing(self): + """Test that autoPush defaults to False when not specified""" + repos_json = json.dumps([ + { + "name": "repo3", + "input": {"url": "https://github.com/org/repo3", "branch": "main"} + # No autoPush field + } + ]) + + with patch.dict(os.environ, {'REPOS_JSON': repos_json}): + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session-789" + # mock_context.model_name removed + # 
mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: os.getenv(k, d) + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + result = adapter._get_repos_config() + + assert len(result) == 1 + assert result[0]['name'] == 'repo3' + assert result[0]['autoPush'] is False # Default when not specified + + def test_mixed_autopush_flags(self): + """Test handling of multiple repos with different autoPush settings""" + repos_json = json.dumps([ + { + "name": "repo-push", + "input": {"url": "https://github.com/org/repo-push", "branch": "main"}, + "autoPush": True + }, + { + "name": "repo-no-push", + "input": {"url": "https://github.com/org/repo-no-push", "branch": "main"}, + "autoPush": False + }, + { + "name": "repo-default", + "input": {"url": "https://github.com/org/repo-default", "branch": "main"} + # No autoPush field - defaults to False + } + ]) + + with patch.dict(os.environ, {'REPOS_JSON': repos_json}): + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session-multi" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: os.getenv(k, d) + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + result = adapter._get_repos_config() + + assert len(result) == 3 + + # Find each repo by name and verify autoPush + repo_push = next(r for r in result if r['name'] == 'repo-push') + assert repo_push['autoPush'] is True + + repo_no_push = next(r for r in result if r['name'] == 'repo-no-push') + assert repo_no_push['autoPush'] is False + + repo_default = next(r for r in result if r['name'] == 'repo-default') + assert repo_default['autoPush'] is False + + def test_legacy_format_without_autopush(self): + """Test that legacy format (no input/output structure) still works""" + # This tests backward compatibility - old format doesn't have autoPush + repos_json = json.dumps([ + { + "url": "https://github.com/org/legacy-repo", + "branch": "main" + } + ]) + + with patch.dict(os.environ, {'REPOS_JSON': repos_json}): + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session-legacy" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: os.getenv(k, d) + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + result = adapter._get_repos_config() + + # Legacy format without input/output structure is intentionally filtered out + # because autoPush requires the new structured format to distinguish input from output repos + assert len(result) == 0 + + def test_empty_repos_json(self): + """Test handling of empty REPOS_JSON""" + with patch.dict(os.environ, {'REPOS_JSON': ''}): + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session-empty" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: os.getenv(k, d) + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + result = adapter._get_repos_config() + + assert result == [] + + def test_invalid_json(self): + """Test handling of invalid JSON in REPOS_JSON""" + with patch.dict(os.environ, {'REPOS_JSON': 'invalid-json-{['}): + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session-invalid" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = 
lambda k, d=None: os.getenv(k, d) + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + result = adapter._get_repos_config() + + # Should return empty list on parse error + assert result == [] + + +class TestSystemPromptInjection: + """Test suite for system prompt git instruction injection based on autoPush flag""" + + def test_git_instructions_when_autopush_enabled(self): + """Test that git push instructions appear when at least one repo has autoPush=true""" + repos_cfg = [ + { + 'name': 'repo-with-autopush', + 'input': {'url': 'https://github.com/org/repo', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork', 'branch': 'feature'}, + 'autoPush': True + } + ] + + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: None + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + prompt = adapter._build_workspace_context_prompt( + repos_cfg=repos_cfg, + workflow_name=None, + artifacts_path="artifacts", + ambient_config={} + ) + + # Verify git instructions section is present + assert "## Git Operations - IMPORTANT" in prompt + assert "When you complete your work:" in prompt + assert "1. Commit your changes with a clear, descriptive message" in prompt + assert "2. Push changes to the configured output branch" in prompt + assert "3. Confirm the push succeeded" in prompt + + # Verify best practices are included + assert "Git push best practices:" in prompt + assert "Use: git push -u origin " in prompt + assert "Only retry on network errors (up to 4 times with backoff)" in prompt + + # Verify repo list is correct + assert "Repositories configured for auto-push: repo-with-autopush" in prompt + + def test_git_instructions_omitted_when_autopush_disabled(self): + """Test that git push instructions are NOT included when all repos have autoPush=false""" + repos_cfg = [ + { + 'name': 'repo-no-push', + 'input': {'url': 'https://github.com/org/repo', 'branch': 'main'}, + 'autoPush': False + } + ] + + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: None + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + prompt = adapter._build_workspace_context_prompt( + repos_cfg=repos_cfg, + workflow_name=None, + artifacts_path="artifacts", + ambient_config={} + ) + + # Verify git instructions section is NOT present + assert "## Git Operations - IMPORTANT" not in prompt + assert "When you complete your work:" not in prompt + assert "Repositories configured for auto-push:" not in prompt + + def test_git_instructions_with_multiple_autopush_repos(self): + """Test that all repos with autoPush=true are listed in the system prompt""" + repos_cfg = [ + { + 'name': 'repo1', + 'input': {'url': 'https://github.com/org/repo1', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork1', 'branch': 'feature'}, + 'autoPush': True + }, + { + 'name': 'repo2', + 'input': {'url': 'https://github.com/org/repo2', 'branch': 'main'}, + 'autoPush': False # This one should NOT be listed + }, + { + 'name': 'repo3', + 'input': {'url': 'https://github.com/org/repo3', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork3', 'branch': 'feature'}, + 'autoPush': True + } + ] + + mock_context = Mock() + 
mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: None + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + prompt = adapter._build_workspace_context_prompt( + repos_cfg=repos_cfg, + workflow_name=None, + artifacts_path="artifacts", + ambient_config={} + ) + + # Verify git instructions section is present + assert "## Git Operations - IMPORTANT" in prompt + + # Verify repo list includes ONLY repos with autoPush=true + assert "Repositories configured for auto-push: repo1, repo3" in prompt + # Verify repo2 (autoPush=false) is NOT listed + assert "repo2" not in prompt.split("Repositories configured for auto-push:")[1].split("\n")[0] + + def test_git_instructions_omitted_when_no_repos(self): + """Test that git push instructions are NOT included when repos_cfg is empty""" + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: None + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + prompt = adapter._build_workspace_context_prompt( + repos_cfg=[], + workflow_name=None, + artifacts_path="artifacts", + ambient_config={} + ) + + # Verify git instructions section is NOT present + assert "## Git Operations - IMPORTANT" not in prompt + assert "Repositories configured for auto-push:" not in prompt + + +class TestPushBehavior: + """Test suite for git push behavior based on autoPush flag""" + + @pytest.mark.asyncio + async def test_push_skipped_when_autopush_false(self): + """Test that git push is NOT called when autoPush=false""" + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: None + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + + # Mock _run_cmd to track calls + run_cmd_calls = [] + async def mock_run_cmd(cmd, **kwargs): + run_cmd_calls.append(cmd) + # Return empty string for git status (no changes) + if cmd[0] == 'git' and cmd[1] == 'status': + return "?? 
test-file.txt" # Simulate changes + return "" + + adapter._run_cmd = mock_run_cmd + + # Mock _get_repos_config to return repo with autoPush=false + def mock_get_repos_config(): + return [ + { + 'name': 'repo-no-push', + 'input': {'url': 'https://github.com/org/repo', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork', 'branch': 'feature'}, + 'autoPush': False + } + ] + adapter._get_repos_config = mock_get_repos_config + + # Mock _fetch_token_for_url to avoid actual token fetching + async def mock_fetch_token(url): + return "fake-token" + adapter._fetch_token_for_url = mock_fetch_token + + # Call _push_results_if_any + await adapter._push_results_if_any() + + # Verify git push was NOT called + push_commands = [cmd for cmd in run_cmd_calls if 'push' in cmd] + assert len(push_commands) == 0, "git push should not be called when autoPush=false" + + @pytest.mark.asyncio + async def test_push_executed_when_autopush_true(self): + """Test that git push IS called when autoPush=true""" + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: None + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + + # Mock _run_cmd to track calls + run_cmd_calls = [] + async def mock_run_cmd(cmd, **kwargs): + run_cmd_calls.append(cmd) + # Return appropriate responses for different git commands + if cmd[0] == 'git': + if cmd[1] == 'status' and '--porcelain' in cmd: + return "?? test-file.txt" # Simulate changes + elif cmd[1] == 'remote': + return "output" # Simulate remote exists + return "" + + adapter._run_cmd = mock_run_cmd + + # Mock _get_repos_config to return repo with autoPush=true + def mock_get_repos_config(): + return [ + { + 'name': 'repo-with-push', + 'input': {'url': 'https://github.com/org/repo', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork', 'branch': 'feature'}, + 'autoPush': True + } + ] + adapter._get_repos_config = mock_get_repos_config + + # Mock _fetch_token_for_url to avoid actual token fetching + async def mock_fetch_token(url): + return "fake-token" + adapter._fetch_token_for_url = mock_fetch_token + + # Mock _url_with_token + adapter._url_with_token = lambda url, token: url + + # Call _push_results_if_any + await adapter._push_results_if_any() + + # Verify git push WAS called + push_commands = [cmd for cmd in run_cmd_calls if 'git' in cmd and 'push' in cmd] + assert len(push_commands) > 0, "git push should be called when autoPush=true" + + @pytest.mark.asyncio + async def test_mixed_autopush_settings(self): + """Test that only repos with autoPush=true are pushed""" + mock_context = Mock() + mock_context.workspace_path = "/tmp/workspace" + mock_context.session_id = "test-session" + # mock_context.model_name removed + # mock_context.system_prompt removed + mock_context.get_env = lambda k, d=None: None + + adapter = ClaudeCodeAdapter() + adapter.context = mock_context + + # Track which repos were pushed + pushed_repos = [] + run_cmd_calls = [] + + async def mock_run_cmd(cmd, cwd=None, **kwargs): + run_cmd_calls.append({'cmd': cmd, 'cwd': cwd}) + # Track push commands by the working directory + if cmd[0] == 'git' and 'push' in cmd and cwd: + repo_name = cwd.split('/')[-1] + pushed_repos.append(repo_name) + + # Return appropriate responses + if cmd[0] == 'git': + if cmd[1] == 'status' and '--porcelain' in cmd: + return "?? 
test-file.txt" # Simulate changes + elif cmd[1] == 'remote': + return "output" # Simulate remote exists + return "" + + adapter._run_cmd = mock_run_cmd + + # Mock _get_repos_config with mixed autoPush settings + def mock_get_repos_config(): + return [ + { + 'name': 'repo-push', + 'input': {'url': 'https://github.com/org/repo1', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork1', 'branch': 'feature'}, + 'autoPush': True + }, + { + 'name': 'repo-no-push', + 'input': {'url': 'https://github.com/org/repo2', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork2', 'branch': 'feature'}, + 'autoPush': False + }, + { + 'name': 'repo-push-2', + 'input': {'url': 'https://github.com/org/repo3', 'branch': 'main'}, + 'output': {'url': 'https://github.com/user/fork3', 'branch': 'feature'}, + 'autoPush': True + } + ] + adapter._get_repos_config = mock_get_repos_config + + # Mock _fetch_token_for_url + async def mock_fetch_token(url): + return "fake-token" + adapter._fetch_token_for_url = mock_fetch_token + + # Mock _url_with_token + adapter._url_with_token = lambda url, token: url + + # Call _push_results_if_any + await adapter._push_results_if_any() + + # Verify only repos with autoPush=true were pushed + assert 'repo-push' in pushed_repos, "repo-push should be pushed (autoPush=true)" + assert 'repo-push-2' in pushed_repos, "repo-push-2 should be pushed (autoPush=true)" + assert 'repo-no-push' not in pushed_repos, "repo-no-push should NOT be pushed (autoPush=false)" diff --git a/components/runners/claude-code-runner/verify_autopush.py b/components/runners/claude-code-runner/verify_autopush.py new file mode 100644 index 000000000..ee1ad9d33 --- /dev/null +++ b/components/runners/claude-code-runner/verify_autopush.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +""" +Verification script for autoPush implementation. + +This script demonstrates the autoPush flag extraction from REPOS_JSON +without requiring the full wrapper environment. +""" + +import json + + +def parse_repos_config(repos_json_str: str) -> list[dict]: + """ + Simplified version of _get_repos_config() for verification. + + Extracts name, input, output, and autoPush from REPOS_JSON. + """ + try: + if not repos_json_str.strip(): + return [] + + data = json.loads(repos_json_str) + if not isinstance(data, list): + return [] + + result = [] + for item in data: + if not isinstance(item, dict): + continue + + name = str(item.get('name', '')).strip() + input_obj = item.get('input') or {} + output_obj = item.get('output') + + # Get URL from input + url = str((input_obj or {}).get('url', '')).strip() + + if not name and url: + # Derive name from URL (simplified) + parts = url.rstrip('/').split('/') + if parts: + name = parts[-1].removesuffix('.git').strip() + + if name and isinstance(input_obj, dict) and url: + # Extract autoPush flag (default to False if not present) + auto_push = item.get('autoPush', False) + result.append({ + 'name': name, + 'input': input_obj, + 'output': output_obj, + 'autoPush': auto_push + }) + + return result + except Exception as e: + print(f"Error parsing repos: {e}") + return [] + + +def should_push_repo(repo_config: dict) -> tuple[bool, str]: + """ + Check if a repo should be pushed based on autoPush flag. 
+ + Returns (should_push, reason) + """ + name = repo_config.get('name', 'unknown') + auto_push = repo_config.get('autoPush', False) + + if not auto_push: + return False, f"autoPush disabled for {name}" + + output = repo_config.get('output') + if not output or not output.get('url'): + return False, f"No output URL configured for {name}" + + return True, f"Will push {name} (autoPush enabled)" + + +def main(): + """Run verification tests""" + + print("=== AutoPush Implementation Verification ===\n") + + # Test Case 1: Mixed autoPush flags + print("Test 1: Mixed autoPush settings") + repos_json = json.dumps([ + { + "name": "repo-push", + "input": {"url": "https://github.com/org/repo-push", "branch": "main"}, + "output": {"url": "https://github.com/user/fork-push", "branch": "feature"}, + "autoPush": True + }, + { + "name": "repo-no-push", + "input": {"url": "https://github.com/org/repo-no-push", "branch": "main"}, + "output": {"url": "https://github.com/user/fork-no-push", "branch": "feature"}, + "autoPush": False + }, + { + "name": "repo-default", + "input": {"url": "https://github.com/org/repo-default", "branch": "main"}, + "output": {"url": "https://github.com/user/fork-default", "branch": "feature"} + # No autoPush field - should default to False + } + ]) + + repos = parse_repos_config(repos_json) + print(f"Parsed {len(repos)} repos:") + + for repo in repos: + should_push, reason = should_push_repo(repo) + status = "✓ PUSH" if should_push else "✗ SKIP" + print(f" {status}: {reason}") + print(f" autoPush={repo.get('autoPush', 'not set')}") + + print() + + # Test Case 2: All autoPush enabled + print("Test 2: All repos with autoPush=true") + repos_json = json.dumps([ + { + "name": "repo1", + "input": {"url": "https://github.com/org/repo1", "branch": "main"}, + "output": {"url": "https://github.com/user/fork1", "branch": "feature"}, + "autoPush": True + }, + { + "name": "repo2", + "input": {"url": "https://github.com/org/repo2", "branch": "develop"}, + "output": {"url": "https://github.com/user/fork2", "branch": "feature"}, + "autoPush": True + } + ]) + + repos = parse_repos_config(repos_json) + for repo in repos: + should_push, reason = should_push_repo(repo) + status = "✓ PUSH" if should_push else "✗ SKIP" + print(f" {status}: {reason}") + + print() + + # Test Case 3: All autoPush disabled + print("Test 3: All repos with autoPush=false") + repos_json = json.dumps([ + { + "name": "repo1", + "input": {"url": "https://github.com/org/repo1", "branch": "main"}, + "autoPush": False + }, + { + "name": "repo2", + "input": {"url": "https://github.com/org/repo2", "branch": "develop"}, + "autoPush": False + } + ]) + + repos = parse_repos_config(repos_json) + for repo in repos: + should_push, reason = should_push_repo(repo) + status = "✓ PUSH" if should_push else "✗ SKIP" + print(f" {status}: {reason}") + + print("\n=== Verification Complete ===") + + +if __name__ == '__main__': + main() diff --git a/docs/adr/0002-user-token-authentication.md b/docs/adr/0002-user-token-authentication.md index 4a103558c..b83195a20 100644 --- a/docs/adr/0002-user-token-authentication.md +++ b/docs/adr/0002-user-token-authentication.md @@ -38,11 +38,20 @@ Chosen option: "User token for all operations", because: 4. **Least privilege:** Backend only uses service account when necessary 5. 
**Simplicity:** One pattern for user operations, exceptions documented

-**Exception:** Backend service account ONLY for:
+**Backend Exception:** Backend service account ONLY for:

* Writing CRs after user authorization validated (handlers/sessions.go:417)
* Minting service account tokens for runner pods (handlers/sessions.go:449)
* Cross-namespace operations backend is explicitly authorized for

+**Operator Exception:** Operator service account for startup migrations:
+* V1→V2 repo format migration (operator/internal/handlers/migration.go)
+* Runs once at operator startup before processing user requests
+* Updates existing CRs the operator already has RBAC access to
+* Only modifies data structure format, not repository content
+* Active sessions (Running/Creating) are skipped to avoid interference
+* Does NOT create new resources or access user repositories
+* See migration.go:29-47 for detailed security model documentation
+
### Consequences

**Positive:**