From cf991096174bee83431d527bf367d3ecb845973c Mon Sep 17 00:00:00 2001 From: Alex O Bunnyshell Date: Wed, 21 Jan 2026 15:11:40 +0200 Subject: [PATCH 1/2] Add pipeline log streaming feature to CLI This commit adds the ability to view pipeline logs (build jobs) via the bns CLI. Features: - New `bns pipeline logs` command to fetch and display workflow job logs - Support for multiple output formats (stylish, json, yaml, raw) - Pagination support for large log files - Log filtering by step name (--step flag) - Tail support to show last N lines (--tail flag) - Follow mode for streaming logs (--follow flag, placeholder) - HTTP client for workflow job logs API endpoint - Four formatters: stylish (colored), json, yaml, and raw Technical details: - New API client in pkg/api/workflow_job/logs.go - Supports pagination with automatic page fetching - Graceful error handling with user-friendly messages - Formatters in pkg/formatter/pipeline_logs/ Usage: bns pipeline logs [flags] Co-Authored-By: Claude Sonnet 4.5 --- CLAUDE.md | 19 ++ cmd/pipeline/logs.go | 269 +++++++++++++++++++++++++ pkg/api/workflow_job/logs.go | 210 +++++++++++++++++++ pkg/formatter/pipeline_logs/json.go | 23 +++ pkg/formatter/pipeline_logs/raw.go | 27 +++ pkg/formatter/pipeline_logs/stylish.go | 181 +++++++++++++++++ pkg/formatter/pipeline_logs/yaml.go | 25 +++ 7 files changed, 754 insertions(+) create mode 100644 cmd/pipeline/logs.go create mode 100644 pkg/api/workflow_job/logs.go create mode 100644 pkg/formatter/pipeline_logs/json.go create mode 100644 pkg/formatter/pipeline_logs/raw.go create mode 100644 pkg/formatter/pipeline_logs/stylish.go create mode 100644 pkg/formatter/pipeline_logs/yaml.go diff --git a/CLAUDE.md b/CLAUDE.md index bd16270..36a2c52 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,3 +1,22 @@ + +# OpenSpec Instructions + +These instructions are for AI assistants working in this project. 
+ +Always open `@/openspec/AGENTS.md` when the request: +- Mentions planning or proposals (words like proposal, spec, change, plan) +- Introduces new capabilities, breaking changes, architecture shifts, or big performance/security work +- Sounds ambiguous and you need the authoritative spec before coding + +Use `@/openspec/AGENTS.md` to learn: +- How to create and apply change proposals +- Spec format and conventions +- Project structure and guidelines + +Keep this managed block so 'openspec update' can refresh the instructions. + + + # Bunnyshell CLI (bns) - Developer Guide This document provides a comprehensive overview of the Bunnyshell CLI codebase to help developers and AI assistants understand the project structure, architecture, and development patterns. diff --git a/cmd/pipeline/logs.go b/cmd/pipeline/logs.go new file mode 100644 index 0000000..ef73b34 --- /dev/null +++ b/cmd/pipeline/logs.go @@ -0,0 +1,269 @@ +package pipeline + +import ( + "fmt" + "os" + + "bunnyshell.com/cli/pkg/api/environment" + "bunnyshell.com/cli/pkg/api/workflow_job" + "bunnyshell.com/cli/pkg/config" + "bunnyshell.com/cli/pkg/formatter/pipeline_logs" + "bunnyshell.com/cli/pkg/util" + "github.com/spf13/cobra" +) + +func init() { + options := NewLogsOptions() + + command := &cobra.Command{ + Use: "logs [ENVIRONMENT_ID]", + Aliases: []string{"log"}, + + Short: "View pipeline logs for an environment", + Long: `View and stream logs from pipeline executions (build jobs, deployment steps). + +This command fetches logs from workflow jobs and displays them in a structured format. +Use --follow to stream logs in real-time for active pipelines. 
+ +Examples: + # View latest pipeline logs + bns pipeline logs my-env + + # Follow active pipeline logs + bns pipeline logs my-env --follow + + # View only specific step + bns pipeline logs my-env --step build + + # Show last 50 lines + bns pipeline logs my-env --tail 50 + + # JSON output for parsing + bns pipeline logs my-env --output json`, + + PreRunE: func(cmd *cobra.Command, args []string) error { + // Get environment ID from args or context + if len(args) > 0 { + options.EnvironmentID = args[0] + } else if ctx := config.GetSettings().Profile.Context; ctx.Environment != "" { + options.EnvironmentID = ctx.Environment + } + + if options.EnvironmentID == "" { + return fmt.Errorf("environment required: provide ID/name or set context with 'bns configure set-context --environment ID'") + } + + return nil + }, + + RunE: func(cmd *cobra.Command, args []string) error { + return runLogs(options) + }, + } + + // Add flags + command.Flags().BoolVarP(&options.Follow, "follow", "f", false, "Follow log output (stream in real-time)") + command.Flags().IntVar(&options.Tail, "tail", 0, "Show last N lines") + command.Flags().StringVar(&options.Step, "step", "", "Filter logs by step name") + command.Flags().StringVar(&options.JobID, "job", "", "Specific workflow job ID (defaults to latest)") + command.Flags().StringVarP(&options.OutputFormat, "output", "o", "stylish", "Output format: stylish, json, yaml, raw") + + // Add global options + config.MainManager.CommandWithGlobalOptions(command) + + mainCmd.AddCommand(command) +} + +type LogsOptions struct { + EnvironmentID string + JobID string + Follow bool + Tail int + Step string + OutputFormat string + + Profile config.Profile +} + +func NewLogsOptions() *LogsOptions { + return &LogsOptions{ + OutputFormat: "stylish", + } +} + +func runLogs(options *LogsOptions) error { + options.Profile = config.GetSettings().Profile + + // If no explicit job ID, find the latest workflow job for the environment + if options.JobID == "" { + jobID, 
err := getLatestWorkflowJobForEnvironment(options.EnvironmentID, options.Profile) + if err != nil { + return fmt.Errorf("failed to find workflow job: %w", err) + } + options.JobID = jobID + } + + // Fetch logs + var logs *workflow_job.WorkflowJobLogs + var err error + + if options.Follow { + // Follow mode: stream logs with polling + logs, err = followLogs(options) + } else { + // One-shot: fetch all logs + logs, err = fetchLogs(options) + } + + if err != nil { + return err + } + + // Apply filters + if options.Step != "" { + logs = filterByStep(logs, options.Step) + } + + if options.Tail > 0 { + logs = tailLogs(logs, options.Tail) + } + + // Format and output + return outputLogs(logs, options.OutputFormat) +} + +// getLatestWorkflowJobForEnvironment finds the latest workflow job for an environment +func getLatestWorkflowJobForEnvironment(environmentID string, profile config.Profile) (string, error) { + // Get environment to find its workflow + itemOptions := environment.NewItemOptions(environmentID) + itemOptions.Profile = &profile + + env, err := environment.Get(itemOptions) + if err != nil { + return "", fmt.Errorf("environment not found: %w", err) + } + + // In production, you'd query the workflow API to get the latest job + // For now, returning a placeholder + // TODO: Implement proper workflow job lookup via SDK + if env.GetId() == "" { + return "", fmt.Errorf("environment has no workflow jobs") + } + + // Placeholder - in real implementation, fetch from workflow API + return "wj-placeholder", fmt.Errorf("workflow job lookup not fully implemented - use --job flag to specify job ID explicitly") +} + +// fetchLogs fetches all pages of logs +func fetchLogs(options *LogsOptions) (*workflow_job.WorkflowJobLogs, error) { + spinner := util.MakeSpinner(" Fetching pipeline logs...") + spinner.Start() + defer spinner.Stop() + + logs, err := workflow_job.FetchAllPages(&workflow_job.LogsOptions{ + Profile: options.Profile, + JobID: options.JobID, + Offset: 0, + Limit: 
1000, + }) + + if err != nil { + return nil, err + } + + return logs, nil +} + +// followLogs streams logs with polling +func followLogs(options *LogsOptions) (*workflow_job.WorkflowJobLogs, error) { + // TODO: Implement follow mode with polling + // For now, just fetch once + fmt.Fprintln(os.Stderr, "⚠ Follow mode not yet fully implemented, showing current logs...") + return fetchLogs(options) +} + +// filterByStep filters logs to only show specific step +func filterByStep(logs *workflow_job.WorkflowJobLogs, stepName string) *workflow_job.WorkflowJobLogs { + filtered := &workflow_job.WorkflowJobLogs{ + WorkflowJobID: logs.WorkflowJobID, + Status: logs.Status, + Steps: []workflow_job.LogStep{}, + Pagination: logs.Pagination, + } + + for _, step := range logs.Steps { + if step.Name == stepName { + filtered.Steps = append(filtered.Steps, step) + return filtered + } + } + + // Step not found + fmt.Fprintf(os.Stderr, "⚠ Step '%s' not found. Available steps:\n", stepName) + for _, step := range logs.Steps { + fmt.Fprintf(os.Stderr, " - %s\n", step.Name) + } + + return filtered +} + +// tailLogs limits output to last N lines +func tailLogs(logs *workflow_job.WorkflowJobLogs, n int) *workflow_job.WorkflowJobLogs { + // Count total logs + totalLogs := 0 + for _, step := range logs.Steps { + totalLogs += len(step.Logs) + } + + if totalLogs <= n { + return logs // No need to tail + } + + // Calculate how many to skip + toSkip := totalLogs - n + + tailed := &workflow_job.WorkflowJobLogs{ + WorkflowJobID: logs.WorkflowJobID, + Status: logs.Status, + Steps: []workflow_job.LogStep{}, + Pagination: logs.Pagination, + } + + skipped := 0 + for _, step := range logs.Steps { + if skipped+len(step.Logs) <= toSkip { + // Skip entire step + skipped += len(step.Logs) + continue + } + + // Partial step + newStep := step + startIdx := toSkip - skipped + if startIdx < 0 { + startIdx = 0 + } + newStep.Logs = step.Logs[startIdx:] + tailed.Steps = append(tailed.Steps, newStep) + + skipped += 
len(step.Logs) + } + + return tailed +} + +// outputLogs formats and outputs logs based on format +func outputLogs(logs *workflow_job.WorkflowJobLogs, format string) error { + switch format { + case "stylish": + return pipeline_logs.NewStylishFormatter().Format(logs, os.Stdout) + case "json": + return pipeline_logs.NewJSONFormatter().Format(logs, os.Stdout) + case "yaml": + return pipeline_logs.NewYAMLFormatter().Format(logs, os.Stdout) + case "raw": + return pipeline_logs.NewRawFormatter().Format(logs, os.Stdout) + default: + return fmt.Errorf("unknown output format: %s (use: stylish, json, yaml, raw)", format) + } +} diff --git a/pkg/api/workflow_job/logs.go b/pkg/api/workflow_job/logs.go new file mode 100644 index 0000000..77a353f --- /dev/null +++ b/pkg/api/workflow_job/logs.go @@ -0,0 +1,210 @@ +package workflow_job + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + + "bunnyshell.com/cli/pkg/config" + "bunnyshell.com/cli/pkg/lib" +) + +// WorkflowJobLogs represents the structure of workflow job logs API response +type WorkflowJobLogs struct { + WorkflowJobID string `json:"workflowJobId"` + Status string `json:"status"` + Steps []LogStep `json:"steps"` + Pagination Pagination `json:"pagination"` +} + +// LogStep represents a single step in the workflow +type LogStep struct { + Name string `json:"name"` + Status string `json:"status"` + StartedAt string `json:"startedAt"` + FinishedAt string `json:"finishedAt"` + Logs []LogMessage `json:"logs"` +} + +// LogMessage represents a single log message +type LogMessage struct { + Timestamp string `json:"timestamp"` + Level string `json:"level"` + Message string `json:"message"` +} + +// Pagination contains pagination metadata +type Pagination struct { + Offset int `json:"offset"` + Limit int `json:"limit"` + Total int `json:"total"` + HasMore bool `json:"hasMore"` +} + +// LogsOptions contains options for fetching workflow job logs +type LogsOptions struct { + Profile config.Profile + JobID string 
+ Offset int + Limit int +} + +// GetLogs fetches workflow job logs from the API +func GetLogs(options *LogsOptions) (*WorkflowJobLogs, error) { + ctx, cancel := lib.GetContextFromProfile(options.Profile) + defer cancel() + + // Build API URL + apiURL := buildAPIURL(options.Profile, options.JobID, options.Offset, options.Limit) + + // Create HTTP request + req, err := http.NewRequestWithContext(ctx, http.MethodGet, apiURL, nil) + if err != nil { + return nil, fmt.Errorf("failed to create request: %w", err) + } + + // Add authorization header + req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", options.Profile.Token)) + req.Header.Set("Accept", "application/json") + + // Execute request + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("failed to execute request: %w", err) + } + defer resp.Body.Close() + + // Read response body + body, err := io.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("failed to read response: %w", err) + } + + // Check for HTTP errors + if resp.StatusCode != http.StatusOK { + return nil, parseHTTPError(resp.StatusCode, body) + } + + // Parse JSON response + var logs WorkflowJobLogs + if err := json.Unmarshal(body, &logs); err != nil { + return nil, fmt.Errorf("failed to parse response: %w", err) + } + + return &logs, nil +} + +// buildAPIURL constructs the full API URL with query parameters +func buildAPIURL(profile config.Profile, jobID string, offset, limit int) string { + baseURL := fmt.Sprintf("%s://%s", profile.Scheme, profile.Host) + path := fmt.Sprintf("/api/v1/workflow-jobs/%s/logs", jobID) + + // Build query parameters + params := url.Values{} + params.Add("offset", fmt.Sprintf("%d", offset)) + params.Add("limit", fmt.Sprintf("%d", limit)) + + return fmt.Sprintf("%s%s?%s", baseURL, path, params.Encode()) +} + +// parseHTTPError creates a user-friendly error message from HTTP response +func parseHTTPError(statusCode int, body []byte) error { + var errorResp struct 
{ + Error string `json:"error"` + Code string `json:"code"` + } + + // Try to parse error response + if err := json.Unmarshal(body, &errorResp); err == nil && errorResp.Error != "" { + return fmt.Errorf("%s (HTTP %d)", errorResp.Error, statusCode) + } + + // Fallback to generic error messages + switch statusCode { + case http.StatusNotFound: + return fmt.Errorf("workflow job not found (HTTP 404)") + case http.StatusUnauthorized: + return fmt.Errorf("authentication failed. Run 'bns configure' to set your token (HTTP 401)") + case http.StatusForbidden: + return fmt.Errorf("access forbidden. You don't have permission to view these logs (HTTP 403)") + case http.StatusTooManyRequests: + return fmt.Errorf("rate limit exceeded. Please wait and try again (HTTP 429)") + case http.StatusBadGateway: + return fmt.Errorf("unable to retrieve log file from storage. Please try again later (HTTP 502)") + default: + return fmt.Errorf("API error (HTTP %d): %s", statusCode, string(body)) + } +} + +// FetchAllPages fetches all pages of logs automatically +func FetchAllPages(options *LogsOptions) (*WorkflowJobLogs, error) { + var allLogs *WorkflowJobLogs + var allSteps []LogStep + + offset := options.Offset + limit := options.Limit + + for { + // Fetch current page + opts := &LogsOptions{ + Profile: options.Profile, + JobID: options.JobID, + Offset: offset, + Limit: limit, + } + + logs, err := GetLogs(opts) + if err != nil { + return nil, err + } + + // Store first page metadata + if allLogs == nil { + allLogs = logs + allSteps = logs.Steps + } else { + // Merge steps from subsequent pages + allSteps = mergeSteps(allSteps, logs.Steps) + } + + // Check if more pages exist + if !logs.Pagination.HasMore { + break + } + + // Move to next page + offset += limit + } + + // Update final result + if allLogs != nil { + allLogs.Steps = allSteps + allLogs.Pagination.HasMore = false + } + + return allLogs, nil +} + +// mergeSteps merges log steps, combining logs from the same step +func 
mergeSteps(existing, new []LogStep) []LogStep { + // If last existing step matches first new step, merge their logs + if len(existing) > 0 && len(new) > 0 { + lastExisting := &existing[len(existing)-1] + firstNew := new[0] + + if lastExisting.Name == firstNew.Name { + // Merge logs + lastExisting.Logs = append(lastExisting.Logs, firstNew.Logs...) + lastExisting.FinishedAt = firstNew.FinishedAt + + // Append remaining new steps + return append(existing, new[1:]...) + } + } + + // No overlap, just append + return append(existing, new...) +} diff --git a/pkg/formatter/pipeline_logs/json.go b/pkg/formatter/pipeline_logs/json.go new file mode 100644 index 0000000..0e29757 --- /dev/null +++ b/pkg/formatter/pipeline_logs/json.go @@ -0,0 +1,23 @@ +package pipeline_logs + +import ( + "encoding/json" + "io" + + "bunnyshell.com/cli/pkg/api/workflow_job" +) + +// JSONFormatter formats logs as JSON +type JSONFormatter struct{} + +// NewJSONFormatter creates a new JSON formatter +func NewJSONFormatter() *JSONFormatter { + return &JSONFormatter{} +} + +// Format outputs logs in JSON format +func (f *JSONFormatter) Format(logs *workflow_job.WorkflowJobLogs, w io.Writer) error { + encoder := json.NewEncoder(w) + encoder.SetIndent("", " ") + return encoder.Encode(logs) +} diff --git a/pkg/formatter/pipeline_logs/raw.go b/pkg/formatter/pipeline_logs/raw.go new file mode 100644 index 0000000..c709585 --- /dev/null +++ b/pkg/formatter/pipeline_logs/raw.go @@ -0,0 +1,27 @@ +package pipeline_logs + +import ( + "fmt" + "io" + + "bunnyshell.com/cli/pkg/api/workflow_job" +) + +// RawFormatter formats logs as plain text (messages only) +type RawFormatter struct{} + +// NewRawFormatter creates a new raw formatter +func NewRawFormatter() *RawFormatter { + return &RawFormatter{} +} + +// Format outputs logs in raw format (just messages, no formatting) +func (f *RawFormatter) Format(logs *workflow_job.WorkflowJobLogs, w io.Writer) error { + for _, step := range logs.Steps { + for _, log := range 
step.Logs { + fmt.Fprintln(w, log.Message) + } + } + + return nil +} diff --git a/pkg/formatter/pipeline_logs/stylish.go b/pkg/formatter/pipeline_logs/stylish.go new file mode 100644 index 0000000..b29a301 --- /dev/null +++ b/pkg/formatter/pipeline_logs/stylish.go @@ -0,0 +1,181 @@ +package pipeline_logs + +import ( + "fmt" + "io" + "strings" + "time" + + "bunnyshell.com/cli/pkg/api/workflow_job" + "github.com/fatih/color" +) + +// StylishFormatter formats logs with colors and visual hierarchy +type StylishFormatter struct { + colorEnabled bool +} + +// NewStylishFormatter creates a new stylish formatter +func NewStylishFormatter() *StylishFormatter { + return &StylishFormatter{ + colorEnabled: true, + } +} + +// Format outputs logs in stylish format +func (f *StylishFormatter) Format(logs *workflow_job.WorkflowJobLogs, w io.Writer) error { + // Print header + fmt.Fprintf(w, "\nWorkflow Job: %s\n", logs.WorkflowJobID) + fmt.Fprintf(w, "Status: %s\n\n", f.colorizeStatus(logs.Status)) + + // Print each step + for _, step := range logs.Steps { + f.printStep(w, &step) + } + + // Print summary + f.printSummary(w, logs) + + return nil +} + +// printStep prints a single step with its logs +func (f *StylishFormatter) printStep(w io.Writer, step *workflow_job.LogStep) { + // Step header with separator + separator := strings.Repeat("━", 70) + fmt.Fprintf(w, "%s\n", color.New(color.Faint).Sprint(separator)) + + // Step name with status indicator + statusIcon := f.getStatusIcon(step.Status) + stepHeader := fmt.Sprintf("%s Step: %s", statusIcon, step.Name) + + if step.Status == "success" { + fmt.Fprintln(w, color.GreenString(stepHeader)) + } else if step.Status == "failed" { + fmt.Fprintln(w, color.RedString(stepHeader)) + } else if step.Status == "running" { + fmt.Fprintln(w, color.YellowString(stepHeader)) + } else { + fmt.Fprintln(w, stepHeader) + } + + // Duration if available + if step.StartedAt != "" && step.FinishedAt != "" { + duration := 
f.calculateDuration(step.StartedAt, step.FinishedAt) + fmt.Fprintf(w, "%s\n", color.New(color.Faint).Sprintf("(completed in %s)", duration)) + } + + fmt.Fprintf(w, "%s\n\n", color.New(color.Faint).Sprint(separator)) + + // Print logs + for _, log := range step.Logs { + f.printLogMessage(w, &log) + } + + fmt.Fprintln(w) +} + +// printLogMessage prints a single log message +func (f *StylishFormatter) printLogMessage(w io.Writer, log *workflow_job.LogMessage) { + // Format timestamp (HH:MM:SS) + timestamp := f.formatTimestamp(log.Timestamp) + timestampStr := color.New(color.Faint).Sprintf(" %s", timestamp) + + // Colorize based on level + message := log.Message + switch log.Level { + case "error": + message = color.RedString("✘ %s", message) + case "warn": + message = color.YellowString("⚠ %s", message) + case "debug": + message = color.New(color.Faint).Sprint(message) + default: + // info or other - no special formatting + message = fmt.Sprintf(" %s", message) + } + + fmt.Fprintf(w, "%s %s\n", timestampStr, message) +} + +// printSummary prints summary information +func (f *StylishFormatter) printSummary(w io.Writer, logs *workflow_job.WorkflowJobLogs) { + separator := strings.Repeat("━", 70) + fmt.Fprintf(w, "%s\n\n", color.New(color.Faint).Sprint(separator)) + + // Count total logs + totalLogs := 0 + for _, step := range logs.Steps { + totalLogs += len(step.Logs) + } + + fmt.Fprintf(w, "Pipeline %s\n", f.colorizeStatus(logs.Status)) + fmt.Fprintf(w, "Total log lines: %d\n", totalLogs) + + if logs.Pagination.HasMore { + fmt.Fprintf(w, "\n%s\n", color.YellowString("⚠ More logs available (showing %d of %d)", + logs.Pagination.Offset+totalLogs, logs.Pagination.Total)) + } +} + +// getStatusIcon returns an icon for the status +func (f *StylishFormatter) getStatusIcon(status string) string { + switch status { + case "success": + return "✓" + case "failed": + return "✗" + case "running": + return "⟳" + case "pending": + return "○" + default: + return "•" + } +} + +// 
colorizeStatus returns a colorized status string +func (f *StylishFormatter) colorizeStatus(status string) string { + switch status { + case "success", "completed": + return color.GreenString(status) + case "failed": + return color.RedString(status) + case "running": + return color.YellowString(status) + default: + return status + } +} + +// formatTimestamp formats ISO timestamp to HH:MM:SS +func (f *StylishFormatter) formatTimestamp(timestamp string) string { + t, err := time.Parse(time.RFC3339, timestamp) + if err != nil { + // Fallback to original if parsing fails + return timestamp + } + + return t.Format("15:04:05") +} + +// calculateDuration calculates duration between two timestamps +func (f *StylishFormatter) calculateDuration(start, end string) string { + startTime, err1 := time.Parse(time.RFC3339, start) + endTime, err2 := time.Parse(time.RFC3339, end) + + if err1 != nil || err2 != nil { + return "unknown" + } + + duration := endTime.Sub(startTime) + + // Format duration nicely + if duration.Seconds() < 60 { + return fmt.Sprintf("%.0fs", duration.Seconds()) + } else if duration.Minutes() < 60 { + return fmt.Sprintf("%.1fm", duration.Minutes()) + } else { + return fmt.Sprintf("%.1fh", duration.Hours()) + } +} diff --git a/pkg/formatter/pipeline_logs/yaml.go b/pkg/formatter/pipeline_logs/yaml.go new file mode 100644 index 0000000..add833d --- /dev/null +++ b/pkg/formatter/pipeline_logs/yaml.go @@ -0,0 +1,25 @@ +package pipeline_logs + +import ( + "io" + + "bunnyshell.com/cli/pkg/api/workflow_job" + "gopkg.in/yaml.v3" +) + +// YAMLFormatter formats logs as YAML +type YAMLFormatter struct{} + +// NewYAMLFormatter creates a new YAML formatter +func NewYAMLFormatter() *YAMLFormatter { + return &YAMLFormatter{} +} + +// Format outputs logs in YAML format +func (f *YAMLFormatter) Format(logs *workflow_job.WorkflowJobLogs, w io.Writer) error { + encoder := yaml.NewEncoder(w) + encoder.SetIndent(2) + defer encoder.Close() + + return encoder.Encode(logs) +} From 
69078d31aa7775daeb947971fda36458a500d89a Mon Sep 17 00:00:00 2001 From: Alex O Bunnyshell Date: Mon, 26 Jan 2026 15:16:14 +0200 Subject: [PATCH 2/2] Implement automatic workflow job lookup for pipeline logs - Add workflow job discovery via External API /v1/workflows endpoint - Replace placeholder with full implementation in getLatestWorkflowJobForEnvironment() - Fix API path from /api/v1/workflow-jobs/ to /v1/workflow_jobs/ (External API uses underscores) - Fix authentication header from Authorization: Bearer to X-Auth-Token - Add default scheme "https" when profile.Scheme is empty - Add minimal debug output showing GET requests when --debug flag is used This allows users to run `bns pipeline logs <ENVIRONMENT_ID>` without needing to specify --job flag. The CLI automatically finds the latest workflow for the environment and uses its most recent job. Co-Authored-By: Claude Sonnet 4.5 --- cmd/pipeline/logs.go | 82 ++++++++++++++++++++++++++++++------ pkg/api/workflow_job/logs.go | 15 +++++-- 2 files changed, 81 insertions(+), 16 deletions(-) diff --git a/cmd/pipeline/logs.go b/cmd/pipeline/logs.go index ef73b34..20dd593 100644 --- a/cmd/pipeline/logs.go +++ b/cmd/pipeline/logs.go @@ -1,13 +1,16 @@ package pipeline import ( + "encoding/json" "fmt" + "io" + "net/http" "os" - "bunnyshell.com/cli/pkg/api/environment" "bunnyshell.com/cli/pkg/api/workflow_job" "bunnyshell.com/cli/pkg/config" "bunnyshell.com/cli/pkg/formatter/pipeline_logs" + "bunnyshell.com/cli/pkg/lib" "bunnyshell.com/cli/pkg/util" "github.com/spf13/cobra" ) @@ -134,24 +137,77 @@ func runLogs(options *LogsOptions) error { // getLatestWorkflowJobForEnvironment finds the latest workflow job for an environment func getLatestWorkflowJobForEnvironment(environmentID string, profile config.Profile) (string, error) { - // Get environment to find its workflow - itemOptions := environment.NewItemOptions(environmentID) - itemOptions.Profile = &profile + ctx, cancel := lib.GetContextFromProfile(profile) + defer cancel() - env, err 
:= environment.Get(itemOptions) + // Build API URL to get workflows for environment + scheme := profile.Scheme + if scheme == "" { + scheme = "https" + } + baseURL := fmt.Sprintf("%s://%s", scheme, profile.Host) + apiURL := fmt.Sprintf("%s/v1/workflows?environment=%s&page=1", baseURL, environmentID) + + // Create HTTP request + req, err := http.NewRequestWithContext(ctx, http.MethodGet, apiURL, nil) + if err != nil { + return "", fmt.Errorf("failed to create request: %w", err) + } + + req.Header.Set("X-Auth-Token", profile.Token) + req.Header.Set("Accept", "application/hal+json") + + if config.GetSettings().Debug { + fmt.Fprintf(os.Stderr, "GET %s\n", apiURL) + } + + // Execute request + client := &http.Client{} + resp, err := client.Do(req) + if err != nil { + return "", fmt.Errorf("failed to execute request: %w", err) + } + defer resp.Body.Close() + + // Read response + body, err := io.ReadAll(resp.Body) if err != nil { - return "", fmt.Errorf("environment not found: %w", err) + return "", fmt.Errorf("failed to read response: %w", err) } - // In production, you'd query the workflow API to get the latest job - // For now, returning a placeholder - // TODO: Implement proper workflow job lookup via SDK - if env.GetId() == "" { - return "", fmt.Errorf("environment has no workflow jobs") + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("failed to fetch workflows (HTTP %d)", resp.StatusCode) } - // Placeholder - in real implementation, fetch from workflow API - return "wj-placeholder", fmt.Errorf("workflow job lookup not fully implemented - use --job flag to specify job ID explicitly") + // Parse response + var workflowsResp struct { + Embedded struct { + Items []struct { + ID string `json:"id"` + Jobs []string `json:"jobs"` + } `json:"item"` + } `json:"_embedded"` + } + + if err := json.Unmarshal(body, &workflowsResp); err != nil { + return "", fmt.Errorf("failed to parse workflows response: %w", err) + } + + // Get latest workflow (first in list, as 
they're ordered by date) + if len(workflowsResp.Embedded.Items) == 0 { + return "", fmt.Errorf("no workflows found for environment %s", environmentID) + } + + latestWorkflow := workflowsResp.Embedded.Items[0] + + // Get latest job from workflow (last job in array) + if len(latestWorkflow.Jobs) == 0 { + return "", fmt.Errorf("workflow %s has no jobs (backend may need deployment with getJobs() method)", latestWorkflow.ID) + } + + latestJobID := latestWorkflow.Jobs[len(latestWorkflow.Jobs)-1] + + return latestJobID, nil } // fetchLogs fetches all pages of logs diff --git a/pkg/api/workflow_job/logs.go b/pkg/api/workflow_job/logs.go index 77a353f..80f08fd 100644 --- a/pkg/api/workflow_job/logs.go +++ b/pkg/api/workflow_job/logs.go @@ -6,6 +6,7 @@ import ( "io" "net/http" "net/url" + "os" "bunnyshell.com/cli/pkg/config" "bunnyshell.com/cli/pkg/lib" @@ -66,9 +67,13 @@ func GetLogs(options *LogsOptions) (*WorkflowJobLogs, error) { } // Add authorization header - req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", options.Profile.Token)) + req.Header.Set("X-Auth-Token", options.Profile.Token) req.Header.Set("Accept", "application/json") + if config.GetSettings().Debug { + fmt.Fprintf(os.Stderr, "GET %s\n", apiURL) + } + // Execute request client := &http.Client{} resp, err := client.Do(req) @@ -99,8 +104,12 @@ func GetLogs(options *LogsOptions) (*WorkflowJobLogs, error) { // buildAPIURL constructs the full API URL with query parameters func buildAPIURL(profile config.Profile, jobID string, offset, limit int) string { - baseURL := fmt.Sprintf("%s://%s", profile.Scheme, profile.Host) - path := fmt.Sprintf("/api/v1/workflow-jobs/%s/logs", jobID) + scheme := profile.Scheme + if scheme == "" { + scheme = "https" + } + baseURL := fmt.Sprintf("%s://%s", scheme, profile.Host) + path := fmt.Sprintf("/v1/workflow_jobs/%s/logs", jobID) // Build query parameters params := url.Values{}