From 8d323aec03ad3fe6f444e6b8eb197f225e0fe5a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Sat, 13 Dec 2025 15:19:06 +0100 Subject: [PATCH 01/10] Add a builkite pipeline generation package to dev tools --- dev-tools/buildkite/pipeline/README.md | 262 ++++++++++ dev-tools/buildkite/pipeline/agents.go | 135 +++++ dev-tools/buildkite/pipeline/doc.go | 55 ++ dev-tools/buildkite/pipeline/example_test.go | 204 ++++++++ dev-tools/buildkite/pipeline/images.go | 85 ++++ dev-tools/buildkite/pipeline/pipeline.go | 186 +++++++ dev-tools/buildkite/pipeline/pipeline_test.go | 376 ++++++++++++++ dev-tools/buildkite/pipeline/plugins.go | 133 +++++ dev-tools/buildkite/pipeline/step.go | 481 ++++++++++++++++++ .../pipeline/testdata/example_pipeline.yml | 181 +++++++ go.mod | 4 +- go.sum | 4 + internal/edot/go.mod | 2 +- wrapper/windows/archive-proxy/go.mod | 2 +- 14 files changed, 2107 insertions(+), 3 deletions(-) create mode 100644 dev-tools/buildkite/pipeline/README.md create mode 100644 dev-tools/buildkite/pipeline/agents.go create mode 100644 dev-tools/buildkite/pipeline/doc.go create mode 100644 dev-tools/buildkite/pipeline/example_test.go create mode 100644 dev-tools/buildkite/pipeline/images.go create mode 100644 dev-tools/buildkite/pipeline/pipeline.go create mode 100644 dev-tools/buildkite/pipeline/pipeline_test.go create mode 100644 dev-tools/buildkite/pipeline/plugins.go create mode 100644 dev-tools/buildkite/pipeline/step.go create mode 100644 dev-tools/buildkite/pipeline/testdata/example_pipeline.yml diff --git a/dev-tools/buildkite/pipeline/README.md b/dev-tools/buildkite/pipeline/README.md new file mode 100644 index 00000000000..13ef75e73e7 --- /dev/null +++ b/dev-tools/buildkite/pipeline/README.md @@ -0,0 +1,262 @@ +# Buildkite Pipeline Generator + +This package provides a Go API for generating Buildkite pipeline YAML files programmatically using the official Buildkite SDK. 
+ +## Overview + +Instead of maintaining YAML files directly, pipelines are defined in Go code. This provides: + +- **Type safety**: Catch configuration errors at compile time +- **Reusability**: Share common configurations (agents, plugins, env vars) across pipelines +- **Testability**: Unit test pipeline generation and compare against expected output +- **Maintainability**: Refactor and update pipelines using standard Go tooling + +## Quick Start + +```go +package main + +import ( + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" +) + +func main() { + // Create a command step + step := pipeline.CommandWithKey("Unit tests", "unit-tests", ".buildkite/scripts/steps/unit-tests.sh") + pipeline.SetAgent(step, pipeline.GCPAgent(pipeline.ImageUbuntu2204X86, pipeline.MachineTypeN2Standard8)) + pipeline.SetArtifactPaths(step, "build/TEST-*.xml") + pipeline.SetRetry(step, 1, true) + pipeline.WithVaultDockerLogin(step) + + // Create a trigger step + trigger := pipeline.Trigger("Trigger downstream", "downstream-pipeline") + pipeline.SetTriggerIf(trigger, "build.pull_request.id == null") + + // Build the pipeline + p := pipeline.New(). + Env("VAULT_PATH", pipeline.VaultPathGCP). + WithImageEnvVars(). + Add(step). + Wait(). 
+ Add(trigger) + + if err := p.WriteYAML(".buildkite/pipeline.yml"); err != nil { + panic(err) + } +} +``` + +## API Reference + +### Pipeline + +```go +// Create a new pipeline +p := pipeline.New() + +// Add environment variables +p.Env("KEY", "value") +p.EnvMap(map[string]string{"KEY1": "val1", "KEY2": "val2"}) +p.WithImageEnvVars() // Adds all standard VM image env vars + +// Add steps +p.Add(step) // Accepts CommandStep, GroupStep, TriggerStep, InputStep, BlockStep +p.Wait() // Add wait step + +// Output +yaml, err := p.MarshalYAML() +err := p.WriteYAML("path/to/pipeline.yml") +``` + +### Command Steps + +```go +// Create command steps +step := pipeline.Command("Label", "command") +step := pipeline.CommandWithKey("Label", "key", "command") + +// Configure command steps +pipeline.SetAgent(step, agent) +pipeline.SetEnv(step, map[string]string{"KEY": "value"}) +pipeline.AddEnv(step, "KEY", "value") +pipeline.SetArtifactPaths(step, "path1", "path2") +pipeline.SetRetry(step, automaticLimit, manualAllowed) +pipeline.SetRetryAutomatic(step, limit) +pipeline.SetRetryManual(step) +pipeline.SetDependsOn(step, "other-step") +pipeline.SetDependsOnWithFailure(step, pipeline.DependsOnDep{Step: "s1", AllowFailure: true}) +pipeline.SetMatrix(step, map[string][]string{"os": {"linux", "windows"}}) +pipeline.SetSimpleMatrix(step, []string{"a", "b", "c"}) +pipeline.SetIf(step, "build.pull_request.id != null") +pipeline.SetBranches(step, "main 8.* 9.*") +pipeline.SetTimeout(step, 60) +pipeline.SetParallelism(step, 4) +pipeline.SetNotify(step, "buildkite/context") +pipeline.AddPlugin(step, source, config) +``` + +### Group Steps + +```go +// Create group steps +group := pipeline.Group("Label") +group := pipeline.GroupWithKey("Label", "key") + +// Configure group steps +pipeline.AddGroupStep(group, step) +pipeline.SetGroupDependsOn(group, "previous") +pipeline.SetGroupNotify(group, "buildkite/context") +``` + +### Trigger Steps + +```go +// Create trigger steps +trigger := 
pipeline.Trigger("Label", "pipeline-slug") + +// Configure trigger steps +pipeline.SetTriggerIf(trigger, "condition") +pipeline.SetTriggerAsync(trigger, true) +pipeline.SetTriggerBranches(trigger, "main") +pipeline.SetTriggerBuild(trigger, commit, branch, envMap) +``` + +### Input Steps + +```go +// Create input steps +input := pipeline.Input("Build parameters") + +// Configure input steps +pipeline.SetInputIf(input, "condition") +pipeline.AddInputField(input, "Label", "key", "default", "hint", required) +pipeline.AddSelectField(input, "Label", "key", "hint", required, + pipeline.SelectOption{Label: "Yes", Value: "1"}, + pipeline.SelectOption{Label: "No", Value: "0"}, +) +``` + +### Block Steps + +```go +block := pipeline.Block("Approval required") +``` + +## Agent Configuration + +```go +// GCP agents +pipeline.GCPAgent(image, machineType) +pipeline.GCPAgentWithDisk(image, machineType, diskSizeGB, diskType) + +// AWS agents +pipeline.AWSAgent(image, instanceType) +pipeline.AWSAgentWithDisk(image, instanceType, diskSizeGB) + +// Orka agents (macOS) +pipeline.OrkaAgent(imagePrefix) + +// Docker agents +pipeline.DockerAgent(image) +pipeline.DockerAgentWithHooks(image) + +// Presets +pipeline.AgentUbuntu2204X86Standard8 +pipeline.AgentUbuntu2204ARMM6gXLarge +pipeline.AgentWin2022Standard8 +pipeline.AgentMacOS15ARM +pipeline.AgentMacOS13X86 +pipeline.BeatsCI() +pipeline.JunitAnnotateAgent() +``` + +## Plugin Configuration + +```go +// Add plugins to steps +pipeline.WithVaultDockerLogin(step) +pipeline.WithVaultECKeyProd(step) +pipeline.WithGoogleOIDC(step) +pipeline.WithGCPSecretManagerServerless(step) +pipeline.WithJunitAnnotate(step, artifactPattern) +pipeline.WithTestCollector(step, filesPattern, format) + +// Get plugin source and config for manual addition +source, config := pipeline.PluginVaultDockerLogin() +source, config := pipeline.PluginVaultSecrets(path, field, envVar) +source, config := pipeline.PluginGCPSecretManager(envSecrets) +source, config := 
pipeline.PluginGoogleOIDC() +source, config := pipeline.PluginJunitAnnotate(artifactPattern) +source, config := pipeline.PluginTestCollector(filesPattern, format) +``` + +## Constants + +### VM Images + +```go +pipeline.ImageUbuntu2204X86 // Ubuntu 22.04 x86_64 +pipeline.ImageUbuntu2204ARM // Ubuntu 22.04 ARM64 +pipeline.ImageUbuntu2404X86 // Ubuntu 24.04 x86_64 +pipeline.ImageUbuntu2404ARM // Ubuntu 24.04 ARM64 +pipeline.ImageWin2022 // Windows Server 2022 +pipeline.ImageWin2025 // Windows Server 2025 +pipeline.ImageRHEL8 // RHEL 8 +pipeline.ImageRHEL10 // RHEL 10 +pipeline.ImageDebian11 // Debian 11 +pipeline.ImageDebian13 // Debian 13 +``` + +### Machine Types + +```go +pipeline.MachineTypeN2Standard8 // GCP n2-standard-8 +pipeline.InstanceTypeM6gXLarge // AWS m6g.xlarge +pipeline.DiskSize200GB // 200 GB disk +``` + +### Vault Paths + +```go +pipeline.VaultPathGCP // GCP vault path +pipeline.VaultPathDockerRegistry // Docker registry credentials +pipeline.VaultPathECKeyProd // EC production API key +``` + +## Testing + +The package includes utilities for testing pipeline generation: + +```go +// Compare generated pipeline with existing YAML file +result, err := pipeline.CompareWithFile(p, ".buildkite/pipeline.yml") +if !result.Equal { + t.Errorf("pipelines differ:\n%s", result.Diff) +} + +// Compare two YAML representations +result, err := pipeline.Compare(generated, expected) +``` + +## Migration Strategy + +1. **Start with simpler pipelines**: Begin migrating less complex pipelines first +2. **Add parity tests**: Create tests that compare generated YAML with existing files +3. **Keep both during transition**: Maintain both YAML and Go during migration +4. **Validate before switching**: Ensure generated YAML passes schema validation +5. 
**Switch source of truth**: Once confident, generate YAML from Go in CI + +## Directory Structure + +``` +dev-tools/buildkite/ +├── pipeline/ # This package - core types and helpers +│ ├── agents.go # Agent configuration helpers +│ ├── images.go # VM image constants +│ ├── pipeline.go # Pipeline wrapper +│ ├── plugins.go # Plugin configuration helpers +│ └── step.go # Step creation and configuration helpers +└── pipelines/ # Pipeline definitions (future) + ├── main.go # pipeline.yml + └── integration.go # integration.pipeline.yml +``` diff --git a/dev-tools/buildkite/pipeline/agents.go b/dev-tools/buildkite/pipeline/agents.go new file mode 100644 index 00000000000..7ac4602154a --- /dev/null +++ b/dev-tools/buildkite/pipeline/agents.go @@ -0,0 +1,135 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipeline + +// Agent represents a Buildkite agent configuration. +// It is serialized as a map in the YAML output. +type Agent map[string]any + +// GCPAgent creates an agent configuration for Google Cloud Platform. +func GCPAgent(image, machineType string) Agent { + return Agent{ + "provider": "gcp", + "image": image, + "machineType": machineType, + } +} + +// GCPAgentWithDisk creates a GCP agent configuration with custom disk settings. +func GCPAgentWithDisk(image, machineType string, diskSizeGB int, diskType string) Agent { + a := GCPAgent(image, machineType) + a["diskSizeGb"] = diskSizeGB + if diskType != "" { + a["disk_type"] = diskType + } + return a +} + +// AWSAgent creates an agent configuration for Amazon Web Services. +func AWSAgent(image, instanceType string) Agent { + return Agent{ + "provider": "aws", + "image": image, + "instanceType": instanceType, + } +} + +// AWSAgentWithDisk creates an AWS agent configuration with custom disk size. 
+func AWSAgentWithDisk(image, instanceType string, diskSizeGB int) Agent { + a := AWSAgent(image, instanceType) + a["diskSizeGb"] = diskSizeGB + return a +} + +// OrkaAgent creates an agent configuration for Orka (macOS). +func OrkaAgent(imagePrefix string) Agent { + return Agent{ + "provider": "orka", + "imagePrefix": imagePrefix, + } +} + +// DockerAgent creates an agent configuration that runs in a Docker container. +func DockerAgent(image string) Agent { + return Agent{ + "image": image, + } +} + +// DockerAgentWithHooks creates a Docker agent with custom global hooks enabled. +func DockerAgentWithHooks(image string) Agent { + return Agent{ + "image": image, + "useCustomGlobalHooks": true, + } +} + +// WithDiskSize adds disk size configuration to an existing agent. +func (a Agent) WithDiskSize(sizeGB int) Agent { + a["diskSizeGb"] = sizeGB + return a +} + +// WithDiskType adds disk type configuration to an existing agent (GCP only). +func (a Agent) WithDiskType(diskType string) Agent { + a["disk_type"] = diskType + return a +} + +// WithMachineType sets the machine type (GCP) or instance type (AWS). +func (a Agent) WithMachineType(machineType string) Agent { + if a["provider"] == "aws" { + a["instanceType"] = machineType + } else { + a["machineType"] = machineType + } + return a +} + +// Clone creates a copy of the agent configuration. +func (a Agent) Clone() Agent { + clone := make(Agent, len(a)) + for k, v := range a { + clone[k] = v + } + return clone +} + +// Common agent presets for frequently used configurations. +var ( + // AgentUbuntu2204X86Standard8 is a standard Ubuntu 22.04 x86_64 agent on GCP. + AgentUbuntu2204X86Standard8 = GCPAgent(ImageUbuntu2204X86, MachineTypeN2Standard8) + + // AgentUbuntu2204ARMM6gXLarge is a standard Ubuntu 22.04 ARM64 agent on AWS. + AgentUbuntu2204ARMM6gXLarge = AWSAgent(ImageUbuntu2204ARM, InstanceTypeM6gXLarge) + + // AgentUbuntu2404X86Standard8 is a standard Ubuntu 24.04 x86_64 agent on GCP. 
+ AgentUbuntu2404X86Standard8 = GCPAgent(ImageUbuntu2404X86, MachineTypeN2Standard8) + + // AgentUbuntu2404ARMM6g2XLarge is a standard Ubuntu 24.04 ARM64 agent on AWS. + AgentUbuntu2404ARMM6g2XLarge = AWSAgent(ImageUbuntu2404ARM, InstanceTypeM6g2XLarge) + + // AgentWin2022Standard8 is a standard Windows 2022 agent on GCP. + AgentWin2022Standard8 = GCPAgentWithDisk(ImageWin2022, MachineTypeN2Standard8, DiskSize200GB, "pd-ssd") + + // AgentWin2016Standard8 is a standard Windows 2016 agent on GCP. + AgentWin2016Standard8 = GCPAgentWithDisk(ImageWin2016, MachineTypeN2Standard8, DiskSize200GB, "pd-ssd") + + // AgentMacOS15ARM is a macOS 15 ARM agent on Orka. + AgentMacOS15ARM = OrkaAgent("generic-base-15-arm-002") + + // AgentMacOS13X86 is a macOS 13 x86_64 agent on Orka. + AgentMacOS13X86 = OrkaAgent("generic-13-ventura-x64") +) + +// BeatsCI returns the standard Beats CI Docker agent. +func BeatsCI() Agent { + return DockerAgentWithHooks("docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5") +} + +// JunitAnnotateAgent returns the agent for junit annotation steps. +func JunitAnnotateAgent() Agent { + return DockerAgent("docker.elastic.co/ci-agent-images/buildkite-junit-annotate:1.0") +} diff --git a/dev-tools/buildkite/pipeline/doc.go b/dev-tools/buildkite/pipeline/doc.go new file mode 100644 index 00000000000..0527ecc49a1 --- /dev/null +++ b/dev-tools/buildkite/pipeline/doc.go @@ -0,0 +1,55 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +// Package pipeline provides types and helpers for generating Buildkite pipeline +// YAML files programmatically using Go. 
+// +// This package wraps github.com/buildkite/buildkite-sdk with additional helpers +// specific to the Elastic Agent project, including predefined agent configurations, +// plugin helpers, and shared constants for VM images. +// +// # Basic Usage +// +// step := pipeline.CommandWithKey("Unit tests", "unit-tests", ".buildkite/scripts/steps/unit-tests.sh") +// pipeline.SetAgent(step, pipeline.GCPAgent(pipeline.ImageUbuntu2204X86, pipeline.MachineTypeN2Standard8)) +// pipeline.SetArtifactPaths(step, "build/TEST-*.xml") +// pipeline.SetRetry(step, 1, true) +// +// p := pipeline.New(). +// Env("VAULT_PATH", pipeline.VaultPathGCP). +// Add(step) +// +// yaml, err := p.MarshalYAML() +// +// # Agent Configuration +// +// The package provides helpers for common agent configurations: +// +// // GCP agent +// pipeline.GCPAgent(pipeline.ImageUbuntu2204X86, pipeline.MachineTypeN2Standard8) +// +// // AWS agent +// pipeline.AWSAgent(pipeline.ImageUbuntu2204ARM, pipeline.InstanceTypeM6gXLarge) +// +// // Orka agent (macOS) +// pipeline.OrkaAgent("generic-base-15-arm-002") +// +// # Plugin Configuration +// +// Common plugins are available as helper functions: +// +// step := pipeline.Command("Build", "make build") +// pipeline.WithVaultDockerLogin(step) +// pipeline.AddPlugin(step, source, config) +// +// # Testing +// +// The package includes utilities for comparing generated YAML with existing files +// to ensure migration parity: +// +// result, err := pipeline.CompareWithFile(p, ".buildkite/pipeline.yml") +// if !result.Equal { +// fmt.Println(result.Diff) +// } +package pipeline diff --git a/dev-tools/buildkite/pipeline/example_test.go b/dev-tools/buildkite/pipeline/example_test.go new file mode 100644 index 00000000000..02dbf9c3e59 --- /dev/null +++ b/dev-tools/buildkite/pipeline/example_test.go @@ -0,0 +1,204 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. 
Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipeline_test + +import ( + "fmt" + "testing" + + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" +) + +// TestGeneratePipeline generates a pipeline similar to .buildkite/pipeline.yml +// and compares it against a known fixture file. +func TestGeneratePipeline(t *testing.T) { + p := generateExamplePipeline() + + result, err := pipeline.CompareWithFile(p, "testdata/example_pipeline.yml") + if err != nil { + t.Fatalf("failed to compare pipeline: %v", err) + } + + if !result.Equal { + t.Errorf("generated pipeline does not match expected:\n%s", result.Diff) + } +} + +// TestGeneratePipelineYAML verifies the pipeline can be marshaled to YAML without error. +func TestGeneratePipelineYAML(t *testing.T) { + p := generateExamplePipeline() + + yaml, err := p.MarshalYAML() + if err != nil { + t.Fatalf("failed to marshal pipeline: %v", err) + } + + if len(yaml) == 0 { + t.Error("expected non-empty YAML output") + } +} + +// generateExamplePipeline creates a pipeline demonstrating all major features. 
+// This generates a pipeline similar to .buildkite/pipeline.yml +func generateExamplePipeline() *pipeline.Pipeline { + // Common artifact paths for test steps + testArtifacts := []string{ + "build/TEST-*.html", + "build/TEST-*.xml", + "build/diagnostics/*", + "coverage-*.out", + } + + // check-ci step + checkCIStep := pipeline.CommandWithKey("check-ci", "check-ci", ".buildkite/scripts/steps/check-ci.sh") + pipeline.SetAgent(checkCIStep, pipeline.Agent{ + "provider": "gcp", + "image": "${IMAGE_UBUNTU_2204_X86_64}", + }) + pipeline.SetRetryManual(checkCIStep) + + // Build the unit tests group + unitTestsGroup := pipeline.GroupWithKey("Unit tests", "unit-tests") + + // Add Ubuntu 22.04 unit tests + ubuntuStep := pipeline.CommandWithKey("Unit tests - Ubuntu 22.04", "unit-tests-2204", ".buildkite/scripts/steps/unit-tests.sh") + pipeline.SetAgent(ubuntuStep, pipeline.Agent{ + "provider": "gcp", + "image": "${IMAGE_UBUNTU_2204_X86_64}", + "machineType": pipeline.MachineTypeN2Standard8, + }) + pipeline.SetArtifactPaths(ubuntuStep, testArtifacts...) + pipeline.SetRetry(ubuntuStep, 1, true) + pipeline.AddGroupStep(unitTestsGroup, ubuntuStep) + + // Add Ubuntu 22.04 FIPS unit tests + fipsStep := pipeline.CommandWithKey("Unit tests - Ubuntu 22.04 with requirefips build tag", "unit-tests-2204-fips-tag", ".buildkite/scripts/steps/unit-tests.sh") + pipeline.SetAgent(fipsStep, pipeline.Agent{ + "provider": "gcp", + "image": "${IMAGE_UBUNTU_2204_X86_64}", + "machineType": pipeline.MachineTypeN2Standard8, + }) + pipeline.AddEnv(fipsStep, "FIPS", "true") + pipeline.SetArtifactPaths(fipsStep, testArtifacts...) 
+ pipeline.SetRetry(fipsStep, 1, true) + pipeline.AddGroupStep(unitTestsGroup, fipsStep) + + // Add Ubuntu 22.04 ARM unit tests + armStep := pipeline.CommandWithKey("Unit tests - Ubuntu 22.04 ARM64", "unit-tests-2204-arm64", ".buildkite/scripts/steps/unit-tests.sh") + pipeline.SetAgent(armStep, pipeline.Agent{ + "provider": "aws", + "image": "${IMAGE_UBUNTU_2204_ARM_64}", + "instanceType": pipeline.InstanceTypeM6gXLarge, + "diskSizeGb": 200, + }) + pipeline.SetArtifactPaths(armStep, testArtifacts...) + pipeline.SetRetry(armStep, 1, true) + pipeline.AddGroupStep(unitTestsGroup, armStep) + + // Add Windows 2022 unit tests + winStep := pipeline.CommandWithKey("Unit tests - Windows 2022", "unit-tests-win2022", ".buildkite/scripts/steps/unit-tests.ps1") + pipeline.SetAgent(winStep, pipeline.Agent{ + "provider": "gcp", + "image": "${IMAGE_WIN_2022}", + "machineType": pipeline.MachineTypeN2Standard8, + "disk_size": 200, + "disk_type": "pd-ssd", + }) + pipeline.SetArtifactPaths(winStep, testArtifacts...) + pipeline.SetRetry(winStep, 1, true) + pipeline.AddGroupStep(unitTestsGroup, winStep) + + // Build the macOS tests group + macOSGroup := pipeline.GroupWithKey("macOS tests", "macos-unit-tests") + + macArmStep := pipeline.Command("Unit tests - macOS 15 ARM", ".buildkite/scripts/steps/unit-tests.sh") + pipeline.SetAgent(macArmStep, pipeline.AgentMacOS15ARM) + pipeline.SetArtifactPaths(macArmStep, testArtifacts...) + pipeline.SetRetry(macArmStep, 1, true) + pipeline.AddGroupStep(macOSGroup, macArmStep) + + macX86Step := pipeline.Command("Unit tests - macOS 13", ".buildkite/scripts/steps/unit-tests.sh") + pipeline.SetAgent(macX86Step, pipeline.AgentMacOS13X86) + pipeline.SetBranches(macX86Step, "main 8.* 9.*") + pipeline.SetArtifactPaths(macX86Step, testArtifacts...) 
+ pipeline.SetRetry(macX86Step, 1, true) + pipeline.AddGroupStep(macOSGroup, macX86Step) + + // Build the JUnit annotate step + junitStep := pipeline.Command(":junit: Junit annotate", "") + pipeline.SetAgent(junitStep, pipeline.JunitAnnotateAgent()) + pipeline.WithJunitAnnotate(junitStep, "**TEST-*.xml") + pipeline.SetDependsOnWithFailure(junitStep, + pipeline.DependsOnDep{Step: "unit-tests-2204", AllowFailure: true}, + pipeline.DependsOnDep{Step: "unit-tests-2204-fips-tag", AllowFailure: true}, + pipeline.DependsOnDep{Step: "unit-tests-2204-arm64", AllowFailure: true}, + pipeline.DependsOnDep{Step: "unit-tests-win2022", AllowFailure: true}, + pipeline.DependsOnDep{Step: "macos-unit-tests", AllowFailure: true}, + ) + + // Build the K8s tests group with matrix + k8sGroup := pipeline.GroupWithKey("K8s tests", "k8s-tests") + + k8sStep := pipeline.Command("K8s tests: {{matrix.k8s_version}}", ".buildkite/scripts/steps/k8s-tests.sh") + pipeline.SetEnv(k8sStep, map[string]string{ + "K8S_VERSION": "v{{matrix.k8s_version}}", + "KIND_VERSION": "v0.27.0", + }) + pipeline.SetAgent(k8sStep, pipeline.Agent{ + "provider": "gcp", + "image": "${IMAGE_UBUNTU_2204_X86_64}", + }) + pipeline.SetMatrix(k8sStep, map[string][]string{ + "k8s_version": {"1.33.0", "1.32.0", "1.31.0", "1.30.0", "1.29.4", "1.28.9"}, + }) + pipeline.SetRetryManual(k8sStep) + pipeline.AddGroupStep(k8sGroup, k8sStep) + + // Trigger extended tests + extendedTrigger := pipeline.Trigger("Triggering Extended tests for branches", "elastic-agent-extended-testing") + pipeline.SetTriggerIf(extendedTrigger, "build.pull_request.id == null") + pipeline.SetTriggerBuild(extendedTrigger, "${BUILDKITE_COMMIT}", "${BUILDKITE_BRANCH}", nil) + + // Publish to serverless trigger + serverlessTrigger := pipeline.Trigger("Publish to serverless", "agentless-serverless-release") + pipeline.SetTriggerBranches(serverlessTrigger, "main") + pipeline.SetTriggerBuild(serverlessTrigger, "${BUILDKITE_COMMIT}", "", nil) + + // Build the complete 
pipeline + p := pipeline.New(). + Env("VAULT_PATH", pipeline.VaultPathGCP). + Env("IMAGE_UBUNTU_2204_X86_64", pipeline.ImageUbuntu2204X86). + Env("IMAGE_UBUNTU_2204_ARM_64", pipeline.ImageUbuntu2204ARM). + Env("IMAGE_WIN_2022", pipeline.ImageWin2022). + Add(checkCIStep). + Add(unitTestsGroup). + Add(macOSGroup). + Add(junitStep). + Add(k8sGroup). + Add(extendedTrigger). + Wait(). + Add(serverlessTrigger) + + return p +} + +// Example_pipeline demonstrates generating a pipeline programmatically. +func Example_pipeline() { + p := generateExamplePipeline() + + yaml, err := p.MarshalYAML() + if err != nil { + panic(err) + } + + // The generated YAML can be written to a file + _ = yaml + + // To write to a file: + // p.WriteYAML(".buildkite/pipeline.yml") + + fmt.Println("Pipeline generated successfully") + // Output: Pipeline generated successfully +} diff --git a/dev-tools/buildkite/pipeline/images.go b/dev-tools/buildkite/pipeline/images.go new file mode 100644 index 00000000000..9a4f21b2944 --- /dev/null +++ b/dev-tools/buildkite/pipeline/images.go @@ -0,0 +1,85 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipeline + +// Image constants for VM images used in Buildkite pipelines. +// These values are managed by updatecli and should not be changed manually. +// +// The image names follow the pattern: platform-ingest-elastic-agent-{os}-{version} +// where version is a timestamp-based identifier. 
+const ( + // Ubuntu images + ImageUbuntu2204X86 = "platform-ingest-elastic-agent-ubuntu-2204-1762801856" + ImageUbuntu2204ARM = "platform-ingest-elastic-agent-ubuntu-2204-aarch64-1762801856" + ImageUbuntu2404X86 = "platform-ingest-elastic-agent-ubuntu-2404-1762801856" + ImageUbuntu2404ARM = "platform-ingest-elastic-agent-ubuntu-2404-aarch64-1762801856" + + // Windows images + ImageWin2016 = "platform-ingest-elastic-agent-windows-2016-1762801856" + ImageWin2022 = "platform-ingest-elastic-agent-windows-2022-1762801856" + ImageWin2025 = "platform-ingest-elastic-agent-windows-2025-1762801856" + ImageWin10 = "platform-ingest-elastic-agent-windows-10-1764775167" + ImageWin11 = "platform-ingest-elastic-agent-windows-11-1764775167" + + // RHEL images + ImageRHEL8 = "platform-ingest-elastic-agent-rhel-8-1762801856" + ImageRHEL10 = "platform-ingest-elastic-agent-rhel-10-1762801856" + + // Debian images + ImageDebian11 = "platform-ingest-elastic-agent-debian-11-1762801856" + ImageDebian13 = "platform-ingest-elastic-agent-debian-13-1762801856" +) + +// ImageEnvVars returns a map of environment variable names to image values. +// This is useful for pipelines that reference images via environment variables +// (e.g., ${IMAGE_UBUNTU_2204_X86_64}). +func ImageEnvVars() map[string]string { + return map[string]string{ + "IMAGE_UBUNTU_2204_X86_64": ImageUbuntu2204X86, + "IMAGE_UBUNTU_2204_ARM_64": ImageUbuntu2204ARM, + "IMAGE_UBUNTU_2404_X86_64": ImageUbuntu2404X86, + "IMAGE_UBUNTU_2404_ARM_64": ImageUbuntu2404ARM, + "IMAGE_WIN_2016": ImageWin2016, + "IMAGE_WIN_2022": ImageWin2022, + "IMAGE_WIN_2025": ImageWin2025, + "IMAGE_WIN_10": ImageWin10, + "IMAGE_WIN_11": ImageWin11, + "IMAGE_RHEL_8": ImageRHEL8, + "IMAGE_RHEL_10": ImageRHEL10, + "IMAGE_DEBIAN_11": ImageDebian11, + "IMAGE_DEBIAN_13": ImageDebian13, + } +} + +// Common machine types for GCP and AWS agents. 
+const ( + // GCP machine types + MachineTypeN2Standard4 = "n2-standard-4" + MachineTypeN2Standard8 = "n2-standard-8" + MachineTypeN2Standard16 = "n2-standard-16" + MachineTypeC2Standard16 = "c2-standard-16" + + // AWS instance types + InstanceTypeM6gXLarge = "m6g.xlarge" + InstanceTypeM6g2XLarge = "m6g.2xlarge" + InstanceTypeC6gXLarge = "c6g.xlarge" + InstanceTypeC6g2XLarge = "c6g.2xlarge" + InstanceTypeC6g4XLarge = "c6g.4xlarge" + InstanceTypeT4g2XLarge = "t4g.2xlarge" +) + +// Common disk sizes in GB. +const ( + DiskSize80GB = 80 + DiskSize200GB = 200 + DiskSize400GB = 400 +) + +// Common vault paths. +const ( + VaultPathGCP = "kv/ci-shared/observability-ingest/cloud/gcp" + VaultPathDockerRegistry = "kv/ci-shared/platform-ingest/elastic_docker_registry" + VaultPathECKeyProd = "kv/ci-shared/platform-ingest/platform-ingest-ec-prod" +) diff --git a/dev-tools/buildkite/pipeline/pipeline.go b/dev-tools/buildkite/pipeline/pipeline.go new file mode 100644 index 00000000000..d5a891d9e6d --- /dev/null +++ b/dev-tools/buildkite/pipeline/pipeline.go @@ -0,0 +1,186 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipeline + +import ( + "bytes" + "fmt" + "os" + "strings" + + "github.com/buildkite/buildkite-sdk/sdk/go/sdk/buildkite" +) + +// Pipeline wraps the buildkite.Pipeline with additional helper methods. +type Pipeline struct { + *buildkite.Pipeline +} + +// New creates a new pipeline. +func New() *Pipeline { + return &Pipeline{ + Pipeline: buildkite.NewPipeline(), + } +} + +// Env adds an environment variable to the pipeline. +func (p *Pipeline) Env(key, value string) *Pipeline { + p.AddEnvironmentVariable(key, value) + return p +} + +// EnvMap adds multiple environment variables to the pipeline. 
+func (p *Pipeline) EnvMap(env map[string]string) *Pipeline { + for k, v := range env { + p.AddEnvironmentVariable(k, v) + } + return p +} + +// WithImageEnvVars adds all standard image environment variables to the pipeline. +func (p *Pipeline) WithImageEnvVars() *Pipeline { + for k, v := range ImageEnvVars() { + p.AddEnvironmentVariable(k, v) + } + return p +} + +// Add adds a step to the pipeline. It accepts CommandStep, GroupStep, TriggerStep, +// InputStep, BlockStep, or WaitStep. +func (p *Pipeline) Add(step any) *Pipeline { + switch s := step.(type) { + case *buildkite.CommandStep: + p.AddStep(s) + case *buildkite.GroupStep: + p.AddStep(s) + case *buildkite.TriggerStep: + p.AddStep(s) + case *buildkite.InputStep: + p.AddStep(s) + case *buildkite.BlockStep: + p.AddStep(s) + case *buildkite.WaitStep: + p.AddStep(s) + default: + panic(fmt.Sprintf("unsupported step type: %T", step)) + } + return p +} + +// Wait adds a wait step to the pipeline. +func (p *Pipeline) Wait() *Pipeline { + p.AddStep(&buildkite.WaitStep{ + Wait: Ptr(""), + }) + return p +} + +// MarshalYAML marshals the pipeline to YAML bytes with the schema comment. +func (p *Pipeline) MarshalYAML() ([]byte, error) { + yaml, err := p.ToYAML() + if err != nil { + return nil, fmt.Errorf("marshaling pipeline to YAML: %w", err) + } + + var buf bytes.Buffer + buf.WriteString("# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json\n") + buf.WriteString(yaml) + + return buf.Bytes(), nil +} + +// WriteYAML writes the pipeline to a file. +func (p *Pipeline) WriteYAML(path string) error { + data, err := p.MarshalYAML() + if err != nil { + return err + } + return os.WriteFile(path, data, 0644) +} + +// Ptr is a helper to convert a value to a pointer. +// This is useful when setting fields that require pointers. +func Ptr[T any](v T) *T { + return &v +} + +// CompareResult contains the result of comparing two pipelines. 
+type CompareResult struct { + Equal bool + Diff string + Generated string + Expected string + ParseError error +} + +// CompareWithFile compares a generated pipeline with an existing YAML file. +func CompareWithFile(p *Pipeline, path string) (*CompareResult, error) { + generated, err := p.MarshalYAML() + if err != nil { + return nil, fmt.Errorf("marshaling generated pipeline: %w", err) + } + + expected, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("reading expected file %s: %w", path, err) + } + + return Compare(generated, expected) +} + +// Compare compares two YAML representations of pipelines. +func Compare(generated, expected []byte) (*CompareResult, error) { + result := &CompareResult{ + Generated: string(generated), + Expected: string(expected), + } + + // Normalize both by trimming whitespace and comparing + genNorm := strings.TrimSpace(string(generated)) + expNorm := strings.TrimSpace(string(expected)) + + result.Equal = genNorm == expNorm + + if !result.Equal { + result.Diff = computeDiff(genNorm, expNorm) + } + + return result, nil +} + +// computeDiff creates a simple line-by-line diff. 
+func computeDiff(generated, expected string) string { + genLines := strings.Split(generated, "\n") + expLines := strings.Split(expected, "\n") + + var diff strings.Builder + diff.WriteString("--- expected\n+++ generated\n") + + maxLen := len(genLines) + if len(expLines) > maxLen { + maxLen = len(expLines) + } + + for i := 0; i < maxLen; i++ { + var genLine, expLine string + if i < len(genLines) { + genLine = genLines[i] + } + if i < len(expLines) { + expLine = expLines[i] + } + + if genLine != expLine { + if expLine != "" { + diff.WriteString(fmt.Sprintf("-%d: %s\n", i+1, expLine)) + } + if genLine != "" { + diff.WriteString(fmt.Sprintf("+%d: %s\n", i+1, genLine)) + } + } + } + + return diff.String() +} diff --git a/dev-tools/buildkite/pipeline/pipeline_test.go b/dev-tools/buildkite/pipeline/pipeline_test.go new file mode 100644 index 00000000000..5c582ae0dba --- /dev/null +++ b/dev-tools/buildkite/pipeline/pipeline_test.go @@ -0,0 +1,376 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipeline + +import ( + "strings" + "testing" +) + +func TestNewPipeline(t *testing.T) { + p := New(). + Env("FOO", "bar"). 
+ Env("BAZ", "qux") + + yaml, err := p.MarshalYAML() + if err != nil { + t.Fatalf("failed to marshal: %v", err) + } + + yamlStr := string(yaml) + if !strings.Contains(yamlStr, "FOO") { + t.Error("expected FOO in output") + } + if !strings.Contains(yamlStr, "BAZ") { + t.Error("expected BAZ in output") + } +} + +func TestCommandStep(t *testing.T) { + step := CommandWithKey("Test step", "test-step", "echo hello") + SetAgent(step, GCPAgent(ImageUbuntu2204X86, MachineTypeN2Standard8)) + SetArtifactPaths(step, "build/*.xml", "build/*.html") + SetRetry(step, 1, true) + + if *step.Label != "Test step" { + t.Errorf("expected label 'Test step', got %s", *step.Label) + } + + if *step.Key != "test-step" { + t.Errorf("expected key 'test-step', got %s", *step.Key) + } + + if step.Agents == nil { + t.Fatal("expected agents to be set") + } + + if step.ArtifactPaths == nil || len(step.ArtifactPaths.StringArray) != 2 { + t.Error("expected 2 artifact paths") + } + + if step.Retry == nil { + t.Fatal("expected retry to be set") + } +} + +func TestGroupStep(t *testing.T) { + group := GroupWithKey("Test group", "test-group") + SetGroupDependsOn(group, "previous-step") + SetGroupNotify(group, "buildkite/test") + + step := Command("Nested step", "echo nested") + AddGroupStep(group, step) + + if group.Group == nil || *group.Group != "Test group" { + t.Errorf("expected group label 'Test group'") + } + + if group.Key == nil || *group.Key != "test-group" { + t.Errorf("expected key 'test-group', got %v", group.Key) + } + + if group.Steps == nil || len(*group.Steps) != 1 { + t.Error("expected 1 step in group") + } +} + +func TestTriggerStep(t *testing.T) { + trigger := Trigger("Trigger other", "other-pipeline") + SetTriggerIf(trigger, "build.pull_request.id != null") + SetTriggerAsync(trigger, true) + SetTriggerBuild(trigger, "${BUILDKITE_COMMIT}", "${BUILDKITE_BRANCH}", nil) + + if *trigger.Trigger != "other-pipeline" { + t.Errorf("expected trigger 'other-pipeline', got %s", *trigger.Trigger) + 
} + + if *trigger.Label != "Trigger other" { + t.Errorf("expected label 'Trigger other', got %s", *trigger.Label) + } + + if trigger.Async == nil || trigger.Async.Bool == nil || !*trigger.Async.Bool { + t.Error("expected async true") + } +} + +func TestMarshalYAML(t *testing.T) { + step1 := Command("Test", "echo hello") + step2 := Command("After wait", "echo after") + + p := New(). + Env("VAULT_PATH", "kv/ci-shared/test"). + Add(step1). + Wait(). + Add(step2) + + yaml, err := p.MarshalYAML() + if err != nil { + t.Fatalf("failed to marshal: %v", err) + } + + yamlStr := string(yaml) + + // Check for schema comment + if !strings.Contains(yamlStr, "yaml-language-server") { + t.Error("expected schema comment in output") + } + + // Check for env + if !strings.Contains(yamlStr, "VAULT_PATH") { + t.Error("expected VAULT_PATH in output") + } + + // Check for steps + if !strings.Contains(yamlStr, "steps") { + t.Error("expected steps in output") + } +} + +func TestAgentHelpers(t *testing.T) { + tests := []struct { + name string + agent Agent + expected map[string]any + }{ + { + name: "GCP agent", + agent: GCPAgent("test-image", "n2-standard-8"), + expected: map[string]any{ + "provider": "gcp", + "image": "test-image", + "machineType": "n2-standard-8", + }, + }, + { + name: "AWS agent", + agent: AWSAgent("test-image", "m6g.xlarge"), + expected: map[string]any{ + "provider": "aws", + "image": "test-image", + "instanceType": "m6g.xlarge", + }, + }, + { + name: "Orka agent", + agent: OrkaAgent("test-prefix"), + expected: map[string]any{ + "provider": "orka", + "imagePrefix": "test-prefix", + }, + }, + { + name: "GCP agent with disk", + agent: GCPAgentWithDisk("test-image", "n2-standard-8", 200, "pd-ssd"), + expected: map[string]any{ + "provider": "gcp", + "image": "test-image", + "machineType": "n2-standard-8", + "diskSizeGb": 200, + "disk_type": "pd-ssd", + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + for k, v := range tt.expected { + if 
tt.agent[k] != v { + t.Errorf("expected %s=%v, got %v", k, v, tt.agent[k]) + } + } + }) + } +} + +func TestPluginHelpers(t *testing.T) { + t.Run("VaultDockerLogin", func(t *testing.T) { + source, config := PluginVaultDockerLogin() + if !strings.Contains(source, "vault-docker-login") { + t.Errorf("expected vault-docker-login in source, got %s", source) + } + if config["secret_path"] == nil { + t.Error("expected secret_path in config") + } + }) + + t.Run("VaultSecrets", func(t *testing.T) { + source, config := PluginVaultSecrets("kv/path", "field", "ENV_VAR") + if !strings.Contains(source, "vault-secrets") { + t.Errorf("expected vault-secrets in source, got %s", source) + } + if config["path"] != "kv/path" { + t.Errorf("expected path 'kv/path', got %v", config["path"]) + } + }) + + t.Run("JunitAnnotate", func(t *testing.T) { + source, _ := PluginJunitAnnotate("**/*.xml") + if !strings.Contains(source, "junit-annotate") { + t.Errorf("expected junit-annotate in source, got %s", source) + } + }) +} + +func TestMatrixConfiguration(t *testing.T) { + step := Command("Matrix test", "echo {{matrix.os}} {{matrix.version}}") + SetMatrix(step, map[string][]string{ + "os": {"linux", "windows"}, + "version": {"1.0", "2.0"}, + }) + + if step.Matrix == nil { + t.Fatal("expected matrix to be set") + } +} + +func TestSimpleMatrix(t *testing.T) { + step := Command("Simple matrix", "echo {{matrix}}") + SetSimpleMatrix(step, []string{"a", "b", "c"}) + + if step.Matrix == nil { + t.Fatal("expected matrix to be set") + } + if step.Matrix.MatrixElementList == nil || len(*step.Matrix.MatrixElementList) != 3 { + t.Error("expected 3 matrix values") + } +} + +func TestCompare(t *testing.T) { + generated := []byte(`steps: + - command: echo hello + label: Test +`) + expected := []byte(`steps: + - command: echo hello + label: Test +`) + + result, err := Compare(generated, expected) + if err != nil { + t.Fatalf("compare failed: %v", err) + } + + if !result.Equal { + t.Errorf("expected pipelines to 
be equal, diff: %s", result.Diff) + } +} + +func TestCompareDifferent(t *testing.T) { + generated := []byte(`steps: + - command: echo hello + label: Test +`) + expected := []byte(`steps: + - command: echo world + label: Test +`) + + result, err := Compare(generated, expected) + if err != nil { + t.Fatalf("compare failed: %v", err) + } + + if result.Equal { + t.Error("expected pipelines to be different") + } + + if result.Diff == "" { + t.Error("expected diff to be non-empty") + } +} + +func TestInputStep(t *testing.T) { + input := Input("Build parameters") + SetInputIf(input, `build.env("MANIFEST_URL") == null`) + AddInputField(input, "MANIFEST_URL", "MANIFEST_URL", "", "Link to the build manifest", true) + AddSelectField(input, "Verbose", "VERBOSE", "Enable verbose output", false, + SelectOption{Label: "Yes", Value: "1"}, + SelectOption{Label: "No", Value: "0"}, + ) + + if input.Input == nil || *input.Input != "Build parameters" { + t.Error("expected input 'Build parameters'") + } + + if input.Fields == nil || len(*input.Fields) != 2 { + t.Error("expected 2 fields") + } +} + +func TestDependsOnWithFailure(t *testing.T) { + step := Command("Test", "echo test") + SetDependsOnWithFailure(step, + DependsOnDep{Step: "step-1", AllowFailure: true}, + DependsOnDep{Step: "step-2", AllowFailure: false}, + ) + + if step.DependsOn == nil { + t.Fatal("expected depends_on to be set") + } + if step.DependsOn.DependsOnList == nil || len(*step.DependsOn.DependsOnList) != 2 { + t.Error("expected 2 dependencies") + } +} + +func TestSetNotify(t *testing.T) { + step := Command("Test", "echo test") + SetNotify(step, "buildkite/test") + + if step.Notify == nil { + t.Fatal("expected notify to be set") + } + if len(*step.Notify) != 1 { + t.Error("expected 1 notification") + } +} + +func TestAddPlugin(t *testing.T) { + step := Command("Test", "echo test") + AddPlugin(step, "some-plugin#v1.0.0", map[string]any{"key": "value"}) + AddPlugin(step, "another-plugin#v2.0.0", nil) + + if 
step.Plugins == nil { + t.Fatal("expected plugins to be set") + } + if step.Plugins.PluginsList == nil || len(*step.Plugins.PluginsList) != 2 { + t.Errorf("expected 2 plugins") + } +} + +func TestWithPluginHelpers(t *testing.T) { + step := Command("Test", "echo test") + WithVaultDockerLogin(step) + WithVaultECKeyProd(step) + + if step.Plugins == nil { + t.Fatal("expected plugins to be set") + } + if step.Plugins.PluginsList == nil || len(*step.Plugins.PluginsList) != 2 { + t.Errorf("expected 2 plugins") + } +} + +// BenchmarkMarshalYAML benchmarks YAML marshaling. +func BenchmarkMarshalYAML(b *testing.B) { + p := New(). + Env("VAULT_PATH", VaultPathGCP). + WithImageEnvVars() + + for i := 0; i < 10; i++ { + step := Command("Test step", "echo hello") + SetAgent(step, GCPAgent(ImageUbuntu2204X86, MachineTypeN2Standard8)) + SetArtifactPaths(step, "build/*.xml") + SetRetryAutomatic(step, 1) + p.Add(step) + } + + b.ResetTimer() + for i := 0; i < b.N; i++ { + _, err := p.MarshalYAML() + if err != nil { + b.Fatal(err) + } + } +} diff --git a/dev-tools/buildkite/pipeline/plugins.go b/dev-tools/buildkite/pipeline/plugins.go new file mode 100644 index 00000000000..5ff309beb28 --- /dev/null +++ b/dev-tools/buildkite/pipeline/plugins.go @@ -0,0 +1,133 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipeline + +import ( + "github.com/buildkite/buildkite-sdk/sdk/go/sdk/buildkite" +) + +// Plugin versions - centralized for easy updates. 
+const ( + PluginVersionVaultDockerLogin = "v0.5.2" + PluginVersionVaultSecrets = "v0.1.0" + PluginVersionGCPSecretManager = "v1.3.0-elastic" + PluginVersionGoogleOIDC = "v1.3.0" + PluginVersionJunitAnnotate = "v2.7.0" + PluginVersionTestCollector = "v1.11.0" +) + +// PluginVaultDockerLogin returns the vault-docker-login plugin source and config. +func PluginVaultDockerLogin() (string, map[string]any) { + return PluginVaultDockerLoginWithPath(VaultPathDockerRegistry) +} + +// PluginVaultDockerLoginWithPath returns the vault-docker-login plugin with a custom path. +func PluginVaultDockerLoginWithPath(secretPath string) (string, map[string]any) { + return "elastic/vault-docker-login#" + PluginVersionVaultDockerLogin, map[string]any{ + "secret_path": secretPath, + } +} + +// PluginVaultSecrets returns the vault-secrets plugin source and config. +func PluginVaultSecrets(path, field, envVar string) (string, map[string]any) { + return "elastic/vault-secrets#" + PluginVersionVaultSecrets, map[string]any{ + "path": path, + "field": field, + "env_var": envVar, + } +} + +// PluginVaultECKeyProd returns the vault-secrets plugin for EC production API key. +func PluginVaultECKeyProd() (string, map[string]any) { + return PluginVaultSecrets(VaultPathECKeyProd, "apiKey", "EC_API_KEY") +} + +// PluginGCPSecretManager returns the gcp-secret-manager plugin source and config. +func PluginGCPSecretManager(envSecrets map[string]string) (string, map[string]any) { + return "elastic/gcp-secret-manager#" + PluginVersionGCPSecretManager, map[string]any{ + "env": envSecrets, + } +} + +// PluginGCPSecretManagerServerless returns the GCP secret manager plugin for serverless tests. 
+func PluginGCPSecretManagerServerless() (string, map[string]any) { + return PluginGCPSecretManager(map[string]string{ + "ELASTICSEARCH_HOST": "ea-serverless-it-elasticsearch-hostname", + "ELASTICSEARCH_PASSWORD": "ea-serverless-it-elasticsearch-password", + "ELASTICSEARCH_USERNAME": "ea-serverless-it-elasticsearch-username", + "KIBANA_HOST": "ea-serverless-it-kibana-hostname", + "KIBANA_USERNAME": "ea-serverless-it-kibana-username", + "KIBANA_PASSWORD": "ea-serverless-it-kibana-password", + }) +} + +// PluginGoogleOIDC returns the Google OIDC authentication plugin source and config. +func PluginGoogleOIDC() (string, map[string]any) { + return PluginGoogleOIDCWithConfig("elastic-observability-ci", "911195782929", 10800) +} + +// PluginGoogleOIDCWithConfig returns the Google OIDC plugin with custom configuration. +func PluginGoogleOIDCWithConfig(projectID, projectNumber string, lifetimeSeconds int) (string, map[string]any) { + return "elastic/oblt-google-auth#" + PluginVersionGoogleOIDC, map[string]any{ + "lifetime": lifetimeSeconds, + "project-id": projectID, + "project-number": projectNumber, + } +} + +// PluginJunitAnnotate returns the junit-annotate plugin source and config. +func PluginJunitAnnotate(artifactPattern string) (string, map[string]any) { + return "junit-annotate#" + PluginVersionJunitAnnotate, map[string]any{ + "artifacts": artifactPattern, + "always-annotate": true, + "run-in-docker": false, + } +} + +// PluginTestCollector returns the test-collector plugin source and config. +func PluginTestCollector(filesPattern, format string) (string, map[string]any) { + return "test-collector#" + PluginVersionTestCollector, map[string]any{ + "files": filesPattern, + "format": format, + "branches": "main", + "debug": true, + } +} + +// WithVaultDockerLogin adds the vault-docker-login plugin to a step. 
+func WithVaultDockerLogin(step *buildkite.CommandStep) *buildkite.CommandStep { + source, config := PluginVaultDockerLogin() + return AddPlugin(step, source, config) +} + +// WithVaultECKeyProd adds the vault EC key prod plugin to a step. +func WithVaultECKeyProd(step *buildkite.CommandStep) *buildkite.CommandStep { + source, config := PluginVaultECKeyProd() + return AddPlugin(step, source, config) +} + +// WithGoogleOIDC adds the Google OIDC plugin to a step. +func WithGoogleOIDC(step *buildkite.CommandStep) *buildkite.CommandStep { + source, config := PluginGoogleOIDC() + return AddPlugin(step, source, config) +} + +// WithGCPSecretManagerServerless adds the GCP secret manager serverless plugin to a step. +func WithGCPSecretManagerServerless(step *buildkite.CommandStep) *buildkite.CommandStep { + source, config := PluginGCPSecretManagerServerless() + return AddPlugin(step, source, config) +} + +// WithJunitAnnotate adds the junit-annotate plugin to a step. +func WithJunitAnnotate(step *buildkite.CommandStep, artifactPattern string) *buildkite.CommandStep { + source, config := PluginJunitAnnotate(artifactPattern) + return AddPlugin(step, source, config) +} + +// WithTestCollector adds the test-collector plugin to a step. +func WithTestCollector(step *buildkite.CommandStep, filesPattern, format string) *buildkite.CommandStep { + source, config := PluginTestCollector(filesPattern, format) + return AddPlugin(step, source, config) +} diff --git a/dev-tools/buildkite/pipeline/step.go b/dev-tools/buildkite/pipeline/step.go new file mode 100644 index 00000000000..4b89c5fa528 --- /dev/null +++ b/dev-tools/buildkite/pipeline/step.go @@ -0,0 +1,481 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. 
+ +package pipeline + +import ( + "github.com/buildkite/buildkite-sdk/sdk/go/sdk/buildkite" +) + +// Re-export SDK types for convenience +type ( + CommandStep = buildkite.CommandStep + GroupStep = buildkite.GroupStep + TriggerStep = buildkite.TriggerStep + InputStep = buildkite.InputStep + BlockStep = buildkite.BlockStep + WaitStep = buildkite.WaitStep +) + +// Command creates a new command step with the given label and command. +func Command(label, command string) *buildkite.CommandStep { + return &buildkite.CommandStep{ + Label: Ptr(label), + Command: &buildkite.CommandStepCommand{ + String: Ptr(command), + }, + } +} + +// CommandWithKey creates a new command step with label, key, and command. +func CommandWithKey(label, key, command string) *buildkite.CommandStep { + return &buildkite.CommandStep{ + Label: Ptr(label), + Key: Ptr(key), + Command: &buildkite.CommandStepCommand{ + String: Ptr(command), + }, + } +} + +// Group creates a new group step with the given label. +func Group(label string) *buildkite.GroupStep { + return &buildkite.GroupStep{ + Group: Ptr(label), + } +} + +// GroupWithKey creates a new group step with label and key. +func GroupWithKey(label, key string) *buildkite.GroupStep { + return &buildkite.GroupStep{ + Group: Ptr(label), + Key: Ptr(key), + } +} + +// Trigger creates a new trigger step for the given pipeline. +func Trigger(label, pipelineSlug string) *buildkite.TriggerStep { + return &buildkite.TriggerStep{ + Label: Ptr(label), + Trigger: Ptr(pipelineSlug), + } +} + +// Input creates a new input step with the given prompt. +func Input(prompt string) *buildkite.InputStep { + return &buildkite.InputStep{ + Input: Ptr(prompt), + } +} + +// Block creates a new block step with the given label. +func Block(label string) *buildkite.BlockStep { + return &buildkite.BlockStep{ + Block: Ptr(label), + } +} + +// SetAgent sets the agent configuration on a command step. 
+func SetAgent(step *buildkite.CommandStep, agent Agent) *buildkite.CommandStep { + agentsObject := buildkite.AgentsObject(agent) + step.Agents = &buildkite.Agents{ + AgentsObject: &agentsObject, + } + return step +} + +// SetEnv sets environment variables on a command step. +func SetEnv(step *buildkite.CommandStep, env map[string]string) *buildkite.CommandStep { + envMap := make(buildkite.Env, len(env)) + for k, v := range env { + envMap[k] = v + } + step.Env = &envMap + return step +} + +// AddEnv adds a single environment variable to a command step. +func AddEnv(step *buildkite.CommandStep, key, value string) *buildkite.CommandStep { + if step.Env == nil { + envMap := make(buildkite.Env) + step.Env = &envMap + } + (*step.Env)[key] = value + return step +} + +// SetArtifactPaths sets artifact paths on a command step. +func SetArtifactPaths(step *buildkite.CommandStep, paths ...string) *buildkite.CommandStep { + step.ArtifactPaths = &buildkite.CommandStepArtifactPaths{ + StringArray: paths, + } + return step +} + +// SetRetry sets retry configuration on a command step. +func SetRetry(step *buildkite.CommandStep, automaticLimit int, manualAllowed bool) *buildkite.CommandStep { + step.Retry = &buildkite.CommandStepRetry{} + + if automaticLimit > 0 { + step.Retry.Automatic = &buildkite.CommandStepAutomaticRetry{ + AutomaticRetry: &buildkite.AutomaticRetry{ + Limit: Ptr(automaticLimit), + }, + } + } + + if manualAllowed { + step.Retry.Manual = &buildkite.CommandStepManualRetry{ + CommandStepManualRetryObject: &buildkite.CommandStepManualRetryObject{ + Allowed: &buildkite.CommandStepManualRetryObjectAllowed{ + Bool: Ptr(true), + }, + }, + } + } + + return step +} + +// SetRetryAutomatic sets automatic retry on a command step. 
+func SetRetryAutomatic(step *buildkite.CommandStep, limit int) *buildkite.CommandStep { + if step.Retry == nil { + step.Retry = &buildkite.CommandStepRetry{} + } + step.Retry.Automatic = &buildkite.CommandStepAutomaticRetry{ + AutomaticRetry: &buildkite.AutomaticRetry{ + Limit: Ptr(limit), + }, + } + return step +} + +// SetRetryManual enables manual retry on a command step. +func SetRetryManual(step *buildkite.CommandStep) *buildkite.CommandStep { + if step.Retry == nil { + step.Retry = &buildkite.CommandStepRetry{} + } + step.Retry.Manual = &buildkite.CommandStepManualRetry{ + CommandStepManualRetryObject: &buildkite.CommandStepManualRetryObject{ + Allowed: &buildkite.CommandStepManualRetryObjectAllowed{ + Bool: Ptr(true), + }, + }, + } + return step +} + +// SetMatrix sets a matrix configuration on a command step. +func SetMatrix(step *buildkite.CommandStep, setup map[string][]string) *buildkite.CommandStep { + matrixSetup := make(buildkite.MatrixSetupObject, len(setup)) + for k, v := range setup { + elements := make([]buildkite.MatrixElement, len(v)) + for i, val := range v { + elements[i] = buildkite.MatrixElement{String: Ptr(val)} + } + matrixSetup[k] = elements + } + step.Matrix = &buildkite.Matrix{ + MatrixObject: &buildkite.MatrixObject{ + Setup: &buildkite.MatrixSetup{ + MatrixSetup: &matrixSetup, + }, + }, + } + return step +} + +// SetSimpleMatrix sets a simple (single-dimension) matrix on a command step. +func SetSimpleMatrix(step *buildkite.CommandStep, values []string) *buildkite.CommandStep { + elements := make(buildkite.MatrixElementList, len(values)) + for i, v := range values { + elements[i] = buildkite.MatrixElement{String: Ptr(v)} + } + step.Matrix = &buildkite.Matrix{ + MatrixElementList: &elements, + } + return step +} + +// SetBranches sets branch filter on a command step. 
+func SetBranches(step *buildkite.CommandStep, branches string) *buildkite.CommandStep { + step.Branches = &buildkite.Branches{ + String: Ptr(branches), + } + return step +} + +// SetIf sets a conditional expression on a command step. +func SetIf(step *buildkite.CommandStep, condition string) *buildkite.CommandStep { + step.If = Ptr(condition) + return step +} + +// SetDependsOn sets step dependencies on a command step. +func SetDependsOn(step *buildkite.CommandStep, keys ...string) *buildkite.CommandStep { + if len(keys) == 1 { + step.DependsOn = &buildkite.DependsOn{ + String: Ptr(keys[0]), + } + } else { + items := make(buildkite.DependsOnList, len(keys)) + for i, k := range keys { + items[i] = buildkite.DependsOnListItem{ + String: Ptr(k), + } + } + step.DependsOn = &buildkite.DependsOn{ + DependsOnList: &items, + } + } + return step +} + +// SetDependsOnWithFailure sets step dependencies that allow failures. +func SetDependsOnWithFailure(step *buildkite.CommandStep, deps ...DependsOnDep) *buildkite.CommandStep { + items := make(buildkite.DependsOnList, len(deps)) + for i, dep := range deps { + items[i] = buildkite.DependsOnListItem{ + DependsOnList: &buildkite.DependsOnListObject{ + Step: Ptr(dep.Step), + AllowFailure: &buildkite.DependsOnListObjectAllowFailure{ + Bool: Ptr(dep.AllowFailure), + }, + }, + } + } + step.DependsOn = &buildkite.DependsOn{ + DependsOnList: &items, + } + return step +} + +// DependsOnDep represents a step dependency with optional allow_failure. +type DependsOnDep struct { + Step string + AllowFailure bool +} + +// SetSoftFail configures soft failure handling on a command step. +func SetSoftFail(step *buildkite.CommandStep, softFail bool) *buildkite.CommandStep { + step.SoftFail = &buildkite.SoftFail{ + SoftFailEnum: &buildkite.SoftFailEnum{ + Bool: Ptr(softFail), + }, + } + return step +} + +// SetTimeout sets the timeout in minutes on a command step. 
+func SetTimeout(step *buildkite.CommandStep, minutes int) *buildkite.CommandStep { + step.TimeoutInMinutes = Ptr(minutes) + return step +} + +// SetSkip marks a command step as skipped. +func SetSkip(step *buildkite.CommandStep, skip bool) *buildkite.CommandStep { + step.Skip = &buildkite.Skip{ + Bool: Ptr(skip), + } + return step +} + +// AddPlugin adds a plugin to a command step. +func AddPlugin(step *buildkite.CommandStep, source string, config map[string]any) *buildkite.CommandStep { + pluginObj := buildkite.PluginsListObject{source: config} + plugin := buildkite.PluginsListItem{ + PluginsList: &pluginObj, + } + + if step.Plugins == nil { + plugins := buildkite.PluginsList{plugin} + step.Plugins = &buildkite.Plugins{ + PluginsList: &plugins, + } + } else if step.Plugins.PluginsList != nil { + *step.Plugins.PluginsList = append(*step.Plugins.PluginsList, plugin) + } else { + plugins := buildkite.PluginsList{plugin} + step.Plugins.PluginsList = &plugins + } + return step +} + +// SetNotify sets GitHub commit status notification on a command step. +func SetNotify(step *buildkite.CommandStep, context string) *buildkite.CommandStep { + notify := buildkite.CommandStepNotify{ + buildkite.CommandStepNotifyItem{ + NotifyGithubCommitStatus: &buildkite.NotifyGithubCommitStatus{ + GithubCommitStatus: &buildkite.NotifyGithubCommitStatusGithubCommitStatus{ + Context: Ptr(context), + }, + }, + }, + } + step.Notify = ¬ify + return step +} + +// SetParallelism sets the parallelism for a command step. +func SetParallelism(step *buildkite.CommandStep, n int) *buildkite.CommandStep { + step.Parallelism = Ptr(n) + return step +} + +// AddGroupStep adds a command step to a group. 
+func AddGroupStep(group *buildkite.GroupStep, step *buildkite.CommandStep) *buildkite.GroupStep { + item := buildkite.GroupStepsItem{ + CommandStep: step, + } + if group.Steps == nil { + steps := buildkite.GroupSteps{item} + group.Steps = &steps + } else { + *group.Steps = append(*group.Steps, item) + } + return group +} + +// SetGroupDependsOn sets dependencies on a group step. +func SetGroupDependsOn(group *buildkite.GroupStep, keys ...string) *buildkite.GroupStep { + if len(keys) == 1 { + group.DependsOn = &buildkite.DependsOn{ + String: Ptr(keys[0]), + } + } else { + items := make(buildkite.DependsOnList, len(keys)) + for i, k := range keys { + items[i] = buildkite.DependsOnListItem{ + String: Ptr(k), + } + } + group.DependsOn = &buildkite.DependsOn{ + DependsOnList: &items, + } + } + return group +} + +// SetGroupNotify sets notification on a group step. +func SetGroupNotify(group *buildkite.GroupStep, context string) *buildkite.GroupStep { + notify := buildkite.BuildNotify{ + buildkite.BuildNotifyItem{ + NotifyGithubCommitStatus: &buildkite.NotifyGithubCommitStatus{ + GithubCommitStatus: &buildkite.NotifyGithubCommitStatusGithubCommitStatus{ + Context: Ptr(context), + }, + }, + }, + } + group.Notify = ¬ify + return group +} + +// SetTriggerBuild sets build configuration on a trigger step. +func SetTriggerBuild(trigger *buildkite.TriggerStep, commit, branch string, env map[string]string) *buildkite.TriggerStep { + build := &buildkite.TriggerStepBuild{} + if commit != "" { + build.Commit = Ptr(commit) + } + if branch != "" { + build.Branch = Ptr(branch) + } + if len(env) > 0 { + envMap := make(buildkite.Env, len(env)) + for k, v := range env { + envMap[k] = v + } + build.Env = &envMap + } + trigger.Build = build + return trigger +} + +// SetTriggerIf sets a conditional expression on a trigger step. 
+func SetTriggerIf(trigger *buildkite.TriggerStep, condition string) *buildkite.TriggerStep { + trigger.If = Ptr(condition) + return trigger +} + +// SetTriggerBranches sets branch filter on a trigger step. +func SetTriggerBranches(trigger *buildkite.TriggerStep, branches string) *buildkite.TriggerStep { + trigger.Branches = &buildkite.Branches{ + String: Ptr(branches), + } + return trigger +} + +// SetTriggerAsync makes a trigger step asynchronous. +func SetTriggerAsync(trigger *buildkite.TriggerStep, async bool) *buildkite.TriggerStep { + trigger.Async = &buildkite.TriggerStepAsync{ + Bool: Ptr(async), + } + return trigger +} + +// AddInputField adds a text field to an input step. +func AddInputField(input *buildkite.InputStep, label, key, defaultVal, hint string, required bool) *buildkite.InputStep { + field := buildkite.FieldsItem{ + TextField: &buildkite.TextField{ + Text: Ptr(label), + Key: Ptr(key), + Default: Ptr(defaultVal), + Hint: Ptr(hint), + Required: &buildkite.TextFieldRequired{ + Bool: Ptr(required), + }, + }, + } + + if input.Fields == nil { + fields := buildkite.Fields{field} + input.Fields = &fields + } else { + *input.Fields = append(*input.Fields, field) + } + return input +} + +// AddSelectField adds a select field to an input step. 
+func AddSelectField(input *buildkite.InputStep, label, key, hint string, required bool, options ...SelectOption) *buildkite.InputStep { + selectOptions := make([]buildkite.SelectFieldOption, len(options)) + for i, opt := range options { + selectOptions[i] = buildkite.SelectFieldOption{ + Label: Ptr(opt.Label), + Value: Ptr(opt.Value), + } + } + + field := buildkite.FieldsItem{ + SelectField: &buildkite.SelectField{ + Select: Ptr(label), + Key: Ptr(key), + Hint: Ptr(hint), + Options: selectOptions, + Required: &buildkite.SelectFieldRequired{ + Bool: Ptr(required), + }, + }, + } + + if input.Fields == nil { + fields := buildkite.Fields{field} + input.Fields = &fields + } else { + *input.Fields = append(*input.Fields, field) + } + return input +} + +// SelectOption represents a select input option. +type SelectOption struct { + Label string + Value string +} + +// SetInputIf sets a conditional expression on an input step. +func SetInputIf(input *buildkite.InputStep, condition string) *buildkite.InputStep { + input.If = Ptr(condition) + return input +} diff --git a/dev-tools/buildkite/pipeline/testdata/example_pipeline.yml b/dev-tools/buildkite/pipeline/testdata/example_pipeline.yml new file mode 100644 index 00000000000..340d74a091a --- /dev/null +++ b/dev-tools/buildkite/pipeline/testdata/example_pipeline.yml @@ -0,0 +1,181 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + IMAGE_UBUNTU_2204_ARM_64: platform-ingest-elastic-agent-ubuntu-2204-aarch64-1762801856 + IMAGE_UBUNTU_2204_X86_64: platform-ingest-elastic-agent-ubuntu-2204-1762801856 + IMAGE_WIN_2022: platform-ingest-elastic-agent-windows-2022-1762801856 + VAULT_PATH: kv/ci-shared/observability-ingest/cloud/gcp +steps: + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + provider: gcp + command: .buildkite/scripts/steps/check-ci.sh + key: check-ci + label: check-ci + retry: + manual: + allowed: true + - group: Unit tests + key: unit-tests + 
steps: + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.sh + key: unit-tests-2204 + label: Unit tests - Ubuntu 22.04 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.sh + env: + FIPS: "true" + key: unit-tests-2204-fips-tag + label: Unit tests - Ubuntu 22.04 with requirefips build tag + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: m6g.xlarge + provider: aws + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.sh + key: unit-tests-2204-arm64 + label: Unit tests - Ubuntu 22.04 ARM64 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + disk_size: 200 + disk_type: pd-ssd + image: ${IMAGE_WIN_2022} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.ps1 + key: unit-tests-win2022 + label: Unit tests - Windows 2022 + retry: + automatic: + limit: 1 + manual: + allowed: true + - group: macOS tests + key: macos-unit-tests + steps: + - agents: + imagePrefix: generic-base-15-arm-002 + provider: orka + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.sh + label: Unit tests - macOS 15 ARM + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + imagePrefix: 
generic-13-ventura-x64 + provider: orka + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + branches: main 8.* 9.* + command: .buildkite/scripts/steps/unit-tests.sh + label: Unit tests - macOS 13 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + image: docker.elastic.co/ci-agent-images/buildkite-junit-annotate:1.0 + command: "" + depends_on: + - allow_failure: true + step: unit-tests-2204 + - allow_failure: true + step: unit-tests-2204-fips-tag + - allow_failure: true + step: unit-tests-2204-arm64 + - allow_failure: true + step: unit-tests-win2022 + - allow_failure: true + step: macos-unit-tests + label: ":junit: Junit annotate" + plugins: + - junit-annotate#v2.7.0: + always-annotate: true + artifacts: "**TEST-*.xml" + run-in-docker: false + - group: K8s tests + key: k8s-tests + steps: + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + provider: gcp + command: .buildkite/scripts/steps/k8s-tests.sh + env: + K8S_VERSION: v{{matrix.k8s_version}} + KIND_VERSION: v0.27.0 + label: "K8s tests: {{matrix.k8s_version}}" + matrix: + setup: + k8s_version: + - 1.33.0 + - 1.32.0 + - 1.31.0 + - 1.30.0 + - 1.29.4 + - 1.28.9 + retry: + manual: + allowed: true + - build: + branch: ${BUILDKITE_BRANCH} + commit: ${BUILDKITE_COMMIT} + if: build.pull_request.id == null + label: Triggering Extended tests for branches + trigger: elastic-agent-extended-testing + - wait: "" + - branches: main + build: + commit: ${BUILDKITE_COMMIT} + label: Publish to serverless + trigger: agentless-serverless-release diff --git a/go.mod b/go.mod index c5642146c80..8644d094cab 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,13 @@ module github.com/elastic/elastic-agent -go 1.24.11 +go 1.25.4 require ( github.com/Jeffail/gabs/v2 v2.6.0 github.com/Microsoft/go-winio v0.6.2 github.com/antlr4-go/antlr/v4 v4.13.1 github.com/blakesmith/ar v0.0.0-20190502131153-809d4375e1fb + github.com/buildkite/buildkite-sdk/sdk/go v0.6.0 
github.com/cavaliergopher/rpm v1.2.0 github.com/cenkalti/backoff/v4 v4.3.0 github.com/cenkalti/backoff/v5 v5.0.3 @@ -414,6 +415,7 @@ require ( github.com/icholy/digest v0.1.22 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/ionos-cloud/sdk-go/v6 v6.3.4 // indirect + github.com/itchyny/json2yaml v0.1.4 // indirect github.com/jaegertracing/jaeger-idl v0.6.0 // indirect github.com/jaypipes/pcidb v1.0.0 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect diff --git a/go.sum b/go.sum index 8212d7c6dcb..8267037ef7d 100644 --- a/go.sum +++ b/go.sum @@ -342,6 +342,8 @@ github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/buildkite/buildkite-sdk/sdk/go v0.6.0 h1:dGOI8NBGX49myn6oVq5JtbPmHOZkUTgLxgqch+XCiZM= +github.com/buildkite/buildkite-sdk/sdk/go v0.6.0/go.mod h1:7LyiVlsVKmLmCmPq6F41kf7etbaSquCXgZPatfAKj+U= github.com/cavaliergopher/rpm v1.2.0 h1:s0h+QeVK252QFTolkhGiMeQ1f+tMeIMhGl8B1HUmGUc= github.com/cavaliergopher/rpm v1.2.0/go.mod h1:R0q3vTqa7RUvPofAZYrnjJ63hh2vngjFfphuXiExVos= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= @@ -902,6 +904,8 @@ github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2 github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/ionos-cloud/sdk-go/v6 v6.3.4 h1:jTvGl4LOF8v8OYoEIBNVwbFoqSGAFqn6vGE7sp7/BqQ= github.com/ionos-cloud/sdk-go/v6 v6.3.4/go.mod h1:wCVwNJ/21W29FWFUv+fNawOTMlFoP1dS3L+ZuztFW48= +github.com/itchyny/json2yaml v0.1.4 h1:/pErVOXGG5iTyXHi/QKR4y3uzhLjGTEmmJIy97YT+k8= +github.com/itchyny/json2yaml v0.1.4/go.mod h1:6iudhBZdarpjLFRNj+clWLAkGft+9uCcjAZYXUH9eGI= 
github.com/jaegertracing/jaeger-idl v0.6.0 h1:LOVQfVby9ywdMPI9n3hMwKbyLVV3BL1XH2QqsP5KTMk= github.com/jaegertracing/jaeger-idl v0.6.0/go.mod h1:mpW0lZfG907/+o5w5OlnNnig7nHJGT3SfKmRqC42HGQ= github.com/jarcoal/httpmock v1.4.1 h1:0Ju+VCFuARfFlhVXFc2HxlcQkfB+Xq12/EotHko+x2A= diff --git a/internal/edot/go.mod b/internal/edot/go.mod index 6396311a866..1b99dcb5977 100644 --- a/internal/edot/go.mod +++ b/internal/edot/go.mod @@ -1,6 +1,6 @@ module github.com/elastic/elastic-agent/internal/edot -go 1.24.11 +go 1.25.4 replace github.com/elastic/elastic-agent => ../../ diff --git a/wrapper/windows/archive-proxy/go.mod b/wrapper/windows/archive-proxy/go.mod index caf615a65bf..cd456e973dd 100644 --- a/wrapper/windows/archive-proxy/go.mod +++ b/wrapper/windows/archive-proxy/go.mod @@ -1,6 +1,6 @@ module github.com/elastic/elastic-agent/wrapper/windows/archive-proxy -go 1.24.11 +go 1.25.4 require github.com/elastic/elastic-agent v0.0.0 From 8d3896028371ba047d3fb0c0d6edaa483c8299e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Sat, 13 Dec 2025 16:42:44 +0100 Subject: [PATCH 02/10] Generate agentless-app-release and gce-cleanup pipelines # Conflicts: # go.mod --- dev-tools/buildkite/pipeline/pipeline.go | 168 ++++++++++++++++++ .../pipelines/agentless_app_release.go | 116 ++++++++++++ dev-tools/buildkite/pipelines/gce_cleanup.go | 23 +++ .../buildkite/pipelines/pipelines_test.go | 101 +++++++++++ .../pipeline.agentless-app-release.yaml | 74 ++++++++ .../pipeline.elastic-agent-gce-cleanup.yml | 9 + go.mod | 1 + 7 files changed, 492 insertions(+) create mode 100644 dev-tools/buildkite/pipelines/agentless_app_release.go create mode 100644 dev-tools/buildkite/pipelines/gce_cleanup.go create mode 100644 dev-tools/buildkite/pipelines/pipelines_test.go create mode 100644 dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml create mode 100644 dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-gce-cleanup.yml diff --git 
a/dev-tools/buildkite/pipeline/pipeline.go b/dev-tools/buildkite/pipeline/pipeline.go index d5a891d9e6d..f0fc0e6f9ae 100644 --- a/dev-tools/buildkite/pipeline/pipeline.go +++ b/dev-tools/buildkite/pipeline/pipeline.go @@ -8,9 +8,11 @@ import ( "bytes" "fmt" "os" + "reflect" "strings" "github.com/buildkite/buildkite-sdk/sdk/go/sdk/buildkite" + "gopkg.in/yaml.v3" ) // Pipeline wraps the buildkite.Pipeline with additional helper methods. @@ -184,3 +186,169 @@ func computeDiff(generated, expected string) string { return diff.String() } + +// SemanticCompareResult contains the result of semantic comparison. +type SemanticCompareResult struct { + Equal bool + Differences []string + ParseError error +} + +// SemanticCompareWithFile compares a generated pipeline with an existing YAML file semantically. +// This handles differences in comments, field ordering, and YAML anchor expansion. +func SemanticCompareWithFile(p *Pipeline, path string) (*SemanticCompareResult, error) { + generated, err := p.MarshalYAML() + if err != nil { + return nil, fmt.Errorf("marshaling generated pipeline: %w", err) + } + + expected, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("reading expected file %s: %w", path, err) + } + + return SemanticCompare(generated, expected) +} + +// SemanticCompare compares two YAML representations semantically by parsing them +// into maps and comparing the data structures. This ignores comments, field ordering, +// and handles YAML anchor expansion. 
+func SemanticCompare(generated, expected []byte) (*SemanticCompareResult, error) { + result := &SemanticCompareResult{} + + var genData, expData map[string]any + if err := yaml.Unmarshal(generated, &genData); err != nil { + result.ParseError = fmt.Errorf("parsing generated YAML: %w", err) + return result, nil + } + if err := yaml.Unmarshal(expected, &expData); err != nil { + result.ParseError = fmt.Errorf("parsing expected YAML: %w", err) + return result, nil + } + + // Remove the 'common' key used for YAML anchors (not part of actual pipeline) + delete(expData, "common") + + result.Differences = compareValues("", genData, expData) + result.Equal = len(result.Differences) == 0 + + return result, nil +} + +// compareValues recursively compares two values and returns differences. +func compareValues(path string, generated, expected any) []string { + var diffs []string + + if generated == nil && expected == nil { + return nil + } + + // Handle wait step equivalence: wait: ~ (null) == wait: "" (empty string) + if path == "wait" || strings.HasSuffix(path, ".wait") { + genStr, genIsStr := generated.(string) + if expected == nil && genIsStr && genStr == "" { + return nil + } + expStr, expIsStr := expected.(string) + if generated == nil && expIsStr && expStr == "" { + return nil + } + } + + if generated == nil { + return []string{fmt.Sprintf("%s: missing in generated (expected: %v)", path, expected)} + } + if expected == nil { + return []string{fmt.Sprintf("%s: extra in generated (value: %v)", path, generated)} + } + + // Normalize types for comparison + generated = normalizeValue(generated) + expected = normalizeValue(expected) + + switch exp := expected.(type) { + case map[string]any: + gen, ok := generated.(map[string]any) + if !ok { + return []string{fmt.Sprintf("%s: type mismatch (generated: %T, expected: map)", path, generated)} + } + // Check all expected keys + for k, v := range exp { + newPath := k + if path != "" { + newPath = path + "." 
+ k + } + diffs = append(diffs, compareValues(newPath, gen[k], v)...) + } + // Check for extra keys in generated + for k, v := range gen { + if _, exists := exp[k]; !exists { + newPath := k + if path != "" { + newPath = path + "." + k + } + diffs = append(diffs, fmt.Sprintf("%s: extra in generated (value: %v)", newPath, v)) + } + } + + case []any: + gen, ok := generated.([]any) + if !ok { + return []string{fmt.Sprintf("%s: type mismatch (generated: %T, expected: array)", path, generated)} + } + if len(gen) != len(exp) { + return []string{fmt.Sprintf("%s: array length mismatch (generated: %d, expected: %d)", path, len(gen), len(exp))} + } + for i := range exp { + newPath := fmt.Sprintf("%s[%d]", path, i) + diffs = append(diffs, compareValues(newPath, gen[i], exp[i])...) + } + + default: + if !reflect.DeepEqual(generated, expected) { + return []string{fmt.Sprintf("%s: value mismatch (generated: %v, expected: %v)", path, generated, expected)} + } + } + + return diffs +} + +// normalizeValue normalizes values for comparison (e.g., int to int64, trim trailing newlines). 
+func normalizeValue(v any) any { + switch val := v.(type) { + case string: + // Normalize trailing newlines (YAML block scalars add trailing newline) + return strings.TrimRight(val, "\n") + case int: + return int64(val) + case int32: + return int64(val) + case float64: + // YAML often parses integers as float64 + if val == float64(int64(val)) { + return int64(val) + } + return val + case map[any]any: + // Convert map[any]any to map[string]any + result := make(map[string]any) + for k, v := range val { + result[fmt.Sprintf("%v", k)] = normalizeValue(v) + } + return result + case map[string]any: + result := make(map[string]any) + for k, v := range val { + result[k] = normalizeValue(v) + } + return result + case []any: + result := make([]any, len(val)) + for i, v := range val { + result[i] = normalizeValue(v) + } + return result + default: + return v + } +} diff --git a/dev-tools/buildkite/pipelines/agentless_app_release.go b/dev-tools/buildkite/pipelines/agentless_app_release.go new file mode 100644 index 00000000000..d02bae1d4bc --- /dev/null +++ b/dev-tools/buildkite/pipelines/agentless_app_release.go @@ -0,0 +1,116 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipelines + +import ( + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" +) + +// AgentlessAppRelease generates the pipeline.agentless-app-release.yaml pipeline. +// This pipeline builds and publishes the agentless service container to the internal registry. 
+func AgentlessAppRelease() *pipeline.Pipeline { + // Packaging step for linux/amd64 + packagingAmd64 := pipeline.CommandWithKey( + "Packaging: Service Container linux/amd64", + "packaging-service-container-amd64", + ".buildkite/scripts/steps/integration-package.sh", + ) + pipeline.SetEnv(packagingAmd64, map[string]string{ + "PACKAGES": "docker", + "PLATFORMS": "linux/amd64", + "DOCKER_VARIANTS": "service", + }) + pipeline.SetArtifactPaths(packagingAmd64, "build/distributions/elastic-agent-service-git-*.docker.tar.gz") + pipeline.SetAgent(packagingAmd64, pipeline.Agent{ + "provider": "gcp", + "machineType": "c2-standard-16", + "diskSizeGb": 400, + }) + + // Packaging step for linux/arm64 + packagingArm64 := pipeline.CommandWithKey( + "Packaging: Service Container linux/arm64", + "packaging-service-container-arm64", + ".buildkite/scripts/steps/integration-package.sh", + ) + pipeline.SetEnv(packagingArm64, map[string]string{ + "PACKAGES": "docker", + "PLATFORMS": "linux/arm64", + "DOCKER_VARIANTS": "service", + }) + pipeline.SetArtifactPaths(packagingArm64, "build/distributions/elastic-agent-service-git-*.docker.tar.gz") + pipeline.SetAgent(packagingArm64, pipeline.Agent{ + "provider": "aws", + "instanceType": "t4g.2xlarge", + "imagePrefix": "core-ubuntu-2204-aarch64", + "diskSizeGb": 400, + }) + + // Publish to internal registry + publishStep := pipeline.CommandWithKey( + "Publish to internal registry", + "mirror-elastic-agent", + ".buildkite/scripts/steps/ecp-internal-release.sh", + ) + pipeline.SetAgent(publishStep, pipeline.Agent{ + "provider": "gcp", + "machineType": "c2-standard-16", + }) + pipeline.AddPlugin(publishStep, "elastic/vault-docker-login#v0.5.2", map[string]any{ + "secret_path": "kv/ci-shared/platform-ingest/elastic_docker_registry", + }) + + // Validate docker image + validateStep := pipeline.Command( + ":docker: Validate docker image is built for all architectures", + ".buildkite/scripts/steps/validate-agentless-docker-image.sh", + ) + 
pipeline.AddEnv(validateStep, "SERVICE_VERSION", "${VERSION}") + pipeline.SetAgent(validateStep, pipeline.Agent{ + "image": "docker.elastic.co/ci-agent-images/observability/oci-image-tools-agent:latest@sha256:a4ababd1347111759babc05c9ad5a680f4af48892784951358488b7e7fc94af9", + }) + pipeline.AddPlugin(validateStep, "elastic/vault-docker-login#v0.6.3", map[string]any{ + "secret_path": "kv/ci-shared/platform-ingest/elastic_docker_registry", + }) + + // Promote agentless app release + // Note: $$ is Buildkite's escape sequence for $ to prevent variable interpolation + promoteCommand := `export COMMIT_HASH=$$(buildkite-agent meta-data get git-short-commit) +if [ $$(buildkite-agent step get "outcome" --step "mirror-elastic-agent") == "passed" ]; then + cat <<- YAML | buildkite-agent pipeline upload + steps: + - label: ":serverless::argo: Run synthetics tests and update agentless to $${COMMIT_HASH} in serverless-gitops" + async: true + branches: main + trigger: gpctl-promote-after-serverless-devenv-synthetics + build: + env: + SERVICE_COMMIT_HASH: $${COMMIT_HASH} + SERVICE: agentless + SYNTHETICS_PROJECT: "agentless" + SYNTHETICS_TAG: "agentless-ci" +YAML +fi` + promoteStep := pipeline.Command( + ":grey_question: Promote agentless app release if validation passes", + promoteCommand, + ) + pipeline.SetIf(promoteStep, `build.env("DRY_RUN") == null || build.env("DRY_RUN") == "false"`) + pipeline.SetDependsOn(promoteStep, "mirror-elastic-agent") + pipeline.SetAgent(promoteStep, pipeline.Agent{ + "image": "docker.elastic.co/ci-agent-images/serverless-helm-builder:0.0.2@sha256:d00e8a7a0ab3618cfaacb0a7b1e1b06ee29728eb2b44de602374bd8f6b9b92ac", + }) + + return pipeline.New(). + Env("VERSION", "${BUILDKITE_COMMIT:0:12}"). + Add(packagingAmd64). + Add(packagingArm64). + Wait(). + Add(publishStep). + Wait(). + Add(validateStep). + Wait(). 
+ Add(promoteStep) +} diff --git a/dev-tools/buildkite/pipelines/gce_cleanup.go b/dev-tools/buildkite/pipelines/gce_cleanup.go new file mode 100644 index 00000000000..f83ba702545 --- /dev/null +++ b/dev-tools/buildkite/pipelines/gce_cleanup.go @@ -0,0 +1,23 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipelines + +import ( + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" +) + +// GCECleanup generates the pipeline.elastic-agent-gce-cleanup.yml pipeline. +// This pipeline removes stale GCE instances having matching labels, name prefixes +// and older than 24 hours. +func GCECleanup() *pipeline.Pipeline { + step := pipeline.CommandWithKey("GCE Cleanup", "gce-cleanup", ".buildkite/scripts/steps/gce-cleanup.sh") + pipeline.SetAgent(step, pipeline.Agent{ + "provider": "gcp", + }) + + return pipeline.New(). + Env("VAULT_PATH", pipeline.VaultPathGCP). + Add(step) +} diff --git a/dev-tools/buildkite/pipelines/pipelines_test.go b/dev-tools/buildkite/pipelines/pipelines_test.go new file mode 100644 index 00000000000..d6afa7e5655 --- /dev/null +++ b/dev-tools/buildkite/pipelines/pipelines_test.go @@ -0,0 +1,101 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipelines + +import ( + "os" + "path/filepath" + "strings" + "testing" + + "gotest.tools/v3/golden" + + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" +) + +// findRepoRoot finds the repository root by looking for go.mod. 
+func findRepoRoot(t *testing.T) string { + t.Helper() + + dir, err := os.Getwd() + if err != nil { + t.Fatalf("failed to get working directory: %v", err) + } + + for { + if _, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil { + return dir + } + parent := filepath.Dir(dir) + if parent == dir { + t.Fatal("could not find repository root (go.mod)") + } + dir = parent + } +} + +func TestGCECleanup(t *testing.T) { + p := GCECleanup() + + yaml, err := p.MarshalYAML() + if err != nil { + t.Fatalf("failed to marshal pipeline: %v", err) + } + + // Golden file test - update with: go test -update + golden.AssertBytes(t, yaml, "pipeline.elastic-agent-gce-cleanup.yml") +} + +func TestGCECleanupMatchesActual(t *testing.T) { + repoRoot := findRepoRoot(t) + actualPath := filepath.Join(repoRoot, ".buildkite", "pipeline.elastic-agent-gce-cleanup.yml") + + p := GCECleanup() + result, err := pipeline.SemanticCompareWithFile(p, actualPath) + if err != nil { + t.Fatalf("failed to compare: %v", err) + } + + if result.ParseError != nil { + t.Fatalf("failed to parse YAML: %v", result.ParseError) + } + + if !result.Equal { + t.Errorf("Generated pipeline does not match %s:\n%s", + actualPath, strings.Join(result.Differences, "\n")) + } +} + +func TestAgentlessAppRelease(t *testing.T) { + p := AgentlessAppRelease() + + yaml, err := p.MarshalYAML() + if err != nil { + t.Fatalf("failed to marshal pipeline: %v", err) + } + + // Golden file test - update with: go test -update + golden.AssertBytes(t, yaml, "pipeline.agentless-app-release.yaml") +} + +func TestAgentlessAppReleaseMatchesActual(t *testing.T) { + repoRoot := findRepoRoot(t) + actualPath := filepath.Join(repoRoot, ".buildkite", "pipeline.agentless-app-release.yaml") + + p := AgentlessAppRelease() + result, err := pipeline.SemanticCompareWithFile(p, actualPath) + if err != nil { + t.Fatalf("failed to compare: %v", err) + } + + if result.ParseError != nil { + t.Fatalf("failed to parse YAML: %v", result.ParseError) + } + + if 
!result.Equal { + t.Errorf("Generated pipeline does not match %s:\n%s", + actualPath, strings.Join(result.Differences, "\n")) + } +} diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml b/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml new file mode 100644 index 00000000000..152b2ebcda5 --- /dev/null +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml @@ -0,0 +1,74 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + VERSION: ${BUILDKITE_COMMIT:0:12} +steps: + - agents: + diskSizeGb: 400 + machineType: c2-standard-16 + provider: gcp + artifact_paths: + - build/distributions/elastic-agent-service-git-*.docker.tar.gz + command: .buildkite/scripts/steps/integration-package.sh + env: + DOCKER_VARIANTS: service + PACKAGES: docker + PLATFORMS: linux/amd64 + key: packaging-service-container-amd64 + label: "Packaging: Service Container linux/amd64" + - agents: + diskSizeGb: 400 + imagePrefix: core-ubuntu-2204-aarch64 + instanceType: t4g.2xlarge + provider: aws + artifact_paths: + - build/distributions/elastic-agent-service-git-*.docker.tar.gz + command: .buildkite/scripts/steps/integration-package.sh + env: + DOCKER_VARIANTS: service + PACKAGES: docker + PLATFORMS: linux/arm64 + key: packaging-service-container-arm64 + label: "Packaging: Service Container linux/arm64" + - wait: "" + - agents: + machineType: c2-standard-16 + provider: gcp + command: .buildkite/scripts/steps/ecp-internal-release.sh + key: mirror-elastic-agent + label: Publish to internal registry + plugins: + - elastic/vault-docker-login#v0.5.2: + secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry + - wait: "" + - agents: + image: docker.elastic.co/ci-agent-images/observability/oci-image-tools-agent:latest@sha256:a4ababd1347111759babc05c9ad5a680f4af48892784951358488b7e7fc94af9 + command: 
.buildkite/scripts/steps/validate-agentless-docker-image.sh + env: + SERVICE_VERSION: ${VERSION} + label: ":docker: Validate docker image is built for all architectures" + plugins: + - elastic/vault-docker-login#v0.6.3: + secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry + - wait: "" + - agents: + image: docker.elastic.co/ci-agent-images/serverless-helm-builder:0.0.2@sha256:d00e8a7a0ab3618cfaacb0a7b1e1b06ee29728eb2b44de602374bd8f6b9b92ac + command: |- + export COMMIT_HASH=$$(buildkite-agent meta-data get git-short-commit) + if [ $$(buildkite-agent step get "outcome" --step "mirror-elastic-agent") == "passed" ]; then + cat <<- YAML | buildkite-agent pipeline upload + steps: + - label: ":serverless::argo: Run synthetics tests and update agentless to $${COMMIT_HASH} in serverless-gitops" + async: true + branches: main + trigger: gpctl-promote-after-serverless-devenv-synthetics + build: + env: + SERVICE_COMMIT_HASH: $${COMMIT_HASH} + SERVICE: agentless + SYNTHETICS_PROJECT: "agentless" + SYNTHETICS_TAG: "agentless-ci" + YAML + fi + depends_on: mirror-elastic-agent + if: build.env("DRY_RUN") == null || build.env("DRY_RUN") == "false" + label: ":grey_question: Promote agentless app release if validation passes" diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-gce-cleanup.yml b/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-gce-cleanup.yml new file mode 100644 index 00000000000..488241fd934 --- /dev/null +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-gce-cleanup.yml @@ -0,0 +1,9 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + VAULT_PATH: kv/ci-shared/observability-ingest/cloud/gcp +steps: + - agents: + provider: gcp + command: .buildkite/scripts/steps/gce-cleanup.sh + key: gce-cleanup + label: GCE Cleanup diff --git a/go.mod b/go.mod index 8644d094cab..8e3ac710036 100644 --- a/go.mod +++ b/go.mod @@ -81,6 +81,7 @@ require ( 
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.1 gotest.tools/gotestsum v1.13.0 + gotest.tools/v3 v3.5.2 helm.sh/helm/v3 v3.19.4 howett.net/plist v1.0.1 k8s.io/api v0.34.2 From 3300b6fd607d09df36220c9b3fed85d4390669d9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Sat, 13 Dec 2025 17:01:59 +0100 Subject: [PATCH 03/10] Generate more buildkite pipelines using Go code --- dev-tools/buildkite/pipeline/pipeline.go | 10 +- dev-tools/buildkite/pipeline/step.go | 63 +++ .../pipelines/elastic_agent_package.go | 242 +++++++++++ .../pipelines/integration_pipeline.go | 312 ++++++++++++++ dev-tools/buildkite/pipelines/pipeline.go | 382 ++++++++++++++++++ .../buildkite/pipelines/pipelines_test.go | 124 +++--- .../testdata/integration.pipeline.yml | 207 ++++++++++ .../pipeline.elastic-agent-package.yml | 200 +++++++++ .../buildkite/pipelines/testdata/pipeline.yml | 351 ++++++++++++++++ 9 files changed, 1832 insertions(+), 59 deletions(-) create mode 100644 dev-tools/buildkite/pipelines/elastic_agent_package.go create mode 100644 dev-tools/buildkite/pipelines/integration_pipeline.go create mode 100644 dev-tools/buildkite/pipelines/pipeline.go create mode 100644 dev-tools/buildkite/pipelines/testdata/integration.pipeline.yml create mode 100644 dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml create mode 100644 dev-tools/buildkite/pipelines/testdata/pipeline.yml diff --git a/dev-tools/buildkite/pipeline/pipeline.go b/dev-tools/buildkite/pipeline/pipeline.go index f0fc0e6f9ae..61ea8cda39a 100644 --- a/dev-tools/buildkite/pipeline/pipeline.go +++ b/dev-tools/buildkite/pipeline/pipeline.go @@ -318,7 +318,15 @@ func normalizeValue(v any) any { switch val := v.(type) { case string: // Normalize trailing newlines (YAML block scalars add trailing newline) - return strings.TrimRight(val, "\n") + trimmed := strings.TrimRight(val, "\n") + // Normalize boolean strings to actual booleans + if trimmed == "true" { + return true + } + if 
trimmed == "false" { + return false + } + return trimmed case int: return int64(val) case int32: diff --git a/dev-tools/buildkite/pipeline/step.go b/dev-tools/buildkite/pipeline/step.go index 4b89c5fa528..3d5147aa06f 100644 --- a/dev-tools/buildkite/pipeline/step.go +++ b/dev-tools/buildkite/pipeline/step.go @@ -479,3 +479,66 @@ func SetInputIf(input *buildkite.InputStep, condition string) *buildkite.InputSt input.If = Ptr(condition) return input } + +// SetIfChanged sets if_changed patterns on a command step. +func SetIfChanged(step *buildkite.CommandStep, include ...string) *buildkite.CommandStep { + step.IfChanged = &buildkite.IfChanged{ + IfChanged: &buildkite.IfChangedObject{ + Include: &buildkite.IfChangedObjectInclude{ + StringArray: include, + }, + }, + } + return step +} + +// SetTriggerIfChanged sets if_changed patterns on a trigger step. +func SetTriggerIfChanged(trigger *buildkite.TriggerStep, include ...string) *buildkite.TriggerStep { + trigger.IfChanged = &buildkite.IfChanged{ + IfChanged: &buildkite.IfChangedObject{ + Include: &buildkite.IfChangedObjectInclude{ + StringArray: include, + }, + }, + } + return trigger +} + +// SetTriggerBuildWithMessage sets build configuration with message on a trigger step. +func SetTriggerBuildWithMessage(trigger *buildkite.TriggerStep, commit, branch, message string, env map[string]string) *buildkite.TriggerStep { + build := &buildkite.TriggerStepBuild{} + if commit != "" { + build.Commit = Ptr(commit) + } + if branch != "" { + build.Branch = Ptr(branch) + } + if message != "" { + build.Message = Ptr(message) + } + if len(env) > 0 { + envMap := make(buildkite.Env, len(env)) + for k, v := range env { + envMap[k] = v + } + build.Env = &envMap + } + trigger.Build = build + return trigger +} + +// SetCommands sets multiple commands on a command step using the 'commands' field. 
+func SetCommands(step *buildkite.CommandStep, commands ...string) *buildkite.CommandStep {
+	step.Commands = &buildkite.CommandStepCommand{
+		StringArray: commands,
+	}
+	return step
+}
+
+// WaitIf creates a conditional wait step.
+func WaitIf(condition string) *buildkite.WaitStep {
+	return &buildkite.WaitStep{
+		Wait: Ptr(""),
+		If:   Ptr(condition),
+	}
+}
diff --git a/dev-tools/buildkite/pipelines/elastic_agent_package.go b/dev-tools/buildkite/pipelines/elastic_agent_package.go
new file mode 100644
index 00000000000..13073e21cee
--- /dev/null
+++ b/dev-tools/buildkite/pipelines/elastic_agent_package.go
@@ -0,0 +1,242 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License 2.0;
+// you may not use this file except in compliance with the Elastic License 2.0.
+
+package pipelines
+
+import (
+	"github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline"
+)
+
+// ElasticAgentPackage generates the .buildkite/pipeline.elastic-agent-package.yml pipeline.
+// This pipeline handles DRA (Daily Releasable Artifacts) packaging.
+func ElasticAgentPackage() *pipeline.Pipeline {
+	p := pipeline.New().
+		Env("BEAT_NAME", "elastic-agent").
+		Env("BEAT_URL", "https://www.elastic.co/elastic-agent").
+		Env("IMAGE_UBUNTU_2404_X86_64", pipeline.ImageUbuntu2404X86).
+ Env("IMAGE_UBUNTU_2404_ARM_64", pipeline.ImageUbuntu2404ARM) + + // Input step for build parameters + input := pipeline.Input("Build parameters") + pipeline.SetInputIf(input, `build.env("MANIFEST_URL") == null`) + pipeline.AddInputField(input, "MANIFEST_URL", "MANIFEST_URL", "", "Link to the build manifest URL.", true) + pipeline.AddSelectField(input, "Mage verbose", "MAGEFILE_VERBOSE", "Increase verbosity of the mage commands, defaults to 0", false, + pipeline.SelectOption{Label: "True", Value: "1"}, + pipeline.SelectOption{Label: "False", Value: "0"}, + ) + pipeline.AddSelectField(input, "DRA Workflow", "DRA_WORKFLOW", "What workflow of the DRA release process this build is going to be triggered for", true, + pipeline.SelectOption{Label: "snapshot", Value: "snapshot"}, + pipeline.SelectOption{Label: "staging", Value: "staging"}, + ) + pipeline.AddInputField(input, "DRA Version", "DRA_VERSION", "", "The packaging version to use", true) + pipeline.AddSelectField(input, "DRA DRY-RUN", "DRA_DRY_RUN", "If the DRA release manager script would actually publish anything or just print", false, + pipeline.SelectOption{Label: "True", Value: "--dry-run"}, + pipeline.SelectOption{Label: "False", Value: ""}, + ) + p.Add(input) + + // Conditional wait after input + p.Add(pipeline.WaitIf(`build.env("MANIFEST_URL") == null`)) + + // Packaging group + p.Add(packageGroup()) + + // DRA publish step + p.Add(draPublishStep()) + + // BK API publish for independent agent release + p.Add(bkAPIPublishStep()) + + return p +} + +// packageGroup creates the packaging artefacts group. 
+func packageGroup() *pipeline.GroupStep {
+	// NOTE(review): the label begins with a bare ":" — looks like a truncated emoji
+	// name (e.g. ":package: Packaging Artefacts"); confirm against the source YAML.
+	group := pipeline.GroupWithKey(":Packaging Artefacts", "package")
+
+	// Cross-platform packaging (x86_64)
+	crossBuild := draPackageStep(
+		":package: FIPS={{matrix.fips}} Cross Building and package elastic-agent",
+		"package_elastic-agent",
+		"gcp",
+		"c2-standard-16",
+		"${IMAGE_UBUNTU_2404_X86_64}",
+		400,
+		"linux/amd64 windows/amd64 darwin/amd64",
+		"",
+		true, // with docker login
+	)
+	pipeline.AddGroupStep(group, crossBuild)
+
+	// ARM packaging
+	armBuild := draPackageStep(
+		":package: FIPS={{matrix.fips}} Package ARM elastic-agent",
+		"package_elastic-agent-arm",
+		"aws",
+		"t4g.2xlarge",
+		"${IMAGE_UBUNTU_2404_ARM_64}",
+		400,
+		"linux/arm64 darwin/arm64 windows/arm64",
+		"docker,tar.gz,deb,rpm,zip",
+		false, // no docker login
+	)
+	pipeline.AddGroupStep(group, armBuild)
+
+	return group
+}
+
+// draPackageStep creates a DRA packaging step with matrix for FIPS.
+// An empty packages string omits the PACKAGES env var (provider defaults apply).
+func draPackageStep(label, key, provider, machineType, image string, diskSize int, platforms, packages string, dockerLogin bool) *pipeline.CommandStep {
+	command := `if [[ -z "$${MANIFEST_URL}" ]]; then
+  export MANIFEST_URL=$(buildkite-agent meta-data get MANIFEST_URL --default "")
+  if [[ -z "$${MANIFEST_URL}" ]]; then
+    echo ":broken_heart: Missing MANIFEST_URL variable or empty string provided"
+    exit 1
+  fi
+fi
+if [[ -z "$${MAGEFILE_VERBOSE}" ]]; then
+  export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0")
+fi
+.buildkite/scripts/steps/package.sh`
+
+	// ARM build has additional multiarch setup; NOTE(review): provider == "aws"
+	// is used as a proxy for "this is the ARM build" — confirm that stays true.
+	if provider == "aws" {
+		command = `echo "Add support for multiarch"
+docker run --privileged --rm tonistiigi/binfmt:master --install all
+
+` + command + `
+ls -lahR build/`
+	}
+
+	step := pipeline.CommandWithKey(label, key, command)
+
+	env := map[string]string{
+		"PLATFORMS": platforms,
+		"FIPS":      "{{matrix.fips}}",
+	}
+	if packages != "" {
+		env["PACKAGES"] = packages
+	}
+	pipeline.SetEnv(step, env)
+
+	agent := pipeline.Agent{
+		"provider":   provider,
+		"image":      image,
+		"diskSizeGb": diskSize,
+	}
+	if provider == "gcp" {
+		agent["machineType"] = machineType
+	} else {
+		agent["instanceType"] = machineType
+	}
+	pipeline.SetAgent(step, agent)
+
+	pipeline.SetArtifactPaths(step, "build/distributions/**/*")
+
+	// Matrix for FIPS true/false
+	pipeline.SetMatrix(step, map[string][]string{
+		"fips": {"false", "true"},
+	})
+
+	if dockerLogin {
+		pipeline.WithVaultDockerLogin(step)
+	}
+
+	return step
+}
+
+// draPublishStep creates the DRA publish step.
+// It resolves MAGEFILE_VERBOSE/DRA_* from Buildkite meta-data when unset,
+// then delegates to the dra-publish.sh script.
+func draPublishStep() *pipeline.CommandStep {
+	command := `echo "+++ Restoring Artifacts"
+buildkite-agent artifact download "build/**/*" .
+
+echo "+++ Changing permissions for the release manager"
+sudo chmod -R a+r build/distributions/
+sudo chown -R :1000 build/distributions/
+ls -lahR build/
+
+echo "+++ Running DRA publish step"
+if [[ -z "$${MAGEFILE_VERBOSE}" ]]; then
+  export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0")
+fi
+if [[ -z "$${DRA_DRY_RUN}" ]]; then
+  DRA_DRY_RUN=$(buildkite-agent meta-data get DRA_DRY_RUN --default "")
+  export DRA_DRY_RUN
+fi
+if [[ -z "$${DRA_VERSION}" ]]; then
+  DRA_VERSION=$(buildkite-agent meta-data get DRA_VERSION --default "")
+  export DRA_VERSION
+fi
+if [[ -z "$${DRA_WORKFLOW}" ]]; then
+  DRA_WORKFLOW=$(buildkite-agent meta-data get DRA_WORKFLOW --default "")
+  export DRA_WORKFLOW
+fi
+.buildkite/scripts/steps/dra-publish.sh`
+
+	step := pipeline.CommandWithKey(":elastic-stack: Publishing to DRA", "dra-publish", command)
+	pipeline.SetIf(step, `build.env("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG") == null || build.env("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG") != "independent-agent-release-staging"`)
+	pipeline.SetDependsOn(step, "package")
+	pipeline.SetAgent(step, pipeline.Agent{
+		"provider": "gcp",
+		"image":    "${IMAGE_UBUNTU_2404_X86_64}",
+	})
+	pipeline.SetEnv(step, map[string]string{
+		"DRA_PROJECT_ID":          "elastic-agent-package",
+		"DRA_PROJECT_ARTIFACT_ID": "agent-package",
+	})
+
+	return step
+}
+
+// bkAPIPublishStep creates the BK API publish step for independent agent release.
+// Runs only when triggered from the independent-agent-release-staging pipeline
+// (mutually exclusive with draPublishStep's `if` condition).
+func bkAPIPublishStep() *pipeline.CommandStep {
+	command := `echo "+++ Restoring Artifacts"
+buildkite-agent artifact download "build/**/*" .
+echo "+++ Changing permissions for the BK API commands"
+sudo chown -R :1000 build/distributions/
+echo "--- File listing"
+ls -alR build
+echo "--- Copy workaround for ironbank container filename"
+.buildkite/scripts/steps/ironbank-cp-workaround.sh
+echo "--- File listing after workaround"
+ls -alR build
+echo "+++ Checking artifact validity with release-manager collect dry run"
+DRA_DRY_RUN="--dry-run"
+export DRA_DRY_RUN
+.buildkite/scripts/steps/dra-publish.sh
+# Artifacts will be uploaded via the artifact_paths entry above
+echo "+++ Set job metadata if TRIGGER_JOB_ID is properly set"
+if [[ -z "$${TRIGGER_JOB_ID}" ]]; then
+  echo "TRIGGER_JOB_ID is not set, so not setting metadata"
+else
+  # If a pipeline that triggered this build passes in a "TRIGGER_JOB_ID" env var, that
+  # is an indicator that it will want us to set some metadata in that calling build
+  # so that it can reference this specific build ID in order to easily download
+  # artifacts saved off in this build.
+  #
+  # This is a much easier way to pull back artifacts from a triggered build than using
+  # a Buildkite token that we then have to manage via vault, etc.
+  # See: https://forum.buildkite.community/t/how-to-download-artifacts-back-from-triggered-pipeline/3480/2
+  echo "Setting metadata for job that trigger this one"
+  buildkite-agent meta-data set "triggered_build_id" "$BUILDKITE_BUILD_ID" --job $TRIGGER_JOB_ID
+  buildkite-agent meta-data set "triggered_commit_hash" "$BUILDKITE_COMMIT" --job $TRIGGER_JOB_ID
+fi`
+
+	step := pipeline.CommandWithKey("Publishing via BK API for Independent Agent Release", "bk-api-publish-independent-agent", command)
+	pipeline.SetIf(step, `build.env("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG") == "independent-agent-release-staging"`)
+	pipeline.SetDependsOn(step, "package")
+	pipeline.SetAgent(step, pipeline.Agent{
+		"provider":    "gcp",
+		"machineType": pipeline.MachineTypeN2Standard8,
+		"diskSizeGb":  400,
+		"image":       "${IMAGE_UBUNTU_2404_X86_64}",
+	})
+	pipeline.SetEnv(step, map[string]string{
+		"DRA_PROJECT_ID":          "elastic-agent-package",
+		"DRA_PROJECT_ARTIFACT_ID": "agent-package",
+	})
+	pipeline.SetArtifactPaths(step, "build/distributions/**/*")
+
+	return step
+}
diff --git a/dev-tools/buildkite/pipelines/integration_pipeline.go b/dev-tools/buildkite/pipelines/integration_pipeline.go
new file mode 100644
index 00000000000..bac2aca931a
--- /dev/null
+++ b/dev-tools/buildkite/pipelines/integration_pipeline.go
@@ -0,0 +1,312 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License 2.0;
+// you may not use this file except in compliance with the Elastic License 2.0.
+
+package pipelines
+
+import (
+	"github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline"
+)
+
+// IntegrationPipeline generates the .buildkite/integration.pipeline.yml pipeline.
+// This pipeline handles packaging and triggers integration tests.
+func IntegrationPipeline() *pipeline.Pipeline {
+	p := pipeline.New().
+		Env("VAULT_PATH", pipeline.VaultPathGCP).
+		Env("ASDF_MAGE_VERSION", "1.14.0").
+		Env("BUILDKIT_PROGRESS", "plain").
+		Env("IMAGE_UBUNTU_2204_X86_64", pipeline.ImageUbuntu2204X86).
+		Env("IMAGE_UBUNTU_2204_ARM_64", pipeline.ImageUbuntu2204ARM)
+
+	// Integration tests: packaging group
+	p.Add(packagingGroup())
+
+	// Trigger integration tests
+	triggerIntegration := pipeline.Command("Triggering Integration tests", "buildkite-agent pipeline upload .buildkite/bk.integration.pipeline.yml")
+	p.Add(triggerIntegration)
+
+	// Trigger FIPS integration tests
+	triggerFips := pipeline.Command("Triggering custom FIPS integration tests", "buildkite-agent pipeline upload .buildkite/bk.integration-fips.pipeline.yml")
+	p.Add(triggerFips)
+
+	return p
+}
+
+// packagingGroup creates the "Integration tests: packaging" group.
+// It enumerates every packaging permutation: arch (amd64/arm64) x
+// variant (plain / OTEL_COMPONENT / FIPS) x form (archives vs containers).
+func packagingGroup() *pipeline.GroupStep {
+	group := pipeline.GroupWithKey("Integration tests: packaging", "int-packaging")
+	pipeline.SetGroupNotify(group, "buildkite/elastic-agent - Packaging")
+
+	// Packaging amd64 non-containers
+	amd64 := packagingStep(
+		":package: amd64: zip,tar.gz,rpm,deb ",
+		"packaging-amd64",
+		"windows/amd64,linux/amd64",
+		"zip,tar.gz,rpm,deb",
+		false, // not FIPS
+		false, // not OTEL
+		"gcp",
+		pipeline.MachineTypeN2Standard8,
+		"${IMAGE_UBUNTU_2204_X86_64}",
+		0, // no extra disk
+		false,
+	)
+	pipeline.AddGroupStep(group, amd64)
+
+	// Packaging amd64 OTEL_COMPONENT
+	amd64Otel := packagingStep(
+		":package: amd64: OTEL_COMPONENT zip,tar.gz,rpm,deb ",
+		"packaging-amd64-otel-component",
+		"windows/amd64,linux/amd64",
+		"zip,tar.gz,rpm,deb",
+		false,
+		true, // OTEL
+		"gcp",
+		pipeline.MachineTypeN2Standard8,
+		"${IMAGE_UBUNTU_2204_X86_64}",
+		0,
+		false,
+	)
+	pipeline.AddGroupStep(group, amd64Otel)
+
+	// Packaging amd64 FIPS tar.gz
+	amd64Fips := packagingStep(
+		":package: amd64: FIPS tar.gz",
+		"packaging-amd64-fips",
+		"linux/amd64",
+		"tar.gz",
+		true, // FIPS
+		false,
+		"gcp",
+		"n2-standard-4",
+		"${IMAGE_UBUNTU_2204_X86_64}",
+		0,
+		false,
+	)
+	pipeline.AddGroupStep(group, amd64Fips)
+
+	// Packaging arm64 zip,tar.gz
+	arm64 := packagingStep(
+		":package: arm64: zip,tar.gz",
+		"packaging-arm64",
+		"windows/arm64,linux/arm64",
+		"tar.gz,zip",
+		false,
+		false,
+		"aws",
+		"c6g.2xlarge",
+		"${IMAGE_UBUNTU_2204_ARM_64}",
+		0,
+		false,
+	)
+	pipeline.AddGroupStep(group, arm64)
+
+	// Packaging arm64 OTEL_COMPONENT
+	arm64Otel := packagingStep(
+		":package: arm64: OTEL_COMPONENT zip,tar.gz",
+		"packaging-arm64-otel-component",
+		"windows/arm64,linux/arm64",
+		"tar.gz,zip",
+		false,
+		true,
+		"aws",
+		"c6g.2xlarge",
+		"${IMAGE_UBUNTU_2204_ARM_64}",
+		0,
+		false,
+	)
+	pipeline.AddGroupStep(group, arm64Otel)
+
+	// Packaging arm64 FIPS tar.gz
+	arm64Fips := packagingStep(
+		":package: arm64: FIPS tar.gz",
+		"packaging-arm64-fips",
+		"linux/arm64",
+		"tar.gz",
+		true,
+		false,
+		"aws",
+		"c6g.2xlarge",
+		"${IMAGE_UBUNTU_2204_ARM_64}",
+		0,
+		false,
+	)
+	pipeline.AddGroupStep(group, arm64Fips)
+
+	// Container steps
+	// amd64 containers
+	containersAmd64 := containerPackagingStep(
+		":package: amd64: Containers",
+		"packaging-containers-amd64",
+		"linux/amd64",
+		false,
+		false,
+		"gcp",
+		pipeline.MachineTypeN2Standard8,
+		"${IMAGE_UBUNTU_2204_X86_64}",
+		true, // includes cloud image push
+		true, // needs docker login
+	)
+	pipeline.AddGroupStep(group, containersAmd64)
+
+	// arm64 containers
+	containersArm64 := containerPackagingStep(
+		":package: arm64: Containers",
+		"packaging-containers-arm64",
+		"linux/arm64",
+		false,
+		false,
+		"aws",
+		"c6g.4xlarge",
+		"${IMAGE_UBUNTU_2204_ARM_64}",
+		false, // no cloud image push
+		false, // no docker login
+	)
+	pipeline.AddGroupStep(group, containersArm64)
+
+	// amd64 FIPS containers
+	containersFipsAmd64 := containerPackagingStep(
+		":package: amd64: FIPS Containers",
+		"packaging-containers-amd64-fips",
+		"linux/amd64",
+		true,
+		false,
+		"gcp",
+		pipeline.MachineTypeN2Standard8,
+		"${IMAGE_UBUNTU_2204_X86_64}",
+		true,
+		true,
+	)
+	pipeline.AddGroupStep(group, containersFipsAmd64)
+
+	// arm64 FIPS containers
+	containersFipsArm64 := containerPackagingStep(
+		":package: arm64: FIPS Containers",
+		"packaging-containers-arm64-fips",
+		"linux/arm64",
+		true,
+		false,
+		"aws",
+		"c6g.2xlarge",
+		"${IMAGE_UBUNTU_2204_ARM_64}",
+		false,
+		false,
+	)
+	pipeline.AddGroupStep(group, containersFipsArm64)
+
+	// amd64 OTEL containers
+	containersOtelAmd64 := containerPackagingStep(
+		":package: amd64: OTEL_COMPONENT Containers",
+		"packaging-containers-amd64-otel-component",
+		"linux/amd64",
+		false,
+		true,
+		"gcp",
+		pipeline.MachineTypeN2Standard8,
+		"${IMAGE_UBUNTU_2204_X86_64}",
+		false,
+		true,
+	)
+	pipeline.AddGroupStep(group, containersOtelAmd64)
+
+	// arm64 OTEL containers
+	containersOtelArm64 := containerPackagingStep(
+		":package: arm64: OTEL_COMPONENT Containers",
+		"packaging-containers-arm64-otel-component",
+		"linux/arm64",
+		false,
+		true,
+		"aws",
+		"c6g.2xlarge",
+		"${IMAGE_UBUNTU_2204_ARM_64}",
+		false,
+		false,
+	)
+	pipeline.AddGroupStep(group, containersOtelArm64)
+
+	return group
+}
+
+// packagingStep creates a standard packaging step.
+func packagingStep(label, key, platforms, packages string, fips, otel bool, provider, machineType, image string, diskSize int, dockerLogin bool) *pipeline.CommandStep {
+	step := pipeline.CommandWithKey(label, key, ".buildkite/scripts/steps/integration-package.sh")
+
+	env := map[string]string{
+		"PLATFORMS": platforms,
+		"PACKAGES":  packages,
+	}
+	if fips {
+		env["FIPS"] = "true"
+	}
+	if otel {
+		env["OTEL_COMPONENT"] = "true"
+	}
+	pipeline.SetEnv(step, env)
+
+	pipeline.SetArtifactPaths(step, "build/distributions/**")
+	pipeline.SetRetryAutomatic(step, 1)
+
+	// The machine-type key differs per provider: GCP uses "machineType",
+	// everything else (AWS) uses "instanceType".
+	agent := pipeline.Agent{
+		"provider": provider,
+		"image":    image,
+	}
+	if provider == "gcp" {
+		agent["machineType"] = machineType
+	} else {
+		agent["instanceType"] = machineType
+	}
+	// diskSize <= 0 means "use the provider's default disk size".
+	if diskSize > 0 {
+		agent["diskSizeGb"] = diskSize
+	}
+	pipeline.SetAgent(step, agent)
+
+	if dockerLogin {
+		pipeline.WithVaultDockerLogin(step)
+	}
+
+	return step
+}
+
+// containerPackagingStep creates a container packaging step.
+// When cloudPush is true the step additionally pushes the cloud image after packaging.
+func containerPackagingStep(label, key, platforms string, fips, otel bool, provider, machineType, image string, cloudPush, dockerLogin bool) *pipeline.CommandStep {
+	var command string
+	if cloudPush {
+		command = ".buildkite/scripts/steps/integration-package.sh\n.buildkite/scripts/steps/integration-cloud-image-push.sh"
+	} else {
+		command = ".buildkite/scripts/steps/integration-package.sh"
+	}
+
+	step := pipeline.CommandWithKey(label, key, command)
+
+	env := map[string]string{
+		"PACKAGES":  "docker",
+		"PLATFORMS": platforms,
+	}
+	if fips {
+		env["FIPS"] = "true"
+	}
+	if otel {
+		env["OTEL_COMPONENT"] = "true"
+	}
+	pipeline.SetEnv(step, env)
+
+	pipeline.SetArtifactPaths(step, "build/distributions/**")
+
+	agent := pipeline.Agent{
+		"provider":   provider,
+		"image":      image,
+		"diskSizeGb": 200,
+	}
+	if provider == "gcp" {
+		agent["machineType"] = machineType
+	} else {
+		agent["instanceType"] = machineType
+	}
+	pipeline.SetAgent(step, agent)
+
+	if dockerLogin {
+		pipeline.WithVaultDockerLogin(step)
+	}
+
+	return step
+}
diff --git a/dev-tools/buildkite/pipelines/pipeline.go b/dev-tools/buildkite/pipelines/pipeline.go
new file mode 100644
index 00000000000..f3483543e62
--- /dev/null
+++ b/dev-tools/buildkite/pipelines/pipeline.go
@@ -0,0 +1,382 @@
+// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+// or more contributor license agreements. Licensed under the Elastic License 2.0;
+// you may not use this file except in compliance with the Elastic License 2.0.
+
+package pipelines
+
+import (
+	"github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline"
+)
+
+// Pipeline generates the main .buildkite/pipeline.yml pipeline.
+// This is the primary CI pipeline for elastic-agent including unit tests,
+// K8s tests, and various triggers.
+func Pipeline() *pipeline.Pipeline {
+	p := pipeline.New().
+		Env("VAULT_PATH", pipeline.VaultPathGCP).
+		Env("IMAGE_UBUNTU_2204_X86_64", pipeline.ImageUbuntu2204X86).
+		Env("IMAGE_UBUNTU_2204_ARM_64", pipeline.ImageUbuntu2204ARM).
+		Env("IMAGE_WIN_2016", pipeline.ImageWin2016).
+		Env("IMAGE_WIN_2022", pipeline.ImageWin2022).
+		Env("IMAGE_WIN_10", pipeline.ImageWin10).
+		Env("IMAGE_WIN_11", pipeline.ImageWin11)
+
+	// check-ci step
+	checkCI := pipeline.CommandWithKey("check-ci", "check-ci", ".buildkite/scripts/steps/check-ci.sh")
+	pipeline.SetAgent(checkCI, pipeline.Agent{
+		"provider": "gcp",
+		"image":    "${IMAGE_UBUNTU_2204_X86_64}",
+	})
+	pipeline.SetRetryManual(checkCI)
+	p.Add(checkCI)
+
+	// Unit tests group
+	p.Add(unitTestsGroup())
+
+	// macOS tests group
+	p.Add(macOSTestsGroup())
+
+	// Desktop Windows tests group
+	p.Add(desktopWindowsTestsGroup())
+
+	// Junit annotate step
+	p.Add(junitAnnotateStep())
+
+	// K8s tests group
+	p.Add(k8sTestsGroup())
+
+	// Trigger k8s sync
+	p.Add(triggerK8sSync())
+
+	// Trigger Extended tests for Pull request
+	p.Add(triggerExtendedTestsPR())
+
+	// Trigger Extended tests for branches
+	p.Add(triggerExtendedTestsBranch())
+
+	// Trigger Serverless Beats Tests
+	p.Add(triggerServerlessBeatsTests())
+
+	// Trigger Elastic Agent Package
+	p.Add(triggerElasticAgentPackage())
+
+	// DRY RUN publish to serverless
+	p.Add(triggerDryRunServerless())
+
+	// wait for CI to be done
+	p.Wait()
+
+	// Publish to serverless
+	p.Add(triggerPublishServerless())
+
+	return p
+}
+
+// unitTestsGroup creates the "Unit tests" group with all unit test steps.
+func unitTestsGroup() *pipeline.GroupStep {
+	group := pipeline.GroupWithKey("Unit tests", "unit-tests")
+
+	// Unit tests - Ubuntu 22.04
+	ubuntu2204 := pipeline.CommandWithKey("Unit tests - Ubuntu 22.04", "unit-tests-2204", ".buildkite/scripts/steps/unit-tests.sh")
+	pipeline.SetArtifactPaths(ubuntu2204, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(ubuntu2204, pipeline.Agent{
+		"provider":    "gcp",
+		"image":       "${IMAGE_UBUNTU_2204_X86_64}",
+		"machineType": pipeline.MachineTypeN2Standard8,
+	})
+	pipeline.SetRetry(ubuntu2204, 1, true)
+	pipeline.AddGroupStep(group, ubuntu2204)
+
+	// Unit tests - Ubuntu 22.04 with requirefips build tag
+	ubuntu2204Fips := pipeline.CommandWithKey("Unit tests - Ubuntu 22.04 with requirefips build tag", "unit-tests-2204-fips-tag", ".buildkite/scripts/steps/unit-tests.sh")
+	pipeline.AddEnv(ubuntu2204Fips, "FIPS", "true")
+	pipeline.SetArtifactPaths(ubuntu2204Fips, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(ubuntu2204Fips, pipeline.Agent{
+		"provider":    "gcp",
+		"image":       "${IMAGE_UBUNTU_2204_X86_64}",
+		"machineType": pipeline.MachineTypeN2Standard8,
+	})
+	pipeline.SetRetry(ubuntu2204Fips, 1, true)
+	pipeline.AddGroupStep(group, ubuntu2204Fips)
+
+	// Unit tests - fips140=only Ubuntu 22.04
+	ubuntu2204Fips140 := pipeline.CommandWithKey("Unit tests - fips140=only Ubuntu 22.04", "unit-tests-2204-fips140-only", `GODEBUG="fips140=only" .buildkite/scripts/steps/unit-tests.sh`)
+	pipeline.AddEnv(ubuntu2204Fips140, "FIPS", "true")
+	pipeline.SetArtifactPaths(ubuntu2204Fips140, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(ubuntu2204Fips140, pipeline.Agent{
+		"provider":    "gcp",
+		"image":       "${IMAGE_UBUNTU_2204_X86_64}",
+		"machineType": pipeline.MachineTypeN2Standard8,
+	})
+	pipeline.SetRetry(ubuntu2204Fips140, 1, true)
+	pipeline.AddGroupStep(group, ubuntu2204Fips140)
+
+	// Unit tests - Ubuntu 22.04 ARM64
+	ubuntu2204ARM := pipeline.CommandWithKey("Unit tests - Ubuntu 22.04 ARM64", "unit-tests-2204-arm64", ".buildkite/scripts/steps/unit-tests.sh")
+	pipeline.SetArtifactPaths(ubuntu2204ARM, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(ubuntu2204ARM, pipeline.Agent{
+		"provider":     "aws",
+		"image":        "${IMAGE_UBUNTU_2204_ARM_64}",
+		"diskSizeGb":   200,
+		"instanceType": pipeline.InstanceTypeM6gXLarge,
+	})
+	pipeline.SetRetry(ubuntu2204ARM, 1, true)
+	pipeline.AddGroupStep(group, ubuntu2204ARM)
+
+	// Unit tests - Windows 2022
+	// NOTE(review): the Windows agents use snake_case "disk_size"/"disk_type"
+	// while other agents use "diskSizeGb" — confirm both spellings are accepted.
+	win2022 := pipeline.CommandWithKey("Unit tests - Windows 2022", "unit-tests-win2022", ".buildkite/scripts/steps/unit-tests.ps1")
+	pipeline.SetArtifactPaths(win2022, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(win2022, pipeline.Agent{
+		"provider":    "gcp",
+		"image":       "${IMAGE_WIN_2022}",
+		"machineType": pipeline.MachineTypeN2Standard8,
+		"disk_size":   200,
+		"disk_type":   "pd-ssd",
+	})
+	pipeline.SetRetry(win2022, 1, true)
+	pipeline.AddGroupStep(group, win2022)
+
+	// Unit tests - Windows 2016
+	win2016 := pipeline.CommandWithKey("Unit tests - Windows 2016", "unit-tests-win2016", ".buildkite/scripts/steps/unit-tests.ps1")
+	pipeline.SetArtifactPaths(win2016, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(win2016, pipeline.Agent{
+		"provider":    "gcp",
+		"image":       "${IMAGE_WIN_2016}",
+		"machineType": pipeline.MachineTypeN2Standard8,
+		"disk_size":   200,
+		"disk_type":   "pd-ssd",
+	})
+	pipeline.SetRetry(win2016, 1, true)
+	pipeline.AddGroupStep(group, win2016)
+
+	return group
+}
+
+// macOSTestsGroup creates the "macOS tests" group.
+func macOSTestsGroup() *pipeline.GroupStep {
+	group := pipeline.GroupWithKey("macOS tests", "macos-unit-tests")
+
+	// Unit tests - macOS 15 ARM
+	macOS15 := pipeline.Command("Unit tests - macOS 15 ARM", ".buildkite/scripts/steps/unit-tests.sh")
+	pipeline.SetArtifactPaths(macOS15, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(macOS15, pipeline.Agent{
+		"provider":    "orka",
+		"imagePrefix": "generic-base-15-arm-002",
+	})
+	pipeline.SetRetry(macOS15, 1, true)
+	pipeline.AddGroupStep(group, macOS15)
+
+	// Unit tests - macOS 13 (main and release branches only)
+	macOS13 := pipeline.Command("Unit tests - macOS 13", ".buildkite/scripts/steps/unit-tests.sh")
+	pipeline.SetBranches(macOS13, "main 8.* 9.*")
+	pipeline.SetArtifactPaths(macOS13, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(macOS13, pipeline.Agent{
+		"provider":    "orka",
+		"imagePrefix": "generic-13-ventura-x64",
+	})
+	pipeline.SetRetry(macOS13, 1, true)
+	pipeline.AddGroupStep(group, macOS13)
+
+	return group
+}
+
+// desktopWindowsTestsGroup creates the "Desktop Windows tests" group.
+func desktopWindowsTestsGroup() *pipeline.GroupStep {
+	group := pipeline.GroupWithKey("Desktop Windows tests", "extended-windows")
+
+	// Unit tests - Windows 10
+	win10 := pipeline.CommandWithKey("Unit tests - Windows 10", "unit-tests-win10", ".buildkite/scripts/steps/unit-tests.ps1")
+	pipeline.SetArtifactPaths(win10, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(win10, pipeline.Agent{
+		"provider":    "gcp",
+		"image":       "${IMAGE_WIN_10}",
+		"machineType": pipeline.MachineTypeN2Standard8,
+		"disk_type":   "pd-ssd",
+	})
+	pipeline.SetRetry(win10, 1, true)
+	pipeline.AddGroupStep(group, win10)
+
+	// Unit tests - Windows 11
+	win11 := pipeline.CommandWithKey("Unit tests - Windows 11", "unit-tests-win11", ".buildkite/scripts/steps/unit-tests.ps1")
+	pipeline.SetArtifactPaths(win11, "build/TEST-*.html", "build/TEST-*.xml", "build/diagnostics/*", "coverage-*.out")
+	pipeline.SetAgent(win11, pipeline.Agent{
+		"provider":    "gcp",
+		"image":       "${IMAGE_WIN_11}",
+		"machineType": pipeline.MachineTypeN2Standard8,
+		"disk_type":   "pd-ssd",
+	})
+	pipeline.SetRetry(win11, 1, true)
+	pipeline.AddGroupStep(group, win11)
+
+	return group
+}
+
+// junitAnnotateStep creates the junit annotate step with dependencies.
+func junitAnnotateStep() *pipeline.CommandStep {
+	// Command-less step: only the junit-annotate plugin runs here.
+	step := &pipeline.CommandStep{
+		Label: pipeline.Ptr(":junit: Junit annotate"),
+	}
+	pipeline.SetAgent(step, pipeline.Agent{
+		"image": "docker.elastic.co/ci-agent-images/buildkite-junit-annotate:1.0",
+	})
+	pipeline.AddPlugin(step, "junit-annotate#v2.7.0", map[string]any{
+		"artifacts":       "**TEST-*.xml",
+		"always-annotate": true,
+		"run-in-docker":   false,
+	})
+	// AllowFailure on every dependency so the annotation runs even when tests fail.
+	pipeline.SetDependsOnWithFailure(step,
+		pipeline.DependsOnDep{Step: "unit-tests-2204", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "unit-tests-2204-fips-tag", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "unit-tests-2204-fips140-only", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "unit-tests-2204-arm64", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "unit-tests-win2022", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "unit-tests-win2016", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "macos-unit-tests", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "unit-tests-win10", AllowFailure: true},
+		pipeline.DependsOnDep{Step: "unit-tests-win11", AllowFailure: true},
+	)
+	return step
+}
+
+// k8sTestsGroup creates the "K8s tests" group with matrix.
+func k8sTestsGroup() *pipeline.GroupStep {
+	group := pipeline.GroupWithKey("K8s tests", "k8s-tests")
+
+	step := pipeline.Command("K8s tests: {{matrix.k8s_version}}", ".buildkite/scripts/steps/k8s-tests.sh")
+	pipeline.SetEnv(step, map[string]string{
+		"K8S_VERSION":  "v{{matrix.k8s_version}}",
+		"KIND_VERSION": "v0.27.0",
+	})
+	pipeline.SetAgent(step, pipeline.Agent{
+		"provider": "gcp",
+		"image":    "${IMAGE_UBUNTU_2204_X86_64}",
+	})
+	pipeline.SetMatrix(step, map[string][]string{
+		"k8s_version": {"1.33.0", "1.32.0", "1.31.0", "1.30.0", "1.29.4", "1.28.9"},
+	})
+	pipeline.SetRetryManual(step)
+	pipeline.AddGroupStep(group, step)
+
+	return group
+}
+
+// triggerK8sSync creates the trigger for k8s sync.
+func triggerK8sSync() *pipeline.CommandStep {
+	step := pipeline.Command("Trigger k8s sync", ".buildkite/scripts/steps/sync-k8s.sh")
+	pipeline.SetBranches(step, "main")
+	pipeline.SetAgent(step, pipeline.Agent{
+		"provider": "gcp",
+		"image":    "${IMAGE_UBUNTU_2204_X86_64}",
+	})
+	pipeline.AddEnv(step, "GH_VERSION", "2.4.0")
+	pipeline.SetIfChanged(step,
+		"deploy/kubernetes/*",
+		"version/docs/version.asciidoc",
+	)
+	return step
+}
+
+// triggerExtendedTestsPR creates the trigger for extended tests on PRs.
+// Fires on PRs without the skip-it label, or when a PR comment requests "extended".
+func triggerExtendedTestsPR() *pipeline.CommandStep {
+	step := pipeline.Command("Trigger Extended tests for Pull request", "buildkite-agent pipeline upload .buildkite/integration.pipeline.yml")
+	pipeline.SetIf(step, `(build.pull_request.id != null && !build.env("GITHUB_PR_LABELS") =~ /skip-it/) ||
+build.env("GITHUB_PR_TRIGGER_COMMENT") =~ /.*extended.*/`)
+	pipeline.SetEnv(step, map[string]string{
+		"BUILDKITE_PULL_REQUEST":             "${BUILDKITE_PULL_REQUEST}",
+		"BUILDKITE_PULL_REQUEST_BASE_BRANCH": "${BUILDKITE_PULL_REQUEST_BASE_BRANCH}",
+		"GITHUB_PR_LABELS":                   "${GITHUB_PR_LABELS}",
+	})
+	pipeline.SetIfChanged(step,
+		"internal/**",
+		"dev-tools/**",
+		"pkg/**",
+		"deploy/**",
+		"test_infra/**",
+		"testing/**",
+		"version/**",
+		"specs/**",
+		".agent-versions.json",
+		".go-version",
+		".package-version",
+		"go.mod",
+		"go.sum",
+		"magefile.go",
+		"main.go",
+		".buildkite/integration.pipeline.yml",
+		".buildkite/bk.integration.pipeline.yml",
+		".buildkite/bk.integration-fips.pipeline.yml",
+		".buildkite/pipeline.yml",
+		".buildkite/scripts/**",
+		".buildkite/hooks/**",
+	)
+	return step
+}
+
+// triggerExtendedTestsBranch creates the trigger for extended tests on branches.
+func triggerExtendedTestsBranch() *pipeline.TriggerStep {
+	trigger := pipeline.Trigger("Triggering Extended tests for branches", "elastic-agent-extended-testing")
+	pipeline.SetTriggerIf(trigger, "build.pull_request.id == null")
+	pipeline.SetTriggerBuild(trigger, "${BUILDKITE_COMMIT}", "${BUILDKITE_BRANCH}", nil)
+	return trigger
+}
+
+// triggerServerlessBeatsTests creates the trigger for serverless beats tests.
+func triggerServerlessBeatsTests() *pipeline.TriggerStep {
+	trigger := pipeline.Trigger("Trigger Serverless Beats Tests", "beats-agent-serverless-tests")
+	pipeline.SetTriggerIf(trigger, "build.pull_request.id != null")
+	pipeline.SetTriggerBuild(trigger, "${BUILDKITE_COMMIT}", "${BUILDKITE_BRANCH}", nil)
+	pipeline.SetTriggerIfChanged(trigger,
+		".buildkite/serverless.beats.tests.yml",
+		".buildkite/scripts/steps/beats_tests.sh",
+		".buildkite/hooks/pre-command",
+	)
+	return trigger
+}
+
+// triggerElasticAgentPackage creates the trigger for elastic agent package.
+// Command-less label-only step; both commands are set via SetCommands below.
+func triggerElasticAgentPackage() *pipeline.CommandStep {
+	step := &pipeline.CommandStep{
+		Label: pipeline.Ptr("Trigger Elastic Agent Package"),
+	}
+	pipeline.SetIf(step, "build.pull_request.id != null")
+	pipeline.SetCommands(step,
+		".buildkite/scripts/steps/trigger-elastic-agent-package.sh",
+		".buildkite/scripts/steps/trigger-elastic-agent-package.sh | buildkite-agent pipeline upload",
+	)
+	pipeline.SetIfChanged(step,
+		".buildkite/pipeline.elastic-agent-package.yml",
+		".buildkite/scripts/steps/package.sh",
+		".buildkite/scripts/steps/trigger-elastic-agent-package.sh",
+		"magefile.go",
+		"dev-tools/**/*",
+	)
+	return step
+}
+
+// triggerDryRunServerless creates the DRY RUN publish to serverless trigger.
+func triggerDryRunServerless() *pipeline.TriggerStep {
+	trigger := pipeline.Trigger("DRY RUN publish to serverless", "agentless-serverless-release")
+	pipeline.SetTriggerIf(trigger, `build.pull_request.id != null && build.env("BUILDKITE_PULL_REQUEST_BASE_BRANCH") == "main"`)
+	pipeline.SetTriggerIfChanged(trigger,
+		".buildkite/pipeline.yml",
+		".buildkite/pipeline.agentless-app-release.yaml",
+		".buildkite/scripts/steps/ecp-internal-release.sh",
+		".buildkite/scripts/steps/integration-package.sh",
+		".buildkite/scripts/steps/validate-agentless-docker-image.sh",
+	)
+	pipeline.SetTriggerBuildWithMessage(trigger,
+		"${BUILDKITE_COMMIT}",
+		"${BUILDKITE_BRANCH}",
+		"publish to serverless (dry-run) #${BUILDKITE_PULL_REQUEST}",
+		map[string]string{"DRY_RUN": "true"},
+	)
+	return trigger
+}
+
+// triggerPublishServerless creates the publish to serverless trigger.
+// Runs on main only; an empty branch argument leaves the trigger branch unset.
+func triggerPublishServerless() *pipeline.TriggerStep {
+	trigger := pipeline.Trigger("Publish to serverless", "agentless-serverless-release")
+	pipeline.SetTriggerBranches(trigger, "main")
+	pipeline.SetTriggerBuild(trigger, "${BUILDKITE_COMMIT}", "", nil)
+	return trigger
+}
diff --git a/dev-tools/buildkite/pipelines/pipelines_test.go b/dev-tools/buildkite/pipelines/pipelines_test.go
index d6afa7e5655..20b27c3f5ee 100644
--- a/dev-tools/buildkite/pipelines/pipelines_test.go
+++ b/dev-tools/buildkite/pipelines/pipelines_test.go
@@ -10,92 +10,100 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 	"gotest.tools/v3/golden"
 
 	"github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline"
 )
 
+// pipelineTestCase defines a test case for pipeline generation.
+type pipelineTestCase struct {
+	name       string                    // subtest name
+	generator  func() *pipeline.Pipeline // pipeline under test
+	goldenFile string                    // golden file under testdata/
+	actualFile string                    // hand-maintained file under .buildkite/
+}
+
+// NOTE(review): goldenFile and actualFile are identical in every entry —
+// consider collapsing them into a single field if they are meant to stay equal.
+var pipelineTestCases = []pipelineTestCase{
+	{
+		name:       "GCECleanup",
+		generator:  GCECleanup,
+		goldenFile: "pipeline.elastic-agent-gce-cleanup.yml",
+		actualFile: "pipeline.elastic-agent-gce-cleanup.yml",
+	},
+	{
+		name:       "AgentlessAppRelease",
+		generator:  AgentlessAppRelease,
+		goldenFile: "pipeline.agentless-app-release.yaml",
+		actualFile: "pipeline.agentless-app-release.yaml",
+	},
+	{
+		name:       "Pipeline",
+		generator:  Pipeline,
+		goldenFile: "pipeline.yml",
+		actualFile: "pipeline.yml",
+	},
+	{
+		name:       "IntegrationPipeline",
+		generator:  IntegrationPipeline,
+		goldenFile: "integration.pipeline.yml",
+		actualFile: "integration.pipeline.yml",
+	},
+	{
+		name:       "ElasticAgentPackage",
+		generator:  ElasticAgentPackage,
+		goldenFile: "pipeline.elastic-agent-package.yml",
+		actualFile: "pipeline.elastic-agent-package.yml",
+	},
+}
+
 // findRepoRoot finds the repository root by looking for go.mod.
func findRepoRoot(t *testing.T) string { t.Helper() dir, err := os.Getwd() - if err != nil { - t.Fatalf("failed to get working directory: %v", err) - } + require.NoError(t, err, "failed to get working directory") for { if _, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil { return dir } parent := filepath.Dir(dir) - if parent == dir { - t.Fatal("could not find repository root (go.mod)") - } + require.NotEqual(t, parent, dir, "could not find repository root (go.mod)") dir = parent } } -func TestGCECleanup(t *testing.T) { - p := GCECleanup() - - yaml, err := p.MarshalYAML() - if err != nil { - t.Fatalf("failed to marshal pipeline: %v", err) - } - - // Golden file test - update with: go test -update - golden.AssertBytes(t, yaml, "pipeline.elastic-agent-gce-cleanup.yml") -} - -func TestGCECleanupMatchesActual(t *testing.T) { - repoRoot := findRepoRoot(t) - actualPath := filepath.Join(repoRoot, ".buildkite", "pipeline.elastic-agent-gce-cleanup.yml") - - p := GCECleanup() - result, err := pipeline.SemanticCompareWithFile(p, actualPath) - if err != nil { - t.Fatalf("failed to compare: %v", err) - } +func TestPipelines(t *testing.T) { + for _, tc := range pipelineTestCases { + t.Run(tc.name, func(t *testing.T) { + p := tc.generator() - if result.ParseError != nil { - t.Fatalf("failed to parse YAML: %v", result.ParseError) - } + yaml, err := p.MarshalYAML() + require.NoError(t, err, "failed to marshal pipeline") - if !result.Equal { - t.Errorf("Generated pipeline does not match %s:\n%s", - actualPath, strings.Join(result.Differences, "\n")) + // Golden file test - update with: go test -update + golden.AssertBytes(t, yaml, tc.goldenFile) + }) } } -func TestAgentlessAppRelease(t *testing.T) { - p := AgentlessAppRelease() - - yaml, err := p.MarshalYAML() - if err != nil { - t.Fatalf("failed to marshal pipeline: %v", err) - } - - // Golden file test - update with: go test -update - golden.AssertBytes(t, yaml, "pipeline.agentless-app-release.yaml") -} - -func 
TestAgentlessAppReleaseMatchesActual(t *testing.T) { +func TestPipelinesMatchActual(t *testing.T) { repoRoot := findRepoRoot(t) - actualPath := filepath.Join(repoRoot, ".buildkite", "pipeline.agentless-app-release.yaml") - p := AgentlessAppRelease() - result, err := pipeline.SemanticCompareWithFile(p, actualPath) - if err != nil { - t.Fatalf("failed to compare: %v", err) - } + for _, tc := range pipelineTestCases { + t.Run(tc.name, func(t *testing.T) { + actualPath := filepath.Join(repoRoot, ".buildkite", tc.actualFile) - if result.ParseError != nil { - t.Fatalf("failed to parse YAML: %v", result.ParseError) - } + p := tc.generator() + result, err := pipeline.SemanticCompareWithFile(p, actualPath) + require.NoError(t, err, "failed to compare") + require.NoError(t, result.ParseError, "failed to parse YAML") - if !result.Equal { - t.Errorf("Generated pipeline does not match %s:\n%s", - actualPath, strings.Join(result.Differences, "\n")) + assert.True(t, result.Equal, + "Generated pipeline does not match %s:\n%s", + actualPath, strings.Join(result.Differences, "\n")) + }) } } diff --git a/dev-tools/buildkite/pipelines/testdata/integration.pipeline.yml b/dev-tools/buildkite/pipelines/testdata/integration.pipeline.yml new file mode 100644 index 00000000000..40581b73361 --- /dev/null +++ b/dev-tools/buildkite/pipelines/testdata/integration.pipeline.yml @@ -0,0 +1,207 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + ASDF_MAGE_VERSION: 1.14.0 + BUILDKIT_PROGRESS: plain + IMAGE_UBUNTU_2204_ARM_64: platform-ingest-elastic-agent-ubuntu-2204-aarch64-1762801856 + IMAGE_UBUNTU_2204_X86_64: platform-ingest-elastic-agent-ubuntu-2204-1762801856 + VAULT_PATH: kv/ci-shared/observability-ingest/cloud/gcp +steps: + - group: "Integration tests: packaging" + key: int-packaging + notify: + - github_commit_status: + context: buildkite/elastic-agent - Packaging + steps: + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + 
machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + PACKAGES: zip,tar.gz,rpm,deb + PLATFORMS: windows/amd64,linux/amd64 + key: packaging-amd64 + label: ":package: amd64: zip,tar.gz,rpm,deb " + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + OTEL_COMPONENT: "true" + PACKAGES: zip,tar.gz,rpm,deb + PLATFORMS: windows/amd64,linux/amd64 + key: packaging-amd64-otel-component + label: ":package: amd64: OTEL_COMPONENT zip,tar.gz,rpm,deb " + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-4 + provider: gcp + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + FIPS: "true" + PACKAGES: tar.gz + PLATFORMS: linux/amd64 + key: packaging-amd64-fips + label: ":package: amd64: FIPS tar.gz" + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: c6g.2xlarge + provider: aws + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + PACKAGES: tar.gz,zip + PLATFORMS: windows/arm64,linux/arm64 + key: packaging-arm64 + label: ":package: arm64: zip,tar.gz" + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: c6g.2xlarge + provider: aws + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + OTEL_COMPONENT: "true" + PACKAGES: tar.gz,zip + PLATFORMS: windows/arm64,linux/arm64 + key: packaging-arm64-otel-component + label: ":package: arm64: OTEL_COMPONENT zip,tar.gz" + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: c6g.2xlarge + provider: aws + 
artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + FIPS: "true" + PACKAGES: tar.gz + PLATFORMS: linux/arm64 + key: packaging-arm64-fips + label: ":package: arm64: FIPS tar.gz" + retry: + automatic: + limit: 1 + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/distributions/** + command: |- + .buildkite/scripts/steps/integration-package.sh + .buildkite/scripts/steps/integration-cloud-image-push.sh + env: + PACKAGES: docker + PLATFORMS: linux/amd64 + key: packaging-containers-amd64 + label: ":package: amd64: Containers" + plugins: + - elastic/vault-docker-login#v0.5.2: + secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: c6g.4xlarge + provider: aws + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + PACKAGES: docker + PLATFORMS: linux/arm64 + key: packaging-containers-arm64 + label: ":package: arm64: Containers" + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/distributions/** + command: |- + .buildkite/scripts/steps/integration-package.sh + .buildkite/scripts/steps/integration-cloud-image-push.sh + env: + FIPS: "true" + PACKAGES: docker + PLATFORMS: linux/amd64 + key: packaging-containers-amd64-fips + label: ":package: amd64: FIPS Containers" + plugins: + - elastic/vault-docker-login#v0.5.2: + secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: c6g.2xlarge + provider: aws + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + FIPS: "true" + PACKAGES: docker + PLATFORMS: linux/arm64 + key: packaging-containers-arm64-fips + 
label: ":package: arm64: FIPS Containers" + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + OTEL_COMPONENT: "true" + PACKAGES: docker + PLATFORMS: linux/amd64 + key: packaging-containers-amd64-otel-component + label: ":package: amd64: OTEL_COMPONENT Containers" + plugins: + - elastic/vault-docker-login#v0.5.2: + secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: c6g.2xlarge + provider: aws + artifact_paths: + - build/distributions/** + command: .buildkite/scripts/steps/integration-package.sh + env: + OTEL_COMPONENT: "true" + PACKAGES: docker + PLATFORMS: linux/arm64 + key: packaging-containers-arm64-otel-component + label: ":package: arm64: OTEL_COMPONENT Containers" + - command: buildkite-agent pipeline upload .buildkite/bk.integration.pipeline.yml + label: Triggering Integration tests + - command: buildkite-agent pipeline upload .buildkite/bk.integration-fips.pipeline.yml + label: Triggering custom FIPS integration tests diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml b/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml new file mode 100644 index 00000000000..4d41a9b5ddb --- /dev/null +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml @@ -0,0 +1,200 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + BEAT_NAME: elastic-agent + BEAT_URL: https://www.elastic.co/elastic-agent + IMAGE_UBUNTU_2404_ARM_64: platform-ingest-elastic-agent-ubuntu-2404-aarch64-1762801856 + IMAGE_UBUNTU_2404_X86_64: platform-ingest-elastic-agent-ubuntu-2404-1762801856 +steps: + - fields: + - default: "" + hint: Link to the build manifest URL. 
+ key: MANIFEST_URL + required: true + text: MANIFEST_URL + - hint: Increase verbosity of the mage commands, defaults to 0 + key: MAGEFILE_VERBOSE + options: + - label: "True" + value: "1" + - label: "False" + value: "0" + required: false + select: Mage verbose + - hint: What workflow of the DRA release process this build is going to be triggered for + key: DRA_WORKFLOW + options: + - label: snapshot + value: snapshot + - label: staging + value: staging + required: true + select: DRA Workflow + - default: "" + hint: The packaging version to use + key: DRA_VERSION + required: true + text: DRA Version + - hint: If the DRA release manager script would actually publish anything or just print + key: DRA_DRY_RUN + options: + - label: "True" + value: --dry-run + - label: "False" + value: "" + required: false + select: DRA DRY-RUN + if: build.env("MANIFEST_URL") == null + input: Build parameters + - if: build.env("MANIFEST_URL") == null + wait: "" + - group: :Packaging Artefacts + key: package + steps: + - agents: + diskSizeGb: 400 + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: c2-standard-16 + provider: gcp + artifact_paths: + - build/distributions/**/* + command: |- + if [[ -z "$${MANIFEST_URL}" ]]; then + export MANIFEST_URL=$(buildkite-agent meta-data get MANIFEST_URL --default "") + if [[ -z "$${MANIFEST_URL}" ]]; then + echo ":broken_heart: Missing MANIFEST_URL variable or empty string provided" + exit 1 + fi + fi + if [[ -z "$${MAGEFILE_VERBOSE}" ]]; then + export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0") + fi + .buildkite/scripts/steps/package.sh + env: + FIPS: "{{matrix.fips}}" + PLATFORMS: linux/amd64 windows/amd64 darwin/amd64 + key: package_elastic-agent + label: ":package: FIPS={{matrix.fips}} Cross Building and package elastic-agent" + matrix: + setup: + fips: + - "false" + - "true" + plugins: + - elastic/vault-docker-login#v0.5.2: + secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry + - agents: + 
diskSizeGb: 400 + image: ${IMAGE_UBUNTU_2404_ARM_64} + instanceType: t4g.2xlarge + provider: aws + artifact_paths: + - build/distributions/**/* + command: |- + echo "Add support for multiarch" + docker run --privileged --rm tonistiigi/binfmt:master --install all + + if [[ -z "$${MANIFEST_URL}" ]]; then + export MANIFEST_URL=$(buildkite-agent meta-data get MANIFEST_URL --default "") + if [[ -z "$${MANIFEST_URL}" ]]; then + echo ":broken_heart: Missing MANIFEST_URL variable or empty string provided" + exit 1 + fi + fi + if [[ -z "$${MAGEFILE_VERBOSE}" ]]; then + export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0") + fi + .buildkite/scripts/steps/package.sh + ls -lahR build/ + env: + FIPS: "{{matrix.fips}}" + PACKAGES: docker,tar.gz,deb,rpm,zip + PLATFORMS: linux/arm64 darwin/arm64 windows/arm64 + key: package_elastic-agent-arm + label: ":package: FIPS={{matrix.fips}} Package ARM elastic-agent" + matrix: + setup: + fips: + - "false" + - "true" + - agents: + image: ${IMAGE_UBUNTU_2404_X86_64} + provider: gcp + command: |- + echo "+++ Restoring Artifacts" + buildkite-agent artifact download "build/**/*" . 
+ + echo "+++ Changing permissions for the release manager" + sudo chmod -R a+r build/distributions/ + sudo chown -R :1000 build/distributions/ + ls -lahR build/ + + echo "+++ Running DRA publish step" + if [[ -z "$${MAGEFILE_VERBOSE}" ]]; then + export MAGEFILE_VERBOSE=$(buildkite-agent meta-data get MAGEFILE_VERBOSE --default "0") + fi + if [[ -z "$${DRA_DRY_RUN}" ]]; then + DRA_DRY_RUN=$(buildkite-agent meta-data get DRA_DRY_RUN --default "") + export DRA_DRY_RUN + fi + if [[ -z "$${DRA_VERSION}" ]]; then + DRA_VERSION=$(buildkite-agent meta-data get DRA_VERSION --default "") + export DRA_VERSION + fi + if [[ -z "$${DRA_WORKFLOW}" ]]; then + DRA_WORKFLOW=$(buildkite-agent meta-data get DRA_WORKFLOW --default "") + export DRA_WORKFLOW + fi + .buildkite/scripts/steps/dra-publish.sh + depends_on: package + env: + DRA_PROJECT_ARTIFACT_ID: agent-package + DRA_PROJECT_ID: elastic-agent-package + if: build.env("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG") == null || build.env("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG") != "independent-agent-release-staging" + key: dra-publish + label: ":elastic-stack: Publishing to DRA" + - agents: + diskSizeGb: 400 + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/distributions/**/* + command: |- + echo "+++ Restoring Artifacts" + buildkite-agent artifact download "build/**/*" . 
+ echo "+++ Changing permissions for the BK API commands" + sudo chown -R :1000 build/distributions/ + echo "--- File listing" + ls -alR build + echo "--- Copy workaround for ironbank container filename" + .buildkite/scripts/steps/ironbank-cp-workaround.sh + echo "--- File listing after workaround" + ls -alR build + echo "+++ Checking artifact validity with release-manager collect dry run" + DRA_DRY_RUN="--dry-run" + export DRA_DRY_RUN + .buildkite/scripts/steps/dra-publish.sh + # Artifacts will be uploaded via the artifact_paths entry above + echo "+++ Set job metadata if TRIGGER_JOB_ID is properly set" + if [[ -z "$${TRIGGER_JOB_ID}" ]]; then + echo "TRIGGER_JOB_ID is not set, so not setting metadata" + else + # If a pipeline that triggered this build passes in a "TRIGGER_JOB_ID" env var, that + # is an indicator that it will want us to set some metadata in that calling build + # so that it can reference this specific build ID in order to easily download + # artifacts saved off in this build. + # + # This is a much easier way to pull back artifacts from a triggered build than using + # a Buildkite token that we then have to manage via vault, etc. 
+ # See: https://forum.buildkite.community/t/how-to-download-artifacts-back-from-triggered-pipeline/3480/2 + echo "Setting metadata for job that trigger this one" + buildkite-agent meta-data set "triggered_build_id" "$BUILDKITE_BUILD_ID" --job $TRIGGER_JOB_ID + buildkite-agent meta-data set "triggered_commit_hash" "$BUILDKITE_COMMIT" --job $TRIGGER_JOB_ID + fi + depends_on: package + env: + DRA_PROJECT_ARTIFACT_ID: agent-package + DRA_PROJECT_ID: elastic-agent-package + if: build.env("BUILDKITE_TRIGGERED_FROM_BUILD_PIPELINE_SLUG") == "independent-agent-release-staging" + key: bk-api-publish-independent-agent + label: Publishing via BK API for Independent Agent Release diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.yml b/dev-tools/buildkite/pipelines/testdata/pipeline.yml new file mode 100644 index 00000000000..e177ba76349 --- /dev/null +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.yml @@ -0,0 +1,351 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + IMAGE_UBUNTU_2204_ARM_64: platform-ingest-elastic-agent-ubuntu-2204-aarch64-1762801856 + IMAGE_UBUNTU_2204_X86_64: platform-ingest-elastic-agent-ubuntu-2204-1762801856 + IMAGE_WIN_10: platform-ingest-elastic-agent-windows-10-1764775167 + IMAGE_WIN_11: platform-ingest-elastic-agent-windows-11-1764775167 + IMAGE_WIN_2016: platform-ingest-elastic-agent-windows-2016-1762801856 + IMAGE_WIN_2022: platform-ingest-elastic-agent-windows-2022-1762801856 + VAULT_PATH: kv/ci-shared/observability-ingest/cloud/gcp +steps: + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + provider: gcp + command: .buildkite/scripts/steps/check-ci.sh + key: check-ci + label: check-ci + retry: + manual: + allowed: true + - group: Unit tests + key: unit-tests + steps: + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out 
+ command: .buildkite/scripts/steps/unit-tests.sh + key: unit-tests-2204 + label: Unit tests - Ubuntu 22.04 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.sh + env: + FIPS: "true" + key: unit-tests-2204-fips-tag + label: Unit tests - Ubuntu 22.04 with requirefips build tag + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: GODEBUG="fips140=only" .buildkite/scripts/steps/unit-tests.sh + env: + FIPS: "true" + key: unit-tests-2204-fips140-only + label: Unit tests - fips140=only Ubuntu 22.04 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + diskSizeGb: 200 + image: ${IMAGE_UBUNTU_2204_ARM_64} + instanceType: m6g.xlarge + provider: aws + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.sh + key: unit-tests-2204-arm64 + label: Unit tests - Ubuntu 22.04 ARM64 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + disk_size: 200 + disk_type: pd-ssd + image: ${IMAGE_WIN_2022} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.ps1 + key: unit-tests-win2022 + label: Unit tests - Windows 2022 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + disk_size: 200 + disk_type: pd-ssd + image: ${IMAGE_WIN_2016} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + 
- build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.ps1 + key: unit-tests-win2016 + label: Unit tests - Windows 2016 + retry: + automatic: + limit: 1 + manual: + allowed: true + - group: macOS tests + key: macos-unit-tests + steps: + - agents: + imagePrefix: generic-base-15-arm-002 + provider: orka + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.sh + label: Unit tests - macOS 15 ARM + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + imagePrefix: generic-13-ventura-x64 + provider: orka + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + branches: main 8.* 9.* + command: .buildkite/scripts/steps/unit-tests.sh + label: Unit tests - macOS 13 + retry: + automatic: + limit: 1 + manual: + allowed: true + - group: Desktop Windows tests + key: extended-windows + steps: + - agents: + disk_type: pd-ssd + image: ${IMAGE_WIN_10} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.ps1 + key: unit-tests-win10 + label: Unit tests - Windows 10 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + disk_type: pd-ssd + image: ${IMAGE_WIN_11} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/TEST-*.html + - build/TEST-*.xml + - build/diagnostics/* + - coverage-*.out + command: .buildkite/scripts/steps/unit-tests.ps1 + key: unit-tests-win11 + label: Unit tests - Windows 11 + retry: + automatic: + limit: 1 + manual: + allowed: true + - agents: + image: docker.elastic.co/ci-agent-images/buildkite-junit-annotate:1.0 + depends_on: + - allow_failure: true + step: unit-tests-2204 + - allow_failure: true + step: unit-tests-2204-fips-tag + - allow_failure: true + step: unit-tests-2204-fips140-only + - 
allow_failure: true + step: unit-tests-2204-arm64 + - allow_failure: true + step: unit-tests-win2022 + - allow_failure: true + step: unit-tests-win2016 + - allow_failure: true + step: macos-unit-tests + - allow_failure: true + step: unit-tests-win10 + - allow_failure: true + step: unit-tests-win11 + label: ":junit: Junit annotate" + plugins: + - junit-annotate#v2.7.0: + always-annotate: true + artifacts: "**TEST-*.xml" + run-in-docker: false + - group: K8s tests + key: k8s-tests + steps: + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + provider: gcp + command: .buildkite/scripts/steps/k8s-tests.sh + env: + K8S_VERSION: v{{matrix.k8s_version}} + KIND_VERSION: v0.27.0 + label: "K8s tests: {{matrix.k8s_version}}" + matrix: + setup: + k8s_version: + - 1.33.0 + - 1.32.0 + - 1.31.0 + - 1.30.0 + - 1.29.4 + - 1.28.9 + retry: + manual: + allowed: true + - agents: + image: ${IMAGE_UBUNTU_2204_X86_64} + provider: gcp + branches: main + command: .buildkite/scripts/steps/sync-k8s.sh + env: + GH_VERSION: 2.4.0 + if_changed: + include: + - deploy/kubernetes/* + - version/docs/version.asciidoc + label: Trigger k8s sync + - command: buildkite-agent pipeline upload .buildkite/integration.pipeline.yml + env: + BUILDKITE_PULL_REQUEST: ${BUILDKITE_PULL_REQUEST} + BUILDKITE_PULL_REQUEST_BASE_BRANCH: ${BUILDKITE_PULL_REQUEST_BASE_BRANCH} + GITHUB_PR_LABELS: ${GITHUB_PR_LABELS} + if: |- + (build.pull_request.id != null && !build.env("GITHUB_PR_LABELS") =~ /skip-it/) || + build.env("GITHUB_PR_TRIGGER_COMMENT") =~ /.*extended.*/ + if_changed: + include: + - internal/** + - dev-tools/** + - pkg/** + - deploy/** + - test_infra/** + - testing/** + - version/** + - specs/** + - .agent-versions.json + - .go-version + - .package-version + - go.mod + - go.sum + - magefile.go + - main.go + - .buildkite/integration.pipeline.yml + - .buildkite/bk.integration.pipeline.yml + - .buildkite/bk.integration-fips.pipeline.yml + - .buildkite/pipeline.yml + - .buildkite/scripts/** + - .buildkite/hooks/** + 
label: Trigger Extended tests for Pull request + - build: + branch: ${BUILDKITE_BRANCH} + commit: ${BUILDKITE_COMMIT} + if: build.pull_request.id == null + label: Triggering Extended tests for branches + trigger: elastic-agent-extended-testing + - build: + branch: ${BUILDKITE_BRANCH} + commit: ${BUILDKITE_COMMIT} + if: build.pull_request.id != null + if_changed: + include: + - .buildkite/serverless.beats.tests.yml + - .buildkite/scripts/steps/beats_tests.sh + - .buildkite/hooks/pre-command + label: Trigger Serverless Beats Tests + trigger: beats-agent-serverless-tests + - commands: + - .buildkite/scripts/steps/trigger-elastic-agent-package.sh + - .buildkite/scripts/steps/trigger-elastic-agent-package.sh | buildkite-agent pipeline upload + if: build.pull_request.id != null + if_changed: + include: + - .buildkite/pipeline.elastic-agent-package.yml + - .buildkite/scripts/steps/package.sh + - .buildkite/scripts/steps/trigger-elastic-agent-package.sh + - magefile.go + - dev-tools/**/* + label: Trigger Elastic Agent Package + - build: + branch: ${BUILDKITE_BRANCH} + commit: ${BUILDKITE_COMMIT} + env: + DRY_RUN: "true" + message: "publish to serverless (dry-run) #${BUILDKITE_PULL_REQUEST}" + if: build.pull_request.id != null && build.env("BUILDKITE_PULL_REQUEST_BASE_BRANCH") == "main" + if_changed: + include: + - .buildkite/pipeline.yml + - .buildkite/pipeline.agentless-app-release.yaml + - .buildkite/scripts/steps/ecp-internal-release.sh + - .buildkite/scripts/steps/integration-package.sh + - .buildkite/scripts/steps/validate-agentless-docker-image.sh + label: DRY RUN publish to serverless + trigger: agentless-serverless-release + - wait: "" + - branches: main + build: + commit: ${BUILDKITE_COMMIT} + label: Publish to serverless + trigger: agentless-serverless-release From 979440e973cad9573d3ab2115a395b3c3536d2b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Sun, 14 Dec 2025 16:34:34 +0100 Subject: [PATCH 04/10] Generate integration 
buildkite pipelines using Go code --- dev-tools/buildkite/pipeline/images.go | 12 +- dev-tools/buildkite/pipeline/pipeline.go | 24 + dev-tools/buildkite/pipeline/plugins.go | 22 + dev-tools/buildkite/pipeline/step.go | 94 ++- .../pipelines/bk_integration_fips_pipeline.go | 170 ++++ .../pipelines/bk_integration_pipeline.go | 520 ++++++++++++ .../buildkite/pipelines/pipelines_test.go | 12 + .../testdata/bk.integration-fips.pipeline.yml | 176 +++++ .../testdata/bk.integration.pipeline.yml | 746 ++++++++++++++++++ .../pipeline.agentless-app-release.yaml | 3 +- .../pipeline.elastic-agent-package.yml | 6 +- 11 files changed, 1753 insertions(+), 32 deletions(-) create mode 100644 dev-tools/buildkite/pipelines/bk_integration_fips_pipeline.go create mode 100644 dev-tools/buildkite/pipelines/bk_integration_pipeline.go create mode 100644 dev-tools/buildkite/pipelines/testdata/bk.integration-fips.pipeline.yml create mode 100644 dev-tools/buildkite/pipelines/testdata/bk.integration.pipeline.yml diff --git a/dev-tools/buildkite/pipeline/images.go b/dev-tools/buildkite/pipeline/images.go index 9a4f21b2944..97def6ed0a3 100644 --- a/dev-tools/buildkite/pipeline/images.go +++ b/dev-tools/buildkite/pipeline/images.go @@ -30,6 +30,10 @@ const ( // Debian images ImageDebian11 = "platform-ingest-elastic-agent-debian-11-1762801856" ImageDebian13 = "platform-ingest-elastic-agent-debian-13-1762801856" + + // FIPS images + ImageUbuntuX86FIPS = "platform-ingest-elastic-agent-ubuntu-2204-fips-1762801856" + ImageUbuntuARM64FIPS = "platform-ingest-elastic-agent-ubuntu-2204-fips-aarch64-1762801856" ) // ImageEnvVars returns a map of environment variable names to image values. @@ -79,7 +83,9 @@ const ( // Common vault paths. 
const ( - VaultPathGCP = "kv/ci-shared/observability-ingest/cloud/gcp" - VaultPathDockerRegistry = "kv/ci-shared/platform-ingest/elastic_docker_registry" - VaultPathECKeyProd = "kv/ci-shared/platform-ingest/platform-ingest-ec-prod" + VaultPathGCP = "kv/ci-shared/observability-ingest/cloud/gcp" + VaultPathDockerRegistry = "kv/ci-shared/platform-ingest/elastic_docker_registry" + VaultPathECKeyProd = "kv/ci-shared/platform-ingest/platform-ingest-ec-prod" + VaultPathECKeyStagingGov = "kv/ci-shared/platform-ingest/platform-ingest-ec-staging-gov" + VaultPathBuildkiteAnalytics = "kv/ci-shared/platform-ingest/buildkite_analytics_token" ) diff --git a/dev-tools/buildkite/pipeline/pipeline.go b/dev-tools/buildkite/pipeline/pipeline.go index 61ea8cda39a..aa83d038302 100644 --- a/dev-tools/buildkite/pipeline/pipeline.go +++ b/dev-tools/buildkite/pipeline/pipeline.go @@ -255,6 +255,30 @@ func compareValues(path string, generated, expected any) []string { } } + // Handle depends_on equivalence: "key" == ["key"] + if path == "depends_on" || strings.HasSuffix(path, ".depends_on") { + // Case 1: generated is array, expected is string + if genArr, genIsArr := generated.([]any); genIsArr { + if expStr, expIsStr := expected.(string); expIsStr { + if len(genArr) == 1 { + if genStr, ok := genArr[0].(string); ok && genStr == expStr { + return nil + } + } + } + } + // Case 2: generated is string, expected is array + if genStr, genIsStr := generated.(string); genIsStr { + if expArr, expIsArr := expected.([]any); expIsArr { + if len(expArr) == 1 { + if expStrVal, ok := expArr[0].(string); ok && expStrVal == genStr { + return nil + } + } + } + } + } + if generated == nil { return []string{fmt.Sprintf("%s: missing in generated (expected: %v)", path, expected)} } diff --git a/dev-tools/buildkite/pipeline/plugins.go b/dev-tools/buildkite/pipeline/plugins.go index 5ff309beb28..209b4194e40 100644 --- a/dev-tools/buildkite/pipeline/plugins.go +++ b/dev-tools/buildkite/pipeline/plugins.go @@ -44,6 
+44,16 @@ func PluginVaultECKeyProd() (string, map[string]any) { return PluginVaultSecrets(VaultPathECKeyProd, "apiKey", "EC_API_KEY") } +// PluginVaultECKeyStagingGov returns the vault-secrets plugin for EC staging gov API key. +func PluginVaultECKeyStagingGov() (string, map[string]any) { + return PluginVaultSecrets(VaultPathECKeyStagingGov, "apiKey", "EC_API_KEY") +} + +// PluginVaultBuildkiteAnalytics returns the vault-secrets plugin for Buildkite analytics token. +func PluginVaultBuildkiteAnalytics() (string, map[string]any) { + return PluginVaultSecrets(VaultPathBuildkiteAnalytics, "token", "BUILDKITE_ANALYTICS_TOKEN") +} + // PluginGCPSecretManager returns the gcp-secret-manager plugin source and config. func PluginGCPSecretManager(envSecrets map[string]string) (string, map[string]any) { return "elastic/gcp-secret-manager#" + PluginVersionGCPSecretManager, map[string]any{ @@ -108,6 +118,18 @@ func WithVaultECKeyProd(step *buildkite.CommandStep) *buildkite.CommandStep { return AddPlugin(step, source, config) } +// WithVaultECKeyStagingGov adds the vault EC key staging gov plugin to a step. +func WithVaultECKeyStagingGov(step *buildkite.CommandStep) *buildkite.CommandStep { + source, config := PluginVaultECKeyStagingGov() + return AddPlugin(step, source, config) +} + +// WithVaultBuildkiteAnalytics adds the vault Buildkite analytics token plugin to a step. +func WithVaultBuildkiteAnalytics(step *buildkite.CommandStep) *buildkite.CommandStep { + source, config := PluginVaultBuildkiteAnalytics() + return AddPlugin(step, source, config) +} + // WithGoogleOIDC adds the Google OIDC plugin to a step. 
func WithGoogleOIDC(step *buildkite.CommandStep) *buildkite.CommandStep { source, config := PluginGoogleOIDC() diff --git a/dev-tools/buildkite/pipeline/step.go b/dev-tools/buildkite/pipeline/step.go index 3d5147aa06f..e840024c391 100644 --- a/dev-tools/buildkite/pipeline/step.go +++ b/dev-tools/buildkite/pipeline/step.go @@ -213,22 +213,17 @@ func SetIf(step *buildkite.CommandStep, condition string) *buildkite.CommandStep } // SetDependsOn sets step dependencies on a command step. +// Always uses array format for consistency with YAML files. func SetDependsOn(step *buildkite.CommandStep, keys ...string) *buildkite.CommandStep { - if len(keys) == 1 { - step.DependsOn = &buildkite.DependsOn{ - String: Ptr(keys[0]), - } - } else { - items := make(buildkite.DependsOnList, len(keys)) - for i, k := range keys { - items[i] = buildkite.DependsOnListItem{ - String: Ptr(k), - } - } - step.DependsOn = &buildkite.DependsOn{ - DependsOnList: &items, + items := make(buildkite.DependsOnList, len(keys)) + for i, k := range keys { + items[i] = buildkite.DependsOnListItem{ + String: Ptr(k), } } + step.DependsOn = &buildkite.DependsOn{ + DependsOnList: &items, + } return step } @@ -338,22 +333,17 @@ func AddGroupStep(group *buildkite.GroupStep, step *buildkite.CommandStep) *buil } // SetGroupDependsOn sets dependencies on a group step. +// Always uses array format for consistency with YAML files. 
func SetGroupDependsOn(group *buildkite.GroupStep, keys ...string) *buildkite.GroupStep { - if len(keys) == 1 { - group.DependsOn = &buildkite.DependsOn{ - String: Ptr(keys[0]), - } - } else { - items := make(buildkite.DependsOnList, len(keys)) - for i, k := range keys { - items[i] = buildkite.DependsOnListItem{ - String: Ptr(k), - } - } - group.DependsOn = &buildkite.DependsOn{ - DependsOnList: &items, + items := make(buildkite.DependsOnList, len(keys)) + for i, k := range keys { + items[i] = buildkite.DependsOnListItem{ + String: Ptr(k), } } + group.DependsOn = &buildkite.DependsOn{ + DependsOnList: &items, + } return group } @@ -542,3 +532,55 @@ func WaitIf(condition string) *buildkite.WaitStep { If: Ptr(condition), } } + +// SetAllowDependencyFailure sets allow_dependency_failure on a command step. +func SetAllowDependencyFailure(step *buildkite.CommandStep, allow bool) *buildkite.CommandStep { + step.AllowDependencyFailure = &buildkite.AllowDependencyFailure{ + Bool: Ptr(allow), + } + return step +} + +// SetSoftFailExitStatus sets soft_fail with exit status pattern on a command step. +func SetSoftFailExitStatus(step *buildkite.CommandStep, exitStatus string) *buildkite.CommandStep { + enumVal := buildkite.SoftFailObjectExitStatusEnum(exitStatus) + list := buildkite.SoftFailList{ + buildkite.SoftFailObject{ + ExitStatus: &buildkite.SoftFailObjectExitStatus{ + SoftFailObjectExitStatusEnum: &enumVal, + }, + }, + } + step.SoftFail = &buildkite.SoftFail{ + SoftFailList: &list, + } + return step +} + +// SetSkipWithMessage marks a command step as skipped with a reason. +func SetSkipWithMessage(step *buildkite.CommandStep, message string) *buildkite.CommandStep { + step.Skip = &buildkite.Skip{ + String: Ptr(message), + } + return step +} + +// SetGroupAllowDependencyFailure sets allow_dependency_failure on a group step. 
+func SetGroupAllowDependencyFailure(group *buildkite.GroupStep, allow bool) *buildkite.GroupStep { + group.AllowDependencyFailure = &buildkite.AllowDependencyFailure{ + Bool: Ptr(allow), + } + return group +} + +// SetGroupIf sets a conditional expression on a group step. +func SetGroupIf(group *buildkite.GroupStep, condition string) *buildkite.GroupStep { + group.If = Ptr(condition) + return group +} + +// SetKey sets the key on a command step. +func SetKey(step *buildkite.CommandStep, key string) *buildkite.CommandStep { + step.Key = Ptr(key) + return step +} diff --git a/dev-tools/buildkite/pipelines/bk_integration_fips_pipeline.go b/dev-tools/buildkite/pipelines/bk_integration_fips_pipeline.go new file mode 100644 index 00000000000..d2af4667f1f --- /dev/null +++ b/dev-tools/buildkite/pipelines/bk_integration_fips_pipeline.go @@ -0,0 +1,170 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipelines + +import ( + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" +) + +// fipsCommonEnv returns the common FIPS environment variables. +func fipsCommonEnv() map[string]string { + return map[string]string{ + "FIPS": "true", + "EC_ENDPOINT": "https://api.staging.elastic-gov.com", + "ESS_REGION": "us-gov-east-1", + "TF_VAR_deployment_template_id": "aws-general-purpose", + "TF_VAR_integration_server_docker_image": "docker.elastic.co/beats-ci/elastic-agent-cloud-fips:git-${BUILDKITE_COMMIT:0:12}", + "TF_VAR_docker_images_name_suffix": "-fips", + } +} + +// BKIntegrationFIPSPipeline generates the .buildkite/bk.integration-fips.pipeline.yml pipeline. +// This pipeline runs FIPS-specific integration tests. +func BKIntegrationFIPSPipeline() *pipeline.Pipeline { + p := pipeline.New(). + Env("ASDF_MAGE_VERSION", "1.14.0"). 
+ Env("MS_GOTOOLCHAIN_TELEMETRY_ENABLED", "0"). + // Image environment variables - managed by updatecli + Env("IMAGE_UBUNTU_2404_X86_64", pipeline.ImageUbuntu2404X86). + Env("IMAGE_UBUNTU_X86_64_FIPS", pipeline.ImageUbuntuX86FIPS). + Env("IMAGE_UBUNTU_ARM64_FIPS", pipeline.ImageUbuntuARM64FIPS). + Env("ASDF_TERRAFORM_VERSION", "1.9.2") + + // Start ESS stack for FIPS integration tests + p.Add(fipsEssStartStep()) + + // FIPS Ubuntu tests group + p.Add(fipsUbuntuTestsGroup()) + + // ESS FIPS stack cleanup + p.Add(fipsEssCleanupStep()) + + // Aggregate test reports + p.Add(fipsAggregateReportsStep()) + + return p +} + +// fipsEssStartStep creates the Start ESS stack step for FIPS tests. +func fipsEssStartStep() *pipeline.CommandStep { + step := pipeline.CommandWithKey("Start ESS stack for FIPS integration tests", "integration-fips-ess", + "source .buildkite/scripts/steps/ess_start.sh") + + pipeline.SetDependsOn(step, "packaging-containers-amd64-fips", "packaging-containers-arm64-fips") + + env := fipsCommonEnv() + pipeline.SetEnv(step, env) + + pipeline.SetArtifactPaths(step, "test_infra/ess/*.tfstate", "test_infra/ess/*.lock.hcl") + pipeline.SetAgent(step, pipeline.DockerAgentWithHooks("docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5")) + pipeline.WithVaultECKeyStagingGov(step) + + return step +} + +// fipsUbuntuTestsGroup creates the FIPS Ubuntu tests group. +func fipsUbuntuTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey("fips:Stateful:Ubuntu", "integration-tests-ubuntu-fips") + pipeline.SetGroupDependsOn(group, "integration-fips-ess") + + // fips:x86_64:sudo-{{matrix.sudo}}:{{matrix.groups}} + x86Test := fipsUbuntuTestStep("fips:x86_64:sudo-{{matrix.sudo}}:{{matrix.groups}}", + "packaging-amd64-fips", + "buildkite-agent artifact download build/distributions/** . 
--step 'packaging-amd64-fips'\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix.groups}} {{matrix.sudo}}", + "${IMAGE_UBUNTU_X86_64_FIPS}", + "m5.2xlarge") + pipeline.AddGroupStep(group, x86Test) + + // fips:arm64:sudo-{{matrix.sudo}}:{{matrix.groups}} + arm64Test := fipsUbuntuTestStep("fips:arm64:sudo-{{matrix.sudo}}:{{matrix.groups}}", + "packaging-arm64-fips", + "buildkite-agent artifact download build/distributions/** . --step 'packaging-arm64-fips'\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix.groups}} {{matrix.sudo}}", + "${IMAGE_UBUNTU_ARM64_FIPS}", + "m6g.2xlarge") + pipeline.AddGroupStep(group, arm64Test) + + // fips:upgrade-ech-deployment + upgradeEch := fipsUpgradeEchStep() + pipeline.AddGroupStep(group, upgradeEch) + + return group +} + +// fipsUbuntuTestStep creates a FIPS Ubuntu test step with matrix. +func fipsUbuntuTestStep(label, dependsOn, command, image, instanceType string) *pipeline.CommandStep { + step := pipeline.Command(label, command) + pipeline.SetDependsOn(step, dependsOn) + + env := fipsCommonEnv() + env["TEST_PACKAGE"] = "github.com/elastic/elastic-agent/testing/integration/ess" + pipeline.SetEnv(step, env) + + pipeline.SetArtifactPaths(step, "build/**", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.AWSAgent(image, instanceType)) + pipeline.WithVaultECKeyStagingGov(step) + pipeline.SetMatrix(step, map[string][]string{ + "sudo": {"false", "true"}, + "groups": {"fleet"}, + }) + + return step +} + +// fipsUpgradeEchStep creates the FIPS upgrade ECH deployment step. 
+func fipsUpgradeEchStep() *pipeline.CommandStep { + step := pipeline.Command("fips:upgrade-ech-deployment", + ".buildkite/scripts/buildkite-integration-tests.sh ech-deployment false") + + pipeline.SetIf(step, `build.env("BUILDKITE_PULL_REQUEST") != "false" && build.env("GITHUB_PR_LABELS") =~ /.*(Testing:run:TestUpgradeIntegrationsServer).*/`) + + pipeline.SetEnv(step, map[string]string{ + "FIPS": "true", + "EC_ENDPOINT": "https://api.staging.elastic-gov.com", + "ESS_REGION": "us-gov-east-1", + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + }) + + pipeline.SetArtifactPaths(step, "build/**", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.AWSAgent("${IMAGE_UBUNTU_X86_64_FIPS}", "m5.2xlarge")) + pipeline.WithVaultECKeyStagingGov(step) + + return step +} + +// fipsEssCleanupStep creates the ESS FIPS stack cleanup step. +func fipsEssCleanupStep() *pipeline.CommandStep { + step := pipeline.Command("ESS FIPS stack cleanup", + `buildkite-agent artifact download "test_infra/ess/**" . --step "integration-fips-ess" +ls -lah test_infra/ess +.buildkite/scripts/steps/ess_down.sh`) + + pipeline.SetDependsOn(step, "integration-tests-ubuntu-fips") + + env := fipsCommonEnv() + pipeline.SetEnv(step, env) + + pipeline.SetAllowDependencyFailure(step, true) + pipeline.SetAgent(step, pipeline.DockerAgentWithHooks("docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5")) + pipeline.WithVaultECKeyStagingGov(step) + + return step +} + +// fipsAggregateReportsStep creates the FIPS Aggregate test reports step. 
+func fipsAggregateReportsStep() *pipeline.CommandStep { + step := pipeline.Command("Aggregate test reports", + `buildkite-agent artifact download "build/*.xml" .`) + + pipeline.SetDependsOn(step, "integration-tests-ubuntu-fips") + pipeline.SetAllowDependencyFailure(step, true) + pipeline.SetAgent(step, pipeline.DockerAgentWithHooks("docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5")) + pipeline.SetSoftFailExitStatus(step, "*") + pipeline.WithVaultBuildkiteAnalytics(step) + pipeline.WithTestCollector(step, "build/*.xml", "junit") + + return step +} diff --git a/dev-tools/buildkite/pipelines/bk_integration_pipeline.go b/dev-tools/buildkite/pipelines/bk_integration_pipeline.go new file mode 100644 index 00000000000..fe8f06fa7d7 --- /dev/null +++ b/dev-tools/buildkite/pipelines/bk_integration_pipeline.go @@ -0,0 +1,520 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package pipelines + +import ( + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" +) + +// K8s test versions to run against. +var ( + k8sMinTestVersion = "v1.27.16" + k8sMaxTestVersion = "v1.34.0" + k8sAllTestVersions = []string{ + k8sMinTestVersion, + "v1.28.15", + "v1.29.14", + "v1.30.0", + "v1.31.0", + "v1.32.0", + "v1.33.0", + k8sMaxTestVersion, + } +) + +// BKIntegrationPipeline generates the .buildkite/bk.integration.pipeline.yml pipeline. +// This pipeline runs integration tests for various platforms and configurations. +func BKIntegrationPipeline() *pipeline.Pipeline { + p := pipeline.New(). + Env("VAULT_PATH", pipeline.VaultPathGCP). + Env("ASDF_MAGE_VERSION", "1.14.0"). + // Image environment variables - managed by updatecli + Env("IMAGE_UBUNTU_2404_X86_64", pipeline.ImageUbuntu2404X86). 
+ Env("IMAGE_UBUNTU_2404_ARM_64", pipeline.ImageUbuntu2404ARM). + Env("IMAGE_RHEL_8", pipeline.ImageRHEL8). + Env("IMAGE_RHEL_10", pipeline.ImageRHEL10). + Env("IMAGE_DEBIAN_11", pipeline.ImageDebian11). + Env("IMAGE_DEBIAN_13", pipeline.ImageDebian13). + Env("IMAGE_WIN_2022", pipeline.ImageWin2022). + Env("IMAGE_WIN_2025", pipeline.ImageWin2025). + Env("ASDF_TERRAFORM_VERSION", "1.9.2") + + // Custom ECH Testing + p.Add(echTestingStep()) + + // Start ESS stack + p.Add(essStartStep()) + + // Extended runtime leak tests group + p.Add(extendedLeakTestsGroup()) + + // Stateful: Windows group + p.Add(windowsTestsGroup()) + + // Stateful: Ubuntu group + p.Add(ubuntuTestsGroup()) + + // Stateful: Debian group + p.Add(debianTestsGroup()) + + // Stateful: RHEL group + p.Add(rhelTestsGroup()) + + // Kubernetes group + p.Add(kubernetesTestsGroup()) + + // Serverless integration test group + p.Add(serverlessTestsGroup()) + + // ESS stack cleanup + p.Add(essCleanupStep()) + + // Aggregate test reports + p.Add(aggregateReportsStep()) + + return p +} + +// echTestingStep creates the Custom ECH Testing step. +func echTestingStep() *pipeline.CommandStep { + step := pipeline.CommandWithKey("Custom ECH Testing", "integration-tests-ech", + `#!/usr/bin/env bash +buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . 
--step 'packaging-amd64' +.buildkite/scripts/steps/integration_tests_tf.sh ech true`) + + pipeline.SetDependsOn(step, "packaging-containers-amd64", "packaging-containers-arm64") + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + "TF_VAR_integration_server_docker_image": "docker.elastic.co/beats-ci/elastic-agent-cloud:git-${BUILDKITE_COMMIT:0:12}", + "FORCE_ESS_CREATE": "true", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.GCPAgent("${IMAGE_UBUNTU_2404_X86_64}", pipeline.MachineTypeN2Standard8)) + pipeline.WithVaultECKeyProd(step) + + return step +} + +// essStartStep creates the Start ESS stack step. +func essStartStep() *pipeline.CommandStep { + step := pipeline.CommandWithKey("Start ESS stack for integration tests", "integration-ess", + ".buildkite/scripts/steps/ess_start.sh") + + pipeline.SetNotify(step, "buildkite/elastic-agent-extended-testing - ESS stack provision") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetArtifactPaths(step, "test_infra/ess/*.tfstate", "test_infra/ess/*.lock.hcl") + pipeline.SetAgent(step, pipeline.DockerAgentWithHooks("docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5")) + pipeline.WithVaultECKeyProd(step) + + return step +} + +// extendedLeakTestsGroup creates the Extended runtime leak tests group. +func extendedLeakTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey("Extended runtime leak tests", "extended-integration-tests") + pipeline.SetGroupNotify(group, "buildkite/elastic-agent-extended-testing - Runtime leak tests") + pipeline.SetGroupDependsOn(group, "integration-ess") + + // Windows:2022:amd64:sudo + win2022 := leakTestStep("Windows:2022:amd64:sudo", "packaging-amd64", + "buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . 
--step 'packaging-amd64'\n.buildkite/scripts/steps/integration_tests_tf.ps1 fleet true", + "${IMAGE_WIN_2022}") + pipeline.AddGroupStep(group, win2022) + + // Windows:2025:amd64:sudo + win2025 := leakTestStep("Windows:2025:amd64:sudo", "packaging-amd64", + "buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . --step 'packaging-amd64'\n.buildkite/scripts/steps/integration_tests_tf.ps1 fleet true", + "${IMAGE_WIN_2025}") + pipeline.AddGroupStep(group, win2025) + + // Ubuntu:2404:amd64:sudo + ubuntu := leakTestStep("Ubuntu:2404:amd64:sudo", "packaging-amd64", + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step 'packaging-amd64'\n.buildkite/scripts/steps/integration_tests_tf.sh fleet true", + "${IMAGE_UBUNTU_2404_X86_64}") + pipeline.AddGroupStep(group, ubuntu) + + return group +} + +// leakTestStep creates a leak test step. +func leakTestStep(label, dependsOn, command, image string) *pipeline.CommandStep { + step := pipeline.Command(label, command) + pipeline.SetDependsOn(step, dependsOn) + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/leak", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.GCPAgent(image, pipeline.MachineTypeN2Standard8)) + pipeline.WithVaultECKeyProd(step) + + return step +} + +// windowsTestsGroup creates the Stateful: Windows tests group. 
+func windowsTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey("Stateful: Windows", "integration-tests-win") + pipeline.SetGroupNotify(group, "buildkite/elastic-agent-extended-testing - Windows") + pipeline.SetGroupDependsOn(group, "integration-ess") + + // Win2022:sudo:{{matrix}} + win2022Sudo := windowsTestStep("Win2022:sudo:{{matrix}}", "${IMAGE_WIN_2022}", true, + []string{"default", "fleet", "fleet-endpoint-security", "fleet-privileged", "standalone-upgrade", "upgrade", "upgrade-flavor", "install-uninstall"}) + pipeline.AddGroupStep(group, win2022Sudo) + + // Win2022:non-sudo:{{matrix}} + win2022NonSudo := windowsTestStep("Win2022:non-sudo:{{matrix}}", "${IMAGE_WIN_2022}", false, + []string{"default"}) + pipeline.AddGroupStep(group, win2022NonSudo) + + // Win2025:sudo:{{matrix}} + win2025Sudo := windowsTestStep("Win2025:sudo:{{matrix}}", "${IMAGE_WIN_2025}", true, + []string{"default", "fleet", "fleet-endpoint-security", "fleet-privileged", "standalone-upgrade", "upgrade", "upgrade-flavor", "install-uninstall"}) + pipeline.AddGroupStep(group, win2025Sudo) + + // Win2025:non-sudo:{{matrix}} + win2025NonSudo := windowsTestStep("Win2025:non-sudo:{{matrix}}", "${IMAGE_WIN_2025}", false, + []string{"default"}) + pipeline.AddGroupStep(group, win2025NonSudo) + + return group +} + +// windowsTestStep creates a Windows test step with matrix. +func windowsTestStep(label, image string, sudo bool, matrix []string) *pipeline.CommandStep { + sudoArg := "false" + if sudo { + sudoArg = "true" + } + + step := pipeline.Command(label, + "buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . 
--step 'packaging-amd64'\n.buildkite/scripts/steps/integration_tests_tf.ps1 {{matrix}} "+sudoArg) + pipeline.SetDependsOn(step, "packaging-amd64") + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetAgent(step, pipeline.GCPAgent(image, pipeline.MachineTypeN2Standard8)) + pipeline.SetRetryAutomatic(step, 1) + pipeline.WithVaultECKeyProd(step) + pipeline.SetSimpleMatrix(step, matrix) + + return step +} + +// ubuntuTestsGroup creates the Stateful: Ubuntu tests group. +func ubuntuTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey("Stateful:Ubuntu", "integration-tests-ubuntu") + pipeline.SetGroupNotify(group, "buildkite/elastic-agent-extended-testing - Ubuntu") + pipeline.SetGroupDependsOn(group, "integration-ess") + + // x86_64:non-sudo: {{matrix}} + x86NonSudo := ubuntuTestStep("x86_64:non-sudo: {{matrix}}", "packaging-amd64", false, + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step 'packaging-amd64'\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} false", + pipeline.GCPAgent("${IMAGE_UBUNTU_2404_X86_64}", pipeline.MachineTypeN2Standard8), + []string{"default"}) + pipeline.AddGroupStep(group, x86NonSudo) + + // x86_64:sudo: {{matrix}} + x86Sudo := ubuntuTestStep("x86_64:sudo: {{matrix}}", "packaging-amd64", true, + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step packaging-amd64\nbuildkite-agent artifact download build/distributions/elastic-agent-*-amd64.deb* . 
--step packaging-amd64\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} true", + pipeline.GCPAgent("${IMAGE_UBUNTU_2404_X86_64}", pipeline.MachineTypeN2Standard8), + []string{"default", "upgrade", "upgrade-flavor", "standalone-upgrade", "fleet", "fleet-endpoint-security", "fleet-airgapped", "fleet-airgapped-privileged", "fleet-privileged", "fleet-upgrade-to-pr-build", "install-uninstall", "fqdn", "deb", "container"}) + pipeline.AddGroupStep(group, x86Sudo) + + // arm:sudo: {{matrix}} + armSudo := ubuntuArmTestStep("arm:sudo: {{matrix}}", "packaging-arm64", + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-arm64* . --step 'packaging-arm64'\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} true", + []string{"default", "upgrade", "upgrade-flavor", "standalone-upgrade", "fleet"}) + pipeline.AddGroupStep(group, armSudo) + + // arm:non-sudo: {{matrix}} (skipped) + armNonSudo := ubuntuArmNonSudoTestStep() + pipeline.AddGroupStep(group, armNonSudo) + + return group +} + +// ubuntuTestStep creates an Ubuntu test step with matrix. +func ubuntuTestStep(label, dependsOn string, _ bool, command string, agent pipeline.Agent, matrix []string) *pipeline.CommandStep { + step := pipeline.Command(label, command) + pipeline.SetDependsOn(step, dependsOn) + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, agent) + pipeline.WithVaultECKeyProd(step) + pipeline.SetSimpleMatrix(step, matrix) + + return step +} + +// ubuntuArmTestStep creates an ARM Ubuntu test step with matrix. 
+func ubuntuArmTestStep(label, dependsOn, command string, matrix []string) *pipeline.CommandStep { + step := pipeline.Command(label, command) + pipeline.SetDependsOn(step, dependsOn) + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetAgent(step, pipeline.AWSAgent("${IMAGE_UBUNTU_2404_ARM_64}", "m6g.2xlarge")) + pipeline.SetRetryAutomatic(step, 1) + pipeline.WithVaultECKeyProd(step) + pipeline.SetSimpleMatrix(step, matrix) + + return step +} + +// ubuntuArmNonSudoTestStep creates the skipped ARM non-sudo test step. +func ubuntuArmNonSudoTestStep() *pipeline.CommandStep { + step := pipeline.Command("arm:non-sudo: {{matrix}}", + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-arm64* . --step 'packaging-arm64'\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} false") + pipeline.SetSkip(step, true) + pipeline.SetDependsOn(step, "packaging-arm64") + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.AWSAgent("${IMAGE_UBUNTU_2404_ARM_64}", "m6g.xlarge")) + pipeline.WithVaultECKeyProd(step) + pipeline.SetSimpleMatrix(step, []string{"default"}) + + return step +} + +// debianTestsGroup creates the Stateful: Debian tests group. 
+func debianTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey("Stateful:Debian", "integration-tests-debian") + pipeline.SetGroupNotify(group, "buildkite/elastic-agent-extended-testing - Debian") + pipeline.SetGroupDependsOn(group, "integration-ess") + + // x86_64:non-sudo: {{matrix.group}} - {{matrix.image}} + nonSudo := debianTestStep("x86_64:non-sudo: {{matrix.group}} - {{matrix.image}}", "packaging-amd64", + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step 'packaging-amd64'\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix.group}} false", + []string{"${IMAGE_DEBIAN_11}", "${IMAGE_DEBIAN_13}"}, + []string{"default"}) + pipeline.AddGroupStep(group, nonSudo) + + // x86_64:sudo: {{matrix.group}} - {{matrix.image}} + sudo := debianTestStep("x86_64:sudo: {{matrix.group}} - {{matrix.image}}", "packaging-amd64", + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step packaging-amd64\nbuildkite-agent artifact download build/distributions/elastic-agent-*-amd64.deb* . --step packaging-amd64\n.buildkite/scripts/steps/integration_tests_tf.sh {{matrix.group}} true", + []string{"${IMAGE_DEBIAN_11}", "${IMAGE_DEBIAN_13}"}, + []string{"default", "upgrade", "upgrade-flavor", "standalone-upgrade", "fleet", "fleet-endpoint-security", "fleet-airgapped", "fleet-airgapped-privileged", "fleet-privileged", "fleet-upgrade-to-pr-build", "install-uninstall", "deb", "container"}) + pipeline.AddGroupStep(group, sudo) + + return group +} + +// debianTestStep creates a Debian test step with setup matrix. 
+func debianTestStep(label, dependsOn, command string, images, groups []string) *pipeline.CommandStep { + step := pipeline.Command(label, command) + pipeline.SetDependsOn(step, dependsOn) + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.Agent{ + "provider": "gcp", + "machineType": pipeline.MachineTypeN2Standard8, + "image": "{{matrix.image}}", + }) + pipeline.WithVaultECKeyProd(step) + pipeline.SetMatrix(step, map[string][]string{ + "image": images, + "group": groups, + }) + + return step +} + +// rhelTestsGroup creates the Stateful: RHEL tests group. +func rhelTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey("Stateful:RHEL", "integration-tests-rhel") + pipeline.SetGroupNotify(group, "buildkite/elastic-agent-extended-testing - RHEL") + pipeline.SetGroupDependsOn(group, "integration-ess") + + // x86_64:sudo:rpm - {{matrix.image}} + rpm := rhelTestStep() + pipeline.AddGroupStep(group, rpm) + + return group +} + +// rhelTestStep creates the RHEL RPM test step. +func rhelTestStep() *pipeline.CommandStep { + step := pipeline.Command("x86_64:sudo:rpm - {{matrix.image}}", + "buildkite-agent artifact download build/distributions/elastic-agent-*-x86_64.rpm* . 
--step packaging-amd64\n.buildkite/scripts/steps/integration_tests_tf.sh rpm true") + pipeline.SetDependsOn(step, "packaging-amd64") + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/ess", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.WithVaultECKeyProd(step) + pipeline.SetAgent(step, pipeline.Agent{ + "provider": "gcp", + "machineType": pipeline.MachineTypeN2Standard8, + "image": "{{matrix.image}}", + }) + pipeline.SetMatrix(step, map[string][]string{ + "image": {"${IMAGE_RHEL_8}", "${IMAGE_RHEL_10}"}, + }) + + return step +} + +// kubernetesTestsGroup creates the Kubernetes tests group. +func kubernetesTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey(":kubernetes: Kubernetes", "integration-tests-kubernetes") + pipeline.SetGroupNotify(group, "buildkite/elastic-agent-extended-testing - Kubernetes") + pipeline.SetGroupDependsOn(group, "integration-ess", "packaging-containers-amd64") + + // Non-PR builds: all k8s versions with grouped variants + fullK8s := k8sTestStep(":git: :kubernetes: {{matrix.version}}:amd64:{{matrix.variants}}", + `build.pull_request.id == null`, + k8sAllTestVersions, + []string{ + "basic,slim,complete,service,elastic-otel-collector", + "wolfi,slim-wolfi,complete-wolfi,elastic-otel-collector-wolfi", + }) + pipeline.AddGroupStep(group, fullK8s) + + // PR builds: only min/max versions with individual variants + prK8s := k8sTestStep(":open-pull-request: :kubernetes: {{matrix.version}}:amd64:{{matrix.variants}}", + `build.pull_request.id != null`, + []string{k8sMinTestVersion, k8sMaxTestVersion}, + []string{"basic", "slim", "complete", "service", "elastic-otel-collector", "wolfi", "slim-wolfi", "complete-wolfi", "elastic-otel-collector-wolfi"}) + pipeline.AddGroupStep(group, prK8s) + + return group +} + +// k8sTestStep creates a Kubernetes test step with matrix. 
+func k8sTestStep(label, condition string, versions, variants []string) *pipeline.CommandStep { + step := pipeline.Command(label, + `buildkite-agent artifact download build/distributions/*-linux-amd64.docker.tar.gz . --step 'packaging-containers-amd64' +.buildkite/scripts/steps/integration_tests_tf.sh kubernetes false`) + + pipeline.SetIf(step, condition) + pipeline.SetEnv(step, map[string]string{ + "K8S_VERSION": "{{matrix.version}}", + "ASDF_KIND_VERSION": "0.27.0", + "DOCKER_VARIANTS": "{{matrix.variants}}", + "TARGET_ARCH": "amd64", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**", "build/*.pod_logs_dump/*") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.GCPAgentWithDisk("${IMAGE_UBUNTU_2404_X86_64}", pipeline.MachineTypeN2Standard8, 80, "")) + pipeline.WithVaultECKeyProd(step) + pipeline.SetMatrix(step, map[string][]string{ + "variants": variants, + "version": versions, + }) + + return step +} + +// serverlessTestsGroup creates the Serverless integration tests group. +func serverlessTestsGroup() *pipeline.GroupStep { + group := pipeline.GroupWithKey("Serverless integration test", "integration-tests-serverless") + pipeline.SetGroupNotify(group, "buildkite/elastic-agent-extended-testing - Serverless integration test") + + // Windows:2022:amd64:sudo + win2022 := serverlessWindowsTestStep("Windows:2022:amd64:sudo", "${IMAGE_WIN_2022}") + pipeline.AddGroupStep(group, win2022) + + // Windows:2025:amd64:sudo + win2025 := serverlessWindowsTestStep("Windows:2025:amd64:sudo", "${IMAGE_WIN_2025}") + pipeline.AddGroupStep(group, win2025) + + // Ubuntu:2404:amd64:sudo + ubuntu := serverlessUbuntuTestStep() + pipeline.AddGroupStep(group, ubuntu) + + return group +} + +// serverlessWindowsTestStep creates a serverless Windows test step. 
+func serverlessWindowsTestStep(label, image string) *pipeline.CommandStep { + step := pipeline.Command(label, + "buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . --step 'packaging-amd64'\n.buildkite/scripts/buildkite-integration-tests.ps1 fleet true") + pipeline.SetDependsOn(step, "packaging-amd64") + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/serverless", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.GCPAgent(image, pipeline.MachineTypeN2Standard8)) + pipeline.WithGoogleOIDC(step) + pipeline.WithGCPSecretManagerServerless(step) + + return step +} + +// serverlessUbuntuTestStep creates the serverless Ubuntu test step. +func serverlessUbuntuTestStep() *pipeline.CommandStep { + step := pipeline.Command("Ubuntu:2404:amd64:sudo", + "buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step 'packaging-amd64'\nsudo -E .buildkite/scripts/buildkite-integration-tests.sh fleet true") + pipeline.SetDependsOn(step, "packaging-amd64") + pipeline.SetEnv(step, map[string]string{ + "TEST_PACKAGE": "github.com/elastic/elastic-agent/testing/integration/serverless", + }) + pipeline.SetArtifactPaths(step, "build/*", "build/diagnostics/**") + pipeline.SetRetryAutomatic(step, 1) + pipeline.SetAgent(step, pipeline.GCPAgent("${IMAGE_UBUNTU_2404_X86_64}", pipeline.MachineTypeN2Standard8)) + pipeline.WithGoogleOIDC(step) + pipeline.WithGCPSecretManagerServerless(step) + + return step +} + +// essCleanupStep creates the ESS stack cleanup step. +func essCleanupStep() *pipeline.CommandStep { + step := pipeline.Command("ESS stack cleanup", + `buildkite-agent artifact download "test_infra/ess/**" . 
--step "integration-ess" +ls -lah test_infra/ess +.buildkite/scripts/steps/ess_down.sh`) + + pipeline.SetDependsOn(step, + "integration-tests-ubuntu", + "integration-tests-win", + "integration-tests-rhel", + "integration-tests-kubernetes", + "extended-integration-tests", + "integration-tests-debian") + pipeline.SetAllowDependencyFailure(step, true) + pipeline.SetAgent(step, pipeline.DockerAgentWithHooks("docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5")) + pipeline.WithVaultECKeyProd(step) + + return step +} + +// aggregateReportsStep creates the Aggregate test reports step. +func aggregateReportsStep() *pipeline.CommandStep { + step := pipeline.CommandWithKey("Aggregate test reports", "aggregate-reports", + `buildkite-agent artifact download "build/*.xml" . +buildkite-agent artifact download "build\*.xml" .`) + + pipeline.SetDependsOn(step, + "integration-tests-ech", + "integration-tests-ubuntu", + "integration-tests-win", + "integration-tests-rhel", + "integration-tests-kubernetes", + "integration-tests-serverless", + "integration-tests-debian") + pipeline.SetAllowDependencyFailure(step, true) + pipeline.SetAgent(step, pipeline.DockerAgentWithHooks("docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5")) + pipeline.SetSoftFailExitStatus(step, "*") + pipeline.WithVaultBuildkiteAnalytics(step) + pipeline.WithTestCollector(step, "build/*.xml", "junit") + + return step +} diff --git a/dev-tools/buildkite/pipelines/pipelines_test.go b/dev-tools/buildkite/pipelines/pipelines_test.go index 20b27c3f5ee..421dc8220a0 100644 --- a/dev-tools/buildkite/pipelines/pipelines_test.go +++ b/dev-tools/buildkite/pipelines/pipelines_test.go @@ -56,6 +56,18 @@ var pipelineTestCases = []pipelineTestCase{ goldenFile: "pipeline.elastic-agent-package.yml", actualFile: "pipeline.elastic-agent-package.yml", }, + { + name: "BKIntegrationPipeline", + generator: BKIntegrationPipeline, + goldenFile: 
"bk.integration.pipeline.yml", + actualFile: "bk.integration.pipeline.yml", + }, + { + name: "BKIntegrationFIPSPipeline", + generator: BKIntegrationFIPSPipeline, + goldenFile: "bk.integration-fips.pipeline.yml", + actualFile: "bk.integration-fips.pipeline.yml", + }, } // findRepoRoot finds the repository root by looking for go.mod. diff --git a/dev-tools/buildkite/pipelines/testdata/bk.integration-fips.pipeline.yml b/dev-tools/buildkite/pipelines/testdata/bk.integration-fips.pipeline.yml new file mode 100644 index 00000000000..7574b932b44 --- /dev/null +++ b/dev-tools/buildkite/pipelines/testdata/bk.integration-fips.pipeline.yml @@ -0,0 +1,176 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + ASDF_MAGE_VERSION: 1.14.0 + ASDF_TERRAFORM_VERSION: 1.9.2 + IMAGE_UBUNTU_2404_X86_64: platform-ingest-elastic-agent-ubuntu-2404-1762801856 + IMAGE_UBUNTU_ARM64_FIPS: platform-ingest-elastic-agent-ubuntu-2204-fips-aarch64-1762801856 + IMAGE_UBUNTU_X86_64_FIPS: platform-ingest-elastic-agent-ubuntu-2204-fips-1762801856 + MS_GOTOOLCHAIN_TELEMETRY_ENABLED: "0" +steps: + - agents: + image: docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5 + useCustomGlobalHooks: true + artifact_paths: + - test_infra/ess/*.tfstate + - test_infra/ess/*.lock.hcl + command: source .buildkite/scripts/steps/ess_start.sh + depends_on: + - packaging-containers-amd64-fips + - packaging-containers-arm64-fips + env: + EC_ENDPOINT: https://api.staging.elastic-gov.com + ESS_REGION: us-gov-east-1 + FIPS: "true" + TF_VAR_deployment_template_id: aws-general-purpose + TF_VAR_docker_images_name_suffix: -fips + TF_VAR_integration_server_docker_image: docker.elastic.co/beats-ci/elastic-agent-cloud-fips:git-${BUILDKITE_COMMIT:0:12} + key: integration-fips-ess + label: Start ESS stack for FIPS integration tests + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: 
kv/ci-shared/platform-ingest/platform-ingest-ec-staging-gov + - depends_on: + - integration-fips-ess + group: fips:Stateful:Ubuntu + key: integration-tests-ubuntu-fips + steps: + - agents: + image: ${IMAGE_UBUNTU_X86_64_FIPS} + instanceType: m5.2xlarge + provider: aws + artifact_paths: + - build/** + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/** . --step 'packaging-amd64-fips' + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix.groups}} {{matrix.sudo}} + depends_on: + - packaging-amd64-fips + env: + EC_ENDPOINT: https://api.staging.elastic-gov.com + ESS_REGION: us-gov-east-1 + FIPS: "true" + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + TF_VAR_deployment_template_id: aws-general-purpose + TF_VAR_docker_images_name_suffix: -fips + TF_VAR_integration_server_docker_image: docker.elastic.co/beats-ci/elastic-agent-cloud-fips:git-${BUILDKITE_COMMIT:0:12} + label: fips:x86_64:sudo-{{matrix.sudo}}:{{matrix.groups}} + matrix: + setup: + groups: + - fleet + sudo: + - "false" + - "true" + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-staging-gov + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_ARM64_FIPS} + instanceType: m6g.2xlarge + provider: aws + artifact_paths: + - build/** + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/** . 
--step 'packaging-arm64-fips' + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix.groups}} {{matrix.sudo}} + depends_on: + - packaging-arm64-fips + env: + EC_ENDPOINT: https://api.staging.elastic-gov.com + ESS_REGION: us-gov-east-1 + FIPS: "true" + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + TF_VAR_deployment_template_id: aws-general-purpose + TF_VAR_docker_images_name_suffix: -fips + TF_VAR_integration_server_docker_image: docker.elastic.co/beats-ci/elastic-agent-cloud-fips:git-${BUILDKITE_COMMIT:0:12} + label: fips:arm64:sudo-{{matrix.sudo}}:{{matrix.groups}} + matrix: + setup: + groups: + - fleet + sudo: + - "false" + - "true" + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-staging-gov + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_X86_64_FIPS} + instanceType: m5.2xlarge + provider: aws + artifact_paths: + - build/** + - build/diagnostics/** + command: .buildkite/scripts/buildkite-integration-tests.sh ech-deployment false + env: + EC_ENDPOINT: https://api.staging.elastic-gov.com + ESS_REGION: us-gov-east-1 + FIPS: "true" + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + if: build.env("BUILDKITE_PULL_REQUEST") != "false" && build.env("GITHUB_PR_LABELS") =~ /.*(Testing:run:TestUpgradeIntegrationsServer).*/ + label: fips:upgrade-ech-deployment + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-staging-gov + retry: + automatic: + limit: 1 + - agents: + image: docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5 + useCustomGlobalHooks: true + allow_dependency_failure: true + command: |- + buildkite-agent artifact download "test_infra/ess/**" . 
--step "integration-fips-ess" + ls -lah test_infra/ess + .buildkite/scripts/steps/ess_down.sh + depends_on: + - integration-tests-ubuntu-fips + env: + EC_ENDPOINT: https://api.staging.elastic-gov.com + ESS_REGION: us-gov-east-1 + FIPS: "true" + TF_VAR_deployment_template_id: aws-general-purpose + TF_VAR_docker_images_name_suffix: -fips + TF_VAR_integration_server_docker_image: docker.elastic.co/beats-ci/elastic-agent-cloud-fips:git-${BUILDKITE_COMMIT:0:12} + label: ESS FIPS stack cleanup + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-staging-gov + - agents: + image: docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5 + useCustomGlobalHooks: true + allow_dependency_failure: true + command: buildkite-agent artifact download "build/*.xml" . + depends_on: + - integration-tests-ubuntu-fips + label: Aggregate test reports + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: BUILDKITE_ANALYTICS_TOKEN + field: token + path: kv/ci-shared/platform-ingest/buildkite_analytics_token + - test-collector#v1.11.0: + branches: main + debug: true + files: build/*.xml + format: junit + soft_fail: + - exit_status: "*" diff --git a/dev-tools/buildkite/pipelines/testdata/bk.integration.pipeline.yml b/dev-tools/buildkite/pipelines/testdata/bk.integration.pipeline.yml new file mode 100644 index 00000000000..4af2c8d7ddc --- /dev/null +++ b/dev-tools/buildkite/pipelines/testdata/bk.integration.pipeline.yml @@ -0,0 +1,746 @@ +# yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +env: + ASDF_MAGE_VERSION: 1.14.0 + ASDF_TERRAFORM_VERSION: 1.9.2 + IMAGE_DEBIAN_11: platform-ingest-elastic-agent-debian-11-1762801856 + IMAGE_DEBIAN_13: platform-ingest-elastic-agent-debian-13-1762801856 + IMAGE_RHEL_10: platform-ingest-elastic-agent-rhel-10-1762801856 + IMAGE_RHEL_8: platform-ingest-elastic-agent-rhel-8-1762801856 
+ IMAGE_UBUNTU_2404_ARM_64: platform-ingest-elastic-agent-ubuntu-2404-aarch64-1762801856 + IMAGE_UBUNTU_2404_X86_64: platform-ingest-elastic-agent-ubuntu-2404-1762801856 + IMAGE_WIN_2022: platform-ingest-elastic-agent-windows-2022-1762801856 + IMAGE_WIN_2025: platform-ingest-elastic-agent-windows-2025-1762801856 + VAULT_PATH: kv/ci-shared/observability-ingest/cloud/gcp +steps: + - agents: + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + #!/usr/bin/env bash + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.sh ech true + depends_on: + - packaging-containers-amd64 + - packaging-containers-arm64 + env: + FORCE_ESS_CREATE: "true" + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + TF_VAR_integration_server_docker_image: docker.elastic.co/beats-ci/elastic-agent-cloud:git-${BUILDKITE_COMMIT:0:12} + key: integration-tests-ech + label: Custom ECH Testing + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5 + useCustomGlobalHooks: true + artifact_paths: + - test_infra/ess/*.tfstate + - test_infra/ess/*.lock.hcl + command: .buildkite/scripts/steps/ess_start.sh + key: integration-ess + label: Start ESS stack for integration tests + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - ESS stack provision + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - depends_on: + - integration-ess + group: Extended runtime leak tests + key: 
extended-integration-tests + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - Runtime leak tests + steps: + - agents: + image: ${IMAGE_WIN_2022} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . --step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.ps1 fleet true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/leak + label: Windows:2022:amd64:sudo + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_WIN_2025} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . --step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.ps1 fleet true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/leak + label: Windows:2025:amd64:sudo + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . 
--step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.sh fleet true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/leak + label: Ubuntu:2404:amd64:sudo + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - depends_on: + - integration-ess + group: "Stateful: Windows" + key: integration-tests-win + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - Windows + steps: + - agents: + image: ${IMAGE_WIN_2022} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . --step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.ps1 {{matrix}} true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: Win2022:sudo:{{matrix}} + matrix: + - default + - fleet + - fleet-endpoint-security + - fleet-privileged + - standalone-upgrade + - upgrade + - upgrade-flavor + - install-uninstall + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_WIN_2022} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . 
--step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.ps1 {{matrix}} false + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: Win2022:non-sudo:{{matrix}} + matrix: + - default + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_WIN_2025} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . --step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.ps1 {{matrix}} true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: Win2025:sudo:{{matrix}} + matrix: + - default + - fleet + - fleet-endpoint-security + - fleet-privileged + - standalone-upgrade + - upgrade + - upgrade-flavor + - install-uninstall + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_WIN_2025} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . 
--step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.ps1 {{matrix}} false + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: Win2025:non-sudo:{{matrix}} + matrix: + - default + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - depends_on: + - integration-ess + group: Stateful:Ubuntu + key: integration-tests-ubuntu + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - Ubuntu + steps: + - agents: + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} false + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: "x86_64:non-sudo: {{matrix}}" + matrix: + - default + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step packaging-amd64 + buildkite-agent artifact download build/distributions/elastic-agent-*-amd64.deb* . 
--step packaging-amd64 + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: "x86_64:sudo: {{matrix}}" + matrix: + - default + - upgrade + - upgrade-flavor + - standalone-upgrade + - fleet + - fleet-endpoint-security + - fleet-airgapped + - fleet-airgapped-privileged + - fleet-privileged + - fleet-upgrade-to-pr-build + - install-uninstall + - fqdn + - deb + - container + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2404_ARM_64} + instanceType: m6g.2xlarge + provider: aws + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-arm64* . --step 'packaging-arm64' + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} true + depends_on: + - packaging-arm64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: "arm:sudo: {{matrix}}" + matrix: + - default + - upgrade + - upgrade-flavor + - standalone-upgrade + - fleet + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2404_ARM_64} + instanceType: m6g.xlarge + provider: aws + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-arm64* . 
--step 'packaging-arm64' + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix}} false + depends_on: + - packaging-arm64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: "arm:non-sudo: {{matrix}}" + matrix: + - default + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + skip: true + - depends_on: + - integration-ess + group: Stateful:Debian + key: integration-tests-debian + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - Debian + steps: + - agents: + image: "{{matrix.image}}" + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step 'packaging-amd64' + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix.group}} false + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: "x86_64:non-sudo: {{matrix.group}} - {{matrix.image}}" + matrix: + setup: + group: + - default + image: + - ${IMAGE_DEBIAN_11} + - ${IMAGE_DEBIAN_13} + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + image: "{{matrix.image}}" + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . --step packaging-amd64 + buildkite-agent artifact download build/distributions/elastic-agent-*-amd64.deb* . 
--step packaging-amd64 + .buildkite/scripts/steps/integration_tests_tf.sh {{matrix.group}} true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: "x86_64:sudo: {{matrix.group}} - {{matrix.image}}" + matrix: + setup: + group: + - default + - upgrade + - upgrade-flavor + - standalone-upgrade + - fleet + - fleet-endpoint-security + - fleet-airgapped + - fleet-airgapped-privileged + - fleet-privileged + - fleet-upgrade-to-pr-build + - install-uninstall + - deb + - container + image: + - ${IMAGE_DEBIAN_11} + - ${IMAGE_DEBIAN_13} + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - depends_on: + - integration-ess + group: Stateful:RHEL + key: integration-tests-rhel + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - RHEL + steps: + - agents: + image: "{{matrix.image}}" + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-x86_64.rpm* . 
--step packaging-amd64 + .buildkite/scripts/steps/integration_tests_tf.sh rpm true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/ess + label: x86_64:sudo:rpm - {{matrix.image}} + matrix: + setup: + image: + - ${IMAGE_RHEL_8} + - ${IMAGE_RHEL_10} + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - depends_on: + - integration-ess + - packaging-containers-amd64 + group: ":kubernetes: Kubernetes" + key: integration-tests-kubernetes + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - Kubernetes + steps: + - agents: + diskSizeGb: 80 + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + - build/*.pod_logs_dump/* + command: |- + buildkite-agent artifact download build/distributions/*-linux-amd64.docker.tar.gz . 
--step 'packaging-containers-amd64' + .buildkite/scripts/steps/integration_tests_tf.sh kubernetes false + env: + ASDF_KIND_VERSION: 0.27.0 + DOCKER_VARIANTS: "{{matrix.variants}}" + K8S_VERSION: "{{matrix.version}}" + TARGET_ARCH: amd64 + if: build.pull_request.id == null + label: ":git: :kubernetes: {{matrix.version}}:amd64:{{matrix.variants}}" + matrix: + setup: + variants: + - basic,slim,complete,service,elastic-otel-collector + - wolfi,slim-wolfi,complete-wolfi,elastic-otel-collector-wolfi + version: + - v1.27.16 + - v1.28.15 + - v1.29.14 + - v1.30.0 + - v1.31.0 + - v1.32.0 + - v1.33.0 + - v1.34.0 + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - agents: + diskSizeGb: 80 + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + - build/*.pod_logs_dump/* + command: |- + buildkite-agent artifact download build/distributions/*-linux-amd64.docker.tar.gz . 
--step 'packaging-containers-amd64' + .buildkite/scripts/steps/integration_tests_tf.sh kubernetes false + env: + ASDF_KIND_VERSION: 0.27.0 + DOCKER_VARIANTS: "{{matrix.variants}}" + K8S_VERSION: "{{matrix.version}}" + TARGET_ARCH: amd64 + if: build.pull_request.id != null + label: ":open-pull-request: :kubernetes: {{matrix.version}}:amd64:{{matrix.variants}}" + matrix: + setup: + variants: + - basic + - slim + - complete + - service + - elastic-otel-collector + - wolfi + - slim-wolfi + - complete-wolfi + - elastic-otel-collector-wolfi + version: + - v1.27.16 + - v1.34.0 + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + retry: + automatic: + limit: 1 + - group: Serverless integration test + key: integration-tests-serverless + notify: + - github_commit_status: + context: buildkite/elastic-agent-extended-testing - Serverless integration test + steps: + - agents: + image: ${IMAGE_WIN_2022} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . 
--step 'packaging-amd64' + .buildkite/scripts/buildkite-integration-tests.ps1 fleet true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/serverless + label: Windows:2022:amd64:sudo + plugins: + - elastic/oblt-google-auth#v1.3.0: + lifetime: 10800 + project-id: elastic-observability-ci + project-number: "911195782929" + - elastic/gcp-secret-manager#v1.3.0-elastic: + env: + ELASTICSEARCH_HOST: ea-serverless-it-elasticsearch-hostname + ELASTICSEARCH_PASSWORD: ea-serverless-it-elasticsearch-password + ELASTICSEARCH_USERNAME: ea-serverless-it-elasticsearch-username + KIBANA_HOST: ea-serverless-it-kibana-hostname + KIBANA_PASSWORD: ea-serverless-it-kibana-password + KIBANA_USERNAME: ea-serverless-it-kibana-username + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_WIN_2025} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-windows-x86_64* . 
--step 'packaging-amd64' + .buildkite/scripts/buildkite-integration-tests.ps1 fleet true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/serverless + label: Windows:2025:amd64:sudo + plugins: + - elastic/oblt-google-auth#v1.3.0: + lifetime: 10800 + project-id: elastic-observability-ci + project-number: "911195782929" + - elastic/gcp-secret-manager#v1.3.0-elastic: + env: + ELASTICSEARCH_HOST: ea-serverless-it-elasticsearch-hostname + ELASTICSEARCH_PASSWORD: ea-serverless-it-elasticsearch-password + ELASTICSEARCH_USERNAME: ea-serverless-it-elasticsearch-username + KIBANA_HOST: ea-serverless-it-kibana-hostname + KIBANA_PASSWORD: ea-serverless-it-kibana-password + KIBANA_USERNAME: ea-serverless-it-kibana-username + retry: + automatic: + limit: 1 + - agents: + image: ${IMAGE_UBUNTU_2404_X86_64} + machineType: n2-standard-8 + provider: gcp + artifact_paths: + - build/* + - build/diagnostics/** + command: |- + buildkite-agent artifact download build/distributions/elastic-agent-*-linux-x86_64* . 
--step 'packaging-amd64' + sudo -E .buildkite/scripts/buildkite-integration-tests.sh fleet true + depends_on: + - packaging-amd64 + env: + TEST_PACKAGE: github.com/elastic/elastic-agent/testing/integration/serverless + label: Ubuntu:2404:amd64:sudo + plugins: + - elastic/oblt-google-auth#v1.3.0: + lifetime: 10800 + project-id: elastic-observability-ci + project-number: "911195782929" + - elastic/gcp-secret-manager#v1.3.0-elastic: + env: + ELASTICSEARCH_HOST: ea-serverless-it-elasticsearch-hostname + ELASTICSEARCH_PASSWORD: ea-serverless-it-elasticsearch-password + ELASTICSEARCH_USERNAME: ea-serverless-it-elasticsearch-username + KIBANA_HOST: ea-serverless-it-kibana-hostname + KIBANA_PASSWORD: ea-serverless-it-kibana-password + KIBANA_USERNAME: ea-serverless-it-kibana-username + retry: + automatic: + limit: 1 + - agents: + image: docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5 + useCustomGlobalHooks: true + allow_dependency_failure: true + command: |- + buildkite-agent artifact download "test_infra/ess/**" . --step "integration-ess" + ls -lah test_infra/ess + .buildkite/scripts/steps/ess_down.sh + depends_on: + - integration-tests-ubuntu + - integration-tests-win + - integration-tests-rhel + - integration-tests-kubernetes + - extended-integration-tests + - integration-tests-debian + label: ESS stack cleanup + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: EC_API_KEY + field: apiKey + path: kv/ci-shared/platform-ingest/platform-ingest-ec-prod + - agents: + image: docker.elastic.co/ci-agent-images/platform-ingest/buildkite-agent-beats-ci-with-hooks:0.5 + useCustomGlobalHooks: true + allow_dependency_failure: true + command: |- + buildkite-agent artifact download "build/*.xml" . + buildkite-agent artifact download "build\*.xml" . 
+ depends_on: + - integration-tests-ech + - integration-tests-ubuntu + - integration-tests-win + - integration-tests-rhel + - integration-tests-kubernetes + - integration-tests-serverless + - integration-tests-debian + key: aggregate-reports + label: Aggregate test reports + plugins: + - elastic/vault-secrets#v0.1.0: + env_var: BUILDKITE_ANALYTICS_TOKEN + field: token + path: kv/ci-shared/platform-ingest/buildkite_analytics_token + - test-collector#v1.11.0: + branches: main + debug: true + files: build/*.xml + format: junit + soft_fail: + - exit_status: "*" diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml b/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml index 152b2ebcda5..81b5ce9d444 100644 --- a/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml @@ -69,6 +69,7 @@ steps: SYNTHETICS_TAG: "agentless-ci" YAML fi - depends_on: mirror-elastic-agent + depends_on: + - mirror-elastic-agent if: build.env("DRY_RUN") == null || build.env("DRY_RUN") == "false" label: ":grey_question: Promote agentless app release if validation passes" diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml b/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml index 4d41a9b5ddb..d6dddd4386b 100644 --- a/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-package.yml @@ -145,7 +145,8 @@ steps: export DRA_WORKFLOW fi .buildkite/scripts/steps/dra-publish.sh - depends_on: package + depends_on: + - package env: DRA_PROJECT_ARTIFACT_ID: agent-package DRA_PROJECT_ID: elastic-agent-package @@ -191,7 +192,8 @@ steps: buildkite-agent meta-data set "triggered_build_id" "$BUILDKITE_BUILD_ID" --job $TRIGGER_JOB_ID buildkite-agent meta-data set "triggered_commit_hash" "$BUILDKITE_COMMIT" --job 
$TRIGGER_JOB_ID fi - depends_on: package + depends_on: + - package env: DRA_PROJECT_ARTIFACT_ID: agent-package DRA_PROJECT_ID: elastic-agent-package From 66fd40a7595a54f399a9d97eb84366e9cca5af76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Sun, 14 Dec 2025 18:13:08 +0100 Subject: [PATCH 05/10] Generate buildkite pipeline definitions via mage targets --- dev-tools/mage/buildkite.go | 229 +++++++++++++++++++++++++++++++ dev-tools/mage/buildkite_test.go | 192 ++++++++++++++++++++++++++ magefile.go | 57 ++++++++ 3 files changed, 478 insertions(+) create mode 100644 dev-tools/mage/buildkite.go create mode 100644 dev-tools/mage/buildkite_test.go diff --git a/dev-tools/mage/buildkite.go b/dev-tools/mage/buildkite.go new file mode 100644 index 00000000000..b42f187bae1 --- /dev/null +++ b/dev-tools/mage/buildkite.go @@ -0,0 +1,229 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package mage + +import ( + "fmt" + "strings" + + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipeline" + "github.com/elastic/elastic-agent/dev-tools/buildkite/pipelines" +) + +// PipelineDefinition defines a Buildkite pipeline that can be generated. +type PipelineDefinition struct { + Name string + Generator func() *pipeline.Pipeline + // YAMLFile is the path to the existing YAML file for validation/comparison. + YAMLFile string +} + +// BuildkitePipelines is the list of all pipelines that can be generated. +// The YAMLFile field points to the existing static YAML file for validation. 
+var BuildkitePipelines = []PipelineDefinition{ + {"GCECleanup", pipelines.GCECleanup, ".buildkite/pipeline.elastic-agent-gce-cleanup.yml"}, + {"AgentlessAppRelease", pipelines.AgentlessAppRelease, ".buildkite/pipeline.agentless-app-release.yaml"}, + {"Pipeline", pipelines.Pipeline, ".buildkite/pipeline.yml"}, + {"IntegrationPipeline", pipelines.IntegrationPipeline, ".buildkite/integration.pipeline.yml"}, + {"ElasticAgentPackage", pipelines.ElasticAgentPackage, ".buildkite/pipeline.elastic-agent-package.yml"}, + {"BKIntegrationPipeline", pipelines.BKIntegrationPipeline, ".buildkite/bk.integration.pipeline.yml"}, + {"BKIntegrationFIPSPipeline", pipelines.BKIntegrationFIPSPipeline, ".buildkite/bk.integration-fips.pipeline.yml"}, +} + +// BuildkiteGeneratePipeline generates a pipeline by name and outputs YAML to stdout. +// This is designed to be piped to `buildkite-agent pipeline upload`. +func BuildkiteGeneratePipeline(name string) error { + for _, p := range BuildkitePipelines { + if p.Name == name { + pl := p.Generator() + yaml, err := pl.MarshalYAML() + if err != nil { + return fmt.Errorf("failed to marshal %s: %w", p.Name, err) + } + fmt.Print(string(yaml)) + return nil + } + } + return fmt.Errorf("pipeline %q not found", name) +} + +// Individual pipeline generators - each outputs YAML to stdout. +// These are designed to be piped to `buildkite-agent pipeline upload`. + +// BuildkitePipeline outputs the main pipeline YAML to stdout. +func BuildkitePipeline() error { + return BuildkiteGeneratePipeline("Pipeline") +} + +// BuildkiteIntegration outputs the integration pipeline YAML to stdout. +func BuildkiteIntegration() error { + return BuildkiteGeneratePipeline("IntegrationPipeline") +} + +// BuildkiteIntegrationFull outputs the full integration tests pipeline YAML to stdout. 
+func BuildkiteIntegrationFull() error { + return BuildkiteGeneratePipeline("BKIntegrationPipeline") +} + +// BuildkiteIntegrationFIPS outputs the FIPS integration tests pipeline YAML to stdout. +func BuildkiteIntegrationFIPS() error { + return BuildkiteGeneratePipeline("BKIntegrationFIPSPipeline") +} + +// BuildkitePackage outputs the Elastic Agent package pipeline YAML to stdout. +func BuildkitePackage() error { + return BuildkiteGeneratePipeline("ElasticAgentPackage") +} + +// BuildkiteAgentlessRelease outputs the agentless app release pipeline YAML to stdout. +func BuildkiteAgentlessRelease() error { + return BuildkiteGeneratePipeline("AgentlessAppRelease") +} + +// BuildkiteGCECleanup outputs the GCE cleanup pipeline YAML to stdout. +func BuildkiteGCECleanup() error { + return BuildkiteGeneratePipeline("GCECleanup") +} + +// BuildkiteValidateResult contains the result of validating a single pipeline. +type BuildkiteValidateResult struct { + Name string + YAMLFile string + Valid bool + Error error + Differences []string +} + +// BuildkiteValidate validates that generated pipelines match the existing YAML files. +// Returns the validation results and an error if any pipeline doesn't match. 
+func BuildkiteValidate() ([]BuildkiteValidateResult, error) { + fmt.Println(">> buildkite:validate - Validating Buildkite pipelines against YAML files...") + + var results []BuildkiteValidateResult + var errs []string + + for _, p := range BuildkitePipelines { + result := BuildkiteValidateResult{ + Name: p.Name, + YAMLFile: p.YAMLFile, + } + + pl := p.Generator() + compareResult, err := pipeline.SemanticCompareWithFile(pl, p.YAMLFile) + if err != nil { + result.Error = err + errs = append(errs, fmt.Sprintf("%s: %v", p.Name, err)) + results = append(results, result) + continue + } + if compareResult.ParseError != nil { + result.Error = compareResult.ParseError + errs = append(errs, fmt.Sprintf("%s: parse error: %v", p.Name, compareResult.ParseError)) + results = append(results, result) + continue + } + if !compareResult.Equal { + result.Differences = compareResult.Differences + errs = append(errs, fmt.Sprintf("%s: generated pipeline does not match %s:\n%s", + p.Name, p.YAMLFile, strings.Join(compareResult.Differences, "\n"))) + } else { + result.Valid = true + fmt.Printf(" ✓ %s matches %s\n", p.Name, p.YAMLFile) + } + results = append(results, result) + } + + if len(errs) > 0 { + fmt.Println("\n>> buildkite:validate - FAILED!") + for _, e := range errs { + fmt.Printf(" ✗ %s\n", e) + } + return results, fmt.Errorf("pipeline validation failed: %d errors", len(errs)) + } + + fmt.Println(">> buildkite:validate - Done! All pipelines match.") + return results, nil +} + +// BuildkiteValidatePipeline validates a single pipeline by name. +// Returns the validation result. 
+func BuildkiteValidatePipeline(name string) (*BuildkiteValidateResult, error) { + for _, p := range BuildkitePipelines { + if p.Name == name { + result := &BuildkiteValidateResult{ + Name: p.Name, + YAMLFile: p.YAMLFile, + } + + pl := p.Generator() + compareResult, err := pipeline.SemanticCompareWithFile(pl, p.YAMLFile) + if err != nil { + result.Error = err + return result, err + } + if compareResult.ParseError != nil { + result.Error = compareResult.ParseError + return result, compareResult.ParseError + } + if !compareResult.Equal { + result.Differences = compareResult.Differences + return result, fmt.Errorf("generated pipeline does not match %s", p.YAMLFile) + } + result.Valid = true + return result, nil + } + } + return nil, fmt.Errorf("pipeline %q not found", name) +} + +// BuildkiteDiffResult contains the diff result for a single pipeline. +type BuildkiteDiffResult struct { + Name string + YAMLFile string + Equal bool + Diff string + Error error +} + +// BuildkiteDiff compares generated pipelines with existing YAML files. +// Returns the diff results for each pipeline. 
+func BuildkiteDiff() []BuildkiteDiffResult { + fmt.Println(">> buildkite:diff - Comparing generated pipelines with YAML files...") + + var results []BuildkiteDiffResult + anyDiff := false + + for _, p := range BuildkitePipelines { + result := BuildkiteDiffResult{ + Name: p.Name, + YAMLFile: p.YAMLFile, + } + + pl := p.Generator() + compareResult, err := pipeline.CompareWithFile(pl, p.YAMLFile) + if err != nil { + result.Error = err + fmt.Printf("\n--- %s ---\nError: %v\n", p.Name, err) + anyDiff = true + results = append(results, result) + continue + } + + result.Equal = compareResult.Equal + if !compareResult.Equal { + result.Diff = compareResult.Diff + anyDiff = true + fmt.Printf("\n--- %s (%s) ---\n", p.Name, p.YAMLFile) + fmt.Println(compareResult.Diff) + } + results = append(results, result) + } + + if !anyDiff { + fmt.Println(">> buildkite:diff - No differences found!") + } + + return results +} diff --git a/dev-tools/mage/buildkite_test.go b/dev-tools/mage/buildkite_test.go new file mode 100644 index 00000000000..b9fa249bd8d --- /dev/null +++ b/dev-tools/mage/buildkite_test.go @@ -0,0 +1,192 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License 2.0; +// you may not use this file except in compliance with the Elastic License 2.0. + +package mage + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// findRepoRoot finds the repository root by looking for go.mod. 
+func findRepoRoot(t *testing.T) string { + t.Helper() + + dir, err := os.Getwd() + require.NoError(t, err, "failed to get working directory") + + for { + if _, err := os.Stat(filepath.Join(dir, "go.mod")); err == nil { + return dir + } + parent := filepath.Dir(dir) + require.NotEqual(t, parent, dir, "could not find repository root (go.mod)") + dir = parent + } +} + +func TestBuildkitePipelinesRegistered(t *testing.T) { + // Verify that all expected pipelines are registered + expectedPipelines := []string{ + "GCECleanup", + "AgentlessAppRelease", + "Pipeline", + "IntegrationPipeline", + "ElasticAgentPackage", + "BKIntegrationPipeline", + "BKIntegrationFIPSPipeline", + } + + actualNames := make([]string, len(BuildkitePipelines)) + for i, p := range BuildkitePipelines { + actualNames[i] = p.Name + } + + assert.Equal(t, expectedPipelines, actualNames, "pipeline registry mismatch") +} + +func TestBuildkitePipelineYAMLFiles(t *testing.T) { + // Verify that all pipeline YAML files are in the .buildkite directory + for _, p := range BuildkitePipelines { + assert.True(t, filepath.HasPrefix(p.YAMLFile, ".buildkite/"), + "pipeline %s YAML file %q should start with .buildkite/", p.Name, p.YAMLFile) + } +} + +func TestBuildkitePipelineGenerators(t *testing.T) { + // Verify that all pipeline generators return non-nil pipelines + for _, p := range BuildkitePipelines { + t.Run(p.Name, func(t *testing.T) { + pl := p.Generator() + require.NotNil(t, pl, "generator for %s returned nil pipeline", p.Name) + }) + } +} + +func TestBuildkiteGeneratePipeline(t *testing.T) { + // Test generating a known pipeline (outputs to stdout) + err := BuildkiteGeneratePipeline("GCECleanup") + require.NoError(t, err, "should generate GCECleanup pipeline") +} + +func TestBuildkiteGeneratePipelineNotFound(t *testing.T) { + // Test generating a non-existent pipeline + err := BuildkiteGeneratePipeline("NonExistentPipeline") + require.Error(t, err, "should error for non-existent pipeline") + 
assert.Contains(t, err.Error(), "not found") +} + +func TestBuildkiteIndividualPipelineFunctions(t *testing.T) { + // Test each individual pipeline function + tests := []struct { + name string + fn func() error + }{ + {"BuildkitePipeline", BuildkitePipeline}, + {"BuildkiteIntegration", BuildkiteIntegration}, + {"BuildkiteIntegrationFull", BuildkiteIntegrationFull}, + {"BuildkiteIntegrationFIPS", BuildkiteIntegrationFIPS}, + {"BuildkitePackage", BuildkitePackage}, + {"BuildkiteAgentlessRelease", BuildkiteAgentlessRelease}, + {"BuildkiteGCECleanup", BuildkiteGCECleanup}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.fn() + require.NoError(t, err, "%s should succeed", tt.name) + }) + } +} + +func TestBuildkiteValidatePipeline(t *testing.T) { + // Change to repo root so relative paths work + repoRoot := findRepoRoot(t) + originalDir, err := os.Getwd() + require.NoError(t, err) + err = os.Chdir(repoRoot) + require.NoError(t, err) + defer func() { + _ = os.Chdir(originalDir) + }() + + // Test validating a known pipeline + result, err := BuildkiteValidatePipeline("GCECleanup") + require.NoError(t, err, "GCECleanup validation should succeed") + assert.True(t, result.Valid, "GCECleanup should be valid") + assert.Equal(t, "GCECleanup", result.Name) + assert.Equal(t, ".buildkite/pipeline.elastic-agent-gce-cleanup.yml", result.YAMLFile) +} + +func TestBuildkiteValidatePipelineNotFound(t *testing.T) { + // Test validating a non-existent pipeline + result, err := BuildkiteValidatePipeline("NonExistentPipeline") + require.Error(t, err, "should error for non-existent pipeline") + assert.Nil(t, result) + assert.Contains(t, err.Error(), "not found") +} + +func TestBuildkiteValidateAllPipelines(t *testing.T) { + // Change to repo root so relative paths work + repoRoot := findRepoRoot(t) + originalDir, err := os.Getwd() + require.NoError(t, err) + err = os.Chdir(repoRoot) + require.NoError(t, err) + defer func() { + _ = os.Chdir(originalDir) + 
}() + + // Test validating all pipelines + results, err := BuildkiteValidate() + require.NoError(t, err, "all pipeline validations should succeed") + + // Verify we got results for all pipelines + assert.Len(t, results, len(BuildkitePipelines), "should have results for all pipelines") + + // Verify all results are valid + for _, r := range results { + assert.True(t, r.Valid, "pipeline %s should be valid", r.Name) + assert.Nil(t, r.Error, "pipeline %s should have no error", r.Name) + assert.Empty(t, r.Differences, "pipeline %s should have no differences", r.Name) + } +} + +func TestBuildkiteDiff(t *testing.T) { + // Change to repo root so relative paths work + repoRoot := findRepoRoot(t) + originalDir, err := os.Getwd() + require.NoError(t, err) + err = os.Chdir(repoRoot) + require.NoError(t, err) + defer func() { + _ = os.Chdir(originalDir) + }() + + // Test diff for all pipelines + results := BuildkiteDiff() + + // Verify we got results for all pipelines + assert.Len(t, results, len(BuildkitePipelines), "should have results for all pipelines") + + // Verify all results have no errors + for _, r := range results { + assert.Nil(t, r.Error, "pipeline %s should have no error", r.Name) + } +} + +func TestPipelineDefinitionFields(t *testing.T) { + // Verify that all pipeline definitions have required fields + for _, p := range BuildkitePipelines { + t.Run(p.Name, func(t *testing.T) { + assert.NotEmpty(t, p.Name, "pipeline should have a name") + assert.NotNil(t, p.Generator, "pipeline should have a generator") + assert.NotEmpty(t, p.YAMLFile, "pipeline should have a YAML file path") + }) + } +} diff --git a/magefile.go b/magefile.go index 2414e3dc2fb..51c3f1b88fb 100644 --- a/magefile.go +++ b/magefile.go @@ -179,6 +179,9 @@ type Otel mg.Namespace // Devmachine namespace contains tasks related to remote development machines. type Devmachine mg.Namespace +// Buildkite namespace contains tasks for managing Buildkite pipeline YAML generation. 
+type Buildkite mg.Namespace + func CheckNoChanges() error { fmt.Println(">> fmt - go run") err := sh.RunV("go", "mod", "tidy", "-v") @@ -3850,6 +3853,60 @@ func (h Helm) RenderExamples() error { return nil } +// Pipeline outputs the main pipeline YAML to stdout. +// Usage: mage buildkite:pipeline | buildkite-agent pipeline upload +func (Buildkite) Pipeline() error { + return devtools.BuildkitePipeline() +} + +// Integration outputs the integration pipeline YAML to stdout. +// Usage: mage buildkite:integration | buildkite-agent pipeline upload +func (Buildkite) Integration() error { + return devtools.BuildkiteIntegration() +} + +// IntegrationFull outputs the full integration tests pipeline YAML to stdout. +// Usage: mage buildkite:integrationFull | buildkite-agent pipeline upload +func (Buildkite) IntegrationFull() error { + return devtools.BuildkiteIntegrationFull() +} + +// IntegrationFIPS outputs the FIPS integration tests pipeline YAML to stdout. +// Usage: mage buildkite:integrationFIPS | buildkite-agent pipeline upload +func (Buildkite) IntegrationFIPS() error { + return devtools.BuildkiteIntegrationFIPS() +} + +// Package outputs the Elastic Agent package pipeline YAML to stdout. +// Usage: mage buildkite:package | buildkite-agent pipeline upload +func (Buildkite) Package() error { + return devtools.BuildkitePackage() +} + +// AgentlessRelease outputs the agentless app release pipeline YAML to stdout. +// Usage: mage buildkite:agentlessRelease | buildkite-agent pipeline upload +func (Buildkite) AgentlessRelease() error { + return devtools.BuildkiteAgentlessRelease() +} + +// GceCleanup outputs the GCE cleanup pipeline YAML to stdout. +// Usage: mage buildkite:gceCleanup | buildkite-agent pipeline upload +func (Buildkite) GceCleanup() error { + return devtools.BuildkiteGCECleanup() +} + +// Validate validates that generated pipelines match the existing YAML files. 
+func (Buildkite) Validate() error { + _, err := devtools.BuildkiteValidate() + return err +} + +// Diff shows the diff between generated pipelines and existing YAML files. +func (Buildkite) Diff() error { + devtools.BuildkiteDiff() + return nil +} + // UpdateAgentVersion updates the agent version in the Elastic-Agent and EDOT-Collector Helm charts. func (Helm) UpdateAgentVersion() error { agentVersion := bversion.GetParsedAgentPackageVersion().CoreVersion() From 01a361399589f9555a4c66af3c54a26d89089a80 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Sun, 14 Dec 2025 19:07:28 +0100 Subject: [PATCH 06/10] Use mage for the GCE cleanup pipeline --- .../pipeline.elastic-agent-gce-cleanup.yml | 12 +-- dev-tools/mage/buildkite.go | 73 ++++++++++--------- dev-tools/mage/buildkite_test.go | 22 ++++-- 3 files changed, 62 insertions(+), 45 deletions(-) diff --git a/.buildkite/pipeline.elastic-agent-gce-cleanup.yml b/.buildkite/pipeline.elastic-agent-gce-cleanup.yml index 45f47ff308c..c30626170f4 100644 --- a/.buildkite/pipeline.elastic-agent-gce-cleanup.yml +++ b/.buildkite/pipeline.elastic-agent-gce-cleanup.yml @@ -1,12 +1,12 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json # Removes stale GCE instances having matching labels, name prefixes and older than 24 hours -# See gce-cleanup.sh and .buildkite/misc/gce-cleanup.yml -env: - VAULT_PATH: "kv/ci-shared/observability-ingest/cloud/gcp" +# This pipeline is dynamically generated from Go code. 
+# See: dev-tools/buildkite/pipelines/gce_cleanup.go steps: - - label: "GCE Cleanup" - key: "gce-cleanup" - command: ".buildkite/scripts/steps/gce-cleanup.sh" + - label: ":pipeline: Upload pipeline" + command: | + source .buildkite/scripts/common.sh + mage buildkite:gceCleanup | buildkite-agent pipeline upload agents: provider: "gcp" diff --git a/dev-tools/mage/buildkite.go b/dev-tools/mage/buildkite.go index b42f187bae1..b633ae16c88 100644 --- a/dev-tools/mage/buildkite.go +++ b/dev-tools/mage/buildkite.go @@ -16,20 +16,27 @@ import ( type PipelineDefinition struct { Name string Generator func() *pipeline.Pipeline - // YAMLFile is the path to the existing YAML file for validation/comparison. - YAMLFile string + // GoldenFile is the path to the golden file for validation/comparison. + // This is used to verify the generated pipeline matches expected output. + GoldenFile string + // Dynamic indicates whether this pipeline has been migrated to dynamic upload. + // Dynamic pipelines use a stub in .buildkite/ that calls mage to generate the pipeline. + Dynamic bool } +// goldenFileDir is the directory containing golden files for pipeline validation. +const goldenFileDir = "dev-tools/buildkite/pipelines/testdata" + // BuildkitePipelines is the list of all pipelines that can be generated. -// The YAMLFile field points to the existing static YAML file for validation. +// The GoldenFile field points to the golden file for validation. 
var BuildkitePipelines = []PipelineDefinition{ - {"GCECleanup", pipelines.GCECleanup, ".buildkite/pipeline.elastic-agent-gce-cleanup.yml"}, - {"AgentlessAppRelease", pipelines.AgentlessAppRelease, ".buildkite/pipeline.agentless-app-release.yaml"}, - {"Pipeline", pipelines.Pipeline, ".buildkite/pipeline.yml"}, - {"IntegrationPipeline", pipelines.IntegrationPipeline, ".buildkite/integration.pipeline.yml"}, - {"ElasticAgentPackage", pipelines.ElasticAgentPackage, ".buildkite/pipeline.elastic-agent-package.yml"}, - {"BKIntegrationPipeline", pipelines.BKIntegrationPipeline, ".buildkite/bk.integration.pipeline.yml"}, - {"BKIntegrationFIPSPipeline", pipelines.BKIntegrationFIPSPipeline, ".buildkite/bk.integration-fips.pipeline.yml"}, + {"GCECleanup", pipelines.GCECleanup, goldenFileDir + "/pipeline.elastic-agent-gce-cleanup.yml", true}, + {"AgentlessAppRelease", pipelines.AgentlessAppRelease, goldenFileDir + "/pipeline.agentless-app-release.yaml", false}, + {"Pipeline", pipelines.Pipeline, goldenFileDir + "/pipeline.yml", false}, + {"IntegrationPipeline", pipelines.IntegrationPipeline, goldenFileDir + "/integration.pipeline.yml", false}, + {"ElasticAgentPackage", pipelines.ElasticAgentPackage, goldenFileDir + "/pipeline.elastic-agent-package.yml", false}, + {"BKIntegrationPipeline", pipelines.BKIntegrationPipeline, goldenFileDir + "/bk.integration.pipeline.yml", false}, + {"BKIntegrationFIPSPipeline", pipelines.BKIntegrationFIPSPipeline, goldenFileDir + "/bk.integration-fips.pipeline.yml", false}, } // BuildkiteGeneratePipeline generates a pipeline by name and outputs YAML to stdout. @@ -90,28 +97,28 @@ func BuildkiteGCECleanup() error { // BuildkiteValidateResult contains the result of validating a single pipeline. type BuildkiteValidateResult struct { Name string - YAMLFile string + GoldenFile string Valid bool Error error Differences []string } -// BuildkiteValidate validates that generated pipelines match the existing YAML files. 
+// BuildkiteValidate validates that generated pipelines match the golden files. // Returns the validation results and an error if any pipeline doesn't match. func BuildkiteValidate() ([]BuildkiteValidateResult, error) { - fmt.Println(">> buildkite:validate - Validating Buildkite pipelines against YAML files...") + fmt.Println(">> buildkite:validate - Validating Buildkite pipelines against golden files...") var results []BuildkiteValidateResult var errs []string for _, p := range BuildkitePipelines { result := BuildkiteValidateResult{ - Name: p.Name, - YAMLFile: p.YAMLFile, + Name: p.Name, + GoldenFile: p.GoldenFile, } pl := p.Generator() - compareResult, err := pipeline.SemanticCompareWithFile(pl, p.YAMLFile) + compareResult, err := pipeline.SemanticCompareWithFile(pl, p.GoldenFile) if err != nil { result.Error = err errs = append(errs, fmt.Sprintf("%s: %v", p.Name, err)) @@ -127,10 +134,10 @@ func BuildkiteValidate() ([]BuildkiteValidateResult, error) { if !compareResult.Equal { result.Differences = compareResult.Differences errs = append(errs, fmt.Sprintf("%s: generated pipeline does not match %s:\n%s", - p.Name, p.YAMLFile, strings.Join(compareResult.Differences, "\n"))) + p.Name, p.GoldenFile, strings.Join(compareResult.Differences, "\n"))) } else { result.Valid = true - fmt.Printf(" ✓ %s matches %s\n", p.Name, p.YAMLFile) + fmt.Printf(" ✓ %s matches %s\n", p.Name, p.GoldenFile) } results = append(results, result) } @@ -153,12 +160,12 @@ func BuildkiteValidatePipeline(name string) (*BuildkiteValidateResult, error) { for _, p := range BuildkitePipelines { if p.Name == name { result := &BuildkiteValidateResult{ - Name: p.Name, - YAMLFile: p.YAMLFile, + Name: p.Name, + GoldenFile: p.GoldenFile, } pl := p.Generator() - compareResult, err := pipeline.SemanticCompareWithFile(pl, p.YAMLFile) + compareResult, err := pipeline.SemanticCompareWithFile(pl, p.GoldenFile) if err != nil { result.Error = err return result, err @@ -169,7 +176,7 @@ func 
BuildkiteValidatePipeline(name string) (*BuildkiteValidateResult, error) { } if !compareResult.Equal { result.Differences = compareResult.Differences - return result, fmt.Errorf("generated pipeline does not match %s", p.YAMLFile) + return result, fmt.Errorf("generated pipeline does not match %s", p.GoldenFile) } result.Valid = true return result, nil @@ -180,29 +187,29 @@ func BuildkiteValidatePipeline(name string) (*BuildkiteValidateResult, error) { // BuildkiteDiffResult contains the diff result for a single pipeline. type BuildkiteDiffResult struct { - Name string - YAMLFile string - Equal bool - Diff string - Error error + Name string + GoldenFile string + Equal bool + Diff string + Error error } -// BuildkiteDiff compares generated pipelines with existing YAML files. +// BuildkiteDiff compares generated pipelines with golden files. // Returns the diff results for each pipeline. func BuildkiteDiff() []BuildkiteDiffResult { - fmt.Println(">> buildkite:diff - Comparing generated pipelines with YAML files...") + fmt.Println(">> buildkite:diff - Comparing generated pipelines with golden files...") var results []BuildkiteDiffResult anyDiff := false for _, p := range BuildkitePipelines { result := BuildkiteDiffResult{ - Name: p.Name, - YAMLFile: p.YAMLFile, + Name: p.Name, + GoldenFile: p.GoldenFile, } pl := p.Generator() - compareResult, err := pipeline.CompareWithFile(pl, p.YAMLFile) + compareResult, err := pipeline.CompareWithFile(pl, p.GoldenFile) if err != nil { result.Error = err fmt.Printf("\n--- %s ---\nError: %v\n", p.Name, err) @@ -215,7 +222,7 @@ func BuildkiteDiff() []BuildkiteDiffResult { if !compareResult.Equal { result.Diff = compareResult.Diff anyDiff = true - fmt.Printf("\n--- %s (%s) ---\n", p.Name, p.YAMLFile) + fmt.Printf("\n--- %s (%s) ---\n", p.Name, p.GoldenFile) fmt.Println(compareResult.Diff) } results = append(results, result) diff --git a/dev-tools/mage/buildkite_test.go b/dev-tools/mage/buildkite_test.go index b9fa249bd8d..a6e6778f5b2 
100644 --- a/dev-tools/mage/buildkite_test.go +++ b/dev-tools/mage/buildkite_test.go @@ -7,6 +7,7 @@ package mage import ( "os" "path/filepath" + "strings" "testing" "github.com/stretchr/testify/assert" @@ -50,11 +51,11 @@ func TestBuildkitePipelinesRegistered(t *testing.T) { assert.Equal(t, expectedPipelines, actualNames, "pipeline registry mismatch") } -func TestBuildkitePipelineYAMLFiles(t *testing.T) { - // Verify that all pipeline YAML files are in the .buildkite directory +func TestBuildkitePipelineGoldenFiles(t *testing.T) { + // Verify that all pipeline golden files are in the testdata directory for _, p := range BuildkitePipelines { - assert.True(t, filepath.HasPrefix(p.YAMLFile, ".buildkite/"), - "pipeline %s YAML file %q should start with .buildkite/", p.Name, p.YAMLFile) + assert.True(t, strings.HasPrefix(p.GoldenFile, "dev-tools/buildkite/pipelines/testdata/"), + "pipeline %s golden file %q should be in testdata/", p.Name, p.GoldenFile) } } @@ -120,7 +121,7 @@ func TestBuildkiteValidatePipeline(t *testing.T) { require.NoError(t, err, "GCECleanup validation should succeed") assert.True(t, result.Valid, "GCECleanup should be valid") assert.Equal(t, "GCECleanup", result.Name) - assert.Equal(t, ".buildkite/pipeline.elastic-agent-gce-cleanup.yml", result.YAMLFile) + assert.Equal(t, "dev-tools/buildkite/pipelines/testdata/pipeline.elastic-agent-gce-cleanup.yml", result.GoldenFile) } func TestBuildkiteValidatePipelineNotFound(t *testing.T) { @@ -186,7 +187,16 @@ func TestPipelineDefinitionFields(t *testing.T) { t.Run(p.Name, func(t *testing.T) { assert.NotEmpty(t, p.Name, "pipeline should have a name") assert.NotNil(t, p.Generator, "pipeline should have a generator") - assert.NotEmpty(t, p.YAMLFile, "pipeline should have a YAML file path") + assert.NotEmpty(t, p.GoldenFile, "pipeline should have a golden file path") }) } } + +func TestDynamicPipelines(t *testing.T) { + // Verify that GCECleanup is marked as dynamic + for _, p := range BuildkitePipelines { + 
if p.Name == "GCECleanup" { + assert.True(t, p.Dynamic, "GCECleanup should be marked as dynamic") + } + } +} From cf8a70f6658873466d95366cb6bad9041f5434ba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Sun, 14 Dec 2025 21:09:38 +0100 Subject: [PATCH 07/10] Use mage for the integration pipeline --- .buildkite/integration.pipeline.yml | 247 ++-------------------------- dev-tools/mage/buildkite.go | 2 +- dev-tools/mage/buildkite_test.go | 11 +- 3 files changed, 19 insertions(+), 241 deletions(-) diff --git a/.buildkite/integration.pipeline.yml b/.buildkite/integration.pipeline.yml index 79179d427e8..b057d122fa4 100644 --- a/.buildkite/integration.pipeline.yml +++ b/.buildkite/integration.pipeline.yml @@ -1,242 +1,15 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json +# Integration tests pipeline - dynamically generated from Go code. +# See: dev-tools/buildkite/pipelines/integration_pipeline.go env: - VAULT_PATH: "kv/ci-shared/observability-ingest/cloud/gcp" - ASDF_MAGE_VERSION: 1.14.0 - - # Make Docker builds less verbose - BUILDKIT_PROGRESS: plain - - # The following images are defined here and their values will be updated by updatecli - # Please do not change them manually. 
- IMAGE_UBUNTU_2204_X86_64: "platform-ingest-elastic-agent-ubuntu-2204-1762801856" - IMAGE_UBUNTU_2204_ARM_64: "platform-ingest-elastic-agent-ubuntu-2204-aarch64-1762801856" - -common: - - vault_docker_login: &vault_docker_login - elastic/vault-docker-login#v0.5.2: - secret_path: 'kv/ci-shared/platform-ingest/elastic_docker_registry' + IMAGE_UBUNTU_2404_X86_64: "platform-ingest-elastic-agent-ubuntu-2404-1762801856" steps: - - group: "Integration tests: packaging" - key: "int-packaging" - notify: - - github_commit_status: - context: "buildkite/elastic-agent - Packaging" - steps: - # Build matrix is not used for packaging in favor to unique step keys - # Packaging amd64 non-containers - - label: ":package: amd64: zip,tar.gz,rpm,deb " - key: packaging-amd64 - env: - PLATFORMS: "windows/amd64,linux/amd64" - PACKAGES: "zip,tar.gz,rpm,deb" - command: ".buildkite/scripts/steps/integration-package.sh" - artifact_paths: - - build/distributions/** - retry: - automatic: - limit: 1 - agents: - provider: "gcp" - machineType: "n2-standard-8" - image: "${IMAGE_UBUNTU_2204_X86_64}" - - # Temporary build step to verify building the elastic-agent works. - # This will be removed once OTEL_COMPONENT is removed, because it - # became the default. 
- - label: ":package: amd64: OTEL_COMPONENT zip,tar.gz,rpm,deb " - key: packaging-amd64-otel-component - env: - PLATFORMS: "windows/amd64,linux/amd64" - PACKAGES: "zip,tar.gz,rpm,deb" - OTEL_COMPONENT: "true" - command: ".buildkite/scripts/steps/integration-package.sh" - artifact_paths: - - build/distributions/** - retry: - automatic: - limit: 1 - agents: - provider: "gcp" - machineType: "n2-standard-8" - image: "${IMAGE_UBUNTU_2204_X86_64}" - - - label: ":package: amd64: FIPS tar.gz" - key: "packaging-amd64-fips" - env: - PACKAGES: "tar.gz" - PLATFORMS: "linux/amd64" - FIPS: "true" - command: ".buildkite/scripts/steps/integration-package.sh" - artifact_paths: - - build/distributions/** - retry: - automatic: - limit: 1 - agents: - provider: "gcp" - machineType: "n2-standard-4" - image: "${IMAGE_UBUNTU_2204_X86_64}" - - # Packaging linux and windows for arm64 - - label: ":package: arm64: zip,tar.gz" - key: packaging-arm64 - env: - PLATFORMS: "windows/arm64,linux/arm64" - PACKAGES: "tar.gz,zip" - command: ".buildkite/scripts/steps/integration-package.sh" - artifact_paths: - - build/distributions/** - retry: - automatic: - limit: 1 - agents: - provider: "aws" - instanceType: "c6g.2xlarge" - image: "${IMAGE_UBUNTU_2204_ARM_64}" - - # Temporary build step to verify building the elastic-agent works. - # This will be removed once OTEL_COMPONENT is removed, because it - # became the default. 
- - label: ":package: arm64: OTEL_COMPONENT zip,tar.gz" - key: packaging-arm64-otel-component - env: - PLATFORMS: "windows/arm64,linux/arm64" - PACKAGES: "tar.gz,zip" - OTEL_COMPONENT: "true" - command: ".buildkite/scripts/steps/integration-package.sh" - artifact_paths: - - build/distributions/** - retry: - automatic: - limit: 1 - agents: - provider: "aws" - instanceType: "c6g.2xlarge" - image: "${IMAGE_UBUNTU_2204_ARM_64}" - - - label: ":package: arm64: FIPS tar.gz" - key: "packaging-arm64-fips" - env: - PACKAGES: "tar.gz" - PLATFORMS: "linux/arm64" - FIPS: "true" - command: ".buildkite/scripts/steps/integration-package.sh" - artifact_paths: - - build/distributions/** - retry: - automatic: - limit: 1 - agents: - provider: "aws" - instanceType: "c6g.2xlarge" - image: "${IMAGE_UBUNTU_2204_ARM_64}" - - - label: ":package: amd64: Containers" - key: packaging-containers-amd64 - env: - PACKAGES: "docker" - PLATFORMS: "linux/amd64" - command: | - .buildkite/scripts/steps/integration-package.sh - .buildkite/scripts/steps/integration-cloud-image-push.sh - artifact_paths: - - build/distributions/** - agents: - provider: "gcp" - machineType: "n2-standard-8" - diskSizeGb: 200 - image: "${IMAGE_UBUNTU_2204_X86_64}" - plugins: - - *vault_docker_login - - - label: ":package: arm64: Containers" - key: packaging-containers-arm64 - env: - PACKAGES: "docker" - PLATFORMS: "linux/arm64" - command: | - .buildkite/scripts/steps/integration-package.sh - artifact_paths: - - build/distributions/** - agents: - provider: "aws" - instanceType: "c6g.4xlarge" - diskSizeGb: 200 - image: "${IMAGE_UBUNTU_2204_ARM_64}" - - - label: ":package: amd64: FIPS Containers" - key: packaging-containers-amd64-fips - env: - PACKAGES: "docker" - PLATFORMS: "linux/amd64" - FIPS: "true" - command: | - .buildkite/scripts/steps/integration-package.sh - .buildkite/scripts/steps/integration-cloud-image-push.sh - artifact_paths: - - build/distributions/** - agents: - provider: "gcp" - machineType: "n2-standard-8" - 
diskSizeGb: 200 - image: "${IMAGE_UBUNTU_2204_X86_64}" - plugins: - - *vault_docker_login - - - label: ":package: arm64: FIPS Containers" - key: packaging-containers-arm64-fips - env: - PACKAGES: "docker" - PLATFORMS: "linux/arm64" - FIPS: "true" - command: | - .buildkite/scripts/steps/integration-package.sh - artifact_paths: - - build/distributions/** - agents: - provider: "aws" - instanceType: "c6g.2xlarge" - diskSizeGb: 200 - image: "${IMAGE_UBUNTU_2204_ARM_64}" - - - label: ":package: amd64: OTEL_COMPONENT Containers" - key: packaging-containers-amd64-otel-component - env: - PACKAGES: "docker" - PLATFORMS: "linux/amd64" - OTEL_COMPONENT: "true" - command: | - .buildkite/scripts/steps/integration-package.sh - artifact_paths: - - build/distributions/** - agents: - provider: "gcp" - machineType: "n2-standard-8" - diskSizeGb: 200 - image: "${IMAGE_UBUNTU_2204_X86_64}" - plugins: - - *vault_docker_login - - - label: ":package: arm64: OTEL_COMPONENT Containers" - key: packaging-containers-arm64-otel-component - env: - PACKAGES: "docker" - PLATFORMS: "linux/arm64" - OTEL_COMPONENT: "true" - command: | - .buildkite/scripts/steps/integration-package.sh - artifact_paths: - - build/distributions/** - agents: - provider: "aws" - instanceType: "c6g.2xlarge" - diskSizeGb: 200 - image: "${IMAGE_UBUNTU_2204_ARM_64}" - - - label: "Triggering Integration tests" - command: "buildkite-agent pipeline upload .buildkite/bk.integration.pipeline.yml" - - - label: "Triggering custom FIPS integration tests" - command: "buildkite-agent pipeline upload .buildkite/bk.integration-fips.pipeline.yml" + - label: ":pipeline: Upload pipeline" + command: | + source .buildkite/scripts/common.sh + mage buildkite:integration | buildkite-agent pipeline upload + agents: + provider: "gcp" + image: "${IMAGE_UBUNTU_2204_X86_64}" diff --git a/dev-tools/mage/buildkite.go b/dev-tools/mage/buildkite.go index b633ae16c88..88d329a009f 100644 --- a/dev-tools/mage/buildkite.go +++ b/dev-tools/mage/buildkite.go @@ 
-33,7 +33,7 @@ var BuildkitePipelines = []PipelineDefinition{ {"GCECleanup", pipelines.GCECleanup, goldenFileDir + "/pipeline.elastic-agent-gce-cleanup.yml", true}, {"AgentlessAppRelease", pipelines.AgentlessAppRelease, goldenFileDir + "/pipeline.agentless-app-release.yaml", false}, {"Pipeline", pipelines.Pipeline, goldenFileDir + "/pipeline.yml", false}, - {"IntegrationPipeline", pipelines.IntegrationPipeline, goldenFileDir + "/integration.pipeline.yml", false}, + {"IntegrationPipeline", pipelines.IntegrationPipeline, goldenFileDir + "/integration.pipeline.yml", true}, {"ElasticAgentPackage", pipelines.ElasticAgentPackage, goldenFileDir + "/pipeline.elastic-agent-package.yml", false}, {"BKIntegrationPipeline", pipelines.BKIntegrationPipeline, goldenFileDir + "/bk.integration.pipeline.yml", false}, {"BKIntegrationFIPSPipeline", pipelines.BKIntegrationFIPSPipeline, goldenFileDir + "/bk.integration-fips.pipeline.yml", false}, diff --git a/dev-tools/mage/buildkite_test.go b/dev-tools/mage/buildkite_test.go index a6e6778f5b2..a8141dd4357 100644 --- a/dev-tools/mage/buildkite_test.go +++ b/dev-tools/mage/buildkite_test.go @@ -193,10 +193,15 @@ func TestPipelineDefinitionFields(t *testing.T) { } func TestDynamicPipelines(t *testing.T) { - // Verify that GCECleanup is marked as dynamic + // Verify that migrated pipelines are marked as dynamic + dynamicPipelines := map[string]bool{ + "GCECleanup": true, + "IntegrationPipeline": true, + } + for _, p := range BuildkitePipelines { - if p.Name == "GCECleanup" { - assert.True(t, p.Dynamic, "GCECleanup should be marked as dynamic") + if dynamicPipelines[p.Name] { + assert.True(t, p.Dynamic, "%s should be marked as dynamic", p.Name) } } } From 5e618a674ea032b38b1c1245eaa224990e8048c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Tue, 16 Dec 2025 12:20:02 +0100 Subject: [PATCH 08/10] Use mage for the main pipeline --- .buildkite/pipeline.yml | 392 +----------------------------------- 
dev-tools/mage/buildkite.go | 2 +- 2 files changed, 9 insertions(+), 385 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 5d6836699d6..bffec066a60 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -1,392 +1,16 @@ # yaml-language-server: $schema=https://raw.githubusercontent.com/buildkite/pipeline-schema/main/schema.json -env: - VAULT_PATH: "kv/ci-shared/observability-ingest/cloud/gcp" +# Main CI pipeline - dynamically generated from Go code. +# See: dev-tools/buildkite/pipelines/pipeline.go - # The following images are defined here and their values will be updated by updatecli - # Please do not change them manually. - IMAGE_UBUNTU_2204_X86_64: "platform-ingest-elastic-agent-ubuntu-2204-1762801856" - IMAGE_UBUNTU_2204_ARM_64: "platform-ingest-elastic-agent-ubuntu-2204-aarch64-1762801856" - IMAGE_WIN_2016: "platform-ingest-elastic-agent-windows-2016-1762801856" - IMAGE_WIN_2022: "platform-ingest-elastic-agent-windows-2022-1762801856" - IMAGE_WIN_10: "platform-ingest-elastic-agent-windows-10-1764775167" - IMAGE_WIN_11: "platform-ingest-elastic-agent-windows-11-1764775167" +env: + IMAGE_UBUNTU_2404_X86_64: "platform-ingest-elastic-agent-ubuntu-2404-1762801856" steps: - - label: "check-ci" - key: "check-ci" - command: ".buildkite/scripts/steps/check-ci.sh" - agents: - provider: "gcp" - image: "${IMAGE_UBUNTU_2204_X86_64}" - retry: - manual: - allowed: true - - - group: "Unit tests" - key: "unit-tests" - steps: - - label: "Unit tests - Ubuntu 22.04" - key: "unit-tests-2204" - command: ".buildkite/scripts/steps/unit-tests.sh" - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "gcp" - image: "${IMAGE_UBUNTU_2204_X86_64}" - machineType: "n2-standard-8" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - label: "Unit tests - Ubuntu 22.04 with requirefips build tag" - key: "unit-tests-2204-fips-tag" - command: 
".buildkite/scripts/steps/unit-tests.sh" - env: - FIPS: "true" - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "gcp" - image: "${IMAGE_UBUNTU_2204_X86_64}" - machineType: "n2-standard-8" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - label: "Unit tests - fips140=only Ubuntu 22.04" - key: "unit-tests-2204-fips140-only" - # Note: The GODEBUG=fips140=only environment variable must be set in the command itself (as opposed to - # in the env block) so that it is applied *only* to the 'go' command invoked by the script, and - # not to any other Go code executed as part of the Buildkite agent itself. - command: 'GODEBUG="fips140=only" .buildkite/scripts/steps/unit-tests.sh' - env: - FIPS: "true" - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "gcp" - image: "${IMAGE_UBUNTU_2204_X86_64}" - machineType: "n2-standard-8" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - label: "Unit tests - Ubuntu 22.04 ARM64" - key: "unit-tests-2204-arm64" - command: ".buildkite/scripts/steps/unit-tests.sh" - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "aws" - image: "${IMAGE_UBUNTU_2204_ARM_64}" - diskSizeGb: 200 - instanceType: "m6g.xlarge" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - label: "Unit tests - Windows 2022" - key: "unit-tests-win2022" - command: .buildkite/scripts/steps/unit-tests.ps1 - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "gcp" - image: "${IMAGE_WIN_2022}" - machineType: "n2-standard-8" - disk_size: 200 - disk_type: "pd-ssd" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - label: "Unit tests - Windows 2016" - key: "unit-tests-win2016" - command: 
.buildkite/scripts/steps/unit-tests.ps1 - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "gcp" - image: "${IMAGE_WIN_2016}" - machineType: "n2-standard-8" - disk_size: 200 - disk_type: "pd-ssd" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - group: "macOS tests" - key: "macos-unit-tests" - steps: - - label: "Unit tests - macOS 15 ARM" - command: ".buildkite/scripts/steps/unit-tests.sh" - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: orka - imagePrefix: generic-base-15-arm-002 - retry: - automatic: - limit: 1 - manual: - allowed: true - - # Runs only on main and release branches - - label: "Unit tests - macOS 13" - command: ".buildkite/scripts/steps/unit-tests.sh" - branches: "main 8.* 9.*" - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: orka - imagePrefix: generic-13-ventura-x64 - retry: - automatic: - limit: 1 - manual: - allowed: true - - - group: "Desktop Windows tests" - key: "extended-windows" - steps: - - label: "Unit tests - Windows 10" - key: "unit-tests-win10" - command: .buildkite/scripts/steps/unit-tests.ps1 - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "gcp" - image: "${IMAGE_WIN_10}" - machineType: "n2-standard-8" - disk_type: "pd-ssd" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - label: "Unit tests - Windows 11" - key: "unit-tests-win11" - command: .buildkite/scripts/steps/unit-tests.ps1 - artifact_paths: - - "build/TEST-*.html" - - "build/TEST-*.xml" - - "build/diagnostics/*" - - "coverage-*.out" - agents: - provider: "gcp" - image: "${IMAGE_WIN_11}" - machineType: "n2-standard-8" - disk_type: "pd-ssd" - retry: - automatic: - limit: 1 - manual: - allowed: true - - - label: 
":junit: Junit annotate" - agents: - # requires at least "bash", "curl" and "git" - image: "docker.elastic.co/ci-agent-images/buildkite-junit-annotate:1.0" - plugins: - - junit-annotate#v2.7.0: - artifacts: "**TEST-*.xml" - always-annotate: true - run-in-docker: false - depends_on: - - step: "unit-tests-2204" - allow_failure: true - - step: "unit-tests-2204-fips-tag" - allow_failure: true - - step: "unit-tests-2204-fips140-only" - allow_failure: true - - step: "unit-tests-2204-arm64" - allow_failure: true - - step: "unit-tests-win2022" - allow_failure: true - - step: "unit-tests-win2016" - allow_failure: true - - step: "macos-unit-tests" - allow_failure: true - - step: "unit-tests-win10" - allow_failure: true - - step: "unit-tests-win11" - allow_failure: true - - - group: "K8s tests" - key: "k8s-tests" - steps: - - label: "K8s tests: {{matrix.k8s_version}}" - env: - K8S_VERSION: "v{{matrix.k8s_version}}" - KIND_VERSION: "v0.27.0" - command: ".buildkite/scripts/steps/k8s-tests.sh" - agents: - provider: "gcp" - image: "${IMAGE_UBUNTU_2204_X86_64}" - matrix: - setup: - k8s_version: - - "1.33.0" - - "1.32.0" - - "1.31.0" - - "1.30.0" - - "1.29.4" - - "1.28.9" - retry: - manual: - allowed: true - - # Triggers a dynamic step: Sync K8s - # Runs only on main and if k8s files are changed - - label: "Trigger k8s sync" - branches: main - command: ".buildkite/scripts/steps/sync-k8s.sh" + - label: ":pipeline: Upload pipeline" + command: | + source .buildkite/scripts/common.sh + mage buildkite:pipeline | buildkite-agent pipeline upload agents: provider: "gcp" image: "${IMAGE_UBUNTU_2204_X86_64}" - env: - GH_VERSION: "2.4.0" - if_changed: - include: - - deploy/kubernetes/* - - version/docs/version.asciidoc - - # Trigger for pull requests - - label: "Trigger Extended tests for Pull request" - if: | - (build.pull_request.id != null && !build.env("GITHUB_PR_LABELS") =~ /skip-it/) || - build.env("GITHUB_PR_TRIGGER_COMMENT") =~ /.*extended.*/ - command: "buildkite-agent pipeline 
upload .buildkite/integration.pipeline.yml" - env: - BUILDKITE_PULL_REQUEST: ${BUILDKITE_PULL_REQUEST} - BUILDKITE_PULL_REQUEST_BASE_BRANCH: ${BUILDKITE_PULL_REQUEST_BASE_BRANCH} - GITHUB_PR_LABELS: ${GITHUB_PR_LABELS} - if_changed: - include: - - internal/** - - dev-tools/** - - pkg/** - - deploy/** - - test_infra/** - - testing/** - - version/** - - specs/** - - .agent-versions.json - - .go-version - - .package-version - - go.mod - - go.sum - - magefile.go - - main.go - - .buildkite/integration.pipeline.yml - - .buildkite/bk.integration.pipeline.yml - - .buildkite/bk.integration-fips.pipeline.yml - - .buildkite/pipeline.yml - - .buildkite/scripts/** - - .buildkite/hooks/** - - # Trigger for branches - - label: "Triggering Extended tests for branches" - if: build.pull_request.id == null - trigger: "elastic-agent-extended-testing" - build: - commit: "${BUILDKITE_COMMIT}" - branch: "${BUILDKITE_BRANCH}" - - # Trigger for pull requests - Serverless Beats Tests - # this should help detecting issues earlier in the development cycle - - label: "Trigger Serverless Beats Tests" - if: build.pull_request.id != null - trigger: "beats-agent-serverless-tests" - build: - commit: "${BUILDKITE_COMMIT}" - branch: "${BUILDKITE_BRANCH}" - if_changed: - include: - - .buildkite/serverless.beats.tests.yml - - .buildkite/scripts/steps/beats_tests.sh - - .buildkite/hooks/pre-command - - # NOTE: This should help detecting issues earlier in the development cycle - # See https://github.com/elastic/elastic-agent/issues/11604 - - label: "Trigger Elastic Agent Package" - if: build.pull_request.id != null - commands: - - .buildkite/scripts/steps/trigger-elastic-agent-package.sh - - .buildkite/scripts/steps/trigger-elastic-agent-package.sh | buildkite-agent pipeline upload - if_changed: - include: - - .buildkite/pipeline.elastic-agent-package.yml - - .buildkite/scripts/steps/package.sh - - .buildkite/scripts/steps/trigger-elastic-agent-package.sh - - magefile.go - - dev-tools/**/* - - # NOTE: 
This should help detecting issues earlier in the development cycle - # See https://github.com/elastic/elastic-agent/pull/11725 - - label: "DRY RUN publish to serverless" - if: build.pull_request.id != null && build.env("BUILDKITE_PULL_REQUEST_BASE_BRANCH") == "main" - if_changed: - include: - - .buildkite/pipeline.yml - - .buildkite/pipeline.agentless-app-release.yaml - - .buildkite/scripts/steps/ecp-internal-release.sh - - .buildkite/scripts/steps/integration-package.sh - - .buildkite/scripts/steps/validate-agentless-docker-image.sh - trigger: "agentless-serverless-release" - build: - commit: "${BUILDKITE_COMMIT}" - branch: "${BUILDKITE_BRANCH}" - message: "publish to serverless (dry-run) #${BUILDKITE_PULL_REQUEST}" - env: - DRY_RUN: "true" - - # wait for CI to be done - - wait: ~ - - - label: "Publish to serverless" - branches: main - trigger: "agentless-serverless-release" - build: - commit: "${BUILDKITE_COMMIT}" diff --git a/dev-tools/mage/buildkite.go b/dev-tools/mage/buildkite.go index 88d329a009f..edab55edacb 100644 --- a/dev-tools/mage/buildkite.go +++ b/dev-tools/mage/buildkite.go @@ -32,7 +32,7 @@ const goldenFileDir = "dev-tools/buildkite/pipelines/testdata" var BuildkitePipelines = []PipelineDefinition{ {"GCECleanup", pipelines.GCECleanup, goldenFileDir + "/pipeline.elastic-agent-gce-cleanup.yml", true}, {"AgentlessAppRelease", pipelines.AgentlessAppRelease, goldenFileDir + "/pipeline.agentless-app-release.yaml", false}, - {"Pipeline", pipelines.Pipeline, goldenFileDir + "/pipeline.yml", false}, + {"Pipeline", pipelines.Pipeline, goldenFileDir + "/pipeline.yml", true}, {"IntegrationPipeline", pipelines.IntegrationPipeline, goldenFileDir + "/integration.pipeline.yml", true}, {"ElasticAgentPackage", pipelines.ElasticAgentPackage, goldenFileDir + "/pipeline.elastic-agent-package.yml", false}, {"BKIntegrationPipeline", pipelines.BKIntegrationPipeline, goldenFileDir + "/bk.integration.pipeline.yml", false}, From ab64b653cae87fe237c64ed5267e10f981a6084d Mon 
Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Tue, 16 Dec 2025 13:19:38 +0100 Subject: [PATCH 09/10] Add debug logging to pipeline upload --- .buildkite/pipeline.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index bffec066a60..a3c4334996b 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -10,7 +10,7 @@ steps: - label: ":pipeline: Upload pipeline" command: | source .buildkite/scripts/common.sh - mage buildkite:pipeline | buildkite-agent pipeline upload + mage buildkite:pipeline | buildkite-agent pipeline upload --debug agents: provider: "gcp" image: "${IMAGE_UBUNTU_2204_X86_64}" From abecef533b445e15e4c7cd9861fd69e9f487cc67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Miko=C5=82aj=20=C5=9Awi=C4=85tek?= Date: Tue, 16 Dec 2025 16:49:51 +0100 Subject: [PATCH 10/10] Fix wait step conversion --- .buildkite/pipeline.agentless-app-release.yaml | 6 +++--- dev-tools/buildkite/pipeline/pipeline.go | 5 ++--- .../buildkite/pipeline/testdata/example_pipeline.yml | 2 +- dev-tools/buildkite/pipelines/pipelines_test.go | 11 +++++++++++ .../testdata/pipeline.agentless-app-release.yaml | 6 +++--- dev-tools/buildkite/pipelines/testdata/pipeline.yml | 2 +- 6 files changed, 21 insertions(+), 11 deletions(-) diff --git a/.buildkite/pipeline.agentless-app-release.yaml b/.buildkite/pipeline.agentless-app-release.yaml index 81b2dbb4099..9370eb72ae6 100644 --- a/.buildkite/pipeline.agentless-app-release.yaml +++ b/.buildkite/pipeline.agentless-app-release.yaml @@ -41,7 +41,7 @@ steps: diskSizeGb: 400 # wait for packaging to be done - - wait: ~ + - wait - label: "Publish to internal registry" key: "mirror-elastic-agent" @@ -53,7 +53,7 @@ steps: - *docker_login_plugin # wait for metadata to be set - - wait: ~ + - wait - label: ":docker: Validate docker image is built for all architectures" command: ".buildkite/scripts/steps/validate-agentless-docker-image.sh" @@ -66,7 +66,7 
@@ steps: secret_path: 'kv/ci-shared/platform-ingest/elastic_docker_registry' # wait for image architecture validation - - wait: ~ + - wait - label: ":grey_question: Promote agentless app release if validation passes" # DRY_RUN will help to skip this step when set to true diff --git a/dev-tools/buildkite/pipeline/pipeline.go b/dev-tools/buildkite/pipeline/pipeline.go index aa83d038302..543b6a2fb08 100644 --- a/dev-tools/buildkite/pipeline/pipeline.go +++ b/dev-tools/buildkite/pipeline/pipeline.go @@ -73,9 +73,8 @@ func (p *Pipeline) Add(step any) *Pipeline { // Wait adds a wait step to the pipeline. func (p *Pipeline) Wait() *Pipeline { - p.AddStep(&buildkite.WaitStep{ - Wait: Ptr(""), - }) + wait := buildkite.StringWaitStep("wait") + p.AddStep(wait) return p } diff --git a/dev-tools/buildkite/pipeline/testdata/example_pipeline.yml b/dev-tools/buildkite/pipeline/testdata/example_pipeline.yml index 340d74a091a..78959c2e4c0 100644 --- a/dev-tools/buildkite/pipeline/testdata/example_pipeline.yml +++ b/dev-tools/buildkite/pipeline/testdata/example_pipeline.yml @@ -173,7 +173,7 @@ steps: if: build.pull_request.id == null label: Triggering Extended tests for branches trigger: elastic-agent-extended-testing - - wait: "" + - wait - branches: main build: commit: ${BUILDKITE_COMMIT} diff --git a/dev-tools/buildkite/pipelines/pipelines_test.go b/dev-tools/buildkite/pipelines/pipelines_test.go index 421dc8220a0..ebf68c50ccc 100644 --- a/dev-tools/buildkite/pipelines/pipelines_test.go +++ b/dev-tools/buildkite/pipelines/pipelines_test.go @@ -23,6 +23,10 @@ type pipelineTestCase struct { generator func() *pipeline.Pipeline goldenFile string actualFile string + // dynamic indicates the pipeline has been migrated to dynamic upload. + // Dynamic pipelines have a stub in .buildkite/ that calls mage, so we + // skip the actual file comparison for them. 
+ dynamic bool } var pipelineTestCases = []pipelineTestCase{ @@ -31,6 +35,7 @@ var pipelineTestCases = []pipelineTestCase{ generator: GCECleanup, goldenFile: "pipeline.elastic-agent-gce-cleanup.yml", actualFile: "pipeline.elastic-agent-gce-cleanup.yml", + dynamic: true, }, { name: "AgentlessAppRelease", @@ -43,12 +48,14 @@ var pipelineTestCases = []pipelineTestCase{ generator: Pipeline, goldenFile: "pipeline.yml", actualFile: "pipeline.yml", + dynamic: true, }, { name: "IntegrationPipeline", generator: IntegrationPipeline, goldenFile: "integration.pipeline.yml", actualFile: "integration.pipeline.yml", + dynamic: true, }, { name: "ElasticAgentPackage", @@ -106,6 +113,10 @@ func TestPipelinesMatchActual(t *testing.T) { for _, tc := range pipelineTestCases { t.Run(tc.name, func(t *testing.T) { + if tc.dynamic { + t.Skip("Skipping actual file comparison for dynamic pipeline (uses mage for upload)") + } + actualPath := filepath.Join(repoRoot, ".buildkite", tc.actualFile) p := tc.generator() diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml b/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml index 81b5ce9d444..783924bd016 100644 --- a/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.agentless-app-release.yaml @@ -29,7 +29,7 @@ steps: PLATFORMS: linux/arm64 key: packaging-service-container-arm64 label: "Packaging: Service Container linux/arm64" - - wait: "" + - wait - agents: machineType: c2-standard-16 provider: gcp @@ -39,7 +39,7 @@ steps: plugins: - elastic/vault-docker-login#v0.5.2: secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry - - wait: "" + - wait - agents: image: docker.elastic.co/ci-agent-images/observability/oci-image-tools-agent:latest@sha256:a4ababd1347111759babc05c9ad5a680f4af48892784951358488b7e7fc94af9 command: .buildkite/scripts/steps/validate-agentless-docker-image.sh @@ -49,7 +49,7 @@ steps: 
plugins: - elastic/vault-docker-login#v0.6.3: secret_path: kv/ci-shared/platform-ingest/elastic_docker_registry - - wait: "" + - wait - agents: image: docker.elastic.co/ci-agent-images/serverless-helm-builder:0.0.2@sha256:d00e8a7a0ab3618cfaacb0a7b1e1b06ee29728eb2b44de602374bd8f6b9b92ac command: |- diff --git a/dev-tools/buildkite/pipelines/testdata/pipeline.yml b/dev-tools/buildkite/pipelines/testdata/pipeline.yml index e177ba76349..489239fb30f 100644 --- a/dev-tools/buildkite/pipelines/testdata/pipeline.yml +++ b/dev-tools/buildkite/pipelines/testdata/pipeline.yml @@ -343,7 +343,7 @@ steps: - .buildkite/scripts/steps/validate-agentless-docker-image.sh label: DRY RUN publish to serverless trigger: agentless-serverless-release - - wait: "" + - wait - branches: main build: commit: ${BUILDKITE_COMMIT}