diff --git a/actions/aws-cloudfront-invalidation/action.yml b/actions/aws-cloudfront-invalidation/action.yml index aa60f19..d6a635a 100644 --- a/actions/aws-cloudfront-invalidation/action.yml +++ b/actions/aws-cloudfront-invalidation/action.yml @@ -1,43 +1,57 @@ --- -name: CloudFront Invalidation -description: Creates CloudFront invalidation to clear cache for updated content +name: 'CloudFront Invalidation' +description: 'Create a CloudFront invalidation to clear cache' inputs: aws_access_key: - description: AWS access key ID (optional if using OIDC) + description: 'AWS access key ID' required: false aws_secret_key: - description: AWS secret access key (optional if using OIDC) + description: 'AWS secret access key' required: false aws_region: - description: AWS region + description: 'AWS region' required: true role_to_assume: - description: AWS IAM role ARN to assume (for OIDC authentication) + description: 'AWS IAM role ARN to assume' required: false distribution_id: - description: CloudFront distribution ID + description: 'CloudFront distribution ID' required: true paths: description: > - Paths to invalidate (space-separated, - e.g. "/* /index.html /css/*") + Space-separated paths to invalidate (e.g. "/* /index.html /css/*") required: false default: "/*" caller_reference: description: > - Unique reference for this invalidation - (auto-generated if not provided) + Unique reference for this invalidation (auto-generated if not provided) required: false + wait_for_completion: + description: 'Wait until invalidation status becomes Completed' + required: false + default: 'false' + + show_summary: + description: 'Print summary in the job summary' + required: false + default: 'true' + summary_limit: + description: 'Max number of lines (paths) to show in summary' + required: false + default: '250' outputs: invalidation_id: - description: The ID of the created invalidation + description: 'ID of the created invalidation' value: ${{ steps.invalidate.outputs.invalidation_id }} status: - description: The status of the invalidation + description: 'Status of the invalidation (InProgress/Completed)' value: ${{ steps.invalidate.outputs.status }} + caller_reference: + description: 'CallerReference used for this invalidation' + value: ${{ steps.invalidate.outputs.caller_reference }} runs: using: composite @@ -45,44 +59,50 @@ runs: - name: Validate inputs shell: bash run: | - set -e + set -euo pipefail - if ! command -v jq &> /dev/null; then - echo "❌ jq is required but not found. Please install jq or use ubuntu-latest runner" + if ! command -v aws >/dev/null 2>&1 || ! command -v jq >/dev/null 2>&1; then + echo "❌ AWS CLI and jq are required on the runner (use ubuntu-latest)" exit 1 fi if [[ ! "${{ inputs.distribution_id }}" =~ ^E[A-Z0-9]{13}$ ]]; then - echo "❌ Invalid CloudFront distribution ID format: ${{ inputs.distribution_id }}" - echo "Expected format: E + 13 alphanumeric characters (e.g., E1234567890ABC)" + echo "❌ Invalid CloudFront distribution ID: ${{ inputs.distribution_id }}" + echo "Expected pattern: ^E[A-Z0-9]{13}$ (e.g., E1234567890ABC)" exit 1 fi PATHS="${{ inputs.paths }}" - if [ -z "$PATHS" ]; then - echo "❌ Paths cannot be empty" + if [[ -z "$PATHS" ]]; then + echo "❌ 'paths' cannot be empty" exit 1 fi - IFS=' ' read -ra PATHS_ARRAY <<< "$PATHS" + read -r -a PATHS_ARRAY <<< "$PATHS" PATHS_COUNT=${#PATHS_ARRAY[@]} - if [ $PATHS_COUNT -gt 1000 ]; then - echo "❌ Too many paths ($PATHS_COUNT). 
CloudFront allows maximum 1000 paths per invalidation" - echo "Consider using fewer paths or /* wildcard" + + if (( PATHS_COUNT == 0 )); then + echo "❌ No paths provided" exit 1 fi - for path in "${PATHS_ARRAY[@]}"; do - if [[ ! "$path" =~ ^/.* ]]; then - echo "❌ Invalid path: $path (must start with /)" + if (( PATHS_COUNT > 1000 )); then + echo "❌ Too many paths ($PATHS_COUNT) — CloudFront allows up to 1000 per invalidation" + echo " Consider using fewer paths or a wildcard like /*" + exit 1 + fi + + for p in "${PATHS_ARRAY[@]}"; do + if [[ ! "$p" =~ ^/ ]]; then + echo "❌ Invalid path: $p (must start with /)" exit 1 fi done - echo "✅ Input validation passed ($PATHS_COUNT paths)" + echo "✅ Input validation passed ($PATHS_COUNT path(s))" - name: Configure AWS authentication - uses: Mad-Pixels/github-workflows/internal/aws-auth@main + uses: Mad-Pixels/github-workflows/internal/aws-auth@v1 with: aws_access_key: ${{ inputs.aws_access_key }} aws_secret_key: ${{ inputs.aws_secret_key }} @@ -93,61 +113,136 @@ runs: id: invalidate shell: bash run: | - set -e + set -euo pipefail - if [ -z "${{ inputs.caller_reference }}" ]; then - TIMESTAMP=$(date +%s) - SHORT_SHA="${{ github.sha }}" - SHORT_SHA=${SHORT_SHA:0:8} - CALLER_REF="gh-${TIMESTAMP}-${{ github.run_id }}-${SHORT_SHA}" - else + if [[ -n "${{ inputs.caller_reference }}" ]]; then CALLER_REF="${{ inputs.caller_reference }}" + else + TS=$(date +%s) + SHA="${{ github.sha }}"; SHORT_SHA="${SHA:0:8}" + CALLER_REF="gh-${TS}-${{ github.run_id }}-${SHORT_SHA}" fi - echo "🚀 Creating CloudFront invalidation..." - echo "Distribution ID: ${{ inputs.distribution_id }}" - echo "Caller Reference: ${CALLER_REF}" - echo "Paths: ${{ inputs.paths }}" - - IFS=' ' read -ra PATHS_ARRAY <<< "${{ inputs.paths }}" + read -r -a PATHS_ARRAY <<< "${{ inputs.paths }}" PATHS_COUNT=${#PATHS_ARRAY[@]} - PATHS_JSON=$(printf '%s\n' "${PATHS_ARRAY[@]}" | jq -R . | jq -s .) - echo "📝 Invalidating $PATHS_COUNT path(s)..." INVALIDATION_BATCH=$(jq -n \ --argjson paths "$PATHS_JSON" \ - --arg caller_ref "$CALLER_REF" \ - --argjson quantity "$PATHS_COUNT" \ - '{ - "Paths": { - "Quantity": $quantity, - "Items": $paths - }, - "CallerReference": $caller_ref - }') - - INVALIDATION_RESPONSE=$(aws cloudfront create-invalidation \ + --arg caller "$CALLER_REF" \ + --argjson qty "$PATHS_COUNT" \ + '{Paths:{Quantity:$qty,Items:$paths},CallerReference:$caller}') + + echo "🚀 Creating CloudFront invalidation" + echo "• Distribution: ${{ inputs.distribution_id }}" + echo "• CallerReference: $CALLER_REF" + echo "• Paths ($PATHS_COUNT): ${{ inputs.paths }}" + + RESP=$(aws cloudfront create-invalidation \ --distribution-id "${{ inputs.distribution_id }}" \ --invalidation-batch "$INVALIDATION_BATCH" \ --output json \ --no-cli-pager) - INVALIDATION_ID=$(echo "$INVALIDATION_RESPONSE" | jq -r '.Invalidation.Id') - STATUS=$(echo "$INVALIDATION_RESPONSE" | jq -r '.Invalidation.Status') + ID=$(echo "$RESP" | jq -r '.Invalidation.Id') + STATUS=$(echo "$RESP" | jq -r '.Invalidation.Status') - echo "✅ CloudFront invalidation created successfully!" 
- echo "Invalidation ID: $INVALIDATION_ID" - echo "Status: $STATUS" - echo "invalidation_id=$INVALIDATION_ID" >> $GITHUB_OUTPUT - echo "status=$STATUS" >> $GITHUB_OUTPUT + echo "invalidation_id=$ID" >> "$GITHUB_OUTPUT" + echo "status=$STATUS" >> "$GITHUB_OUTPUT" + echo "caller_reference=$CALLER_REF" >> "$GITHUB_OUTPUT" + + echo "✅ Invalidation created: $ID (status: $STATUS)" + + - name: Wait for completion + if: inputs.wait_for_completion == 'true' + shell: bash + run: | + set -euo pipefail + + DIST="${{ inputs.distribution_id }}" + ID="${{ steps.invalidate.outputs.invalidation_id }}" + + echo "⏳ Waiting for invalidation $ID to become Completed..." + + ATTEMPTS=0 + MAX_ATTEMPTS=90 + while (( ATTEMPTS < MAX_ATTEMPTS )); do + STATUS=$(aws cloudfront get-invalidation \ + --distribution-id "$DIST" \ + --id "$ID" \ + --output json \ + --no-cli-pager | jq -r '.Invalidation.Status') + + echo " Attempt $((ATTEMPTS+1))/$MAX_ATTEMPTS — status: $STATUS" + if [[ "$STATUS" == "Completed" ]]; then + echo "✅ Invalidation completed" + break + fi + + ATTEMPTS=$((ATTEMPTS+1)) + sleep 10 + done + + if (( ATTEMPTS == MAX_ATTEMPTS )); then + echo "⚠️ Timed out waiting for completion — current status: $STATUS" + fi - name: Summary + if: always() && inputs.show_summary == 'true' shell: bash run: | - echo "## 📊 CloudFront Invalidation Summary" >> $GITHUB_STEP_SUMMARY - echo "- **Invalidation ID:** ${{ steps.invalidate.outputs.invalidation_id }}" >> $GITHUB_STEP_SUMMARY - echo "- **Status:** ${{ steps.invalidate.outputs.status }}" >> $GITHUB_STEP_SUMMARY - echo "- **Paths invalidated:** ${{ inputs.paths }}" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "ℹ️ Invalidation started. It may take 10-15 minutes to complete." >> $GITHUB_STEP_SUMMARY + set -euo pipefail + + STATUS_ICON="❌" + [[ -n "${{ steps.invalidate.outputs.invalidation_id }}" ]] && STATUS_ICON="✅" + + DIST="${{ inputs.distribution_id }}" + ID="${{ steps.invalidate.outputs.invalidation_id }}" + CF_LINK="" + if [[ -n "$DIST" && -n "$ID" ]]; then + CF_LINK="https://console.aws.amazon.com/cloudfront/v4/home#/distributions/${DIST}/invalidations/${ID}" + fi + + LIMIT="${{ inputs.summary_limit }}" + [[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT="250" + + PATHS_RAW='${{ inputs.paths }}' + + set -f + IFS=' ' read -r -a P_ARR <<< "$PATHS_RAW" + set +f + + TOTAL="${#P_ARR[@]}" + SHOW="$LIMIT"; (( TOTAL < LIMIT )) && SHOW="$TOTAL" + + { + echo "## 📊 CloudFront Invalidation ${STATUS_ICON}" + echo "- **Invalidation ID:** \`${ID:-N/A}\`" + echo "- **Status:** \`${{ steps.invalidate.outputs.status || 'N/A' }}\`" + echo "- **CallerReference:** \`${{ steps.invalidate.outputs.caller_reference || 'auto' }}\`" + echo "- **Distribution:** \`${DIST}\`" + if [[ -n "$CF_LINK" ]]; then + echo "- **Console:** ${CF_LINK}" + fi + + echo "" + if (( TOTAL > 0 )); then + if (( TOTAL <= LIMIT )); then + echo "### Paths" + else + echo "### Paths (first ${LIMIT} of ${TOTAL})" + fi + echo '```' + for ((i=0;i> "$GITHUB_STEP_SUMMARY" diff --git a/actions/aws-cloudfront-invalidation/examples/base.yml b/actions/aws-cloudfront-invalidation/examples/base.yml new file mode 100644 index 0000000..f0aadb9 --- /dev/null +++ b/actions/aws-cloudfront-invalidation/examples/base.yml @@ -0,0 +1,20 @@ +--- +name: Invalidate CloudFront Cache + +on: + workflow_dispatch: + +jobs: + invalidate: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Invalidate CloudFront Cache + uses: Mad-Pixels/github-workflows/actions/cloudfront-invalidation@v1 + with: + 
aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/GHA-OIDC + distribution_id: E1234567890ABC + paths: "/* /index.html /assets/*" diff --git a/actions/aws-cloudfront-invalidation/readme.md b/actions/aws-cloudfront-invalidation/readme.md new file mode 100644 index 0000000..4cec18c --- /dev/null +++ b/actions/aws-cloudfront-invalidation/readme.md @@ -0,0 +1,64 @@ +# ⚡️ CloudFront Invalidation +Create a CloudFront invalidation + +## ✅ Features +- Create invalidations for one or many paths (supports wildcards) +- Auto‑generated caller reference (or provide your own) + +## 📖 Related Documentation +- CloudFront Invalidation API: https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/Invalidation.html +- AWS CLI cloudfront create‑invalidation: https://docs.aws.amazon.com/cli/latest/reference/cloudfront/create-invalidation.html +- GitHub OIDC for AWS: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc.html + +## 🚀 Prerequisites +Your workflow must: +- Run on `ubuntu-latest` +- Have access to AWS credentials or an assumable IAM role +- Have a valid CloudFront distribution ID + +## 🔧 Quick Example +```yaml +name: Invalidate CloudFront Cache + +on: + workflow_dispatch: + +jobs: + invalidate: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Create invalidation via OIDC + uses: Mad-Pixels/github-workflows/actions/cloudfront-invalidation@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/GHA-OIDC + distribution_id: E1234567890ABC + paths: "/* /index.html /assets/*" +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|--------------------|--------------|---------------------------------------------------------------------------------------------------------|-------------| +| `aws_region` | ✅ Yes | AWS region (used by the CLI) | - | +| `distribution_id` | ✅ Yes | CloudFront distribution ID (format: E + 13 alphanumeric chars, e.g. 
`E1234567890ABC`) | - | +| `aws_access_key` | ❌ No | AWS access key ID (optional if using OIDC) | - | +| `aws_secret_key` | ❌ No | AWS secret access key (optional if using OIDC) | - | +| `role_to_assume` | ❌ No | AWS IAM role ARN to assume (OIDC) | - | +| `paths` | ❌ No | Space‑separated list of paths to invalidate (must start with `/`; max 1000 entries; wildcards allowed) | `/*` | +| `caller_reference` | ❌ No | Custom caller reference for idempotency (auto‑generated if not provided) | - | +| `show_summary` | ❌ No | Print summary with task output in job summary | `true` | +| `summary_limit` | ❌ No | Max number of output lines to show in summary | `250` | + +## 📤 Outputs +| **Name** | **Description** | +|-------------------|-------------------------------------| +| `invalidation_id` | ID of the created invalidation | +| `status` | Status returned by CloudFront | +| `caller_reference`| Reference used for this invalidation| + +## 📋 Examples +[View example →](./examples/base.yml) + diff --git a/actions/aws-lambda-restart/action.yml b/actions/aws-lambda-restart/action.yml index ad67e39..47b4ec3 100644 --- a/actions/aws-lambda-restart/action.yml +++ b/actions/aws-lambda-restart/action.yml @@ -1,29 +1,30 @@ --- -name: Lambda Restart -description: Update AWS Lambda function with latest container image from ECR +name: 'Lambda Restart' +description: 'Update an AWS Lambda function' inputs: aws_access_key_id: - description: 'AWS access key ID (optional if using OIDC)' + description: 'AWS access key ID' required: false aws_secret_access_key: - description: 'AWS secret access key (optional if using OIDC)' + description: 'AWS secret access key' required: false aws_region: description: 'AWS region' required: true aws_account_id: description: 'AWS account ID' - required: true + required: false role_to_assume: - description: 'AWS IAM role ARN to assume (for OIDC authentication)' + description: 'AWS IAM role ARN to assume' required: false function_name: description: 'Full Lambda function name' required: true + image_uri: - description: 'Full ECR image URI (if provided, takes precedence over repository/tag)' + description: 'Full ECR image URI' required: false repository: description: 'ECR repository name' @@ -32,110 +33,197 @@ inputs: description: 'ECR image tag' required: false default: 'latest' + wait_for_update: - description: 'Wait for function update to complete' + description: 'Wait for function to become Active after update' + required: false + default: 'true' + + show_summary: + description: 'Print summary in the job summary' required: false default: 'true' + summary_limit: + description: 'Max number of lines to show in summary (kept for consistency)' + required: false + default: '250' outputs: function_arn: description: 'Lambda function ARN' - value: ${{ steps.update.outputs.function_arn }} + value: ${{ steps.update_code.outputs.function_arn }} last_modified: description: 'Function last modified timestamp' - value: ${{ steps.update.outputs.last_modified }} + value: ${{ steps.update_code.outputs.last_modified }} code_sha256: description: 'Function code SHA256' - value: ${{ steps.update.outputs.code_sha256 }} + value: ${{ steps.update_code.outputs.code_sha256 }} + image_uri: + description: 'Resolved image URI used for the update' + value: ${{ steps.resolve_image.outputs.image_uri }} runs: - using: "composite" + using: composite steps: - - name: Set environment variables - shell: bash - run: | - echo "FUNCTION_NAME=${{ inputs.function_name }}" >> $GITHUB_ENV - echo "AWS_REGION=${{ inputs.aws_region }}" 
>> $GITHUB_ENV - - if [ -n "${{ inputs.image_uri }}" ]; then - IMAGE_URI="${{ inputs.image_uri }}" - else - IMAGE_URI="${{ inputs.aws_account_id }}.dkr.ecr.${{ inputs.aws_region }}.amazonaws.com" - IMAGE_URI="$IMAGE_URI/${{ inputs.repository }}:${{ inputs.image_tag }}" - fi - echo "IMAGE_URI=$IMAGE_URI" >> $GITHUB_ENV - - name: Validate inputs shell: bash run: | - set -e + set -euo pipefail - if ! command -v aws &>/dev/null || ! command -v jq &>/dev/null; then - echo "❌ AWS CLI and jq are required" + if [[ ! "${{ inputs.aws_region }}" =~ ^[a-z]{2}-[a-z]+-[0-9]+$ ]]; then + echo "❌ Invalid AWS region: ${{ inputs.aws_region }}" exit 1 fi - if [[ ! "${{ inputs.aws_account_id }}" =~ ^[0-9]{12}$ ]]; then - echo "❌ Invalid AWS account ID: ${{ inputs.aws_account_id }}" + if [[ -z "${{ inputs.function_name }}" ]]; then + echo "❌ function_name is required" exit 1 fi - if [ -z "${{ inputs.image_uri }}" ] && [ -z "${{ inputs.repository }}" ]; then - echo "❌ Provide either image_uri or repository" + if [[ -z "${{ inputs.image_uri }}" && -z "${{ inputs.repository }}" ]]; then + echo "❌ Provide either image_uri or repository(+image_tag)" exit 1 fi + if [[ -z "${{ inputs.image_uri }}" ]]; then + if [[ ! "${{ inputs.aws_account_id }}" =~ ^[0-9]{12}$ ]]; then + echo "❌ Invalid or missing aws_account_id: '${{ inputs.aws_account_id }}'" + exit 1 + fi + fi + echo "✅ Inputs validated" - name: Configure AWS authentication - uses: Mad-Pixels/github-workflows/internal/aws-auth@main + uses: Mad-Pixels/github-workflows/internal/aws-auth@v1 with: aws_access_key: ${{ inputs.aws_access_key_id }} aws_secret_key: ${{ inputs.aws_secret_access_key }} role_to_assume: ${{ inputs.role_to_assume }} aws_region: ${{ inputs.aws_region }} - - name: Validate Lambda function exists + - name: Resolve image URI + id: resolve_image shell: bash run: | - set -e - aws lambda get-function \ - --function-name "$FUNCTION_NAME" \ - --region "$AWS_REGION" >/dev/null || { - echo "❌ Lambda function not found: $FUNCTION_NAME" - exit 1 - } + set -euo pipefail + + if [[ -n "${{ inputs.image_uri }}" ]]; then + IMAGE_URI="${{ inputs.image_uri }}" + echo "🎯 Using provided image URI" + else + REGISTRY_HOST="${{ inputs.aws_account_id }}.dkr.ecr.${{ inputs.aws_region }}.amazonaws.com" + IMAGE_URI="${REGISTRY_HOST}/${{ inputs.repository }}:${{ inputs.image_tag }}" + echo "🎯 Constructed image URI from repository and tag" + fi + + echo "📦 Image URI: $IMAGE_URI" + echo "image_uri=$IMAGE_URI" >> "$GITHUB_OUTPUT" + + - name: Validate Lambda function + shell: bash + run: | + set -euo pipefail + + if ! command -v aws >/dev/null 2>&1 || ! command -v jq >/dev/null 2>&1; then + echo "❌ AWS CLI and jq are required on the runner" + exit 1 + fi + + echo "🔍 Validating Lambda function..." + if ! 
CFG_JSON=$(aws lambda get-function-configuration \ + --function-name "${{ inputs.function_name }}" \ + --region "${{ inputs.aws_region }}" \ + --output json 2>/dev/null); then + echo "❌ Lambda function not found: ${{ inputs.function_name }}" + echo " Check function name and region are correct" + exit 1 + fi + + PKG=$(echo "$CFG_JSON" | jq -r '.PackageType // "Zip"') + if [[ "$PKG" != "Image" ]]; then + echo "❌ Function package type is '$PKG'" + echo " This action only supports container-based functions (PackageType=Image)" + exit 1 + fi + + STATE=$(echo "$CFG_JSON" | jq -r '.State // "Active"') + if [[ "$STATE" != "Active" ]]; then + echo "⚠️ Function state is '$STATE' — proceeding; waiter will ensure it becomes Active" + else + echo "✅ Lambda function is ready for update" + fi - name: Update Lambda function code - id: update + id: update_code shell: bash run: | - set -e - UPDATE_RESPONSE=$(aws lambda update-function-code \ + set -euo pipefail + + IMAGE_URI="${{ steps.resolve_image.outputs.image_uri }}" + FUNCTION_NAME="${{ inputs.function_name }}" + + echo "🚀 Updating Lambda function: $FUNCTION_NAME" + echo "📦 New image: $IMAGE_URI" + + if ! RESP=$(aws lambda update-function-code \ --function-name "$FUNCTION_NAME" \ --image-uri "$IMAGE_URI" \ - --region "$AWS_REGION" \ - --output json) + --region "${{ inputs.aws_region }}" \ + --output json 2>&1); then + echo "❌ Failed to update Lambda function" + echo "$RESP" + exit 1 + fi + + echo "✅ Update request accepted" + + FUNCTION_ARN=$(echo "$RESP" | jq -r '.FunctionArn') + CODE_SHA256=$(echo "$RESP" | jq -r '.CodeSha256') + LAST_MODIFIED=$(echo "$RESP" | jq -r '.LastModified') - echo "$UPDATE_RESPONSE" - echo "last_modified=$(echo "$UPDATE_RESPONSE" | jq -r '.LastModified')" \ - >> $GITHUB_OUTPUT + echo "function_arn=$FUNCTION_ARN" >> "$GITHUB_OUTPUT" + echo "code_sha256=$CODE_SHA256" >> "$GITHUB_OUTPUT" + echo "last_modified=$LAST_MODIFIED" >> "$GITHUB_OUTPUT" - - name: Wait for update completion + - name: Wait for function to become Active if: inputs.wait_for_update == 'true' shell: bash run: | - echo "⏳ Waiting for Lambda function update to complete..." - aws lambda wait function-updated \ - --function-name "$FUNCTION_NAME" \ - --region "$AWS_REGION" - echo "✅ Update completed" + set -euo pipefail + + echo "⏳ Waiting until function is Active..." 
+ aws lambda wait function-active-v2 \ + --function-name "${{ inputs.function_name }}" \ + --region "${{ inputs.aws_region }}" + + echo "✅ Function is Active" - name: Summary + if: always() && inputs.show_summary == 'true' shell: bash run: | - echo "## 🚀 Lambda Update Summary" >> $GITHUB_STEP_SUMMARY - echo "- Function: $FUNCTION_NAME" >> $GITHUB_STEP_SUMMARY - echo "- Image URI: $IMAGE_URI" >> $GITHUB_STEP_SUMMARY - echo "- Last Modified: ${{ steps.update.outputs.last_modified }}" \ - >> $GITHUB_STEP_SUMMARY + set -euo pipefail + + STATUS_ICON="❌" + if [[ "${{ steps.update_code.outcome }}" == "success" ]]; then + STATUS_ICON="✅" + fi + + LIMIT="${{ inputs.summary_limit }}" + [[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT="250" + + { + echo "## 🚀 Lambda Update ${STATUS_ICON}" + echo "- **Function:** \`${{ inputs.function_name }}\`" + echo "- **Region:** \`${{ inputs.aws_region }}\`" + echo "- **Image:** \`${{ steps.resolve_image.outputs.image_uri || 'N/A' }}\`" + if [[ "${{ steps.update_code.outcome }}" == "success" ]]; then + echo "- **Function ARN:** \`${{ steps.update_code.outputs.function_arn || 'N/A' }}\`" + echo "- **Code SHA256:** \`${{ steps.update_code.outputs.code_sha256 || 'N/A' }}\`" + echo "- **Last Modified:** \`${{ steps.update_code.outputs.last_modified || 'N/A' }}\`" + echo "- **Waited for Active:** \`${{ inputs.wait_for_update }}\`" + else + echo "- **Status:** Update failed — check logs above" + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/actions/aws-lambda-restart/examples/base.yml b/actions/aws-lambda-restart/examples/base.yml new file mode 100644 index 0000000..86988ee --- /dev/null +++ b/actions/aws-lambda-restart/examples/base.yml @@ -0,0 +1,42 @@ +--- +name: Lambda Restart + +on: + workflow_dispatch: + inputs: + function: + description: 'Lambda function name' + required: true + type: string + image_uri: + description: 'Full ECR image URI (e.g., 123456789012.dkr.ecr.us-east-1.amazonaws.com/svc@sha256:...)' + required: true + type: string + wait: + description: 'Wait until function becomes Active' + required: false + default: true + type: boolean + +jobs: + update: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Update Lambda to image_uri + id: restart + uses: Mad-Pixels/github-workflows/actions/lambda-restart@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/GHA-OIDC + function_name: ${{ inputs.function }} + image_uri: ${{ inputs.image_uri }} + wait_for_update: ${{ inputs.wait }} + + - name: Show outputs + run: | + echo "Function ARN: ${{ steps.restart.outputs.function_arn }}" + echo "Code SHA256: ${{ steps.restart.outputs.code_sha256 }}" + echo "Last Modified: ${{ steps.restart.outputs.last_modified }}" diff --git a/actions/aws-lambda-restart/readme.md b/actions/aws-lambda-restart/readme.md new file mode 100644 index 0000000..fd108a5 --- /dev/null +++ b/actions/aws-lambda-restart/readme.md @@ -0,0 +1,72 @@ +# 🚀 Lambda Restart +Update an AWS Lambda function to a new container image (ECR). 
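+
+When `image_uri` is omitted, the action composes the image reference from `aws_account_id`, `aws_region`, `repository`, and `image_tag`. A minimal sketch of that mode (account ID, role, and names below are placeholders):
+
+```yaml
+    - name: Update Lambda from repository + tag
+      uses: Mad-Pixels/github-workflows/actions/lambda-restart@v1
+      with:
+        aws_region: us-east-1
+        aws_account_id: '123456789012'
+        role_to_assume: arn:aws:iam::123456789012:role/GHA-OIDC
+        function_name: my-service-prod
+        repository: my-service
+        image_tag: latest   # resolves to 123456789012.dkr.ecr.us-east-1.amazonaws.com/my-service:latest
+```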
+ +## ✅ Features +- Update Lambda to a specific container image (ECR) +- Accept either full `image_uri` or `repository` + `image_tag` +- Validates Lambda existence before update +- Optional wait until function update completes + +## 📖 Related Documentation +- AWS Lambda container images: https://docs.aws.amazon.com/lambda/latest/dg/images-create.html +- Update function code (CLI): https://docs.aws.amazon.com/cli/latest/reference/lambda/update-function-code.html +- ECR repositories and images: https://docs.aws.amazon.com/AmazonECR/latest/userguide/what-is-ecr.html +- GitHub OIDC for AWS: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc.html + +## 🚀 Prerequisites +Your workflow must: +- Run on `ubuntu-latest` +- Have access to AWS credentials or an assumable IAM role +- Ensure the target ECR image exists and the Lambda function is configured for images + +## 🔧 Quick Example +```yaml +name: Restart Lambda (image_uri) + +on: + workflow_dispatch: + +jobs: + update: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Update Lambda to specific image URI + uses: Mad-Pixels/github-workflows/actions/lambda-restart@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/GHA-OIDC + function_name: my-service-prod + image_uri: 123456789012.dkr.ecr.us-east-1.amazonaws.com/my-service@sha256:deadbeef + wait_for_update: 'true' +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|--------------------------|--------------|--------------------------------------------------------------------------------------|-------------| +| `function_name` | ✅ Yes | Full Lambda function name | - | +| `aws_region` | ✅ Yes | AWS region | - | +| `aws_account_id` | ⚠️ Cond. 
| AWS account ID (12 digits) — required only when using `repository` + `image_tag` | - | +| `aws_access_key_id` | ❌ No | AWS access key ID (optional if using OIDC) | - | +| `aws_secret_access_key` | ❌ No | AWS secret access key (optional if using OIDC) | - | +| `role_to_assume` | ❌ No | AWS IAM role ARN to assume (for OIDC authentication) | - | +| `image_uri` | ❌ No | Full ECR image URI (overrides `repository`/`image_tag` when provided) | - | +| `repository` | ❌ No | ECR repository name (used if `image_uri` not provided) | - | +| `image_tag` | ❌ No | ECR image tag (used with `repository`) | `latest` | +| `wait_for_update` | ❌ No | Wait for function update to complete (`true`/`false`) | `true` | +| `show_summary` | ❌ No | Print summary with task output in job summary | `true` | +| `summary_limit` | ❌ No | Max number of output lines to show in summary | `250` | + +## 📤 Outputs +| **Name** | **Description** | +|------------------|-----------------------------------------| +| `function_arn` | Lambda function ARN | +| `last_modified` | Function last modified timestamp | +| `code_sha256` | Lambda code SHA256 | +| `image_uri` | Resolved image URI | + +## 📋 Examples +[View example →](./examples/base.yml) + diff --git a/actions/aws-s3-sync/action.yml b/actions/aws-s3-sync/action.yml index bc72884..bb50cca 100644 --- a/actions/aws-s3-sync/action.yml +++ b/actions/aws-s3-sync/action.yml @@ -1,32 +1,34 @@ --- -name: Sync Directory to S3 -description: Uploads a local directory to S3 with optional prefix, deletion, and headers +name: 'Sync Directory to S3' +description: 'Upload a local directory to S3' inputs: aws_access_key: - description: 'AWS access key ID (optional if using OIDC)' + description: 'AWS access key ID' required: false aws_secret_key: - description: 'AWS secret access key (optional if using OIDC)' + description: 'AWS secret access key' required: false role_to_assume: - description: 'AWS IAM role ARN to assume (for OIDC authentication)' + description: 'AWS IAM role ARN to assume' required: false aws_region: description: 'AWS region' required: true - bucket_name: - description: 'Target S3 bucket name' - required: true + source_dir: description: 'Local path to sync' required: true + bucket_name: + description: 'Target S3 bucket name' + required: true bucket_prefix: description: 'Optional subpath prefix inside the bucket' required: false - default: "" + default: '' + delete_removed: - description: 'Remove S3 files not in source_dir' + description: 'Remove S3 files not in source_dir (true/false)' required: false default: 'true' exclude_patterns: @@ -34,8 +36,21 @@ inputs: required: false default: ".git/* .github/* .gitignore .gitattributes" cache_control: - description: 'Value for Cache-Control header' + description: 'Value for Cache-Control header (applied to uploaded/updated files)' + required: false + content_type_detection: + description: 'Enable automatic content-type detection based on file extensions' required: false + default: 'true' + + show_summary: + description: 'Print summary in the job summary' + required: false + default: 'true' + summary_limit: + description: 'Max number of output lines to show in summary (kept for consistency)' + required: false + default: '250' outputs: files_uploaded: @@ -45,46 +60,45 @@ outputs: description: 'Number of deleted files' value: ${{ steps.sync.outputs.files_deleted }} total_size: - description: 'Total size in bytes' - value: ${{ steps.sync.outputs.total_size }} + description: 'Total size of local source (bytes)' + value: ${{
steps.analyze.outputs.total_size }} + file_count: + description: 'Total number of files in source directory' + value: ${{ steps.analyze.outputs.file_count }} + sync_duration: + description: 'Sync duration in seconds' + value: ${{ steps.sync.outputs.sync_duration }} s3_url: description: 'Final S3 sync URL' - value: ${{ steps.sync.outputs.s3_url }} + value: ${{ steps.url.outputs.s3_url }} runs: using: composite steps: - - name: Set env and S3 URL + - name: Validate inputs shell: bash run: | - set -eo pipefail + set -euo pipefail BUCKET="${{ inputs.bucket_name }}" - PREFIX="${{ inputs.bucket_prefix }}" - REGION="${{ inputs.aws_region }}" - SOURCE="${{ inputs.source_dir }}" - - PREFIX="${PREFIX#/}" - PREFIX="${PREFIX%/}" - - if [[ -n "$PREFIX" ]]; then - S3_URL="s3://$BUCKET/$PREFIX" - else - S3_URL="s3://$BUCKET" + if [[ ${#BUCKET} -lt 3 || ${#BUCKET} -gt 63 ]]; then + echo "❌ S3 bucket name must be 3-63 characters: $BUCKET" + exit 1 fi - echo "BUCKET_NAME=$BUCKET" >> $GITHUB_ENV - echo "SOURCE_DIR=$SOURCE" >> $GITHUB_ENV - echo "AWS_REGION=$REGION" >> $GITHUB_ENV - echo "S3_URL=$S3_URL" >> $GITHUB_ENV + if [[ ! "$BUCKET" =~ ^[a-z0-9][a-z0-9.-]{1,61}[a-z0-9]$ ]]; then + echo "❌ Invalid S3 bucket name: $BUCKET" + echo "Must be lowercase letters/digits/dots/hyphens, not start/end with dot/hyphen" + exit 1 + fi - - name: Validate inputs - shell: bash - run: | - set -eo pipefail + if [[ "$BUCKET" == *".."* ]]; then + echo "❌ S3 bucket name cannot contain consecutive dots: $BUCKET" + exit 1 + fi - if [[ ! "${{ inputs.bucket_name }}" =~ ^[a-z0-9][a-z0-9.-]{1,61}[a-z0-9]$ ]]; then - echo "❌ Invalid S3 bucket name: ${{ inputs.bucket_name }}" + if [[ ! "${{ inputs.aws_region }}" =~ ^[a-z]{2}-[a-z]+-[0-9]+$ ]]; then + echo "❌ Invalid AWS region: ${{ inputs.aws_region }}" exit 1 fi @@ -100,76 +114,168 @@ runs: echo "✅ Inputs validated" + - name: Validate exclude patterns + shell: bash + run: | + set -euo pipefail + + EXCLUDES="${{ inputs.exclude_patterns }}" + if [[ -n "$EXCLUDES" ]]; then + echo "🔍 Validating exclude patterns..." + read -r -a patterns <<< "$EXCLUDES" + + for pattern in "${patterns[@]}"; do + [[ -z "$pattern" ]] && continue + if [[ "$pattern" == "/*" || "$pattern" == "*" ]]; then + echo "⚠️ Exclude pattern '$pattern' will exclude everything" + fi + echo " - Exclude: '$pattern'" + done + fi + + echo "✅ Exclude patterns validated" + - name: Configure AWS authentication - uses: Mad-Pixels/github-workflows/internal/aws-auth@main + uses: Mad-Pixels/github-workflows/internal/aws-auth@v1 with: aws_access_key: ${{ inputs.aws_access_key }} aws_secret_key: ${{ inputs.aws_secret_key }} role_to_assume: ${{ inputs.role_to_assume }} aws_region: ${{ inputs.aws_region }} + - name: Build S3 URL + id: url + shell: bash + run: | + set -euo pipefail + + BUCKET="${{ inputs.bucket_name }}" + PREFIX="${{ inputs.bucket_prefix }}" + PREFIX="${PREFIX#/}"; PREFIX="${PREFIX%/}" + + [[ -n "$PREFIX" ]] && S3_URL="s3://$BUCKET/$PREFIX" || S3_URL="s3://$BUCKET" + echo "s3_url=$S3_URL" >> "$GITHUB_OUTPUT" + echo "S3_URL=$S3_URL" >> "$GITHUB_ENV" + - name: Check S3 bucket access shell: bash run: | - set -eo pipefail - aws s3 ls "s3://${{ inputs.bucket_name }}" --region "${{ inputs.aws_region }}" >/dev/null || { + set -euo pipefail + + if ! command -v aws >/dev/null 2>&1; then + echo "ℹ️ AWS CLI not found; skipping pre-check (sync will fail if perms are wrong)." + exit 0 + fi + + echo "🔍 Checking S3 bucket access..." + if ! 
aws s3api head-bucket \ + --bucket "${{ inputs.bucket_name }}" \ + --region "${{ inputs.aws_region }}" >/dev/null 2>&1; then + echo "❌ Cannot access S3 bucket: ${{ inputs.bucket_name }}" + echo " Ensure the bucket exists, region matches, and IAM permissions are sufficient." exit 1 - } - echo "✅ Access to S3 confirmed" + fi + + echo "✅ S3 bucket accessible" + + - name: Analyze source directory + id: analyze + shell: bash + run: | + set -euo pipefail + + SRC="${{ inputs.source_dir }}" + echo "📊 Analyzing source directory: $SRC" + + FILE_COUNT=$(find "$SRC" -type f | wc -l | awk '{print $1}') + if find "$SRC" -type f -printf '%s\n' >/dev/null 2>&1; then + BYTES=$(find "$SRC" -type f -printf '%s\n' | awk '{sum+=$1} END{print sum+0}') + else + BYTES=$(find "$SRC" -type f -exec stat -f%z {} \; 2>/dev/null | awk '{sum+=$1} END{print sum+0}' || echo "0") + fi + + echo "file_count=$FILE_COUNT" >> "$GITHUB_OUTPUT" + echo "total_size=$BYTES" >> "$GITHUB_OUTPUT" - name: Sync files to S3 id: sync shell: bash run: | - set -Eeuo pipefail + set -euo pipefail - BUCKET="${{ inputs.bucket_name }}" - PREFIX="${{ inputs.bucket_prefix }}" + SYNC_START=$(date +%s) + SRC="${{ inputs.source_dir }}" REGION="${{ inputs.aws_region }}" - SOURCE="${{ inputs.source_dir }}" - CACHE_CONTROL="${{ inputs.cache_control }}" DELETE="${{ inputs.delete_removed }}" EXCLUDES="${{ inputs.exclude_patterns }}" + CACHE_CONTROL="${{ inputs.cache_control }}" + DETECT_CT="${{ inputs.content_type_detection }}" + S3_URL="${S3_URL:?missing}" - PREFIX="${PREFIX#/}" - PREFIX="${PREFIX%/}" - [[ -n "$PREFIX" ]] && S3_URL="s3://$BUCKET/$PREFIX" || S3_URL="s3://$BUCKET" - - SYNC_CMD=(aws s3 sync "$SOURCE" "$S3_URL" --region "$REGION") + SYNC_CMD=(aws s3 sync "$SRC" "$S3_URL" --region "$REGION" --no-progress) + [[ "$DELETE" == "true" ]] && SYNC_CMD+=("--delete") + [[ "$DETECT_CT" == "false" ]] && SYNC_CMD+=("--no-guess-mime-type") - if [[ "$DELETE" == "true" ]]; then - SYNC_CMD+=("--delete") + if [[ -n "$EXCLUDES" ]]; then + read -r -a patterns <<< "$EXCLUDES" + for pattern in "${patterns[@]}"; do + [[ -n "$pattern" ]] && SYNC_CMD+=("--exclude" "$pattern") + done fi - IFS=' ' read -r -a patterns <<< "$EXCLUDES" - for pattern in "${patterns[@]}"; do - SYNC_CMD+=("--exclude" "$pattern") - done + [[ -n "$CACHE_CONTROL" ]] && SYNC_CMD+=("--cache-control" "$CACHE_CONTROL") + echo "🔄 Executing: ${SYNC_CMD[*]}" - if [[ -n "$CACHE_CONTROL" ]]; then - SYNC_CMD+=("--cache-control" "$CACHE_CONTROL") - fi + set +e + OUTPUT="$("${SYNC_CMD[@]}" 2>&1)" + EXIT_CODE=$? 
+ set -e - echo "🔄 Executing: ${SYNC_CMD[*]}" - OUTPUT=$("${SYNC_CMD[@]}" 2>&1) echo "$OUTPUT" + [[ $EXIT_CODE -ne 0 ]] && { echo "❌ Sync failed ($EXIT_CODE)"; exit 1; } + + SYNC_END=$(date +%s) + SYNC_DURATION=$((SYNC_END - SYNC_START)) - FILES_UPLOADED=$(echo "$OUTPUT" | grep -c "upload:" || true) - FILES_DELETED=$(echo "$OUTPUT" | grep -c "delete:" || true) - BYTES=$(find "$SOURCE" -type f -exec stat -c %s {} \; | awk '{sum+=$1} END {print sum}') - MB=$(awk "BEGIN {printf \"%.2f\", $BYTES/1024/1024}") - echo "files_uploaded=$FILES_UPLOADED" >> $GITHUB_OUTPUT - echo "files_deleted=$FILES_DELETED" >> $GITHUB_OUTPUT - echo "total_size_mb=$MB" >> $GITHUB_OUTPUT - echo "s3_url=$S3_URL" >> $GITHUB_OUTPUT + FILES_UPLOADED=$(echo "$OUTPUT" | grep -E -c '^upload:' || true) + FILES_DELETED=$(echo "$OUTPUT" | grep -E -c '^delete:' || true) + + echo "files_uploaded=$FILES_UPLOADED" >> "$GITHUB_OUTPUT" + echo "files_deleted=$FILES_DELETED" >> "$GITHUB_OUTPUT" + echo "sync_duration=$SYNC_DURATION" >> "$GITHUB_OUTPUT" - name: Summary + if: always() && inputs.show_summary == 'true' shell: bash run: | - echo "## ☁️ Sync Summary" >> "$GITHUB_STEP_SUMMARY" - echo "- Bucket: ${{ inputs.bucket_name }}" >> "$GITHUB_STEP_SUMMARY" - echo "- Path: ${{ inputs.source_dir }} → ${{ steps.sync.outputs.s3_url }}" >> "$GITHUB_STEP_SUMMARY" - echo "- Files uploaded: ${{ steps.sync.outputs.files_uploaded }}" >> "$GITHUB_STEP_SUMMARY" - echo "- Files deleted: ${{ steps.sync.outputs.files_deleted }}" >> "$GITHUB_STEP_SUMMARY" - echo "- Total size: ${{ steps.sync.outputs.total_size }} bytes" >> "$GITHUB_STEP_SUMMARY" + set -euo pipefail + + OUTCOME="${{ steps.sync.outcome }}" + STATUS_ICON="❌"; [[ "$OUTCOME" == "success" ]] && STATUS_ICON="✅" + BYTES="${{ steps.analyze.outputs.total_size || 0 }}" + MB=$(awk "BEGIN {printf \"%.2f\", (${BYTES})/1024/1024}") + + LIMIT="${{ inputs.summary_limit }}" + [[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT="250" + + { + echo "## ☁️ S3 Sync ${STATUS_ICON}" + echo "- **Bucket:** \`${{ inputs.bucket_name }}\`" + echo "- **Source:** \`${{ inputs.source_dir }}\`" + echo "- **Target:** \`${{ steps.url.outputs.s3_url }}\`" + echo "- **Region:** \`${{ inputs.aws_region }}\`" + echo "- **Delete removed:** \`${{ inputs.delete_removed }}\`" + echo "- **Cache-Control:** \`${{ inputs.cache_control || 'N/A' }}\`" + echo "- **Content-Type detection:** \`${{ inputs.content_type_detection }}\`" + echo "- **Excludes:** \`${{ inputs.exclude_patterns }}\`" + if [[ "$OUTCOME" == "success" ]]; then + echo "- **Files uploaded:** \`${{ steps.sync.outputs.files_uploaded || '0' }}\`" + echo "- **Files deleted:** \`${{ steps.sync.outputs.files_deleted || '0' }}\`" + echo "- **Total files:** \`${{ steps.analyze.outputs.file_count || '0' }}\`" + echo "- **Total size:** \`${BYTES}\` bytes (~${MB} MiB)" + echo "- **Sync duration:** \`${{ steps.sync.outputs.sync_duration || 'N/A' }}\` seconds" + else + echo "- **Status:** Sync failed — check logs above" + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/actions/aws-s3-sync/examples/base.yml b/actions/aws-s3-sync/examples/base.yml new file mode 100644 index 0000000..0b6c0b2 --- /dev/null +++ b/actions/aws-s3-sync/examples/base.yml @@ -0,0 +1,44 @@ +--- +name: Synk + +on: + workflow_dispatch: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Build site + run: | + mkdir -p dist && echo "hello" > dist/index.html + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: web-dist + path: dist/ + + deploy: + runs-on: 
ubuntu-latest + needs: build + permissions: + id-token: write + contents: read + steps: + - name: Download artifact + uses: actions/download-artifact@v4 + with: + name: web-dist + path: ./artifact + + - name: Sync artifact/ to S3 + uses: Mad-Pixels/github-workflows/actions/s3-sync@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/GHA-OIDC-Deploy + bucket_name: my-static-site-bucket + source_dir: artifact + bucket_prefix: web + delete_removed: 'true' + exclude_patterns: ".git/* .github/*" + cache_control: "public, max-age=31536000, immutable" diff --git a/actions/aws-s3-sync/readme.md b/actions/aws-s3-sync/readme.md new file mode 100644 index 0000000..32f9a4e --- /dev/null +++ b/actions/aws-s3-sync/readme.md @@ -0,0 +1,80 @@ +# ☁️ Sync Directory to S3 +Upload a local directory to an Amazon S3 bucket. + +## ✅ Features +- Optional deletion of objects missing in source (keep destination clean) +- Exclude files via space‑separated patterns (e.g., ".git/* *.tmp") +- Optional Cache-Control header applied to uploaded objects +- Automatic content-type detection (can be disabled) +- Summary with counts (uploaded/deleted) and total size + +## 📖 Related Documentation +- AWS CLI S3 sync: https://docs.aws.amazon.com/cli/latest/reference/s3/sync.html +- S3 bucket naming rules: https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html +- GitHub OIDC for AWS: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc.html + +## 🚀 Prerequisites +Your workflow must: +- Run on `ubuntu-latest` +- Have access to AWS credentials or an assumable IAM role +- Ensure the target S3 bucket already exists and is accessible + +## 🔧 Quick Example +```yaml +name: Sync Web Assets to S3 + +on: + push: + branches: [main] + +jobs: + s3-sync: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Sync dist/ to S3 + uses: Mad-Pixels/github-workflows/actions/s3-sync@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/GHA-OIDC-Deploy + bucket_name: my-static-site-bucket + source_dir: dist + bucket_prefix: web + delete_removed: 'true' + exclude_patterns: ".git/* .github/* .DS_Store" + cache_control: "public, max-age=31536000, immutable" + +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|--------------------------|--------------|---------------------------------------------------------------------------------|----------------------------------------------| +| `aws_region` | ✅ Yes | AWS region | - | +| `bucket_name` | ✅ Yes | Target S3 bucket name | - | +| `source_dir` | ✅ Yes | Local path to sync | - | +| `aws_access_key` | ❌ No | AWS access key ID (optional if using OIDC) | - | +| `aws_secret_key` | ❌ No | AWS secret access key (optional if using OIDC) | - | +| `role_to_assume` | ❌ No | AWS IAM role ARN to assume (OIDC) | - | +| `bucket_prefix` | ❌ No | Optional subpath prefix inside the bucket (trimmed of leading/trailing slashes) | `""` | +| `delete_removed` | ❌ No | Remove objects in S3 that are not present in `source_dir` (`true`/`false`) | `true` | +| `exclude_patterns` | ❌ No | Space‑separated exclude patterns passed to `aws s3 sync --exclude` | `.git/* .github/* .gitignore .gitattributes` | +| `cache_control` | ❌ No | Value for `Cache-Control` header applied to uploads | - | +| `content_type_detection` | ❌ No | Enable automatic content-type guessing based on file extension (true/false) | true | +| `show_summary` | ❌ No | Print summary with task 
output in job summary | `true` | +| `summary_limit` | ❌ No | Max number of output lines to show in summary | `250` | + +## 📤 Outputs +| **Name** | **Description** | +|-------------------|-------------------------------------------------| +| `files_uploaded` | Number of uploaded files | +| `files_deleted` | Number of deleted files | +| `total_size` | Total size in bytes of local files synced | +| `file_count` | Total number of local files considered for sync | +| `sync_duration` | Sync duration in seconds | +| `s3_url` | Final S3 sync url | + +## 📋 Examples +[View example →](./examples/base.yml) + diff --git a/actions/aws-terraform-runner/action.yml b/actions/aws-terraform-runner/action.yml index db66242..b51f968 100644 --- a/actions/aws-terraform-runner/action.yml +++ b/actions/aws-terraform-runner/action.yml @@ -1,33 +1,33 @@ --- name: 'Terraform Runner' -description: 'Invoke terraform actions' +description: 'Invoke AWS Terraform with S3 backend' inputs: aws_access_key_id: - description: 'AWS access key ID (optional if using OIDC)' + description: 'AWS access key ID' required: false aws_secret_access_key: - description: 'AWS secret access key (optional if using OIDC)' + description: 'AWS secret access key' required: false aws_region: description: 'AWS region' required: true role_to_assume: - description: 'AWS IAM role ARN to assume (for OIDC authentication)' + description: 'AWS IAM role ARN to assume' required: false tf_dir: description: 'Path to Terraform config' required: true tf_workspace: - description: 'Terraform workspace (leave empty to use default)' + description: 'Terraform workspace' required: false default: "" tf_command: - description: 'Terraform command: [plan, apply, destroy]' + description: 'Terraform command: plan | apply | destroy' required: true tf_vars: - description: 'Extra `-var` flags (CLI)' + description: 'Extra CLI flags for terraform (e.g. -var="a=b" -var-file=prod.tfvars)' required: false default: '' tf_version: @@ -42,9 +42,29 @@ inputs: description: 'S3 key for state' required: true backend_region: - description: 'Backend region' + description: 'Backend AWS region' required: true + show_summary: + description: 'Print summary with output in job summary' + required: false + default: 'true' + summary_limit: + description: 'Max number of plan output lines to show in summary' + required: false + default: '500' + +outputs: + terraform_command: + description: 'Executed Terraform command' + value: ${{ steps.collect.outputs.terraform_command }} + workspace: + description: 'Terraform workspace used' + value: ${{ steps.collect.outputs.workspace }} + terraform_version: + description: 'Detected Terraform version' + value: ${{ steps.collect.outputs.terraform_version }} + runs: using: composite steps: @@ -53,17 +73,55 @@ runs: with: fetch-depth: 1 + - name: Validate inputs + shell: bash + run: | + set -euo pipefail + + if [[ ! -d "${{ inputs.tf_dir }}" ]]; then + echo "❌ tf_dir not found: ${{ inputs.tf_dir }}" + exit 1 + fi + + case "${{ inputs.tf_command }}" in + plan|apply|destroy) ;; + *) + echo "❌ Unknown tf_command: ${{ inputs.tf_command }} (expected: plan|apply|destroy)" + exit 1 + ;; + esac + + for r in "${{ inputs.aws_region }}" "${{ inputs.backend_region }}"; do + if [[ ! 
"$r" =~ ^[a-z]{2}-[a-z]+-[0-9]+$ ]]; then + echo "❌ Invalid AWS region: $r" + exit 1 + fi + done + + if [[ -z "${{ inputs.backend_bucket }}" || -z "${{ inputs.backend_key }}" ]]; then + echo "❌ backend_bucket/backend_key must be provided" + exit 1 + fi + + echo "✅ Inputs validated" + - name: Configure AWS authentication - uses: Mad-Pixels/github-workflows/internal/aws-auth@main + uses: Mad-Pixels/github-workflows/internal/aws-auth@v1 with: aws_secret_key: ${{ inputs.aws_secret_access_key }} aws_access_key: ${{ inputs.aws_access_key_id }} role_to_assume: ${{ inputs.role_to_assume }} aws_region: ${{ inputs.aws_region }} - - name: Export AWS region for Terraform + - name: Export Terraform env shell: bash - run: echo "TF_VAR_aws_region=${{ inputs.aws_region }}" >> $GITHUB_ENV + run: | + set -euo pipefail + { + echo "TF_IN_AUTOMATION=1" + echo "TF_INPUT=0" + echo "TF_VAR_aws_region=${{ inputs.aws_region }}" + } >> "$GITHUB_ENV" - name: Setup Terraform uses: hashicorp/setup-terraform@v2 @@ -74,64 +132,135 @@ runs: working-directory: ${{ inputs.tf_dir }} shell: bash run: | - echo "bucket = \"${{ inputs.backend_bucket }}\"" > backend_aws.hcl - echo "key = \"${{ inputs.backend_key }}\"" >> backend_aws.hcl - echo "region = \"${{ inputs.backend_region }}\"" >> backend_aws.hcl - echo "encrypt = true" >> backend_aws.hcl + set -euo pipefail + cat > backend_aws.hcl <<'HCL' + bucket = "${{ inputs.backend_bucket }}" + key = "${{ inputs.backend_key }}" + region = "${{ inputs.backend_region }}" + encrypt = true + HCL - name: Write backend.tf working-directory: ${{ inputs.tf_dir }} shell: bash run: | - echo 'terraform {' > backend.tf - echo ' backend "s3" {}' >> backend.tf - echo '}' >> backend.tf + set -euo pipefail + cat > backend.tf <<'TF' + terraform { + backend "s3" {} + } + TF - name: Terraform Init shell: bash working-directory: ${{ inputs.tf_dir }} - run: terraform init -input=false -reconfigure -backend-config=backend_aws.hcl + run: | + set -euo pipefail + terraform init -input=false -reconfigure -backend-config=backend_aws.hcl - name: Select or create workspace if: inputs.tf_workspace != '' shell: bash working-directory: ${{ inputs.tf_dir }} run: | + set -euo pipefail terraform workspace select "${{ inputs.tf_workspace }}" \ || terraform workspace new "${{ inputs.tf_workspace }}" echo "Current workspace: $(terraform workspace show)" - - name: Run Terraform Validate + - name: Terraform Validate shell: bash working-directory: ${{ inputs.tf_dir }} - run: terraform validate + run: | + set -euo pipefail + terraform validate - name: Run Terraform Command + id: tf-run shell: bash working-directory: ${{ inputs.tf_dir }} run: | - case "${{ inputs.tf_command }}" in + set -euo pipefail + CMD="${{ inputs.tf_command }}" + EXTRA='${{ inputs.tf_vars }}' + case "$CMD" in plan) - terraform plan -input=false ${{ inputs.tf_vars }} + echo "🚀 terraform plan" + terraform plan -input=false -lock-timeout=300s -out=tfplan $EXTRA ;; apply) - terraform apply -input=false -auto-approve ${{ inputs.tf_vars }} + echo "🚀 terraform apply" + terraform apply -input=false -auto-approve -lock-timeout=300s $EXTRA ;; destroy) - terraform destroy -input=false -auto-approve ${{ inputs.tf_vars }} - ;; - *) - echo "❌ Unknown tf_command: ${{ inputs.tf_command }}" >&2 - exit 1 + echo "🚀 terraform destroy" + terraform destroy -input=false -auto-approve -lock-timeout=300s $EXTRA ;; esac + + - name: Upload plan artifact + if: inputs.tf_command == 'plan' + uses: actions/upload-artifact@v4 + with: + name: terraform-plan + path: ${{ inputs.tf_dir 
}}/tfplan + if-no-files-found: error + + - name: Collect outputs + id: collect + shell: bash + working-directory: ${{ inputs.tf_dir }} + run: | + set -euo pipefail + CMD="${{ inputs.tf_command }}" + + WS_IN="${{ inputs.tf_workspace }}" + if [[ -n "$WS_IN" ]]; then + WS="$WS_IN" + else + WS="$(terraform workspace show 2>/dev/null || echo default)" + fi + + TFV="$(terraform version -json 2>/dev/null | sed -n 's/.*"terraform_version":"\([^"]*\)".*/\1/p')" + [[ -z "$TFV" ]] && TFV="${{ inputs.tf_version }}" + + { + echo "terraform_command=$CMD" + echo "workspace=$WS" + echo "terraform_version=$TFV" + } >> "$GITHUB_OUTPUT" + - name: Terraform Summary + if: inputs.show_summary != 'false' shell: bash + working-directory: ${{ inputs.tf_dir }} run: | - WORKSPACE="${{ inputs.tf_workspace != '' && inputs.tf_workspace || 'default' }}" + set -euo pipefail + + WORKSPACE="${{ steps.collect.outputs.workspace }}" + TFV="${{ steps.collect.outputs.terraform_version }}" + + LIMIT_RAW="${{ inputs.summary_limit }}" + if ! [[ "$LIMIT_RAW" =~ ^[0-9]+$ ]]; then LIMIT=250; else LIMIT="$LIMIT_RAW"; fi + + { + echo "## 🛠️ Terraform Execution Summary" + echo "- **Command:** \`${{ inputs.tf_command }}\`" + echo "- **Directory:** \`${{ inputs.tf_dir }}\`" + echo "- **Workspace:** \`$WORKSPACE\`" + echo "- **Terraform version:** \`$TFV\`" + echo "- **Backend:** S3 \`${{ inputs.backend_bucket }}:${{ inputs.backend_key }}\`" + echo " in \`${{ inputs.backend_region }}\`" + echo "- **AWS region (vars):** \`${{ inputs.aws_region }}\`" - echo "## 🛠️ Terraform Execution Summary" >> "$GITHUB_STEP_SUMMARY" - echo "- Command: \`${{ inputs.tf_command }}\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Directory: \`${{ inputs.tf_dir }}\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Workspace: \`$WORKSPACE\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Terraform version: \`${{ inputs.tf_version }}\`" >> "$GITHUB_STEP_SUMMARY" + if [[ "${{ inputs.tf_command }}" == "plan" && -f "tfplan" ]]; then + echo "" + TOTAL_LINES=$(terraform show -no-color tfplan | wc -l | awk '{print $1}') + echo "### 📄 Plan (first ${LIMIT} lines${TOTAL_LINES:+ of ${TOTAL_LINES}})" + terraform show -no-color tfplan | sed -n "1,${LIMIT}p" + if [[ -n "$TOTAL_LINES" && "$TOTAL_LINES" -gt "$LIMIT" ]]; then + echo "" + echo "_…truncated, total lines: $TOTAL_LINES_" + fi + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/actions/aws-terraform-runner/examples/base.yml b/actions/aws-terraform-runner/examples/base.yml new file mode 100644 index 0000000..98d7be4 --- /dev/null +++ b/actions/aws-terraform-runner/examples/base.yml @@ -0,0 +1,63 @@ +--- +name: Terraform Plan & Apply + +on: + push: + branches: [main] + workflow_dispatch: + +concurrency: + group: tf-${{ github.ref_name }} + cancel-in-progress: true + +jobs: + plan: + name: Terraform Plan + runs-on: ubuntu-latest + permissions: + id-token: write # OIDC + contents: read + steps: + - name: Run Terraform Plan + uses: Mad-Pixels/github-workflows/actions/terraform-runner@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/TerraformRole + tf_dir: infra/ + tf_workspace: production + tf_command: plan + tf_vars: -var="image_tag=${{ github.sha }}" + tf_version: 1.8.5 + backend_bucket: my-terraform-state + backend_key: production/terraform.tfstate + backend_region: us-east-1 + + apply: + name: Terraform Apply (manual) + needs: plan + if: github.event_name == 'workflow_dispatch' + runs-on: ubuntu-latest + environment: production + permissions: + id-token: write + contents: read + steps: + - name: Download plan artifact 
(optional) + uses: actions/download-artifact@v4 + with: + name: terraform-plan + path: infra/ + + - name: Run Terraform Apply + id: tf-apply + uses: Mad-Pixels/github-workflows/actions/terraform-runner@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/TerraformRole + tf_dir: infra/ + tf_workspace: production + tf_command: apply + tf_version: 1.8.5 + backend_bucket: my-terraform-state + backend_key: production/terraform.tfstate + backend_region: us-east-1 diff --git a/actions/aws-terraform-runner/examples/destroy.yml b/actions/aws-terraform-runner/examples/destroy.yml new file mode 100644 index 0000000..b747380 --- /dev/null +++ b/actions/aws-terraform-runner/examples/destroy.yml @@ -0,0 +1,54 @@ +--- +name: Terraform Destroy (manual) + +on: + workflow_dispatch: + inputs: + workspace: + description: 'Workspace to destroy' + required: true + type: string + default: staging + confirm: + description: 'Select true to confirm destroy' + required: true + type: choice + options: ['false', 'true'] + default: 'false' + +concurrency: + group: tf-destroy-${{ github.event.inputs.workspace }} + cancel-in-progress: false + +jobs: + destroy: + name: Destroy ${{ github.event.inputs.workspace }} + if: github.event.inputs.confirm == 'true' + runs-on: ubuntu-latest + environment: ${{ github.event.inputs.workspace }} + permissions: + contents: read + steps: + - name: Terraform Destroy + uses: Mad-Pixels/github-workflows/actions/terraform-runner@v1 + with: + aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws_region: eu-west-1 + + tf_dir: infra/ + tf_workspace: ${{ github.event.inputs.workspace }} + tf_command: destroy + tf_vars: -var-file=${{ github.event.inputs.workspace }}.tfvars + tf_version: 1.8.5 + + backend_bucket: company-tf-state + backend_key: ${{ github.event.inputs.workspace }}/terraform.tfstate + backend_region: eu-west-1 + + guard: + name: Guard (no confirm) + if: github.event.inputs.confirm != 'true' + runs-on: ubuntu-latest + steps: + - run: echo "❌ Destroy blocked - set input 'confirm' to true to proceed." diff --git a/actions/aws-terraform-runner/readme.md b/actions/aws-terraform-runner/readme.md new file mode 100644 index 0000000..8771cb8 --- /dev/null +++ b/actions/aws-terraform-runner/readme.md @@ -0,0 +1,76 @@ +# 🌍 Terraform Runner +Run AWS Terraform commands with an S3 state backend.
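+
+The action also exposes `terraform_command`, `workspace`, and `terraform_version` as step outputs. A minimal sketch of reading them from a follow-up step (the `tf` step id and all values below are illustrative, not fixed by the action):
+
+```yaml
+    - name: Terraform Plan
+      id: tf
+      uses: Mad-Pixels/github-workflows/actions/terraform-runner@v1
+      with:
+        aws_region: us-east-1
+        role_to_assume: arn:aws:iam::123456789012:role/TerraformRole
+        tf_dir: infra/
+        tf_command: plan
+        backend_bucket: my-terraform-state
+        backend_key: staging/terraform.tfstate
+        backend_region: us-east-1
+
+    - name: Show Terraform run info
+      run: |
+        echo "command:   ${{ steps.tf.outputs.terraform_command }}"
+        echo "workspace: ${{ steps.tf.outputs.workspace }}"
+        echo "version:   ${{ steps.tf.outputs.terraform_version }}"
+```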
+ +## ✅ Features +- Supports `plan`, `apply`, and `destroy` commands +- AWS authentication via static credentials or OIDC role assumption +- Optional workspace selection and creation +- Execution summary in GitHub Actions summary +- Optionally stores Terraform plan output as an artifact + +## 📖 Related Documentation +- Terraform CLI: https://developer.hashicorp.com/terraform/cli +- Terraform Backends (S3): https://developer.hashicorp.com/terraform/language/settings/backends/s3 +- GitHub Actions for Terraform: https://github.com/hashicorp/setup-terraform +- AWS Authentication for GitHub Actions: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_providers_create_oidc.html + +## 🚀 Prerequisites +Your workflow must: +- Run on `ubuntu-latest` +- Have access to AWS credentials or an assumable IAM role +- Ensure `tf_dir` contains a valid Terraform configuration + +## 🔧 Quick Example +```yaml +name: Terraform Plan + +on: + workflow_dispatch: + +jobs: + terraform: + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + steps: + - name: Terraform Plan + uses: Mad-Pixels/github-workflows/actions/terraform-runner@v1 + with: + aws_region: us-east-1 + role_to_assume: arn:aws:iam::123456789012:role/TerraformRole + tf_dir: infra/ + tf_workspace: staging + tf_command: plan + backend_bucket: my-terraform-state + backend_key: staging/terraform.tfstate + backend_region: us-east-1 +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|-------------------------|--------------|---------------------------------------------------------------------------------|--------------| +| `backend_bucket` | ✅ Yes | S3 bucket for storing Terraform state | - | +| `backend_region` | ✅ Yes | AWS region for S3 backend | - | +| `backend_key` | ✅ Yes | S3 key (path) for Terraform state | - | +| `aws_region` | ✅ Yes | AWS region | - | +| `tf_command` | ✅ Yes | Terraform command: `plan`, `apply`, or `destroy` | - | +| `tf_dir` | ✅ Yes | Path to Terraform configuration directory | - | +| `aws_access_key_id` | ❌ No | AWS access key ID (optional if using OIDC) | - | +| `aws_secret_access_key` | ❌ No | AWS secret access key (optional if using OIDC) | - | +| `role_to_assume` | ❌ No | AWS IAM role ARN for OIDC authentication | - | +| `tf_workspace` | ❌ No | Terraform workspace name | `""` | +| `tf_vars` | ❌ No | Extra CLI `-var` flags | `""` | +| `tf_version` | ❌ No | Terraform version | `1.8.5` | +| `show_summary` | ❌ No | Print summary with task output in job summary | `true` | +| `summary_limit` | ❌ No | Max number of output lines to show in summary | `500` | + +## 📤 Outputs +| **Name** | **Description** | +|---------------------|----------------------------------------------------------| +| `terraform_command` | Executed Terraform command (`plan`/`apply`/`destroy`) | +| `workspace` | Workspace used during execution | +| `terraform_version` | Used terraform version | + +## 📋 Examples +[View example →](./examples/base.yml) diff --git a/actions/docker-build-push/action.yml b/actions/docker-build-push/action.yml new file mode 100644 index 0000000..067be1c --- /dev/null +++ b/actions/docker-build-push/action.yml @@ -0,0 +1,340 @@ +--- +name: 'Docker Build & Push' +description: 'Build and push multi-platform Docker images' + +inputs: + docker_user: + description: 'Registry username' + required: true + docker_token: + description: 'Registry access token / password' + required: true + registry: + description: 'Container registry host (e.g. 
docker.io, ghcr.io)' + required: false + default: 'docker.io' + + repository: + description: 'Image repository (e.g. username/image or ghcr.io/org/image)' + required: true + tag: + description: 'Image tag (e.g. v1.0.0)' + required: true + push_latest: + description: 'Also tag and push :latest (true/false)' + required: false + default: 'false' + + platforms: + description: 'Target platforms (comma-separated)' + required: false + default: 'linux/amd64,linux/arm64' + + build_args: + description: 'Build args as JSON object (e.g. {"VERSION":"1.0","NODE_ENV":"production"})' + required: false + default: '{}' + + artifact_name: + description: 'Optional artifact name to download and use as build context' + required: false + default: '' + context_path: + description: 'Build context path (relative to repo root or artifact root)' + required: false + default: '.' + dockerfile_path: + description: 'Path to Dockerfile (relative to context_path)' + required: false + default: 'Dockerfile' + + show_summary: + description: 'Print summary in the job summary' + required: false + default: 'true' + summary_limit: + description: 'Max number of output lines to show in summary (kept for consistency)' + required: false + default: '250' + +outputs: + image_digest: + description: 'Pushed image manifest-list digest (sha256:...)' + value: ${{ steps.resolve_digest.outputs.digest }} + build_duration: + description: 'Build duration in seconds' + value: ${{ steps.build.outputs.build_duration }} + image_size: + description: 'Image size in bytes' + value: ${{ steps.resolve_digest.outputs.image_size }} + image_ref: + description: 'Fully qualified image@digest' + value: ${{ steps.resolve_digest.outputs.image_ref }} + +runs: + using: composite + steps: + - name: Validate inputs + shell: bash + run: | + set -euo pipefail + + if [[ ! "${{ inputs.tag }}" =~ ^[A-Za-z0-9_][A-Za-z0-9_.-]{0,127}$ ]]; then + echo "❌ Invalid tag: ${{ inputs.tag }}" + echo "Allowed: first [A-Za-z0-9_], total ≤128, chars [A-Za-z0-9_.-]" + exit 1 + fi + + IFS=',' read -ra PLATFORM_ARRAY <<< "${{ inputs.platforms }}" + for platform in "${PLATFORM_ARRAY[@]}"; do + platform="$(echo "$platform" | xargs)" + if [[ ! "$platform" =~ ^[a-z]+/[a-z0-9]+(/v[0-9]+)?$ ]]; then + echo "❌ Invalid platform: $platform (expected os/arch or os/arch/vN)" + exit 1 + fi + done + + if [[ "${{ inputs.repository }}" =~ [A-Z] ]]; then + echo "❌ Repository name cannot contain uppercase letters: ${{ inputs.repository }}" + exit 1 + fi + + if [[ "${{ inputs.build_args }}" != "{}" ]]; then + if ! command -v jq >/dev/null 2>&1; then + echo "❌ 'jq' is required to parse build_args JSON. Use ubuntu-latest or install jq." + exit 1 + fi + if ! echo "${{ inputs.build_args }}" | jq . 
>/dev/null 2>&1; then + echo "❌ Invalid JSON in build_args" + exit 1 + fi + fi + + echo "✅ Inputs validated" + + - name: Decide build context + id: ctx + shell: bash + run: | + set -euo pipefail + + if [[ -n "${{ inputs.artifact_name }}" ]]; then + echo "using_artifact=true" >> "$GITHUB_OUTPUT" + echo "context=./artifact/${{ inputs.context_path }}" >> "$GITHUB_OUTPUT" + else + echo "using_artifact=false" >> "$GITHUB_OUTPUT" + echo "context=${{ inputs.context_path }}" >> "$GITHUB_OUTPUT" + fi + + - name: Checkout repository + if: steps.ctx.outputs.using_artifact == 'false' + uses: actions/checkout@v4 + + - name: Download artifact + if: steps.ctx.outputs.using_artifact == 'true' + uses: actions/download-artifact@v4 + with: + name: ${{ inputs.artifact_name }} + path: ./artifact + + - name: Log in to registry + uses: docker/login-action@v3 + with: + registry: ${{ inputs.registry }} + username: ${{ inputs.docker_user }} + password: ${{ inputs.docker_token }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + install: true + + - name: Build and push + id: build + shell: bash + env: + DOCKERFILE_REL: ${{ inputs.dockerfile_path }} + BUILD_ARGS_JSON: ${{ inputs.build_args }} + CONTEXT: ${{ steps.ctx.outputs.context }} + PUSH_LATEST: ${{ inputs.push_latest }} + PLATFORMS: ${{ inputs.platforms }} + REGISTRY: ${{ inputs.registry }} + REPO: ${{ inputs.repository }} + TAG: ${{ inputs.tag }} + run: | + set -euo pipefail + + BUILD_START=$(date +%s) + if [[ "$REGISTRY" != "docker.io" && "$REPO" != "${REGISTRY}/"* ]]; then + IMAGE="${REGISTRY}/${REPO}" + else + IMAGE="${REPO}" + fi + + DOCKERFILE="${CONTEXT}/${DOCKERFILE_REL}" + echo "📦 Context: ${CONTEXT}" + echo "📝 Dockerfile: ${DOCKERFILE}" + echo "🎯 Platforms: ${PLATFORMS}" + echo "🏷️ Image: ${IMAGE}:${TAG}" + + if [[ ! -d "${CONTEXT}" ]]; then + echo "❌ Build context not found: ${CONTEXT}" + exit 1 + fi + if [[ ! -f "${DOCKERFILE}" ]]; then + echo "❌ Dockerfile not found: ${DOCKERFILE}" + exit 1 + fi + + declare -a BUILD_ARGS_ARR=() + if [[ "${BUILD_ARGS_JSON}" != "{}" ]]; then + echo "🔧 Build args provided:" + + while IFS=$'\t' read -r k v; do + echo " - ${k}=${v}" + BUILD_ARGS_ARR+=(--build-arg "${k}=${v}") + done < <(echo "${BUILD_ARGS_JSON}" | jq -r 'to_entries[] | "\(.key)\t\(.value|tostring)"') + fi + + TAGS=(-t "${IMAGE}:${TAG}") + if [[ "${PUSH_LATEST}" == "true" ]]; then + TAGS+=(-t "${IMAGE}:latest") + fi + + echo "🚀 Starting Docker build..." 
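+        # Note: buildx builds every requested platform and pushes the resulting multi-arch
+        # manifest directly to the registry (no local image load); the GitHub Actions cache
+        # backend (type=gha, mode=max) lets unchanged layers be reused between runs.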
+ docker buildx build \ + --platform "${PLATFORMS}" \ + --file "${DOCKERFILE}" \ + "${TAGS[@]}" \ + "${BUILD_ARGS_ARR[@]}" \ + --cache-from type=gha \ + --cache-to type=gha,mode=max \ + --push \ + "${CONTEXT}" + + BUILD_END=$(date +%s) + BUILD_DURATION=$((BUILD_END - BUILD_START)) + echo "⏱️ Build completed in ${BUILD_DURATION} seconds" + echo "build_duration=${BUILD_DURATION}" >> "$GITHUB_OUTPUT" + echo "✅ Docker build completed successfully" + + unset DOCKER_CONFIG 2>/dev/null || true + + - name: Resolve pushed digest + id: resolve_digest + shell: bash + env: + REGISTRY: ${{ inputs.registry }} + REPO: ${{ inputs.repository }} + TAG: ${{ inputs.tag }} + run: | + set -euo pipefail + + if [[ "$REGISTRY" != "docker.io" && "$REPO" != "${REGISTRY}/"* ]]; then + IMAGE="${REGISTRY}/${REPO}" + else + IMAGE="${REPO}" + fi + + echo "🔎 Resolving digest for ${IMAGE}:${TAG}" + + MAX_ATTEMPTS=5 + ATTEMPT=0 + DIGEST="" + + while [[ $ATTEMPT -lt $MAX_ATTEMPTS ]]; do + ATTEMPT=$((ATTEMPT + 1)) + echo "Attempt $ATTEMPT/$MAX_ATTEMPTS..." + + if DIGEST=$( + docker buildx imagetools inspect \ + "${IMAGE}:${TAG}" 2>/dev/null | awk '/Digest:/ {print $2; exit}'); then + if [[ -n "${DIGEST}" ]]; then + echo "✅ Digest resolved: ${DIGEST}" + break + fi + fi + if [[ $ATTEMPT -lt $MAX_ATTEMPTS ]]; then + echo "⏳ Retrying in 3 seconds..." + sleep 3 + fi + done + + if [[ -z "${DIGEST}" ]]; then + echo "❌ Unable to resolve digest for ${IMAGE}:${TAG} after $MAX_ATTEMPTS attempts" + exit 1 + fi + + SIZE_INFO="$( + docker buildx imagetools inspect \ + "${IMAGE}:${TAG}" \ + --format '{{.Manifest.Size}}' \ + 2>/dev/null || true + )" + : "${SIZE_INFO:=unknown}" + + echo "digest=${DIGEST}" >> "$GITHUB_OUTPUT" + echo "image_size=${SIZE_INFO}" >> "$GITHUB_OUTPUT" + echo "image_ref=${IMAGE}@${DIGEST}" >> "$GITHUB_OUTPUT" + + - name: Summary + if: always() && inputs.show_summary == 'true' + shell: bash + env: + BUILD_ARGS_JSON: ${{ inputs.build_args }} + PUSH_LATEST: ${{ inputs.push_latest }} + PLATFORMS: ${{ inputs.platforms }} + REGISTRY: ${{ inputs.registry }} + REPO: ${{ inputs.repository }} + TAG: ${{ inputs.tag }} + LIMIT: ${{ inputs.summary_limit }} + run: | + set -euo pipefail + + STATUS_ICON="❌" + if [[ "${{ steps.build.outcome }}" == "success" ]]; then + STATUS_ICON="✅" + fi + + [[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT="250" + + if [[ "$REGISTRY" != "docker.io" && "$REPO" != "${REGISTRY}/"* ]]; then + IMAGE="${REGISTRY}/${REPO}" + else + IMAGE="${REPO}" + fi + + { + echo "## 🐳 Docker Build & Push ${STATUS_ICON}" + echo "- **Image:** \`${IMAGE}\`" + echo "- **Tags:**" + echo " - \`${IMAGE}:${TAG}\`" + if [[ "${PUSH_LATEST}" == "true" ]]; then + echo " - \`${IMAGE}:latest\`" + fi + echo "- **Platforms:** \`${PLATFORMS}\`" + echo "- **Dockerfile:** \`${{ steps.ctx.outputs.context }}/${{ inputs.dockerfile_path }}\`" + echo "- **Context:** \`${{ steps.ctx.outputs.context }}\`" + if [[ -n "${{ steps.resolve_digest.outputs.digest || '' }}" ]]; then + echo "- **Digest:** \`${{ steps.resolve_digest.outputs.digest }}\`" + echo "- **Build duration:** ${{ steps.build.outputs.build_duration || 'N/A' }} seconds" + echo "- **Image size:** ${{ steps.resolve_digest.outputs.image_size || 'N/A' }} bytes" + echo "- **Image ref:** \`${{ steps.resolve_digest.outputs.image_ref || '' }}\`" + fi + if [[ "${BUILD_ARGS_JSON}" != "{}" ]]; then + echo "" + echo "### Build args" + echo '```json' + echo "${BUILD_ARGS_JSON}" | jq . 
+ echo '```' + fi + echo "" + if [[ "${{ steps.build.outcome }}" == "success" ]]; then + echo "✅ **Build successful** — image pushed to registry" + else + echo "❌ **Build failed** — check logs for details" + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/actions/docker-build-push/examples/artifact.yml b/actions/docker-build-push/examples/artifact.yml new file mode 100644 index 0000000..ad7e0c9 --- /dev/null +++ b/actions/docker-build-push/examples/artifact.yml @@ -0,0 +1,35 @@ +--- +name: Build from Artifact + +on: + workflow_dispatch: + +jobs: + build-artifact: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Build app bundle + run: | + mkdir -p dist && echo "hello" > dist/index.html + - name: Upload artifact + uses: actions/upload-artifact@v4 + with: + name: web-dist + path: dist/ + + docker: + runs-on: ubuntu-latest + needs: build-artifact + steps: + - name: Build & Push using artifact as context + uses: Mad-Pixels/github-workflows/actions/docker-build-push@v1 + with: + docker_user: ${{ secrets.DOCKERHUB_USERNAME }} + docker_token: ${{ secrets.DOCKERHUB_TOKEN }} + repository: myuser/static-site + tag: ${{ github.run_number }} + artifact_name: web-dist + context_path: . + dockerfile_path: Dockerfile + platforms: linux/amd64,linux/arm64 diff --git a/actions/docker-build-push/examples/base.yml b/actions/docker-build-push/examples/base.yml new file mode 100644 index 0000000..90aa5f8 --- /dev/null +++ b/actions/docker-build-push/examples/base.yml @@ -0,0 +1,28 @@ +--- +name: Build & Push Docker Image + +on: + push: + branches: + - main + workflow_dispatch: + +jobs: + docker: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - name: Build & Push Multi-Platform Image + uses: Mad-Pixels/github-workflows/actions/docker-build-push@v1 + with: + docker_user: ${{ secrets.DOCKERHUB_USERNAME }} + docker_token: ${{ secrets.DOCKERHUB_TOKEN }} + repository: myuser/myimage + tag: ${{ github.sha }} + push_latest: 'true' + platforms: linux/amd64,linux/arm64 + build_args: '{"VERSION":"${{ github.sha }}","NODE_ENV":"production"}' + context_path: . + dockerfile_path: Dockerfile diff --git a/actions/docker-build-push/examples/ghcr.yml b/actions/docker-build-push/examples/ghcr.yml new file mode 100644 index 0000000..23ad0c9 --- /dev/null +++ b/actions/docker-build-push/examples/ghcr.yml @@ -0,0 +1,26 @@ +--- +name: Build & Push (GHCR) + +on: + push: + branches: [main] + +jobs: + docker: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - name: Build & Push to GHCR + uses: Mad-Pixels/github-workflows/actions/docker-build-push@v1 + with: + registry: ghcr.io + docker_user: ${{ github.actor }} + docker_token: ${{ secrets.GITHUB_TOKEN }} + repository: my-org/my-app + tag: ${{ github.ref_name }} + platforms: linux/amd64,linux/arm64 + build_args: '{"VERSION":"${{ github.ref_name }}"}' + context_path: . 
+          dockerfile_path: Dockerfile
diff --git a/actions/docker-build-push/examples/latest.yml b/actions/docker-build-push/examples/latest.yml
new file mode 100644
index 0000000..d0330e0
--- /dev/null
+++ b/actions/docker-build-push/examples/latest.yml
@@ -0,0 +1,23 @@
+---
+name: Build & Push with latest
+
+on:
+  push:
+    branches: [main]
+
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Build & Push (main + latest)
+        uses: Mad-Pixels/github-workflows/actions/docker-build-push@v1
+        with:
+          docker_user: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker_token: ${{ secrets.DOCKERHUB_TOKEN }}
+          repository: myuser/myimage
+          tag: v1.2.3
+          push_latest: 'true'
+          platforms: linux/amd64,linux/arm64
+          build_args: '{"VERSION":"v1.2.3"}'
+          context_path: .
+          dockerfile_path: Dockerfile
diff --git a/actions/docker-build-push/examples/matrix.yml b/actions/docker-build-push/examples/matrix.yml
new file mode 100644
index 0000000..a05a684
--- /dev/null
+++ b/actions/docker-build-push/examples/matrix.yml
@@ -0,0 +1,37 @@
+---
+name: Monorepo Matrix Build
+
+on:
+  push:
+    branches: [main]
+
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - name: api
+            repo: myuser/my-api
+            context: services/api
+            dockerfile: Dockerfile
+          - name: web
+            repo: myuser/my-web
+            context: services/web
+            dockerfile: Dockerfile
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Build & Push (${{ matrix.name }})
+        uses: Mad-Pixels/github-workflows/actions/docker-build-push@v1
+        with:
+          docker_user: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker_token: ${{ secrets.DOCKERHUB_TOKEN }}
+          repository: ${{ matrix.repo }}
+          tag: ${{ github.sha }}
+          platforms: linux/amd64,linux/arm64
+          build_args: '{"VERSION":"${{ github.sha }}","SERVICE":"${{ matrix.name }}"}'
+          context_path: ${{ matrix.context }}
+          dockerfile_path: ${{ matrix.dockerfile }}
diff --git a/actions/docker-build-push/readme.md b/actions/docker-build-push/readme.md
new file mode 100644
index 0000000..16561b9
--- /dev/null
+++ b/actions/docker-build-push/readme.md
@@ -0,0 +1,82 @@
+# 🐳 Docker Build & Push
+Build and push multi-platform Docker images.
+
+## ✅ Features
+- Multi-arch builds via Buildx + QEMU (`linux/amd64`, `linux/arm64`, etc.)
+- Push to Docker Hub or custom registry (`registry` input)
+- Optional `:latest` tagging (`push_latest`)
+- GitHub Actions cache for faster rebuilds
+
+## 📖 Related Documentation
+- Docker Buildx: https://docs.docker.com/build/buildx/
+- Docker Hub: https://hub.docker.com/
+- OCI Image Index (multi-arch): https://github.com/opencontainers/image-spec
+
+## 🚀 Prerequisites
+Your workflow must:
+- Run on `ubuntu-latest` with Docker available
+- Pass `build_args` as a valid JSON object (when build args are used)
+- Provide registry credentials (`docker_user`, `docker_token`)
+- If using artifact context: ensure the artifact is produced earlier in the workflow
+
+## 🔧 Quick Example
+```yaml
+name: Build & Push Image
+
+on:
+  push:
+    branches: [main]
+
+jobs:
+  docker:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - name: Build & Push
+        uses: Mad-Pixels/github-workflows/actions/docker-build-push@v1
+        with:
+          docker_user: ${{ secrets.DOCKERHUB_USERNAME }}
+          docker_token: ${{ secrets.DOCKERHUB_TOKEN }}
+          repository: myuser/myimage
+          tag: ${{ github.sha }}
+          push_latest: 'false'
+          platforms: linux/amd64,linux/arm64
+          build_args: '{"VERSION":"${{ github.sha }}","NODE_ENV":"production"}'
+          context_path: .
+ dockerfile_path: Dockerfile +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|------------------|--------------|-----------------------------------------------------------------------------------------------------|---------------------------------------------| +| `docker_user` | ✅ Yes | Registry username (Docker Hub by default) | - | +| `docker_token` | ✅ Yes | Registry access token / password | - | +| `repository` | ✅ Yes | Image repository (e.g. `user/image` or `ghcr.io/org/image` with non-default registry) | - | +| `tag` | ✅ Yes | Image tag (e.g. `v1.0.0`, `sha`) | - | +| `registry` | ❌ No | Registry host (e.g. `docker.io`, `ghcr.io`) | `docker.io` | +| `push_latest` | ❌ No | Also tag and push `:latest` (`true`/`false`) | `false` | +| `platforms` | ❌ No | Target platforms (comma-separated) | `linux/amd64,linux/arm64` | +| `build_args` | ❌ No | Build args as JSON object (values kept intact; requires valid JSON) | `{}` | +| `artifact_name` | ❌ No | If set, downloads artifact and uses it as build context | `''` | +| `context_path` | ❌ No | Build context path (relative to repo root or artifact root) | `.` | +| `dockerfile_path`| ❌ No | Path to Dockerfile (relative to `context_path`) | `Dockerfile` | +| `show_summary` | ❌ No | Print summary with task output in job summary | `true` | +| `summary_limit` | ❌ No | Max number of output lines to show in summary | `250` | + +## 📤 Outputs +| **Name** | **Description** | +|-----------------|------------------------------------------| +| `image_digest` | Pushed image manifest-list digest (sha) | +| `build_duration`| Duration in sec | +| `image_size` | Size in bytes | +| `image_ref` | Fully qualified `image@digest` | + +## 📋 Examples +[View example →](./examples/base.yml) +[Artifact →](./examples/artifact.yml) +[Set repository →](./examples/ghcr.yml) +[With latest →](./examples/latest.yml) +[Matrix →](./examples/matrix.yml) + diff --git a/actions/dockerhub-build-push/action.yml b/actions/dockerhub-build-push/action.yml deleted file mode 100644 index 9c04679..0000000 --- a/actions/dockerhub-build-push/action.yml +++ /dev/null @@ -1,135 +0,0 @@ ---- -name: DockerHub Push -description: Build and push multi-platform Docker images to DockerHub. - -inputs: - docker_user: - description: 'DockerHub username' - required: true - docker_token: - description: 'DockerHub access token' - required: true - - repository: - description: 'DockerHub repository name (e.g. username/image)' - required: true - tag: - description: 'Tag for the Docker image (e.g. v1.0.0)' - required: true - - platforms: - description: 'Target platforms (comma-separated)' - required: false - default: 'linux/amd64,linux/arm64' - - build_args: - description: 'Build arguments as JSON object (e.g. {"VERSION":"1.0"})' - required: false - default: '{}' - - artifact_name: - description: 'Optional artifact name to download and use as context' - required: false - default: '' - - context_path: - description: 'Build context directory' - required: false - default: '.' 
- - dockerfile_path: - description: 'Path to Dockerfile (relative to context_path)' - required: false - default: 'Dockerfile' - -runs: - using: composite - steps: - - name: Determine context path - id: ctx - shell: bash - run: | - if [[ -n "${{ inputs.artifact_name }}" ]]; then - echo "using_artifact=true" >> "$GITHUB_OUTPUT" - ctx_path="./artifact/${{ inputs.context_path }}" - echo "context=$ctx_path" >> "$GITHUB_OUTPUT" - else - echo "using_artifact=false" >> "$GITHUB_OUTPUT" - echo "context=${{ inputs.context_path }}" >> "$GITHUB_OUTPUT" - fi - - - name: Checkout repository - if: steps.ctx.outputs.using_artifact == 'false' - uses: actions/checkout@v4 - - - name: Download artifact - if: steps.ctx.outputs.using_artifact == 'true' - uses: actions/download-artifact@v4 - with: - name: ${{ inputs.artifact_name }} - path: ./artifact - - - name: DockerHub login - uses: docker/login-action@v3 - with: - username: ${{ inputs.docker_user }} - password: ${{ inputs.docker_token }} - - - name: Setup QEMU (for cross-platform builds) - uses: docker/setup-qemu-action@v3 - - - name: Setup Buildx - uses: docker/setup-buildx-action@v3 - with: - install: true - - - name: Build and push Docker image - shell: bash - run: | - CONTEXT="${{ steps.ctx.outputs.context }}" - DOCKERFILE="${CONTEXT}/${{ inputs.dockerfile_path }}" - - echo "📦 Building from context: $CONTEXT" - echo "📝 Dockerfile: $DOCKERFILE" - - BUILD_ARGS="" - if [[ "${{ inputs.build_args }}" != "{}" ]]; then - echo "🔧 Parsing build args..." - echo '${{ inputs.build_args }}' | jq -r ' - to_entries[] | - "--build-arg \(.key)=\(.value)" - ' | while read -r arg; do - BUILD_ARGS="$BUILD_ARGS $arg" - done - fi - - docker buildx build \ - --platform "${{ inputs.platforms }}" \ - --file "$DOCKERFILE" \ - $BUILD_ARGS \ - -t "${{ inputs.repository }}:${{ inputs.tag }}" \ - -t "${{ inputs.repository }}:latest" \ - --push \ - "$CONTEXT" - - - name: Docker Build Summary - shell: bash - run: | - CONTEXT="${{ steps.ctx.outputs.context }}" - DOCKERFILE_PATH="${CONTEXT}/${{ inputs.dockerfile_path }}" - - echo "## 🐳 Docker Image Summary" >> "$GITHUB_STEP_SUMMARY" - echo "- Repository: \`${{ inputs.repository }}\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Tags:" >> "$GITHUB_STEP_SUMMARY" - echo " - \`${{ inputs.repository }}:${{ inputs.tag }}\`" >> "$GITHUB_STEP_SUMMARY" - echo " - \`${{ inputs.repository }}:latest\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Platforms: \`${{ inputs.platforms }}\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Context: \`${CONTEXT}\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Dockerfile: \`${DOCKERFILE_PATH}\`" >> "$GITHUB_STEP_SUMMARY" - - if [[ "${{ inputs.build_args }}" != "{}" ]]; then - echo "- Build args:" >> "$GITHUB_STEP_SUMMARY" - echo '```json' >> "$GITHUB_STEP_SUMMARY" - echo '${{ inputs.build_args }}' | jq . 
>> "$GITHUB_STEP_SUMMARY" - echo '```' >> "$GITHUB_STEP_SUMMARY" - fi diff --git a/actions/github-check-branch/action.yml b/actions/github-check-branch/action.yml index 16b89e8..697246e 100644 --- a/actions/github-check-branch/action.yml +++ b/actions/github-check-branch/action.yml @@ -1,21 +1,49 @@ --- name: 'Branch Validator' -description: 'Verify that tag or commit is reachable from specified branch (default: main)' +description: 'Verify that a commit (HEAD / tag / explicit SHA) is reachable from a target branch' inputs: target_branch: - description: 'Branch to validate against (e.g., main or release/v1)' + description: 'Branch to validate' required: false default: 'main' tag_name: - description: 'Tag name to validate (if empty, uses current HEAD)' + description: 'Tag name to validate' required: false default: '' + commit_sha: + description: 'Explicit commit SHA to validate' + required: false + default: '' + fail_on_invalid: + description: 'Fail action when commit is not reachable' + required: false + default: 'true' + show_summary: + description: 'Print summary in the job summary' + required: false + default: 'true' + summary_limit: + description: 'Max number of output lines to show in summary (kept for consistency)' + required: false + default: '250' outputs: is_valid: - description: 'true if commit is from target branch, false otherwise' + description: 'true if the commit is reachable from target branch, false otherwise' value: ${{ steps.validate.outputs.is_valid }} + commit: + description: 'The validated commit SHA' + value: ${{ steps.validate.outputs.commit }} + subject: + description: 'Description of what was validated (HEAD/tag/commit)' + value: ${{ steps.validate.outputs.subject }} + target_branch: + description: 'The target branch used for validation' + value: ${{ steps.validate.outputs.target_branch }} + merge_base: + description: 'Common ancestor commit (only set when validation fails)' + value: ${{ steps.validate.outputs.merge_base }} runs: using: composite @@ -25,41 +53,169 @@ runs: with: fetch-depth: 0 - - name: Validate branch ancestry - id: validate + - name: Export inputs as env shell: bash run: | set -euo pipefail + echo "TARGET_BRANCH=${{ inputs.target_branch }}" >> "$GITHUB_ENV" + echo "TAG_NAME=${{ inputs.tag_name }}" >> "$GITHUB_ENV" + echo "COMMIT_SHA_INPUT=${{ inputs.commit_sha }}" >> "$GITHUB_ENV" + echo "FAIL_ON_INVALID=${{ inputs.fail_on_invalid }}" >> "$GITHUB_ENV" - TARGET_BRANCH="${{ inputs.target_branch }}" - TAG_NAME="${{ inputs.tag_name }}" + - name: Validate inputs + shell: bash + run: | + set -euo pipefail - echo "🔍 Validating commit against branch: $TARGET_BRANCH" + if [ -n "${TAG_NAME//[$'\t\r\n ']/}" ] && [ -n "${COMMIT_SHA_INPUT//[$'\t\r\n ']/}" ]; then + echo "❌ Cannot specify both tag_name and commit_sha" + exit 1 + fi - if [ -n "$TAG_NAME" ]; then - if ! git rev-parse "$TAG_NAME" >/dev/null 2>&1; then - echo "❌ Tag '$TAG_NAME' does not exist" - echo "is_valid=false" >> $GITHUB_OUTPUT + if [ -n "${COMMIT_SHA_INPUT//[$'\t\r\n ']/}" ]; then + if [[ ! "${COMMIT_SHA_INPUT}" =~ ^[a-f0-9]{7,40}$ ]]; then + echo "❌ Invalid commit SHA format: ${COMMIT_SHA_INPUT}" + echo "Expected: 7-40 hexadecimal characters" exit 1 fi - COMMIT=$(git rev-list -n 1 "$TAG_NAME") - echo "📦 Using tag '$TAG_NAME' → commit $COMMIT" - else - COMMIT=$(git rev-parse HEAD) - echo "📦 Using HEAD → commit $COMMIT" fi - if ! 
git show-ref --verify --quiet "refs/remotes/origin/$TARGET_BRANCH"; then - echo "❌ Remote branch 'origin/$TARGET_BRANCH' not found" - echo "is_valid=false" >> $GITHUB_OUTPUT + if [[ ! "${TARGET_BRANCH}" =~ ^[a-zA-Z0-9._/-]+$ ]]; then + echo "❌ Invalid branch name: ${TARGET_BRANCH}" exit 1 fi - if git merge-base --is-ancestor "$COMMIT" "origin/$TARGET_BRANCH"; then - echo "✅ Commit $COMMIT is reachable from branch '$TARGET_BRANCH'" - echo "is_valid=true" >> $GITHUB_OUTPUT + echo "✅ Inputs validated" + + - name: Fetch target branch and tags + shell: bash + run: | + set -euo pipefail + + echo "📥 Fetching target branch and (optional) tags…" + + if ! git ls-remote --heads origin "refs/heads/${TARGET_BRANCH}" | grep -q .; then + echo "❌ Branch '${TARGET_BRANCH}' does not exist on remote" + echo "BRANCH_EXISTS=false" >> "$GITHUB_ENV" + exit 0 + fi + echo "BRANCH_EXISTS=true" >> "$GITHUB_ENV" + + git fetch --no-tags --force origin "refs/heads/${TARGET_BRANCH}:refs/remotes/origin/${TARGET_BRANCH}" || { + echo "❌ Failed to fetch branch '${TARGET_BRANCH}'" + echo "FETCH_SUCCESS=false" >> "$GITHUB_ENV" + exit 0 + } + if [ -n "${TAG_NAME//[$'\t\r\n ']/}" ]; then + git fetch --force --tags origin || echo "⚠️ Failed to fetch tags, continuing..." + fi + + echo "FETCH_SUCCESS=true" >> "$GITHUB_ENV" + echo "✅ Fetch complete" + + - name: Validate branch ancestry + id: validate + shell: bash + run: | + set -euo pipefail + + echo "🔍 Validating reachability to branch: ${TARGET_BRANCH}" + + if [ "${BRANCH_EXISTS:-false}" = "false" ]; then + echo "❌ Remote branch 'origin/${TARGET_BRANCH}' not found" + echo "is_valid=false" >> "$GITHUB_OUTPUT" + echo "target_branch=${TARGET_BRANCH}" >> "$GITHUB_OUTPUT" + if [ "${FAIL_ON_INVALID}" = "true" ]; then exit 1; else exit 0; fi + fi + + if [ "${FETCH_SUCCESS:-false}" = "false" ]; then + echo "❌ Failed to fetch required git data" + echo "is_valid=false" >> "$GITHUB_OUTPUT" + echo "target_branch=${TARGET_BRANCH}" >> "$GITHUB_OUTPUT" + if [ "${FAIL_ON_INVALID}" = "true" ]; then exit 1; else exit 0; fi + fi + + SUBJECT="HEAD" + if [ -n "${COMMIT_SHA_INPUT//[$'\t\r\n ']/}" ]; then + if ! git rev-parse --verify -q "${COMMIT_SHA_INPUT}^{commit}" >/dev/null; then + echo "❌ Commit SHA '${COMMIT_SHA_INPUT}' does not exist" + echo "is_valid=false" >> "$GITHUB_OUTPUT" + echo "target_branch=${TARGET_BRANCH}" >> "$GITHUB_OUTPUT" + if [ "${FAIL_ON_INVALID}" = "true" ]; then exit 1; else exit 0; fi + fi + COMMIT="${COMMIT_SHA_INPUT}" + SUBJECT="commit:${COMMIT}" + elif [ -n "${TAG_NAME//[$'\t\r\n ']/}" ]; then + if ! 
git rev-parse --verify -q "${TAG_NAME}^{commit}" >/dev/null; then + echo "❌ Tag '${TAG_NAME}' does not exist or does not point to a commit" + echo "is_valid=false" >> "$GITHUB_OUTPUT" + echo "target_branch=${TARGET_BRANCH}" >> "$GITHUB_OUTPUT" + if [ "${FAIL_ON_INVALID}" = "true" ]; then exit 1; else exit 0; fi + fi + COMMIT="$(git rev-list -n 1 "${TAG_NAME}")" + SUBJECT="tag:${TAG_NAME}" else - echo "❌ Commit $COMMIT is NOT reachable from branch '$TARGET_BRANCH'" - echo "is_valid=false" >> $GITHUB_OUTPUT - exit 1 + COMMIT="$(git rev-parse HEAD)" + SUBJECT="HEAD:${COMMIT}" + fi + + echo "📦 Subject: ${SUBJECT}" + echo "🔗 Commit: ${COMMIT}" + + if git merge-base --is-ancestor "${COMMIT}" "refs/remotes/origin/${TARGET_BRANCH}"; then + echo "✅ Commit is reachable from '${TARGET_BRANCH}'" + echo "is_valid=true" >> "$GITHUB_OUTPUT" + else + MERGE_BASE=$(git merge-base "${COMMIT}" "refs/remotes/origin/${TARGET_BRANCH}" 2>/dev/null || echo "") + if [ -z "$MERGE_BASE" ]; then + echo "❌ No common history between commit and '${TARGET_BRANCH}'" + else + echo "❌ Commit is NOT reachable from '${TARGET_BRANCH}'" + echo " Common ancestor: ${MERGE_BASE}" + echo "merge_base=${MERGE_BASE}" >> "$GITHUB_OUTPUT" + fi + echo "is_valid=false" >> "$GITHUB_OUTPUT" + if [ "${FAIL_ON_INVALID}" = "true" ]; then exit 1; fi + fi + + echo "commit=${COMMIT}" >> "$GITHUB_OUTPUT" + echo "subject=${SUBJECT}" >> "$GITHUB_OUTPUT" + echo "target_branch=${TARGET_BRANCH}" >> "$GITHUB_OUTPUT" + + - name: Summary + if: always() && inputs.show_summary == 'true' + shell: bash + env: + LIMIT: ${{ inputs.summary_limit }} + run: | + set -euo pipefail + + RESULT_ICON="❌" + if [ "${{ steps.validate.outputs.is_valid || 'false' }}" = "true" ]; then + RESULT_ICON="✅" fi + + # LIMIT kept for interface parity (not used for truncation here) + [[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT="250" + + { + echo "## 🔎 Branch Validator ${RESULT_ICON}" + echo "- **Target branch:** \`${{ inputs.target_branch }}\`" + echo "- **Subject:** \`${{ steps.validate.outputs.subject || 'N/A' }}\`" + echo "- **Commit:** \`${{ steps.validate.outputs.commit || 'N/A' }}\`" + echo "- **Reachable:** \`${{ steps.validate.outputs.is_valid || 'false' }}\`" + + if [ -n "${{ steps.validate.outputs.merge_base || '' }}" ]; then + echo "- **Common ancestor:** \`${{ steps.validate.outputs.merge_base }}\`" + fi + echo "" + if [ "${{ steps.validate.outputs.is_valid || 'false' }}" = "true" ]; then + echo "✅ **Validation passed** — commit is reachable from target branch" + else + echo "❌ **Validation failed** — commit is not reachable from target branch" + if [ "${{ inputs.fail_on_invalid }}" = "false" ]; then + echo "" + echo "ℹ️ Action continued due to \`fail_on_invalid: false\`" + fi + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/actions/github-check-branch/examples/base.yml b/actions/github-check-branch/examples/base.yml new file mode 100644 index 0000000..614c6b0 --- /dev/null +++ b/actions/github-check-branch/examples/base.yml @@ -0,0 +1,39 @@ +--- +name: Deploy by Tag (validated) + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to deploy (e.g., v1.2.3)' + required: true + type: string + +concurrency: + group: deploy-${{ github.event.inputs.tag }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + validate: + runs-on: ubuntu-latest + steps: + - name: Validate tag against main + id: check + uses: Mad-Pixels/github-workflows/actions/branch-validator@v1 + with: + target_branch: main + tag_name: ${{ github.event.inputs.tag }} + + - name: Proceed with 
deployment + if: steps.check.outputs.is_valid == 'true' + run: | + echo "✅ Tag ${{ github.event.inputs.tag }} is reachable from main" + + - name: Stop if invalid + if: steps.check.outputs.is_valid != 'true' + run: | + echo "❌ Tag ${{ github.event.inputs.tag }} is NOT from main" + exit 1 diff --git a/actions/github-check-branch/readme.md b/actions/github-check-branch/readme.md new file mode 100644 index 0000000..203ced3 --- /dev/null +++ b/actions/github-check-branch/readme.md @@ -0,0 +1,60 @@ +# 🧬 Branch Validator +Verify that a tag or commit is reachable from a specified branch. + +## ✅ Features +- Checks if a commit/tag/HEAD is in the history of a target branch +- Supports explicit commit SHA (`commit_sha`) and tag (`tag_name`) +- Optional soft mode via `fail_on_invalid: 'false'` + +## 📖 Related Documentation +- [Git merge-base Documentation](https://git-scm.com/docs/git-merge-base) + +## 🚀 Prerequisites +Your workflow must: +- Run on a runner with Git installed (default `ubuntu-latest` meets this) + +## 🔧 Quick Example +```yaml +name: Validate Tag Origin + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to validate' + required: true + type: string + +jobs: + validate-branch: + runs-on: ubuntu-latest + steps: + - name: Check if tag is from main + uses: Mad-Pixels/github-workflows/actions/branch-validator@v1 + with: + target_branch: main + tag_name: ${{ github.event.inputs.tag }} +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|-------------------|--------------|--------------------------------------------------------------|-------------| +| `target_branch` | ❌ No | Branch to validate against (e.g., `main`, `release/v1`) | `main` | +| `tag_name` | ❌ No | Tag name to validate; if empty, validates current `HEAD` | ` ` | +| `commit_sha` | ❌ No | Explicit commit SHA to validate (overrides `tag_name/HEAD`) | ` ` | +| `fail_on_invalid` | ❌ No | Fail the action if not reachable ('true'/'false') | ` ` | +| `show_summary` | ❌ No | Print summary with task output in job summary | `true` | +| `summary_limit` | ❌ No | Max number of output lines to show in summary | `250` | + +## 📤 Outputs +| **Name** | **Description** | +|-----------------|------------------------------------------------------------------------------| +| `is_valid` | `true` if commit/tag is reachable from target branch, else `false` | +| `commit` | The validated commit SHA | +| `subject` | What was validated (`HEAD:`, `tag:`, or `commit:`) | +| `target_branch` | The branch used for validation | +| `merge_base` | Common ancestor SHA (only set when validation fails and histories intersect) | + +## 📋 Examples +[View example →](./examples/base.yml) + diff --git a/actions/github-create-tag/action.yml b/actions/github-create-tag/action.yml index 21c2536..0416a64 100644 --- a/actions/github-create-tag/action.yml +++ b/actions/github-create-tag/action.yml @@ -1,13 +1,13 @@ --- name: 'Tag Creator' -description: 'Create and manage git tags with validation' +description: 'Create git tags with validation' inputs: tag: - description: 'Tag to create (e.g., v1.0.0)' + description: 'Tag to create' required: true token: - description: 'GitHub token for authentication' + description: 'GitHub token' required: true force: description: 'Force overwrite existing tag' @@ -28,6 +28,14 @@ inputs: description: 'Create lightweight tag (ignores message)' required: false default: 'false' + show_summary: + description: 'Print summary in the job summary' + required: false + default: 'true' + summary_limit: + 
description: 'Max number of message lines to show in summary' + required: false + default: '250' outputs: tag_sha: @@ -46,12 +54,21 @@ runs: - name: Export inputs as env shell: bash run: | - echo "TAG_NAME=${{ inputs.tag }}" >> $GITHUB_ENV - echo "BRANCH_NAME=${{ inputs.branch }}" >> $GITHUB_ENV - echo "FORCE_CREATE=${{ inputs.force }}" >> $GITHUB_ENV - echo "LIGHTWEIGHT=${{ inputs.lightweight }}" >> $GITHUB_ENV - echo "TAG_MESSAGE=${{ inputs.message }}" >> $GITHUB_ENV - echo "TAG_FORMAT=${{ inputs.tag_format }}" >> $GITHUB_ENV + set -euo pipefail + + echo "TAG_NAME=${{ inputs.tag }}" >> "$GITHUB_ENV" + echo "BRANCH_NAME=${{ inputs.branch }}" >> "$GITHUB_ENV" + echo "FORCE_CREATE=${{ inputs.force }}" >> "$GITHUB_ENV" + echo "LIGHTWEIGHT=${{ inputs.lightweight }}" >> "$GITHUB_ENV" + echo "TAG_FORMAT=${{ inputs.tag_format }}" >> "$GITHUB_ENV" + + DELIM="MSG_$(date +%s%N)" + { + echo "TAG_MESSAGE<<$DELIM" + printf "%s" "${{ inputs.message }}" + echo + echo "$DELIM" + } >> "$GITHUB_ENV" - name: Validate inputs shell: bash @@ -63,14 +80,14 @@ runs: exit 1 fi - if [[ ! "$TAG_NAME" =~ $TAG_FORMAT ]]; then - echo "❌ Invalid tag format: $TAG_NAME" - echo "Expected: $TAG_FORMAT" + if [ -z "$TAG_NAME" ]; then + echo "❌ Tag name cannot be empty" exit 1 fi - if [ -z "$TAG_NAME" ]; then - echo "❌ Tag name cannot be empty" + if [[ ! "$TAG_NAME" =~ $TAG_FORMAT ]]; then + echo "❌ Invalid tag format: $TAG_NAME" + echo "Expected: $TAG_FORMAT" exit 1 fi @@ -79,8 +96,8 @@ runs: - name: Checkout uses: actions/checkout@v4 with: - ref: ${{ inputs.branch }} fetch-depth: 0 + ref: ${{ inputs.branch }} token: ${{ inputs.token }} - name: Verify branch @@ -89,7 +106,7 @@ runs: set -euo pipefail echo "Verifying branch $BRANCH_NAME" - if ! git ls-remote --heads origin "$BRANCH_NAME" | grep -q "$BRANCH_NAME"; then + if ! git ls-remote --heads origin "refs/heads/$BRANCH_NAME" | grep -q "refs/heads/${BRANCH_NAME}$"; then echo "❌ Branch $BRANCH_NAME not found on remote" exit 1 fi @@ -117,6 +134,8 @@ runs: - name: Configure Git user shell: bash run: | + set -euo pipefail + git config user.name "${{ github.actor }}" git config user.email "${{ github.actor }}@users.noreply.github.com" @@ -137,15 +156,23 @@ runs: if [ "$TAG_EXISTS" = "true" ]; then echo "🗑️ Deleting existing tag..." - git tag -d "$TAG_NAME" || true - git push origin ":refs/tags/$TAG_NAME" || true + + if git rev-parse "$TAG_NAME" >/dev/null 2>&1; then + git tag -d "$TAG_NAME" + echo "✅ Local tag deleted" + fi + + if git ls-remote --tags origin | grep -q "refs/tags/$TAG_NAME$"; then + git push origin ":refs/tags/$TAG_NAME" + echo "✅ Remote tag deleted" + fi fi if [ "$LIGHTWEIGHT" = "true" ]; then git tag "$TAG_NAME" echo "✅ Created lightweight tag" - elif [ -n "$TAG_MESSAGE" ]; then - git tag -a "$TAG_NAME" -m "$TAG_MESSAGE" + elif [ -n "${TAG_MESSAGE// /}" ]; then + printf "%s" "$TAG_MESSAGE" | git tag -a "$TAG_NAME" -F - echo "✅ Created annotated tag with message" else git tag -a "$TAG_NAME" -m "Release $TAG_NAME" @@ -159,6 +186,7 @@ runs: shell: bash run: | set -euo pipefail + echo "⏳ Verifying remote tag exists..." 
ATTEMPTS=0 @@ -176,22 +204,37 @@ runs: fi - name: Summary + if: always() && inputs.show_summary == 'true' shell: bash + env: + LIMIT: ${{ inputs.summary_limit }} run: | - echo "## 🏷️ Tag Created" >> $GITHUB_STEP_SUMMARY - echo "- **Tag:** $TAG_NAME" >> $GITHUB_STEP_SUMMARY - echo "- **SHA:** ${{ steps.create_tag.outputs.tag_sha }}" >> $GITHUB_STEP_SUMMARY - echo "- **Branch:** $BRANCH_NAME" >> $GITHUB_STEP_SUMMARY - echo "- **Type:** $( - [ "$LIGHTWEIGHT" = "true" ] && echo "Lightweight" || echo "Annotated" - )" >> $GITHUB_STEP_SUMMARY - echo "- **Force:** $FORCE_CREATE" >> $GITHUB_STEP_SUMMARY - echo "- **Tag URL:** [View Tag](${{ steps.create_tag.outputs.tag_url }})" \ - >> $GITHUB_STEP_SUMMARY - - if [ -n "$TAG_MESSAGE" ]; then - echo "- **Message:** $TAG_MESSAGE" >> $GITHUB_STEP_SUMMARY + set -euo pipefail + + [[ "$LIMIT" =~ ^[0-9]+$ ]] || LIMIT="250" + + TYPE="Annotated" + if [ "${LIGHTWEIGHT}" = "true" ]; then + TYPE="Lightweight" fi - echo "" >> $GITHUB_STEP_SUMMARY - echo "🎉 Tagging complete!" >> $GITHUB_STEP_SUMMARY + { + echo "## 🏷️ Tag Created" + echo "- **Tag:** ${TAG_NAME}" + echo "- **SHA:** ${{ steps.create_tag.outputs.tag_sha }}" + echo "- **Branch:** ${BRANCH_NAME}" + echo "- **Type:** ${TYPE}" + echo "- **Force:** ${FORCE_CREATE}" + echo "- **Tag URL:** [View Tag](${{ steps.create_tag.outputs.tag_url }})" + + if [ -n "${TAG_MESSAGE// }" ]; then + echo "" + echo "### Message" + echo '```' + printf "%s\n" "${TAG_MESSAGE}" | awk -v lim="$LIMIT" 'NR<=lim{print}' + echo '```' + fi + + echo "" + echo "🎉 Tagging complete!" + } >> "$GITHUB_STEP_SUMMARY" diff --git a/actions/github-create-tag/examples/custom_branch.yml b/actions/github-create-tag/examples/custom_branch.yml new file mode 100644 index 0000000..c109906 --- /dev/null +++ b/actions/github-create-tag/examples/custom_branch.yml @@ -0,0 +1,33 @@ +--- +name: Create tag from branch + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to create' + required: true + type: string + branch: + description: 'Branch to tag from (must exist on remote)' + required: true + type: string + default: 'release/1.x' + +concurrency: + group: manual-tag-${{ inputs.tag }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + create-tag: + runs-on: ubuntu-latest + steps: + - name: Create tag from non-default branch + uses: Mad-Pixels/github-workflows/actions/github-create-tag@v1 + with: + tag: ${{ inputs.tag }} + branch: ${{ inputs.branch }} + token: ${{ secrets.PAT_TOKEN }} diff --git a/actions/github-create-tag/examples/custom_format.yml b/actions/github-create-tag/examples/custom_format.yml new file mode 100644 index 0000000..1239d77 --- /dev/null +++ b/actions/github-create-tag/examples/custom_format.yml @@ -0,0 +1,28 @@ +--- +name: Create tag (custom regex) + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag (e.g., v2025.08.10-rc1)' + required: true + type: string + +concurrency: + group: manual-tag-${{ inputs.tag }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + create-tag: + runs-on: ubuntu-latest + steps: + - name: Create tag with custom format + uses: Mad-Pixels/github-workflows/actions/github-create-tag@v1 + with: + tag: ${{ inputs.tag }} + tag_format: '^v[0-9]{4}\.[0-9]{2}\.[0-9]{2}(-[A-Za-z0-9._-]+)?$' + token: ${{ secrets.PAT_TOKEN }} diff --git a/actions/github-create-tag/examples/lightweight.yml b/actions/github-create-tag/examples/lightweight.yml new file mode 100644 index 0000000..d9246f9 --- /dev/null +++ b/actions/github-create-tag/examples/lightweight.yml @@ 
-0,0 +1,28 @@ +--- +name: Create tag (lightweight) + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to create (e.g., v1.0.0)' + required: true + type: string + +concurrency: + group: manual-tag-${{ inputs.tag }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + create-tag: + runs-on: ubuntu-latest + steps: + - name: Create lightweight tag + uses: Mad-Pixels/github-workflows/actions/github-create-tag@v1 + with: + tag: ${{ inputs.tag }} + lightweight: 'true' + token: ${{ secrets.PAT_TOKEN }} diff --git a/actions/github-create-tag/examples/overwrite.yml b/actions/github-create-tag/examples/overwrite.yml new file mode 100644 index 0000000..7a682fa --- /dev/null +++ b/actions/github-create-tag/examples/overwrite.yml @@ -0,0 +1,33 @@ +--- +name: Create/Replace tag + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to create or replace' + required: true + type: string + force: + description: 'Overwrite existing tag' + required: false + type: boolean + default: true + +concurrency: + group: manual-tag-${{ inputs.tag }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + create-tag: + runs-on: ubuntu-latest + steps: + - name: Force create/replace tag + uses: Mad-Pixels/github-workflows/actions/github-create-tag@v1 + with: + tag: ${{ inputs.tag }} + force: ${{ inputs.force }} + token: ${{ secrets.PAT_TOKEN }} diff --git a/actions/github-create-tag/examples/with_message.yml b/actions/github-create-tag/examples/with_message.yml new file mode 100644 index 0000000..dda198d --- /dev/null +++ b/actions/github-create-tag/examples/with_message.yml @@ -0,0 +1,32 @@ +--- +name: Create annotated tag (multi-line message) + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to create' + required: true + type: string + +concurrency: + group: manual-tag-${{ inputs.tag }} + cancel-in-progress: true + +permissions: + contents: write + +jobs: + create-tag: + runs-on: ubuntu-latest + steps: + - name: Create annotated tag with release notes + uses: Mad-Pixels/github-workflows/actions/github-create-tag@v1 + with: + tag: ${{ inputs.tag }} + message: | + Highlights: + - Faster startup + - Memory p95: -30% + - Bugfix: #123, #124 + token: ${{ secrets.PAT_TOKEN }} diff --git a/actions/github-create-tag/readme.md b/actions/github-create-tag/readme.md new file mode 100644 index 0000000..6f708c7 --- /dev/null +++ b/actions/github-create-tag/readme.md @@ -0,0 +1,76 @@ +# 🏷️ Tag Creator +Create tags with validation. 
+ +## ✅ Features +- Validates tag format using customizable regex +- Supports annotated and lightweight tags +- Can overwrite existing tags with `force: true` +- Creates tags from any branch +- Optional custom tag message +- Outputs tag SHA, existence flag, and URL +- Verifies the tag on the remote after push (with retries) + +## 📖 Related Documentation +- [Git Tag Documentation](https://git-scm.com/book/en/v2/Git-Basics-Tagging) + +## 🚀 Prerequisites +Your workflow must: +- Run on a runner with Git installed (default `ubuntu-latest` meets this) +- Provide a token with `contents: write` permission to push tags +```yaml +permissions: + contents: write +``` + +## 🔧 Quick Example +```yaml +name: Create Release Tag + +on: + workflow_dispatch: + inputs: + tag: + description: 'Tag to create (e.g., v1.2.3)' + required: true + +jobs: + tag: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - name: Create Git Tag + uses: Mad-Pixels/github-workflows/actions/tag-creator@v1 + with: + tag: ${{ github.event.inputs.tag }} + token: ${{ secrets.GITHUB_TOKEN }} + message: "Release ${{ github.event.inputs.tag }}" +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|----------------|--------------|---------------------------------------------------------------------------|-----------------------------------| +| `tag` | ✅ Yes | Tag to create (e.g., v1.2.3) | - | +| `token` | ✅ Yes | GitHub token or PAT with `contents: write` permissions | - | +| `force` | ❌ No | Overwrite existing tag if it exists (`true`/`false`) | `false` | +| `branch` | ❌ No | Branch to tag from | `main` | +| `tag_format` | ❌ No | Regex to validate tag format | `^v[0-9]+\.[0-9]+\.[0-9]+(-.*)?$` | +| `message` | ❌ No | Message for annotated tag (ignored for lightweight tags) | - | +| `lightweight` | ❌ No | Create lightweight tag (overrides message) (`true`/`false`) | `false` | +| `show_summary` | ❌ No | Print summary with task output in job summary | `true` | +| `summary_limit`| ❌ No | Max number of output lines to show in summary | `250` | + +## 📤 Outputs +| **Name** | **Description** | +|--------------|--------------------------------------------------| +| `tag_sha` | SHA of the commit the tag points to | +| `tag_exists` | Whether the tag already existed before creation | +| `tag_url` | GitHub URL to view the created tag | + +## 📋 Examples +[lightweight →](./examples/lightweight.yml) +[create/replace tag →](./examples/overwrite.yml) +[custom regex →](./examples/custom_format.yml) +[non-default branch →](./examples/custom_branch.yml) +[with message →](./examples/with_message.yml) + diff --git a/actions/taskfile-runner/action.yml b/actions/taskfile-runner/action.yml index 8390fe6..3227945 100644 --- a/actions/taskfile-runner/action.yml +++ b/actions/taskfile-runner/action.yml @@ -1,22 +1,41 @@ --- name: Task Runner -description: Run Taskfile task using specified version and env variables +description: Invoke Taskfile commands inputs: command: - description: 'Task command to run (e.g. "build" or "lint")' + description: 'Task command to run for invoke' required: true vars: - description: 'Optional environment variables in format key:value,key:value' + description: 'Envs for command, format: (key=value,key=value)' required: false dir: - description: 'Working directory inside the project' + description: 'Working directory' required: false default: '.' 
version: - description: 'Task binary version to use' + description: 'Taskfile version' required: false default: '3.44.1' + show_summary: + description: 'Print summary with task output in job summary' + required: false + default: 'true' + summary_limit: + description: 'Max number of output lines to show in summary' + required: false + default: '250' + +outputs: + task_version: + description: 'Installed Task version' + value: ${{ steps.install.outputs.version }} + task_output: + description: 'Task command output' + value: ${{ steps.run.outputs.output }} + task_command: + description: 'Executed task command' + value: ${{ inputs.command }} runs: using: composite @@ -24,51 +43,177 @@ runs: - name: Checkout repository uses: actions/checkout@v4 - - name: Detect architecture and install Task binary + - name: Validate Taskfile exists shell: bash + working-directory: ${{ inputs.dir }} run: | set -euo pipefail + if [[ ! -f "Taskfile.yml" && ! -f "Taskfile.yaml" ]]; then + echo "❌ No Taskfile found in ${{ inputs.dir }}" + echo "Expected: Taskfile.yml or Taskfile.yaml" + exit 1 + fi + echo "✅ Taskfile found" + - name: Detect architecture + id: arch + shell: bash + run: | + set -euo pipefail ARCH=$(uname -m) case "$ARCH" in x86_64) ARCH="amd64" ;; - aarch64) ARCH="arm64" ;; + aarch64|arm64) ARCH="arm64" ;; *) echo "❌ Unsupported architecture: $ARCH" && exit 1 ;; esac + echo "arch=$ARCH" >> "$GITHUB_OUTPUT" + echo "Using arch: $ARCH" + + - name: Cache Task binary + id: cache + uses: actions/cache@v4 + with: + path: ~/.cache/task + key: task-${{ inputs.version }}-${{ steps.arch.outputs.arch }} + restore-keys: | + task-${{ inputs.version }}- + task- + - name: Download Task binary + if: steps.cache.outputs.cache-hit != 'true' + shell: bash + run: | + set -euo pipefail VERSION="${{ inputs.version }}" + ARCH="${{ steps.arch.outputs.arch }}" echo "⬇️ Downloading Task v$VERSION for $ARCH..." - curl -sL "https://github.com/go-task/task/releases/download/v${VERSION}/task_linux_${ARCH}.tar.gz" \ - | tar -xz -C /tmp + TMPDIR="$(mktemp -d)"; trap 'rm -rf "$TMPDIR"' EXIT + mkdir -p "$HOME/.cache/task" + + BASE="https://github.com/go-task/task/releases/download" + FILE="task_linux_${ARCH}.tar.gz" + URL="${BASE}/v${VERSION}/${FILE}" + curl -fsSLo "$TMPDIR/task.tgz" "$URL" + tar -xzf "$TMPDIR/task.tgz" -C "$TMPDIR" - sudo mv /tmp/task /usr/local/bin/task - sudo chmod +x /usr/local/bin/task + TASK_BIN="$(find "$TMPDIR" -maxdepth 3 -type f -name task -perm -111 -print -quit || true)" + if [[ -z "$TASK_BIN" ]]; then + echo "❌ Task binary not found in archive" + exit 1 + fi - echo "✅ Installed task binary:" - task --version + install -m 0755 "$TASK_BIN" "$HOME/.cache/task/task-$VERSION-$ARCH" + echo "✅ Downloaded to $HOME/.cache/task/task-$VERSION-$ARCH" + + - name: Install Task binary + id: install + shell: bash + run: | + set -euo pipefail + VERSION="${{ inputs.version }}" + ARCH="${{ steps.arch.outputs.arch }}" + SRC="$HOME/.cache/task/task-$VERSION-$ARCH" + DEST="$HOME/.local/bin/task" + + if [[ ! 
-x "$SRC" ]]; then + echo "❌ Task binary not found or not executable: $SRC" + exit 1 + fi + + mkdir -p "$(dirname "$DEST")" + install -m 0755 "$SRC" "$DEST" + echo "$HOME/.local/bin" >> "$GITHUB_PATH" + + VER_RAW="$("$DEST" --version 2>&1 | head -n1 | tr -d '\r')" + VER="$(printf '%s\n' "$VER_RAW" | grep -Eo 'v?[0-9]+(\.[0-9]+)+' | head -n1 || true)" + [[ -z "$VER" ]] && VER="$VER_RAW" + [[ "$VER" != v* ]] && VER="v$VER" + + echo "✅ Installed task $VER" + echo "version=$VER" >> "$GITHUB_OUTPUT" - name: Export environment variables if: ${{ inputs.vars != '' }} shell: bash run: | - echo "🌐 Exporting env vars:" - echo "${{ inputs.vars }}" | tr ',' '\n' | while IFS=: read -r key value; do - echo " - $key=$value" - echo "$key=$value" >> $GITHUB_ENV + set -euo pipefail + IFS=',' read -ra VARS <<< "${{ inputs.vars }}" + for var in "${VARS[@]}"; do + pair="$(echo "$var" | xargs)" + [[ -z "$pair" ]] && continue + key="${pair%%=*}" + value="${pair#*=}" + if [[ "$value" == "$key" ]]; then + echo "❌ Invalid var format: $pair (missing '=' separator)" + exit 1 + fi + if [[ -z "$key" ]]; then + echo "❌ Invalid var (empty key): $pair" + exit 1 + fi + delim="ENV_${key}_$RANDOM$RANDOM" + { + echo "$key<<$delim" + printf "%s\n" "$value" + echo "$delim" + } >> "$GITHUB_ENV" done - name: Run Taskfile command + id: run shell: bash working-directory: ${{ inputs.dir }} run: | + set -euo pipefail echo "🚀 Running task ${{ inputs.command }}" - task ${{ inputs.command }} + + set +e + OUTPUT="$(task ${{ inputs.command }} 2>&1)" + EXIT_CODE=$? + set -e + + printf "%s\n" "$OUTPUT" + + DELIM="__TASK_OUTPUT_$(date +%s%N)__" + { + echo "output<<$DELIM" + printf "%s\n" "$OUTPUT" + echo "$DELIM" + } >> "$GITHUB_OUTPUT" + + if [[ $EXIT_CODE -ne 0 ]]; then + echo "❌ Task failed with exit code $EXIT_CODE" + exit $EXIT_CODE + fi + + echo "✅ Task completed successfully" - name: Task Runner Summary + if: inputs.show_summary != 'false' shell: bash run: | - echo "## 🧰 Task Runner Summary" >> "$GITHUB_STEP_SUMMARY" - echo "- Command: \`${{ inputs.command }}\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Directory: \`${{ inputs.dir }}\`" >> "$GITHUB_STEP_SUMMARY" - echo "- Task version: \`${{ inputs.version }}\`" >> "$GITHUB_STEP_SUMMARY" + set -euo pipefail + OUT="${{ steps.run.outputs.output }}" + LIMIT="${{ inputs.summary_limit }}" + if ! 
[[ "$LIMIT" =~ ^[0-9]+$ ]]; then LIMIT=250; fi + + { + echo "## 🧰 Task Runner Summary" + echo "- Command: \`${{ inputs.command }}\`" + echo "- Directory: \`${{ inputs.dir }}\`" + echo "- Task version: \`${{ steps.install.outputs.version }}\`" + + if [ -n "$OUT" ]; then + echo "" + TOTAL=$(printf "%s\n" "$OUT" | wc -l | awk '{print $1}') + if [ "$TOTAL" -le "$LIMIT" ]; then + echo "### 🧾 Task output" + else + echo "### 🧾 Task output (first ${LIMIT} lines of ${TOTAL})" + fi + echo '```' + printf "%s\n" "$OUT" | awk -v lim="$LIMIT" 'NR<=lim{print}' + echo '```' + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/actions/taskfile-runner/examples/base.yml b/actions/taskfile-runner/examples/base.yml new file mode 100644 index 0000000..4ddda80 --- /dev/null +++ b/actions/taskfile-runner/examples/base.yml @@ -0,0 +1,28 @@ +--- +name: Invoke Taskfile commands + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - id: task + uses: Mad-Pixels/github-workflows/actions/taskfile-runner@v1 + with: + command: build + dir: ./app + # Comma-separated key=value pairs + # Values may include '=' and newlines, but **not commas** + vars: | + NODE_ENV=production,VERSION=1.2.3,NOTES=Line1\nLine2 + + - name: Use outputs + run: | + echo "Task version: ${{ steps.task.outputs.task_version }}" + echo "Task output:" + printf "%s\n" "${{ steps.task.outputs.task_output }}" diff --git a/actions/taskfile-runner/readme.md b/actions/taskfile-runner/readme.md new file mode 100644 index 0000000..c17d6fc --- /dev/null +++ b/actions/taskfile-runner/readme.md @@ -0,0 +1,53 @@ +# 🧬 Taskfile Runner +Invoke [Taskfile](https://taskfile.dev/) commands. + +## ✅ Features +- Linux runners only (`ubuntu-latest`), amd64 and arm64 +- Auto-installs `Taskfile` for a specified version (cached per version+arch) +- Supports custom working directory and environment variables +- Captures full stdout/stderr and exposes it via outputs + +## 📖 Related Documentation +- [📋 Taskfile Documentation](https://taskfile.dev/) +- [💡 Usage concept](../../../Concept.md) + +## 🚀 Prerequisites +Your repository must contain: +- `Taskfile.yml` or `Taskfile.yaml` in the directory you run the action from +- The tasks you plan to invoke + +## 🔧 Quick Example +```yaml +name: CI Pipeline +on: [push, pull_request] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - name: Run tests + uses: Mad-Pixels/github-workflows/actions/taskfile-runner@v1 + with: + command: "test" +``` + +## 📥 Inputs +| **Name** | **Required** | **Description** | **Default** | +|-----------------|--------------|----------------------------------------------------------------------------------------------------------------------------------------------|-------------| +| `command` | ✅ Yes | Name of the task to run (e.g. build, test, lint) | - | +| `vars` | ❌ No | Comma-separated key=value pairs. Values may contain = and newlines; commas are not allowed. 
Leading/trailing spaces around pairs are trimmed | - | +| `dir` | ❌ No | Working directory for the Taskfile | `.` | +| `version` | ❌ No | Version of go-task to install | `3.44.1` | +| `show_summary` | ❌ No | Print summary with task output in job summary | `true` | +| `summary_limit` | ❌ No | Max number of output lines to show in summary | `250` | + +## 📤 Outputs +| **Name** | **Description** | +|----------------|------------------------------------| +| `task_version` | Installed Task version | +| `task_command` | Task command | +| `task_output` | Complete output from task command | + +## 📋 Examples +[View example →](./examples/base.yml) + diff --git a/examples/actions/aws-cloudfront-invalidation.md b/examples/actions/aws-cloudfront-invalidation.md deleted file mode 100644 index 25824ae..0000000 --- a/examples/actions/aws-cloudfront-invalidation.md +++ /dev/null @@ -1,44 +0,0 @@ -# 🧬 CloudFront Invalidation · GitHub Composite Action - -This composite GitHub Action creates CloudFront invalidations to clear cache for updated content. - -## ✅ Features -- Uses AWS CLI to create CloudFront invalidations -- Supports custom paths or wildcard invalidation -- Auto-generates unique caller reference -- Simple and fast cache clearing for static sites - -## 🔧 Usage Example -```yaml -name: Deploy Static Site - -on: - push: - branches: [main] - -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - name: Deploy to S3 - # ... your S3 sync step ... - - - name: Invalidate CloudFront - uses: "Mad-Pixels/github-workflows/.github/actions/aws-cloudfront-invalidation@main" - with: - aws_access_key: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws_region: "us-east-1" - distribution_id: ${{ secrets.CLOUDFRONT_DISTRIBUTION_ID }} - paths: "/*" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------------|--------------|---------------------------------------------------| -| `aws_access_key_id` | ✅ Yes | AWS access key ID | -| `aws_secret_access_key` | ✅ Yes | AWS secret access key | -| `aws_region` | ✅ Yes | AWS region (e.g. us-east-1) | -| `distribution_id` | ✅ Yes | CloudFront distribution ID | -| `paths` | ❌ No | Paths to invalidate (default: "/*") | -| `caller_reference` | ❌ No | Unique reference (auto-generated if not provided) | diff --git a/examples/actions/aws-ecr-push.md b/examples/actions/aws-ecr-push.md deleted file mode 100644 index 214665f..0000000 --- a/examples/actions/aws-ecr-push.md +++ /dev/null @@ -1,42 +0,0 @@ -# 🧬 ECR Push · GitHub Composite Action - -This composite GitHub Action authenticates to AWS ECR and pushes a local Docker image to the specified ECR repository. - -## ✅ Features -- Authenticates with AWS ECR using provided credentials -- Supports automatic tagging and pushing of Docker images -- Simple and reusable for deployment workflows - -## 🔧 Usage Example -```yaml -name: Push Docker Image to ECR - -on: - push: - branches: [main] - -jobs: - push-ecr: - runs-on: ubuntu-latest - steps: - - name: Build Docker image - run: docker build -t my-service:latest . 
- - - name: Push to ECR - uses: "Mad-Pixels/github-workflows/.github/actions/aws-ecr-push@main" - with: - aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws_region: "us-east-1" - aws_account_id: "123456789012" - image: "my-service:latest" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------------|--------------|---------------------------------------------------| -| `aws_access_key_id` | ✅ Yes | AWS access key ID | -| `aws_secret_access_key` | ✅ Yes | AWS secret access key | -| `aws_region` | ✅ Yes | AWS region (e.g. us-east-1) | -| `aws_account_id` | ✅ Yes | AWS Account ID (used to construct ECR image URI) | -| `image` | ✅ Yes | Local image name with tag (e.g. my-repo:latest) | \ No newline at end of file diff --git a/examples/actions/aws-lambda-restart.md b/examples/actions/aws-lambda-restart.md deleted file mode 100644 index 8b838be..0000000 --- a/examples/actions/aws-lambda-restart.md +++ /dev/null @@ -1,40 +0,0 @@ -# 🧬 Lambda Restart · GitHub Composite Action - -This composite GitHub Action updates an AWS Lambda function with the latest container image from ECR. - -## ✅ Features -- Uses AWS CLI to update Lambda with ECR image -- Automatically waits for function update to complete -- Supports per-function suffix/tag image mapping -- Simple and fast container-based Lambda deploys - -## 🔧 Usage Example -```yaml -name: Restart Lambda Function - -on: - workflow_dispatch: - -jobs: - restart-lambda: - runs-on: ubuntu-latest - steps: - - uses: "Mad-Pixels/github-workflows/.github/actions/aws-lambda-restart@main" - with: - aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws_region: "us-east-1" - aws_account_id: "123456789012" - repository: "my-repo" - function: "my-func" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------------|--------------|---------------------------------------------------| -| `aws_access_key_id` | ✅ Yes | AWS access key ID | -| `aws_secret_access_key` | ✅ Yes | AWS secret access key | -| `aws_region` | ✅ Yes | AWS region (e.g. us-east-1) | -| `aws_account_id` | ✅ Yes | AWS Account ID (used to construct ECR image URI) | -| `repository` | ✅ Yes | Name of your ECR repository | -| `function` | ✅ Yes | Lambda function suffix | diff --git a/examples/actions/aws-s3-sync.md b/examples/actions/aws-s3-sync.md deleted file mode 100644 index 747c457..0000000 --- a/examples/actions/aws-s3-sync.md +++ /dev/null @@ -1,83 +0,0 @@ -# 🧬 S3 Sync · GitHub Composite Action - -This GitHub composite action uploads a local directory to an AWS S3 bucket using aws s3 sync. It supports optional path prefixing and cleans up removed files with --delete. 
- -## ✅ Features -- Upload any local folder to S3 -- AWS credentials via inputs -- Optional bucket_prefix -- Excludes common .git files -- Cleans up deleted files with --delete - -## 🔧 Usage Example -```yaml -name: Upload Static Files - -on: - push: - branches: [main] - -jobs: - sync: - runs-on: ubuntu-latest - steps: - - name: Checkout code (if needed) - uses: actions/checkout@v4 - - - name: Upload to S3 - uses: "Mad-Pixels/github-workflows/.github/actions/aws-s3-sync@main" - with: - aws_access_key: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws_region: us-east-1 - bucket_name: my-app-assets - source_dir: ./public - bucket_prefix: static/ -``` - -```yaml -jobs: - build: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v4 - - - name: Build site - run: npm run build - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: build-files - path: dist/ - - deploy: - needs: build - runs-on: ubuntu-latest - steps: - - name: Download artifacts - uses: actions/download-artifact@v4 - with: - name: build-files - path: build-output - - - name: Upload to S3 - uses: "Mad-Pixels/github-workflows/.github/actions/aws-s3-sync@main" - with: - aws_access_key: ${{ secrets.AWS_ACCESS_KEY }} - aws_secret_key: ${{ secrets.AWS_SECRET_KEY }} - aws_region: ${{ secrets.AWS_REGION }} - bucket_name: ${{ secrets.BUCKET_NAME }} - source_dir: "build-output" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------------|--------------|--------------------------------------------| -| `aws_access_key` | ✅ Yes | AWS access key ID | -| `aws_secret_key` | ✅ Yes | AWS secret access key | -| `aws_region` | ✅ Yes | AWS region (e.g. us-east-1) | -| `bucket_name` | ✅ Yes | Name of the S3 bucket | -| `source_dir` | ✅ Yes | Local directory to sync | -| `bucket_prefix` | ❌ No | Optional path inside bucket (e.g. static/) | diff --git a/examples/actions/dockerhub-build-push.md b/examples/actions/dockerhub-build-push.md deleted file mode 100644 index d64aaa8..0000000 --- a/examples/actions/dockerhub-build-push.md +++ /dev/null @@ -1,47 +0,0 @@ -# 🧬 DockerHub Push · GitHub Composite Action - -This composite GitHub Action builds and pushes multi-platform Docker images to DockerHub with support for artifacts and custom build arguments. 
- -## ✅ Features - -Multi-platform image building (linux/amd64, linux/arm64) -Support for build arguments via JSON -Works with artifacts or direct repository checkout -Automatic Docker Buildx setup with QEMU emulation -Custom Dockerfile and context path support - -## 🔧 Usage Example -```yaml -name: Build and Push to DockerHub - -on: - push: - branches: [main] - -jobs: - build-and-push: - runs-on: ubuntu-latest - steps: - - name: Push to DockerHub - uses: "Mad-Pixels/github-workflows/.github/actions/dockerhub-push@main" - with: - docker_user: ${{ secrets.DOCKER_USER }} - docker_token: ${{ secrets.DOCKER_TOKEN }} - repository: "myuser/myapp" - tag: "v1.0.0" - platforms: "linux/amd64,linux/arm64" - build_args: '{"NODE_ENV": "production", "VERSION": "1.0.0"}' -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------|--------------|---------------------------------------------------| -| `docker_user` | ✅ Yes | DockerHub username | -| `docker_token` | ✅ Yes | DockerHub access token | -| `repository` | ✅ Yes | DockerHub repository name (username/repository) | -| `tag` | ✅ Yes | Docker image tag | -| `platforms` | ❌ No | Comma-separated list of platforms | -| `build_args` | ❌ No | JSON object with build arguments | -| `artifact_name` | ❌ No | Artifact name to download (if using artifacts) | -| `context_path` | ❌ No | Docker build context path | -| `dockerfile_path` | ❌ No | Path to Dockerfile relative to context | \ No newline at end of file diff --git a/examples/actions/github-check-commit.md b/examples/actions/github-check-commit.md deleted file mode 100644 index b469da0..0000000 --- a/examples/actions/github-check-commit.md +++ /dev/null @@ -1,45 +0,0 @@ -# 🧬 Is Commit · GitHub Composite Action - -This composite GitHub Action determines if a push event is a normal commit (proceed=true) or the result of a merged/squashed pull request (proceed=false) by inspecting GitHub API data associated with the current commit SHA. 
- -## ✅ Features -- Detects whether a commit was made directly or as part of a PR merge/squash -- Outputs proceed=true only for direct commits -- Supports fallback detection via GitHub Search API -- Useful for conditionally skipping workflows on PR merges - -## 🔧 Usage Example -```yaml -name: Check Commit Type - -on: - push: - branches: [main] - -jobs: - check-commit: - runs-on: ubuntu-latest - outputs: - proceed: ${{ steps.commit-check.outputs.proceed }} - steps: - - name: Check if it's a plain commit - id: commit-check - uses: "Mad-Pixels/github-workflows/.github/actions/github-check-commit@main" - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - - - name: Proceed only on plain commits - if: steps.commit-check.outputs.proceed == 'true' - run: echo "This is a normal commit — continue pipeline" -``` - -## 🔐 Required Secrets -| **Name** | **Description** | -|------------------|---------------------------------------------| -| `GITHUB_TOKEN` | Automatically provide gy GitHub (no action) | - -## 📤 Outputs -| **Name** | **Description** | -|--------------|-------------------------------------------------------------------------| -| `proceed` | 'true' if this is a direct commit, 'false' if part of a PR merge/squash | -| `pr_numbers` | Comma-separated list of PR numbers touching the commit (if any) | \ No newline at end of file diff --git a/examples/actions/golang-lint.md b/examples/actions/golang-lint.md deleted file mode 100644 index 4c4eb78..0000000 --- a/examples/actions/golang-lint.md +++ /dev/null @@ -1,33 +0,0 @@ -# 🧬 GolangCI Lint · GitHub Composite Action - -This composite GitHub Action runs golangci-lint against your Go project with configurable version and working directory. - -## ✅ Features -- Automatically sets up Go environment -- Installs and runs golangci-lint with configurable version -- Supports custom working directory -- Fast and CI-friendly with timeout built-in - -## 🔧 Usage Example -```yaml -name: Lint Go Code - -on: - push: - branches: [main] - -jobs: - lint: - uses: "Mad-Pixels/github-workflows/.github/actions/golang-lint@main" - with: - go_dir: ./ - go_version: "1.21" - golangci_lint_version: "v1.56.2" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------------|--------------|---------------------------------------------------------| -| `golangci_lint_version` | ❌ No | Version of golangci-lint to install (default: "v2.1.2") | -| `go_version` | ❌ No | Go version to install (default: "1.24") | -| `go_dir` | ❌ No | Directory with Go code (default: "./") | \ No newline at end of file diff --git a/examples/actions/taskfile-runner.md b/examples/actions/taskfile-runner.md deleted file mode 100644 index f42f2cc..0000000 --- a/examples/actions/taskfile-runner.md +++ /dev/null @@ -1,38 +0,0 @@ -# 🧬 Task Runner · GitHub Composite Action - -This composite GitHub Action runs a task command using a Taskfile, optionally setting up Go and exporting environment variables. 
- -## ✅ Features -- Runs any task target -- Automatically installs go-task -- Optionally sets up Go environment -- Supports custom working directory -- Allows passing dynamic environment variables - -## 🔧 Usage Example -```yaml -name: Run Taskfile Command - -on: - push: - branches: [main] - -jobs: - task: - runs-on: ubuntu-latest - steps: - - uses: "Mad-Pixels/github-workflows/.github/actions/taskfile-runner@main" - with: - go_dir: "./" - go_version: "1.22" - command: "build" - vars: "ENV:production,DEBUG:false" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|--------------|--------------|-----------------------------------------------------------------------------| -| `command` | ✅ Yes | Name of the task to run (e.g. build, deploy) | -| `go_version` | ❌ No | Version of Go to set up (default: 1.24) | -| `go_dir` | ❌ No | Directory to run the task from (default: ./) | -| `vars` | ❌ No | Comma-separated key:value pairs to export as env variables before task runs | \ No newline at end of file diff --git a/examples/actions/terraform-fmt.md b/examples/actions/terraform-fmt.md deleted file mode 100644 index 97b5137..0000000 --- a/examples/actions/terraform-fmt.md +++ /dev/null @@ -1,31 +0,0 @@ -# 🧬 Terraform Format Check · GitHub Composite Action - -This composite GitHub Action checks Terraform code formatting using terraform fmt -check -diff -recursive. - -## ✅ Features -- Validates Terraform formatting recursively -- Uses the specified Terraform version -- Can be reused across multiple repositories or workflows - -## 🔧 Usage Example -```yaml -name: Check Terraform Formatting - -on: - pull_request: - paths: - - '**.tf' - -jobs: - fmt: - uses: "Mad-Pixels/github-workflows/.github/actions/terraform-fmt@main" - with: - tf_dir: "infra" - tf_version: "1.6.1" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------------|--------------|------------------------------------| -| `tf_dir` | ❌ No | Path to Terraform directory | -| `tf_version` | ❌ No | Terraform version (default: 1.6.1) | \ No newline at end of file diff --git a/examples/actions/terreform-runner.md b/examples/actions/terreform-runner.md deleted file mode 100644 index c2624e5..0000000 --- a/examples/actions/terreform-runner.md +++ /dev/null @@ -1,54 +0,0 @@ -# 🧬 Terraform Runner · GitHub Composite Action - -This GitHub composite action provides a standardized way to run Terraform commands (plan, apply, destroy) with S3 remote backend, AWS credentials, and optional workspace support. 
- -✅ Features -- Built-in AWS credentials support -- Remote backend configuration with backend_aws.hcl -- Workspace selection and creation -- Supports additional -var flags -- Terraform version is configurable - -## 🔧 Usage Example -```yaml -name: Deploy Infra - -on: - push: - branches: [main] - -jobs: - terraform: - runs-on: ubuntu-latest - steps: - - uses: "Mad-Pixels/github-workflows/.github/actions/terraform-fmt@main" - with: - aws_access_key_id: ${{ secrets.AWS_ACCESS_KEY_ID }} - aws_secret_access_key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - aws_region: "us-east-1" - - tf_dir: "./infra" - tf_workspace: "prod" - tf_command: "apply" - tf_vars: "-var-file=prod.tfvars" - tf_version: "1.6.1" - - backend_bucket: "your-terraform-state" - backend_key: "envs/prod/terraform.tfstate" - backend_region: "us-east-1" -``` - -## 📥 Inputs -| **Name** | **Required** | **Description** | -|-------------------------|--------------|---------------------------------------------------| -| `aws_access_key_id` | ✅ Yes | AWS access key ID | -| `aws_secret_access_key` | ✅ Yes | AWS secret access key | -| `aws_region` | ✅ Yes | AWS region (e.g. us-east-1) | -| `backend_bucket` | ✅ Yes | S3 bucket name used for remote state | -| `backend_key` | ✅ Yes | S3 object key for the .tfstate file | -| `backend_region` | ✅ Yes | Bucket AWS region (e.g. us-east-1) | -| `tf_dir` | ✅ Yes | Path to Terraform directory | -| `tf_command` | ✅ Yes | Terraform command: plan, apply, or destroy | -| `tf_workspace` | ❌ No | Terraform workspace to select/create | -| `tf_vars` | ❌ No | Additional CLI flags (e.g. -var-file=prod.tfvars) | -| `tf_version` | ❌ No | Terraform version (default: 1.6.1) | \ No newline at end of file diff --git a/examples/flows/nodejs/.github/workflows/.check.yml b/examples/flows/nodejs/.github/workflows/.check.yml deleted file mode 100644 index 98aae61..0000000 --- a/examples/flows/nodejs/.github/workflows/.check.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: Checks - -on: - workflow_call: - -jobs: - eslint: - name: ESLint - runs-on: ${{ vars.RUNS_ON }} - steps: - - name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/taskfile-runner@main - with: - command: node/lint - - prettier: - name: Prettier - runs-on: ${{ vars.RUNS_ON }} - steps: - - name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/taskfile-runner@main - with: - command: node/format - - audit: - name: Audit - runs-on: ${{ vars.RUNS_ON }} - steps: - - name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/taskfile-runner@main - with: - command: node/audit - - type-check: - name: TypeCheck - runs-on: ${{ vars.RUNS_ON }} - steps: - - name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/taskfile-runner@main - with: - command: node/type-check - diff --git a/examples/flows/nodejs/.github/workflows/.terraform.yml b/examples/flows/nodejs/.github/workflows/.terraform.yml deleted file mode 100644 index b6a87c1..0000000 --- a/examples/flows/nodejs/.github/workflows/.terraform.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Terraform - -on: - workflow_call: - inputs: - tf_command: - type: string - - secrets: - aws_secret_key: - required: true - aws_access_key: - required: true - aws_region: - required: true - - aws_backend_bucket: - required: true - aws_backend_region: - required: true - aws_backend_key: - required: true - - acm_crt: - required: true - bucket_name: - required: true - -jobs: - tf-command: - name: Tf Command - runs-on: ${{ vars.RUNS_ON }} - - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - 
name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/terraform-runner@main - with: - aws_access_key_id: ${{ secrets.aws_access_key }} - aws_secret_access_key: ${{ secrets.aws_secret_key }} - aws_region: ${{ secrets.aws_region }} - - backend_bucket: ${{ secrets.aws_backend_bucket }} - backend_key: ${{ secrets.aws_backend_key }} - backend_region: ${{ secrets.aws_backend_region }} - - tf_dir: terraform - tf_command: ${{ inputs.tf_command }} - tf_vars: >- - -var acm_crt=${{ secrets.acm_crt }} - -var bucket_name=${{ secrets.bucket_name }} diff --git a/examples/flows/nodejs/.github/workflows/commit.yml b/examples/flows/nodejs/.github/workflows/commit.yml deleted file mode 100644 index f586403..0000000 --- a/examples/flows/nodejs/.github/workflows/commit.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Commit - -on: - push: - branches-ignore: - - main - -concurrency: - group: commit-${{ github.ref }} - cancel-in-progress: true - -jobs: - checks: - name: Commit Checks - uses: ./.github/workflows/.checks.yml - - commit-check: - name: Commit Check - needs: checks - runs-on: ${{ vars.RUNS_ON }} - steps: - - name: All checks passed - run: | - echo "✅ All Commit checks completed successfully!" diff --git a/examples/flows/nodejs/.github/workflows/release.yml b/examples/flows/nodejs/.github/workflows/release.yml deleted file mode 100644 index a9a3019..0000000 --- a/examples/flows/nodejs/.github/workflows/release.yml +++ /dev/null @@ -1,87 +0,0 @@ -name: Release - -on: - push: - tags: - - 'v*' - -concurrency: - group: release-tag-${{ github.ref_name }} - cancel-in-progress: true - -jobs: - check-main-branch: - name: Verify tag from main branch - runs-on: ${{ vars.RUNS_ON }} - steps: - - name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/github-check-branch@main - with: - tag_name: ${{ github.ref_name }} - - apply: - needs: check-main-branch - uses: ./.github/workflows/.terraform.yml - name: Apply - with: - tf_command: "apply" - secrets: - aws_access_key: ${{ secrets.AWS_ACCESS_KEY }} - aws_secret_key: ${{ secrets.AWS_SECRET_KEY }} - aws_region: ${{ secrets.AWS_REGION }} - - aws_backend_bucket: ${{ secrets.AWS_BACKEND_BUCKET }} - aws_backend_region: ${{ secrets.AWS_BACKEND_REGION }} - aws_backend_key: ${{ secrets.AWS_BACKEND_KEY }} - - acm_crt: ${{ secrets.ACM_CRT }} - bucket_name: ${{ secrets.SITE_BUCKET_NAME }} - - build: - needs: check-main-branch - runs-on: ${{ vars.RUNS_ON }} - name: Build - steps: - - name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/taskfile-runner@main - with: - command: node/build - - name: Upload build artifacts - uses: actions/upload-artifact@v4 - with: - name: site - path: site/dist/ - retention-days: 7 - - deploy: - needs: [build, apply] - runs-on: ${{ vars.RUNS_ON }} - name: Deploy - steps: - - name: Download build artifacts - uses: actions/download-artifact@v4 - with: - name: site - path: build-output - - - name: Invoke - uses: Mad-Pixels/github-workflows/.github/actions/aws-s3-sync@main - with: - aws_access_key: ${{ secrets.AWS_ACCESS_KEY }} - aws_secret_key: ${{ secrets.AWS_SECRET_KEY }} - aws_region: ${{ secrets.AWS_REGION }} - - bucket_name: "site-personal-${{ secrets.SITE_BUCKET_NAME }}" - source_dir: "build-output" - - - name: Invalidate cache - uses: Mad-Pixels/github-workflows/.github/actions/aws-cloudfront-invalidation@main - with: - aws_access_key: ${{ secrets.AWS_ACCESS_KEY }} - aws_secret_key: ${{ secrets.AWS_SECRET_KEY }} - aws_region: ${{ secrets.AWS_REGION }} - distribution_id: ${{ secrets.CLOUDFRONT_DISTRIBUTION_ID }} - - 
paths: "/*" - caller_reference: "release-${{ github.ref_name }}" - diff --git a/examples/flows/nodejs/Taskfile.yml b/examples/flows/nodejs/Taskfile.yml deleted file mode 100644 index 7f89799..0000000 --- a/examples/flows/nodejs/Taskfile.yml +++ /dev/null @@ -1,128 +0,0 @@ -version: '3' - -vars: - git_root: - sh: git rev-parse --show-toplevel - - node_version: "23" # setup default node version - dev_port: 3000 # setup default port for start dev env - -tasks: - default: - desc: Default task. - cmds: - - echo "Please enter a task or use '-l' or '--list-all' to list all available tasks" - silent: true - -# ================================================# -# ---------------------INTERNAL-------------------# -# ================================================# - - _docker/run: - desc: Internal task to run secure container. - internal: true - dir: "{{.git_root}}" - cmd: | - docker run --rm --init {{if .TTY}}-it{{end}} \ - --cap-drop=ALL \ - --security-opt no-new-privileges \ - --user $(id -u):$(id -g) \ - --workdir /workspace \ - {{if .ENVS}}{{range $env := .ENVS}}--env {{$env}} {{end}}{{end}}\ - {{if .PORTS}}{{range $port := .PORTS}}--publish {{$port}} {{end}}{{end}}\ - --volume "{{.git_root}}/{{.MOUNT_DIR}}:/workspace:rw" \ - {{.IMAGE}} \ - {{.CMD}} - silent: true - requires: - vars: [IMAGE, CMD, MOUNT_DIR] - -# ================================================# -# ----------------------PUBLIC--------------------# -# ================================================# - - dev: - desc: Run dev on {{ .dev_port }}. - deps: - - _image/prepare - cmds: - - task: _docker/run - vars: - IMAGE: "node:{{.node_version}}" - CMD: "sh -c 'npm ci && npm run dev -- --host 0.0.0.0 --port {{ .dev_port }}'" - MOUNT_DIR: "." - PORTS: - - "{{ .dev_port }}:{{ .dev_port }}" - ENVS: - - "NPM_CONFIG_CACHE=/workspace/.cache" - - "NPM_CONFIG_UPDATE_NOTIFIER=false" - TTY: "true" - -# ================================================# -# ----------------------CI/CD---------------------# -# ================================================# - - node/lint: - desc: Run ESLint. - cmds: - - task: _docker/run - vars: - IMAGE: "node:{{.node_version}}" - CMD: "sh -c 'npm ci && npx eslint .'" - MOUNT_DIR: "." - ENVS: - - "NPM_CONFIG_CACHE=/workspace/.cache" - - "NPM_CONFIG_UPDATE_NOTIFIER=false" - silent: true - - node/format: - desc: Run Prettier. - cmds: - - task: _docker/run - vars: - IMAGE: "node:{{.node_version}}" - CMD: "sh -c 'npm ci && npx prettier --check src/'" - MOUNT_DIR: "." - ENVS: - - "NPM_CONFIG_CACHE=/workspace/.cache" - - "NPM_CONFIG_UPDATE_NOTIFIER=false" - silent: true - - node/audit: - desc: Run dependencies audit. - cmds: - - task: _docker/run - vars: - IMAGE: "node:{{.node_version}}" - CMD: "sh -c 'npm ci && npm audit --audit-level high'" - MOUNT_DIR: "." - ENVS: - - "NPM_CONFIG_CACHE=/workspace/.cache" - - "NPM_CONFIG_UPDATE_NOTIFIER=false" - silent: true - - node/type-check: - desc: TypeScript type checking. - cmds: - - task: _docker/run - vars: - IMAGE: "node:{{.node_version}}" - CMD: "sh -c 'npm ci && npm run type-check'" - MOUNT_DIR: "." - ENVS: - - "NPM_CONFIG_CACHE=/workspace/.cache" - - "NPM_CONFIG_UPDATE_NOTIFIER=false" - silent: true - - node/build: - desc: Build production version. - cmds: - - task: _docker/run - vars: - IMAGE: "node:{{.node_version}}" - CMD: "sh -c 'npm install && npm run build'" - MOUNT_DIR: "." 
-          ENVS:
-            - "NPM_CONFIG_CACHE=/workspace/.cache"
-            - "NPM_CONFIG_UPDATE_NOTIFIER=false"
-    silent: true
diff --git a/internal/aws-auth/action.yml b/internal/aws-auth/action.yml
index 9b61442..6977209 100644
--- a/internal/aws-auth/action.yml
+++ b/internal/aws-auth/action.yml
@@ -63,4 +63,5 @@ runs:
         exit 1
       }
       echo "✅ AWS authentication successful"
-      echo "::endgroup::"
\ No newline at end of file
+      echo "::endgroup::"
+
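
Note on the `vars` format documented in the new taskfile-runner readme above: the hunks shown here do not include the step that actually parses those pairs, so the following is only a minimal bash sketch of how comma-separated `KEY=value` pairs could be split, trimmed, and exported before invoking `task`. The sample values and the `task build` call are illustrative assumptions, not the action's actual implementation.

```bash
#!/usr/bin/env bash
# Hypothetical sketch only — not the action's real parsing code.
set -euo pipefail

# Input in the documented format: comma-separated KEY=value pairs;
# values may contain '=' but not commas.
VARS='NODE_ENV=production, VERSION=1.2.3,NOTES=Line1\nLine2'

IFS=',' read -r -a PAIRS <<< "$VARS"
for pair in "${PAIRS[@]}"; do
  # Trim leading/trailing whitespace around each pair.
  pair="${pair#"${pair%%[![:space:]]*}"}"
  pair="${pair%"${pair##*[![:space:]]}"}"
  if [[ -z "$pair" ]]; then
    continue
  fi
  key="${pair%%=*}"    # text before the first '='
  value="${pair#*=}"   # text after the first '=' (may itself contain '=')
  export "$key=$value"
done

task build   # exported variables are now visible to the Taskfile
```

Escape sequences such as `\n` in values are passed through literally in this sketch; whether the action expands them is not shown in this diff.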