# AWS Backup Feature Discovery workflow (tracked in issue #34).
# Scheduled weekly scan of the AWS provider's Backup resources, comparing
# them against this module and filing issues for gaps.
---
name: AWS Backup Feature Discovery

on:
  schedule:
    # Run weekly on Sundays at 00:00 UTC
    - cron: '0 0 * * 0'
  workflow_dispatch:
    inputs:
      provider_version:
        description: 'AWS Provider version to check (default: latest)'
        required: false
        default: 'latest'
        type: string
        # NOTE: workflow_dispatch inputs do not support a 'pattern' key in the
        # Actions schema; the semver-or-"latest" format is enforced by the
        # "Validate inputs" step inside the job instead.
      dry_run:
        description: 'Run analysis without creating issues'
        required: false
        default: false
        type: boolean
      force_scan:
        description: 'Force full scan even if no changes detected'
        required: false
        default: false
        type: boolean
jobs:
  discover-backup-features:
    runs-on: ubuntu-latest
    timeout-minutes: 15
    permissions:
      contents: write        # commit/push tracker-update branches
      issues: write          # create feature-request issues
      pull-requests: write   # open tracker-update PRs
      actions: read
      id-token: write
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          # Shallow clone is enough: we only diff/commit the tracker directory
          fetch-depth: 1
- name: Create feature tracker directory
run: |
mkdir -p .github/feature-tracker
# Create initial tracker file if it doesn't exist
if [ ! -f .github/feature-tracker/backup-features.json ]; then
cat > .github/feature-tracker/backup-features.json << 'EOF'
{
"last_scan": "1970-01-01T00:00:00Z",
"provider_version": "0.0.0",
"scan_history": [],
"features": {},
"issues_created": []
}
EOF
echo "Created initial feature tracker file"
fi
- name: Validate inputs
run: |
# Validate provider_version format
PROVIDER_VERSION="${{ inputs.provider_version || 'latest' }}"
if [[ ! "$PROVIDER_VERSION" =~ ^(latest|[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9\-]+)?(\+[a-zA-Z0-9\-]+)?)$ ]]; then
echo "::error::Invalid provider_version format: $PROVIDER_VERSION"
echo "Must be 'latest' or semantic version (e.g., '5.82.0')"
exit 1
fi
echo "Provider version validation passed: $PROVIDER_VERSION"
- name: Environment Diagnostics & Cache Clearing
run: |
echo "=== Environment Snapshot ==="
echo "Node.js: $(node --version)"
echo "NPM: $(npm --version)"
echo "NPX: $(npx --version)"
echo "Runner: $(uname -a)"
echo "Disk space: $(df -h /)"
echo "Memory: $(free -h)"
echo "Network test: $(curl -s -o /dev/null -w '%{http_code}' https://registry.npmjs.org/)"
echo "=== Cache Clearing ==="
echo "Clearing NPM caches..."
npm cache clean --force || echo "npm cache clean failed"
rm -rf ~/.npm/_cacache ~/.npm/_logs || echo "npm cache dir cleanup failed"
echo "Clearing NPX cache..."
rm -rf ~/.npm/_npx || echo "npx cache cleanup failed"
echo "=== Testing MCP Server Access ==="
echo "Testing Terraform MCP server Docker image availability..."
docker pull hashicorp/terraform-mcp-server:latest --quiet || echo "Docker image check completed"
echo "Testing Context7 MCP server package access..."
npm view @upstash/context7-mcp version || echo "Package lookup failed"
echo "=== Pre-installation Test ==="
echo "Testing NPX with verbose output..."
npx --version
echo "Environment diagnostics complete!"
- name: Pre-Discovery Verification
id: pre-verification
env:
GITHUB_TOKEN: ${{ secrets.CLAUDE_ISSUE_TOKEN }}
run: |
echo "🔍 Pre-discovery verification checks..."
# Check GitHub token permissions (skip auth status check)
echo "Verifying GitHub token permissions..."
echo "✅ GitHub token configured"
# Test issue creation capability
echo "Testing GitHub CLI issue operations..."
gh issue list --limit 1 > /dev/null || echo "⚠️ Issue operations may fail"
# Verify MCP server accessibility
echo "Testing Docker availability for Terraform MCP server..."
docker --version
echo "Testing NPX availability for Context7 MCP server..."
npx --version
# Verify tracker file state
echo "Current feature tracker state:"
if [ -f .github/feature-tracker/backup-features.json ]; then
echo "✅ Feature tracker exists"
PENDING_COUNT=$(jq '.issues_created[]? | select(.status == "pending_creation") | length' .github/feature-tracker/backup-features.json 2>/dev/null | wc -l || echo "0")
echo "Pending creation entries: $PENDING_COUNT"
else
echo "⚠️ Feature tracker will be created"
fi
echo "✅ Pre-verification complete"
- name: Run Claude Code Feature Discovery
id: claude-discovery
uses: anthropics/claude-code-action@beta
env:
CLAUDE_TOKEN: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
GITHUB_TOKEN: ${{ secrets.CLAUDE_ISSUE_TOKEN }}
with:
claude_code_oauth_token: ${{ env.CLAUDE_TOKEN }}
mode: agent
github_token: ${{ secrets.CLAUDE_ISSUE_TOKEN }}
# MCP Configuration for Terraform and Context7 documentation access
mcp_config: |
{
"mcpServers": {
"terraform": {
"command": "docker",
"args": [
"run",
"-i",
"--rm",
"hashicorp/terraform-mcp-server"
]
},
"context7": {
"command": "npx",
"args": [
"-y",
"@upstash/context7-mcp@latest"
]
}
}
}
# Allow necessary tools for feature discovery
allowed_tools: |
mcp__terraform__search_providers
mcp__terraform__get_provider_details
mcp__terraform__get_latest_provider_version
mcp__terraform__search_modules
mcp__terraform__get_module_details
mcp__terraform__get_latest_module_version
mcp__terraform__search_policies
mcp__terraform__get_policy_details
mcp__context7__resolve-library-id
mcp__context7__get-library-docs
Bash(git diff)
Bash(git status)
Bash(gh issue create)
Bash(gh issue list)
Bash(jq)
Bash(cat)
Bash(echo)
# Direct prompt for Claude Code to perform feature discovery
direct_prompt: |
# AWS Backup Feature Discovery Analysis
You are performing automated feature discovery for the terraform-aws-backup module.
## Objective
Analyze the latest AWS provider Backup resources and compare them with the current module implementation to identify:
1. **New Features**: AWS Backup resources/arguments not yet implemented
2. **Deprecations**: Features marked as deprecated in the provider
3. **Bug Fixes**: Important fixes mentioned in provider changelogs
## Configuration
- Provider Version: ${{ inputs.provider_version || 'latest' }}
- Dry Run Mode: ${{ inputs.dry_run }}
- Force Scan: ${{ inputs.force_scan }}
## Process
### Step 1: Load Current State
Read the feature tracking database:
```bash
cat .github/feature-tracker/backup-features.json
```
### Step 2: Fetch AWS Provider Backup Documentation
Use the Terraform MCP server to get the latest Backup documentation:
1. Use `mcp__terraform-server__resolveProviderDocID` with:
- providerName: "aws"
- providerNamespace: "hashicorp"
- serviceSlug: "backup"
- providerVersion: "${{ inputs.provider_version || 'latest' }}"
- providerDataType: "resources"
2. Get documentation for all Backup resources (aws_backup_*)
3. Also check data sources with providerDataType: "data-sources"
### Step 3: Analyze Current Module Implementation
Examine these files to understand current implementation:
- `main.tf` - Primary backup resources and locals
- `iam.tf` - IAM roles and policies
- `notifications.tf` - SNS and notification configurations
- `organizations.tf` - AWS Organizations backup policies
- `selection.tf` - Resource selection logic
- `reports.tf` - Backup reporting configurations
- `audit_manager.tf` - Audit framework configurations
- `variables.tf` - Input variables
- `outputs.tf` - Module outputs
Create an inventory of:
- Implemented resources (aws_backup_vault, aws_backup_plan, etc.)
- Implemented arguments/attributes on each resource
- Configuration patterns used in examples (16 examples available)
### Step 4: Comparison and Analysis
Compare provider documentation with module implementation:
**New Features to Look For:**
- New `aws_backup_*` resources not in the module
- New arguments on existing resources (vault, plan, selection, etc.)
- New data sources (`data.aws_backup_*`)
- New backup lifecycle and retention features
- New compliance and audit capabilities
- New cross-region and cross-account features
- New reporting and monitoring capabilities
- New organization-level backup policies
- New VSS (Volume Shadow Copy Service) features
**Deprecations to Check:**
- Arguments marked as deprecated
- Resources marked for removal
- Backup patterns no longer recommended
- Configuration approaches that are outdated
**Bug Fixes:**
- Check Context7 for AWS provider changelogs
- Look for Backup-related fixes that might affect the module
### Step 5: Generate Structured Output for Issue Creation
**CRITICAL: DO NOT execute gh issue create commands directly.**
Instead, create a structured JSON file for the post-process step to handle:
```bash
cat > /tmp/discovered-features.json << 'EOF'
{
"scan_metadata": {
"scan_date": "$(date -u '+%Y-%m-%dT%H:%M:%SZ')",
"provider_version": "${{ inputs.provider_version || 'latest' }}",
"dry_run": ${{ inputs.dry_run || 'false' }},
"force_scan": ${{ inputs.force_scan || 'false' }}
},
"discovered_features": [
// For each new resource discovered:
{
"type": "new_resource",
"resource_name": "[EXACT_RESOURCE_NAME]",
"provider_doc_id": "[DOC_ID]",
"description": "[RESOURCE_DESCRIPTION]",
"arguments": ["arg1", "arg2", "arg3"],
"priority": "high|medium|low",
"security_impact": "[SECURITY_DESCRIPTION]",
"issue_title": "feat: Add support for [EXACT_RESOURCE_NAME]",
"terraform_registry_url": "https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/[resource_name_without_aws_prefix]"
}
// For each new argument discovered:
{
"type": "new_argument",
"resource_name": "[EXISTING_RESOURCE_NAME]",
"argument_name": "[ARGUMENT_NAME]",
"description": "[ARGUMENT_DESCRIPTION]",
"priority": "high|medium|low",
"implementation_impact": "[IMPACT_DESCRIPTION]",
"issue_title": "feat: Add [ARGUMENT_NAME] support to [RESOURCE_NAME]"
}
],
"deprecations_found": [],
"bug_fixes_found": []
}
EOF
```
### Step 6: Update Feature Tracker
Update `.github/feature-tracker/backup-features.json` with:
- Current scan timestamp
- Provider version analyzed
- New findings (mark as "pending_creation")
- Scan summary
### Step 7: Generate Summary Report
Create a comprehensive summary including:
- Features discovered: count and details
- Deprecations found: count and impact
- Issues to be created: count and priorities
- Recommendations for next steps
## Important Notes
- Skip features already tracked as "implemented"
- Check existing GitHub issues to avoid duplicates
- Prioritize compliance, security, and data protection changes
- Focus on Backup-specific features (ignore general AWS provider changes)
- Pay special attention to vault, plan, selection, and audit configurations
- Consider cross-region backup and disaster recovery improvements
- Look for cost optimization and lifecycle management enhancements
## Expected Output
1. Structured JSON file with discovered features
2. Updated feature tracker file
3. Detailed analysis report
- name: Post-Discovery Verification
id: post-verification
if: steps.claude-discovery.conclusion != 'failure'
run: |
echo "🔍 Post-discovery verification..."
# Check what Claude Code actually produced
echo "Checking for structured output file..."
if [ -f "/tmp/discovered-features.json" ]; then
echo "✅ Structured output file exists"
echo "File size: $(wc -c < /tmp/discovered-features.json) bytes"
# Validate JSON
if jq empty /tmp/discovered-features.json 2>/dev/null; then
echo "✅ Valid JSON structure"
FEATURE_COUNT=$(jq '.discovered_features | length' /tmp/discovered-features.json)
echo "Features discovered: $FEATURE_COUNT"
else
echo "❌ Invalid JSON structure"
echo "Content preview:"
head -20 /tmp/discovered-features.json
fi
else
echo "⚠️ No structured output file found"
echo "Checking for temp files:"
ls -la /tmp/ | grep -E "(discovered|feature|backup)" || echo "No related temp files"
fi
# Check tracker file updates
echo "Checking feature tracker updates..."
if [ -f ".github/feature-tracker/backup-features.json" ]; then
LAST_SCAN=$(jq -r '.metadata.last_scan // "unknown"' .github/feature-tracker/backup-features.json)
PENDING_COUNT=$(jq '[.issues_created[]? | select(.status == "pending_creation")] | length' .github/feature-tracker/backup-features.json)
echo "Last scan: $LAST_SCAN"
echo "Pending creation entries: $PENDING_COUNT"
fi
echo "✅ Post-verification complete"
- name: Create GitHub Issues from Structured Output
id: create-issues-from-json
if: steps.claude-discovery.conclusion == 'success' && inputs.dry_run != true
env:
GITHUB_TOKEN: ${{ secrets.CLAUDE_ISSUE_TOKEN }}
run: |
set -euo pipefail
echo "🔍 Processing discovered features for issue creation..."
DISCOVERED_FILE="/tmp/discovered-features.json"
TRACKER_FILE=".github/feature-tracker/backup-features.json"
TOTAL_ISSUES_CREATED=0
# Check if structured output exists
if [ ! -f "$DISCOVERED_FILE" ]; then
echo "⚠️ No structured output found at $DISCOVERED_FILE"
echo "Checking for pending_creation entries in tracker file..."
if [ ! -f "$TRACKER_FILE" ]; then
echo "Feature tracker file not found, skipping post-processing"
echo "issues_created=0" >> $GITHUB_OUTPUT
exit 0
fi
# Fallback: Extract pending creation features from tracker
PENDING_FEATURES=$(jq -r '.issues_created[]? | select(.status == "pending_creation") | @base64' "$TRACKER_FILE" 2>/dev/null || echo "")
if [ -z "$PENDING_FEATURES" ]; then
echo "✅ No features with pending_creation status found"
echo "issues_created=0" >> $GITHUB_OUTPUT
exit 0
fi
echo "📝 Found features with pending_creation status. Creating issues..."
# Process pending features from tracker
while IFS= read -r feature_data; do
if [ -n "$feature_data" ]; then
# Decode base64 and extract fields
FEATURE_JSON=$(echo "$feature_data" | base64 --decode)
RESOURCE=$(echo "$FEATURE_JSON" | jq -r '.resource')
TITLE=$(echo "$FEATURE_JSON" | jq -r '.title')
ISSUE_TYPE=$(echo "$FEATURE_JSON" | jq -r '.issue_type // "new-feature"')
echo "Creating issue for: $RESOURCE"
# Create the issue
ISSUE_URL=$(gh issue create \
--title "$TITLE" \
--body "## AWS Backup Feature Request
### Resource
**AWS Resource:** \`$RESOURCE\`
**Provider Version:** ${{ inputs.provider_version || 'latest' }}
**Discovery Date:** $(date -u '+%Y-%m-%d')
### Auto-Discovery Details
This feature was automatically discovered by the AWS Backup Feature Discovery workflow.
**Discovery Metadata:**
- Scan Date: $(date -u '+%Y-%m-%d %H:%M:%S UTC')
- Workflow Run: ${{ github.run_id }}
- Repository: ${{ github.repository }}
### Next Steps
1. Review AWS provider documentation for this resource
2. Analyze integration requirements with existing module
3. Design implementation approach
4. Add comprehensive tests
5. Update documentation and examples
### Implementation Priority
This feature requires evaluation for:
- Security and compliance impact
- Backward compatibility
- Module architecture integration
---
*Auto-generated by AWS Backup Feature Discovery Bot*" \
--label "enhancement,aws-backup,features,terraform" \
--assignee "lgallard")
# Extract issue number from URL
ISSUE_NUMBER=$(echo "$ISSUE_URL" | grep -o '[0-9]*$')
echo "✅ Created issue #$ISSUE_NUMBER for $RESOURCE: $ISSUE_URL"
TOTAL_ISSUES_CREATED=$((TOTAL_ISSUES_CREATED + 1))
# Update the tracker file to mark as created
jq --arg resource "$RESOURCE" --arg issue_num "$ISSUE_NUMBER" --arg issue_url "$ISSUE_URL" '
(.issues_created[] | select(.resource == $resource)) |= (
.status = "created" |
.issue_number = ($issue_num | tonumber) |
.issue_url = $issue_url |
.actual_creation_date = now | strftime("%Y-%m-%dT%H:%M:%SZ")
)' "$TRACKER_FILE" > "${TRACKER_FILE}.tmp" && mv "${TRACKER_FILE}.tmp" "$TRACKER_FILE"
fi
done <<< "$PENDING_FEATURES"
echo "🎯 Fallback processing complete: Created $TOTAL_ISSUES_CREATED issues"
echo "issues_created=$TOTAL_ISSUES_CREATED" >> $GITHUB_OUTPUT
exit 0
fi
# Process structured JSON output
echo "📋 Processing structured output from Claude Code..."
# Validate JSON structure
if ! jq empty "$DISCOVERED_FILE" 2>/dev/null; then
echo "❌ Invalid JSON in discovered features file"
exit 1
fi
# Extract metadata
SCAN_DATE=$(jq -r '.scan_metadata.scan_date // "unknown"' "$DISCOVERED_FILE")
PROVIDER_VERSION=$(jq -r '.scan_metadata.provider_version // "latest"' "$DISCOVERED_FILE")
FEATURE_COUNT=$(jq '.discovered_features | length' "$DISCOVERED_FILE")
echo "Scan metadata: $SCAN_DATE, Provider: $PROVIDER_VERSION, Features: $FEATURE_COUNT"
# Initialize issue counter (will accumulate from both structured and fallback paths)
TOTAL_ISSUES_CREATED=0
# Process each discovered feature
jq -r '.discovered_features[] | @base64' "$DISCOVERED_FILE" | while IFS= read -r feature_data; do
if [ -n "$feature_data" ]; then
FEATURE_JSON=$(echo "$feature_data" | base64 --decode)
FEATURE_TYPE=$(echo "$FEATURE_JSON" | jq -r '.type')
RESOURCE_NAME=$(echo "$FEATURE_JSON" | jq -r '.resource_name')
ISSUE_TITLE=$(echo "$FEATURE_JSON" | jq -r '.issue_title')
PRIORITY=$(echo "$FEATURE_JSON" | jq -r '.priority // "medium"')
echo "Creating issue for $FEATURE_TYPE: $RESOURCE_NAME"
# Build issue body based on type
if [ "$FEATURE_TYPE" = "new_resource" ]; then
DESCRIPTION=$(echo "$FEATURE_JSON" | jq -r '.description // "AWS Backup resource"')
SECURITY_IMPACT=$(echo "$FEATURE_JSON" | jq -r '.security_impact // "To be evaluated"')
ARGUMENTS=$(echo "$FEATURE_JSON" | jq -r '.arguments[]? // empty' | tr '\n' ' ')
REGISTRY_URL=$(echo "$FEATURE_JSON" | jq -r '.terraform_registry_url // ""')
ISSUE_BODY="## New AWS Backup Resource Request
### Resource Details
**AWS Resource:** \`$RESOURCE_NAME\`
**Provider Version:** $PROVIDER_VERSION
**Discovery Date:** $(date -u '+%Y-%m-%d')
**Priority:** $PRIORITY
### Description
$DESCRIPTION
### Available Arguments
$ARGUMENTS
### Security Impact
$SECURITY_IMPACT
### References
- Terraform Registry: $REGISTRY_URL
### Auto-Discovery Metadata
- Discovered by: AWS Backup Feature Discovery Bot
- Scan Date: $SCAN_DATE
- Workflow Run: ${{ github.run_id }}
---
*Auto-generated by AWS Backup Feature Discovery Bot*"
elif [ "$FEATURE_TYPE" = "new_argument" ]; then
ARGUMENT_NAME=$(echo "$FEATURE_JSON" | jq -r '.argument_name')
DESCRIPTION=$(echo "$FEATURE_JSON" | jq -r '.description // "New argument"')
IMPACT=$(echo "$FEATURE_JSON" | jq -r '.implementation_impact // "To be evaluated"')
ISSUE_BODY="## New Argument Enhancement Request
### Enhancement Details
**Resource:** \`$RESOURCE_NAME\`
**New Argument:** \`$ARGUMENT_NAME\`
**Priority:** $PRIORITY
### Description
$DESCRIPTION
### Implementation Impact
$IMPACT
### Auto-Discovery Metadata
- Discovered by: AWS Backup Feature Discovery Bot
- Scan Date: $SCAN_DATE
- Workflow Run: ${{ github.run_id }}
---
*Auto-generated by AWS Backup Feature Discovery Bot*"
fi
# Create the GitHub issue
ISSUE_URL=$(gh issue create \
--title "$ISSUE_TITLE" \
--body "$ISSUE_BODY" \
--label "enhancement,aws-backup,features,terraform" \
--assignee "lgallard")
# Extract issue number
ISSUE_NUMBER=$(echo "$ISSUE_URL" | grep -o '[0-9]*$')
echo "✅ Created issue #$ISSUE_NUMBER: $ISSUE_URL"
TOTAL_ISSUES_CREATED=$((TOTAL_ISSUES_CREATED + 1))
fi
done
echo "🎯 Issue creation complete: Created $TOTAL_ISSUES_CREATED issues"
echo "issues_created=$TOTAL_ISSUES_CREATED" >> $GITHUB_OUTPUT
- name: Commit feature tracker updates
if: steps.claude-discovery.conclusion == 'success'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set -euo pipefail
# Atomic file operations with locking
LOCKFILE="/tmp/feature-tracker.lock"
TRACKER_FILE=".github/feature-tracker/backup-features.json"
TEMP_FILE="${TRACKER_FILE}.tmp"
# Get issues created count from previous step
ISSUES_CREATED="${{ steps.create-issues-from-json.outputs.issues_created || '0' }}"
# Acquire lock with timeout
exec 200>"$LOCKFILE"
if ! flock -w 30 200; then
echo "::error::Failed to acquire lock for feature tracker update"
exit 1
fi
# Check if there are changes to commit
if git diff --quiet .github/feature-tracker/; then
echo "No changes to feature tracker detected"
flock -u 200
exit 0
fi
# Only create PR if new issues were created (meaningful changes)
if [ "$ISSUES_CREATED" -eq 0 ]; then
echo "📊 Tracker updated with metadata only - skipping PR creation"
echo "ℹ️ Scan completed but no new features discovered"
echo "ℹ️ Updated metadata: $(date -u '+%Y-%m-%d %H:%M:%S UTC')"
flock -u 200
exit 0
fi
echo "🚀 Creating PR for tracker updates with $ISSUES_CREATED new issues"
# Validate JSON before committing
if [ -f "$TRACKER_FILE" ]; then
if ! python3 -m json.tool "$TRACKER_FILE" > /dev/null; then
echo "::error::Invalid JSON in feature tracker file"
flock -u 200
exit 1
fi
fi
# Configure git
git config --global user.name "AWS Backup Feature Discovery Bot"
git config --global user.email "actions@github.com"
# Atomic commit with validation
git add .github/feature-tracker/
git commit -m "chore: update AWS Backup feature discovery tracker
- Updated feature tracking database
- Scan completed: $(date -u '+%Y-%m-%d %H:%M:%S UTC')
- Provider version: ${{ inputs.provider_version || 'latest' }}
[skip ci]"
# Create branch and push for PR since master is protected
BRANCH_NAME="feature-discovery/tracker-update-$(date +%Y%m%d-%H%M%S)"
git checkout -b "$BRANCH_NAME"
git push origin "$BRANCH_NAME"
# Create pull request for tracker updates
gh pr create \
--title "chore: update AWS Backup feature discovery tracker" \
--body "Automated update of feature discovery tracker database.
**Scan Details:**
- Scan completed: $(date -u '+%Y-%m-%d %H:%M:%S UTC')
- Provider version: ${{ inputs.provider_version || 'latest' }}
- Workflow run: ${{ github.run_id }}
This PR contains automated updates to the feature tracking database and can be safely merged.
---
*Auto-generated by AWS Backup Feature Discovery workflow*" \
--label "aws-backup,ci-cd,configuration" \
--assignee "lgallard"
echo "Created PR for tracker updates on branch: $BRANCH_NAME"
# Release lock
flock -u 200
echo "Feature tracker updated successfully with atomic operations"
- name: Workflow Summary
if: always()
run: |
echo "## 🔍 AWS Backup Feature Discovery Summary" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Configuration
echo "### ⚙️ Configuration" >> $GITHUB_STEP_SUMMARY
echo "- **Provider Version**: \`${{ inputs.provider_version || 'latest' }}\`" >> $GITHUB_STEP_SUMMARY
echo "- **Dry Run Mode**: \`${{ inputs.dry_run }}\`" >> $GITHUB_STEP_SUMMARY
echo "- **Force Scan**: \`${{ inputs.force_scan }}\`" >> $GITHUB_STEP_SUMMARY
echo "- **Triggered**: \`${{ github.event_name }}\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# MCP Configuration
echo "### 🔗 MCP Servers" >> $GITHUB_STEP_SUMMARY
echo "- **Terraform MCP**: \`@modelcontextprotocol/server-terraform@latest\`" >> $GITHUB_STEP_SUMMARY
echo "- **Context7 MCP**: \`@upstash/context7-mcp@latest\`" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
# Status
echo "### 📊 Execution Status" >> $GITHUB_STEP_SUMMARY
if [ "${{ steps.claude-discovery.conclusion }}" = "success" ]; then
echo "- ✅ **Feature Discovery**: Completed successfully" >> $GITHUB_STEP_SUMMARY
else
echo "- ❌ **Feature Discovery**: Failed" >> $GITHUB_STEP_SUMMARY
fi
# Issue Creation Status
if [ "${{ inputs.dry_run }}" = "true" ]; then
echo "- 🧪 **Issue Creation**: Skipped (dry run mode)" >> $GITHUB_STEP_SUMMARY
elif [ "${{ steps.create-issues-from-json.conclusion }}" = "success" ]; then
ISSUES_COUNT="${{ steps.create-issues-from-json.outputs.issues_created || '0' }}"
if [ "$ISSUES_COUNT" -gt 0 ]; then
echo "- ✅ **Issue Creation**: Created $ISSUES_COUNT new issues" >> $GITHUB_STEP_SUMMARY
else
echo "- ✅ **Issue Creation**: No new issues needed" >> $GITHUB_STEP_SUMMARY
fi
elif [ "${{ steps.create-issues-from-json.conclusion }}" = "skipped" ]; then
echo "- ⏭️ **Issue Creation**: Skipped (no feature discovery or dry run)" >> $GITHUB_STEP_SUMMARY
else
echo "- ❌ **Issue Creation**: Failed or incomplete" >> $GITHUB_STEP_SUMMARY
fi
# Tracker Update Status
if [ "${{ steps.claude-discovery.conclusion }}" = "success" ]; then
ISSUES_COUNT="${{ steps.create-issues-from-json.outputs.issues_created || '0' }}"
if [ "$ISSUES_COUNT" -gt 0 ]; then
echo "- ✅ **Tracker Updates**: Created PR for database updates" >> $GITHUB_STEP_SUMMARY
else
echo "- 📊 **Tracker Updates**: Metadata updated (no PR needed)" >> $GITHUB_STEP_SUMMARY
fi
else
echo "- ⏭️ **Tracker Updates**: Skipped (discovery failed)" >> $GITHUB_STEP_SUMMARY
fi
echo "" >> $GITHUB_STEP_SUMMARY
# Available Commands
echo "### 🚀 Manual Execution" >> $GITHUB_STEP_SUMMARY
echo "Run feature discovery manually:" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`bash" >> $GITHUB_STEP_SUMMARY
echo "# Standard discovery" >> $GITHUB_STEP_SUMMARY
echo "gh workflow run feature-discovery.yml" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Dry run mode" >> $GITHUB_STEP_SUMMARY
echo "gh workflow run feature-discovery.yml -f dry_run=true" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
echo "# Specific provider version" >> $GITHUB_STEP_SUMMARY
echo "gh workflow run feature-discovery.yml -f provider_version=5.82.0" >> $GITHUB_STEP_SUMMARY
echo "\`\`\`" >> $GITHUB_STEP_SUMMARY