Jake Shore f3c4cd817b Add all MCP servers + factory infra to MCPEngine — 2026-02-06
=== NEW SERVERS ADDED (7) ===
- servers/closebot — 119 tools, 14 modules, 4,656 lines TS (Stage 7)
- servers/google-console — Google Search Console MCP (Stage 7)
- servers/meta-ads — Meta/Facebook Ads MCP (Stage 8)
- servers/twilio — Twilio communications MCP (Stage 8)
- servers/competitor-research — Competitive intel MCP (Stage 6)
- servers/n8n-apps — n8n workflow MCP apps (Stage 6)
- servers/reonomy — Commercial real estate MCP (Stage 1)

=== FACTORY INFRASTRUCTURE ADDED ===
- infra/factory-tools — mcp-jest, mcp-validator, mcp-add, MCP Inspector
  - 60 test configs, 702 auto-generated test cases
  - All 30 servers score 100/100 protocol compliance
- infra/command-center — Pipeline state, operator playbook, dashboard config
- infra/factory-reviews — Automated eval reports

=== DOCS ADDED ===
- docs/MCP-FACTORY.md — Factory overview
- docs/reports/ — 5 pipeline evaluation reports
- docs/research/ — Browser MCP research

=== RULES ESTABLISHED ===
- CONTRIBUTING.md — All MCP work MUST go in this repo
- README.md — Full inventory of 37 servers + infra docs
- .gitignore — Updated for Python venvs

TOTAL: 37 MCP servers + full factory pipeline in one repo.
This is now the single source of truth for all MCP work.

# Template for validating HTTP-based MCP servers
# Copy this file to your server's .github/workflows/ directory and customize as needed

name: MCP HTTP Protocol Validation

on:
  pull_request:
    branches: [ main, master ]  # Adjust branch names as needed
  workflow_dispatch:  # Allows manual triggering

# IMPORTANT: Before using this template:
# 1. Update SERVER_PATH below to point to your actual HTTP server file
# 2. Ensure your server implements a health check endpoint or responds to the root URL
# 3. Adjust environment variables and dependencies as needed for your server
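#
# For item 2 above, the health endpoint can be minimal. An illustrative sketch
# (it assumes FastAPI and uvicorn, which this template does not mandate; the
# only real requirements are accepting a --port flag and answering GET /health
# or GET /):
#
#   import argparse
#   import uvicorn
#   from fastapi import FastAPI
#
#   app = FastAPI()
#
#   @app.get("/health")
#   def health():
#       # Report liveness so the CI retry loop below can detect readiness
#       return {"status": "ok"}
#
#   if __name__ == "__main__":
#       parser = argparse.ArgumentParser()
#       parser.add_argument("--port", type=int, default=8088)
#       args = parser.parse_args()
#       uvicorn.run(app, host="127.0.0.1", port=args.port)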
jobs:
  validate-http:
    name: Validate HTTP MCP Server
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Test against multiple protocol versions, including the latest (2025-06-18)
        protocol-version: ["2025-03-26", "2025-06-18"]
        # Optionally test multiple Python versions (see the note after this block)
        # python-version: ['3.10', '3.11', '3.12']
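        # If you enable the python-version matrix above, also point the
        # "Set up Python" step below at it (illustrative):
        #   python-version: ${{ matrix.python-version }}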

    # Optional: Add environment variables needed by your server
    # For 2025-06-18 OAuth 2.1 support, you may need:
    # env:
    #   API_KEY: ${{ secrets.API_KEY }}
    #   OAUTH_TOKEN: ${{ secrets.OAUTH_TOKEN }}
    #   SERVER_PORT: 8088
    #   OTHER_VAR: value

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.12'  # Adjust version as needed

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          # Install your server's dependencies
          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
          # Install the MCP validator from the repo
          pip install git+https://github.com/janix-ai/mcp-validator.git
          # Install curl for the health check
          sudo apt-get update && sudo apt-get install -y curl

      - name: Run HTTP Compliance Tests
        id: http-test
        run: |
          mkdir -p reports

          # Configuration
          # REQUIRED: Update this path to your actual server file!
          SERVER_PATH="path/to/your/http_server.py"  # <-- CHANGE THIS LINE
          SERVER_PORT="${SERVER_PORT:-8088}"  # Use env var if set, else default to 8088
          SERVER_URL="http://localhost:${SERVER_PORT}"
          MAX_RETRIES=30
          RETRY_INTERVAL=1

          # Ensure the server file exists and is executable
          if [ ! -f "$SERVER_PATH" ]; then
            echo "Error: Server file not found at $SERVER_PATH"
            exit 1
          fi
          chmod +x "$SERVER_PATH"

          # Start the HTTP server in the background
          echo "Starting HTTP server..."
          python "$SERVER_PATH" --port "$SERVER_PORT" &
          SERVER_PID=$!

          # Wait for the server to be ready
          echo "Waiting for server to be ready at $SERVER_URL..."
          for i in $(seq 1 $MAX_RETRIES); do
            if curl -s "$SERVER_URL/health" >/dev/null || curl -s "$SERVER_URL/" >/dev/null; then
              echo "Server is ready!"
              break
            fi
            if [ $i -eq $MAX_RETRIES ]; then
              echo "Error: Server failed to start after $MAX_RETRIES attempts"
              kill $SERVER_PID || true
              exit 1
            fi
            echo "Attempt $i/$MAX_RETRIES - Server not ready, waiting..."
            sleep $RETRY_INTERVAL
          done

          # Run the HTTP compliance tests. The exit code is captured via
          # `|| TEST_EXIT_CODE=$?` because the default shell runs with -e:
          # a plain failure here would abort the step before the server is stopped.
          echo "Running compliance tests..."
          TEST_EXIT_CODE=0
          python -m mcp_testing.http.cli \
            --server-url "$SERVER_URL" \
            --protocol-version ${{ matrix.protocol-version }} \
            --output-dir reports \
            --timeout 30 \
            --debug || TEST_EXIT_CODE=$?

          # Additional options for 2025-06-18:
          #   --oauth-token "$OAUTH_TOKEN"   # For OAuth 2.1 authentication
          #   --test-structured-output       # Test structured tool output
          #   --test-batch-rejection         # Test batch request rejection
          #   --test-elicitation             # Test elicitation support

          # Stop the server and exit with the test result
          echo "Stopping HTTP server..."
          kill $SERVER_PID || true
          exit $TEST_EXIT_CODE
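
      # To reproduce a failure locally, run the same validator by hand against a
      # dev server (identical flags to the step above; the server path and port
      # are placeholders):
      #   python path/to/your/http_server.py --port 8088 &
      #   python -m mcp_testing.http.cli --server-url http://localhost:8088 \
      #     --protocol-version 2025-06-18 --output-dir reports --timeout 30 --debug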

      - name: Upload test reports
        if: always()  # Upload reports even if tests fail
        uses: actions/upload-artifact@v4
        with:
          name: mcp-http-reports-${{ matrix.protocol-version }}
          path: reports/
          retention-days: 14
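
      # After a run, the uploaded reports can be fetched with the GitHub CLI,
      # assuming gh is installed and authenticated (the run ID is a placeholder):
      #   gh run download <run-id> -n mcp-http-reports-2025-06-18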

      - name: Post summary to PR
        if: github.event_name == 'pull_request'
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const fs = require('fs');
            try {
              const reportFiles = fs.readdirSync('reports');
              const jsonReportFile = reportFiles.find(file => file.endsWith('.json'));
              let summary = `## MCP HTTP Validation Results (${{ matrix.protocol-version }})\n\n`;
              if (jsonReportFile) {
                const reportData = JSON.parse(fs.readFileSync(`reports/${jsonReportFile}`, 'utf8'));
                summary += `- Protocol Version: ${reportData.protocol_version || 'Unknown'}\n`;
                summary += `- Success Rate: ${reportData.success_rate || 'Unknown'}\n`;
                summary += `- Tests Run: ${reportData.total_tests || 0}\n`;
                // Add 2025-06-18-specific features if tested
                if (reportData.protocol_version === '2025-06-18') {
                  summary += `- OAuth 2.1 Support: ${reportData.oauth_support ? '✅' : '❌'}\n`;
                  summary += `- Structured Tool Output: ${reportData.structured_output_support ? '✅' : '❌'}\n`;
                  summary += `- Batch Request Rejection: ${reportData.batch_rejection_support ? '✅' : '❌'}\n`;
                  summary += `- Elicitation Support: ${reportData.elicitation_support ? '✅' : '❌'}\n`;
                }
                // List failed tests, if any
                const failedTests = (reportData.test_cases || []).filter(tc => tc.status === 'failed');
                if (failedTests.length > 0) {
                  summary += `\n### Failed Tests (${failedTests.length}):\n\n`;
                  failedTests.forEach(test => {
                    summary += `- ${test.name}: ${test.error_message || 'No error message'}\n`;
                  });
                }
              } else {
                summary += "No test report found.\n";
              }
              // Await the API call so a failure is handled by the catch block below
              await github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: summary
              });
            } catch (error) {
              console.error('Error creating PR comment:', error);
              await github.rest.issues.createComment({
                issue_number: context.issue.number,
                owner: context.repo.owner,
                repo: context.repo.repo,
                body: `## MCP HTTP Validation (${{ matrix.protocol-version }})\n\nError generating test results summary. Check the workflow logs for details.`
              });
            }
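
# For reference, the PR-summary script above relies on only a few fields of the
# validator's JSON report. An illustrative shape, inferred from the reads in
# that script (the actual mcp-validator output may include more fields):
#   {
#     "protocol_version": "2025-06-18",
#     "success_rate": "100%",
#     "total_tests": 24,
#     "oauth_support": true,
#     "structured_output_support": true,
#     "batch_rejection_support": true,
#     "elicitation_support": true,
#     "test_cases": [
#       { "name": "initialize", "status": "passed" },
#       { "name": "tools/list", "status": "failed", "error_message": "..." }
#     ]
#   }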