Updates the secret with new build details #20
Workflow file for this run
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Axiom CI: build-and-validate every PR against main, then create a DDN
# supergraph build and report it back on the PR.
name: Axiom auto-test + auto-deploy

# Least-privilege token: read code, write PR comments (for the build-details comment).
permissions:
  contents: read
  pull-requests: write

on:
  pull_request:
    branches:
      - main
jobs:
  # Creates a cloud supergraph build for the PR, comments the build/console
  # URLs on the PR, and refreshes the ENV_CLOUD_DEFAULT repo secret.
  deploy:
    runs-on: ubuntu-latest
    # Only deploy once the local test job has passed.
    needs: test
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          # Full history: the change-detection diff below references the PR
          # base commit, which is unreachable from the default depth-1 clone.
          fetch-depth: 0
      - name: Set up DDN CLI and Login
        uses: hasura/ddn-deployment@main
        with:
          hasura-pat: ${{ secrets.HASURA_PAT }}
      - name: Install dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y jq
      - name: Create .env.cloud.default
        run: |
          echo "${{ secrets.ENV_CLOUD_DEFAULT }}" > .env.cloud.default
      - name: Detect Connector Changes
        id: detect_changes
        run: |
          # Diff the PR base against the PR head. On pull_request events
          # github.sha is an ephemeral merge commit, so "github.sha~1" is its
          # first parent (the base branch tip), not the previous PR commit --
          # comparing base.sha..head.sha captures exactly the PR's changes.
          if git diff --name-only ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }} | grep -q 'connector/'; then
            echo "connector_changes=true" >> $GITHUB_ENV
          else
            echo "connector_changes=false" >> $GITHUB_ENV
          fi
      - name: Build supergraph
        run: |
          calculatedSha=$(git rev-parse --short ${{ github.sha }})
          if [ "${{ env.connector_changes }}" = "true" ]; then
            echo "Building connectors..."
            ddn supergraph build create \
              --supergraph ./supergraph-with-mutations.yaml \
              --context default \
              --description "${calculatedSha} [PR-${{ github.event.pull_request.number }}] Test build for commit $GITHUB_SHA" \
              --out=json > build_output.json
          else
            echo "Skipping connector build."
            ddn supergraph build create --no-build-connectors \
              --supergraph ./supergraph-with-mutations.yaml \
              --context default \
              --description "${calculatedSha} [PR-${{ github.event.pull_request.number }}] Test build for commit $GITHUB_SHA" \
              --out=json > build_output.json
          fi
      - name: Extract URLs from JSON
        id: extract_urls
        run: |
          BUILD_URL=$(jq -r '.build_url' build_output.json)
          CONSOLE_URL=$(jq -r '.console_url' build_output.json)
          echo "build_url=$BUILD_URL" >> $GITHUB_ENV
          echo "console_url=$CONSOLE_URL" >> $GITHUB_ENV
      - name: Add PR comment with build details
        uses: actions/github-script@v7
        with:
          script: |
            const buildUrl = process.env.build_url;
            const consoleUrl = process.env.console_url;
            const prNumber = context.payload.pull_request.number;
            const commitId = context.sha;
            // Await the API call so a failed comment fails the step instead
            // of being dropped as a floating promise.
            await github.rest.issues.createComment({
              issue_number: prNumber,
              owner: context.repo.owner,
              repo: context.repo.repo,
              body: `Supergraph build was successful! 🎉\n\n**Build URL:** [${buildUrl}](${buildUrl})\n**Console URL:** [${consoleUrl}](${consoleUrl})\n**Commit ID:** ${commitId}`
            });
      - name: Install tweetsodium
        # github-script's `require` proxy resolves npm packages from the
        # workspace, but tweetsodium is not preinstalled on hosted runners.
        run: npm install tweetsodium
      - name: Encrypt and update ENV_CLOUD_DEFAULT
        uses: actions/github-script@v7
        with:
          # NOTE(review): the default GITHUB_TOKEN normally cannot write repo
          # secrets -- confirm this token/permission combination actually
          # succeeds, or substitute a PAT with the required scope.
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const fs = require('fs');
            const sodium = require('tweetsodium');
            const secretValue = fs.readFileSync('.env.cloud.default', 'utf8').trim();
            // Repo secrets must be sealed with the repository's public key.
            const { data: publicKeyData } = await github.rest.actions.getRepoPublicKey({
              owner: context.repo.owner,
              repo: context.repo.repo,
            });
            const publicKey = publicKeyData.key;
            const keyId = publicKeyData.key_id;
            const messageBytes = Buffer.from(secretValue);
            const keyBytes = Buffer.from(publicKey, 'base64');
            const encryptedBytes = sodium.seal(messageBytes, keyBytes);
            const encryptedValue = Buffer.from(encryptedBytes).toString('base64');
            await github.rest.actions.createOrUpdateRepoSecret({
              owner: context.repo.owner,
              repo: context.repo.repo,
              secret_name: 'ENV_CLOUD_DEFAULT',
              encrypted_value: encryptedValue,
              key_id: keyId
            });
test: | |
runs-on: ubuntu-latest | |
steps: | |
- name: Checkout Repository | |
uses: actions/checkout@v4 | |
- name: Set up DDN CLI and Login | |
uses: hasura/ddn-deployment@main | |
with: | |
hasura-pat: ${{ secrets.HASURA_PAT }} | |
- name: Install Dependencies (jq) | |
run: | | |
sudo apt-get update | |
sudo apt-get install -y jq | |
- name: Prep Repository | |
run: | | |
cp .hasura/context.yaml.template .hasura/context.yaml | |
cp .env.template .env | |
cp .env.local.template .env.local | |
- name: Build All Supergraphs | |
run: | | |
ddn supergraph build local --env-file .env.local --env-file .env --supergraph supergraph-project-queries.yaml | |
ddn supergraph build local --env-file .env.local --env-file .env --supergraph supergraph-domain.yaml | |
ddn supergraph build local --env-file .env.local --env-file .env --supergraph supergraph.yaml | |
ddn supergraph build local --env-file .env.local --env-file .env --supergraph supergraph-with-mutations.yaml | |
- name: Set up demo databases | |
run: DATASET=telco docker compose -f .data/compose.yaml --env-file .data/.env up --build --pull always -d | |
- name: Run DDN | |
env: | |
HASURA_DDN_PAT: ${{ secrets.HASURA_PAT }} | |
run: | | |
docker compose -f compose.yaml \ | |
--env-file .env.local \ | |
--env-file .env \ | |
up --build --pull always -d | |
- name: Wait for GraphQL Service to Be Ready | |
run: | | |
echo "Waiting for GraphQL service to start..." | |
until curl -s http://localhost:3000/graphql -o /dev/null; do | |
echo "Service not ready, retrying in 5 seconds..." | |
sleep 5 | |
done | |
echo "Service is up!" | |
- name: Query DDN Endpoint and Validate Response | |
run: | | |
QUERY='{ | |
"query": "query getUsers { usersById(id: 1) { email formatCreatedAtTimestamp } customers(limit: 1) { firstName lastName email segment customerLinks { customerPreferences { socialMedia { linkedin } } supportDB { supportHistory { date status } } } creditCards { maskCreditCard expiry cvv } billings { formatBillingDate paymentStatus totalAmount } } calls(limit: 1) { callid } cdr(limit: 1) { guid } documents(limit: 1) { uuid } }" | |
}' | |
EXPECTED_RESPONSE='{ | |
"data": { | |
"usersById": { "email": "adam.mcdaniel@bigpond.com", "formatCreatedAtTimestamp": "Sun Aug 18 2024" }, | |
"customers": [{ | |
"firstName": "Adam", | |
"lastName": "Mcdaniel", | |
"email": "adam.mcdaniel@bigpond.com", | |
"segment": "family", | |
"customerLinks": [{ | |
"customerPreferences": { "socialMedia": { "linkedin": null } }, | |
"supportDB": { "supportHistory": [{ "date": "2020-03-22", "status": "Resolved" }] } | |
}], | |
"creditCards": [{ "maskCreditCard": "***********8922", "expiry": "2028-04-23", "cvv": 651 }], | |
"billings": [{ "formatBillingDate": "Thu Feb 02 2023", "paymentStatus": "overdue", "totalAmount": "50.50" }] | |
}], | |
"calls": [{ "callid": 188359 }], | |
"cdr": [{ "guid": "dd264970-f61f-429f-97f8-4761fea4de2f" }], | |
"documents": [{ "uuid": "a1b2c3d4-5e6f-7a8b-9c0d-1e2f3a4b5c6d" }] | |
} | |
}' | |
RESPONSE=$(curl -s -X POST http://localhost:3000/graphql \ | |
-H "Content-Type: application/json" \ | |
-d "$QUERY") | |
# Compare the actual response with the expected response | |
if echo "$RESPONSE" | jq --argjson expected "$EXPECTED_RESPONSE" 'if . == $expected then "MATCH" else "MISMATCH" end' | grep -q "MATCH"; then | |
echo "✅ Response matches expected output." | |
else | |
echo "❌ Response does not match expected output." | |
echo "Expected: $EXPECTED_RESPONSE" | |
echo "Got: $RESPONSE" | |
exit 1 | |
fi |