[skip changelog] Add stats workflow to gather downloads data

rsora 2021-09-24 17:50:25 +02:00 committed by Silvano Cerza
parent bfb90a8b4f
commit a46f36acd1
3 changed files with 270 additions and 0 deletions

.github/tools/fetch_athena_stats.sh vendored Executable file

@@ -0,0 +1,118 @@
#!/usr/bin/env bash
# This script performs the following steps:
# 1. Start the Athena query and capture its QueryExecutionId into a bash variable via jq.
# 2. Wait for the query to finish running (up to 240 seconds).
# 3. Get the results.
# 4. Build the JSON data points payload.
# Expected environment variables:
# AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY for accessing AWS resources
# AWS_ATHENA_SOURCE_TABLE
# AWS_ATHENA_OUTPUT_LOCATION
# GITHUB_REPOSITORY
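#
# For illustration only, a hypothetical local invocation might look like the following;
# every value below is a placeholder (the source table is presumably the same
# stats_ingest_prod table that the query further down selects from):
#
#   AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
#   AWS_ATHENA_SOURCE_TABLE=stats_ingest_prod.complete_cf_logs_partitioned \
#   AWS_ATHENA_OUTPUT_LOCATION=s3://example-bucket/athena-output/ \
#   GITHUB_REPOSITORY=arduino/arduino-ide \
#   ./.github/tools/fetch_athena_stats.sh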
set -euo pipefail
# Refresh the partitions of the source table before querying it
loadExecutionId=$(
  aws athena start-query-execution \
    --query-string "MSCK REPAIR TABLE ${AWS_ATHENA_SOURCE_TABLE};" \
    --result-configuration "OutputLocation=${AWS_ATHENA_OUTPUT_LOCATION}" \
    --region us-east-1 | jq -r ".QueryExecutionId"
)
echo "QueryExecutionId is ${loadExecutionId}"
# Poll until the repair query completes (at most 120 attempts x 2 s = 240 s)
for i in $(seq 1 120); do
  loadState=$(
    aws athena get-query-execution \
      --query-execution-id "${loadExecutionId}" \
      --region us-east-1 | jq -r ".QueryExecution.Status.State"
  )
  if [[ "${loadState}" == "SUCCEEDED" ]]; then
    break
  fi
  echo "QueryExecutionId ${loadExecutionId} - state is ${loadState}"
  if [[ "${loadState}" == "FAILED" ]]; then
    exit 1
  fi
  sleep 2
done
! read -r -d '' query <<EOM
SELECT
  split_part(replace(json_extract_scalar(url_decode(url_decode(querystring)), '$.data.url'), 'https://downloads.arduino.cc/arduino-ide/arduino-ide_', ''), '?', 1) AS flavor,
  count(json_extract(url_decode(url_decode(querystring)), '$')) AS gauge
FROM stats_ingest_prod.complete_cf_logs_partitioned
WHERE json_extract_scalar(url_decode(url_decode(querystring)), '$.data.url') LIKE 'https://downloads.arduino.cc/arduino-ide/arduino-ide_%'
  AND json_extract_scalar(url_decode(url_decode(querystring)), '$.data.url') NOT LIKE '%latest%' -- exclude the 'latest' redirect
GROUP BY 1;
EOM
# Run the downloads aggregation query
queryExecutionId=$(
  aws athena start-query-execution \
    --query-string "${query}" \
    --result-configuration "OutputLocation=${AWS_ATHENA_OUTPUT_LOCATION}" \
    --region us-east-1 | jq -r ".QueryExecutionId"
)
echo "QueryExecutionId is ${queryExecutionId}"
# Poll until the stats query completes (at most 120 attempts x 2 s = 240 s)
for i in $(seq 1 120); do
  queryState=$(
    aws athena get-query-execution \
      --query-execution-id "${queryExecutionId}" \
      --region us-east-1 | jq -r ".QueryExecution.Status.State"
  )
  if [[ "${queryState}" == "SUCCEEDED" ]]; then
    break
  fi
  echo "QueryExecutionId ${queryExecutionId} - state is ${queryState}"
  if [[ "${queryState}" == "FAILED" ]]; then
    exit 1
  fi
  sleep 2
done
echo "Query succeeded. Processing data"
queryResult=$(
  aws athena get-query-results \
    --query-execution-id "${queryExecutionId}" \
    --region us-east-1 | jq --compact-output
)
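# Note: the get-query-results output is assumed below to have roughly the following
# shape (hypothetical values); the first row holds the column headers, which is why
# it is dropped further down before building the data points:
#
#   {"ResultSet":{"Rows":[
#     {"Data":[{"VarCharValue":"flavor"},{"VarCharValue":"gauge"}]},
#     {"Data":[{"VarCharValue":"2.0.0-beta.11_Windows_64bit.zip"},{"VarCharValue":"1234"}]}
#   ]}}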
! read -r -d '' jsonTemplate <<EOM
{
  "type": "gauge",
  "name": "arduino.downloads.total",
  "value": "%s",
  "host": "${GITHUB_REPOSITORY}",
  "tags": [
    "version:%s",
    "os:%s",
    "arch:%s",
    "cdn:downloads.arduino.cc",
    "project:arduino-ide"
  ]
},
EOM
datapoints="["
for row in $(echo "${queryResult}" | jq 'del(.ResultSet.Rows[0])' | jq -r '.ResultSet.Rows[] | .Data' --compact-output); do
  value=$(jq -r ".[1].VarCharValue" <<<"${row}")
  tag=$(jq -r ".[0].VarCharValue" <<<"${row}")
  # Split a string like "0.6.0_Windows_32bit.zip" into its "0.6.0", "Windows" and "32bit.zip" parts
  split=($(echo "$tag" | tr '_' '\n'))
  if [[ ${#split[@]} -ne 3 ]]; then
    continue
  fi
  # Drop the file extension to keep only the architecture, e.g. "32bit"
  archSplit=($(echo "${split[2]}" | tr '.' '\n'))
  datapoints+=$(printf "${jsonTemplate}" "${value}" "${split[0]}" "${split[1]}" "${archSplit[0]}")
done
# Replace the trailing comma of the last entry with the closing bracket
datapoints="${datapoints::-1}]"
echo "::set-output name=result::$(jq --compact-output <<<"${datapoints}")"

.github/workflows/arduino-stats.yaml vendored Normal file

@@ -0,0 +1,56 @@
name: arduino-stats
on:
  schedule:
    # run every day at 07:00 AM, 03:00 PM and 11:00 PM
    - cron: "0 7,15,23 * * *"
  workflow_dispatch:
  repository_dispatch:
jobs:
  push-stats:
    # This workflow is only of value to the arduino/arduino-ide repository and
    # would always fail in forks
    if: github.repository == 'arduino/arduino-ide'
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Fetch downloads count from Arduino CDN using AWS Athena
        id: fetch
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.STATS_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.STATS_AWS_SECRET_ACCESS_KEY }}
          AWS_ATHENA_SOURCE_TABLE: ${{ secrets.STATS_AWS_ATHENA_SOURCE_TABLE }}
          AWS_ATHENA_OUTPUT_LOCATION: ${{ secrets.STATS_AWS_ATHENA_OUTPUT_LOCATION }}
          GITHUB_REPOSITORY: ${{ github.repository }}
        run: |
          # Fetch jq 1.6, as the runner image currently ships only jq 1.5
          wget -q https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 -O jq
          chmod +x jq
          PATH="${{ github.workspace }}:$PATH"
          .github/tools/fetch_athena_stats.sh
      - name: Send metrics
        uses: masci/datadog@v1
        with:
          api-key: ${{ secrets.DD_API_KEY }}
          # Metrics input expects YAML, but JSON works just as well.
          metrics: ${{ steps.fetch.outputs.result }}
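      # For reference, a single entry of the metrics payload built by the fetch step,
      # rendered in the equivalent YAML form, would look roughly like this
      # (hypothetical version and count):
      #
      #   - type: "gauge"
      #     name: "arduino.downloads.total"
      #     value: "1234"
      #     host: "arduino/arduino-ide"
      #     tags:
      #       - "version:2.0.0-beta.11"
      #       - "os:Windows"
      #       - "arch:64bit"
      #       - "cdn:downloads.arduino.cc"
      #       - "project:arduino-ide"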
      - name: Report failure
        if: failure()
        uses: masci/datadog@v1
        with:
          api-key: ${{ secrets.DD_API_KEY }}
          events: |
            - title: "Arduino IDE stats failing"
              text: "Stats collection failed"
              alert_type: "error"
              host: ${{ github.repository }}
              tags:
                - "project:arduino-ide"
                - "cdn:downloads.arduino.cc"
                - "workflow:${{ github.workflow }}"

.github/workflows/github-stats.yaml vendored Normal file

@@ -0,0 +1,96 @@
name: github-stats
on:
  schedule:
    # run every 30 minutes
    - cron: "*/30 * * * *"
  workflow_dispatch:
  repository_dispatch:
jobs:
  push-stats:
    # This workflow is only of value to the arduino/arduino-ide repository and
    # would always fail in forks
    if: github.repository == 'arduino/arduino-ide'
    runs-on: ubuntu-latest
    steps:
      - name: Fetch downloads count
        id: fetch
        uses: actions/github-script@v4
        with:
          github-token: ${{ github.token }}
          script: |
            let metrics = []
            // Get a list of releases
            const opts = github.repos.listReleases.endpoint.merge({
              ...context.repo
            })
            const releases = await github.paginate(opts)
            // Get download stats for every release
            for (const rel of releases) {
              // Asset names look like `arduino-ide_2.0.0-beta.11_Linux_64bit.zip`;
              // we use this prefix later to split the asset file name more easily
              const baseName = `arduino-ide_${rel.name}_`
              // Get a list of assets for this release
              const opts = github.repos.listReleaseAssets.endpoint.merge({
                ...context.repo,
                release_id: rel.id
              })
              const assets = await github.paginate(opts)
              for (const asset of assets) {
                // Ignore files that are not arduino-ide packages
                if (!asset.name.startsWith(baseName)) {
                  continue
                }
                // Strip the base name and the file extension, then split on "_":
                // e.g. "arduino-ide_2.0.0-beta.11_Linux_64bit.zip" -> ["Linux", "64bit"]
                const systemArch = asset.name.replace(baseName, "").split(".")[0].split("_")
                // Add a metric object to the list of gathered metrics
                metrics.push({
                  "type": "gauge",
                  "name": "arduino.downloads.total",
                  "value": asset.download_count,
                  "host": "${{ github.repository }}",
                  "tags": [
                    `version:${rel.name}`,
                    `os:${systemArch[0]}`,
                    `arch:${systemArch[1]}`,
                    "cdn:github.com",
                    "project:arduino-ide"
                  ]
                })
              }
            }
            // The action will put whatever we return from this function in
            // `outputs.result`, JSON encoded. So we just return the array
            // of objects and GitHub will do the rest.
            return metrics
      - name: Send metrics
        uses: masci/datadog@v1
        with:
          api-key: ${{ secrets.DD_API_KEY }}
          # Metrics input expects YAML, but JSON works just as well.
          metrics: ${{ steps.fetch.outputs.result }}
      - name: Report failure
        if: failure()
        uses: masci/datadog@v1
        with:
          api-key: ${{ secrets.DD_API_KEY }}
          events: |
            - title: "Arduino IDE stats failing"
              text: "Stats collection failed"
              alert_type: "error"
              host: ${{ github.repository }}
              tags:
                - "project:arduino-ide"
                - "cdn:github.com"
                - "workflow:${{ github.workflow }}"