Merge branch 'pre-commit-ci-update-config' into update-precommit #2073
name: CI Upstream
on:
  push:
  pull_request:
  schedule:
    - cron: "0 23 * * 0" # Sun 23:00 UTC
  workflow_dispatch: # allows you to trigger the workflow run manually
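# Cancel any in-progress run of this workflow for the same ref when a new one starts.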
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
jobs:
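  # Scan the head commit of pushes and pull requests for the "[test-upstream]"
  # keyword so the upstream-dev job only runs when explicitly requested.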
  detect-ci-trigger:
    name: detect upstream-dev ci trigger
    runs-on: ubuntu-latest
    if: github.event_name == 'push' || github.event_name == 'pull_request'
    outputs:
      triggered: ${{ steps.detect-trigger.outputs.trigger-found }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 2
      - uses: xarray-contrib/ci-trigger@v1
        id: detect-trigger
        with:
          keyword: "[test-upstream]"
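  # Test against the development versions of the upstream dependencies; runs on
  # schedule, on manual dispatch, or on an explicit "[test-upstream]" trigger.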
  upstream-dev:
    name: upstream-dev, Python${{ matrix.python-version }}
    runs-on: ubuntu-latest
    needs: detect-ci-trigger
    if: |
      always()
      && github.repository == 'pangeo-data/climpred'
      && (
        (github.event_name == 'schedule' || github.event_name == 'workflow_dispatch')
        || needs.detect-ci-trigger.outputs.triggered == 'true'
      )
    defaults:
      run:
        shell: bash -l {0}
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10"]
    outputs:
      artifacts_availability: ${{ steps.status.outputs.ARTIFACTS_AVAILABLE }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch all history for all branches and tags.
      - name: Set up conda (micromamba)
        uses: mamba-org/setup-micromamba@v1
        with:
          environment-file: ci/requirements/climpred-dev.yml
          environment-name: climpred-dev
          cache-environment: true
          create-args: >-
            python=${{ matrix.python-version }}
            pytest-reportlog
            conda
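      # Replace the pinned packages in the environment with development builds of
      # the upstream dependencies.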
      - name: Set up conda environment
        run: |
          bash ci/install-upstream-wheels.sh
      - name: Install climpred (no deps)
        run: |
          python -m pip install --no-deps -e .
      - name: Conda info
        run: conda info -a
      - name: Conda list
        run: conda list
      - name: import climpred
        run: |
          python -c 'import climpred; climpred.show_versions()'
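      # On failure, record ARTIFACTS_AVAILABLE=true in $GITHUB_OUTPUT so the report
      # job knows a pytest report log was produced, then still fail the step.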
      - name: Run Tests
        if: success()
        id: status
        run: |
          python -m pytest --timeout=60 -rf \
            --report-log output-${{ matrix.python-version }}-log.jsonl \
            || (
              echo "ARTIFACTS_AVAILABLE=true" >> $GITHUB_OUTPUT && false
            )
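      # Keep the pytest report log from scheduled runs so the report job can post
      # the failures to an issue.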
      - name: Upload artifacts
        if: |
          failure()
          && github.event_name == 'schedule'
          && steps.status.outcome == 'failure'
          && github.repository == 'pangeo-data/climpred'
        uses: actions/upload-artifact@v4
        with:
          name: output-${{ matrix.python-version }}-log.jsonl
          path: output-${{ matrix.python-version }}-log.jsonl
          retention-days: 5
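  # Summarise failures from the scheduled run: download the report logs, parse them,
  # and open or update a "CI" issue with the results.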
  report:
    name: report
    needs: upstream-dev
    if: |
      always()
      && github.event_name == 'schedule'
      && github.repository == 'pangeo-data/climpred'
      && needs.upstream-dev.outputs.artifacts_availability == 'true'
    runs-on: ubuntu-latest
    defaults:
      run:
        shell: bash
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.x"
      - uses: actions/download-artifact@v4
        with:
          path: /tmp/workspace/logs
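      # download-artifact@v4 places each artifact in its own subdirectory under the
      # given path, so gather the log files into a single ./logs directory.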
      - name: Move all log files into a single directory
        run: |
          rsync -a /tmp/workspace/logs/output-*/ ./logs
          ls -R ./logs
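      # Reuse xarray's parse_logs.py helper to turn the pytest report logs into a
      # readable summary written to pytest-logs.txt.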
      - name: Parse logs
        run: |
          shopt -s globstar
          wget https://raw.githubusercontent.com/pydata/xarray/main/.github/workflows/parse_logs.py
          python -m pip install pytest
          ls logs/**/*-log*
          ls
          python parse_logs.py logs/**/*-log*
          cat pytest-logs.txt
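      # Open a new issue labelled "CI" with the parsed failures, or update the most
      # recent open one previously created by github-actions[bot].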
      - name: Report failures
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const fs = require('fs');
            const pytest_logs = fs.readFileSync('pytest-logs.txt', 'utf8');
            const title = "⚠️ Nightly upstream-dev CI failed ⚠️"
            const workflow_url = `https://github.com/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`
            const issue_body = `[Workflow Run URL](${workflow_url})\n${pytest_logs}`
            // Run GraphQL query against GitHub API to find the most recent open issue used for reporting failures
            const query = `query($owner:String!, $name:String!, $creator:String!, $label:String!){
              repository(owner: $owner, name: $name) {
                issues(first: 1, states: OPEN, filterBy: {createdBy: $creator, labels: [$label]}, orderBy: {field: CREATED_AT, direction: DESC}) {
                  edges {
                    node {
                      body
                      id
                      number
                    }
                  }
                }
              }
            }`;
            const variables = {
              owner: context.repo.owner,
              name: context.repo.repo,
              label: 'CI',
              creator: "github-actions[bot]"
            }
            const result = await github.graphql(query, variables)
            // If no issue is open, create a new issue,
            // else update the body of the existing issue.
            if (result.repository.issues.edges.length === 0) {
              github.rest.issues.create({
                owner: variables.owner,
                repo: variables.name,
                body: issue_body,
                title: title,
                labels: [variables.label]
              })
            } else {
              github.rest.issues.update({
                owner: variables.owner,
                repo: variables.name,
                issue_number: result.repository.issues.edges[0].node.number,
                body: issue_body
              })
            }