forked from chamilad/process-metrics-collector
-
Notifications
You must be signed in to change notification settings - Fork 1
168 lines (165 loc) · 6.37 KB
/
pre-commit.yml
File metadata and controls
168 lines (165 loc) · 6.37 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
---
# CI workflow: runs pre-commit hooks, tests, and licence checks across a Python
# version matrix, then opens a PR with regenerated constraints files.
# NOTE(review): indentation was lost in transit and has been reconstructed;
# `branches` and `paths-ignore` are assumed to belong to the `push` trigger
# (not `pull_request`) — confirm against the original file.
name: pre-commit
on:
  workflow_dispatch:
  pull_request:
  push:
    branches: [exec]
    # Avoid re-triggering on the constraints files this workflow itself commits.
    paths-ignore:
      - 'constraints-*[0-9].txt'
jobs:
  # Main check job: one run per Python version in the matrix.
  pre-commit:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Versions are quoted so "3.10" does not parse as the float 3.1.
        python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
    name: Pre-commit python ${{ matrix.python-version }}
    steps:
      - uses: actions/checkout@v6
        with:
          # NOTE(review): git-log timestamp comparison below needs history;
          # 100 commits is assumed deep enough — confirm for this repo.
          fetch-depth: 100
      - uses: actions/setup-python@v6
        with:
          python-version: ${{ matrix.python-version }}
          cache: 'pip'
          cache-dependency-path: |
            requirements.txt
            mypy-requirements.txt
            dev-requirements.txt
          architecture: x64
      - name: 'Install requirements (standard or constraints ${{ matrix.python-version }})'
        # Regenerate the per-version constraints file when it is missing or
        # older (by last-commit author time) than requirements.txt; otherwise
        # install pinned by the "relaxed" constraints (git+ URLs filtered out,
        # since pip cannot use direct-URL lines as constraints).
        run: |
          pip install --upgrade pip wheel
          constraints_file="constraints-${{ matrix.python-version }}.txt"
          regen_constraints=
          if [ -f "$constraints_file" ] ; then
            # Last-commit author timestamps; redundant -p flag removed
            # (it was overridden by --no-patch anyway).
            at="$(git --no-pager log -1 "--format=tformat:%at" --no-patch -- "$constraints_file")"
            dat="$(git --no-pager log -1 "--format=tformat:%at" --no-patch -- "requirements.txt")"
            if [ "$at" -lt "$dat" ] ; then
              regen_constraints=true
            fi
          else
            regen_constraints=true
          fi
          if [ -n "$regen_constraints" ] ; then
            pip install -r requirements.txt
            pip freeze > "$constraints_file"
            grep -vF git+ "$constraints_file" > "$constraints_file"-relaxed
          else
            grep -vF git+ "$constraints_file" > "$constraints_file"-relaxed
            pip install -r requirements.txt -c "$constraints_file"-relaxed
          fi
      - name: 'Install development requirements'
        run: |
          pip install -r dev-requirements.txt -r mypy-requirements.txt -c constraints-${{ matrix.python-version }}.txt-relaxed
      - name: MyPy cache
        uses: actions/cache@v5
        with:
          path: '.mypy_cache/[0-9]*'
          key: mypy-${{ matrix.python-version }}
      - name: 'pre-commit'
        uses: pre-commit/action@v3.0.1
        # if: ${{ matrix.python-version != '3.6' }}
        with:
          extra_args: --all -c .pre-commit-config.yaml
      # - name: 'pre-commit (custom Python ${{ matrix.python-version }})'
      #   uses: pre-commit/action@v3.0.1
      #   if: ${{ matrix.python-version == '3.6' }}
      #   with:
      #     extra_args: --all -c .pre-commit-config-gh-${{ matrix.python-version }}.yaml
      - name: CPU database cache restore
        id: cache-cpu-databases-restore
        uses: actions/cache/restore@v5
        with:
          # FIX(review): was cpumark_table.csv, but the population step below
          # generates cpumark_table.tsv and pytest reads cpumark_table.tsv, so
          # the .csv path was never created and the table was never cached.
          path: |
            cpumark_table.tsv
            cpu-spec-dataset
          key: cpu_databases
      - name: CPU database population
        if: steps.cache-cpu-databases-restore.outputs.cache-hit != 'true'
        run: |
          python -m treecript.tdp_sources cpumark_table.tsv
          git clone https://github.com/JosuaCarl/cpu-spec-dataset
      - name: CPU database cache save
        if: steps.cache-cpu-databases-restore.outputs.cache-hit != 'true'
        id: cache-cpu-databases-save
        uses: actions/cache/save@v5
        with:
          # FIX(review): .csv -> .tsv, matching the restore step above.
          path: |
            cpumark_table.tsv
            cpu-spec-dataset
          key: ${{ steps.cache-cpu-databases-restore.outputs.cache-primary-key }}
      - name: 'pytest + coverage (${{ matrix.python-version }})'
        run: |
          CACHED_CPU_SPEC_DATASET=${PWD}/cpu-spec-dataset CACHED_CPUMARK_DATASET=${PWD}/cpumark_table.tsv pytest --cov=treecript
      - name: Get transitive dependencies licences
        id: license_check_print_report
        # continue-on-error: true
        uses: pilosus/action-pip-license-checker@v3.1.0
        with:
          requirements: constraints-${{ matrix.python-version }}.txt
      # - name: Check transitive dependencies licences
      #   id: license_check_report
      #   uses: pilosus/action-pip-license-checker@v3.1.0
      #   with:
      #     requirements: constraints-${{ matrix.python-version }}.txt
      #     fail: 'StrongCopyleft'
      #     exclude: '(?i)^(pylint|dulwich|docutils).*'
      - name: Print licences report
        if: ${{ always() }}
        run: echo "${{ steps.license_check_print_report.outputs.report }}"
      # Expose the (possibly regenerated) constraints file to the PR job below.
      - uses: actions/upload-artifact@v6
        with:
          name: pre-commit-${{ matrix.python-version }}
          retention-days: 2
          path: constraints-${{ matrix.python-version }}.txt

  # Collects regenerated constraints from all matrix runs and opens a PR.
  pull_request_changes:
    # Do this only when it is not a pull request validation
    if: github.event_name != 'pull_request'
    runs-on: ubuntu-latest
    name: Pull request with the newly generated contents
    needs:
      - pre-commit
    steps:
      - name: Get analysis timestamp
        id: timestamp
        run: echo "timestamp=$(date -Is)" >> "$GITHUB_OUTPUT"
      - uses: actions/checkout@v6
      - uses: actions/download-artifact@v7
        id: download
        with:
          pattern: pre-commit-*
          merge-multiple: true
          path: changes-dir
      - name: Move artifacts to their right place
        id: move
        # Copies downloaded constraints files into the workspace root; the
        # literal-glob case arm catches the "no files matched" situation
        # (an unmatched glob stays literal in the default shell).
        run: |
          skip=true
          if [ -d "${{steps.download.outputs.download-path}}" ] ; then
            for con in "${{steps.download.outputs.download-path}}"/constraints-*.txt ; do
              case "$con" in
                */constraints-\*.txt)
                  break
                  ;;
                *)
                  cp -p "$con" .
                  skip=false
                  ;;
              esac
            done
          fi
          echo "skip=$skip" >> "$GITHUB_OUTPUT"
      - name: Create Pull Request
        id: cpr
        uses: peter-evans/create-pull-request@v8
        if: steps.move.outputs.skip == 'false'
        with:
          title: Updated constraints (triggered on ${{ steps.timestamp.outputs.timestamp }} by ${{ github.sha }})
          branch: create-pull-request/patch-constraints
          add-paths: constraints-*.txt
          delete-branch: true
          commit-message: "[create-pull-request] Automatically commit updated contents (constraints)"
      - name: Check outputs
        if: ${{ steps.cpr.outputs.pull-request-number }}
        run: |
          echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" >> "$GITHUB_STEP_SUMMARY"
          echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" >> "$GITHUB_STEP_SUMMARY"