From 67487a9ec10be2e91c68fe9e3e6fcd68dbd6e0c5 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 20 Sep 2025 08:54:12 -0500 Subject: [PATCH 001/102] gha: stub workflow to run pants generate-lockfiles --- .github/workflows/lockfiles.yaml | 131 +++++++++++++++++++++++++++++++ 1 file changed, 131 insertions(+) create mode 100644 .github/workflows/lockfiles.yaml diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml new file mode 100644 index 0000000000..6cc2879a19 --- /dev/null +++ b/.github/workflows/lockfiles.yaml @@ -0,0 +1,131 @@ +--- +name: Regenerate Lockfiles + +on: + workflow_dispatch: + inputs: + resolves: + description: Regenerate lockfiles only for the resolves in this comma-separated list, or for all resolves if this is empty. + required: false + type: string + default: "" + pr: + description: Push regenerated lockfiles to the branch of this PR (Use 'new' to open a new PR). + required: true + type: string + + # FIXME: Add a baseRef for use when creating a 'new' PR to target a release branch. + # FOr now, open the PR to alternate branches first, and then run this workflow. + +jobs: + input_vars: + name: Ensure input vars are valid + runs-on: ubuntu-22.04 + outputs: + RESOLVES_JSON: ${{ steps.resolves.outputs.JSON }} + PR_JSON: ${{ steps.pr.outputs.JSON }} + PR_REPO: ${{ steps.pr.outputs.PR_REPO }} + PR_REF: ${{ steps.pr.outputs.PR_REF }} + PR_BASE_REF: ${{ steps.pr.outputs.PR_BASE_REF }} + PR_CHECKOUT: ${{ steps.pr.outputs.PR_CHECKOUT }} + steps: + - name: Validate input - resolves + id: resolves + run: | + if [[ "${{ inputs.resolves }}" =~ ^([a-z0-9-]+(,[a-z0-9-]+)*|)$ ]]; then + echo "VALID INPUT: resolves" + JSON=$(jq '.|split(",")' <<<'"${{ inputs.resolves }}"') + echo "JSON=${JSON}" >> ${GITHUB_OUTPUT} + exit 0 + else + echo "INVALID INPUT: resolves" + echo "resolves must be a comma separated list of resolve names, or an empty string" + exit 1 + fi + + - name: Validate input - pr + id: pr + run: | + if [ "${{ inputs.pr }}" = new ]; then + echo "VALID INPUT: pr" + echo 'PR_REPO=${{ github.repository }}' >> ${GITHUB_OUTPUT} + echo 'JSON={"headRepository":"${{ github.repository_owner }}"}' >> ${GITHUB_OUTPUT} + exit 0 + elif [[ "${{ inputs.pr }}" =~ ^[0-9]+$ ]]; then + echo "VALID INPUT: pr" + pr_fields=id,number,state,isDraft,closed,labels,url + pr_fields+=,author,maintainerCanModify + pr_fields+=,headRepositoryOwner,headRepository,headRefName,baseRefName + pr=$(gh pr view "${{ inputs.pr }}" --json "${pr_fields}") + echo 'PR_REPO=${{ github.repository }}' >> ${GITHUB_OUTPUT} + echo "PR_REF=${GITHUB_REF_NAME}" >> ${GITHUB_OUTPUT} + echo "PR_BASE_REF=${GITHUB_REF_NAME}" >> ${GITHUB_OUTPUT} + echo "PR_CHECKOUT=${GITHUB_REF}" >> ${GITHUB_OUTPUT} + echo "JSON=${pr}" >> ${GITHUB_OUTPUT} + if [ $? > 0 ]; then + echo "Pull Request #${{ inputs.pr }} not found! Giving up." + exit 2 + elif (jq -e .closed <<<${pr} >/dev/null); then + echo "Pull Request #${{ inputs.pr }} is already closed! Giving up." + exit 3 + elif ! (jq -e .maintainerCanModify <<<${pr} >/dev/null); then + echo "Pull Request #${{ inputs.pr }} does not allow maintainer modification! Giving up." + exit 4 + fi + PR_REPO=$(jq -r '.headRepositoryOwner.login + "/" + .headRepository.name' <<<${pr}) + PR_REF=$(jq -r '.headRefName' <<<${pr}) + PR_BASE_REF=$(jq -r '.baseRefName' <<<${pr}) + echo "PR_REPO=${PR_REPO}" >> ${GITHUB_OUTPUT} + echo "PR_REF=${PR_REF}" >> ${GITHUB_OUTPUT} + echo "PR_BASE_REF=${PR_BASE_REF}" >> ${GITHUB_OUTPUT} + # TODO: should this build on top of the merge commit? or HEAD? 
+ # if HEAD, what is the equivalent to github.event.pull_request.head.sha ? + echo "PR_CHECKOUT=refs/pull/${{ inputs.pr }}/merge" >> ${GITHUB_OUTPUT} + echo "Found Pull Request #${{ inputs.pr }} by @$(jq .author.login <<<${pr})" + echo "Pull from ${PR_REPO}:${PR_REF} into ${PR_BASE_REF}" + echo "URL: $(jq -r .url <<<${pr})" + exit 0 + else + echo "INVALID INPUT: pr" + echo "pr must be a PR number, or the magic string 'new'." + exit 1 + fi + + regenerate: + name: Regenerated requested lockfiles + needs: input_vars + runs-on: ubuntu-22.04 + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'true' + ref: ${{ needs.input_vars.outputs.PR_CHECKOUT }} + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-BUILD + + - name: Regenerate ALL lockfiles + if: ${{ inputs.resolves == '' }} + run: | + pants generate-lockfiles + + - name: Regenerate selected lockfiles + if: ${{ inputs.resolves != '' }} + run: > + pants generate-lockfiles + --resolve=${{ join(fromJSON(needs.input_vars.outputs.RESOLVES_JSON), ' --resolve=') }} + + # TODO: capture diff output for use in commit message + # TODO: create commit + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python-version }} + path: .pants.d/pants.log + if: always() # We want the log even on failures. From f5d5a29af256fb8d3ad07f555ff2cda44ec385b8 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 20 Sep 2025 12:56:43 -0500 Subject: [PATCH 002/102] gha: refactor input var handling in lockfiles workflow --- .github/workflows/lockfiles.yaml | 174 ++++++++++++++++++++----------- 1 file changed, 115 insertions(+), 59 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 6cc2879a19..dd1cda2d1a 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -18,24 +18,16 @@ on: # FOr now, open the PR to alternate branches first, and then run this workflow. 
jobs: - input_vars: - name: Ensure input vars are valid + resolves: + name: Preprocess input var - resolves runs-on: ubuntu-22.04 outputs: - RESOLVES_JSON: ${{ steps.resolves.outputs.JSON }} - PR_JSON: ${{ steps.pr.outputs.JSON }} - PR_REPO: ${{ steps.pr.outputs.PR_REPO }} - PR_REF: ${{ steps.pr.outputs.PR_REF }} - PR_BASE_REF: ${{ steps.pr.outputs.PR_BASE_REF }} - PR_CHECKOUT: ${{ steps.pr.outputs.PR_CHECKOUT }} + JSON: ${{ steps.resolves.outputs.JSON }} steps: - - name: Validate input - resolves - id: resolves + - name: Validate input var - resolves run: | if [[ "${{ inputs.resolves }}" =~ ^([a-z0-9-]+(,[a-z0-9-]+)*|)$ ]]; then echo "VALID INPUT: resolves" - JSON=$(jq '.|split(",")' <<<'"${{ inputs.resolves }}"') - echo "JSON=${JSON}" >> ${GITHUB_OUTPUT} exit 0 else echo "INVALID INPUT: resolves" @@ -43,47 +35,42 @@ jobs: exit 1 fi - - name: Validate input - pr - id: pr + - name: Get resolves in JSON + id: resolves + run: | + if [[ "${{ inputs.resolves }}" != "" ]]; then + JSON=$(jq '.|split(",")' <<< '"${{ inputs.resolves }}"') + else + # Pull pants.toml from the branch that the workflow runs from + pants_toml=$( + gh api -X GET \ + 'repos/${{ github.repository }}/contents/pants.toml' \ + -f 'ref=${{ github.sha }}' + ) + JSON=$(yq -e -p toml '.python.resolves|keys()' -o json -I 0 <<< ${pants_toml}) + fi + echo ${JSON} + echo "JSON=${JSON}" >> ${GITHUB_OUTPUT} + + pr: + name: Preprocess input var - pr + runs-on: ubuntu-22.04 + outputs: + JSON: ${{ steps.pr.outputs.JSON }} + CHECKOUT_REF: ${{ steps.pr.outputs.CHECKOUT_REF }} + PR_REPO: ${{ steps.pr.outputs.PR_REPO }} + PR_REF: ${{ steps.pr.outputs.PR_REF }} + PR_BASE_REF: ${{ steps.pr.outputs.PR_BASE_REF }} + steps: + - name: Validate input var - pr run: | if [ "${{ inputs.pr }}" = new ]; then echo "VALID INPUT: pr" - echo 'PR_REPO=${{ github.repository }}' >> ${GITHUB_OUTPUT} - echo 'JSON={"headRepository":"${{ github.repository_owner }}"}' >> ${GITHUB_OUTPUT} + echo "The next step will collect some data for PR creation." exit 0 elif [[ "${{ inputs.pr }}" =~ ^[0-9]+$ ]]; then echo "VALID INPUT: pr" - pr_fields=id,number,state,isDraft,closed,labels,url - pr_fields+=,author,maintainerCanModify - pr_fields+=,headRepositoryOwner,headRepository,headRefName,baseRefName - pr=$(gh pr view "${{ inputs.pr }}" --json "${pr_fields}") - echo 'PR_REPO=${{ github.repository }}' >> ${GITHUB_OUTPUT} - echo "PR_REF=${GITHUB_REF_NAME}" >> ${GITHUB_OUTPUT} - echo "PR_BASE_REF=${GITHUB_REF_NAME}" >> ${GITHUB_OUTPUT} - echo "PR_CHECKOUT=${GITHUB_REF}" >> ${GITHUB_OUTPUT} - echo "JSON=${pr}" >> ${GITHUB_OUTPUT} - if [ $? > 0 ]; then - echo "Pull Request #${{ inputs.pr }} not found! Giving up." - exit 2 - elif (jq -e .closed <<<${pr} >/dev/null); then - echo "Pull Request #${{ inputs.pr }} is already closed! Giving up." - exit 3 - elif ! (jq -e .maintainerCanModify <<<${pr} >/dev/null); then - echo "Pull Request #${{ inputs.pr }} does not allow maintainer modification! Giving up." - exit 4 - fi - PR_REPO=$(jq -r '.headRepositoryOwner.login + "/" + .headRepository.name' <<<${pr}) - PR_REF=$(jq -r '.headRefName' <<<${pr}) - PR_BASE_REF=$(jq -r '.baseRefName' <<<${pr}) - echo "PR_REPO=${PR_REPO}" >> ${GITHUB_OUTPUT} - echo "PR_REF=${PR_REF}" >> ${GITHUB_OUTPUT} - echo "PR_BASE_REF=${PR_BASE_REF}" >> ${GITHUB_OUTPUT} - # TODO: should this build on top of the merge commit? or HEAD? - # if HEAD, what is the equivalent to github.event.pull_request.head.sha ? 
- echo "PR_CHECKOUT=refs/pull/${{ inputs.pr }}/merge" >> ${GITHUB_OUTPUT} - echo "Found Pull Request #${{ inputs.pr }} by @$(jq .author.login <<<${pr})" - echo "Pull from ${PR_REPO}:${PR_REF} into ${PR_BASE_REF}" - echo "URL: $(jq -r .url <<<${pr})" + echo "The next step will validate that PR #${{ inputs.pr }} exists." exit 0 else echo "INVALID INPUT: pr" @@ -91,17 +78,88 @@ jobs: exit 1 fi + - name: Get pr in JSON + id: pr + env: + PR_FIELDS: "\ + id,\ + number,\ + url,\ + closed,\ + author,\ + maintainerCanModify,\ + headRepositoryOwner,\ + headRepository,\ + headRefName,\ + baseRefName" + run: | + if [[ "${{ inputs.pr }}" == new ]]; then + echo "Planning new Pull Request metadata ..." + PR=$( + yq -e -p yaml . -o json -I 0 <<-HEREYAML + id: "" + number: "new" + url: "" + closed: false + author: # see https://api.github.com/users/github-actions[bot] + id: 41898282 + is_bot: true + login: "github-actions[bot]" + name: "github-actions[bot]" + maintainerCanModify: true + headRepositoryOwner: + id: "${GITHUB_REPOSITORY_OWNER_ID}" + login: "${GITHUB_REPOSITORY_OWNER}" + headRepository: + id: "${GITHUB_REPOSITORY_ID}" + name: "${GITHUB_REPOSITORY#*/}" + headRefName: "regen-lockfiles-${GITHUB_RUN_ID}" + baseRefName: "${GITHUB_REF_NAME}" + HEREYAML + ) + CHECKOUT_REF="${GITHUB_REF}" + else + CHECKOUT_REF="refs/pull/${{ inputs.pr }}/merge" + echo "Searching for Pull Request #${{ inputs.pr }} ..." + PR=$(gh pr view "${{ inputs.pr }}" --json "${PR_FIELDS}") + pr_search_rc=$? + if [ ${pr_search_rc} > 0 ]; then + echo "Pull Request #${{ inputs.pr }} not found!" + exit 2 + elif (jq -e .closed <<< ${PR} >/dev/null); then + echo "Pull Request #${{ inputs.pr }} is closed!" + exit 3 + elif ! (jq -e .maintainerCanModify <<< ${PR} >/dev/null); then + echo "Pull Request #${{ inputs.pr }} does not allow maintainer modification!" + exit 4 + fi + echo "Found Pull Request #${{ inputs.pr }} by @$(jq .author.login <<< ${PR})" + echo "URL: $(jq -r .url <<< ${PR})" + fi + echo "JSON=${PR}" >> ${GITHUB_OUTPUT} + echo "CHECKOUT_REF=${CHECKOUT_REF}" >> ${GITHUB_OUTPUT} + PR_REPO=$(jq -r '.headRepositoryOwner.login + "/" + .headRepository.name' <<< ${PR}) + PR_REF=$(jq -r '.headRefName' <<< ${PR}) + PR_BASE_REF=$(jq -r '.baseRefName' <<< ${PR}) + echo "PR_REPO=${PR_REPO}" >> ${GITHUB_OUTPUT} + echo "PR_REF=${PR_REF}" >> ${GITHUB_OUTPUT} + echo "PR_BASE_REF=${PR_BASE_REF}" >> ${GITHUB_OUTPUT} + echo "Pull from ${PR_REPO}:${PR_REF} into ${PR_BASE_REF}" + regenerate: - name: Regenerated requested lockfiles - needs: input_vars + name: Regenerate lockfiles/${{ matrix.resolve }}.lock + needs: [resolves, pr] runs-on: ubuntu-22.04 + strategy: + matrix: + resolve: ${{ fromJSON(needs.resolves.ouptuts.JSON) }} steps: - name: Checkout repository uses: actions/checkout@v4 with: # a test uses a submodule, and pants needs access to it to calculate deps. submodules: 'true' - ref: ${{ needs.input_vars.outputs.PR_CHECKOUT }} + ref: ${{ needs.pr.outputs.CHECKOUT_REF }} - name: Initialize Pants and its GHA caches uses: ./.github/actions/init-pants @@ -109,16 +167,14 @@ jobs: # To ignore a bad cache, bump the cache* integer. 
gha-cache-key: cache0-BUILD - - name: Regenerate ALL lockfiles - if: ${{ inputs.resolves == '' }} + - name: Regenerate lockfiles/${{ matrix.resolve }}.lock + env: + STDOUT_LOG: ${{ runner.temp }}/pants-gen-lock.stdout.log + STDERR_LOG: ${{ runner.temp }}/pants-gen-lock.stderr.log run: | - pants generate-lockfiles - - - name: Regenerate selected lockfiles - if: ${{ inputs.resolves != '' }} - run: > - pants generate-lockfiles - --resolve=${{ join(fromJSON(needs.input_vars.outputs.RESOLVES_JSON), ' --resolve=') }} + pants generate-lockfiles '--resolve=${{ matrix.resolve }}' \ + > >(tee ${STDOUT_LOG}) \ + 2> >(tee ${STDERR_LOG} >&2) # TODO: capture diff output for use in commit message # TODO: create commit From a49ac0a9010422040985e826e274eeea43519d56 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 20 Sep 2025 20:36:47 -0500 Subject: [PATCH 003/102] gha: capture lockfile diff from pants generate-lockfiles --- .github/workflows/lockfiles.yaml | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index dd1cda2d1a..73cb4f80b8 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -153,6 +153,8 @@ jobs: strategy: matrix: resolve: ${{ fromJSON(needs.resolves.ouptuts.JSON) }} + outputs: + LOCKFILE_DIFF: ${{ steps.lockfile.outputs.DIFF }} # This has ansi escape chars! steps: - name: Checkout repository uses: actions/checkout@v4 @@ -168,20 +170,26 @@ jobs: gha-cache-key: cache0-BUILD - name: Regenerate lockfiles/${{ matrix.resolve }}.lock + id: lockfile env: - STDOUT_LOG: ${{ runner.temp }}/pants-gen-lock.stdout.log + # We only need stderr, because generate-lockfiles puts the diff on stderr. + # Nothing else should be on stderr because Pants disables logging to stderr when + # it detects the stderr redirection. (NOTE: stdout should be empty.) STDERR_LOG: ${{ runner.temp }}/pants-gen-lock.stderr.log run: | pants generate-lockfiles '--resolve=${{ matrix.resolve }}' \ - > >(tee ${STDOUT_LOG}) \ - 2> >(tee ${STDERR_LOG} >&2) + 2> >(tee ${STDERR_LOG} >&2 ) + { + echo 'DIFF<> ${GITHUB_OUTPUT} - # TODO: capture diff output for use in commit message # TODO: create commit - name: Upload pants log uses: actions/upload-artifact@v4 with: - name: pants-log-py${{ matrix.python-version }} + name: pants-log-${{ matrix.resolve }} path: .pants.d/pants.log if: always() # We want the log even on failures. 
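Note: the DIFF value written to GITHUB_OUTPUT above relies on GitHub Actions' multiline-output
delimiter syntax (NAME<<DELIMITER ... DELIMITER). A minimal sketch of that pattern, with an
illustrative delimiter name rather than the one used in this workflow:

    {
      echo "DIFF<<LOCKFILE_DIFF_EOF"
      cat "${STDERR_LOG}"
      echo "LOCKFILE_DIFF_EOF"
    } >> "${GITHUB_OUTPUT}"

Choosing a delimiter that cannot occur in the captured diff (for example, a generated UUID)
keeps the output value from terminating early.
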
From 570506ebaeea8bfc4a86f4c45f5c32f72c6258bc Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 21:59:55 -0500 Subject: [PATCH 004/102] gha: add output_dir for logs/images under workspace --- .github/workflows/lockfiles.yaml | 34 +++++++++++++++----------------- 1 file changed, 16 insertions(+), 18 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 73cb4f80b8..fbf3c911ac 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -23,6 +23,7 @@ jobs: runs-on: ubuntu-22.04 outputs: JSON: ${{ steps.resolves.outputs.JSON }} + LOCKFILES: ${{ steps.resolves.outputs.LOCKFILES }} steps: - name: Validate input var - resolves run: | @@ -48,9 +49,10 @@ jobs: -f 'ref=${{ github.sha }}' ) JSON=$(yq -e -p toml '.python.resolves|keys()' -o json -I 0 <<< ${pants_toml}) + LOCKFILES=$(yq -e -p toml '.python.resolves' -o json -I 0 <<< ${pants_toml}) fi - echo ${JSON} - echo "JSON=${JSON}" >> ${GITHUB_OUTPUT} + echo "JSON=${JSON}" | tee -a ${GITHUB_OUTPUT} + echo "LOCKFILES=${LOCKFILES}" | tee -a ${GITHUB_OUTPUT} pr: name: Preprocess input var - pr @@ -147,14 +149,19 @@ jobs: echo "Pull from ${PR_REPO}:${PR_REF} into ${PR_BASE_REF}" regenerate: - name: Regenerate lockfiles/${{ matrix.resolve }}.lock + name: Regenerate ${{ matrix.resolve }} lockfile needs: [resolves, pr] runs-on: ubuntu-22.04 strategy: matrix: resolve: ${{ fromJSON(needs.resolves.ouptuts.JSON) }} - outputs: - LOCKFILE_DIFF: ${{ steps.lockfile.outputs.DIFF }} # This has ansi escape chars! + env: + LOCKFILE: ${{ fromJSON(needs.resolves.outputs.LOCKFILES)[matrix.resolve] }} + # We only need stderr, because generate-lockfiles puts the diff on stderr. + # Nothing else should be on stderr because Pants disables logging to stderr when + # it detects the stderr redirection. (NOTE: stdout should be empty.) + OUTPUT_DIR: dist/lockfiles/${{ matrix.resolve }} # /dist/ is in .gitignore + STDERR_LOG: dist/lockfiles/${{ matrix.resolve }}/stderr.log steps: - name: Checkout repository uses: actions/checkout@v4 @@ -169,21 +176,12 @@ jobs: # To ignore a bad cache, bump the cache* integer. gha-cache-key: cache0-BUILD - - name: Regenerate lockfiles/${{ matrix.resolve }}.lock + - name: Regenerate ${{ matrix.resolve }} lockfile id: lockfile - env: - # We only need stderr, because generate-lockfiles puts the diff on stderr. - # Nothing else should be on stderr because Pants disables logging to stderr when - # it detects the stderr redirection. (NOTE: stdout should be empty.) 
- STDERR_LOG: ${{ runner.temp }}/pants-gen-lock.stderr.log run: | - pants generate-lockfiles '--resolve=${{ matrix.resolve }}' \ - 2> >(tee ${STDERR_LOG} >&2 ) - { - echo 'DIFF<> ${GITHUB_OUTPUT} + mkdir -p ${OUTPUT_DIR}/ + pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee ${STDERR_LOG} >&2 ) + cp ${LOCKFILE} ${OUTPUT_DIR}/ # TODO: create commit From b18e04609ddbfd22d61039c11aa7cb1f545158bf Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 22:02:48 -0500 Subject: [PATCH 005/102] gha: install/use freeze to generate color lockfile diff pics --- .github/workflows/lockfiles.yaml | 35 ++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index fbf3c911ac..4ff74d3ca5 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -183,6 +183,41 @@ jobs: pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee ${STDERR_LOG} >&2 ) cp ${LOCKFILE} ${OUTPUT_DIR}/ + - name: Install rsvg-convert for freeze + # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. + run: | + sudo apt-get install librsvg2-bin + + - name: Install freeze + uses: robinraju/release-downloader@v1 + with: + repository: charmbracelet/freeze + tag: v0.2.2 + fileName: freeze_*_${{ runner.os }}_${{ fromJSON('{"X86":"i386","X64":"x86_64","ARM":"arm","ARM64":"arm64"}')[runner.arch] }}.tar.gz + extract: true + + - name: Freeze lockfile diff as picture + # For samples of the themes freeze can use when generating a "terminal screenshot", see: + # - "charm" theme (default): https://github.com/charmbracelet/freeze + # - all other themes: https://xyproto.github.io/splash/docs/ + env: + # Freeze processes the output line-by-line, so the ansi escape sequences + # that span multiple lines only apply to the first line. + # This sed script repeats those ansi escape sequences on each line. + SED_SCRIPT: | + s/^\x1B\[4m \+$/\0\x1B[0m/ # append ansi reset on line above heading + s/^== .* ==$/\x1B[4m\0\x1B[0m/ # add ansi underline to headling + s/^\x1b\[0m$// # drop ansi reset after the heading + run: | + for theme in github github-dark; do + for ext in svg png; do + # The diff output applies ansi underlines across multiple lines, but freeze formats + # each line separately. So, use sed to repeat ansi chars per line. + sed -e "${SED_SCRIPT}" ${STDERR_LOG} \ + | freeze --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - + done + done + # TODO: create commit - name: Upload pants log From c7296400d29cf2b2c73f878eb591795ff34b08f2 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 22:04:11 -0500 Subject: [PATCH 006/102] gha: clean up lockfile diff for use in commit msg + job summary --- .github/workflows/lockfiles.yaml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 4ff74d3ca5..97e0bea50b 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -218,6 +218,25 @@ jobs: done done + - name: Prepare text-only lockfile diff for commit message + env: + # This sed script replaces ansi escape chars with unicode for use in the commit msg. 
+ SED_SCRIPT: | + /^\x1B\[4m\( \+\)\(\x1B\[0m\)\?$/{ # line above headings (spaces with ansi underline) + s/ /_/g; # use underline char instead of space + } + # the next one adds a line of overline chars to replace the ansi overline + /^== .* ==$/{ # heading text line (the matched line goes in pattern space) + h; # save copy of heading line in "hold" space + s/./‾/g; # make an line of overline chars to replace the heading's ansi underline + H; # update "hold" space: append newline and line of overline chars + g # replace the original line with lines from the "hold" space + } + /^\x1B\[0m$/d # drop blank line after heading (replaced with line of overline chars) + s/\x1B\[[0-9]\+m//g # strip out ansi escapes + run: | + sed -e "${SED_SCRIPT}" > ${STDERR_LOG}.txt + # TODO: create commit - name: Upload pants log From 163af7ee8d3796b10e9c3aa581fd8d1fb3f62e83 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 22:07:10 -0500 Subject: [PATCH 007/102] gha: upload lockfile and lockfile diffs as artifacts --- .github/workflows/lockfiles.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 97e0bea50b..e166c30172 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -237,7 +237,13 @@ jobs: run: | sed -e "${SED_SCRIPT}" > ${STDERR_LOG}.txt - # TODO: create commit + # TODO: create commit in a subsequent job to combine all of the updates in one commit + + - name: Upload lockfile diff files + uses: actions/upload-artifact@v4 + with: + name: lockfile-${{ matrix.resolve }} + path: ${{ env.OUTPUT_DIR }} - name: Upload pants log uses: actions/upload-artifact@v4 From dde2f5a818d2508a503f6eceab2563364b63471e Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 22:09:06 -0500 Subject: [PATCH 008/102] gha: add markdown job summary the srcset/src of the picture will need to be adjusted once we know how to get the artifacts. --- .github/workflows/lockfiles.yaml | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index e166c30172..4e54c12254 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -237,6 +237,31 @@ jobs: run: | sed -e "${SED_SCRIPT}" > ${STDERR_LOG}.txt + - name: Prepare Job Summary + run: | + ( + echo '## ${{ matrix.resolve }} Lockfile Diff' + echo + echo '' + echo ' ' + echo ' ' + echo ' ' + echo ' ' + echo ' Terminal screenshot of lockfile diff in color. The text from the image is included below.' + echo '' + echo + echo '
' + echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' + echo + echo '```' + cat ${STDERR_LOG}.txt + echo '```' + echo + echo '
' + echo + ) >> ${GITHUB_STEP_SUMMARY} + # TODO: create commit in a subsequent job to combine all of the updates in one commit - name: Upload lockfile diff files From 8e4ebfa7e6f2e999da0bcc611764bba8bf1792d5 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 22:24:28 -0500 Subject: [PATCH 009/102] gha: use tee when writing to GITHUB_OUTPUT --- .github/workflows/lockfiles.yaml | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 4e54c12254..a90f506c38 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -138,14 +138,14 @@ jobs: echo "Found Pull Request #${{ inputs.pr }} by @$(jq .author.login <<< ${PR})" echo "URL: $(jq -r .url <<< ${PR})" fi - echo "JSON=${PR}" >> ${GITHUB_OUTPUT} - echo "CHECKOUT_REF=${CHECKOUT_REF}" >> ${GITHUB_OUTPUT} + echo "JSON=${PR}" | tee -a ${GITHUB_OUTPUT} + echo "CHECKOUT_REF=${CHECKOUT_REF}" | tee -a ${GITHUB_OUTPUT} PR_REPO=$(jq -r '.headRepositoryOwner.login + "/" + .headRepository.name' <<< ${PR}) PR_REF=$(jq -r '.headRefName' <<< ${PR}) PR_BASE_REF=$(jq -r '.baseRefName' <<< ${PR}) - echo "PR_REPO=${PR_REPO}" >> ${GITHUB_OUTPUT} - echo "PR_REF=${PR_REF}" >> ${GITHUB_OUTPUT} - echo "PR_BASE_REF=${PR_BASE_REF}" >> ${GITHUB_OUTPUT} + echo "PR_REPO=${PR_REPO}" | tee -a ${GITHUB_OUTPUT} + echo "PR_REF=${PR_REF}" | tee -a ${GITHUB_OUTPUT} + echo "PR_BASE_REF=${PR_BASE_REF}" | tee -a ${GITHUB_OUTPUT} echo "Pull from ${PR_REPO}:${PR_REF} into ${PR_BASE_REF}" regenerate: @@ -254,6 +254,8 @@ jobs: echo '
' echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' echo + echo 'Base git ref for diff: *${{ steps.pr.outputs.PR_BASE_REF }}* ' + echo echo '```' cat ${STDERR_LOG}.txt echo '```' From 65a59f8dd0b5550f997430f970981b201f364c8d Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 22:25:41 -0500 Subject: [PATCH 010/102] gha: create lockfile diff artifacts iff CHANGED --- .github/workflows/lockfiles.yaml | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index a90f506c38..21a9cb5d1d 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -162,6 +162,8 @@ jobs: # it detects the stderr redirection. (NOTE: stdout should be empty.) OUTPUT_DIR: dist/lockfiles/${{ matrix.resolve }} # /dist/ is in .gitignore STDERR_LOG: dist/lockfiles/${{ matrix.resolve }}/stderr.log + outputs: + CHANGED: ${{ steps.lockfile.outputs.CHANGED }} steps: - name: Checkout repository uses: actions/checkout@v4 @@ -178,17 +180,30 @@ jobs: - name: Regenerate ${{ matrix.resolve }} lockfile id: lockfile + env: + PR_BASE_REF: ${{ needs.jobs.pr.outputs.PR_BASE_REF }} run: | mkdir -p ${OUTPUT_DIR}/ + git checkout ${{ env.PR_BASE_REF }} -- ${LOCKFILE} # diff is for whole PR not just a commit. pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee ${STDERR_LOG} >&2 ) cp ${LOCKFILE} ${OUTPUT_DIR}/ + CHANGED=$( + if git diff ${{ env.PR_BASE_REF }} --exit-code --quiet -- ${LOCKFILE}; then + echo "false" + else + echo "true" + fi + ) + echo "CHANGED=${CHANGED}" | tee -a ${GITHUB_OUTPUT} - name: Install rsvg-convert for freeze + if: steps.lockfile.outputs.CHANGED == 'true' # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. run: | sudo apt-get install librsvg2-bin - name: Install freeze + if: steps.lockfile.outputs.CHANGED == 'true' uses: robinraju/release-downloader@v1 with: repository: charmbracelet/freeze @@ -197,6 +212,7 @@ jobs: extract: true - name: Freeze lockfile diff as picture + if: steps.lockfile.outputs.CHANGED == 'true' # For samples of the themes freeze can use when generating a "terminal screenshot", see: # - "charm" theme (default): https://github.com/charmbracelet/freeze # - all other themes: https://xyproto.github.io/splash/docs/ @@ -219,6 +235,7 @@ jobs: done - name: Prepare text-only lockfile diff for commit message + if: steps.lockfile.outputs.CHANGED == 'true' env: # This sed script replaces ansi escape chars with unicode for use in the commit msg. SED_SCRIPT: | @@ -238,6 +255,7 @@ jobs: sed -e "${SED_SCRIPT}" > ${STDERR_LOG}.txt - name: Prepare Job Summary + if: steps.lockfile.outputs.CHANGED == 'true' run: | ( echo '## ${{ matrix.resolve }} Lockfile Diff' @@ -271,6 +289,7 @@ jobs: with: name: lockfile-${{ matrix.resolve }} path: ${{ env.OUTPUT_DIR }} + if: steps.lockfile.outputs.CHANGED == 'true' - name: Upload pants log uses: actions/upload-artifact@v4 From 95e1aca41e7723b951e40af3b073c0b9d003a62d Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 22 Sep 2025 22:38:22 -0500 Subject: [PATCH 011/102] gha: move pr base ref reporting to a NOTE in summary --- .github/workflows/lockfiles.yaml | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 21a9cb5d1d..16f7184385 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -272,15 +272,20 @@ jobs: echo '
' echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' echo - echo 'Base git ref for diff: *${{ steps.pr.outputs.PR_BASE_REF }}* ' - echo echo '```' cat ${STDERR_LOG}.txt echo '```' echo + ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} + ( + echo '[!NOTE]' + echo 'Diff base (git ref): *${{ steps.pr.outputs.PR_BASE_REF }}* ' + echo + ) >> ${GITHUB_STEP_SUMMARY} # only add this note to the job summary + ( echo '
' echo - ) >> ${GITHUB_STEP_SUMMARY} + ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} # TODO: create commit in a subsequent job to combine all of the updates in one commit From 11f77c252c86bc98ad1721e4c1aca5fb704a5f99 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 11:08:21 -0500 Subject: [PATCH 012/102] gha: stub commit job --- .github/workflows/lockfiles.yaml | 28 +++++++++++++++++++++++----- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 16f7184385..de20880a8f 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -17,6 +17,10 @@ on: # FIXME: Add a baseRef for use when creating a 'new' PR to target a release branch. # FOr now, open the PR to alternate branches first, and then run this workflow. +env: + # logs and screenshots go in {OUTPUT_BASE_DIR}/lockfile-{resolve}/ (where "lockfile-{resolve}" is the artifact name) + OUTPUT_BASE_DIR: dist/lockfiles # /dist/ is in .gitignore + jobs: resolves: name: Preprocess input var - resolves @@ -160,8 +164,8 @@ jobs: # We only need stderr, because generate-lockfiles puts the diff on stderr. # Nothing else should be on stderr because Pants disables logging to stderr when # it detects the stderr redirection. (NOTE: stdout should be empty.) - OUTPUT_DIR: dist/lockfiles/${{ matrix.resolve }} # /dist/ is in .gitignore - STDERR_LOG: dist/lockfiles/${{ matrix.resolve }}/stderr.log + OUTPUT_DIR: ${{ env.OUTPUT_BASE_DIR }}/lockfile-${{ matrix.resolve }} + STDERR_LOG: ${{ env.OUTPUT_BASE_DIR }}/lockfile-${{ matrix.resolve }}/stderr.log outputs: CHANGED: ${{ steps.lockfile.outputs.CHANGED }} steps: @@ -287,9 +291,7 @@ jobs: echo ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} - # TODO: create commit in a subsequent job to combine all of the updates in one commit - - - name: Upload lockfile diff files + - name: Upload lockfile and lockfile diff files uses: actions/upload-artifact@v4 with: name: lockfile-${{ matrix.resolve }} @@ -302,3 +304,19 @@ jobs: name: pants-log-${{ matrix.resolve }} path: .pants.d/pants.log if: always() # We want the log even on failures. + + commit: + name: Commit regenerated ${{ matrix.resolve }} lockfile(s) + needs: [resolves, pr] + runs-on: ubuntu-22.04 + env: + COMMIT_MSG: ${{ env.OUTPUT_BASE_DIR }}/commit_msg + steps: + - name: Download lockfiles and lockfile diff files + uses: actions/download-artifact@v5 + with: + pattern: lockfile-* # lockfile-{resolve} + path: ${{ env.OUTPUT_BASE_DIR }} + merge-multiple: true + + # TODO: create commit to combine all of the updates in one commit From 4779f80c93d61e6eb74f49f21905f450bc1ce0f1 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 18:56:58 -0500 Subject: [PATCH 013/102] gha: fix needs context usage --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index de20880a8f..dddca4a964 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -185,7 +185,7 @@ jobs: - name: Regenerate ${{ matrix.resolve }} lockfile id: lockfile env: - PR_BASE_REF: ${{ needs.jobs.pr.outputs.PR_BASE_REF }} + PR_BASE_REF: ${{ needs.pr.outputs.PR_BASE_REF }} run: | mkdir -p ${OUTPUT_DIR}/ git checkout ${{ env.PR_BASE_REF }} -- ${LOCKFILE} # diff is for whole PR not just a commit. 
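Note: as the fix above shows, job outputs are read as needs.<job_id>.outputs.<name>; there is no
intermediate "jobs" key in the needs context, and the consuming job must list the producing job
under needs. A minimal sketch with illustrative job, step, and output names:

    jobs:
      pr:
        runs-on: ubuntu-22.04
        outputs:
          PR_BASE_REF: ${{ steps.meta.outputs.PR_BASE_REF }}
        steps:
          - id: meta
            run: echo "PR_BASE_REF=master" >> "${GITHUB_OUTPUT}"
      regenerate:
        needs: [pr]
        runs-on: ubuntu-22.04
        steps:
          - run: echo "Base ref is ${{ needs.pr.outputs.PR_BASE_REF }}"
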
From 5495c8dfe243d4263fed8deb12ae1fee1849c717 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 18:59:56 -0500 Subject: [PATCH 014/102] gha: prepare commit and PR comment/body messages --- .github/workflows/lockfiles.yaml | 45 +++++++++++++++++++++++++++++--- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index dddca4a964..fbbd05341e 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -307,16 +307,55 @@ jobs: commit: name: Commit regenerated ${{ matrix.resolve }} lockfile(s) - needs: [resolves, pr] + needs: [resolves, pr, regenerate] runs-on: ubuntu-22.04 env: - COMMIT_MSG: ${{ env.OUTPUT_BASE_DIR }}/commit_msg + COMMIT_MSG: ${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt + PR_COMMENT: ${{ env.OUTPUT_BASE_DIR }}/pr_comment.md + # preserve the order of resolves from input (eg pants.toml has st2 first, before tools) + RESOLVES: ${{ join(fromJSON(needs.resolves.ouptuts.JSON), ''' ''') }} + RESOLVES_CSV: ${{ join(fromJSON(needs.resolves.ouptuts.JSON), ', ') }} + LOCKFILES: ${{ needs.resolves.outputs.LOCKFILES }} steps: - name: Download lockfiles and lockfile diff files uses: actions/download-artifact@v5 with: pattern: lockfile-* # lockfile-{resolve} path: ${{ env.OUTPUT_BASE_DIR }} - merge-multiple: true + merge-multiple: true # unpack in {path}/{artifact_name}/ + + - name: Prepare commit + run: | + echo "pants generate-lockfiles: ${RESOLVES_CSV}" > ${COMMIT_MSG} + echo >> ${COMMIT_MSG} + for resolve in ${RESOLVES}; do + LOCKFILE=$(jq '.["'"${resolve}"'"]' <<< "${LOCKFILES}") + cp "${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE})" "${LOCKFILE}" + git add "${LOCKFILE}" + STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" + if [ -e ${STDERR_LOG}.txt ]; then + cat ${STDERR_LOG}.txt >> ${COMMIT_MSG} + else + echo "${STDERR_LOG}.txt is missing" + echo "No changes to: ${LOCKFILE}" >> ${COMMIT_MSG} + echo >> ${COMMIT_MSG} + fi + done + + - name: Prepare PR comment + run: | + echo "# Lockfile Diffs" > ${PR_COMMENT} + echo >> ${PR_COMMENT} + for resolve in ${RESOLVES}; do + LOCKFILE=$(jq '.["'"${resolve}"'"]' <<< "${LOCKFILES}") + STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" + if [ -e ${STDERR_LOG}.md ]; then + cat ${STDERR_LOG}.md >> ${PR_COMMENT} + else + echo "${STDERR_LOG}.md is missing" + echo "No changes to: `${LOCKFILE}`" >> ${PR_COMMENT} + echo >> ${PR_COMMENT} + fi + done # TODO: create commit to combine all of the updates in one commit From 68cdb7ba1665a9c70e64227341ac155cd5927212 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 19:01:30 -0500 Subject: [PATCH 015/102] gha: git steps (checkout, branch, commit, push) --- .github/workflows/lockfiles.yaml | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index fbbd05341e..c00d15a409 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -317,6 +317,20 @@ jobs: RESOLVES_CSV: ${{ join(fromJSON(needs.resolves.ouptuts.JSON), ', ') }} LOCKFILES: ${{ needs.resolves.outputs.LOCKFILES }} steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. 
+ submodules: 'true' + ref: ${{ needs.pr.outputs.CHECKOUT_REF }} + + - name: Create branch for new PR + if: ${{ inputs.pr == 'new' }} + run: | + git config --local user.name "github-actions[bot]" + git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git checkout -b "${{ needs.pr.outputs.PR_REF }}" FETCH_HEAD + - name: Download lockfiles and lockfile diff files uses: actions/download-artifact@v5 with: @@ -358,4 +372,7 @@ jobs: fi done - # TODO: create commit to combine all of the updates in one commit + - name: Commit and push + run: | + git commit -F "${COMMIT_MSG}" + git push -u origin "${{ needs.pr.outputs.PR_REF }}" From 0e5c92556b6f5bcf6b6edc2db114ad99fdd0f9ec Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 19:02:10 -0500 Subject: [PATCH 016/102] gha: create new PR if requested --- .github/workflows/lockfiles.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index c00d15a409..b647b954f7 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -376,3 +376,15 @@ jobs: run: | git commit -F "${COMMIT_MSG}" git push -u origin "${{ needs.pr.outputs.PR_REF }}" + + - name: Create new PR + if: ${{ inputs.pr == 'new' }} + run: > + gh pr create + --base "${{ needs.pr.outputs.PR_BASE_REF }}" + --title "pants generate-lockfiles: ${RESOLVES_CSV}" + --body-file "${PR_COMMENT}" + --reviewer "Maintainers" + --assignee "${{ github.event.sender.login }}" + --label "external dependency" + --label "python3" From fa3b0a42722b05eeb99f619a0449c6253d052f91 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 19:16:06 -0500 Subject: [PATCH 017/102] gha: update or add PR comment on existing PR --- .github/workflows/lockfiles.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index b647b954f7..ca70b2f3fb 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -357,6 +357,8 @@ jobs: done - name: Prepare PR comment + env: + RUN_LINK: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) run: | echo "# Lockfile Diffs" > ${PR_COMMENT} echo >> ${PR_COMMENT} @@ -371,6 +373,7 @@ jobs: echo >> ${PR_COMMENT} fi done + echo ":robot: [GitHub Actions Workflow Run](${RUN_LINK})" >> ${PR_COMMENT} - name: Commit and push run: | @@ -388,3 +391,12 @@ jobs: --assignee "${{ github.event.sender.login }}" --label "external dependency" --label "python3" + + # TODO This updates an existing comment. Should it just add a new comment on re-run? + - name: Comment on existing PR + if: ${{ inputs.pr != 'new' }} + run: > + gh pr comment ${{ inputs.pr }} + --body-file "${PR_COMMENT}" + --edit-last + --create-if-none From 6cb57da18b11698aa6590f5412173fbe7c8dff23 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 20:17:51 -0500 Subject: [PATCH 018/102] gha: Fix job-level env creation that uses env context --- .github/workflows/lockfiles.yaml | 27 +++++++++++++++++---------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index ca70b2f3fb..8bc91fca57 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -14,9 +14,6 @@ on: required: true type: string - # FIXME: Add a baseRef for use when creating a 'new' PR to target a release branch. 
- # FOr now, open the PR to alternate branches first, and then run this workflow. - env: # logs and screenshots go in {OUTPUT_BASE_DIR}/lockfile-{resolve}/ (where "lockfile-{resolve}" is the artifact name) OUTPUT_BASE_DIR: dist/lockfiles # /dist/ is in .gitignore @@ -161,11 +158,7 @@ jobs: resolve: ${{ fromJSON(needs.resolves.ouptuts.JSON) }} env: LOCKFILE: ${{ fromJSON(needs.resolves.outputs.LOCKFILES)[matrix.resolve] }} - # We only need stderr, because generate-lockfiles puts the diff on stderr. - # Nothing else should be on stderr because Pants disables logging to stderr when - # it detects the stderr redirection. (NOTE: stdout should be empty.) - OUTPUT_DIR: ${{ env.OUTPUT_BASE_DIR }}/lockfile-${{ matrix.resolve }} - STDERR_LOG: ${{ env.OUTPUT_BASE_DIR }}/lockfile-${{ matrix.resolve }}/stderr.log + # see also the 'Setup env' step outputs: CHANGED: ${{ steps.lockfile.outputs.CHANGED }} steps: @@ -182,6 +175,15 @@ jobs: # To ignore a bad cache, bump the cache* integer. gha-cache-key: cache0-BUILD + # This step exists because we cannot use the 'env' context in 'jobs..env.*'. + - name: Setup env + # We only need stderr, because generate-lockfiles puts the diff on stderr. + # Nothing else should be on stderr because Pants disables logging to stderr when + # it detects the stderr redirection. (NOTE: stdout should be empty.) + run: | + echo "OUTPUT_DIR=${{ env.OUTPUT_BASE_DIR }}/lockfile-${{ matrix.resolve }}" >> ${GITHUB_ENV} + echo "STDERR_LOG=${{ env.OUTPUT_BASE_DIR }}/lockfile-${{ matrix.resolve }}/stderr.log" >> ${GITHUB_ENV} + - name: Regenerate ${{ matrix.resolve }} lockfile id: lockfile env: @@ -310,12 +312,11 @@ jobs: needs: [resolves, pr, regenerate] runs-on: ubuntu-22.04 env: - COMMIT_MSG: ${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt - PR_COMMENT: ${{ env.OUTPUT_BASE_DIR }}/pr_comment.md # preserve the order of resolves from input (eg pants.toml has st2 first, before tools) RESOLVES: ${{ join(fromJSON(needs.resolves.ouptuts.JSON), ''' ''') }} RESOLVES_CSV: ${{ join(fromJSON(needs.resolves.ouptuts.JSON), ', ') }} LOCKFILES: ${{ needs.resolves.outputs.LOCKFILES }} + # see also the 'Setup env' step steps: - name: Checkout repository uses: actions/checkout@v4 @@ -324,6 +325,12 @@ jobs: submodules: 'true' ref: ${{ needs.pr.outputs.CHECKOUT_REF }} + # This step exists because we cannot use the 'env' context in 'jobs..env.*'. 
+ - name: Setup env + run: | + echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> ${GITHUB_ENV} + echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> ${GITHUB_ENV} + - name: Create branch for new PR if: ${{ inputs.pr == 'new' }} run: | From 6be3117f7acbdcaba66bf97ad44b57625ec4969b Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 22:45:30 -0500 Subject: [PATCH 019/102] gha: move resolves.outputs.LOCKFILES calc out of conditional --- .github/workflows/lockfiles.yaml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 8bc91fca57..c5d9772353 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -40,17 +40,17 @@ jobs: - name: Get resolves in JSON id: resolves run: | + pants_toml=$( + gh api -X GET \ + 'repos/${{ github.repository }}/contents/pants.toml' \ + -f 'ref=${{ github.sha }}' + ) + LOCKFILES=$(yq -e -p toml '.python.resolves' -o json -I 0 <<< ${pants_toml}) if [[ "${{ inputs.resolves }}" != "" ]]; then JSON=$(jq '.|split(",")' <<< '"${{ inputs.resolves }}"') else # Pull pants.toml from the branch that the workflow runs from - pants_toml=$( - gh api -X GET \ - 'repos/${{ github.repository }}/contents/pants.toml' \ - -f 'ref=${{ github.sha }}' - ) JSON=$(yq -e -p toml '.python.resolves|keys()' -o json -I 0 <<< ${pants_toml}) - LOCKFILES=$(yq -e -p toml '.python.resolves' -o json -I 0 <<< ${pants_toml}) fi echo "JSON=${JSON}" | tee -a ${GITHUB_OUTPUT} echo "LOCKFILES=${LOCKFILES}" | tee -a ${GITHUB_OUTPUT} From b03808ee4383e9f9a43d23248d1a7486c2648f1b Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 22:48:30 -0500 Subject: [PATCH 020/102] gha: fix jq usage --- .github/workflows/lockfiles.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index c5d9772353..32346dfdd9 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -47,7 +47,7 @@ jobs: ) LOCKFILES=$(yq -e -p toml '.python.resolves' -o json -I 0 <<< ${pants_toml}) if [[ "${{ inputs.resolves }}" != "" ]]; then - JSON=$(jq '.|split(",")' <<< '"${{ inputs.resolves }}"') + JSON=$(jq -c '.|split(",")' <<< '"${{ inputs.resolves }}"') else # Pull pants.toml from the branch that the workflow runs from JSON=$(yq -e -p toml '.python.resolves|keys()' -o json -I 0 <<< ${pants_toml}) @@ -136,7 +136,7 @@ jobs: echo "Pull Request #${{ inputs.pr }} does not allow maintainer modification!" 
exit 4 fi - echo "Found Pull Request #${{ inputs.pr }} by @$(jq .author.login <<< ${PR})" + echo "Found Pull Request #${{ inputs.pr }} by @$(jq -r .author.login <<< ${PR})" echo "URL: $(jq -r .url <<< ${PR})" fi echo "JSON=${PR}" | tee -a ${GITHUB_OUTPUT} @@ -350,7 +350,7 @@ jobs: echo "pants generate-lockfiles: ${RESOLVES_CSV}" > ${COMMIT_MSG} echo >> ${COMMIT_MSG} for resolve in ${RESOLVES}; do - LOCKFILE=$(jq '.["'"${resolve}"'"]' <<< "${LOCKFILES}") + LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") cp "${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE})" "${LOCKFILE}" git add "${LOCKFILE}" STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" @@ -370,7 +370,7 @@ jobs: echo "# Lockfile Diffs" > ${PR_COMMENT} echo >> ${PR_COMMENT} for resolve in ${RESOLVES}; do - LOCKFILE=$(jq '.["'"${resolve}"'"]' <<< "${LOCKFILES}") + LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" if [ -e ${STDERR_LOG}.md ]; then cat ${STDERR_LOG}.md >> ${PR_COMMENT} From 194f900ddf417ac4a97bb285471f719030f53c43 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 23 Sep 2025 23:17:09 -0500 Subject: [PATCH 021/102] gha: fix embedded shell script based on shellcheck feedback --- .github/workflows/lockfiles.yaml | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 32346dfdd9..da87a7d609 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -99,7 +99,8 @@ jobs: if [[ "${{ inputs.pr }}" == new ]]; then echo "Planning new Pull Request metadata ..." PR=$( - yq -e -p yaml . -o json -I 0 <<-HEREYAML + yq -e -p yaml . -o json -I 0 \ + < 0 ]; then + if [ ${pr_search_rc} -gt 0 ]; then echo "Pull Request #${{ inputs.pr }} not found!" exit 2 - elif (jq -e .closed <<< ${PR} >/dev/null); then + elif (jq -e .closed <<< "${PR}" >/dev/null); then echo "Pull Request #${{ inputs.pr }} is closed!" exit 3 - elif ! (jq -e .maintainerCanModify <<< ${PR} >/dev/null); then + elif ! (jq -e .maintainerCanModify <<< "${PR}" >/dev/null); then echo "Pull Request #${{ inputs.pr }} does not allow maintainer modification!" 
exit 4 fi - echo "Found Pull Request #${{ inputs.pr }} by @$(jq -r .author.login <<< ${PR})" - echo "URL: $(jq -r .url <<< ${PR})" + echo "Found Pull Request #${{ inputs.pr }} by @$(jq -r .author.login <<< "${PR}")" + echo "URL: $(jq -r .url <<< "${PR}")" fi - echo "JSON=${PR}" | tee -a ${GITHUB_OUTPUT} - echo "CHECKOUT_REF=${CHECKOUT_REF}" | tee -a ${GITHUB_OUTPUT} - PR_REPO=$(jq -r '.headRepositoryOwner.login + "/" + .headRepository.name' <<< ${PR}) - PR_REF=$(jq -r '.headRefName' <<< ${PR}) - PR_BASE_REF=$(jq -r '.baseRefName' <<< ${PR}) - echo "PR_REPO=${PR_REPO}" | tee -a ${GITHUB_OUTPUT} - echo "PR_REF=${PR_REF}" | tee -a ${GITHUB_OUTPUT} - echo "PR_BASE_REF=${PR_BASE_REF}" | tee -a ${GITHUB_OUTPUT} + echo "JSON=${PR}" | tee -a "${GITHUB_OUTPUT}" + echo "CHECKOUT_REF=${CHECKOUT_REF}" | tee -a "${GITHUB_OUTPUT}" + PR_REPO=$(jq -r '.headRepositoryOwner.login + "/" + .headRepository.name' <<< "${PR}") + PR_REF=$(jq -r '.headRefName' <<< "${PR}") + PR_BASE_REF=$(jq -r '.baseRefName' <<< "${PR}") + echo "PR_REPO=${PR_REPO}" | tee -a "${GITHUB_OUTPUT}" + echo "PR_REF=${PR_REF}" | tee -a "${GITHUB_OUTPUT}" + echo "PR_BASE_REF=${PR_BASE_REF}" | tee -a "${GITHUB_OUTPUT}" echo "Pull from ${PR_REPO}:${PR_REF} into ${PR_BASE_REF}" regenerate: From f4be2f502963427124a6072e94f3ffc24bc34e73 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 00:16:11 -0500 Subject: [PATCH 022/102] gha: add GH_TOKEN to env --- .github/workflows/lockfiles.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index da87a7d609..729b45ebcc 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -39,6 +39,8 @@ jobs: - name: Get resolves in JSON id: resolves + env: + GH_TOKEN: ${{ github.token }} run: | pants_toml=$( gh api -X GET \ @@ -84,6 +86,7 @@ jobs: - name: Get pr in JSON id: pr env: + GH_TOKEN: ${{ github.token }} PR_FIELDS: "\ id,\ number,\ @@ -390,6 +393,8 @@ jobs: - name: Create new PR if: ${{ inputs.pr == 'new' }} + env: + GH_TOKEN: ${{ github.token }} run: > gh pr create --base "${{ needs.pr.outputs.PR_BASE_REF }}" @@ -403,6 +408,8 @@ jobs: # TODO This updates an existing comment. Should it just add a new comment on re-run? 
- name: Comment on existing PR if: ${{ inputs.pr != 'new' }} + env: + GH_TOKEN: ${{ github.token }} run: > gh pr comment ${{ inputs.pr }} --body-file "${PR_COMMENT}" From ae8ac23eb0a684f21b88218f5e8dbced43a2dbb6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 00:33:17 -0500 Subject: [PATCH 023/102] gha: more shellcheck --- .github/workflows/lockfiles.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 729b45ebcc..d95f75b5f3 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -47,15 +47,15 @@ jobs: 'repos/${{ github.repository }}/contents/pants.toml' \ -f 'ref=${{ github.sha }}' ) - LOCKFILES=$(yq -e -p toml '.python.resolves' -o json -I 0 <<< ${pants_toml}) + LOCKFILES=$(yq -e -p toml '.python.resolves' -o json -I 0 <<< "${pants_toml}") if [[ "${{ inputs.resolves }}" != "" ]]; then JSON=$(jq -c '.|split(",")' <<< '"${{ inputs.resolves }}"') else # Pull pants.toml from the branch that the workflow runs from - JSON=$(yq -e -p toml '.python.resolves|keys()' -o json -I 0 <<< ${pants_toml}) + JSON=$(yq -e -p toml '.python.resolves|keys()' -o json -I 0 <<< "${pants_toml}") fi - echo "JSON=${JSON}" | tee -a ${GITHUB_OUTPUT} - echo "LOCKFILES=${LOCKFILES}" | tee -a ${GITHUB_OUTPUT} + echo "JSON=${JSON}" | tee -a "${GITHUB_OUTPUT}" + echo "LOCKFILES=${LOCKFILES}" | tee -a "${GITHUB_OUTPUT}" pr: name: Preprocess input var - pr From aec14620d491cd23217079d071107fae86e381a6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 00:54:21 -0500 Subject: [PATCH 024/102] gha: extract value from gh api response --- .github/workflows/lockfiles.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index d95f75b5f3..85ffebdfd2 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -45,14 +45,15 @@ jobs: pants_toml=$( gh api -X GET \ 'repos/${{ github.repository }}/contents/pants.toml' \ - -f 'ref=${{ github.sha }}' + -f 'ref=${{ github.sha }}' \ + --jq $'.content' | base64 -d ) LOCKFILES=$(yq -e -p toml '.python.resolves' -o json -I 0 <<< "${pants_toml}") if [[ "${{ inputs.resolves }}" != "" ]]; then JSON=$(jq -c '.|split(",")' <<< '"${{ inputs.resolves }}"') else # Pull pants.toml from the branch that the workflow runs from - JSON=$(yq -e -p toml '.python.resolves|keys()' -o json -I 0 <<< "${pants_toml}") + JSON=$(yq -e -p toml '.python.resolves|keys' -o json -I 0 <<< "${pants_toml}") fi echo "JSON=${JSON}" | tee -a "${GITHUB_OUTPUT}" echo "LOCKFILES=${LOCKFILES}" | tee -a "${GITHUB_OUTPUT}" From ce51629174c93ed2c598fd53cc018e137c5636be Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 08:40:00 -0500 Subject: [PATCH 025/102] gha: typo fix --- .github/workflows/lockfiles.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 85ffebdfd2..a3787642a1 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -160,7 +160,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - resolve: ${{ fromJSON(needs.resolves.ouptuts.JSON) }} + resolve: ${{ fromJSON(needs.resolves.outputs.JSON) }} env: LOCKFILE: ${{ fromJSON(needs.resolves.outputs.LOCKFILES)[matrix.resolve] }} # see also the 'Setup env' step @@ -318,8 +318,8 @@ jobs: runs-on: ubuntu-22.04 env: # preserve the order of resolves from input (eg 
pants.toml has st2 first, before tools) - RESOLVES: ${{ join(fromJSON(needs.resolves.ouptuts.JSON), ''' ''') }} - RESOLVES_CSV: ${{ join(fromJSON(needs.resolves.ouptuts.JSON), ', ') }} + RESOLVES: ${{ join(fromJSON(needs.resolves.outputs.JSON), ''' ''') }} + RESOLVES_CSV: ${{ join(fromJSON(needs.resolves.outputs.JSON), ', ') }} LOCKFILES: ${{ needs.resolves.outputs.LOCKFILES }} # see also the 'Setup env' step steps: From 91771fc55a2a7d5d66cf6dd0ad09b5df9a4c5d7d Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 10:10:31 -0500 Subject: [PATCH 026/102] gha: do not use job outputs from matrix jobs --- .github/workflows/lockfiles.yaml | 85 +++++++++++++++++++------------- 1 file changed, 52 insertions(+), 33 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index a3787642a1..324cbfad78 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -164,8 +164,6 @@ jobs: env: LOCKFILE: ${{ fromJSON(needs.resolves.outputs.LOCKFILES)[matrix.resolve] }} # see also the 'Setup env' step - outputs: - CHANGED: ${{ steps.lockfile.outputs.CHANGED }} steps: - name: Checkout repository uses: actions/checkout@v4 @@ -206,6 +204,9 @@ jobs: fi ) echo "CHANGED=${CHANGED}" | tee -a ${GITHUB_OUTPUT} + # The last job's outputs overwrite the outputs of other jobs in matrix. + # So, matrix jobs should not use job outputs. Workaround: use artifacts. + echo "${CHANGED}" > "${OUTPUT_DIR}/$(basename ${LOCKFILE}).CHANGED" - name: Install rsvg-convert for freeze if: steps.lockfile.outputs.CHANGED == 'true' @@ -266,26 +267,29 @@ jobs: sed -e "${SED_SCRIPT}" > ${STDERR_LOG}.txt - name: Prepare Job Summary - if: steps.lockfile.outputs.CHANGED == 'true' run: | ( echo '## ${{ matrix.resolve }} Lockfile Diff' echo - echo '' - echo ' ' - echo ' ' - echo ' ' - echo ' ' - echo ' Terminal screenshot of lockfile diff in color. The text from the image is included below.' - echo '' - echo - echo '
' - echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' - echo - echo '```' - cat ${STDERR_LOG}.txt - echo '```' + if [ "${{ steps.lockfile.outputs.CHANGED }} != true" ]; then + echo 'No changes required for ${{ env.LOCKFILE }}' + else + echo '' + echo ' ' + echo ' ' + echo ' ' + echo ' ' + echo ' Terminal screenshot of lockfile diff in color. The text from the image is included below.' + echo '' + echo + echo '
' + echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' + echo + echo '```' + cat ${STDERR_LOG}.txt + echo '```' + fi echo ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} ( @@ -293,10 +297,12 @@ jobs: echo 'Diff base (git ref): *${{ steps.pr.outputs.PR_BASE_REF }}* ' echo ) >> ${GITHUB_STEP_SUMMARY} # only add this note to the job summary - ( - echo '
' - echo - ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} + if [ "${{ steps.lockfile.outputs.CHANGED }} == true" ]; then + ( + echo '
' + echo + ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} + fi - name: Upload lockfile and lockfile diff files uses: actions/upload-artifact@v4 @@ -330,14 +336,8 @@ jobs: submodules: 'true' ref: ${{ needs.pr.outputs.CHECKOUT_REF }} - # This step exists because we cannot use the 'env' context in 'jobs..env.*'. - - name: Setup env - run: | - echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> ${GITHUB_ENV} - echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> ${GITHUB_ENV} - - name: Create branch for new PR - if: ${{ inputs.pr == 'new' }} + if: inputs.pr == 'new' run: | git config --local user.name "github-actions[bot]" git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" @@ -350,7 +350,26 @@ jobs: path: ${{ env.OUTPUT_BASE_DIR }} merge-multiple: true # unpack in {path}/{artifact_name}/ + # This step exists because we cannot use the 'env' context in 'jobs..env.*', + # and because we cannot rely on 'jobs..outputs.*' for matrix jobs. + - name: Setup env + run: | + echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> ${GITHUB_ENV} + echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> ${GITHUB_ENV} + CHANGED="false" + for resolve in ${RESOLVES}; do + CHANGED_FILE="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE}).CHANGED" + if [ -e "${CHANGED_FILE}" ] && [ "$(cat "${CHANGED_FILE}")" == true ]; then + CHANGED="true" + echo "${LOCKFILE} was modified" + else + echo "${LOCKFILE} was NOT modified" + fi + done + echo "CHANGED=${CHANGED}" >> ${GITHUB_ENV} + - name: Prepare commit + if: env.CHANGED == 'true' run: | echo "pants generate-lockfiles: ${RESOLVES_CSV}" > ${COMMIT_MSG} echo >> ${COMMIT_MSG} @@ -388,12 +407,13 @@ jobs: echo ":robot: [GitHub Actions Workflow Run](${RUN_LINK})" >> ${PR_COMMENT} - name: Commit and push + if: env.CHANGED == 'true' run: | git commit -F "${COMMIT_MSG}" git push -u origin "${{ needs.pr.outputs.PR_REF }}" - name: Create new PR - if: ${{ inputs.pr == 'new' }} + if: inputs.pr == 'new' && env.CHANGED == 'true' env: GH_TOKEN: ${{ github.token }} run: > @@ -406,9 +426,8 @@ jobs: --label "external dependency" --label "python3" - # TODO This updates an existing comment. Should it just add a new comment on re-run? 
- - name: Comment on existing PR - if: ${{ inputs.pr != 'new' }} + - name: Update or Add Comment on existing PR + if: inputs.pr != 'new' env: GH_TOKEN: ${{ github.token }} run: > From 5608e3d2f0c9fa16e600c42bf6b4950e29a29af3 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 10:15:00 -0500 Subject: [PATCH 027/102] gha: fix job summary --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 324cbfad78..b551fa1b8c 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -294,7 +294,7 @@ jobs: ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} ( echo '[!NOTE]' - echo 'Diff base (git ref): *${{ steps.pr.outputs.PR_BASE_REF }}* ' + echo 'Diff base (git ref): *${{ needs.pr.outputs.PR_BASE_REF }}*' echo ) >> ${GITHUB_STEP_SUMMARY} # only add this note to the job summary if [ "${{ steps.lockfile.outputs.CHANGED }} == true" ]; then From dfa23881bfe3d0e485fc27ddb5f8adfcf13527be Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 10:25:58 -0500 Subject: [PATCH 028/102] gha: add missing var --- .github/workflows/lockfiles.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index b551fa1b8c..30437c9c01 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -358,6 +358,7 @@ jobs: echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> ${GITHUB_ENV} CHANGED="false" for resolve in ${RESOLVES}; do + LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") CHANGED_FILE="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE}).CHANGED" if [ -e "${CHANGED_FILE}" ] && [ "$(cat "${CHANGED_FILE}")" == true ]; then CHANGED="true" From 4ec512ae5dc53354daa795b1cff1bcaac7cfc1b3 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 10:41:49 -0500 Subject: [PATCH 029/102] gha: we need artifacts even ehen not CHANGED --- .github/workflows/lockfiles.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 30437c9c01..fb5800f2f0 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -309,7 +309,6 @@ jobs: with: name: lockfile-${{ matrix.resolve }} path: ${{ env.OUTPUT_DIR }} - if: steps.lockfile.outputs.CHANGED == 'true' - name: Upload pants log uses: actions/upload-artifact@v4 From 4ee29bf3f5c34ade6b9cf5bab6607d563d2f3099 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 10:49:46 -0500 Subject: [PATCH 030/102] gha: fix note syntax in job summary --- .github/workflows/lockfiles.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index fb5800f2f0..501aefc3b5 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -293,8 +293,8 @@ jobs: echo ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} ( - echo '[!NOTE]' - echo 'Diff base (git ref): *${{ needs.pr.outputs.PR_BASE_REF }}*' + echo '> [!NOTE]' + echo '> Diff base (git ref): *${{ needs.pr.outputs.PR_BASE_REF }}*' echo ) >> ${GITHUB_STEP_SUMMARY} # only add this note to the job summary if [ "${{ steps.lockfile.outputs.CHANGED }} == true" ]; then From 00853969201061ac5721390b48694edf850d48cb Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 11:17:34 -0500 Subject: [PATCH 031/102] gha: put freeze in 
dist/ So it does not dirty the checkout. To make it clear that freeze is not on PATH. --- .github/workflows/lockfiles.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 501aefc3b5..87168c31b7 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -222,6 +222,7 @@ jobs: tag: v0.2.2 fileName: freeze_*_${{ runner.os }}_${{ fromJSON('{"X86":"i386","X64":"x86_64","ARM":"arm","ARM64":"arm64"}')[runner.arch] }}.tar.gz extract: true + out-file-path: dist # relative to github.workspace - name: Freeze lockfile diff as picture if: steps.lockfile.outputs.CHANGED == 'true' @@ -237,12 +238,13 @@ jobs: s/^== .* ==$/\x1B[4m\0\x1B[0m/ # add ansi underline to headling s/^\x1b\[0m$// # drop ansi reset after the heading run: | + chmod +x dist/freeze for theme in github github-dark; do for ext in svg png; do # The diff output applies ansi underlines across multiple lines, but freeze formats # each line separately. So, use sed to repeat ansi chars per line. sed -e "${SED_SCRIPT}" ${STDERR_LOG} \ - | freeze --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - + | dist/freeze --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - done done From 4eb67345942bffac9c0ba407a57fc16ce06ebc5d Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 11:49:36 -0500 Subject: [PATCH 032/102] gha: try setting default run shell to fix dist/freeze not found --- .github/workflows/lockfiles.yaml | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 87168c31b7..4150b82676 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -18,6 +18,11 @@ env: # logs and screenshots go in {OUTPUT_BASE_DIR}/lockfile-{resolve}/ (where "lockfile-{resolve}" is the artifact name) OUTPUT_BASE_DIR: dist/lockfiles # /dist/ is in .gitignore +defaults: + run: + shell: bash + #working-directory: # defaults to ${{ github.workspace }} + jobs: resolves: name: Preprocess input var - resolves @@ -237,14 +242,16 @@ jobs: s/^\x1B\[4m \+$/\0\x1B[0m/ # append ansi reset on line above heading s/^== .* ==$/\x1B[4m\0\x1B[0m/ # add ansi underline to headling s/^\x1b\[0m$// # drop ansi reset after the heading + FREEZE: dist/freeze run: | - chmod +x dist/freeze + ls -l dist + chmod +x "${FREEZE}" for theme in github github-dark; do for ext in svg png; do # The diff output applies ansi underlines across multiple lines, but freeze formats # each line separately. So, use sed to repeat ansi chars per line. sed -e "${SED_SCRIPT}" ${STDERR_LOG} \ - | dist/freeze --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - + | "${FREEZE}" --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - done done From 25941a10f0596516d34fbc9df9d8fbd6280ff009 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 12:36:27 -0500 Subject: [PATCH 033/102] gha: use gh to download freeze and unpack with tar robinraju/release-downloader did not provide enough controls around download directory and archive extraction. 
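In rough outline, the gh-based replacement in the diff below amounts to (version,
architecture, and target directory here are only illustrative):

    gh release download "v0.2.2" -R charmbracelet/freeze \
      --pattern "freeze_*_Linux_x86_64.tar.gz" --output "${RUNNER_TEMP}/freeze.tar.gz"
    tar xzf "${RUNNER_TEMP}/freeze.tar.gz" -C ~/.local/bin \
      --strip-components=1 --wildcards '*/freeze'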
--- .github/workflows/lockfiles.yaml | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 4150b82676..8ceeb56b67 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -221,13 +221,21 @@ jobs: - name: Install freeze if: steps.lockfile.outputs.CHANGED == 'true' - uses: robinraju/release-downloader@v1 - with: - repository: charmbracelet/freeze - tag: v0.2.2 - fileName: freeze_*_${{ runner.os }}_${{ fromJSON('{"X86":"i386","X64":"x86_64","ARM":"arm","ARM64":"arm64"}')[runner.arch] }}.tar.gz - extract: true - out-file-path: dist # relative to github.workspace + env: + VERSION: '0.2.2' + GOARCH: ${{ fromJSON('{"X86":"i386","X64":"x86_64","ARM":"arm","ARM64":"arm64"}')[runner.arch] }} + GH_TOKEN: ${{ github.token }} + run: | + gh release download "v${VERSION}" \ + --pattern "freeze_*_${RUNNER_OS}_${GOARCH}.tar.gz" \ + --output "${RUNNER_TEMP}/freeze.tar.gz" + tar xzf "${RUNNER_TEMP}/freeze.tar.gz" \ + -C ~/.local/bin/freeze \ + --strip-components=1 \ + '*/freeze' + ls -l ~/.local/bin + chmod +x ~/.local/bin/freeze + ls -l ~/.local/bin - name: Freeze lockfile diff as picture if: steps.lockfile.outputs.CHANGED == 'true' @@ -242,16 +250,13 @@ jobs: s/^\x1B\[4m \+$/\0\x1B[0m/ # append ansi reset on line above heading s/^== .* ==$/\x1B[4m\0\x1B[0m/ # add ansi underline to headling s/^\x1b\[0m$// # drop ansi reset after the heading - FREEZE: dist/freeze run: | - ls -l dist - chmod +x "${FREEZE}" for theme in github github-dark; do for ext in svg png; do # The diff output applies ansi underlines across multiple lines, but freeze formats # each line separately. So, use sed to repeat ansi chars per line. sed -e "${SED_SCRIPT}" ${STDERR_LOG} \ - | "${FREEZE}" --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - + | freeze --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - done done From 8d7321853b05aa13b23834acc074948babbf27f8 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 12:52:16 -0500 Subject: [PATCH 034/102] gha: fix freeze download and drop apt progress bars --- .github/workflows/lockfiles.yaml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 8ceeb56b67..cf6ee8f35b 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -216,6 +216,8 @@ jobs: - name: Install rsvg-convert for freeze if: steps.lockfile.outputs.CHANGED == 'true' # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. 
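        # (Run by hand, the SVG-to-PNG conversion rsvg-convert performs looks roughly like
        #    rsvg-convert --output diff.png diff.svg
        #  PNG is its default output format; the file names here are only illustrative.)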
+ env: + DEBIAN_FRONTEND: noninteractive # get rid of the progress bars run: | sudo apt-get install librsvg2-bin @@ -227,6 +229,7 @@ jobs: GH_TOKEN: ${{ github.token }} run: | gh release download "v${VERSION}" \ + -R "charmbracelet/freeze" \ --pattern "freeze_*_${RUNNER_OS}_${GOARCH}.tar.gz" \ --output "${RUNNER_TEMP}/freeze.tar.gz" tar xzf "${RUNNER_TEMP}/freeze.tar.gz" \ From 8a96d6f22b95e43b1d6c969d022c6c897522b6b0 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 12:54:33 -0500 Subject: [PATCH 035/102] gha: drop apt progress bars --- .github/workflows/lockfiles.yaml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index cf6ee8f35b..0b588bb432 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -216,10 +216,8 @@ jobs: - name: Install rsvg-convert for freeze if: steps.lockfile.outputs.CHANGED == 'true' # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. - env: - DEBIAN_FRONTEND: noninteractive # get rid of the progress bars run: | - sudo apt-get install librsvg2-bin + sudo DEBIAN_FRONTEND=noninteractive apt-get install librsvg2-bin - name: Install freeze if: steps.lockfile.outputs.CHANGED == 'true' From dd8c49e7cc41c825c1f4b3a20de42aa1f15ef996 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 12:55:48 -0500 Subject: [PATCH 036/102] gha: fix unpacking freeze archive --- .github/workflows/lockfiles.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 0b588bb432..677cdc284f 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -233,6 +233,7 @@ jobs: tar xzf "${RUNNER_TEMP}/freeze.tar.gz" \ -C ~/.local/bin/freeze \ --strip-components=1 \ + --wildcards \ '*/freeze' ls -l ~/.local/bin chmod +x ~/.local/bin/freeze From e7ab0fb2c3ded528a3505be094bac2cd8713a04f Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 12:59:15 -0500 Subject: [PATCH 037/102] gha: fix unpacking freeze archive --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 677cdc284f..2e115b64d5 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -231,7 +231,7 @@ jobs: --pattern "freeze_*_${RUNNER_OS}_${GOARCH}.tar.gz" \ --output "${RUNNER_TEMP}/freeze.tar.gz" tar xzf "${RUNNER_TEMP}/freeze.tar.gz" \ - -C ~/.local/bin/freeze \ + -C ~/.local/bin \ --strip-components=1 \ --wildcards \ '*/freeze' From 7d02315f6c175b53a6548a0a25ca52cb91074a0c Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 13:09:23 -0500 Subject: [PATCH 038/102] gha: drop apt progress bars --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 2e115b64d5..23f9da733b 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -217,7 +217,7 @@ jobs: if: steps.lockfile.outputs.CHANGED == 'true' # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. 
run: | - sudo DEBIAN_FRONTEND=noninteractive apt-get install librsvg2-bin + sudo DEBIAN_FRONTEND=noninteractive apt-get install librsvg2-bin < /dev/null # null stdin disables extra dpkg output - name: Install freeze if: steps.lockfile.outputs.CHANGED == 'true' From 713caae5970769c03390165738afbab85e6c6eec Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 13:25:45 -0500 Subject: [PATCH 039/102] gha: some cleanup --- .github/workflows/lockfiles.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 23f9da733b..be81c7562c 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -217,7 +217,7 @@ jobs: if: steps.lockfile.outputs.CHANGED == 'true' # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. run: | - sudo DEBIAN_FRONTEND=noninteractive apt-get install librsvg2-bin < /dev/null # null stdin disables extra dpkg output + sudo DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Use-Pty=0 install librsvg2-bin - name: Install freeze if: steps.lockfile.outputs.CHANGED == 'true' @@ -235,9 +235,7 @@ jobs: --strip-components=1 \ --wildcards \ '*/freeze' - ls -l ~/.local/bin - chmod +x ~/.local/bin/freeze - ls -l ~/.local/bin + [[ -x ~/.local/bin/freeze ]] || chmod +x ~/.local/bin/freeze - name: Freeze lockfile diff as picture if: steps.lockfile.outputs.CHANGED == 'true' From a55e71b9ae750b667247e7f3fa63abea64422cd2 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 14:17:58 -0500 Subject: [PATCH 040/102] gha: install JetBrains Mono font for freeze+rsvg-convert to use --- .github/workflows/lockfiles.yaml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index be81c7562c..8dc88c1113 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -213,11 +213,22 @@ jobs: # So, matrix jobs should not use job outputs. Workaround: use artifacts. echo "${CHANGED}" > "${OUTPUT_DIR}/$(basename ${LOCKFILE}).CHANGED" - - name: Install rsvg-convert for freeze + - name: Install rsvg-convert and JetBrains Mono font for freeze if: steps.lockfile.outputs.CHANGED == 'true' # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. + # But, rsvg-convert doesn't handle woff font freeze embeds in the svg, so install the font. 
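        # (Optional sanity check, assuming the usual fontconfig CLI tools are on the runner:
        #    fc-list | grep -i "jetbrains mono"
        #  should list the newly installed font families after fc-cache runs.)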
run: | sudo DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Use-Pty=0 install librsvg2-bin + # yes, this is not great + bash -c "$(curl -fsSL https://raw.githubusercontent.com/JetBrains/JetBrainsMono/master/install_manual.sh)" + # Font install based on: https://github.com/JetBrains/JetBrainsMono/blob/v2.304/install_manual.sh + gh release download "v2.304" \ + -R "JetBrains/JetBrainsMono" \ + --pattern "JetBrainsMono-*.zip" \ + --output "${RUNNER_TEMP}/JetBrainsMono.zip" + mkdir -p ~/.local/share/fonts + unzip -o "${RUNNER_TEMP}/JetBrainsMono.zip" -d ~/.local/share/fonts + fc-cache -f - name: Install freeze if: steps.lockfile.outputs.CHANGED == 'true' From 3425b0ce9b6f3192a8bfd6dc2e7bee07807a6202 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 14:21:43 -0500 Subject: [PATCH 041/102] gha: add missing GH_TOKEN var --- .github/workflows/lockfiles.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 8dc88c1113..ddb1d72b12 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -215,6 +215,8 @@ jobs: - name: Install rsvg-convert and JetBrains Mono font for freeze if: steps.lockfile.outputs.CHANGED == 'true' + env: + GH_TOKEN: ${{ github.token }} # rsvg-convert doesn't cause a panic like the resvg lib freeze uses, and it's faster. # But, rsvg-convert doesn't handle woff font freeze embeds in the svg, so install the font. run: | From 3c2f54ffbda1e727e02be77b49b40b9da43742ce Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 14:58:37 -0500 Subject: [PATCH 042/102] gha: drop font script download --- .github/workflows/lockfiles.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index ddb1d72b12..871793ca62 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -221,8 +221,6 @@ jobs: # But, rsvg-convert doesn't handle woff font freeze embeds in the svg, so install the font. run: | sudo DEBIAN_FRONTEND=noninteractive apt-get -o Dpkg::Use-Pty=0 install librsvg2-bin - # yes, this is not great - bash -c "$(curl -fsSL https://raw.githubusercontent.com/JetBrains/JetBrainsMono/master/install_manual.sh)" # Font install based on: https://github.com/JetBrains/JetBrainsMono/blob/v2.304/install_manual.sh gh release download "v2.304" \ -R "JetBrains/JetBrainsMono" \ From 83a76d10b07070eba24ec9b4a445a5e5b62d74e1 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 15:37:22 -0500 Subject: [PATCH 043/102] gha: improve shell quoting in run scripts --- .github/workflows/lockfiles.yaml | 65 ++++++++++++++++---------------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 871793ca62..8347acb94e 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -197,12 +197,12 @@ jobs: env: PR_BASE_REF: ${{ needs.pr.outputs.PR_BASE_REF }} run: | - mkdir -p ${OUTPUT_DIR}/ - git checkout ${{ env.PR_BASE_REF }} -- ${LOCKFILE} # diff is for whole PR not just a commit. - pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee ${STDERR_LOG} >&2 ) - cp ${LOCKFILE} ${OUTPUT_DIR}/ + mkdir -p "${OUTPUT_DIR}/" + git checkout "${{ env.PR_BASE_REF }}" -- "${LOCKFILE}" # diff is for whole PR not just a commit. 
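          # (In other words: restore the base branch's copy of the lockfile before regenerating,
          #  so the git diff computed below reflects everything the PR changes, not just this commit.)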
+ pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee "${STDERR_LOG}" >&2 ) + cp "${LOCKFILE}" "${OUTPUT_DIR}/" CHANGED=$( - if git diff ${{ env.PR_BASE_REF }} --exit-code --quiet -- ${LOCKFILE}; then + if git diff "${{ env.PR_BASE_REF }}" --exit-code --quiet -- "${LOCKFILE}"; then echo "false" else echo "true" @@ -266,8 +266,8 @@ jobs: for ext in svg png; do # The diff output applies ansi underlines across multiple lines, but freeze formats # each line separately. So, use sed to repeat ansi chars per line. - sed -e "${SED_SCRIPT}" ${STDERR_LOG} \ - | freeze --config full --language ansi --theme ${theme} --output ${STDERR_LOG}.${theme}.${ext} - + sed -e "${SED_SCRIPT}" "${STDERR_LOG}" \ + | freeze --config full --language ansi --theme "${theme}" --output "${STDERR_LOG}.${theme}.${ext}" - done done @@ -289,14 +289,15 @@ jobs: /^\x1B\[0m$/d # drop blank line after heading (replaced with line of overline chars) s/\x1B\[[0-9]\+m//g # strip out ansi escapes run: | - sed -e "${SED_SCRIPT}" > ${STDERR_LOG}.txt + sed -e "${SED_SCRIPT}" "${STDERR_LOG}" | tee "${STDERR_LOG}.txt" | head -n5 + echo ... - name: Prepare Job Summary run: | ( echo '## ${{ matrix.resolve }} Lockfile Diff' echo - if [ "${{ steps.lockfile.outputs.CHANGED }} != true" ]; then + if [ "${{ steps.lockfile.outputs.CHANGED }}" != true ]; then echo 'No changes required for ${{ env.LOCKFILE }}' else echo '' @@ -312,21 +313,21 @@ jobs: echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' echo echo '```' - cat ${STDERR_LOG}.txt + cat "${STDERR_LOG}.txt" echo '```' fi echo - ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} + ) | tee -a "${STDERR_LOG}.md" >> "${GITHUB_STEP_SUMMARY}" ( echo '> [!NOTE]' echo '> Diff base (git ref): *${{ needs.pr.outputs.PR_BASE_REF }}*' echo - ) >> ${GITHUB_STEP_SUMMARY} # only add this note to the job summary - if [ "${{ steps.lockfile.outputs.CHANGED }} == true" ]; then + ) >> "${GITHUB_STEP_SUMMARY}" # only add this note to the job summary + if [ "${{ steps.lockfile.outputs.CHANGED }}" == true ]; then ( echo '
' echo - ) | tee -a ${STDERR_LOG}.md >> ${GITHUB_STEP_SUMMARY} + ) | tee -a "${STDERR_LOG}.md" >> "${GITHUB_STEP_SUMMARY}" fi - name: Upload lockfile and lockfile diff files @@ -378,12 +379,12 @@ jobs: # and because we cannot rely on 'jobs..outputs.*' for matrix jobs. - name: Setup env run: | - echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> ${GITHUB_ENV} - echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> ${GITHUB_ENV} + echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> "${GITHUB_ENV}" + echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> "${GITHUB_ENV}" CHANGED="false" for resolve in ${RESOLVES}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") - CHANGED_FILE="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE}).CHANGED" + CHANGED_FILE="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename "${LOCKFILE}").CHANGED" if [ -e "${CHANGED_FILE}" ] && [ "$(cat "${CHANGED_FILE}")" == true ]; then CHANGED="true" echo "${LOCKFILE} was modified" @@ -391,24 +392,24 @@ jobs: echo "${LOCKFILE} was NOT modified" fi done - echo "CHANGED=${CHANGED}" >> ${GITHUB_ENV} + echo "CHANGED=${CHANGED}" >> "${GITHUB_ENV}" - name: Prepare commit if: env.CHANGED == 'true' run: | - echo "pants generate-lockfiles: ${RESOLVES_CSV}" > ${COMMIT_MSG} - echo >> ${COMMIT_MSG} + echo "pants generate-lockfiles: ${RESOLVES_CSV}" > "${COMMIT_MSG}" + echo >> "${COMMIT_MSG}" for resolve in ${RESOLVES}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") cp "${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE})" "${LOCKFILE}" git add "${LOCKFILE}" STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" - if [ -e ${STDERR_LOG}.txt ]; then - cat ${STDERR_LOG}.txt >> ${COMMIT_MSG} + if [ -e "${STDERR_LOG}.txt" ]; then + cat "${STDERR_LOG}.txt" >> "${COMMIT_MSG}" else echo "${STDERR_LOG}.txt is missing" - echo "No changes to: ${LOCKFILE}" >> ${COMMIT_MSG} - echo >> ${COMMIT_MSG} + echo "No changes to: ${LOCKFILE}" >> "${COMMIT_MSG}" + echo >> "${COMMIT_MSG}" fi done @@ -416,20 +417,20 @@ jobs: env: RUN_LINK: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) run: | - echo "# Lockfile Diffs" > ${PR_COMMENT} - echo >> ${PR_COMMENT} + echo "# Lockfile Diffs" > "${PR_COMMENT}" + echo >> "${PR_COMMENT}" for resolve in ${RESOLVES}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" - if [ -e ${STDERR_LOG}.md ]; then - cat ${STDERR_LOG}.md >> ${PR_COMMENT} + if [ -e "${STDERR_LOG}.md" ]; then + cat "${STDERR_LOG}.md" >> "${PR_COMMENT}" else echo "${STDERR_LOG}.md is missing" - echo "No changes to: `${LOCKFILE}`" >> ${PR_COMMENT} - echo >> ${PR_COMMENT} + echo "No changes to: `${LOCKFILE}`" >> "${PR_COMMENT}" + echo >> "${PR_COMMENT}" fi done - echo ":robot: [GitHub Actions Workflow Run](${RUN_LINK})" >> ${PR_COMMENT} + echo ":robot: [GitHub Actions Workflow Run](${RUN_LINK})" >> "${PR_COMMENT}" - name: Commit and push if: env.CHANGED == 'true' @@ -456,7 +457,7 @@ jobs: env: GH_TOKEN: ${{ github.token }} run: > - gh pr comment ${{ inputs.pr }} + gh pr comment "${{ inputs.pr }}" --body-file "${PR_COMMENT}" --edit-last --create-if-none From 79c20efef17ac3f4aeb11ec46234981f8535de56 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 16:02:36 -0500 Subject: [PATCH 044/102] gha: fix CHANGED detection and more shell quoting --- .github/workflows/lockfiles.yaml | 9 ++++----- 1 file changed, 4 
insertions(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 8347acb94e..e85ab61a9b 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -208,10 +208,10 @@ jobs: echo "true" fi ) - echo "CHANGED=${CHANGED}" | tee -a ${GITHUB_OUTPUT} + echo "CHANGED=${CHANGED}" | tee -a "${GITHUB_OUTPUT}" # The last job's outputs overwrite the outputs of other jobs in matrix. # So, matrix jobs should not use job outputs. Workaround: use artifacts. - echo "${CHANGED}" > "${OUTPUT_DIR}/$(basename ${LOCKFILE}).CHANGED" + echo "${CHANGED}" > "${OUTPUT_DIR}/$(basename "${LOCKFILE}").CHANGED" - name: Install rsvg-convert and JetBrains Mono font for freeze if: steps.lockfile.outputs.CHANGED == 'true' @@ -381,18 +381,17 @@ jobs: run: | echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> "${GITHUB_ENV}" echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> "${GITHUB_ENV}" - CHANGED="false" for resolve in ${RESOLVES}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") CHANGED_FILE="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename "${LOCKFILE}").CHANGED" - if [ -e "${CHANGED_FILE}" ] && [ "$(cat "${CHANGED_FILE}")" == true ]; then + if [ -e "${CHANGED_FILE}" ] && [ "$(cat "${CHANGED_FILE}" | tr -d '[:space:]')" == true ]; then CHANGED="true" echo "${LOCKFILE} was modified" else echo "${LOCKFILE} was NOT modified" fi done - echo "CHANGED=${CHANGED}" >> "${GITHUB_ENV}" + echo "CHANGED=${CHANGED:-false}" | tee -a "${GITHUB_ENV}" - name: Prepare commit if: env.CHANGED == 'true' From 9c49cbc6e77bdd57924b624d425a7dd1dfcbe036 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 16:28:01 -0500 Subject: [PATCH 045/102] gha: fix a shell quoting issue and debug dir contents --- .github/workflows/lockfiles.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index e85ab61a9b..1734275830 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -392,6 +392,7 @@ jobs: fi done echo "CHANGED=${CHANGED:-false}" | tee -a "${GITHUB_ENV}" + ls -l "${{ env.OUTPUT_BASE_DIR }}" "${{ env.OUTPUT_BASE_DIR }}"/* - name: Prepare commit if: env.CHANGED == 'true' @@ -425,7 +426,7 @@ jobs: cat "${STDERR_LOG}.md" >> "${PR_COMMENT}" else echo "${STDERR_LOG}.md is missing" - echo "No changes to: `${LOCKFILE}`" >> "${PR_COMMENT}" + echo 'No changes to: `'"${LOCKFILE}"'`' >> "${PR_COMMENT}" echo >> "${PR_COMMENT}" fi done From 3416fcb3aaa3dc3597353d50cb35ae59ceb4a5ef Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 16:40:26 -0500 Subject: [PATCH 046/102] gha: fix handling only one resolve --- .github/workflows/lockfiles.yaml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 1734275830..1288aa8951 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -383,7 +383,10 @@ jobs: echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> "${GITHUB_ENV}" for resolve in ${RESOLVES}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") - CHANGED_FILE="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename "${LOCKFILE}").CHANGED" + ARTIFACT_DIR="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}" + # Add a symlink when actions/aritifact-download skips making subdir for only one artifact + [ "${resolve}" == "${RESOLVES}" ] && ln -s "." 
"${ARTIFACT_DIR}" + CHANGED_FILE="${ARTIFACT_DIR}/$(basename "${LOCKFILE}").CHANGED" if [ -e "${CHANGED_FILE}" ] && [ "$(cat "${CHANGED_FILE}" | tr -d '[:space:]')" == true ]; then CHANGED="true" echo "${LOCKFILE} was modified" From b00801c79b438ca193145b4af3fc028ac2081e12 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 18:32:04 -0500 Subject: [PATCH 047/102] gha: fix link --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 1288aa8951..0b36db5d06 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -418,7 +418,7 @@ jobs: - name: Prepare PR comment env: - RUN_LINK: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}) + RUN_LINK: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} run: | echo "# Lockfile Diffs" > "${PR_COMMENT}" echo >> "${PR_COMMENT}" From aba57d2d45f086ac1f9bf78b434ddd48022aaeee Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 18:45:09 -0500 Subject: [PATCH 048/102] gha: use force push on re-runs creating new PRs --- .github/workflows/lockfiles.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 0b36db5d06..7cfa9bed5a 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -437,9 +437,11 @@ jobs: - name: Commit and push if: env.CHANGED == 'true' + # git push --force is for workflow re-runs, but only for new PRs. run: | git commit -F "${COMMIT_MSG}" - git push -u origin "${{ needs.pr.outputs.PR_REF }}" + git push${{ inputs.pr == 'new' && ' --force' || '' }} \ + -u origin "${{ needs.pr.outputs.PR_REF }}" - name: Create new PR if: inputs.pr == 'new' && env.CHANGED == 'true' From 9e559b32944afb499ef3ed558546cac53c9e5750 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 18:53:29 -0500 Subject: [PATCH 049/102] gha: gh needs org/ in team name --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 7cfa9bed5a..7e17c1fef5 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -452,7 +452,7 @@ jobs: --base "${{ needs.pr.outputs.PR_BASE_REF }}" --title "pants generate-lockfiles: ${RESOLVES_CSV}" --body-file "${PR_COMMENT}" - --reviewer "Maintainers" + --reviewer "${{ github.repository_owner }}/Maintainers" --assignee "${{ github.event.sender.login }}" --label "external dependency" --label "python3" From 8ddffa22d31bbdb6933ac706720caeac566d9666 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 19:36:11 -0500 Subject: [PATCH 050/102] gha: permissions --- .github/workflows/lockfiles.yaml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 7e17c1fef5..d45355eda2 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -1,6 +1,11 @@ --- name: Regenerate Lockfiles +permissions: + actions: read + contents: write + pull-requests: write + on: workflow_dispatch: inputs: @@ -452,10 +457,12 @@ jobs: --base "${{ needs.pr.outputs.PR_BASE_REF }}" --title "pants generate-lockfiles: ${RESOLVES_CSV}" --body-file "${PR_COMMENT}" - --reviewer "${{ github.repository_owner }}/Maintainers" --assignee "${{ 
github.event.sender.login }}" --label "external dependency" --label "python3" + # github.token cannot be given read access to organizations.teams, + # so, setting reviewer to a team would mean managing a PAT or similar. + # --reviewer "${{ github.repository_owner }}/Maintainers" - name: Update or Add Comment on existing PR if: inputs.pr != 'new' From 410622a568a41b1dab16a803a576a5025353849a Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 21:44:35 -0500 Subject: [PATCH 051/102] gha: use data uri to embed png in md --- .github/workflows/lockfiles.yaml | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index d45355eda2..be1c004545 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -306,12 +306,23 @@ jobs: echo 'No changes required for ${{ env.LOCKFILE }}' else echo '' - echo ' ' - echo ' ' - echo ' ' - echo ' ' + echo ' ' + + echo ' ' + + echo ' ' + echo ' Terminal screenshot of lockfile diff in color. The text from the image is included below.' + echo -n ' src="data:image/png;base64,' + cat "${STDERR_LOG}.github-dark.png" | base64 -w0 + echo '">' + echo '' echo echo '
' From dbd6de6ae186ff2f9e62cdd259535646e9e18a70 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 21:53:29 -0500 Subject: [PATCH 052/102] gha: try minimizing pants output and pngs --- .github/workflows/lockfiles.yaml | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index be1c004545..9eca9b5527 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -201,6 +201,7 @@ jobs: id: lockfile env: PR_BASE_REF: ${{ needs.pr.outputs.PR_BASE_REF }} + PANTS_STATS_LOG: 'False' run: | mkdir -p "${OUTPUT_DIR}/" git checkout "${{ env.PR_BASE_REF }}" -- "${LOCKFILE}" # diff is for whole PR not just a commit. @@ -308,11 +309,6 @@ jobs: echo '' echo ' ' - echo ' ' - echo ' Date: Wed, 24 Sep 2025 22:34:11 -0500 Subject: [PATCH 053/102] gha: minimize freeze screenshots --- .github/workflows/lockfiles.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 9eca9b5527..35f539298d 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -202,6 +202,7 @@ jobs: env: PR_BASE_REF: ${{ needs.pr.outputs.PR_BASE_REF }} PANTS_STATS_LOG: 'False' + PANTS_LEVEL: warn # skip info-level log messages run: | mkdir -p "${OUTPUT_DIR}/" git checkout "${{ env.PR_BASE_REF }}" -- "${LOCKFILE}" # diff is for whole PR not just a commit. @@ -273,7 +274,8 @@ jobs: # The diff output applies ansi underlines across multiple lines, but freeze formats # each line separately. So, use sed to repeat ansi chars per line. sed -e "${SED_SCRIPT}" "${STDERR_LOG}" \ - | freeze --config full --language ansi --theme "${theme}" --output "${STDERR_LOG}.${theme}.${ext}" - + | freeze --config full --language ansi --theme "${theme}" --width 640 \ + --output "${STDERR_LOG}.${theme}.${ext}" - done done @@ -407,7 +409,6 @@ jobs: fi done echo "CHANGED=${CHANGED:-false}" | tee -a "${GITHUB_ENV}" - ls -l "${{ env.OUTPUT_BASE_DIR }}" "${{ env.OUTPUT_BASE_DIR }}"/* - name: Prepare commit if: env.CHANGED == 'true' From ca7d0e08010dce9b8e622586583b1c3a1fa7fedb Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Wed, 24 Sep 2025 22:57:18 -0500 Subject: [PATCH 054/102] gha: use markdown image not light/dark html --- .github/workflows/lockfiles.yaml | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 35f539298d..a2def48973 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -274,7 +274,7 @@ jobs: # The diff output applies ansi underlines across multiple lines, but freeze formats # each line separately. So, use sed to repeat ansi chars per line. sed -e "${SED_SCRIPT}" "${STDERR_LOG}" \ - | freeze --config full --language ansi --theme "${theme}" --width 640 \ + | freeze --config full --language ansi --theme "${theme}" --width 720 \ --output "${STDERR_LOG}.${theme}.${ext}" - done done @@ -308,20 +308,11 @@ jobs: if [ "${{ steps.lockfile.outputs.CHANGED }}" != true ]; then echo 'No changes required for ${{ env.LOCKFILE }}' else - echo '' - echo ' ' - - echo ' ' - - echo ' Terminal screenshot of lockfile diff in color. The text from the image is included below.' - - echo '' + echo ')' echo echo '
' echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' From 11afb3f3252c1a3be1ea5c4cc1d79a5f8b421a8e Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 11:18:10 -0500 Subject: [PATCH 055/102] gha: add job summary to PR create/comment --- .github/workflows/lockfiles.yaml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index a2def48973..cdc8546932 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -452,6 +452,8 @@ jobs: env: GH_TOKEN: ${{ github.token }} run: > + ( + echo '## New PR'; echo; gh pr create --base "${{ needs.pr.outputs.PR_BASE_REF }}" --title "pants generate-lockfiles: ${RESOLVES_CSV}" @@ -459,6 +461,7 @@ jobs: --assignee "${{ github.event.sender.login }}" --label "external dependency" --label "python3" + ) | tee -a "${GITHUB_STEP_SUMMARY}" # github.token cannot be given read access to organizations.teams, # so, setting reviewer to a team would mean managing a PAT or similar. # --reviewer "${{ github.repository_owner }}/Maintainers" @@ -468,7 +471,10 @@ jobs: env: GH_TOKEN: ${{ github.token }} run: > + ( + echo '## Add/Update PR Comment'; echo; gh pr comment "${{ inputs.pr }}" --body-file "${PR_COMMENT}" --edit-last --create-if-none + ) | tee -a "${GITHUB_STEP_SUMMARY}" From 9cf28e8f7aa5ebb4475899c90cef76351bf3a5b6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 11:18:46 -0500 Subject: [PATCH 056/102] gha: use s3 to host diff images --- .github/workflows/lockfiles.yaml | 57 +++++++++++++++++++++++++++++--- 1 file changed, 52 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index cdc8546932..21df97a64f 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -4,6 +4,7 @@ name: Regenerate Lockfiles permissions: actions: read contents: write + id-token: write pull-requests: write on: @@ -23,6 +24,12 @@ env: # logs and screenshots go in {OUTPUT_BASE_DIR}/lockfile-{resolve}/ (where "lockfile-{resolve}" is the artifact name) OUTPUT_BASE_DIR: dist/lockfiles # /dist/ is in .gitignore + # We upload the diff image(s) (terminal screenshot(s)) to S3 so we can use it in GitHub flavored markdown. + AWS_ACCOUNT_ID: "053075847820" + AWS_S3_BUCKET: st2-gha + AWS_S3_BUCKET_ROOT: ${{ github.repository }}/runs/${{ github.run_id }} + AWS_REGION: us-west-2 # Oregon + defaults: run: shell: bash @@ -279,6 +286,49 @@ jobs: done done + - name: Configure AWS Credentials + id: aws + uses: aws-actions/configure-aws-credentials@v5.0.0 + with: + aws-region: ${{ env.AWS_REGION }} + role-session-name: GitHubActions-${{ github.run_id }} + role-to-assume: arn:aws:iam::${{ env.ACCOUNT_ID }}:role/GitHubActions@org=${{ github.repository_owner }},repo=${{ github.event.repository.name }} + allowed-account-ids: ${{ env.ACCOUNT_ID }} + # This should be the minimal set of permissions needed by this workflow. + inline-session-policy: >- + { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AllowPutObject", + "Effect": "Allow", + "Action": ["s3:PutObject", "s3:PutObjectTagging"], + "Resource": ["arn:aws:s3:::${{ env.AWS_S3_BUCKET }}/${{ env.AWS_S3_BUCKET_ROOT }}/*"] + } + ] + } + output-credentials: true + output-env-credentials: false + + - name: Upload lockfile diff picture to S3 + # GitHub flavored markdown does not accept data:image/png URIs for images. + # And GitHub does not provide any convenient APIs for uploading image assets. 
+ # So, we upload to AWS and save the URI for in generated markdown below. + if: steps.lockfile.outputs.CHANGED == 'true' + id: s3 + uses: hkusu/s3-upload-action@v2.1.0 + with: + aws-access-key-id: ${{ steps.aws.outputs.aws-access-key-id }} + aws-secret-access-key: ${{ steps.aws.outputs.aws-secret-access-key }} + aws-region: ${{ env.AWS_REGION }} + aws-bucket: ${{ env.AWS_S3_BUCKET }} + bucket-root: ${{ env.AWS_S3_BUCKET_ROOT }} + destination-dir: lockfile-${{ matrix.resolve }} + file-path: ${{ env.STDERR_LOG }}.github-dark.png + output-file-url: 'true' + content-type: image/png + public: 'true' + - name: Prepare text-only lockfile diff for commit message if: steps.lockfile.outputs.CHANGED == 'true' env: @@ -308,11 +358,8 @@ jobs: if [ "${{ steps.lockfile.outputs.CHANGED }}" != true ]; then echo 'No changes required for ${{ env.LOCKFILE }}' else - # We do not have anywhere to serve images from, so they are embedded. - echo -n '![Terminal screenshot of lockfile diff in color. The text from the image is included below.](' - echo -n 'data:image/png;base64,' - cat "${STDERR_LOG}.github-dark.png" | base64 -w0 - echo ')' + echo -n '![Terminal screenshot of lockfile diff in color. The text from the image is included below.]' + echo '(${{ steps.s3.output.file-url }})' echo echo '
' echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' From 82028f1945a0377819a7f1bcf01f2cb2a32260f6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 11:25:25 -0500 Subject: [PATCH 057/102] gha: fix var access --- .github/workflows/lockfiles.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 21df97a64f..e96fa3ed5e 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -292,8 +292,8 @@ jobs: with: aws-region: ${{ env.AWS_REGION }} role-session-name: GitHubActions-${{ github.run_id }} - role-to-assume: arn:aws:iam::${{ env.ACCOUNT_ID }}:role/GitHubActions@org=${{ github.repository_owner }},repo=${{ github.event.repository.name }} - allowed-account-ids: ${{ env.ACCOUNT_ID }} + role-to-assume: arn:aws:iam::${{ env.AWS_ACCOUNT_ID }}:role/GitHubActions@org=${{ github.repository_owner }},repo=${{ github.event.repository.name }} + allowed-account-ids: ${{ env.AWS_ACCOUNT_ID }} # This should be the minimal set of permissions needed by this workflow. inline-session-policy: >- { From e17c71ad7c7c1a7f0d7905c3ed75a786d4877ce3 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 14:46:56 -0500 Subject: [PATCH 058/102] gha: use aws cli instead of s3-upload-action --- .github/workflows/lockfiles.yaml | 35 ++++++++++++++++---------------- 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index e96fa3ed5e..f0aee87e38 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -25,10 +25,10 @@ env: OUTPUT_BASE_DIR: dist/lockfiles # /dist/ is in .gitignore # We upload the diff image(s) (terminal screenshot(s)) to S3 so we can use it in GitHub flavored markdown. + AWS_REGION: us-west-2 # Oregon AWS_ACCOUNT_ID: "053075847820" AWS_S3_BUCKET: st2-gha AWS_S3_BUCKET_ROOT: ${{ github.repository }}/runs/${{ github.run_id }} - AWS_REGION: us-west-2 # Oregon defaults: run: @@ -287,7 +287,6 @@ jobs: done - name: Configure AWS Credentials - id: aws uses: aws-actions/configure-aws-credentials@v5.0.0 with: aws-region: ${{ env.AWS_REGION }} @@ -307,27 +306,25 @@ jobs: } ] } - output-credentials: true - output-env-credentials: false - name: Upload lockfile diff picture to S3 # GitHub flavored markdown does not accept data:image/png URIs for images. # And GitHub does not provide any convenient APIs for uploading image assets. # So, we upload to AWS and save the URI for in generated markdown below. if: steps.lockfile.outputs.CHANGED == 'true' - id: s3 - uses: hkusu/s3-upload-action@v2.1.0 - with: - aws-access-key-id: ${{ steps.aws.outputs.aws-access-key-id }} - aws-secret-access-key: ${{ steps.aws.outputs.aws-secret-access-key }} - aws-region: ${{ env.AWS_REGION }} - aws-bucket: ${{ env.AWS_S3_BUCKET }} - bucket-root: ${{ env.AWS_S3_BUCKET_ROOT }} - destination-dir: lockfile-${{ matrix.resolve }} - file-path: ${{ env.STDERR_LOG }}.github-dark.png - output-file-url: 'true' - content-type: image/png - public: 'true' + # NOTE: The packaged s3 actions are too old, incomplete, or do not allow setting content-type + tags. 
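      # (The object uploaded here is later linked with a virtual-hosted-style S3 URL of the form
      #    https://<bucket>.s3.<region>.amazonaws.com/<key>
      #  which is what IMG_BASE_URI below is built from.)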
+ env: + AWS_S3_PATH: ${{ env.AWS_S3_BUCKET_ROOT }}/lockfile-${{ matrix.resolve }} + run: | + aws s3 cp \ + "${STDERR_LOG}.github-dark.png" \ + "s3://${{ env.AWS_S3_BUCKET }}/${{ env.AWS_S3_PATH }}/" \ + --content-type image/png + aws s3api put-object-tagging \ + --expected-bucket-owner "${AWS_ACCOUNT_ID}" \ + --bucket "${{ env.AWS_S3_BUCKET }}" \ + --key "${{ env.AWS_S3_PATH }}/$(basename ${STDERR_LOG}.github-dark.png)" \ + --tagging '{"Service": "GitHub Actions"}' - name: Prepare text-only lockfile diff for commit message if: steps.lockfile.outputs.CHANGED == 'true' @@ -351,6 +348,8 @@ jobs: echo ... - name: Prepare Job Summary + env: + IMG_BASE_URI: 'https://${{ env.AWS_S3_BUCKET }}.s3.${{ env.AWS_REGION }}.amazonaws.com/${{ env.AWS_S3_BUCKET_ROOT }}/lockfile-${{ matrix.resolve }}' run: | ( echo '## ${{ matrix.resolve }} Lockfile Diff' @@ -359,7 +358,7 @@ jobs: echo 'No changes required for ${{ env.LOCKFILE }}' else echo -n '![Terminal screenshot of lockfile diff in color. The text from the image is included below.]' - echo '(${{ steps.s3.output.file-url }})' + echo "(${IMG_BASE_URI}/$(basename ${STDERR_LOG}.github-dark.png))" echo echo '
' echo ' Lockfile diff: ${{ env.LOCKFILE }} (plain text)' From 40757c68d7f4ace9f2f774c8f2e6b99223747de6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 14:52:58 -0500 Subject: [PATCH 059/102] gha: fix s3 object tagging with aws cli --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index f0aee87e38..b76f8ef0dc 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -324,7 +324,7 @@ jobs: --expected-bucket-owner "${AWS_ACCOUNT_ID}" \ --bucket "${{ env.AWS_S3_BUCKET }}" \ --key "${{ env.AWS_S3_PATH }}/$(basename ${STDERR_LOG}.github-dark.png)" \ - --tagging '{"Service": "GitHub Actions"}' + --tagging '{"TagSet":[{"Key":"Service","Value":"GitHub Actions"}]}' - name: Prepare text-only lockfile diff for commit message if: steps.lockfile.outputs.CHANGED == 'true' From e2e279674dd1940611aabf0b29a1eaf45b3e3242 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 15:48:48 -0500 Subject: [PATCH 060/102] gha: rework RESOLVES var RESOLVES is designed to be a snippet in bash, not for use as a bash var. --- .github/workflows/lockfiles.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index b76f8ef0dc..b3d6417f16 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -400,7 +400,7 @@ jobs: runs-on: ubuntu-22.04 env: # preserve the order of resolves from input (eg pants.toml has st2 first, before tools) - RESOLVES: ${{ join(fromJSON(needs.resolves.outputs.JSON), ''' ''') }} + RESOLVES: "'${{ join(fromJSON(needs.resolves.outputs.JSON), ''' ''') }}'" RESOLVES_CSV: ${{ join(fromJSON(needs.resolves.outputs.JSON), ', ') }} LOCKFILES: ${{ needs.resolves.outputs.LOCKFILES }} # see also the 'Setup env' step @@ -432,11 +432,11 @@ jobs: run: | echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> "${GITHUB_ENV}" echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> "${GITHUB_ENV}" - for resolve in ${RESOLVES}; do + for resolve in ${{ env.RESOLVES }}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") ARTIFACT_DIR="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}" # Add a symlink when actions/aritifact-download skips making subdir for only one artifact - [ "${resolve}" == "${RESOLVES}" ] && ln -s "." "${ARTIFACT_DIR}" + [ "'${resolve}'" == "${RESOLVES}" ] && ln -s "." 
"${ARTIFACT_DIR}" CHANGED_FILE="${ARTIFACT_DIR}/$(basename "${LOCKFILE}").CHANGED" if [ -e "${CHANGED_FILE}" ] && [ "$(cat "${CHANGED_FILE}" | tr -d '[:space:]')" == true ]; then CHANGED="true" @@ -452,7 +452,7 @@ jobs: run: | echo "pants generate-lockfiles: ${RESOLVES_CSV}" > "${COMMIT_MSG}" echo >> "${COMMIT_MSG}" - for resolve in ${RESOLVES}; do + for resolve in ${{ env.RESOLVES }}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") cp "${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE})" "${LOCKFILE}" git add "${LOCKFILE}" @@ -472,7 +472,7 @@ jobs: run: | echo "# Lockfile Diffs" > "${PR_COMMENT}" echo >> "${PR_COMMENT}" - for resolve in ${RESOLVES}; do + for resolve in ${{ env.RESOLVES }}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" if [ -e "${STDERR_LOG}.md" ]; then From a22ca676de6c9d8a0e70e10966805547a1487737 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 15:58:32 -0500 Subject: [PATCH 061/102] gha: strip more ansi sequences from text output --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index b3d6417f16..5961c6ad6c 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -342,7 +342,7 @@ jobs: g # replace the original line with lines from the "hold" space } /^\x1B\[0m$/d # drop blank line after heading (replaced with line of overline chars) - s/\x1B\[[0-9]\+m//g # strip out ansi escapes + s/\x1B\[[0-9;]\+m//g # strip out ansi escapes run: | sed -e "${SED_SCRIPT}" "${STDERR_LOG}" | tee "${STDERR_LOG}.txt" | head -n5 echo ... From 1167363c5316404df99eb473350e13cb58b8a415 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 16:05:34 -0500 Subject: [PATCH 062/102] gha: debug --- .github/workflows/lockfiles.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 5961c6ad6c..39f83898a6 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -432,6 +432,8 @@ jobs: run: | echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> "${GITHUB_ENV}" echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> "${GITHUB_ENV}" + set -x + ls "${{ env.OUTPUT_BASE_DIR }}" "${{ env.OUTPUT_BASE_DIR }}/*/" for resolve in ${{ env.RESOLVES }}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") ARTIFACT_DIR="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}" From bcd3437591a6edc6de6efccce2588f4931dc3270 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 16:22:19 -0500 Subject: [PATCH 063/102] gha: fix download-artifact usage --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 39f83898a6..a9aaaa1473 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -424,7 +424,7 @@ jobs: with: pattern: lockfile-* # lockfile-{resolve} path: ${{ env.OUTPUT_BASE_DIR }} - merge-multiple: true # unpack in {path}/{artifact_name}/ + merge-multiple: false # unpack in {path}/{artifact_name}/ # This step exists because we cannot use the 'env' context in 'jobs..env.*', # and because we cannot rely on 'jobs..outputs.*' for matrix jobs. 
From 905e9804c985dd28b732d0f28b9e3568a44029d6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 16:31:17 -0500 Subject: [PATCH 064/102] gha: drop debug --- .github/workflows/lockfiles.yaml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index a9aaaa1473..9d647709f4 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -432,8 +432,6 @@ jobs: run: | echo "COMMIT_MSG=${{ env.OUTPUT_BASE_DIR }}/commit_msg.txt" >> "${GITHUB_ENV}" echo "PR_COMMENT=${{ env.OUTPUT_BASE_DIR }}/pr_comment.md" >> "${GITHUB_ENV}" - set -x - ls "${{ env.OUTPUT_BASE_DIR }}" "${{ env.OUTPUT_BASE_DIR }}/*/" for resolve in ${{ env.RESOLVES }}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") ARTIFACT_DIR="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}" From e97f335e24577629205541f635a5ca7f62622010 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 16:40:34 -0500 Subject: [PATCH 065/102] gha: drop alert box from job summary it does not work consistently --- .github/workflows/lockfiles.yaml | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 9d647709f4..f750fdf219 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -355,7 +355,7 @@ jobs: echo '## ${{ matrix.resolve }} Lockfile Diff' echo if [ "${{ steps.lockfile.outputs.CHANGED }}" != true ]; then - echo 'No changes required for ${{ env.LOCKFILE }}' + echo 'No changes required for ${{ env.LOCKFILE }} _(from ${{ needs.pr.outputs.PR_BASE_REF }})_' else echo -n '![Terminal screenshot of lockfile diff in color. The text from the image is included below.]' echo "(${IMG_BASE_URI}/$(basename ${STDERR_LOG}.github-dark.png))" @@ -366,20 +366,13 @@ jobs: echo '```' cat "${STDERR_LOG}.txt" echo '```' + echo + echo '_(diff from ${{ needs.pr.outputs.PR_BASE_REF }})_' + echo + echo '
' fi echo ) | tee -a "${STDERR_LOG}.md" >> "${GITHUB_STEP_SUMMARY}" - ( - echo '> [!NOTE]' - echo '> Diff base (git ref): *${{ needs.pr.outputs.PR_BASE_REF }}*' - echo - ) >> "${GITHUB_STEP_SUMMARY}" # only add this note to the job summary - if [ "${{ steps.lockfile.outputs.CHANGED }}" == true ]; then - ( - echo '
' - echo - ) | tee -a "${STDERR_LOG}.md" >> "${GITHUB_STEP_SUMMARY}" - fi - name: Upload lockfile and lockfile diff files uses: actions/upload-artifact@v4 From 1dc8b7728d72f6d697420b21f7ff1ee3d15c8219 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 16:51:06 -0500 Subject: [PATCH 066/102] gha: improve debuggability of workflow --- .github/workflows/lockfiles.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index f750fdf219..7cb7556794 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -451,10 +451,11 @@ jobs: git add "${LOCKFILE}" STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" if [ -e "${STDERR_LOG}.txt" ]; then + echo "Adding ${STDERR_LOG}.txt to ${COMMIT_MSG}" cat "${STDERR_LOG}.txt" >> "${COMMIT_MSG}" else echo "${STDERR_LOG}.txt is missing" - echo "No changes to: ${LOCKFILE}" >> "${COMMIT_MSG}" + echo "No changes to: ${LOCKFILE}" | tee -a "${COMMIT_MSG}" echo >> "${COMMIT_MSG}" fi done @@ -469,10 +470,11 @@ jobs: LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") STDERR_LOG="${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/stderr.log" if [ -e "${STDERR_LOG}.md" ]; then + echo "Adding ${STDERR_LOG}.md to ${PR_COMMENT}" cat "${STDERR_LOG}.md" >> "${PR_COMMENT}" else echo "${STDERR_LOG}.md is missing" - echo 'No changes to: `'"${LOCKFILE}"'`' >> "${PR_COMMENT}" + echo 'No changes to: `'"${LOCKFILE}"'`' | tee -a "${PR_COMMENT}" echo >> "${PR_COMMENT}" fi done From 96049c892d7d64bf0c644237a9d8bae2b6e54adf Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Fri, 26 Sep 2025 18:57:45 -0500 Subject: [PATCH 067/102] gha: clean up commit message --- .github/workflows/lockfiles.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 7cb7556794..54c1dcd70e 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -445,6 +445,7 @@ jobs: run: | echo "pants generate-lockfiles: ${RESOLVES_CSV}" > "${COMMIT_MSG}" echo >> "${COMMIT_MSG}" + NO_CHANGES="" for resolve in ${{ env.RESOLVES }}; do LOCKFILE=$(jq -r '.["'"${resolve}"'"]' <<< "${LOCKFILES}") cp "${{ env.OUTPUT_BASE_DIR }}/lockfile-${resolve}/$(basename ${LOCKFILE})" "${LOCKFILE}" @@ -455,10 +456,10 @@ jobs: cat "${STDERR_LOG}.txt" >> "${COMMIT_MSG}" else echo "${STDERR_LOG}.txt is missing" - echo "No changes to: ${LOCKFILE}" | tee -a "${COMMIT_MSG}" - echo >> "${COMMIT_MSG}" + NO_CHANGES="${NO_CHANGES}No changes to ${LOCKFILE}"$'\n\n' fi done + echo "${NO_CHANGES}" | tee -a "${COMMIT_MSG}" - name: Prepare PR comment env: From 30a19c18e2ae3d4123d448dc159e375d2c4070ad Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 27 Sep 2025 12:39:39 -0500 Subject: [PATCH 068/102] gha: use st2stanley to create commits --- .github/workflows/lockfiles.yaml | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 54c1dcd70e..e255f3b00f 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -24,6 +24,17 @@ env: # logs and screenshots go in {OUTPUT_BASE_DIR}/lockfile-{resolve}/ (where "lockfile-{resolve}" is the artifact name) OUTPUT_BASE_DIR: dist/lockfiles # /dist/ is in .gitignore + # see https://api.github.com/users/github-actions[bot] + BOT_GHA_NAME: 'github-actions[bot]' + BOT_GHA_LOGIN: 'github-actions[bot]' + 
BOT_GHA_ID: '41898282' + BOT_GHA_EMAIL: '41898282+github-actions[bot]@users.noreply.github.com' + # see https://api.github.com/users/st2stanley + BOT_ST2_NAME: 'st2stanley[bot]' + BOT_ST2_LOGIN: 'st2stanley' + BOT_ST2_ID: '7807286' + BOT_ST2_EMAIL: '7807286+st2stanley@users.noreply.github.com' + # We upload the diff image(s) (terminal screenshot(s)) to S3 so we can use it in GitHub flavored markdown. AWS_REGION: us-west-2 # Oregon AWS_ACCOUNT_ID: "053075847820" @@ -126,11 +137,11 @@ jobs: number: "new" url: "" closed: false - author: # see https://api.github.com/users/github-actions[bot] - id: 41898282 + author: + id: ${{ env.BOT_ST2_ID }} is_bot: true - login: "github-actions[bot]" - name: "github-actions[bot]" + login: "${{ env.BOT_ST2_LOGIN }}" + name: "${{ env.BOT_ST2_NAME }}" maintainerCanModify: true headRepositoryOwner: id: "${GITHUB_REPOSITORY_OWNER_ID}" @@ -408,8 +419,10 @@ jobs: - name: Create branch for new PR if: inputs.pr == 'new' run: | - git config --local user.name "github-actions[bot]" - git config --local user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config --local author.name "${{ env.BOT_ST2_NAME }}" + git config --local author.email "${{ env.BOT_ST2_EMAIL }}" + git config --local committer.name "${{ env.BOT_GHA_NAME }}" + git config --local committer.email "${{ env.BOT_GHA_EMAIL }}" git checkout -b "${{ needs.pr.outputs.PR_REF }}" FETCH_HEAD - name: Download lockfiles and lockfile diff files From 286f7ba0e4537ff99d0b9983bd697e3a2f0a7498 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 27 Sep 2025 18:43:37 -0500 Subject: [PATCH 069/102] refactor scripts/fixate-requirements.py for reuse --- scripts/fixate-requirements.py | 50 +++++++++++++++++++--------------- scripts/fixate_requirements.py | 1 + 2 files changed, 29 insertions(+), 22 deletions(-) create mode 120000 scripts/fixate_requirements.py diff --git a/scripts/fixate-requirements.py b/scripts/fixate-requirements.py index e7b8377297..64af60260d 100755 --- a/scripts/fixate-requirements.py +++ b/scripts/fixate-requirements.py @@ -124,6 +124,33 @@ def locate_file(path, must_exist=False): return path +def load_fixed_requirements(file_path): + fixed = load_requirements(locate_file(file_path, must_exist=True)) + + # Make sure there are no duplicate / conflicting definitions + fixedreq_hash = {} + for req in fixed: + if hasattr(req, "requirement"): + parsedreq = parse_req_from_line(req.requirement, req.line_source) + project_name = parsedreq.requirement.name + + if not req.requirement: + continue + else: + project_name = req.name + + if not req.req: + continue + + if project_name in fixedreq_hash: + raise ValueError( + 'Duplicate definition for dependency "%s"' % (project_name) + ) + + fixedreq_hash[project_name] = req + return fixedreq_hash + + def merge_source_requirements(sources): """ Read requirements source files and merge it's content. 
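(Aside, not part of the patch: a rough sketch of how the extracted load_fixed_requirements()
helper is expected to be reused, e.g. by the script introduced in the next commit. The import
relies on the snake_case symlink added below; the file path and the attribute fallbacks are
illustrative assumptions, not code from this series.)

    from fixate_requirements import load_fixed_requirements

    # maps each project name to its parsed requirement from fixed-requirements.txt
    fixed = load_fixed_requirements("fixed-requirements.txt")
    for name, req in fixed.items():
        # newer pip keeps the raw line in .requirement; older pip exposes .req
        print(name, getattr(req, "requirement", getattr(req, "req", None)))
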
@@ -173,29 +200,8 @@ def write_requirements( skip = skip or [] requirements = merge_source_requirements(sources) - fixed = load_requirements(locate_file(fixed_requirements, must_exist=True)) - - # Make sure there are no duplicate / conflicting definitions - fixedreq_hash = {} - for req in fixed: - if hasattr(req, "requirement"): - parsedreq = parse_req_from_line(req.requirement, req.line_source) - project_name = parsedreq.requirement.name - - if not req.requirement: - continue - else: - project_name = req.name - - if not req.req: - continue - if project_name in fixedreq_hash: - raise ValueError( - 'Duplicate definition for dependency "%s"' % (project_name) - ) - - fixedreq_hash[project_name] = req + fixedreq_hash = load_fixed_requirements(fixed_requirements) lines_to_write = [] links = set() diff --git a/scripts/fixate_requirements.py b/scripts/fixate_requirements.py new file mode 120000 index 0000000000..791bff1747 --- /dev/null +++ b/scripts/fixate_requirements.py @@ -0,0 +1 @@ +fixate-requirements.py \ No newline at end of file From 8fb9cdf11cebe049a5eec6132f3e28ff15d62632 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 27 Sep 2025 18:45:42 -0500 Subject: [PATCH 070/102] draft scripts/lockfiles_to_reqs.py --- scripts/lockfiles_to_reqs.py | 112 +++++++++++++++++++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 scripts/lockfiles_to_reqs.py diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py new file mode 100644 index 0000000000..aef5b857d0 --- /dev/null +++ b/scripts/lockfiles_to_reqs.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# Copyright 2025 The StackStorm Authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import copy +import json +from pathlib import Path + +from fixate_requirements import load_fixed_requirements, parse_req_from_line + + +FIXED_REQUIREMENTS = "fixed-requirements.txt" +TEST_REQUIREMENTS = "test-requirements.txt" + +_LOCKFILE = "lockfiles/{resolve}.lock" +TOOL_RESOLVES = ("st2", "bandit", "flake8", "pylint", "black") +# irrelevant resolves: "pants-plugins", "twine" +LOCKFILES = tuple(_LOCKFILE.format(tool) for tool in TOOL_RESOLVES) + + +def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes: + """ + Copied from code by Pants Project Contributors (Apache 2.0 licensed): + https://github.com/pantsbuild/pants/blob/release_2.25.0/src/python/pants/backend/python/util_rules/pex_requirements.py#L119-L127 + + TODO: delete this once we getrid of the legacy fixate requirements files. 
+ """ + return b"\n".join( line for line in lockfile_bytes.splitlines() if not line.lstrip().startswith(b"//") ) + + +def _update(old_req, name, version): + parsedreq = parse_req_from_line(req.requirement, req.line_source) + assert parsedreq.requirement.name == name + specs = tuple(parsedreq.requirement.specifier) + if len(specs) != 1: + return False + spec = specs[0] + if spec.operator == '==' and spec.version != version: + # only change pins; ignore any version range + new_spec = spec.__class__(f"=={version}", spec.prereleases or None) + new_specs = specs.__class__([new_spec], specs.prereleases or None) + new_req = copy.deepcopy(parsedreq.requirement) + new_req.specifier = new_specs + # = dataclasses.replace(parsedreq, requirement=new_req) + + return str(new_req) + return False + + +def plan_update(old_reqs, name, version, reqs_updates): + if name in old_reqs: + old_req = old_reqs[name] + updated_line = _update(old_req, name, version) + if updated_line is not None: + reqs_updates[name] = updated_line + + +def do_updates(path, reqs_updates): + lines = path.read_text().splitlines() + for name, updated_line in reqs_updates.items(): + line_source = fixed_reqs[name].line_source + # line_source fmt is "line of " + _, line_number, _ = line_source.split(maxsplits=2) + line_index = line_number - 1 + lines[line_index] = updated_line + path.write_text("\n".join(lines) + "\n") + + +def main(): + fixed_path = Path(FIXED_REQUIREMENTS).resolve() + test_path = Path(TEST_REQUIREMENTS).resolve() + fixed_reqs = load_fxed_requirements(FIXED_REQUIREMENTS) + test_reqs = load_fxed_requirements(TEST_REQUIREMENTS) + + fixed_reqs_updates = {} + test_reqs_updates = {} + + handled = [] + for lockfile in LOCKFILES: + lockfile_bytes = strip_comments_from_pex_json_lockfile( + Path(lockfile).read_bytes() + ) + pex_lock = json.loads(lockfile_bytes.decode("utf-8")) + locked_requirements = pex_lock["locked_resolves"][0]["locked_requirements"] + locked_reqs_name_version_map = { + req["project_name"]: req["version"] + for req in locked_requirements + } + for name, version in locked_reqs_name_version_map.items(): + if name in handled: + continue + plan_update(fixed_reqs, name, version, fixed_reqs_updates) + plan_update(test_reqs, name, version, test_reqs_updates) + handled.append(name) + + do_updates(fixed_path, fixed_reqs_updates) + do_updates(test_path, test_reqs_updates) + + +if __name__ == "__main__": + main() From f7efdc488a020de777b206ad2f421db24964174a Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 27 Sep 2025 19:08:46 -0500 Subject: [PATCH 071/102] lint w/ flake8 --- scripts/lockfiles_to_reqs.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index aef5b857d0..c315a7efdb 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -36,11 +36,13 @@ def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes: TODO: delete this once we getrid of the legacy fixate requirements files. 
""" - return b"\n".join( line for line in lockfile_bytes.splitlines() if not line.lstrip().startswith(b"//") ) + return b"\n".join( + line for line in lockfile_bytes.splitlines() if not line.lstrip().startswith(b"//") + ) def _update(old_req, name, version): - parsedreq = parse_req_from_line(req.requirement, req.line_source) + parsedreq = parse_req_from_line(old_req.requirement, old_req.line_source) assert parsedreq.requirement.name == name specs = tuple(parsedreq.requirement.specifier) if len(specs) != 1: @@ -52,8 +54,6 @@ def _update(old_req, name, version): new_specs = specs.__class__([new_spec], specs.prereleases or None) new_req = copy.deepcopy(parsedreq.requirement) new_req.specifier = new_specs - # = dataclasses.replace(parsedreq, requirement=new_req) - return str(new_req) return False @@ -66,10 +66,10 @@ def plan_update(old_reqs, name, version, reqs_updates): reqs_updates[name] = updated_line -def do_updates(path, reqs_updates): +def do_updates(path, old_reqs, reqs_updates): lines = path.read_text().splitlines() for name, updated_line in reqs_updates.items(): - line_source = fixed_reqs[name].line_source + line_source = old_reqs[name].line_source # line_source fmt is "line of " _, line_number, _ = line_source.split(maxsplits=2) line_index = line_number - 1 @@ -80,8 +80,8 @@ def do_updates(path, reqs_updates): def main(): fixed_path = Path(FIXED_REQUIREMENTS).resolve() test_path = Path(TEST_REQUIREMENTS).resolve() - fixed_reqs = load_fxed_requirements(FIXED_REQUIREMENTS) - test_reqs = load_fxed_requirements(TEST_REQUIREMENTS) + fixed_reqs = load_fixed_requirements(FIXED_REQUIREMENTS) + test_reqs = load_fixed_requirements(TEST_REQUIREMENTS) fixed_reqs_updates = {} test_reqs_updates = {} @@ -104,8 +104,8 @@ def main(): plan_update(test_reqs, name, version, test_reqs_updates) handled.append(name) - do_updates(fixed_path, fixed_reqs_updates) - do_updates(test_path, test_reqs_updates) + do_updates(fixed_path, fixed_reqs, fixed_reqs_updates) + do_updates(test_path, test_reqs, test_reqs_updates) if __name__ == "__main__": From 043d26d86010ad113431e09dc8794c6cbcbe9e5d Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 27 Sep 2025 21:11:33 -0500 Subject: [PATCH 072/102] fmt black and fix flake8 issue --- scripts/lockfiles_to_reqs.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index c315a7efdb..2bed42fedc 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -37,18 +37,20 @@ def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes: TODO: delete this once we getrid of the legacy fixate requirements files. 
""" return b"\n".join( - line for line in lockfile_bytes.splitlines() if not line.lstrip().startswith(b"//") + line + for line in lockfile_bytes.splitlines() + if not line.lstrip().startswith(b"//") ) def _update(old_req, name, version): parsedreq = parse_req_from_line(old_req.requirement, old_req.line_source) assert parsedreq.requirement.name == name - specs = tuple(parsedreq.requirement.specifier) + specs = parsedreq.requirement.specifier if len(specs) != 1: return False - spec = specs[0] - if spec.operator == '==' and spec.version != version: + spec = tuple(specs)[0] + if spec.operator == "==" and spec.version != version: # only change pins; ignore any version range new_spec = spec.__class__(f"=={version}", spec.prereleases or None) new_specs = specs.__class__([new_spec], specs.prereleases or None) @@ -94,8 +96,7 @@ def main(): pex_lock = json.loads(lockfile_bytes.decode("utf-8")) locked_requirements = pex_lock["locked_resolves"][0]["locked_requirements"] locked_reqs_name_version_map = { - req["project_name"]: req["version"] - for req in locked_requirements + req["project_name"]: req["version"] for req in locked_requirements } for name, version in locked_reqs_name_version_map.items(): if name in handled: From 2791c74967dc47a49a9998d7655392dfdf5e813c Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 13:38:58 -0500 Subject: [PATCH 073/102] catch missing auth backend errors sooner --- tools/config_gen.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tools/config_gen.py b/tools/config_gen.py index 972171af71..96832fced9 100755 --- a/tools/config_gen.py +++ b/tools/config_gen.py @@ -225,6 +225,15 @@ def main(args): mod.register_opts(ignore_errors=True) _read_current_config(opt_groups) _clear_config() + if config == "st2auth.config": + from st2auth import ( + backends as auth_backends, + ) # late import to let config get set up first. + + available_backends = auth_backends.get_available_backends() + assert ( + len(available_backends) == 3 + ), f"Expected 3 available auth backends, got {len(available_backends)}: {available_backends}" _read_groups(opt_groups) From c0828e9e80aa77144b51e54b38079f1441a7065e Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 20:06:54 -0500 Subject: [PATCH 074/102] tools/config_gen.py: enhance debugging --- tools/config_gen.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tools/config_gen.py b/tools/config_gen.py index 96832fced9..777ec86901 100755 --- a/tools/config_gen.py +++ b/tools/config_gen.py @@ -17,6 +17,7 @@ from __future__ import absolute_import import collections import importlib +import logging import six import sys import traceback @@ -238,4 +239,9 @@ def main(args): if __name__ == "__main__": + # display errors on stderr to facilitate debugging + log_level = logging.DEBUG + logging.basicConfig( + format="%(asctime)s %(levelname)s [-] %(message)s", level=log_level + ) main(sys.argv) From 536ba2465b5a44888d0da1ef7772e0589f7408b2 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 21:01:09 -0500 Subject: [PATCH 075/102] tools/config_gen.py: WARNING logs should be enough The error I needed was actually shown in an ERROR level log message, so this should display the errors without spamming build logs too much. 
--- tools/config_gen.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/config_gen.py b/tools/config_gen.py index 777ec86901..b4f459cfff 100755 --- a/tools/config_gen.py +++ b/tools/config_gen.py @@ -239,8 +239,8 @@ def main(args): if __name__ == "__main__": - # display errors on stderr to facilitate debugging - log_level = logging.DEBUG + # display warnings+errors on stderr to facilitate debugging + log_level = logging.WARNING logging.basicConfig( format="%(asctime)s %(levelname)s [-] %(message)s", level=log_level ) From 7f1d9c276684066cf66b1ae609e57a6b7d57f0d7 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 21:50:04 -0500 Subject: [PATCH 076/102] scripts/lockfiles_to_reqs.py: make it work --- scripts/lockfiles_to_reqs.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index 2bed42fedc..0831016ad2 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -26,7 +26,7 @@ _LOCKFILE = "lockfiles/{resolve}.lock" TOOL_RESOLVES = ("st2", "bandit", "flake8", "pylint", "black") # irrelevant resolves: "pants-plugins", "twine" -LOCKFILES = tuple(_LOCKFILE.format(tool) for tool in TOOL_RESOLVES) +LOCKFILES = tuple(_LOCKFILE.format(resolve=tool) for tool in TOOL_RESOLVES) def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes: @@ -73,8 +73,8 @@ def do_updates(path, old_reqs, reqs_updates): for name, updated_line in reqs_updates.items(): line_source = old_reqs[name].line_source # line_source fmt is "line of " - _, line_number, _ = line_source.split(maxsplits=2) - line_index = line_number - 1 + _, line_number, _ = line_source.split(maxsplit=2) + line_index = int(line_number) - 1 lines[line_index] = updated_line path.write_text("\n".join(lines) + "\n") From 40deedc7a88d4a092fc340543b00d664786face2 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 21:52:17 -0500 Subject: [PATCH 077/102] scripts/lockfiles_to_reqs.py: add logging --- scripts/lockfiles_to_reqs.py | 71 +++++++++++++++++++++++++++--------- 1 file changed, 54 insertions(+), 17 deletions(-) diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index 0831016ad2..4de9354cfb 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -15,11 +15,15 @@ import copy import json +import logging from pathlib import Path from fixate_requirements import load_fixed_requirements, parse_req_from_line +LOG = logging.getLogger(__name__) + + FIXED_REQUIREMENTS = "fixed-requirements.txt" TEST_REQUIREMENTS = "test-requirements.txt" @@ -47,25 +51,41 @@ def _update(old_req, name, version): parsedreq = parse_req_from_line(old_req.requirement, old_req.line_source) assert parsedreq.requirement.name == name specs = parsedreq.requirement.specifier - if len(specs) != 1: - return False + if len(specs) == 0: + # name-only dep. Nothing to do. Skipping. 
+ return None + elif len(specs) > 1: + LOG.warning( + "Cannot automatically update comma separated version specifier: %s", specs + ) + return None spec = tuple(specs)[0] - if spec.operator == "==" and spec.version != version: - # only change pins; ignore any version range - new_spec = spec.__class__(f"=={version}", spec.prereleases or None) - new_specs = specs.__class__([new_spec], specs.prereleases or None) - new_req = copy.deepcopy(parsedreq.requirement) - new_req.specifier = new_specs - return str(new_req) - return False + if spec.version != version: + if spec.operator != "==": + LOG.warning( + "Cannot safely auto-change version specifier of %s from %s%s to ==%s", + name, + spec.operator, + spec.version, + version, + ) + else: + # only change pins; ignore any version range + new_spec = spec.__class__(f"=={version}", spec.prereleases or None) + new_specs = specs.__class__([new_spec], specs.prereleases or None) + new_req = copy.deepcopy(parsedreq.requirement) + new_req.specifier = new_specs + return str(new_req) + return None def plan_update(old_reqs, name, version, reqs_updates): - if name in old_reqs: - old_req = old_reqs[name] - updated_line = _update(old_req, name, version) - if updated_line is not None: - reqs_updates[name] = updated_line + if name not in old_reqs: + return + old_req = old_reqs[name] + updated_line = _update(old_req, name, version) + if updated_line is not None: + reqs_updates[name] = updated_line def do_updates(path, old_reqs, reqs_updates): @@ -88,6 +108,7 @@ def main(): fixed_reqs_updates = {} test_reqs_updates = {} + LOG.info("Looking for verion changes") handled = [] for lockfile in LOCKFILES: lockfile_bytes = strip_comments_from_pex_json_lockfile( @@ -100,14 +121,30 @@ def main(): } for name, version in locked_reqs_name_version_map.items(): if name in handled: + # st2.lock goes first so we can just ignore duplicates from tool lockfiles. 
continue plan_update(fixed_reqs, name, version, fixed_reqs_updates) plan_update(test_reqs, name, version, test_reqs_updates) handled.append(name) - do_updates(fixed_path, fixed_reqs, fixed_reqs_updates) - do_updates(test_path, test_reqs, test_reqs_updates) + if not fixed_reqs_updates: + LOG.info("No updates required in %s", fixed_path) + else: + LOG.info("Updating %s", fixed_path) + do_updates(fixed_path, fixed_reqs, fixed_reqs_updates) + + if not test_reqs_updates: + LOG.info("No updates required in %s", test_path) + else: + LOG.info("Updating %s", test_path) + do_updates(test_path, test_reqs, test_reqs_updates) + + LOG.info("DONE") if __name__ == "__main__": + log_level = logging.INFO + logging.basicConfig( + format="%(asctime)s %(levelname)s [-] %(message)s", level=log_level + ) main() From 6f9202f5f8c9239fdd6ebf4ade133949fb8332d5 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 22:33:30 -0500 Subject: [PATCH 078/102] update lockfiles_to_reqs so it runs fixate-requirments as well --- scripts/lockfiles_to_reqs.py | 44 +++++++++++++++++++++++++++++------- 1 file changed, 36 insertions(+), 8 deletions(-) diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index 4de9354cfb..dcf6b3c3d0 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -18,7 +18,7 @@ import logging from pathlib import Path -from fixate_requirements import load_fixed_requirements, parse_req_from_line +from fixate_requirements import load_fixed_requirements, parse_req_from_line, write_requirements LOG = logging.getLogger(__name__) @@ -99,7 +99,7 @@ def do_updates(path, old_reqs, reqs_updates): path.write_text("\n".join(lines) + "\n") -def main(): +def copy_locked_versions_into_legacy_requirements_files(): fixed_path = Path(FIXED_REQUIREMENTS).resolve() test_path = Path(TEST_REQUIREMENTS).resolve() fixed_reqs = load_fixed_requirements(FIXED_REQUIREMENTS) @@ -128,18 +128,45 @@ def main(): handled.append(name) if not fixed_reqs_updates: - LOG.info("No updates required in %s", fixed_path) + LOG.info("No updates required in %s", FIXED_REQUIREMENTS) else: - LOG.info("Updating %s", fixed_path) + LOG.info("Updating %s", FIXED_REQUIREMENTS) do_updates(fixed_path, fixed_reqs, fixed_reqs_updates) if not test_reqs_updates: - LOG.info("No updates required in %s", test_path) + LOG.info("No updates required in %s", TEST_REQUIREMENTS) else: - LOG.info("Updating %s", test_path) + LOG.info("Updating %s", TEST_REQUIREMENTS) do_updates(test_path, test_reqs, test_reqs_updates) - LOG.info("DONE") + LOG.info("Done updating %s and %s", FIXED_REQUIREMENTS, TEST_REQUIREMENTS) + + +def fixate_legacy_requirements_files(): # based on .requirements Makefile target + skip=["virtualenv", "virtualenv-osx"] + + workspace = Path(".") + sources = list(workspace.glob("st2*/in-requirements.txt")) + sources.extend(list(workspace.glob("contrib/runners/*/in-requirements.txt"))) + + output = "requirements.txt" + LOG.info("Updating (fixating) %s files with requirements from %s", output, FIXED_REQUIREMENTS) + write_requirements( + sources=[str(source) for source in sources], + fixed_requirements=FIXED_REQUIREMENTS, + output_file=output, + skip=skip, + ) + + for source in sources: + output = str(source.with_name("requirements.txt")) + write_requirements( + sources=[str(source)], + fixed_requirements=FIXED_REQUIREMENTS, + output_file=output, + skip=skip, + ) + LOG.info("Done updating (fixating) requirements.txt files") if __name__ == "__main__": @@ -147,4 +174,5 @@ def main(): logging.basicConfig( 
format="%(asctime)s %(levelname)s [-] %(message)s", level=log_level ) - main() + copy_locked_versions_into_legacy_requirements_files() + fixate_legacy_requirements_files() From cf12105650cc2d8fb254eccea411930ca69c6b5c Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 22:55:03 -0500 Subject: [PATCH 079/102] gha: run scripts/lockfiles_to_reqs.py --- .github/workflows/lockfiles.yaml | 6 ++++++ scripts/BUILD | 11 ++++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index e255f3b00f..1f560c6ff6 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -494,6 +494,12 @@ jobs: done echo ":robot: [GitHub Actions Workflow Run](${RUN_LINK})" >> "${PR_COMMENT}" + - name: LEGACY - copy lockfile updates into legacy requirements files + if: env.CHANGED == 'true' + run: | + pants run scripts/lockfiles_to_reqs.py + git add *requirements.txt st2*/requirements.txt contrib/runners/*/requirements.txt + - name: Commit and push if: env.CHANGED == 'true' # git push --force is for workflow re-runs, but only for new PRs. diff --git a/scripts/BUILD b/scripts/BUILD index c4eddad01b..646b7171bd 100644 --- a/scripts/BUILD +++ b/scripts/BUILD @@ -1,4 +1,13 @@ -python_sources() +python_sources( + overrides={ + # TODO: delete fixate*.py and lockfiles_to_reqs.py once migration to pants is complete. + "lockfiles_to_reqs.py": dict( + # Run this with `pants run scripts/lockfiles_to_reqs.py` + # It runs in-repo (aka in the workspace) and directly edits files. + run_goal_use_sandbox=False, + ) + } +) shell_sources( name="shell", From 4ae37af07db86d85d720a538d2f61f3554a68b31 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 13:03:02 -0500 Subject: [PATCH 080/102] gha: handle workflow re-runs --- .github/workflows/lockfiles.yaml | 46 +++++++++++++++++++++++++++----- 1 file changed, 39 insertions(+), 7 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 1f560c6ff6..99514ec1f1 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -127,8 +127,12 @@ jobs: headRepository,\ headRefName,\ baseRefName" + NEW_PR_REF: "regen-lockfiles-${{ github.run_id }}" run: | - if [[ "${{ inputs.pr }}" == new ]]; then + NEW_PR_NUMBER="$(gh pr list --json number --head "${NEW_PR_REF}" --state open)" + pr_list_rc = $? + if [[ "${{ inputs.pr }}" == new ]] && [ ${pr_list_rc} -gt 0 ]; then + CHECKOUT_REF="${GITHUB_REF}" echo "Planning new Pull Request metadata ..." PR=$( yq -e -p yaml . -o json -I 0 \ @@ -149,11 +153,22 @@ jobs: headRepository: id: "${GITHUB_REPOSITORY_ID}" name: "${GITHUB_REPOSITORY#*/}" - headRefName: "regen-lockfiles-${GITHUB_RUN_ID}" + headRefName: "${NEW_PR_REF}" baseRefName: "${GITHUB_REF_NAME}" HEREYAML ) - CHECKOUT_REF="${GITHUB_REF}" + elif [[ "${{ inputs.pr }}" == new ]] && [ ${pr_list_rc} -eq 0 ]; then + CHECKOUT_REF="${GITHUB_REF}" # will be force pushed + echo "Planning new Pull Request metadata (PR #${NEW_PR_NUMBER} - workflow re-run ${GITHUB_RUN_ATTEMPT})..." + PR=$(gh pr view "${NEW_PR_NUMBER}" --json "${PR_FIELDS}") + pr_search_rc=$? + if [ ${pr_search_rc} -gt 0 ]; then + echo "Error looking up PR from prior workflow run." + echo "JSON=${PR}" + exit 2 + fi + echo "Found Pull Request #${NEW_PR_NUMBER} by @$(jq -r .author.login <<< "${PR}")" + echo "URL: $(jq -r .url <<< "${PR}")" else CHECKOUT_REF="refs/pull/${{ inputs.pr }}/merge" echo "Searching for Pull Request #${{ inputs.pr }} ..." 
@@ -161,12 +176,15 @@ jobs: pr_search_rc=$? if [ ${pr_search_rc} -gt 0 ]; then echo "Pull Request #${{ inputs.pr }} not found!" + echo "JSON=${PR}" exit 2 elif (jq -e .closed <<< "${PR}" >/dev/null); then echo "Pull Request #${{ inputs.pr }} is closed!" + echo "JSON=${PR}" exit 3 elif ! (jq -e .maintainerCanModify <<< "${PR}" >/dev/null); then echo "Pull Request #${{ inputs.pr }} does not allow maintainer modification!" + echo "JSON=${PR}" exit 4 fi echo "Found Pull Request #${{ inputs.pr }} by @$(jq -r .author.login <<< "${PR}")" @@ -416,13 +434,16 @@ jobs: submodules: 'true' ref: ${{ needs.pr.outputs.CHECKOUT_REF }} - - name: Create branch for new PR - if: inputs.pr == 'new' + - name: Configure git author, committer run: | git config --local author.name "${{ env.BOT_ST2_NAME }}" git config --local author.email "${{ env.BOT_ST2_EMAIL }}" git config --local committer.name "${{ env.BOT_GHA_NAME }}" git config --local committer.email "${{ env.BOT_GHA_EMAIL }}" + + - name: Create branch for new PR + if: inputs.pr == 'new' + run: | git checkout -b "${{ needs.pr.outputs.PR_REF }}" FETCH_HEAD - name: Download lockfiles and lockfile diff files @@ -500,7 +521,7 @@ jobs: pants run scripts/lockfiles_to_reqs.py git add *requirements.txt st2*/requirements.txt contrib/runners/*/requirements.txt - - name: Commit and push + - name: Commit and ${{ inputs.pr == 'new' && 'force-' || '' }}push if: env.CHANGED == 'true' # git push --force is for workflow re-runs, but only for new PRs. run: | @@ -509,7 +530,7 @@ jobs: -u origin "${{ needs.pr.outputs.PR_REF }}" - name: Create new PR - if: inputs.pr == 'new' && env.CHANGED == 'true' + if: inputs.pr == 'new' && fromJSON(needs.pr.outputs.JSON).number == 'new' && env.CHANGED == 'true' env: GH_TOKEN: ${{ github.token }} run: > @@ -527,6 +548,17 @@ jobs: # so, setting reviewer to a team would mean managing a PAT or similar. # --reviewer "${{ github.repository_owner }}/Maintainers" + - name: Update new PR (workflow re-run ${{ github.run_attempt }}) + if: inputs.pr == 'new' && fromJSON(needs.pr.outputs.JSON).number != 'new' && env.CHANGED == 'true' + env: + GH_TOKEN: ${{ github.token }} + run: > + ( + echo '## New PR Update'; echo; + gh pr edit "${{ fromJSON(needs.pr.outputs.JSON).number }}" + --body-file "${PR_COMMENT}" + ) | tee -a "${GITHUB_STEP_SUMMARY}" + - name: Update or Add Comment on existing PR if: inputs.pr != 'new' env: From b59eed610142f7246ba03e193033c0e713d49692 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 13:11:23 -0500 Subject: [PATCH 081/102] gha: fix gh calls w/o checkout --- .github/workflows/lockfiles.yaml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 99514ec1f1..4ac135d894 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -129,8 +129,10 @@ jobs: baseRefName" NEW_PR_REF: "regen-lockfiles-${{ github.run_id }}" run: | - NEW_PR_NUMBER="$(gh pr list --json number --head "${NEW_PR_REF}" --state open)" - pr_list_rc = $? + if [[ "${{ inputs.pr }}" == new ]]; then + NEW_PR_NUMBER="$(gh pr list --json number --repo "${GITHUB_REPOSITORY}" --head "${NEW_PR_REF}" --state open)" + pr_list_rc = $? + fi if [[ "${{ inputs.pr }}" == new ]] && [ ${pr_list_rc} -gt 0 ]; then CHECKOUT_REF="${GITHUB_REF}" echo "Planning new Pull Request metadata ..." 
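(Context for the --repo flags added above, not part of the diff: gh normally infers the
repository from the git remotes of the current working copy, and this job queries the PR
before actions/checkout has run, so there is nothing to infer from. Illustrative commands only;
OWNER/REPO and 123 are placeholders:)

    # outside any clone, gh cannot guess the repository:
    #   gh pr view 123 --json url                  # fails: no git remote to infer from
    # naming the repository explicitly works from any directory:
    gh pr view 123 --repo OWNER/REPO --json url
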
@@ -160,7 +162,7 @@ jobs: elif [[ "${{ inputs.pr }}" == new ]] && [ ${pr_list_rc} -eq 0 ]; then CHECKOUT_REF="${GITHUB_REF}" # will be force pushed echo "Planning new Pull Request metadata (PR #${NEW_PR_NUMBER} - workflow re-run ${GITHUB_RUN_ATTEMPT})..." - PR=$(gh pr view "${NEW_PR_NUMBER}" --json "${PR_FIELDS}") + PR=$(gh pr view "${NEW_PR_NUMBER}" --repo "${GITHUB_REPOSITORY}" --json "${PR_FIELDS}") pr_search_rc=$? if [ ${pr_search_rc} -gt 0 ]; then echo "Error looking up PR from prior workflow run." @@ -172,7 +174,7 @@ jobs: else CHECKOUT_REF="refs/pull/${{ inputs.pr }}/merge" echo "Searching for Pull Request #${{ inputs.pr }} ..." - PR=$(gh pr view "${{ inputs.pr }}" --json "${PR_FIELDS}") + PR=$(gh pr view "${{ inputs.pr }}" --repo "${GITHUB_REPOSITORY}" --json "${PR_FIELDS}") pr_search_rc=$? if [ ${pr_search_rc} -gt 0 ]; then echo "Pull Request #${{ inputs.pr }} not found!" From f3546baabda6f32100fb45616f3f1e075184a207 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 13:27:13 -0500 Subject: [PATCH 082/102] gha: fix maintainerCanModify check --- .github/workflows/lockfiles.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 4ac135d894..ad25e2134a 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -184,7 +184,8 @@ jobs: echo "Pull Request #${{ inputs.pr }} is closed!" echo "JSON=${PR}" exit 3 - elif ! (jq -e .maintainerCanModify <<< "${PR}" >/dev/null); then + elif [[ "$(jq -r .headRepositoryOwner.login <<< "${PR}")" != "${GITHUB_REPOSITORY_OWNER}" ]] \ + && ! jq -e .maintainerCanModify <<< "${PR}" >/dev/null; then echo "Pull Request #${{ inputs.pr }} does not allow maintainer modification!" echo "JSON=${PR}" exit 4 From e504edce3c6d8e0c08a43a32221e827da54bba4f Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 13:34:21 -0500 Subject: [PATCH 083/102] gha: git diff using remote name because the base branch has not been checked out, it is not available in its short form --- .github/workflows/lockfiles.yaml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index ad25e2134a..d7c8c74e8b 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -244,11 +244,12 @@ jobs: PANTS_LEVEL: warn # skip info-level log messages run: | mkdir -p "${OUTPUT_DIR}/" - git checkout "${{ env.PR_BASE_REF }}" -- "${LOCKFILE}" # diff is for whole PR not just a commit. + git remote -v + git checkout "origin/${{ env.PR_BASE_REF }}" -- "${LOCKFILE}" # diff is for whole PR not just a commit. 
pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee "${STDERR_LOG}" >&2 ) cp "${LOCKFILE}" "${OUTPUT_DIR}/" CHANGED=$( - if git diff "${{ env.PR_BASE_REF }}" --exit-code --quiet -- "${LOCKFILE}"; then + if git diff "origin/${{ env.PR_BASE_REF }}" --exit-code --quiet -- "${LOCKFILE}"; then echo "false" else echo "true" From 269a625bbfa13014e4678a0e07a1730b273cb900 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 13:41:06 -0500 Subject: [PATCH 084/102] gha: fetch PR_BASE_REF --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index d7c8c74e8b..1d39b9378d 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -244,7 +244,7 @@ jobs: PANTS_LEVEL: warn # skip info-level log messages run: | mkdir -p "${OUTPUT_DIR}/" - git remote -v + git fetch --depth=1 origin "${{ env.PR_BASE_REF }}" git checkout "origin/${{ env.PR_BASE_REF }}" -- "${LOCKFILE}" # diff is for whole PR not just a commit. pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee "${STDERR_LOG}" >&2 ) cp "${LOCKFILE}" "${OUTPUT_DIR}/" From 36156d76d69bdd16d1235a1e2a7a372b28e6a864 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 13:53:33 -0500 Subject: [PATCH 085/102] setup pants before updating legacy requirements files --- .github/workflows/lockfiles.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 1d39b9378d..d452c3d974 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -519,6 +519,13 @@ jobs: done echo ":robot: [GitHub Actions Workflow Run](${RUN_LINK})" >> "${PR_COMMENT}" + - name: Initialize Pants and its GHA caches + if: env.CHANGED == 'true' + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-BUILD + - name: LEGACY - copy lockfile updates into legacy requirements files if: env.CHANGED == 'true' run: | From 5146365ed18fc61167466add1f33089971371b1b Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 14:06:26 -0500 Subject: [PATCH 086/102] gha: print pants generate-lockfiles command when running it --- .github/workflows/lockfiles.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index d452c3d974..651b15ae7c 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -246,7 +246,9 @@ jobs: mkdir -p "${OUTPUT_DIR}/" git fetch --depth=1 origin "${{ env.PR_BASE_REF }}" git checkout "origin/${{ env.PR_BASE_REF }}" -- "${LOCKFILE}" # diff is for whole PR not just a commit. 
+ set -x pants generate-lockfiles '--resolve=${{ matrix.resolve }}' 2> >(tee "${STDERR_LOG}" >&2 ) + { set +x; } 2>/dev/null cp "${LOCKFILE}" "${OUTPUT_DIR}/" CHANGED=$( if git diff "origin/${{ env.PR_BASE_REF }}" --exit-code --quiet -- "${LOCKFILE}"; then From be37e388e28a72885a3a8e0d2a1bef889073fe31 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 15:01:09 -0500 Subject: [PATCH 087/102] gha: handle pushing to existing PR from detached HEAD --- .github/workflows/lockfiles.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 651b15ae7c..b6a590845b 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -540,7 +540,7 @@ jobs: run: | git commit -F "${COMMIT_MSG}" git push${{ inputs.pr == 'new' && ' --force' || '' }} \ - -u origin "${{ needs.pr.outputs.PR_REF }}" + origin "HEAD:${{ needs.pr.outputs.PR_REF }}" - name: Create new PR if: inputs.pr == 'new' && fromJSON(needs.pr.outputs.JSON).number == 'new' && env.CHANGED == 'true' From 96c0fa67c03bef861a938e0bda1833004fa60af8 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 15:06:52 -0500 Subject: [PATCH 088/102] gha: create local git branch instead of pushing from detached HEAD --- .github/workflows/lockfiles.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index b6a590845b..1101fc42f9 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -447,8 +447,7 @@ jobs: git config --local committer.name "${{ env.BOT_GHA_NAME }}" git config --local committer.email "${{ env.BOT_GHA_EMAIL }}" - - name: Create branch for new PR - if: inputs.pr == 'new' + - name: Create local branch for PR run: | git checkout -b "${{ needs.pr.outputs.PR_REF }}" FETCH_HEAD From f0e53a4893cc5205c695f763044848496325f75d Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 15:38:18 -0500 Subject: [PATCH 089/102] gha: fix pushing to existing PRs --- .github/workflows/lockfiles.yaml | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 1101fc42f9..f5b652e422 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -122,6 +122,7 @@ jobs: url,\ closed,\ author,\ + isCrossRepository,\ maintainerCanModify,\ headRepositoryOwner,\ headRepository,\ @@ -148,6 +149,7 @@ jobs: is_bot: true login: "${{ env.BOT_ST2_LOGIN }}" name: "${{ env.BOT_ST2_NAME }}" + isCrossRepository: false maintainerCanModify: true headRepositoryOwner: id: "${GITHUB_REPOSITORY_OWNER_ID}" @@ -172,7 +174,7 @@ jobs: echo "Found Pull Request #${NEW_PR_NUMBER} by @$(jq -r .author.login <<< "${PR}")" echo "URL: $(jq -r .url <<< "${PR}")" else - CHECKOUT_REF="refs/pull/${{ inputs.pr }}/merge" + CHECKOUT_REF="refs/pull/${{ inputs.pr }}/head" # use /head not /merge! echo "Searching for Pull Request #${{ inputs.pr }} ..." PR=$(gh pr view "${{ inputs.pr }}" --repo "${GITHUB_REPOSITORY}" --json "${PR_FIELDS}") pr_search_rc=$? 
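(Background on the /head vs. /merge switch above, not part of the diff: refs/pull/N/head is the
contributor's branch tip, while refs/pull/N/merge is the synthetic test merge of that branch
into the base that GitHub maintains for CI, so commits stacked on the merge ref would drag the
merge commit and the base branch's history back into the PR when pushed. A local way to compare
the two, with N as a placeholder PR number and assuming the merge ref exists:)

    git fetch origin "pull/N/head:pr-N-head" "pull/N/merge:pr-N-merge"
    git log --oneline -1 pr-N-head    # tip of the PR branch itself
    git log --oneline -1 pr-N-merge   # GitHub's test-merge commit into the base
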
@@ -539,7 +541,8 @@ jobs: run: | git commit -F "${COMMIT_MSG}" git push${{ inputs.pr == 'new' && ' --force' || '' }} \ - origin "HEAD:${{ needs.pr.outputs.PR_REF }}" + "${GITHUB_SERVER_URL}/${{ needs.pr.outputs.PR_REPO }}" \ + "HEAD:${{ needs.pr.outputs.PR_REF }}" - name: Create new PR if: inputs.pr == 'new' && fromJSON(needs.pr.outputs.JSON).number == 'new' && env.CHANGED == 'true' From 9ed0c1283f923bef096b8b9e833963fb6cd7d612 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 17:05:19 -0500 Subject: [PATCH 090/102] gha: fix NEW_PR_NUMBER lookup --- .github/workflows/lockfiles.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index f5b652e422..6210464756 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -131,10 +131,10 @@ jobs: NEW_PR_REF: "regen-lockfiles-${{ github.run_id }}" run: | if [[ "${{ inputs.pr }}" == new ]]; then - NEW_PR_NUMBER="$(gh pr list --json number --repo "${GITHUB_REPOSITORY}" --head "${NEW_PR_REF}" --state open)" - pr_list_rc = $? + NEW_PR_NUMBER="$(gh pr list --repo "${GITHUB_REPOSITORY}" --head "${NEW_PR_REF}" --state open \ + --json number --jq 'if .|length == 0 then "" else .[].number end')" fi - if [[ "${{ inputs.pr }}" == new ]] && [ ${pr_list_rc} -gt 0 ]; then + if [[ "${{ inputs.pr }}" == new ]] && [[ "${NEW_PR_NUMBER}" == "" ]]; then CHECKOUT_REF="${GITHUB_REF}" echo "Planning new Pull Request metadata ..." PR=$( @@ -161,7 +161,7 @@ jobs: baseRefName: "${GITHUB_REF_NAME}" HEREYAML ) - elif [[ "${{ inputs.pr }}" == new ]] && [ ${pr_list_rc} -eq 0 ]; then + elif [[ "${{ inputs.pr }}" == new ]] && [[ "${NEW_PR_NUMBER}" != "" ]]; then CHECKOUT_REF="${GITHUB_REF}" # will be force pushed echo "Planning new Pull Request metadata (PR #${NEW_PR_NUMBER} - workflow re-run ${GITHUB_RUN_ATTEMPT})..." PR=$(gh pr view "${NEW_PR_NUMBER}" --repo "${GITHUB_REPOSITORY}" --json "${PR_FIELDS}") From 6bbaaa3c518c22b3d9c14a648170067a93c8b3ac Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 19:38:23 -0500 Subject: [PATCH 091/102] gha: rework git remote handling for checkout, commit, push It needs to handle new PR and exiting PR. The existing PR can be from a branch in a fork or in this repo. --- .github/workflows/lockfiles.yaml | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 6210464756..97ce5ea432 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -450,8 +450,17 @@ jobs: git config --local committer.email "${{ env.BOT_GHA_EMAIL }}" - name: Create local branch for PR + env: + PR_IS_CROSS_REPOSITORY: ${{ fromJSON(needs.pr.outputs.JSON).isCrossRepository }} + PR_REMOTE: "origin" + PR_REF: ${{ needs.pr.outputs.PR_REF }} run: | - git checkout -b "${{ needs.pr.outputs.PR_REF }}" FETCH_HEAD + if [[ "${PR_IS_CROSS_REPOSITORY}" == "true" ]]; then + PR_REMOTE="fork" + git remote add "${PR_REMOTE}" "${GITHUB_SERVER_URL}/${{ needs.pr.outputs.PR_REPO }}" + fi + echo "PR_REMOTE=${PR_REMOTE}" >> "${GITHUB_ENV}" + git checkout -b "${PR_REF}" --track "${PR_REMOTE}/${PR_REF}" - name: Download lockfiles and lockfile diff files uses: actions/download-artifact@v5 @@ -540,9 +549,7 @@ jobs: # git push --force is for workflow re-runs, but only for new PRs. 
run: | git commit -F "${COMMIT_MSG}" - git push${{ inputs.pr == 'new' && ' --force' || '' }} \ - "${GITHUB_SERVER_URL}/${{ needs.pr.outputs.PR_REPO }}" \ - "HEAD:${{ needs.pr.outputs.PR_REF }}" + git push${{ inputs.pr == 'new' && ' --force' || '' }} "${PR_REMOTE}" "${{ needs.pr.outputs.PR_REF }}" - name: Create new PR if: inputs.pr == 'new' && fromJSON(needs.pr.outputs.JSON).number == 'new' && env.CHANGED == 'true' From 751bc3b4e33f2958c26df891edf2c8ab4152249f Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 19:47:41 -0500 Subject: [PATCH 092/102] gha: rework git remote handling for checkout, commit, push It needs to handle new PR and exiting PR. The existing PR can be from a branch in a fork or in this repo. --- .github/workflows/lockfiles.yaml | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 97ce5ea432..29b64b0429 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -442,25 +442,24 @@ jobs: submodules: 'true' ref: ${{ needs.pr.outputs.CHECKOUT_REF }} - - name: Configure git author, committer + - name: Configure git author, committer, remote + env: + PR_IS_CROSS_REPOSITORY: ${{ fromJSON(needs.pr.outputs.JSON).isCrossRepository }} + PR_REMOTE: "origin" run: | git config --local author.name "${{ env.BOT_ST2_NAME }}" git config --local author.email "${{ env.BOT_ST2_EMAIL }}" git config --local committer.name "${{ env.BOT_GHA_NAME }}" git config --local committer.email "${{ env.BOT_GHA_EMAIL }}" - - - name: Create local branch for PR - env: - PR_IS_CROSS_REPOSITORY: ${{ fromJSON(needs.pr.outputs.JSON).isCrossRepository }} - PR_REMOTE: "origin" - PR_REF: ${{ needs.pr.outputs.PR_REF }} - run: | if [[ "${PR_IS_CROSS_REPOSITORY}" == "true" ]]; then PR_REMOTE="fork" git remote add "${PR_REMOTE}" "${GITHUB_SERVER_URL}/${{ needs.pr.outputs.PR_REPO }}" fi echo "PR_REMOTE=${PR_REMOTE}" >> "${GITHUB_ENV}" - git checkout -b "${PR_REF}" --track "${PR_REMOTE}/${PR_REF}" + + - name: Create local branch for PR + run: | + git checkout -b "${{ needs.pr.outputs.PR_REF }}" - name: Download lockfiles and lockfile diff files uses: actions/download-artifact@v5 @@ -549,7 +548,7 @@ jobs: # git push --force is for workflow re-runs, but only for new PRs. 
run: | git commit -F "${COMMIT_MSG}" - git push${{ inputs.pr == 'new' && ' --force' || '' }} "${PR_REMOTE}" "${{ needs.pr.outputs.PR_REF }}" + git push${{ inputs.pr == 'new' && ' --force' || '' }} -u "${PR_REMOTE}" "${{ needs.pr.outputs.PR_REF }}" - name: Create new PR if: inputs.pr == 'new' && fromJSON(needs.pr.outputs.JSON).number == 'new' && env.CHANGED == 'true' From e5ad76f7b29b67d26cc3fd075dd9ef58a11f395b Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Sat, 27 Sep 2025 14:11:53 -0500 Subject: [PATCH 093/102] update changelog entry --- CHANGELOG.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index bfc3a1f2b3..5131e17027 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -83,7 +83,7 @@ Added #6118 #6141 #6133 #6120 #6181 #6183 #6200 #6237 #6229 #6240 #6241 #6244 #6251 #6253 #6254 #6258 #6259 #6260 #6269 #6275 #6279 #6278 #6282 #6283 #6273 #6287 #6306 #6307 #6311 #6314 #6315 #6317 #6319 #6312 #6320 #6321 #6323 #6324 #6325 #6326 #6327 #6328 - #6329 #6330 #6337 + #6329 #6330 #6337 #6356 Contributed by @cognifloyd * Build of ST2 EL9 packages #6153 Contributed by @amanda11 From 9327862f7506adf8aeccfadafe9421e3880a4f2a Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Mon, 29 Sep 2025 19:55:32 -0500 Subject: [PATCH 094/102] examples.forloop_parse_github_repos: better handle bs4 output bs4 improved their typing allowing pylint to catch a possible coding error in this example. Ultimately it would not have been an issue because of the broad try/except block, but this makes pylint happy. The result set can be None, so handle that case explicitly. --- .../actions/pythonactions/forloop_parse_github_repos.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/contrib/examples/actions/pythonactions/forloop_parse_github_repos.py b/contrib/examples/actions/pythonactions/forloop_parse_github_repos.py index 3ee1a85e57..447cd21146 100644 --- a/contrib/examples/actions/pythonactions/forloop_parse_github_repos.py +++ b/contrib/examples/actions/pythonactions/forloop_parse_github_repos.py @@ -27,7 +27,7 @@ def run(self, content): repo_list = soup.find_all("h3") output = {} - for each_item in repo_list: + for each_item in repo_list or (): repo_half_url = each_item.find("a")["href"] repo_name = repo_half_url.split("/")[-1] repo_url = "https://github.com" + repo_half_url From 6567d3aa3b39ca623b041fe1974a0be427c572bd Mon Sep 17 00:00:00 2001 From: "st2stanley[bot]" <7807286+st2stanley@users.noreply.github.com> Date: Wed, 1 Oct 2025 00:53:39 +0000 Subject: [PATCH 095/102] pants generate-lockfiles: st2, bandit, black, flake8, pants-plugins, pylint, twine MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit __________________________________________________________________ Lockfile diff: lockfiles/st2.lock [st2] __________________________________________________________________ == Upgraded dependencies == ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ bcrypt 4.3.0 --> 5.0.0 beautifulsoup4 4.13.5 --> 4.14.2 httplib2 0.30.0 --> 0.31.0 psutil 7.0.0 --> 7.1.0 pycparser 2.22 --> 2.23 pynacl 1.5.0 --> 1.6.0 pyyaml 6.0.2 --> 6.0.3 simplejson 3.20.1 --> 3.20.2 st2-auth-backend-flat-file 0.3.0 --> 0.4.1 wcwidth 0.2.13 --> 0.2.14 __________________________________________________________________ == Removed dependencies == ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ passlib 1.7.4 __________________________________________________________________ Lockfile diff: lockfiles/bandit.lock 
[bandit] __________________________________________________________________ == Upgraded dependencies == ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ pyyaml 6.0.2 --> 6.0.3 __________________________________________________________________ Lockfile diff: lockfiles/pants-plugins.lock [pants-plugins] __________________________________________________________________ == Upgraded dependencies == ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ pyparsing 3.2.3 --> 3.2.5 pyyaml 6.0.2 --> 6.0.3 __________________________________________________________________ Lockfile diff: lockfiles/twine.lock [twine] __________________________________________________________________ == Upgraded dependencies == ‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾ pycparser 2.22 --> 2.23 No changes to lockfiles/black.lock No changes to lockfiles/flake8.lock No changes to lockfiles/pylint.lock --- fixed-requirements.txt | 10 +- lockfiles/bandit.lock | 110 +++--- lockfiles/pants-plugins.lock | 44 +-- lockfiles/st2.lock | 627 +++++++++++++++++------------------ lockfiles/twine.lock | 10 +- requirements.txt | 10 +- st2actions/requirements.txt | 2 +- st2auth/requirements.txt | 2 +- st2client/requirements.txt | 6 +- st2common/requirements.txt | 2 +- st2tests/requirements.txt | 2 +- test-requirements.txt | 6 +- 12 files changed, 409 insertions(+), 422 deletions(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index 411a0508a1..873e5d4e4a 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -48,7 +48,7 @@ pygments==2.19.2 python-keyczar==0.716 pytz==2025.2 pywinrm==0.5.0 -pyyaml==6.0.2 +pyyaml==6.0.3 redis==6.1.1 requests==2.32.4 # urllib3 is a transitive dep @@ -59,8 +59,8 @@ semver==3.0.4 six==1.17.0 argparse==1.4.0 argcomplete==3.6.2 -prettytable==3.10.2 -importlib-metadata==7.1.0 +prettytable==3.11.0 +importlib-metadata==8.5.0 # 20250909 virtualenv 20.34.0 requires typing-extension>=4.13.2 typing-extensions>=4.13.2; python_version < "3.11" # NOTE: sseclient has various issues which sometimes hang the connection for a long time, etc. 
@@ -79,11 +79,11 @@ webob==1.8.9 webtest==3.0.1 zake==0.2.2 # test requirements below -bcrypt==4.3.0 +bcrypt==5.0.0 jinja2==3.1.6 mock==5.2.0 pytest==7.0.1 -psutil==7.0.0 +psutil==7.1.0 python-dateutil==2.9.0.post0 python-statsd==2.1.0 orjson==3.10.15 diff --git a/lockfiles/bandit.lock b/lockfiles/bandit.lock index a7bccf9852..000e93b574 100644 --- a/lockfiles/bandit.lock +++ b/lockfiles/bandit.lock @@ -228,144 +228,144 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", - "url": "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl" + "hash": "27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7", + "url": "https://files.pythonhosted.org/packages/dd/62/71c27c94f457cf4418ef8ccc71735324c549f7e3ea9d34aba50874563561/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", - "url": "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl" + "hash": "9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4", + "url": "https://files.pythonhosted.org/packages/02/72/d972384252432d57f248767556ac083793292a4adf4e2d85dfe785ec2659/PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", - "url": "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", + "url": "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d", - "url": "https://files.pythonhosted.org/packages/20/52/551c69ca1501d21c0de51ddafa8c23a0191ef296ff098e98358f69080577/PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", + "url": "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz" }, { "algorithm": "sha256", - "hash": "3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", - "url": "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", + "url": "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", - "url": "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl" + "hash": "c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f", + "url": 
"https://files.pythonhosted.org/packages/0d/a2/09f67a3589cb4320fb5ce90d3fd4c9752636b8b6ad8f34b54d76c5a54693/PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", - "url": "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", + "url": "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": "7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", - "url": "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", + "url": "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", - "url": "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", + "url": "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", - "url": "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz" + "hash": "22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6", + "url": "https://files.pythonhosted.org/packages/25/a2/b725b61ac76a75583ae7104b3209f75ea44b13cfd026aa535ece22b7f22e/PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", - "url": "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl" + "hash": "66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", + "url": "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": "688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", - "url": "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl" + "hash": "b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", + "url": "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": 
"ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", - "url": "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", + "url": "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5", - "url": "https://files.pythonhosted.org/packages/74/cc/20c34d00f04d785f2028737e2e2a8254e1425102e730fee1d6396f832577/PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369", + "url": "https://files.pythonhosted.org/packages/6f/b0/b2227677b2d1036d84f5ee95eb948e7af53d59fe3e4328784e4d290607e0/PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a", - "url": "https://files.pythonhosted.org/packages/74/d9/323a59d506f12f498c2097488d80d16f4cf965cee1791eab58b56b19f47a/PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl" + "hash": "b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", + "url": "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", - "url": "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a", + "url": "https://files.pythonhosted.org/packages/73/b9/793686b2d54b531203c160ef12bec60228a0109c79bae6c1277961026770/pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", - "url": "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl" + "hash": "9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", + "url": "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706", - "url": "https://files.pythonhosted.org/packages/8c/ab/6226d3df99900e580091bb44258fde77a8433511a86883bd4681ea19a858/PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl" + "hash": "418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", + "url": "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", - "url": "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl" + "hash": 
"b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da", + "url": "https://files.pythonhosted.org/packages/9f/62/67fc8e68a75f738c9200422bf65693fb79a4cd0dc5b23310e5202e978090/pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", - "url": "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl" + "hash": "efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3", + "url": "https://files.pythonhosted.org/packages/a7/3b/6c58ac0fa7c4e1b35e48024eb03d00817438310447f93ef4431673c24138/PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": "5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", - "url": "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926", + "url": "https://files.pythonhosted.org/packages/a9/86/a137b39a611def2ed78b0e66ce2fe13ee701a07c07aebe55c340ed2a050e/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", - "url": "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl" + "hash": "c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917", + "url": "https://files.pythonhosted.org/packages/ae/92/861f152ce87c452b11b9d0977952259aa7df792d71c1053365cc7b09cc08/pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", - "url": "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl" + "hash": "3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9", + "url": "https://files.pythonhosted.org/packages/d0/cd/f0cfc8c74f8a030017a2b9c771b7f47e5dd702c3e28e5b2071374bda2948/pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", - "url": "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl" + "hash": "1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", + "url": "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", - "url": "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5", + "url": "https://files.pythonhosted.org/packages/ef/b2/18f2bd28cd2055a79a46c9b0895c0b3d987ce40ee471cecf58a1a0199805/pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": 
"cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", - "url": "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl" + "hash": "37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", + "url": "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083", - "url": "https://files.pythonhosted.org/packages/fd/7f/2c3697bba5d4aa5cc2afe81826d73dfae5f049458e44732c7a0938baa673/PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", + "url": "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl" } ], "project_name": "pyyaml", "requires_dists": [], "requires_python": ">=3.8", - "version": "6.0.2" + "version": "6.0.3" }, { "artifacts": [ diff --git a/lockfiles/pants-plugins.lock b/lockfiles/pants-plugins.lock index c229af4fe5..605eccff89 100644 --- a/lockfiles/pants-plugins.lock +++ b/lockfiles/pants-plugins.lock @@ -576,13 +576,13 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", - "url": "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl" + "hash": "e38a4f02064cf41fe6593d328d0512495ad1f3d8a91c4f73fc401b3079a59a5e", + "url": "https://files.pythonhosted.org/packages/10/5e/1aa9a93198c6b64513c9d7752de7422c06402de6600a8767da1524f9570b/pyparsing-3.2.5-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", - "url": "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz" + "hash": "2df8d5b7b2802ef88e8d016a2eb9c7aeaa923529cd251ed0fe4608275d4105b6", + "url": "https://files.pythonhosted.org/packages/f2/a5/181488fc2b9d093e3972d2a472855aae8a03f000592dbfce716a512b3359/pyparsing-3.2.5.tar.gz" } ], "project_name": "pyparsing", @@ -591,7 +591,7 @@ "railroad-diagrams; extra == \"diagrams\"" ], "requires_python": ">=3.9", - "version": "3.2.3" + "version": "3.2.5" }, { "artifacts": [ @@ -658,49 +658,49 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", - "url": "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl" + "hash": "37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", + "url": "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", - "url": "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", + "url": 
"https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz" }, { "algorithm": "sha256", - "hash": "d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", - "url": "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz" + "hash": "10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", + "url": "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", - "url": "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", + "url": "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": "1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", - "url": "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl" + "hash": "652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", + "url": "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", - "url": "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl" + "hash": "44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", + "url": "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", - "url": "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", + "url": "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", - "url": "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl" + "hash": "1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", + "url": "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl" } ], "project_name": "pyyaml", "requires_dists": [], "requires_python": ">=3.8", - "version": "6.0.2" + "version": "6.0.3" }, { "artifacts": [ diff --git 
a/lockfiles/st2.lock b/lockfiles/st2.lock index e506db1705..6f7b2ec5fb 100644 --- a/lockfiles/st2.lock +++ b/lockfiles/st2.lock @@ -324,168 +324,148 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "57967b7a28d855313a963aaea51bf6df89f833db4320da458e5b3c5ab6d4c938", - "url": "https://files.pythonhosted.org/packages/63/13/47bba97924ebe86a62ef83dc75b7c8a881d53c535f83e2c54c4bd701e05c/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl" + "hash": "6b8f520b61e8781efee73cba14e3e8c9556ccfb375623f4f97429544734545b4", + "url": "https://files.pythonhosted.org/packages/e4/f8/972c96f5a2b6c4b3deca57009d93e946bbdbe2241dca9806d502f29dd3ee/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl" }, { "algorithm": "sha256", - "hash": "e965a9c1e9a393b8005031ff52583cedc15b7884fce7deb8b0346388837d6cfe", - "url": "https://files.pythonhosted.org/packages/0c/a1/9898ea3faac0b156d457fd73a3cb9c2855c6fd063e44b8522925cdd8ce46/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_aarch64.whl" + "hash": "2b732e7d388fa22d48920baa267ba5d97cca38070b69c0e2d37087b381c681fd", + "url": "https://files.pythonhosted.org/packages/0b/7e/d4e47d2df1641a36d1212e5c0514f5291e1a956a7749f1e595c07a972038/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl" }, { "algorithm": "sha256", - "hash": "107d53b5c67e0bbc3f03ebf5b030e0403d24dda980f8e244795335ba7b4a027d", - "url": "https://files.pythonhosted.org/packages/10/4f/f77509f08bdff8806ecc4dc472b6e187c946c730565a7470db772d25df70/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl" + "hash": "0c8e093ea2532601a6f686edbc2c6b2ec24131ff5c52f7610dd64fa4553b5464", + "url": "https://files.pythonhosted.org/packages/0f/c3/0ae57a68be2039287ec28bc463b82e4b8dc23f9d12c0be331f4782e19108/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "f81b0ed2639568bf14749112298f9e4e2b28853dab50a8b357e31798686a036d", - "url": "https://files.pythonhosted.org/packages/11/22/5ada0b9af72b60cbc4c9a399fdde4af0feaa609d27eb0adc61607997a3fa/bcrypt-4.3.0-cp38-abi3-macosx_10_12_universal2.whl" + "hash": "d79e5c65dcc9af213594d6f7f1fa2c98ad3fc10431e7aa53c176b441943efbdd", + "url": "https://files.pythonhosted.org/packages/17/72/c344825e3b83c5389a369c8a8e58ffe1480b8a699f46c127c34580c4666b/bcrypt-5.0.0-cp38-abi3-manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "17a854d9a7a476a89dcef6c8bd119ad23e0f82557afbd2c442777a16408e614f", - "url": "https://files.pythonhosted.org/packages/1c/0a/644b2731194b0d7646f3210dc4d80c7fee3ecb3a1f791a6e0ae6bb8684e3/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_aarch64.whl" + "hash": "f8429e1c410b4073944f03bd778a9e066e7fad723564a52ff91841d278dfc822", + "url": "https://files.pythonhosted.org/packages/24/b4/11f8a31d8b67cca3371e046db49baa7c0594d71eb40ac8121e2fc0888db0/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "41261d64150858eeb5ff43c753c4b216991e0ae16614a308a15d909503617732", - "url": "https://files.pythonhosted.org/packages/27/fb/910d3a1caa2d249b6040a5caf9f9866c52114d51523ac2fb47578a27faee/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_x86_64.whl" + "hash": "3ca8a166b1140436e058298a34d88032ab62f15aae1c598580333dc21d27ef10", + "url": "https://files.pythonhosted.org/packages/25/ae/479f81d3f4594456a01ea2f05b132a519eff9ab5768a70430fa1132384b1/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "3e36506d001e93bffe59754397572f21bb5dc7c83f54454c990c74a468cd589e", - "url": 
"https://files.pythonhosted.org/packages/29/5b/4547d5c49b85f0337c13929f2ccbe08b7283069eea3550a457914fc078aa/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "f0ce778135f60799d89c9693b9b398819d15f1921ba15fe719acb3178215a7db", + "url": "https://files.pythonhosted.org/packages/36/c4/ed00ed32f1040f7990dac7115f82273e3c03da1e1a1587a778d8cea496d8/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "b693dbb82b3c27a1604a3dff5bfc5418a7e6a781bb795288141e5f80cf3a3492", - "url": "https://files.pythonhosted.org/packages/35/18/7d9dc16a3a4d530d0a9b845160e9e5d8eb4f00483e05d44bb4116a1861da/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl" + "hash": "560ddb6ec730386e7b3b26b8b4c88197aaed924430e7b74666a586ac997249ef", + "url": "https://files.pythonhosted.org/packages/3b/71/427945e6ead72ccffe77894b2655b695ccf14ae1866cd977e185d606dd2f/bcrypt-5.0.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, { "algorithm": "sha256", - "hash": "97eea7408db3a5bcce4a55d13245ab3fa566e23b4c67cd227062bb49e26c585d", - "url": "https://files.pythonhosted.org/packages/39/48/46f623f1b0c7dc2e5de0b8af5e6f5ac4cc26408ac33f3d424e5ad8da4a90/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_x86_64.whl" + "hash": "89042e61b5e808b67daf24a434d89bab164d4de1746b37a8d173b6b14f3db9ff", + "url": "https://files.pythonhosted.org/packages/43/0a/405c753f6158e0f3f14b00b462d8bca31296f7ecfc8fc8bc7919c0c7d73a/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_x86_64.whl" }, { "algorithm": "sha256", - "hash": "79e70b8342a33b52b55d93b3a59223a844962bef479f6a0ea318ebbcadf71505", - "url": "https://files.pythonhosted.org/packages/40/f2/71b4ed65ce38982ecdda0ff20c3ad1b15e71949c78b2c053df53629ce940/bcrypt-4.3.0-cp39-abi3-musllinux_1_2_x86_64.whl" + "hash": "5b1589f4839a0899c146e8892efe320c0fa096568abd9b95593efac50a87cb75", + "url": "https://files.pythonhosted.org/packages/45/2b/77424511adb11e6a99e3a00dcc7745034bee89036ad7d7e255a7e47be7d8/bcrypt-5.0.0-cp38-abi3-manylinux_2_34_aarch64.whl" }, { "algorithm": "sha256", - "hash": "191354ebfe305e84f344c5964c7cd5f924a3bfc5d405c75ad07f232b6dffb49f", - "url": "https://files.pythonhosted.org/packages/49/8b/70671c3ce9c0fca4a6cc3cc6ccbaa7e948875a2e62cbd146e04a4011899c/bcrypt-4.3.0-cp38-abi3-musllinux_1_1_aarch64.whl" + "hash": "c2388ca94ffee269b6038d48747f4ce8df0ffbea43f31abfa18ac72f0218effb", + "url": "https://files.pythonhosted.org/packages/45/b6/4c1205dde5e464ea3bd88e8742e19f899c16fa8916fb8510a851fae985b5/bcrypt-5.0.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl" }, { "algorithm": "sha256", - "hash": "a839320bf27d474e52ef8cb16449bb2ce0ba03ca9f44daba6d93fa1d8828e48a", - "url": "https://files.pythonhosted.org/packages/4c/b1/1289e21d710496b88340369137cc4c5f6ee036401190ea116a7b4ae6d32a/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl" + "hash": "046ad6db88edb3c5ece4369af997938fb1c19d6a699b9c1b27b0db432faae4c4", + "url": "https://files.pythonhosted.org/packages/54/79/875f9558179573d40a9cc743038ac2bf67dfb79cecb1e8b5d70e88c94c3d/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "3004df1b323d10021fda07a813fd33e0fd57bef0e9a480bb143877f6cba996fe", - "url": "https://files.pythonhosted.org/packages/4d/4d/c43332dcaaddb7710a8ff5269fcccba97ed3c85987ddaa808db084267b9a/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_aarch64.whl" + "hash": "3cf67a804fc66fc217e6914a5635000259fbbbb12e78a99488e4d5ba445a71eb", + "url": 
"https://files.pythonhosted.org/packages/55/ab/a0727a4547e383e2e22a630e0f908113db37904f58719dc48d4622139b5c/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "62f26585e8b219cdc909b6a0069efc5e4267e25d4a3770a364ac58024f62a761", - "url": "https://files.pythonhosted.org/packages/50/b8/6294eb84a3fef3b67c69b4470fcdd5326676806bf2519cda79331ab3c3a9/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_x86_64.whl" + "hash": "0c418ca99fd47e9c59a301744d63328f17798b5947b0f791e9af3c1c499c2d0a", + "url": "https://files.pythonhosted.org/packages/5d/ba/2af136406e1c3839aea9ecadc2f6be2bcd1eff255bd451dd39bcf302c47a/bcrypt-5.0.0-cp39-abi3-macosx_10_12_universal2.whl" }, { "algorithm": "sha256", - "hash": "c950d682f0952bafcceaf709761da0a32a942272fad381081b51096ffa46cea1", - "url": "https://files.pythonhosted.org/packages/55/2d/0c7e5ab0524bf1a443e34cdd3926ec6f5879889b2f3c32b2f5074e99ed53/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl" + "hash": "5feebf85a9cefda32966d8171f5db7e3ba964b77fdfe31919622256f80f9cf42", + "url": "https://files.pythonhosted.org/packages/5f/85/e4fbfc46f14f47b0d20493669a625da5827d07e8a88ee460af6cd9768b44/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_x86_64.whl" }, { "algorithm": "sha256", - "hash": "beeefe437218a65322fbd0069eb437e7c98137e08f22c4660ac2dc795c31f8bb", - "url": "https://files.pythonhosted.org/packages/62/e6/baff635a4f2c42e8788fe1b1633911c38551ecca9a749d1052d296329da6/bcrypt-4.3.0-cp38-abi3-manylinux_2_34_aarch64.whl" + "hash": "e3cf5b2560c7b5a142286f69bde914494b6d8f901aaa71e453078388a50881c4", + "url": "https://files.pythonhosted.org/packages/62/83/b3efc285d4aadc1fa83db385ec64dcfa1707e890eb42f03b127d66ac1b7b/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_aarch64.whl" }, { "algorithm": "sha256", - "hash": "7c03296b85cb87db865d91da79bf63d5609284fc0cab9472fdd8367bbd830753", - "url": "https://files.pythonhosted.org/packages/6d/64/e042fc8262e971347d9230d9abbe70d68b0a549acd8611c83cebd3eaec67/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl" + "hash": "fc746432b951e92b58317af8e0ca746efe93e66555f1b40888865ef5bf56446b", + "url": "https://files.pythonhosted.org/packages/84/29/6237f151fbfe295fe3e074ecc6d44228faa1e842a81f6d34a02937ee1736/bcrypt-5.0.0-cp38-abi3-macosx_10_12_universal2.whl" }, { "algorithm": "sha256", - "hash": "0d3efb1157edebfd9128e4e46e2ac1a64e0c1fe46fb023158a407c7892b0f8c3", - "url": "https://files.pythonhosted.org/packages/6e/c1/3fa0e9e4e0bfd3fd77eb8b52ec198fd6e1fd7e9402052e43f23483f956dd/bcrypt-4.3.0-cp39-abi3-macosx_10_12_universal2.whl" + "hash": "db99dca3b1fdc3db87d7c57eac0c82281242d1eabf19dcb8a6b10eb29a2e72d1", + "url": "https://files.pythonhosted.org/packages/89/48/44590e3fc158620f680a978aafe8f87a4c4320da81ed11552f0323aa9a57/bcrypt-5.0.0-cp39-abi3-musllinux_1_1_aarch64.whl" }, { "algorithm": "sha256", - "hash": "bdc6a24e754a555d7316fa4774e64c6c3997d27ed2d1964d55920c7c227bc4ce", - "url": "https://files.pythonhosted.org/packages/94/41/19be9fe17e4ffc5d10b7b67f10e459fc4eee6ffe9056a88de511920cfd8d/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl" + "hash": "7edda91d5ab52b15636d9c30da87d2cc84f426c72b9dba7a9b4fe142ba11f534", + "url": "https://files.pythonhosted.org/packages/8a/75/4aa9f5a4d40d762892066ba1046000b329c7cd58e888a6db878019b282dc/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "f6746e6fec103fcd509b96bacdfdaa2fbde9a553245dbada284435173a6f1aef", - "url": 
"https://files.pythonhosted.org/packages/9b/5d/805ef1a749c965c46b28285dfb5cd272a7ed9fa971f970435a5133250182/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "f632fd56fc4e61564f78b46a2269153122db34988e78b6be8b32d28507b7eaeb", + "url": "https://files.pythonhosted.org/packages/95/7d/47ee337dacecde6d234890fe929936cb03ebc4c3a7460854bbd9c97780b8/bcrypt-5.0.0-cp38-abi3-musllinux_1_1_x86_64.whl" }, { "algorithm": "sha256", - "hash": "55a935b8e9a1d2def0626c4269db3fcd26728cbff1e84f0341465c31c4ee56d8", - "url": "https://files.pythonhosted.org/packages/aa/73/05687a9ef89edebdd8ad7474c16d8af685eb4591c3c38300bb6aad4f0076/bcrypt-4.3.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl" + "hash": "edfcdcedd0d0f05850c52ba3127b1fce70b9f89e0fe5ff16517df7e81fa3cbb8", + "url": "https://files.pythonhosted.org/packages/ac/31/79f11865f8078e192847d2cb526e3fa27c200933c982c5b2869720fa5fce/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_aarch64.whl" }, { "algorithm": "sha256", - "hash": "afe327968aaf13fc143a56a3360cb27d4ad0345e34da12c7290f1b00b8fe9a8b", - "url": "https://files.pythonhosted.org/packages/ab/2b/698580547a4a4988e415721b71eb45e80c879f0fb04a62da131f45987b96/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_aarch64.whl" + "hash": "ddb4e1500f6efdd402218ffe34d040a1196c072e07929b9820f363a1fd1f4191", + "url": "https://files.pythonhosted.org/packages/ac/ee/2f4985dbad090ace5ad1f7dd8ff94477fe089b5fab2040bd784a3d5f187b/bcrypt-5.0.0-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl" }, { "algorithm": "sha256", - "hash": "864f8f19adbe13b7de11ba15d85d4a428c7e2f344bac110f667676a0ff84924b", - "url": "https://files.pythonhosted.org/packages/b8/8c/252a1edc598dc1ce57905be173328eda073083826955ee3c97c7ff5ba584/bcrypt-4.3.0-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "dcd58e2b3a908b5ecc9b9df2f0085592506ac2d5110786018ee5e160f28e0911", + "url": "https://files.pythonhosted.org/packages/bc/fe/975adb8c216174bf70fc17535f75e85ac06ed5252ea077be10d9cff5ce24/bcrypt-5.0.0-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl" }, { "algorithm": "sha256", - "hash": "3a3fd2204178b6d2adcf09cb4f6426ffef54762577a7c9b54c159008cb288c18", - "url": "https://files.pythonhosted.org/packages/bb/5d/6d7433e0f3cd46ce0b43cd65e1db465ea024dbb8216fb2404e919c2ad77b/bcrypt-4.3.0.tar.gz" + "hash": "f748f7c2d6fd375cc93d3fba7ef4a9e3a092421b8dbf34d8d4dc06be9492dfdd", + "url": "https://files.pythonhosted.org/packages/d4/36/3329e2518d70ad8e2e5817d5a4cac6bba05a47767ec416c7d020a965f408/bcrypt-5.0.0.tar.gz" }, { "algorithm": "sha256", - "hash": "842d08d75d9fe9fb94b18b071090220697f9f184d4547179b60734846461ed59", - "url": "https://files.pythonhosted.org/packages/be/21/7dbaf3fa1745cb63f776bb046e481fbababd7d344c5324eab47f5ca92dd2/bcrypt-4.3.0-cp38-abi3-manylinux_2_28_aarch64.whl" + "hash": "611f0a17aa4a25a69362dcc299fda5c8a3d4f160e2abb3831041feb77393a14a", + "url": "https://files.pythonhosted.org/packages/d4/8d/5e43d9584b3b3591a6f9b68f755a4da879a59712981ef5ad2a0ac1379f7a/bcrypt-5.0.0-cp39-abi3-manylinux_2_34_x86_64.whl" }, { "algorithm": "sha256", - "hash": "f1e3ffa1365e8702dc48c8b360fef8d7afeca482809c5e45e653af82ccd088c1", - "url": "https://files.pythonhosted.org/packages/cb/c6/8fedca4c2ada1b6e889c52d2943b2f968d3427e5d65f595620ec4c06fa2f/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_x86_64.whl" + "hash": "801cad5ccb6b87d1b430f183269b94c24f248dddbbc5c1f78b6ed231743e001c", + "url": "https://files.pythonhosted.org/packages/d6/3a/43d494dfb728f55f4e1cf8fd435d50c16a2d75493225b54c8d06122523c6/bcrypt-5.0.0-cp38-abi3-musllinux_1_2_aarch64.whl" }, { 
"algorithm": "sha256", - "hash": "08bacc884fd302b611226c01014eca277d48f0a05187666bca23aac0dad6fe24", - "url": "https://files.pythonhosted.org/packages/ce/d4/755ce19b6743394787fbd7dff6bf271b27ee9b5912a97242e3caf125885b/bcrypt-4.3.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "61afc381250c3182d9078551e3ac3a41da14154fbff647ddf52a769f588c4172", + "url": "https://files.pythonhosted.org/packages/df/d2/36a086dee1473b14276cd6ea7f61aef3b2648710b5d7f1c9e032c29b859f/bcrypt-5.0.0-cp39-abi3-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "6fb1fd3ab08c0cbc6826a2e0447610c6f09e983a281b919ed721ad32236b8b23", - "url": "https://files.pythonhosted.org/packages/dc/62/2a871837c0bb6ab0c9a88bf54de0fc021a6a08832d4ea313ed92a669d437/bcrypt-4.3.0-cp39-abi3-musllinux_1_1_x86_64.whl" + "hash": "7aeef54b60ceddb6f30ee3db090351ecf0d40ec6e2abf41430997407a46d2254", + "url": "https://files.pythonhosted.org/packages/e4/6e/b77ade812672d15cf50842e167eead80ac3514f3beacac8902915417f8b7/bcrypt-5.0.0-cp39-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" }, { "algorithm": "sha256", - "hash": "531457e5c839d8caea9b589a1bcfe3756b0547d7814e9ce3d437f17da75c32b0", - "url": "https://files.pythonhosted.org/packages/dc/7f/1e36379e169a7df3a14a1c160a49b7b918600a6008de43ff20d479e6f4b5/bcrypt-4.3.0-cp39-abi3-manylinux_2_34_x86_64.whl" - }, - { - "algorithm": "sha256", - "hash": "33752b1ba962ee793fa2b6321404bf20011fe45b9afd2a842139de3011898fef", - "url": "https://files.pythonhosted.org/packages/dc/cf/7cf3a05b66ce466cfb575dbbda39718d45a609daa78500f57fa9f36fa3c0/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_aarch64.whl" - }, - { - "algorithm": "sha256", - "hash": "b6354d3760fcd31994a14c89659dee887f1351a06e5dac3c1142307172a79f90", - "url": "https://files.pythonhosted.org/packages/df/c4/ae6921088adf1e37f2a3a6a688e72e7d9e45fdd3ae5e0bc931870c1ebbda/bcrypt-4.3.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl" - }, - { - "algorithm": "sha256", - "hash": "50e6e80a4bfd23a25f5c05b90167c19030cf9f87930f7cb2eacb99f45d1c3304", - "url": "https://files.pythonhosted.org/packages/e3/b8/e970ecc6d7e355c0d892b7f733480f4aa8509f99b33e71550242cf0b7e63/bcrypt-4.3.0-cp38-abi3-musllinux_1_2_x86_64.whl" - }, - { - "algorithm": "sha256", - "hash": "d9af79d322e735b1fc33404b5765108ae0ff232d4b54666d46730f8ac1a43676", - "url": "https://files.pythonhosted.org/packages/f2/87/62e1e426418204db520f955ffd06f1efd389feca893dad7095bf35612eec/bcrypt-4.3.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl" + "hash": "a71f70ee269671460b37a449f5ff26982a6f2ba493b3eabdd687b4bf35f875ac", + "url": "https://files.pythonhosted.org/packages/e7/c4/fa6e16145e145e87f1fa351bbd54b429354fd72145cd3d4e0c5157cf4c70/bcrypt-5.0.0-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl" } ], "project_name": "bcrypt", @@ -494,19 +474,19 @@ "pytest!=3.3.0,>=3.2.1; extra == \"tests\"" ], "requires_python": ">=3.8", - "version": "4.3.0" + "version": "5.0.0" }, { "artifacts": [ { "algorithm": "sha256", - "hash": "642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", - "url": "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl" + "hash": "5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", + "url": "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695", - 
"url": "https://files.pythonhosted.org/packages/85/2e/3e5079847e653b1f6dc647aa24549d68c6addb4c595cc0d902d1b19308ad/beautifulsoup4-4.13.5.tar.gz" + "hash": "2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", + "url": "https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz" } ], "project_name": "beautifulsoup4", @@ -520,7 +500,7 @@ "typing-extensions>=4.0.0" ], "requires_python": ">=3.7.0", - "version": "4.13.5" + "version": "4.14.2" }, { "artifacts": [ @@ -2116,13 +2096,13 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "d10443a2bdfe0ea5dbb17e016726146d48b574208dafd41e854cf34e7d78842c", - "url": "https://files.pythonhosted.org/packages/87/7c/f35bd530a35654ef3ff81f5e102572b8b620361659e090beb85a73a3bcc9/httplib2-0.30.0-py3-none-any.whl" + "hash": "b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24", + "url": "https://files.pythonhosted.org/packages/8c/a2/0d269db0f6163be503775dc8b6a6fa15820cc9fdc866f6ba608d86b721f2/httplib2-0.31.0-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "d5b23c11fcf8e57e00ff91b7008656af0f6242c8886fd97065c97509e4e548c5", - "url": "https://files.pythonhosted.org/packages/5b/75/1d10a90b3411f707c10c226fa918cf4f5e0578113caa223369130f702b6b/httplib2-0.30.0.tar.gz" + "hash": "ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c", + "url": "https://files.pythonhosted.org/packages/52/77/6653db69c1f7ecfe5e3f9726fdadc981794656fcd7d98c4209fecfea9993/httplib2-0.31.0.tar.gz" } ], "project_name": "httplib2", @@ -2130,7 +2110,7 @@ "pyparsing<4,>=3.0.4" ], "requires_python": ">=3.6", - "version": "0.30.0" + "version": "0.31.0" }, { "artifacts": [ @@ -3439,31 +3419,6 @@ "requires_python": ">=3.6", "version": "3.5.1" }, - { - "artifacts": [ - { - "algorithm": "sha256", - "hash": "aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", - "url": "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl" - }, - { - "algorithm": "sha256", - "hash": "defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", - "url": "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz" - } - ], - "project_name": "passlib", - "requires_dists": [ - "argon2-cffi>=18.2.0; extra == \"argon2\"", - "bcrypt>=3.1.0; extra == \"bcrypt\"", - "cloud-sptheme>=1.10.1; extra == \"build-docs\"", - "cryptography; extra == \"totp\"", - "sphinx>=1.6; extra == \"build-docs\"", - "sphinxcontrib-fulltoc>=1.2.0; extra == \"build-docs\"" - ], - "requires_python": null, - "version": "1.7.4" - }, { "artifacts": [ { @@ -3694,50 +3649,57 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993", - "url": "https://files.pythonhosted.org/packages/eb/a2/709e0fe2f093556c17fbafda93ac032257242cabcc7ff3369e2cb76a97aa/psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "7d4a113425c037300de3ac8b331637293da9be9713855c4fc9d2d97436d7259d", + "url": "https://files.pythonhosted.org/packages/f4/58/c4f976234bf6d4737bc8c02a81192f045c307b72cf39c9e5c5a2d78927f6/psutil-7.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { "algorithm": "sha256", - "hash": "39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da", - "url": 
"https://files.pythonhosted.org/packages/04/8b/30f930733afe425e3cbfc0e1468a30a18942350c1a8816acfade80c005c4/psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl" + "hash": "5d007560c8c372efdff9e4579c2846d71de737e4605f611437255e81efcca2c5", + "url": "https://files.pythonhosted.org/packages/38/61/f76959fba841bf5b61123fbf4b650886dc4094c6858008b5bf73d9057216/psutil-7.1.0-cp36-abi3-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456", - "url": "https://files.pythonhosted.org/packages/2a/80/336820c1ad9286a4ded7e845b2eccfcb27851ab8ac6abece774a6ff4d3de/psutil-7.0.0.tar.gz" + "hash": "76168cef4397494250e9f4e73eb3752b146de1dd950040b29186d0cce1d5ca13", + "url": "https://files.pythonhosted.org/packages/46/62/ce4051019ee20ce0ed74432dd73a5bb087a6704284a470bb8adff69a0932/psutil-7.1.0-cp36-abi3-macosx_10_9_x86_64.whl" }, { "algorithm": "sha256", - "hash": "1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91", - "url": "https://files.pythonhosted.org/packages/2a/ed/d362e84620dd22876b55389248e522338ed1bf134a5edd3b8231d7207f6d/psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl" + "hash": "22e4454970b32472ce7deaa45d045b34d3648ce478e26a04c7e858a0a6e75ff3", + "url": "https://files.pythonhosted.org/packages/88/7a/37c99d2e77ec30d63398ffa6a660450b8a62517cabe44b3e9bae97696e8d/psutil-7.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl" }, { "algorithm": "sha256", - "hash": "4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34", - "url": "https://files.pythonhosted.org/packages/bf/b9/b0eb3f3cbcb734d930fdf839431606844a825b23eaf9a6ab371edac8162c/psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "8c70e113920d51e89f212dd7be06219a9b88014e63a4cec69b684c327bc474e3", + "url": "https://files.pythonhosted.org/packages/9d/de/04c8c61232f7244aa0a4b9a9fbd63a89d5aeaf94b2fc9d1d16e2faa5cbb0/psutil-7.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }, { "algorithm": "sha256", - "hash": "101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25", - "url": "https://files.pythonhosted.org/packages/ed/e6/2d26234410f8b8abdbf891c9da62bee396583f713fb9f3325a4760875d22/psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl" + "hash": "655708b3c069387c8b77b072fc429a57d0e214221d01c0a772df7dfedcb3bcd2", + "url": "https://files.pythonhosted.org/packages/b3/31/4723d756b59344b643542936e37a31d1d3204bcdc42a7daa8ee9eb06fb50/psutil-7.1.0.tar.gz" } ], "project_name": "psutil", "requires_dists": [ "abi3audit; extra == \"dev\"", - "black==24.10.0; extra == \"dev\"", + "black; extra == \"dev\"", "check-manifest; extra == \"dev\"", "coverage; extra == \"dev\"", "packaging; extra == \"dev\"", "pylint; extra == \"dev\"", "pyperf; extra == \"dev\"", "pypinfo; extra == \"dev\"", + "pyreadline; os_name == \"nt\" and extra == \"dev\"", "pytest-cov; extra == \"dev\"", + "pytest-instafail; extra == \"dev\"", + "pytest-instafail; extra == \"test\"", + "pytest-subtests; extra == \"dev\"", + "pytest-subtests; extra == \"test\"", "pytest-xdist; extra == \"dev\"", "pytest-xdist; extra == \"test\"", "pytest; extra == \"dev\"", "pytest; extra == \"test\"", + "pywin32; (os_name == \"nt\" and platform_python_implementation != \"PyPy\") and extra == \"dev\"", + "pywin32; (os_name == \"nt\" and platform_python_implementation != \"PyPy\") and extra == \"test\"", 
"requests; extra == \"dev\"", "rstcheck; extra == \"dev\"", "ruff; extra == \"dev\"", @@ -3749,10 +3711,14 @@ "twine; extra == \"dev\"", "virtualenv; extra == \"dev\"", "vulture; extra == \"dev\"", - "wheel; extra == \"dev\"" + "wheel; (os_name == \"nt\" and platform_python_implementation != \"PyPy\") and extra == \"dev\"", + "wheel; (os_name == \"nt\" and platform_python_implementation != \"PyPy\") and extra == \"test\"", + "wheel; extra == \"dev\"", + "wmi; (os_name == \"nt\" and platform_python_implementation != \"PyPy\") and extra == \"dev\"", + "wmi; (os_name == \"nt\" and platform_python_implementation != \"PyPy\") and extra == \"test\"" ], "requires_python": ">=3.6", - "version": "7.0.0" + "version": "7.1.0" }, { "artifacts": [ @@ -3832,19 +3798,19 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", - "url": "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl" + "hash": "e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", + "url": "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", - "url": "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz" + "hash": "78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", + "url": "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz" } ], "project_name": "pycparser", "requires_dists": [], "requires_python": ">=3.8", - "version": "2.22" + "version": "2.23" }, { "artifacts": [ @@ -4206,55 +4172,78 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff", - "url": "https://files.pythonhosted.org/packages/fd/1a/cc308a884bd299b651f1633acb978e8596c71c33ca85e9dc9fa33a5399b9/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl" + "hash": "4a25cfede801f01e54179b8ff9514bd7b5944da560b7040939732d1804d25419", + "url": "https://files.pythonhosted.org/packages/63/ef/d972ce3d92ae05c9091363cf185e8646933f91c376e97b8be79ea6e96c22/pynacl-1.6.0-cp38-abi3-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b", - "url": "https://files.pythonhosted.org/packages/3d/85/c262db650e86812585e2bc59e497a8f59948a005325a11bbbc9ecd3fe26b/PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl" + "hash": "cb36deafe6e2bce3b286e5d1f3e1c246e0ccdb8808ddb4550bb2792f2df298f2", + "url": "https://files.pythonhosted.org/packages/06/c6/a3124dee667a423f2c637cfd262a54d67d8ccf3e160f3c50f622a85b7723/pynacl-1.6.0.tar.gz" }, { "algorithm": "sha256", - "hash": "52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92", - "url": "https://files.pythonhosted.org/packages/59/bb/fddf10acd09637327a97ef89d2a9d621328850a72f1fdc8c08bdf72e385f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl" + "hash": "16dd347cdc8ae0b0f6187a2608c0af1c8b7ecbbe6b4a06bff8253c192f696990", + "url": "https://files.pythonhosted.org/packages/0c/64/3ce958a5817fd3cc6df4ec14441c43fd9854405668d73babccf77f9597a3/pynacl-1.6.0-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl" }, { "algorithm": "sha256", - "hash": 
"a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394", - "url": "https://files.pythonhosted.org/packages/5d/70/87a065c37cca41a75f2ce113a5a2c2aa7533be648b184ade58971b5f7ccc/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "49c336dd80ea54780bcff6a03ee1a476be1612423010472e60af83452aa0f442", + "url": "https://files.pythonhosted.org/packages/12/30/5efcef3406940cda75296c6d884090b8a9aad2dcc0c304daebb5ae99fb4a/pynacl-1.6.0-cp38-abi3-manylinux_2_34_x86_64.whl" }, { "algorithm": "sha256", - "hash": "06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858", - "url": "https://files.pythonhosted.org/packages/66/28/ca86676b69bf9f90e710571b67450508484388bfce09acf8a46f0b8c785f/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "25720bad35dfac34a2bcdd61d9e08d6bfc6041bebc7751d9c9f2446cf1e77d64", + "url": "https://files.pythonhosted.org/packages/41/94/028ff0434a69448f61348d50d2c147dda51aabdd4fbc93ec61343332174d/pynacl-1.6.0-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba", - "url": "https://files.pythonhosted.org/packages/a7/22/27582568be639dfe22ddb3902225f91f2f17ceff88ce80e4db396c8986da/PyNaCl-1.5.0.tar.gz" + "hash": "8bfaa0a28a1ab718bad6239979a5a57a8d1506d0caf2fba17e524dbb409441cf", + "url": "https://files.pythonhosted.org/packages/52/bc/a5cff7f8c30d5f4c26a07dfb0bcda1176ab8b2de86dda3106c00a02ad787/pynacl-1.6.0-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1", - "url": "https://files.pythonhosted.org/packages/ce/75/0b8ede18506041c0bf23ac4d8e2971b4161cd6ce630b177d0a08eb0d8857/PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl" + "hash": "f4b3824920e206b4f52abd7de621ea7a44fd3cb5c8daceb7c3612345dfc54f2e", + "url": "https://files.pythonhosted.org/packages/63/37/87c72df19857c5b3b47ace6f211a26eb862ada495cc96daa372d96048fca/pynacl-1.6.0-cp38-abi3-macosx_10_10_universal2.whl" }, { "algorithm": "sha256", - "hash": "0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d", - "url": "https://files.pythonhosted.org/packages/ee/87/f1bb6a595f14a327e8285b9eb54d41fef76c585a0edef0a45f6fc95de125/PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl" + "hash": "6b393bc5e5a0eb86bb85b533deb2d2c815666665f840a09e0aa3362bb6088736", + "url": "https://files.pythonhosted.org/packages/6d/38/9e9e9b777a1c4c8204053733e1a0269672c0bd40852908c9ad6b6eaba82c/pynacl-1.6.0-cp38-abi3-musllinux_1_2_aarch64.whl" + }, + { + "algorithm": "sha256", + "hash": "ef214b90556bb46a485b7da8258e59204c244b1b5b576fb71848819b468c44a7", + "url": "https://files.pythonhosted.org/packages/7a/20/c397be374fd5d84295046e398de4ba5f0722dc14450f65db76a43c121471/pynacl-1.6.0-cp38-abi3-manylinux_2_34_aarch64.whl" + }, + { + "algorithm": "sha256", + "hash": "140373378e34a1f6977e573033d1dd1de88d2a5d90ec6958c9485b2fd9f3eb90", + "url": "https://files.pythonhosted.org/packages/a3/76/8a62702fb657d6d9104ce13449db221a345665d05e6a3fdefb5a7cafd2ad/pynacl-1.6.0-cp38-abi3-musllinux_1_1_x86_64.whl" + }, + { + "algorithm": "sha256", + "hash": "f3482abf0f9815e7246d461fab597aa179b7524628a4bc36f86a7dc418d2608d", + "url": "https://files.pythonhosted.org/packages/be/e1/a8fe1248cc17ccb03b676d80fa90763760a6d1247da434844ea388d0816c/pynacl-1.6.0-cp38-abi3-musllinux_1_1_aarch64.whl" + }, + { + "algorithm": "sha256", + "hash": 
"16c60daceee88d04f8d41d0a4004a7ed8d9a5126b997efd2933e08e93a3bd850", + "url": "https://files.pythonhosted.org/packages/e4/8a/3f0dd297a0a33fa3739c255feebd0206bb1df0b44c52fbe2caf8e8bc4425/pynacl-1.6.0-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl" } ], "project_name": "pynacl", "requires_dists": [ - "cffi>=1.4.1", + "cffi>=1.4.1; platform_python_implementation != \"PyPy\" and python_version < \"3.14\"", + "cffi>=2.0.0; platform_python_implementation != \"PyPy\" and python_version >= \"3.14\"", "hypothesis>=3.27.0; extra == \"tests\"", - "pytest!=3.3.0,>=3.2.1; extra == \"tests\"", - "sphinx-rtd-theme; extra == \"docs\"", - "sphinx>=1.6.5; extra == \"docs\"" + "pytest-cov>=2.10.1; extra == \"tests\"", + "pytest-xdist>=3.5.0; extra == \"tests\"", + "pytest>=7.4.0; extra == \"tests\"", + "sphinx<7; extra == \"docs\"", + "sphinx_rtd_theme; extra == \"docs\"" ], - "requires_python": ">=3.6", - "version": "1.5.0" + "requires_python": ">=3.8", + "version": "1.6.0" }, { "artifacts": [ @@ -4735,144 +4724,144 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", - "url": "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl" + "hash": "27c0abcb4a5dac13684a37f76e701e054692a9b2d3064b70f5e4eb54810553d7", + "url": "https://files.pythonhosted.org/packages/dd/62/71c27c94f457cf4418ef8ccc71735324c549f7e3ea9d34aba50874563561/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", - "url": "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl" + "hash": "9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4", + "url": "https://files.pythonhosted.org/packages/02/72/d972384252432d57f248767556ac083793292a4adf4e2d85dfe785ec2659/PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", - "url": "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", + "url": "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d", - "url": "https://files.pythonhosted.org/packages/20/52/551c69ca1501d21c0de51ddafa8c23a0191ef296ff098e98358f69080577/PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", + "url": "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz" }, { "algorithm": "sha256", - "hash": "3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", - "url": "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", + "url": 
"https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", - "url": "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl" + "hash": "c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f", + "url": "https://files.pythonhosted.org/packages/0d/a2/09f67a3589cb4320fb5ce90d3fd4c9752636b8b6ad8f34b54d76c5a54693/PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", - "url": "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", + "url": "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": "7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", - "url": "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", + "url": "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", - "url": "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", + "url": "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", - "url": "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz" + "hash": "22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6", + "url": "https://files.pythonhosted.org/packages/25/a2/b725b61ac76a75583ae7104b3209f75ea44b13cfd026aa535ece22b7f22e/PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", - "url": "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl" + "hash": "66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", + "url": "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": 
"688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", - "url": "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl" + "hash": "b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", + "url": "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", - "url": "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", + "url": "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5", - "url": "https://files.pythonhosted.org/packages/74/cc/20c34d00f04d785f2028737e2e2a8254e1425102e730fee1d6396f832577/PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369", + "url": "https://files.pythonhosted.org/packages/6f/b0/b2227677b2d1036d84f5ee95eb948e7af53d59fe3e4328784e4d290607e0/PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a", - "url": "https://files.pythonhosted.org/packages/74/d9/323a59d506f12f498c2097488d80d16f4cf965cee1791eab58b56b19f47a/PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl" + "hash": "b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", + "url": "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", - "url": "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a", + "url": "https://files.pythonhosted.org/packages/73/b9/793686b2d54b531203c160ef12bec60228a0109c79bae6c1277961026770/pyyaml-6.0.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", - "url": "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl" + "hash": "9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", + "url": "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl" }, { "algorithm": "sha256", - "hash": "82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706", - "url": 
"https://files.pythonhosted.org/packages/8c/ab/6226d3df99900e580091bb44258fde77a8433511a86883bd4681ea19a858/PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl" + "hash": "418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", + "url": "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", - "url": "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl" + "hash": "b865addae83924361678b652338317d1bd7e79b1f4596f96b96c77a5a34b34da", + "url": "https://files.pythonhosted.org/packages/9f/62/67fc8e68a75f738c9200422bf65693fb79a4cd0dc5b23310e5202e978090/pyyaml-6.0.3-cp39-cp39-macosx_10_13_x86_64.whl" }, { "algorithm": "sha256", - "hash": "ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", - "url": "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl" + "hash": "efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3", + "url": "https://files.pythonhosted.org/packages/a7/3b/6c58ac0fa7c4e1b35e48024eb03d00817438310447f93ef4431673c24138/PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": "5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", - "url": "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "fa160448684b4e94d80416c0fa4aac48967a969efe22931448d853ada8baf926", + "url": "https://files.pythonhosted.org/packages/a9/86/a137b39a611def2ed78b0e66ce2fe13ee701a07c07aebe55c340ed2a050e/pyyaml-6.0.3-cp39-cp39-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", - "url": "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl" + "hash": "c3355370a2c156cffb25e876646f149d5d68f5e0a3ce86a5084dd0b64a994917", + "url": "https://files.pythonhosted.org/packages/ae/92/861f152ce87c452b11b9d0977952259aa7df792d71c1053365cc7b09cc08/pyyaml-6.0.3-cp39-cp39-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", - "url": "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl" + "hash": "3c5677e12444c15717b902a5798264fa7909e41153cdf9ef7ad571b704a63dd9", + "url": "https://files.pythonhosted.org/packages/d0/cd/f0cfc8c74f8a030017a2b9c771b7f47e5dd702c3e28e5b2071374bda2948/pyyaml-6.0.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl" }, { "algorithm": "sha256", - "hash": "0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", - "url": "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl" + "hash": "1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", + "url": 
"https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", - "url": "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl" + "hash": "5ed875a24292240029e4483f9d4a4b8a1ae08843b9c54f43fcc11e404532a8a5", + "url": "https://files.pythonhosted.org/packages/ef/b2/18f2bd28cd2055a79a46c9b0895c0b3d987ce40ee471cecf58a1a0199805/pyyaml-6.0.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl" }, { "algorithm": "sha256", - "hash": "cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", - "url": "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl" + "hash": "37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", + "url": "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083", - "url": "https://files.pythonhosted.org/packages/fd/7f/2c3697bba5d4aa5cc2afe81826d73dfae5f049458e44732c7a0938baa673/PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", + "url": "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl" } ], "project_name": "pyyaml", "requires_dists": [], "requires_python": ">=3.8", - "version": "6.0.2" + "version": "6.0.3" }, { "artifacts": [ @@ -5335,239 +5324,239 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "8a6c1bbac39fa4a79f83cbf1df6ccd8ff7069582a9fd8db1e52cea073bc2c697", - "url": "https://files.pythonhosted.org/packages/4b/30/00f02a0a921556dd5a6db1ef2926a1bc7a8bbbfb1c49cfed68a275b8ab2b/simplejson-3.20.1-py3-none-any.whl" + "hash": "3b6bb7fb96efd673eac2e4235200bfffdc2353ad12c54117e1e4e2fc485ac017", + "url": "https://files.pythonhosted.org/packages/05/5b/83e1ff87eb60ca706972f7e02e15c0b33396e7bdbd080069a5d1b53cf0d8/simplejson-3.20.2-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "6e18345c8dda5d699be8166b61f9d80aaee4545b709f1363f60813dc032dac53", - "url": "https://files.pythonhosted.org/packages/09/68/1e81ed83f38906c8859f2b973afb19302357d6003e724a6105cee0f61ec7/simplejson-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "792debfba68d8dd61085ffb332d72b9f5b38269cda0c99f92c7a054382f55246", + "url": "https://files.pythonhosted.org/packages/08/a1/69a6e4ec69b585724cc9bee2d7f725c155d3ab8f9d3925b67c709a6e5a19/simplejson-3.20.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" }, { "algorithm": "sha256", - "hash": "4a586ce4f78cec11f22fe55c5bee0f067e803aab9bad3441afe2181693b5ebb5", - "url": "https://files.pythonhosted.org/packages/15/ca/56a6a2a33cbcf330c4d71af3f827c47e4e0ba791e78f2642f3d1ab02ff31/simplejson-3.20.1-cp310-cp310-macosx_10_9_x86_64.whl" + "hash": "f1036be00b5edaddbddbb89c0f80ed229714a941cfd21e51386dc69c237201c2", + "url": 
"https://files.pythonhosted.org/packages/09/36/4e282f5211b34620f1b2e4b51d9ddaab5af82219b9b7b78360a33f7e5387/simplejson-3.20.2-cp310-cp310-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "eea7e2b7d858f6fdfbf0fe3cb846d6bd8a45446865bc09960e51f3d473c2271b", - "url": "https://files.pythonhosted.org/packages/21/47/50157810876c2a7ebbd6e6346ec25eda841fe061fecaa02538a7742a3d2a/simplejson-3.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" + "hash": "2fb1259ca9c385b0395bad59cdbf79535a5a84fb1988f339a49bfbc57455a35a", + "url": "https://files.pythonhosted.org/packages/09/4f/be411eeb52ab21d6d4c00722b632dd2bd430c01a47dfed3c15ef5ad7ee6e/simplejson-3.20.2-cp39-cp39-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "e580aa65d5f6c3bf41b9b4afe74be5d5ddba9576701c107c772d936ea2b5043a", - "url": "https://files.pythonhosted.org/packages/25/c4/627214fb418cd4a17fb0230ff0b6c3bb4a85cbb48dd69c85dcc3b85df828/simplejson-3.20.1-cp310-cp310-macosx_10_9_universal2.whl" + "hash": "c01379b4861c3b0aa40cba8d44f2b448f5743999aa68aaa5d3ef7049d4a28a2d", + "url": "https://files.pythonhosted.org/packages/0f/33/c3277db8931f0ae9e54b9292668863365672d90fb0f632f4cf9829cb7d68/simplejson-3.20.2-cp310-cp310-macosx_10_9_x86_64.whl" }, { "algorithm": "sha256", - "hash": "ceab2ce2acdc7fbaa433a93006758db6ba9a659e80c4faa13b80b9d2318e9b17", - "url": "https://files.pythonhosted.org/packages/26/94/cab4db9530b6ca9d62f16a260e8311b04130ccd670dab75e958fcb44590e/simplejson-3.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "0cec1868b237fe9fb2d466d6ce0c7b772e005aadeeda582d867f6f1ec9710cad", + "url": "https://files.pythonhosted.org/packages/13/eb/7e087b061d6f94e6ba41c2e589267b9349fd3abb27ce59080c1c89fe9785/simplejson-3.20.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "a3c2df555ee4016148fa192e2b9cd9e60bc1d40769366134882685e90aee2a1e", - "url": "https://files.pythonhosted.org/packages/31/db/00d1a8d9b036db98f678c8a3c69ed17d2894d1768d7a00576e787ad3e546/simplejson-3.20.1-cp310-cp310-musllinux_1_2_aarch64.whl" + "hash": "b538f9d9e503b0dd43af60496780cb50755e4d8e5b34e5647b887675c1ae9fee", + "url": "https://files.pythonhosted.org/packages/15/5d/b6d0b71508e503c759a0a7563cb2c28716ec8af9828ca9f5b59023011406/simplejson-3.20.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "71b75d448fd0ceb2e7c90e72bb82c41f8462550d48529980bc0bab1d2495bfbb", - "url": "https://files.pythonhosted.org/packages/36/d9/87e5586e79d6a840eb4278e8b6a4c064a6ebe2276b211af20899e407629a/simplejson-3.20.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" + "hash": "ab998e416ded6c58f549a22b6a8847e75a9e1ef98eb9fbb2863e1f9e61a4105b", + "url": "https://files.pythonhosted.org/packages/19/24/40b3e5a3ca5e6f80cc1c639fcd5565ae087e72e8656dea780f02302ddc97/simplejson-3.20.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" }, { "algorithm": "sha256", - "hash": "e39eaa57c7757daa25bcd21f976c46be443b73dd6c3da47fe5ce7b7048ccefe2", - "url": "https://files.pythonhosted.org/packages/3c/2f/d0ff0b772d4ef092876eb85c99bc591c446b0502715551dad7dfc7f7c2c0/simplejson-3.20.1-cp39-cp39-macosx_11_0_arm64.whl" + "hash": "0e9b6d845a603b2eef3394eb5e21edb8626cd9ae9a8361d14e267eb969dbe413", + "url": "https://files.pythonhosted.org/packages/1d/48/7241daa91d0bf19126589f6a8dcbe8287f4ed3d734e76fd4a092708947be/simplejson-3.20.2-cp311-cp311-musllinux_1_2_x86_64.whl" }, { "algorithm": 
"sha256", - "hash": "6d4f320c33277a5b715db5bf5b10dae10c19076bd6d66c2843e04bd12d1f1ea5", - "url": "https://files.pythonhosted.org/packages/40/22/11c0f746bdb44c297cea8a37d8f7ccb75ea6681132aadfb9f820d9a52647/simplejson-3.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" + "hash": "b8205f113082e7d8f667d6cd37d019a7ee5ef30b48463f9de48e1853726c6127", + "url": "https://files.pythonhosted.org/packages/2a/13/f290da83da1083051b1665e2508a70821fc1a62c4b6d73f5c7baadcba26c/simplejson-3.20.2-cp38-cp38-macosx_10_9_universal2.whl" }, { "algorithm": "sha256", - "hash": "e91703a4c5fec53e36875ae426ad785f4120bd1d93b65bed4752eeccd1789e0c", - "url": "https://files.pythonhosted.org/packages/48/c7/361e7f6695b56001a04e0a5cc623cd6c82ea2f45e872e61213e405cc8a24/simplejson-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl" + "hash": "d8b6ff02fc7b8555c906c24735908854819b0d0dc85883d453e23ca4c0445d01", + "url": "https://files.pythonhosted.org/packages/2b/22/5e268bbcbe9f75577491e406ec0a5536f5b2fa91a3b52031fea51cd83e1d/simplejson-3.20.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "51b41f284d603c4380732d7d619f8b34bd04bc4aa0ed0ed5f4ffd0539b14da44", - "url": "https://files.pythonhosted.org/packages/4b/8f/9991582665a7b6d95415e439bb4fbaa4faf0f77231666675a0fd1de54107/simplejson-3.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl" + "hash": "5a7bbac80bdb82a44303f5630baee140aee208e5a4618e8b9fde3fc400a42671", + "url": "https://files.pythonhosted.org/packages/2e/d9/15036d7f43c6208fb0fbc827f9f897c1f577fba02aeb7a8a223581da4925/simplejson-3.20.2-cp39-cp39-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "3466d2839fdc83e1af42e07b90bc8ff361c4e8796cd66722a40ba14e458faddd", - "url": "https://files.pythonhosted.org/packages/4c/2d/ca3caeea0bdc5efc5503d5f57a2dfb56804898fb196dfada121323ee0ccb/simplejson-3.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" + "hash": "438680ddde57ea87161a4824e8de04387b328ad51cfdf1eaf723623a3014b7aa", + "url": "https://files.pythonhosted.org/packages/3f/49/976f59b42a6956d4aeb075ada16ad64448a985704bc69cd427a2245ce835/simplejson-3.20.2-cp311-cp311-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "a8011f1dd1d676befcd4d675ebdbfdbbefd3bf350052b956ba8c699fca7d8cef", - "url": "https://files.pythonhosted.org/packages/4c/ba/d32fe890a5edaf4a8518adf043bccf7866b600123f512a6de0988cf36810/simplejson-3.20.1-cp39-cp39-macosx_10_9_universal2.whl" + "hash": "5fe7a6ce14d1c300d80d08695b7f7e633de6cd72c80644021874d985b3393649", + "url": "https://files.pythonhosted.org/packages/41/f4/a1ac5ed32f7ed9a088d62a59d410d4c204b3b3815722e2ccfb491fa8251b/simplejson-3.20.2.tar.gz" }, { "algorithm": "sha256", - "hash": "2e671dd62051129185d3a9a92c60101f56cbc174854a1a3dfb69114ebd9e1699", - "url": "https://files.pythonhosted.org/packages/50/69/2d307ed022eba08c4ee51e1d8ba9dfa01a8bb276ee9aa70d7911d1044f34/simplejson-3.20.1-cp38-cp38-musllinux_1_2_i686.whl" + "hash": "51eccc4e353eed3c50e0ea2326173acdc05e58f0c110405920b989d481287e51", + "url": "https://files.pythonhosted.org/packages/43/91/43695f17b69e70c4b0b03247aa47fb3989d338a70c4b726bbdc2da184160/simplejson-3.20.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { "algorithm": "sha256", - "hash": "78520f04b7548a5e476b5396c0847e066f1e0a4c0c5e920da1ad65e95f410b11", - "url": "https://files.pythonhosted.org/packages/52/21/57fc47eab8c1c73390b933a5ba9271f08e3e1ec83162c580357f28f5b97c/simplejson-3.20.1-cp310-cp310-musllinux_1_2_i686.whl" + "hash": 
"e2162b2a43614727ec3df75baeda8881ab129824aa1b49410d4b6c64f55a45b4", + "url": "https://files.pythonhosted.org/packages/44/2b/dd9ec681115aa65620d57c88eb741bd7e7bc303ac6247554d854ee5168e6/simplejson-3.20.2-cp38-cp38-musllinux_1_2_i686.whl" }, { "algorithm": "sha256", - "hash": "f924b485537b640dc69434565463fd6fc0c68c65a8c6e01a823dd26c9983cf79", - "url": "https://files.pythonhosted.org/packages/54/53/2d93128bb55861b2fa36c5944f38da51a0bc6d83e513afc6f7838440dd15/simplejson-3.20.1-cp311-cp311-musllinux_1_2_aarch64.whl" + "hash": "bfe704864b5fead4f21c8d448a89ee101c9b0fc92a5f40b674111da9272b3a90", + "url": "https://files.pythonhosted.org/packages/48/5a/92bc0c1da0e805d4894ffe15a76af733e276d27eede5361c8be1c028e692/simplejson-3.20.2-cp38-cp38-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "6e6697a3067d281f01de0fe96fc7cba4ea870d96d7deb7bfcf85186d74456503", - "url": "https://files.pythonhosted.org/packages/54/ee/3c6e91989cdf65ec75e75662d9f15cfe167a792b893806169ea5b1da6fd2/simplejson-3.20.1-cp39-cp39-musllinux_1_2_x86_64.whl" + "hash": "3e22a5fb7b1437ffb057e02e1936a3bfb19084ae9d221ec5e9f4cf85f69946b6", + "url": "https://files.pythonhosted.org/packages/4b/42/6c9af551e5a8d0f171d6dce3d9d1260068927f7b80f1f09834e07887c8c4/simplejson-3.20.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { "algorithm": "sha256", - "hash": "7eaae2b88eb5da53caaffdfa50e2e12022553949b88c0df4f9a9663609373f72", - "url": "https://files.pythonhosted.org/packages/63/26/1c894a1c2bd95dc8be0cf5a2fa73b0d173105b6ca18c90cb981ff10443d0/simplejson-3.20.1-cp310-cp310-musllinux_1_2_x86_64.whl" + "hash": "cb11c09c99253a74c36925d461c86ea25f0140f3b98ff678322734ddc0f038d7", + "url": "https://files.pythonhosted.org/packages/5c/a2/1526d4152806670124dd499ff831726a92bd7e029e8349c4affa78ea8845/simplejson-3.20.2-cp39-cp39-musllinux_1_2_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "88a7baa8211089b9e58d78fbc1b0b322103f3f3d459ff16f03a36cece0d0fcf0", - "url": "https://files.pythonhosted.org/packages/71/c7/1970916e0c51794fff89f76da2f632aaf0b259b87753c88a8c409623d3e1/simplejson-3.20.1-cp311-cp311-macosx_10_9_x86_64.whl" + "hash": "b392e11c6165d4a0fde41754a0e13e1d88a5ad782b245a973dd4b2bdb4e5076a", + "url": "https://files.pythonhosted.org/packages/5d/02/290f7282eaa6ebe945d35c47e6534348af97472446951dce0d144e013f4c/simplejson-3.20.2-cp311-cp311-macosx_11_0_arm64.whl" }, { "algorithm": "sha256", - "hash": "03db8cb64154189a92a7786209f24e391644f3a3fa335658be2df2af1960b8d8", - "url": "https://files.pythonhosted.org/packages/74/30/20001219d6fdca4aaa3974c96dfb6955a766b4e2cc950505a5b51fd050b0/simplejson-3.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "cac78470ae68b8d8c41b6fca97f5bf8e024ca80d5878c7724e024540f5cdaadb", + "url": "https://files.pythonhosted.org/packages/60/c7/30bae30424ace8cd791ca660fed454ed9479233810fe25c3f3eab3d9dc7b/simplejson-3.20.2-cp311-cp311-musllinux_1_2_i686.whl" }, { "algorithm": "sha256", - "hash": "325b8c107253d3217e89d7b50c71015b5b31e2433e6c5bf38967b2f80630a8ca", - "url": "https://files.pythonhosted.org/packages/76/59/74bc90d1c051bc2432c96b34bd4e8036875ab58b4fcbe4d6a5a76985f853/simplejson-3.20.1-cp311-cp311-macosx_10_9_universal2.whl" + "hash": "c34e028a2ba8553a208ded1da5fa8501833875078c4c00a50dffc33622057881", + "url": "https://files.pythonhosted.org/packages/66/6f/3bd0007b64881a90a058c59a4869b1b4f130ddb86a726f884fafc67e5ef7/simplejson-3.20.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { "algorithm": "sha256", - "hash": 
"2b6436c48e64378fa844d8c9e58a5ed0352bbcfd4028369a9b46679b7ab79d2d", - "url": "https://files.pythonhosted.org/packages/78/e9/b7c4c26f29b41cc41ba5f0224c47adbfa7f28427418edfd58ab122f3b584/simplejson-3.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" + "hash": "e022b2c4c54cb4855e555f64aa3377e3e5ca912c372fa9e3edcc90ebbad93dce", + "url": "https://files.pythonhosted.org/packages/6b/92/a75df930e2ff29e37654b65fa6eebef53812fa7258a9df9c7ddbf60610d7/simplejson-3.20.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }, { "algorithm": "sha256", - "hash": "e25b2a0c396f3b84fb89573d07b0e1846ed563eb364f2ea8230ca92b8a8cb786", - "url": "https://files.pythonhosted.org/packages/7b/9a/088179435b2c6036c67ca03baf0623e22dbb6b02ddd0fa2451b3a4786207/simplejson-3.20.1-cp38-cp38-musllinux_1_2_ppc64le.whl" + "hash": "4ad4eac7d858947a30d2c404e61f16b84d16be79eb6fb316341885bdde864fa8", + "url": "https://files.pythonhosted.org/packages/6b/a2/cd2e10b880368305d89dd540685b8bdcc136df2b3c76b5ddd72596254539/simplejson-3.20.2-cp311-cp311-macosx_10_9_x86_64.whl" }, { "algorithm": "sha256", - "hash": "1bd6bfe5678d73fbd5328eea6a35216503796428fc47f1237432522febaf3a0c", - "url": "https://files.pythonhosted.org/packages/83/c5/0dbf3045eb6701a4b32cbbfa2813efa1b354078383c727ad593ebe280536/simplejson-3.20.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" + "hash": "2bfc1c396ad972ba4431130b42307b2321dba14d988580c1ac421ec6a6b7cee3", + "url": "https://files.pythonhosted.org/packages/71/b4/800f14728e2ad666f420dfdb57697ca128aeae7f991b35759c09356b829a/simplejson-3.20.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" }, { "algorithm": "sha256", - "hash": "4a8e197e4cf6d42c2c57e7c52cd7c1e7b3e37c5911df1314fb393320131e2101", - "url": "https://files.pythonhosted.org/packages/8f/53/f058e3a23a88834e113cf4a353acb5569f26995a07653d79425cffbb2c1b/simplejson-3.20.1-cp38-cp38-macosx_11_0_arm64.whl" + "hash": "5ef70ec8fe1569872e5a3e4720c1e1dcb823879a3c78bc02589eb88fab920b1f", + "url": "https://files.pythonhosted.org/packages/73/cc/18374fb9dfcb4827b692ca5a33bdb607384ca06cdb645e0b863022dae8a3/simplejson-3.20.2-cp39-cp39-musllinux_1_2_i686.whl" }, { "algorithm": "sha256", - "hash": "e66712b17d8425bb7ff8968d4c7c7fd5a2dd7bd63728b28356223c000dd2f91f", - "url": "https://files.pythonhosted.org/packages/95/60/8c97cdc93096437b0aca2745aca63c880fe2315fd7f6a6ce6edbb344a2ae/simplejson-3.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" + "hash": "6d7286dc11af60a2f76eafb0c2acde2d997e87890e37e24590bb513bec9f1bc5", + "url": "https://files.pythonhosted.org/packages/78/09/2bf3761de89ea2d91bdce6cf107dcd858892d0adc22c995684878826cc6b/simplejson-3.20.2-cp310-cp310-macosx_10_9_universal2.whl" }, { "algorithm": "sha256", - "hash": "9e8eacf6a3491bf76ea91a8d46726368a6be0eb94993f60b8583550baae9439e", - "url": "https://files.pythonhosted.org/packages/99/4c/dac310a98f897ad3435b4bdc836d92e78f09e38c5dbf28211ed21dc59fa2/simplejson-3.20.1-cp311-cp311-musllinux_1_2_i686.whl" + "hash": "7524e19c2da5ef281860a3d74668050c6986be15c9dd99966034ba47c68828c2", + "url": "https://files.pythonhosted.org/packages/79/3e/7f3b7b97351c53746e7b996fcd106986cda1954ab556fd665314756618d2/simplejson-3.20.2-cp311-cp311-musllinux_1_2_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "90b573693d1526bed576f6817e2a492eaaef68f088b57d7a9e83d122bbb49e51", - "url": 
"https://files.pythonhosted.org/packages/9a/6b/8d1e076c543277c1d603230eec24f4dd75ebce46d351c0679526d202981f/simplejson-3.20.1-cp39-cp39-musllinux_1_2_aarch64.whl" + "hash": "5de26f11d5aca575d3825dddc65f69fdcba18f6ca2b4db5cef16f41f969cef15", + "url": "https://files.pythonhosted.org/packages/86/6f/2de88bea65e0fdb91cc55624bd77e2eaa5c3acccc59287b058b376acc9a2/simplejson-3.20.2-cp38-cp38-musllinux_1_2_aarch64.whl" }, { "algorithm": "sha256", - "hash": "489c3a43116082bad56795215786313832ba3991cca1f55838e52a553f451ab6", - "url": "https://files.pythonhosted.org/packages/9d/d7/19782d2c5393a44d3426be822ac0925b540d8c0ccb0431c18444794bfad3/simplejson-3.20.1-cp38-cp38-musllinux_1_2_x86_64.whl" + "hash": "e11a1d6b2f7e72ca546bdb4e6374b237ebae9220e764051b867111df83acbd13", + "url": "https://files.pythonhosted.org/packages/92/ee/8f45174d2988ec5242ab3c9229693ed6b839a4eb77ee42d7c470cc5846ab/simplejson-3.20.2-cp38-cp38-musllinux_1_2_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "7551682b60bba3a9e2780742e101cf0a64250e76de7d09b1c4b0c8a7c7cc6834", - "url": "https://files.pythonhosted.org/packages/a9/85/89a0d847239a2696068503810880ee73b87177465a86a7942c95012fb875/simplejson-3.20.1-cp38-cp38-macosx_10_9_universal2.whl" + "hash": "fc8da64929ef0ff16448b602394a76fd9968a39afff0692e5ab53669df1f047f", + "url": "https://files.pythonhosted.org/packages/99/d9/d23e9f96762224870af95adafcd5d4426f5285b046ed331b034c6d5a8554/simplejson-3.20.2-cp38-cp38-macosx_10_9_x86_64.whl" }, { "algorithm": "sha256", - "hash": "74a1608f9e6e8c27a4008d70a54270868306d80ed48c9df7872f9f4b8ac87808", - "url": "https://files.pythonhosted.org/packages/a9/c8/3d92b67e03a3b6207d97202669f9454ed700b35ade9bd4428265a078fb6c/simplejson-3.20.1-cp310-cp310-macosx_11_0_arm64.whl" + "hash": "306e83d7c331ad833d2d43c76a67f476c4b80c4a13334f6e34bb110e6105b3bd", + "url": "https://files.pythonhosted.org/packages/9b/4b/fdcaf444ac1c3cbf1c52bf00320c499e1cf05d373a58a3731ae627ba5e2d/simplejson-3.20.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "f4bd49ecde87b0fe9f55cc971449a32832bca9910821f7072bbfae1155eaa007", - "url": "https://files.pythonhosted.org/packages/ad/cc/7cfd78d1e0fa5e57350b98cfe77353b6dfa13dce21afa4060e1019223852/simplejson-3.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl" + "hash": "66f7c78c6ef776f8bd9afaad455e88b8197a51e95617bcc44b50dd974a7825ba", + "url": "https://files.pythonhosted.org/packages/a4/77/fc16d41b5f67a2591c9b6ff7b0f6aed2b2aed1b6912bb346b61279697638/simplejson-3.20.2-cp39-cp39-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "e64139b4ec4f1f24c142ff7dcafe55a22b811a74d86d66560c8815687143037d", - "url": "https://files.pythonhosted.org/packages/af/92/51b417685abd96b31308b61b9acce7ec50d8e1de8fbc39a7fd4962c60689/simplejson-3.20.1.tar.gz" + "hash": "daf7cd18fe99eb427fa6ddb6b437cfde65125a96dc27b93a8969b6fe90a1dbea", + "url": "https://files.pythonhosted.org/packages/a5/ac/ab88e99111307eba64bcfbef45e8aa57240a19e019c2dc29269806d2f4a0/simplejson-3.20.2-cp38-cp38-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "69dd28d4ce38390ea4aaf212902712c0fd1093dc4c1ff67e09687c3c3e15a749", - "url": "https://files.pythonhosted.org/packages/b8/c8/b072b741129406a7086a0799c6f5d13096231bf35fdd87a0cffa789687fc/simplejson-3.20.1-cp311-cp311-musllinux_1_2_x86_64.whl" + "hash": "5d6f5bacb8cdee64946b45f2680afa3f54cd38e62471ceda89f777693aeca4e4", + "url": 
"https://files.pythonhosted.org/packages/aa/b0/94ad2cf32f477c449e1f63c863d8a513e2408d651c4e58fe4b6a7434e168/simplejson-3.20.2-cp310-cp310-musllinux_1_2_i686.whl" }, { "algorithm": "sha256", - "hash": "a2cc4f6486f9f515b62f5831ff1888886619b84fc837de68f26d919ba7bbdcbc", - "url": "https://files.pythonhosted.org/packages/bb/9e/da184f0e9bb3a5d7ffcde713bd41b4fe46cca56b6f24d9bd155fac56805a/simplejson-3.20.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "b3bf76512ccb07d47944ebdca44c65b781612d38b9098566b4bb40f713fc4047", + "url": "https://files.pythonhosted.org/packages/b8/2d/7c4968c60ddc8b504b77301cc80d6e75cd0269b81a779b01d66d8f36dcb8/simplejson-3.20.2-cp39-cp39-macosx_10_9_universal2.whl" }, { "algorithm": "sha256", - "hash": "6bd09c8c75666e7f62a33d2f1fb57f81da1fcbb19a9fe7d7910b5756e1dd6048", - "url": "https://files.pythonhosted.org/packages/c0/54/160fb59ef3441aae419d26a3fdb57648755594de43afb4406c25cf4908a1/simplejson-3.20.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "06190b33cd7849efc413a5738d3da00b90e4a5382fd3d584c841ac20fb828c6f", + "url": "https://files.pythonhosted.org/packages/b9/3e/96898c6c66d9dca3f9bd14d7487bf783b4acc77471b42f979babbb68d4ca/simplejson-3.20.2-cp311-cp311-macosx_10_9_universal2.whl" }, { "algorithm": "sha256", - "hash": "299b1007b8101d50d95bc0db1bf5c38dc372e85b504cf77f596462083ee77e3f", - "url": "https://files.pythonhosted.org/packages/c8/0d/98cc5909180463f1d75fac7180de62d4cdb4e82c4fef276b9e591979372c/simplejson-3.20.1-cp311-cp311-macosx_11_0_arm64.whl" + "hash": "6a8f1c307edf5fbf0c6db3396c5d3471409c4a40c7a2a466fbc762f20d46601a", + "url": "https://files.pythonhosted.org/packages/b9/8c/8fc2c2734ac9e514124635b25ca8f7e347db1ded4a30417ee41e78e6d61c/simplejson-3.20.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }, { "algorithm": "sha256", - "hash": "272cc767826e924a6bd369ea3dbf18e166ded29059c7a4d64d21a9a22424b5b5", - "url": "https://files.pythonhosted.org/packages/d1/46/7b74803de10d4157c5cd2e89028897fa733374667bc5520a44b23b6c887a/simplejson-3.20.1-cp39-cp39-musllinux_1_2_i686.whl" + "hash": "40ca7cbe7d2f423b97ed4e70989ef357f027a7e487606628c11b79667639dc84", + "url": "https://files.pythonhosted.org/packages/bc/42/1ae6f9735d8fe47a638c5a342b835a2108ae7d7f79e7f83224d72c87cc81/simplejson-3.20.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" }, { "algorithm": "sha256", - "hash": "3e7963197d958fcf9e98b212b80977d56c022384621ff463d98afc3b6b1ce7e8", - "url": "https://files.pythonhosted.org/packages/db/c4/6d59270529aa4c1ca6dfffefa5c72469fa471fdaac7188532b7296e10880/simplejson-3.20.1-cp38-cp38-musllinux_1_2_aarch64.whl" + "hash": "3a97249ee1aee005d891b5a211faf58092a309f3d9d440bc269043b08f662eda", + "url": "https://files.pythonhosted.org/packages/c1/b9/c54eef4226c6ac8e9a389bbe5b21fef116768f97a2dc1a683c716ffe66ef/simplejson-3.20.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }, { "algorithm": "sha256", - "hash": "d492ed8e92f3a9f9be829205f44b1d0a89af6582f0cf43e0d129fa477b93fe0c", - "url": "https://files.pythonhosted.org/packages/e1/33/d3e0779d5c58245e7370c98eb969275af6b7a4a5aec3b97cbf85f09ad328/simplejson-3.20.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "f820a6ac2ef0bc338ae4963f4f82ccebdb0824fe9caf6d660670c578abe01013", + "url": 
"https://files.pythonhosted.org/packages/c4/83/21550f81a50cd03599f048a2d588ffb7f4c4d8064ae091511e8e5848eeaa/simplejson-3.20.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl" }, { "algorithm": "sha256", - "hash": "03ec618ed65caab48e81e3ed29586236a8e57daef792f1f3bb59504a7e98cd10", - "url": "https://files.pythonhosted.org/packages/e1/94/a30a5211a90d67725a3e8fcc1c788189f2ae2ed2b96b63ed15d0b7f5d6bb/simplejson-3.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl" + "hash": "c0a341f7cc2aae82ee2b31f8a827fd2e51d09626f8b3accc441a6907c88aedb7", + "url": "https://files.pythonhosted.org/packages/c7/28/c32121064b1ec2fb7b5d872d9a1abda62df064d35e0160eddfa907118343/simplejson-3.20.2-cp310-cp310-musllinux_1_2_x86_64.whl" }, { "algorithm": "sha256", - "hash": "bd9577ec1c8c3a43040e3787711e4c257c70035b7551a21854b5dec88dad09e1", - "url": "https://files.pythonhosted.org/packages/e6/74/018dd0d40cb7dd339af0a9fa992a264f347844c6ce9a6cd7a527d37d215b/simplejson-3.20.1-cp38-cp38-macosx_10_9_x86_64.whl" + "hash": "21e7a066528a5451433eb3418184f05682ea0493d14e9aae690499b7e1eb6b81", + "url": "https://files.pythonhosted.org/packages/cf/54/d76c0e72ad02450a3e723b65b04f49001d0e73218ef6a220b158a64639cb/simplejson-3.20.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" }, { "algorithm": "sha256", - "hash": "a7e15b716d09f318c8cda3e20f82fae81684ce3d3acd1d7770fa3007df1769de", - "url": "https://files.pythonhosted.org/packages/eb/33/3c2cbeab7d0e8f8ba36e9cd5d85ee68e6d7ed4be93bbedd75090f5bc2961/simplejson-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl" + "hash": "8db6841fb796ec5af632f677abf21c6425a1ebea0d9ac3ef1a340b8dc69f52b8", + "url": "https://files.pythonhosted.org/packages/e5/46/827731e4163be3f987cb8ee90f5d444161db8f540b5e735355faa098d9bc/simplejson-3.20.2-cp310-cp310-musllinux_1_2_ppc64le.whl" }, { "algorithm": "sha256", - "hash": "cd2cdead1d3197f0ff43373cf4730213420523ba48697743e135e26f3d179f38", - "url": "https://files.pythonhosted.org/packages/ee/08/cdb6821f1058eb5db46d252de69ff7e6c53f05f1bae6368fe20d5b51d37e/simplejson-3.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl" + "hash": "214e26acf2dfb9ff3314e65c4e168a6b125bced0e2d99a65ea7b0f169db1e562", + "url": "https://files.pythonhosted.org/packages/e8/e4/d96b56fb87f245240b514c1fe552e76c17e09f0faa1f61137b2296f81529/simplejson-3.20.2-cp39-cp39-macosx_10_9_x86_64.whl" }, { "algorithm": "sha256", - "hash": "d34d04bf90b4cea7c22d8b19091633908f14a096caa301b24c2f3d85b5068fb8", - "url": "https://files.pythonhosted.org/packages/ee/22/d7ba958cfed39827335b82656b1c46f89678faecda9a7677b47e87b48ee6/simplejson-3.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl" + "hash": "a16b029ca25645b3bc44e84a4f941efa51bf93c180b31bd704ce6349d1fc77c1", + "url": "https://files.pythonhosted.org/packages/fa/ea/ae47b04d03c7c8a7b7b1a8b39a6e27c3bd424e52f4988d70aca6293ff5e5/simplejson-3.20.2-cp310-cp310-macosx_11_0_arm64.whl" } ], "project_name": "simplejson", "requires_dists": [], "requires_python": "!=3.0.*,!=3.1.*,!=3.2.*,>=2.5", - "version": "3.20.1" + "version": "3.20.2" }, { "artifacts": [ @@ -5645,21 +5634,21 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "6d7019d4d0d249b3116e0dc0c4a890c88e7795fbcd67ccbdadcccb4b2b1dce8d", - "url": "https://files.pythonhosted.org/packages/f8/a1/df863f4bb3524ce1ef186a02f0612544f10bb219e788db8483a867708a30/st2_auth_backend_flat_file-0.3.0-py3-none-any.whl" + "hash": 
"ca78524b296fd1ece20a4515d2eb745d31a06e7e7845beb1bf1dbf0baa35687b", + "url": "https://files.pythonhosted.org/packages/28/c7/ce6a10b839e08482d686290201f05b227e724c2aeafadc3bf579dbe7c346/st2_auth_backend_flat_file-0.4.1-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "ffd3e752b57a25a7d236fe3c09818f7ff4c897ee1938d2abd7f349fc720a9031", - "url": "https://files.pythonhosted.org/packages/8d/ff/f7ca53ded009ae427f22f617de1e73d88b4221218111159a5205b52e2b94/st2-auth-backend-flat-file-0.3.0.tar.gz" + "hash": "6eb2e6e4f957fc31e85698232fbc7cda0b84580a93fd87f8797eae95c5dd4659", + "url": "https://files.pythonhosted.org/packages/8e/5e/19b41cc213bedef9107a8ad0d8c21107bc7c26dd54697fd8611051914c41/st2-auth-backend-flat-file-0.4.1.tar.gz" } ], "project_name": "st2-auth-backend-flat-file", "requires_dists": [ - "passlib[bcrypt]<1.8.0,>=1.7.1" + "bcrypt>=4.3.0" ], "requires_python": ">=3.8", - "version": "0.3.0" + "version": "0.4.1" }, { "artifacts": [ @@ -5694,7 +5683,7 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "d34d5cbb539182553ec8b35a0763decb6b66bd37aa51f3a61db1eb30449f6f10", + "hash": "f445faec6e5f55437f379be08dd78aa2f1181ed9dbd04fe19bddfb6f4c2fe911", "url": "git+https://github.com/StackStorm/st2-rbac-backend.git@master" } ], @@ -6345,21 +6334,19 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", - "url": "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl" + "hash": "a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", + "url": "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", - "url": "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz" + "hash": "4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", + "url": "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz" } ], "project_name": "wcwidth", - "requires_dists": [ - "backports.functools-lru-cache>=1.2.1; python_version < \"3.2\"" - ], - "requires_python": null, - "version": "0.2.13" + "requires_dists": [], + "requires_python": ">=3.6", + "version": "0.2.14" }, { "artifacts": [ diff --git a/lockfiles/twine.lock b/lockfiles/twine.lock index 14a32def44..0789ebaa53 100644 --- a/lockfiles/twine.lock +++ b/lockfiles/twine.lock @@ -1087,19 +1087,19 @@ "artifacts": [ { "algorithm": "sha256", - "hash": "c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", - "url": "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl" + "hash": "e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", + "url": "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl" }, { "algorithm": "sha256", - "hash": "491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", - "url": "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz" + "hash": "78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", + "url": 
"https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz" } ], "project_name": "pycparser", "requires_dists": [], "requires_python": ">=3.8", - "version": "2.22" + "version": "2.23" }, { "artifacts": [ diff --git a/requirements.txt b/requirements.txt index d2dd4bf19d..58de748575 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,7 +10,7 @@ amqp==5.3.1 apscheduler==3.11.0 argcomplete==3.6.2 backports.zoneinfo[tzdata]; python_version<"3.9" -bcrypt==4.3.0 +bcrypt==5.0.0 cffi==1.17.1 chardet==5.2.0 ciso8601 @@ -24,7 +24,7 @@ gitdb==4.0.12 gitpython==3.1.45 greenlet==3.1.1 gunicorn==23.0.0 -importlib-metadata==7.1.0 +importlib-metadata==8.5.0 jinja2==3.1.6 jsonpath-rw==1.4.0 jsonschema==3.2.0 @@ -39,9 +39,9 @@ orquesta@ git+https://github.com/StackStorm/orquesta.git@5ba1467614b2ef8b4709b2c oslo.config==9.6.0 oslo.utils==7.3.0 paramiko==3.5.1 -prettytable==3.10.2 +prettytable==3.11.0 prompt-toolkit==3.0.52 -psutil==7.0.0 +psutil==7.1.0 pyOpenSSL pygments==2.19.2 pyinotify==0.9.6 ; platform_system=="Linux" @@ -55,7 +55,7 @@ python-json-logger python-statsd==2.1.0 pytz==2025.2 pywinrm==0.5.0 -pyyaml==6.0.2 +pyyaml==6.0.3 redis==6.1.1 requests==2.32.4 retrying==1.4.2 diff --git a/st2actions/requirements.txt b/st2actions/requirements.txt index 07e1665435..befa78509a 100644 --- a/st2actions/requirements.txt +++ b/st2actions/requirements.txt @@ -20,7 +20,7 @@ pyinotify==0.9.6 ; platform_system=="Linux" pyparsing==3.1.4 python-dateutil==2.9.0.post0 python-json-logger -pyyaml==6.0.2 +pyyaml==6.0.3 requests==2.32.4 setuptools<78 six==1.17.0 diff --git a/st2auth/requirements.txt b/st2auth/requirements.txt index 65fe94a584..19dbe7456f 100644 --- a/st2auth/requirements.txt +++ b/st2auth/requirements.txt @@ -5,7 +5,7 @@ # If you want to update depdencies for a single component, modify the # in-requirements.txt for that component and then run 'make requirements' to # update the component requirements.txt -bcrypt==4.3.0 +bcrypt==5.0.0 eventlet==0.39.1 gunicorn==23.0.0 oslo.config==9.6.0 diff --git a/st2client/requirements.txt b/st2client/requirements.txt index 9751f730c7..30534da048 100644 --- a/st2client/requirements.txt +++ b/st2client/requirements.txt @@ -10,18 +10,18 @@ cffi==1.17.1 chardet==5.2.0 cryptography==43.0.3 editor==1.6.6 -importlib-metadata==7.1.0 +importlib-metadata==8.5.0 jsonpath-rw==1.4.0 jsonschema==3.2.0 orjson==3.10.15 -prettytable==3.10.2 +prettytable==3.11.0 prompt-toolkit==3.0.52 pyOpenSSL pygments==2.19.2 pysocks python-dateutil==2.9.0.post0 pytz==2025.2 -pyyaml==6.0.2 +pyyaml==6.0.3 requests==2.32.4 six==1.17.0 sseclient-py==1.8.0 diff --git a/st2common/requirements.txt b/st2common/requirements.txt index 7c2e0901e7..29c1dcd6fa 100644 --- a/st2common/requirements.txt +++ b/st2common/requirements.txt @@ -34,7 +34,7 @@ pyOpenSSL pymongo==4.6.3 python-dateutil==2.9.0.post0 python-statsd==2.1.0 -pyyaml==6.0.2 +pyyaml==6.0.3 redis==6.1.1 requests==2.32.4 retrying==1.4.2 diff --git a/st2tests/requirements.txt b/st2tests/requirements.txt index 3bbd077da1..27f69000ae 100644 --- a/st2tests/requirements.txt +++ b/st2tests/requirements.txt @@ -7,7 +7,7 @@ # update the component requirements.txt RandomWords mock==5.2.0 -psutil==7.0.0 +psutil==7.1.0 pyrabbit pytest==7.0.1 webtest==3.0.1 diff --git a/test-requirements.txt b/test-requirements.txt index fdf1e3f36f..9967526293 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -15,7 +15,7 @@ black==22.3.0 pre-commit==3.5.0 bandit==1.7.10 isort>=4.2.5 
-mock==5.1.0 +mock==5.2.0 tabulate # 4.5.0 required for Jinja-3.1.3 support but >5.0 required by rstcheck and lower than 7.2 which drops py3.8 support sphinx>=5.0.0,<7.2.0 @@ -23,12 +23,12 @@ sphinx-autobuild # pin alabaster (sphinx dependency) or pip installs one that is not compatible alabaster<0.7.14 # Required by st2client tests -pyyaml==6.0.2 +pyyaml==6.0.3 # Constrain pygments required by editor to align with st2 core version pygments==2.19.2 RandomWords gunicorn==23.0.0 -psutil==7.0.0 +psutil==7.1.0 webtest==3.0.1 # Bump to latest to meet sphinx requirements. rstcheck==6.2.1 From 75414e28cc2eead63cae40c255b6f13aa6e56bd9 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 21:14:19 -0500 Subject: [PATCH 096/102] typo fixes --- .github/workflows/lockfiles.yaml | 2 +- scripts/lockfiles_to_reqs.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lockfiles.yaml b/.github/workflows/lockfiles.yaml index 29b64b0429..27886683fa 100644 --- a/.github/workflows/lockfiles.yaml +++ b/.github/workflows/lockfiles.yaml @@ -347,7 +347,7 @@ jobs: - name: Upload lockfile diff picture to S3 # GitHub flavored markdown does not accept data:image/png URIs for images. # And GitHub does not provide any convenient APIs for uploading image assets. - # So, we upload to AWS and save the URI for in generated markdown below. + # So, we upload to AWS and save the URI for use in generated markdown below. if: steps.lockfile.outputs.CHANGED == 'true' # NOTE: The packaged s3 actions are too old, incomplete, or do not allow setting content-type + tags. env: diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index dcf6b3c3d0..3fdbe65336 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -38,7 +38,7 @@ def strip_comments_from_pex_json_lockfile(lockfile_bytes: bytes) -> bytes: Copied from code by Pants Project Contributors (Apache 2.0 licensed): https://github.com/pantsbuild/pants/blob/release_2.25.0/src/python/pants/backend/python/util_rules/pex_requirements.py#L119-L127 - TODO: delete this once we getrid of the legacy fixate requirements files. + TODO: delete this once we get rid of the legacy fixate requirements files. 
""" return b"\n".join( line From 74ca6b2da382bd1951d8b497e4658f371b6b26d6 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 21:14:41 -0500 Subject: [PATCH 097/102] fmt w/ black --- scripts/lockfiles_to_reqs.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index 3fdbe65336..50d5b96fc8 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -18,7 +18,11 @@ import logging from pathlib import Path -from fixate_requirements import load_fixed_requirements, parse_req_from_line, write_requirements +from fixate_requirements import ( + load_fixed_requirements, + parse_req_from_line, + write_requirements, +) LOG = logging.getLogger(__name__) @@ -143,14 +147,18 @@ def copy_locked_versions_into_legacy_requirements_files(): def fixate_legacy_requirements_files(): # based on .requirements Makefile target - skip=["virtualenv", "virtualenv-osx"] + skip = ["virtualenv", "virtualenv-osx"] workspace = Path(".") sources = list(workspace.glob("st2*/in-requirements.txt")) sources.extend(list(workspace.glob("contrib/runners/*/in-requirements.txt"))) output = "requirements.txt" - LOG.info("Updating (fixating) %s files with requirements from %s", output, FIXED_REQUIREMENTS) + LOG.info( + "Updating (fixating) %s files with requirements from %s", + output, + FIXED_REQUIREMENTS, + ) write_requirements( sources=[str(source) for source in sources], fixed_requirements=FIXED_REQUIREMENTS, From 6c52aef5ad8ba76b836b7897785e166811592015 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 22:31:01 -0500 Subject: [PATCH 098/102] update Makefile in scripts/lockfiles_to_reqs.py --- scripts/lockfiles_to_reqs.py | 47 ++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/scripts/lockfiles_to_reqs.py b/scripts/lockfiles_to_reqs.py index 50d5b96fc8..3a4e552f62 100644 --- a/scripts/lockfiles_to_reqs.py +++ b/scripts/lockfiles_to_reqs.py @@ -30,6 +30,7 @@ FIXED_REQUIREMENTS = "fixed-requirements.txt" TEST_REQUIREMENTS = "test-requirements.txt" +MAKEFILE = "Makefile" _LOCKFILE = "lockfiles/{resolve}.lock" TOOL_RESOLVES = ("st2", "bandit", "flake8", "pylint", "black") @@ -103,14 +104,50 @@ def do_updates(path, old_reqs, reqs_updates): path.write_text("\n".join(lines) + "\n") +def load_makefile_reqs(path): + lines = path.read_text().splitlines() + line_prefixes = {"pip": "PIP_VERSION ?= ", "setuptools": "SETUPTOOLS_VERSION ?= "} + requirements = {"pip": None, "setuptools": None} + for index, line in enumerate(lines): + for name, prefix in line_prefixes.items(): + if line.startswith(prefix): + version = line[len(prefix) :].strip() + requirements[name] = (index, prefix, version) + if None not in requirements.values(): + break + return requirements + + +def plan_makefile_update(old_reqs, name, version, reqs_updates): + if name not in old_reqs: + # this shouldn't happen + return + index, prefix, old_version = old_reqs[name] + if old_version != version: + reqs_updates[name] = (index, f"{prefix}{version}") + + +def do_makefile_updates(path, reqs_updates): + lines = path.read_text().splitlines() + for name, info in reqs_updates.items(): + index, line = info + lines[index] = line + path.write_text("\n".join(lines) + "\n") + + def copy_locked_versions_into_legacy_requirements_files(): fixed_path = Path(FIXED_REQUIREMENTS).resolve() test_path = Path(TEST_REQUIREMENTS).resolve() + makefile_path = Path(MAKEFILE).resolve() + fixed_reqs = 
load_fixed_requirements(FIXED_REQUIREMENTS) test_reqs = load_fixed_requirements(TEST_REQUIREMENTS) + makefile_reqs = load_makefile_reqs(makefile_path) + locked_in_makefile = ("pip", "setuptools") fixed_reqs_updates = {} test_reqs_updates = {} + makefile_reqs_updates = {} LOG.info("Looking for verion changes") handled = [] @@ -129,6 +166,10 @@ def copy_locked_versions_into_legacy_requirements_files(): continue plan_update(fixed_reqs, name, version, fixed_reqs_updates) plan_update(test_reqs, name, version, test_reqs_updates) + if name in locked_in_makefile: + plan_makefile_update( + makefile_reqs, name, version, makefile_reqs_updates + ) handled.append(name) if not fixed_reqs_updates: @@ -143,6 +184,12 @@ def copy_locked_versions_into_legacy_requirements_files(): LOG.info("Updating %s", TEST_REQUIREMENTS) do_updates(test_path, test_reqs, test_reqs_updates) + if not makefile_reqs_updates: + LOG.info("No updates required in %s", MAKEFILE) + else: + LOG.info("Updating %s", MAKEFILE) + do_makefile_updates(makefile_path, makefile_reqs_updates) + LOG.info("Done updating %s and %s", FIXED_REQUIREMENTS, TEST_REQUIREMENTS) From 90b206d566d62293546db1851400d87facb58607 Mon Sep 17 00:00:00 2001 From: guzzijones12 Date: Thu, 18 Sep 2025 11:02:04 -0400 Subject: [PATCH 099/102] use pip install for st2client checking Cherry-picked from: 089d46c1df3405336c108283747600dc8773b7b6 --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index de4787198e..84679bceda 100644 --- a/Makefile +++ b/Makefile @@ -537,7 +537,7 @@ flake8: requirements .flake8 $(VIRTUALENV_ST2CLIENT_DIR)/bin/pip install --upgrade "pip==$(PIP_VERSION)" $(VIRTUALENV_ST2CLIENT_DIR)/bin/pip install --upgrade "setuptools==$(SETUPTOOLS_VERSION)" - $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate; cd st2client ; ../$(VIRTUALENV_ST2CLIENT_DIR)/bin/python setup.py install ; cd .. + $(VIRTUALENV_ST2CLIENT_DIR)/bin/activate; cd st2client ; ../$(VIRTUALENV_ST2CLIENT_DIR)/bin/pip install . ; cd .. 
$(VIRTUALENV_ST2CLIENT_DIR)/bin/st2 --version $(VIRTUALENV_ST2CLIENT_DIR)/bin/python -c "import st2client" From c62447749cfec43ac0e35c95b9c7028a5f31530d Mon Sep 17 00:00:00 2001 From: guzzijones12 Date: Mon, 22 Sep 2025 11:34:25 -0400 Subject: [PATCH 100/102] update st2client license --- st2client/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/st2client/setup.py b/st2client/setup.py index 1fe8ec1af3..4111297bae 100644 --- a/st2client/setup.py +++ b/st2client/setup.py @@ -46,12 +46,12 @@ author="StackStorm", author_email="info@stackstorm.com", url="https://stackstorm.com/", + license="Apache-2.0", classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Information Technology", "Intended Audience :: Developers", "Intended Audience :: System Administrators", - "License :: OSI Approved :: Apache Software License", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 3", From a17fab80d678083a1d3e38d50e112207fd899374 Mon Sep 17 00:00:00 2001 From: guzzijones12 Date: Tue, 23 Sep 2025 09:22:04 -0400 Subject: [PATCH 101/102] st2client classifiers --- st2client/setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/st2client/setup.py b/st2client/setup.py index 4111297bae..f6173561f8 100644 --- a/st2client/setup.py +++ b/st2client/setup.py @@ -55,7 +55,9 @@ "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", ], install_requires=install_reqs, dependency_links=dep_links, From 8a99dddfd22f5a4e478a066a350d3e80d6406572 Mon Sep 17 00:00:00 2001 From: Jacob Floyd Date: Tue, 30 Sep 2025 23:18:49 -0500 Subject: [PATCH 102/102] constrain pyOpenSSL dep to fix CircleCI --- fixed-requirements.txt | 5 +++-- requirements.txt | 2 +- st2client/requirements.txt | 2 +- st2common/requirements.txt | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/fixed-requirements.txt b/fixed-requirements.txt index 873e5d4e4a..b83d0eca98 100644 --- a/fixed-requirements.txt +++ b/fixed-requirements.txt @@ -39,8 +39,9 @@ pyinotify==0.9.6 ; platform_system=="Linux" pymongo==4.6.3 pyparsing==3.1.4 zstandard==0.23.0 -# pyOpenSSL 23.1.0 supports cryptography up to 40.0.x -#pyOpenSSL==23.1.0 +# pyOpenSSL is a candidate for removal and is not in lockfiles/st2.lock since it is not used. 
+# pyOpenSSL 25.1.0 supports cryptography>=41.0.5,<46 +pyOpenSSL<25.2 # 202403: switch from python-editor to editor for py3.10 support editor==1.6.6 # editor dependency, required here for inclusion in st2client setup.py diff --git a/requirements.txt b/requirements.txt index 58de748575..782de672fc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -42,7 +42,7 @@ paramiko==3.5.1 prettytable==3.11.0 prompt-toolkit==3.0.52 psutil==7.1.0 -pyOpenSSL +pyOpenSSL<25.2 pygments==2.19.2 pyinotify==0.9.6 ; platform_system=="Linux" pymongo==4.6.3 diff --git a/st2client/requirements.txt b/st2client/requirements.txt index 30534da048..bf6a42b3ef 100644 --- a/st2client/requirements.txt +++ b/st2client/requirements.txt @@ -16,7 +16,7 @@ jsonschema==3.2.0 orjson==3.10.15 prettytable==3.11.0 prompt-toolkit==3.0.52 -pyOpenSSL +pyOpenSSL<25.2 pygments==2.19.2 pysocks python-dateutil==2.9.0.post0 diff --git a/st2common/requirements.txt b/st2common/requirements.txt index 29c1dcd6fa..1aa9d079ee 100644 --- a/st2common/requirements.txt +++ b/st2common/requirements.txt @@ -30,7 +30,7 @@ orjson==3.10.15 orquesta@ git+https://github.com/StackStorm/orquesta.git@5ba1467614b2ef8b4709b2ca89e68baa671e8975 oslo.config==9.6.0 paramiko==3.5.1 -pyOpenSSL +pyOpenSSL<25.2 pymongo==4.6.3 python-dateutil==2.9.0.post0 python-statsd==2.1.0