Compare commits


No commits in common. "699cd6103f50bf5c3b2f070c70712d109c168e6c" and "3fbcf7ccf443305955ce16db9de8401f7dc1c7dd" have entirely different histories.

5 changed files with 128 additions and 89 deletions

.pre-commit-config.yaml

@@ -17,12 +17,12 @@ repos:
     - --honor-noqa
 - repo: https://github.com/Lucas-C/pre-commit-hooks.git
-  rev: v1.5.5
+  rev: v1.5.4
   hooks:
   - id: remove-tabs
 - repo: https://github.com/python-jsonschema/check-jsonschema.git
-  rev: 0.28.1
+  rev: 0.27.3
   hooks:
   - id: check-github-actions
   - id: check-github-workflows
@@ -67,7 +67,7 @@ repos:
   - id: codespell
 - repo: https://github.com/adrienverge/yamllint.git
-  rev: v1.35.1
+  rev: v1.33.0
   hooks:
   - id: yamllint
     files: \.(yaml|yml)$
@@ -78,13 +78,13 @@ repos:
     - --strict
 - repo: https://github.com/PyCQA/flake8.git
-  rev: 7.0.0
+  rev: 6.1.0
   hooks:
   - id: flake8
+    alias: flake8-no-wps
+    name: flake8 WPS-excluded
     args:
     - --ignore
-    # NOTE: WPS326: Found implicit string concatenation
-    # NOTE: WPS332: Found walrus operator
     - >-
       D100,
       D101,
@@ -92,6 +92,22 @@ repos:
       D107,
       E402,
       E501,
+    additional_dependencies:
+    - flake8-2020 ~= 1.7.0
+    - flake8-pytest-style ~= 1.6.0
+- repo: https://github.com/PyCQA/flake8.git
+  # NOTE: This is kept at v4 for until WPS starts supporting flake v5.
+  rev: 4.0.1 # enforce-version: 4.0.1
+  hooks:
+  - id: flake8
+    alias: flake8-only-wps
+    name: flake8 WPS-only
+    args:
+    - --ignore
+    # NOTE: WPS326: Found implicit string concatenation
+    # NOTE: WPS332: Found walrus operator
+    - >-
       WPS102,
       WPS110,
       WPS111,
@@ -108,14 +124,13 @@ repos:
       WPS440,
       WPS441,
       WPS453,
+    - --select
+    - WPS
     additional_dependencies:
-    - flake8-2020 ~= 1.7.0
-    - flake8-pytest-style ~= 1.6.0
-    - wemake-python-styleguide ~= 0.19.0
-    language_version: python3.11 # flake8-commas doesn't work w/ Python 3.12
+    - wemake-python-styleguide ~= 0.17.0
 - repo: https://github.com/PyCQA/pylint.git
-  rev: v3.1.0
+  rev: v3.0.3
   hooks:
   - id: pylint
     args:
@@ -134,4 +149,36 @@ repos:
     - --output-format
     - colorized
+- repo: local
+  hooks:
+  - id: enforced-flake8-version
+    name: Verify that enforced flake8 version stays unchanged
+    description: >-
+      This is a sanity check and fixer that makes sure that
+      the `flake8` version in this file remains matching the
+      corresponding request in the `# enforce-version` comment.
+    # Using Python here because using
+    # shell test does not always work in CIs:
+    entry: >-
+      python -c 'import pathlib, re, sys;
+      pre_commit_config = pathlib.Path(sys.argv[1]);
+      cfg_txt = pre_commit_config.read_text();
+      new_cfg_txt = re.sub(
+      r"(?P<spaces>\s+)rev:\s(?:\d+\.\d+\.\d+)\s{0,2}"
+      r"#\senforce-version:\s(?P<enforced_version>\d+\.\d+\.\d+)"
+      r"[ \t\f\v]*",
+      r"\g<spaces>rev: \g<enforced_version> "
+      r"# enforce-version: \g<enforced_version>",
+      cfg_txt,
+      );
+      cfg_txt != new_cfg_txt and
+      pre_commit_config.write_text(new_cfg_txt)
+      '
+    pass_filenames: true
+    language: system
+    files: >-
+      ^\.pre-commit-config\.ya?ml$
+    types:
+    - yaml
 ...
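The `enforced-flake8-version` hook being added above packs its whole fixer into a `python -c` one-liner. As a rough sketch of what that entry does (the function name and the command-line wiring below are illustrative additions, not part of the hook), the same regex substitution written out as a standalone script would look roughly like this:

import pathlib
import re
import sys


def sync_enforced_flake8_rev(config_path: str) -> bool:
    """Make `rev:` match the `# enforce-version:` comment; report whether the file changed."""
    pre_commit_config = pathlib.Path(config_path)
    cfg_txt = pre_commit_config.read_text()
    new_cfg_txt = re.sub(
        r'(?P<spaces>\s+)rev:\s(?:\d+\.\d+\.\d+)\s{0,2}'
        r'#\senforce-version:\s(?P<enforced_version>\d+\.\d+\.\d+)'
        r'[ \t\f\v]*',
        r'\g<spaces>rev: \g<enforced_version> '
        r'# enforce-version: \g<enforced_version>',
        cfg_txt,
    )
    if new_cfg_txt == cfg_txt:
        return False
    pre_commit_config.write_text(new_cfg_txt)
    return True


if __name__ == '__main__':
    # pre-commit passes the matched config file(s) because of `pass_filenames: true`.
    changed = [path for path in sys.argv[1:] if sync_enforced_flake8_rev(path)]
    sys.exit(1 if changed else 0)

The actual hook sets no exit code at all; it simply rewrites the file and relies on pre-commit treating a modified file as a failure. The explicit `sys.exit(1 ...)` here is only so the sketch is usable outside pre-commit.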

oidc-exchange.py

@@ -10,7 +10,7 @@ from urllib.parse import urlparse
 import id  # pylint: disable=redefined-builtin
 import requests
-_GITHUB_STEP_SUMMARY = Path(os.getenv('GITHUB_STEP_SUMMARY'))
+_GITHUB_STEP_SUMMARY = Path(os.getenv("GITHUB_STEP_SUMMARY"))
 # The top-level error message that gets rendered.
 # This message wraps one of the other templates/messages defined below.
@@ -45,7 +45,7 @@ permissions:
 ```
 Learn more at https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings.
-"""  # noqa: S105; not a password
+"""
 # Specialization of the token retrieval failure case, when we know that
 # the failure cause is use within a third-party PR.
@@ -59,7 +59,7 @@ even if `id-token: write` is explicitly configured.
 To fix this, change your publishing workflow to use an event that
 forks of your repository cannot trigger (such as tag or release
 creation, or a manually triggered workflow dispatch).
-"""  # noqa: S105; not a password
+"""
@@ -71,7 +71,7 @@ This generally indicates a trusted publisher configuration error, but could
 also indicate an internal error on GitHub or PyPI's part.
 {rendered_claims}
-"""  # noqa: S105; not a password
+"""
 _RENDERED_CLAIMS = """
 The claims rendered below are **for debugging purposes only**. You should **not**
@@ -97,7 +97,7 @@ Token request failed: the index produced an unexpected
 This strongly suggests a server configuration or downtime issue; wait
 a few minutes and try again.
-"""  # noqa: S105; not a password
+"""
 # Rendered if the package index's token response isn't a valid API token payload.
 _SERVER_TOKEN_RESPONSE_MALFORMED_MESSAGE = """
@@ -105,30 +105,30 @@ Token response error: the index gave us an invalid response.
 This strongly suggests a server configuration or downtime issue; wait
 a few minutes and try again.
-"""  # noqa: S105; not a password
+"""
 def die(msg: str) -> NoReturn:
-    with _GITHUB_STEP_SUMMARY.open('a', encoding='utf-8') as io:
+    with _GITHUB_STEP_SUMMARY.open("a", encoding="utf-8") as io:
         print(_ERROR_SUMMARY_MESSAGE.format(message=msg), file=io)
     # HACK: GitHub Actions' annotations don't work across multiple lines naively;
     # translating `\n` into `%0A` (i.e., HTML percent-encoding) is known to work.
     # See: https://github.com/actions/toolkit/issues/193
-    msg = msg.replace('\n', '%0A')
-    print(f'::error::Trusted publishing exchange failure: {msg}', file=sys.stderr)
+    msg = msg.replace("\n", "%0A")
+    print(f"::error::Trusted publishing exchange failure: {msg}", file=sys.stderr)
     sys.exit(1)
 def debug(msg: str):
-    print(f'::debug::{msg.title()}', file=sys.stderr)
+    print(f"::debug::{msg.title()}", file=sys.stderr)
 def get_normalized_input(name: str) -> str | None:
-    name = f'INPUT_{name.upper()}'
+    name = f"INPUT_{name.upper()}"
     if val := os.getenv(name):
         return val
-    return os.getenv(name.replace('-', '_'))
+    return os.getenv(name.replace("-", "_"))
@@ -140,13 +140,13 @@ def assert_successful_audience_call(resp: requests.Response, domain: str):
             # This index supports OIDC, but forbids the client from using
             # it (either because it's disabled, ratelimited, etc.)
             die(
-                f'audience retrieval failed: repository at {domain} has trusted publishing disabled',
+                f"audience retrieval failed: repository at {domain} has trusted publishing disabled",
             )
         case HTTPStatus.NOT_FOUND:
             # This index does not support OIDC.
             die(
-                'audience retrieval failed: repository at '
-                f'{domain} does not indicate trusted publishing support',
+                "audience retrieval failed: repository at "
+                f"{domain} does not indicate trusted publishing support",
             )
         case other:
             status = HTTPStatus(other)
@@ -154,67 +154,67 @@ def assert_successful_audience_call(resp: requests.Response, domain: str):
             # something we expect. This can happen if the index is broken, in maintenance mode,
             # misconfigured, etc.
             die(
-                'audience retrieval failed: repository at '
-                f'{domain} responded with unexpected {other}: {status.phrase}',
+                "audience retrieval failed: repository at "
+                f"{domain} responded with unexpected {other}: {status.phrase}",
             )
 def render_claims(token: str) -> str:
-    _, payload, _ = token.split('.', 2)
+    _, payload, _ = token.split(".", 2)
     # urlsafe_b64decode needs padding; JWT payloads don't contain any.
-    payload += '=' * (4 - (len(payload) % 4))
+    payload += "=" * (4 - (len(payload) % 4))
     claims = json.loads(base64.urlsafe_b64decode(payload))
     def _get(name: str) -> str:  # noqa: WPS430
-        return claims.get(name, 'MISSING')
+        return claims.get(name, "MISSING")
     return _RENDERED_CLAIMS.format(
-        sub=_get('sub'),
-        repository=_get('repository'),
-        repository_owner=_get('repository_owner'),
-        repository_owner_id=_get('repository_owner_id'),
-        job_workflow_ref=_get('job_workflow_ref'),
-        ref=_get('ref'),
+        sub=_get("sub"),
+        repository=_get("repository"),
+        repository_owner=_get("repository_owner"),
+        repository_owner_id=_get("repository_owner_id"),
+        job_workflow_ref=_get("job_workflow_ref"),
+        ref=_get("ref"),
     )
 def event_is_third_party_pr() -> bool:
     # Non-`pull_request` events cannot be from third-party PRs.
-    if os.getenv('GITHUB_EVENT_NAME') != 'pull_request':
+    if os.getenv("GITHUB_EVENT_NAME") != "pull_request":
         return False
-    event_path = os.getenv('GITHUB_EVENT_PATH')
+    event_path = os.getenv("GITHUB_EVENT_PATH")
     if not event_path:
         # No GITHUB_EVENT_PATH indicates a weird GitHub or runner bug.
-        debug('unexpected: no GITHUB_EVENT_PATH to check')
+        debug("unexpected: no GITHUB_EVENT_PATH to check")
         return False
     try:
         event = json.loads(Path(event_path).read_bytes())
     except json.JSONDecodeError:
-        debug('unexpected: GITHUB_EVENT_PATH does not contain valid JSON')
+        debug("unexpected: GITHUB_EVENT_PATH does not contain valid JSON")
         return False
     try:
-        return event['pull_request']['head']['repo']['fork']
+        return event["pull_request"]["head"]["repo"]["fork"]
     except KeyError:
         return False
-repository_url = get_normalized_input('repository-url')
+repository_url = get_normalized_input("repository-url")
 repository_domain = urlparse(repository_url).netloc
-token_exchange_url = f'https://{repository_domain}/_/oidc/mint-token'
+token_exchange_url = f"https://{repository_domain}/_/oidc/mint-token"
 # Indices are expected to support `https://{domain}/_/oidc/audience`,
 # which tells OIDC exchange clients which audience to use.
-audience_url = f'https://{repository_domain}/_/oidc/audience'
-audience_resp = requests.get(audience_url, timeout=5)  # S113 wants a timeout
+audience_url = f"https://{repository_domain}/_/oidc/audience"
+audience_resp = requests.get(audience_url)
 assert_successful_audience_call(audience_resp, repository_domain)
-oidc_audience = audience_resp.json()['audience']
-debug(f'selected trusted publishing exchange endpoint: {token_exchange_url}')
+oidc_audience = audience_resp.json()["audience"]
+debug(f"selected trusted publishing exchange endpoint: {token_exchange_url}")
 try:
     oidc_token = id.detect_credential(audience=oidc_audience)
@@ -229,8 +229,7 @@ except id.IdentityError as identity_error:
 # Now we can do the actual token exchange.
 mint_token_resp = requests.post(
     token_exchange_url,
-    json={'token': oidc_token},
-    timeout=5,  # S113 wants a timeout
+    json={"token": oidc_token},
 )
 try:
@@ -247,9 +246,9 @@ except requests.JSONDecodeError:
 # On failure, the JSON response includes the list of errors that
 # occurred during minting.
 if not mint_token_resp.ok:
-    reasons = '\n'.join(
-        f'* `{error["code"]}`: {error["description"]}'
-        for error in mint_token_payload['errors']
+    reasons = "\n".join(
+        f"* `{error['code']}`: {error['description']}"
+        for error in mint_token_payload["errors"]
     )
     rendered_claims = render_claims(oidc_token)
@@ -261,12 +260,12 @@ if not mint_token_resp.ok:
         ),
     )
-pypi_token = mint_token_payload.get('token')
+pypi_token = mint_token_payload.get("token")
 if pypi_token is None:
     die(_SERVER_TOKEN_RESPONSE_MALFORMED_MESSAGE)
 # Mask the newly minted PyPI token, so that we don't accidentally leak it in logs.
-print(f'::add-mask::{pypi_token}', file=sys.stderr)
+print(f"::add-mask::{pypi_token}", file=sys.stderr)
 # This final print will be captured by the subshell in `twine-upload.sh`.
 print(pypi_token)
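Almost every change in this file is quote style; the one genuinely fiddly piece of logic in the surrounding context is the payload handling in `render_claims()`, which has to re-add the base64 padding that JWTs strip. A minimal, self-contained sketch of just that step, using a made-up unsigned token instead of a real OIDC credential:

import base64
import json

# A made-up, unsigned JWT-shaped value (header.payload.signature); illustrative only.
fake_claims = {'sub': 'repo:example/example', 'ref': 'refs/tags/v1.0.0'}
fake_payload = base64.urlsafe_b64encode(
    json.dumps(fake_claims).encode(),
).decode().rstrip('=')  # JWTs drop the trailing '=' padding
token = f'eyJhbGciOiJub25lIn0.{fake_payload}.'

_, payload, _ = token.split('.', 2)
# urlsafe_b64decode needs padding; JWT payloads don't contain any, so re-add it.
payload += '=' * (4 - (len(payload) % 4))
claims = json.loads(base64.urlsafe_b64decode(payload))

print(claims.get('sub', 'MISSING'))         # repo:example/example
print(claims.get('repository', 'MISSING'))  # MISSING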

print-hash.py

@@ -4,15 +4,15 @@ import sys
 packages_dir = pathlib.Path(sys.argv[1]).resolve().absolute()
-print('Showing hash values of files to be uploaded:')
+print("Showing hash values of files to be uploaded:")
 for file_object in packages_dir.iterdir():
     sha256 = hashlib.sha256()
-    md5 = hashlib.md5()  # noqa: S324; only use for reference
+    md5 = hashlib.md5()
     blake2_256 = hashlib.blake2b(digest_size=256 // 8)
     print(file_object)
-    print('')
+    print("")
     content = file_object.read_bytes()
@@ -20,7 +20,7 @@ for file_object in packages_dir.iterdir():
     md5.update(content)
     blake2_256.update(content)
-    print(f'SHA256: {sha256.hexdigest()}')
-    print(f'MD5: {md5.hexdigest()}')
-    print(f'BLAKE2-256: {blake2_256.hexdigest()}')
-    print('')
+    print(f"SHA256: {sha256.hexdigest()}")
+    print(f"MD5: {md5.hexdigest()}")
+    print(f"BLAKE2-256: {blake2_256.hexdigest()}")
+    print("")
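The only non-obvious call in this script is the BLAKE2 construction: `hashlib.blake2b(digest_size=256 // 8)` asks for a 32-byte (256-bit) digest, i.e. the BLAKE2-256 value printed next to SHA256 and MD5. A quick self-contained check (the sample bytes below are just a stand-in for `file_object.read_bytes()`):

import hashlib

content = b'example distribution contents'  # stand-in for file_object.read_bytes()

blake2_256 = hashlib.blake2b(digest_size=256 // 8)  # 256 bits == 32 bytes
blake2_256.update(content)

digest = blake2_256.hexdigest()
assert len(digest) == 64  # 32 bytes render as 64 hex characters
print(f'BLAKE2-256: {digest}')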

requirements/runtime.txt

@@ -12,47 +12,41 @@ cffi==1.16.0
     # via cryptography
 charset-normalizer==3.3.2
     # via requests
-cryptography==42.0.7
+cryptography==42.0.5
     # via secretstorage
-docutils==0.21.2
+docutils==0.20.1
     # via readme-renderer
-id==1.4.0
+id==1.3.0
     # via -r runtime.in
 idna==3.7
     # via requests
-importlib-metadata==7.1.0
+importlib-metadata==7.0.2
     # via twine
-jaraco-classes==3.4.0
-    # via keyring
-jaraco-context==5.3.0
-    # via keyring
-jaraco-functools==4.0.1
+jaraco-classes==3.3.1
     # via keyring
 jeepney==0.8.0
     # via
     #   keyring
     #   secretstorage
-keyring==25.2.1
+keyring==24.3.1
     # via twine
 markdown-it-py==3.0.0
     # via rich
 mdurl==0.1.2
     # via markdown-it-py
 more-itertools==10.2.0
-    # via
-    #   jaraco-classes
-    #   jaraco-functools
-nh3==0.2.17
+    # via jaraco-classes
+nh3==0.2.15
     # via readme-renderer
 pkginfo==1.10.0
     # via twine
-pycparser==2.22
+pycparser==2.21
     # via cffi
-pydantic==2.7.1
+pydantic==2.6.3
     # via id
-pydantic-core==2.18.2
+pydantic-core==2.16.3
     # via pydantic
-pygments==2.18.0
+pygments==2.17.2
     # via
     #   readme-renderer
     #   rich
@@ -72,9 +66,9 @@ rich==13.7.1
     # via twine
 secretstorage==3.3.3
     # via keyring
-twine==5.1.0
+twine==5.0.0
     # via -r runtime.in
-typing-extensions==4.11.0
+typing-extensions==4.10.0
     # via
     #   pydantic
     #   pydantic-core
@@ -82,5 +76,5 @@ urllib3==2.2.1
     # via
     #   requests
     #   twine
-zipp==3.18.2
+zipp==3.17.0
     # via importlib-metadata

twine-upload.sh

@@ -40,9 +40,9 @@ INPUT_VERIFY_METADATA="$(get-normalized-input 'verify-metadata')"
 INPUT_SKIP_EXISTING="$(get-normalized-input 'skip-existing')"
 INPUT_PRINT_HASH="$(get-normalized-input 'print-hash')"
-PASSWORD_DEPRECATION_NUDGE="::error title=Password-based uploads disabled::\
-As of 2024, PyPI requires all users to enable Two-Factor \
-Authentication. This consequently requires all users to switch \
+PASSWORD_DEPRECATION_NUDGE="::error title=Password-based uploads deprecated::\
+Starting in 2024, PyPI will require all users to enable Two-Factor \
+Authentication. This will consequently require all users to switch \
 to either Trusted Publishers (preferred) or API tokens for package \
 uploads. Read more: \
 https://blog.pypi.org/posts/2023-05-25-securing-pypi-with-2fa/"
@@ -74,7 +74,6 @@ else
     if [[ "${INPUT_REPOSITORY_URL}" =~ pypi\.org ]]; then
         echo "${PASSWORD_DEPRECATION_NUDGE}"
         echo "${TRUSTED_PUBLISHING_NUDGE}"
-        exit 1
     fi
 fi
@@ -121,9 +120,9 @@ if [[ ${INPUT_VERIFY_METADATA,,} != "false" ]] ; then
     twine check ${INPUT_PACKAGES_DIR%%/}/*
 fi
-TWINE_EXTRA_ARGS=--disable-progress-bar
+TWINE_EXTRA_ARGS=
 if [[ ${INPUT_SKIP_EXISTING,,} != "false" ]] ; then
-    TWINE_EXTRA_ARGS="${TWINE_EXTRA_ARGS} --skip-existing"
+    TWINE_EXTRA_ARGS=--skip-existing
 fi
 if [[ ${INPUT_VERBOSE,,} != "false" ]] ; then