Mirror of https://github.com/pypa/gh-action-pypi-publish, synced 2024-11-30 21:22:28 +08:00
Compare commits: 3fbcf7ccf4...699cd6103f (5 commits)

* 699cd6103f
* 8414fc2457
* 67a07ebbed
* 771d60f44b
* 04f4e64de3

@@ -17,12 +17,12 @@ repos:
     - --honor-noqa
 
 - repo: https://github.com/Lucas-C/pre-commit-hooks.git
-  rev: v1.5.4
+  rev: v1.5.5
   hooks:
   - id: remove-tabs
 
 - repo: https://github.com/python-jsonschema/check-jsonschema.git
-  rev: 0.27.3
+  rev: 0.28.1
   hooks:
   - id: check-github-actions
   - id: check-github-workflows
@@ -67,7 +67,7 @@ repos:
   - id: codespell
 
 - repo: https://github.com/adrienverge/yamllint.git
-  rev: v1.33.0
+  rev: v1.35.1
   hooks:
   - id: yamllint
     files: \.(yaml|yml)$
@@ -78,13 +78,13 @@ repos:
     - --strict
 
 - repo: https://github.com/PyCQA/flake8.git
-  rev: 6.1.0
+  rev: 7.0.0
   hooks:
   - id: flake8
     alias: flake8-no-wps
     name: flake8 WPS-excluded
     args:
     - --ignore
     # NOTE: WPS326: Found implicit string concatenation
     # NOTE: WPS332: Found walrus operator
     - >-
       D100,
       D101,
@@ -92,22 +92,6 @@ repos:
       D107,
       E402,
       E501,
-    additional_dependencies:
-    - flake8-2020 ~= 1.7.0
-    - flake8-pytest-style ~= 1.6.0
-
-- repo: https://github.com/PyCQA/flake8.git
-  # NOTE: This is kept at v4 for until WPS starts supporting flake v5.
-  rev: 4.0.1  # enforce-version: 4.0.1
-  hooks:
-  - id: flake8
-    alias: flake8-only-wps
-    name: flake8 WPS-only
-    args:
-    - --ignore
-    # NOTE: WPS326: Found implicit string concatenation
-    # NOTE: WPS332: Found walrus operator
-    - >-
       WPS102,
       WPS110,
       WPS111,
@@ -124,13 +108,14 @@ repos:
       WPS440,
       WPS441,
       WPS453,
-    - --select
-    - WPS
     additional_dependencies:
-    - wemake-python-styleguide ~= 0.17.0
+    - flake8-2020 ~= 1.7.0
+    - flake8-pytest-style ~= 1.6.0
+    - wemake-python-styleguide ~= 0.19.0
+    language_version: python3.11  # flake8-commas doesn't work w/ Python 3.12
 
 - repo: https://github.com/PyCQA/pylint.git
-  rev: v3.0.3
+  rev: v3.1.0
   hooks:
   - id: pylint
     args:
@@ -149,36 +134,4 @@ repos:
     - --output-format
     - colorized
-
-- repo: local
-  hooks:
-  - id: enforced-flake8-version
-    name: Verify that enforced flake8 version stays unchanged
-    description: >-
-      This is a sanity check and fixer that makes sure that
-      the `flake8` version in this file remains matching the
-      corresponding request in the `# enforce-version` comment.
-    # Using Python here because using
-    # shell test does not always work in CIs:
-    entry: >-
-      python -c 'import pathlib, re, sys;
-      pre_commit_config = pathlib.Path(sys.argv[1]);
-      cfg_txt = pre_commit_config.read_text();
-      new_cfg_txt = re.sub(
-      r"(?P<spaces>\s+)rev:\s(?:\d+\.\d+\.\d+)\s{0,2}"
-      r"#\senforce-version:\s(?P<enforced_version>\d+\.\d+\.\d+)"
-      r"[ \t\f\v]*",
-      r"\g<spaces>rev: \g<enforced_version> "
-      r"# enforce-version: \g<enforced_version>",
-      cfg_txt,
-      );
-      cfg_txt != new_cfg_txt and
-      pre_commit_config.write_text(new_cfg_txt)
-      '
-    pass_filenames: true
-    language: system
-    files: >-
-      ^\.pre-commit-config\.ya?ml$
-    types:
-    - yaml
 
 ...
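The local `enforced-flake8-version` hook removed above kept the pinned `rev:` in sync with its `# enforce-version:` comment by rewriting the config with a regular expression. Below is a minimal, standalone rerun of that substitution; the pattern and replacement are copied verbatim from the hook's `entry`, while the drifted `rev:` line is a made-up input for illustration:

```python
import re

REV_COMMENT_RE = re.compile(
    r'(?P<spaces>\s+)rev:\s(?:\d+\.\d+\.\d+)\s{0,2}'
    r'#\senforce-version:\s(?P<enforced_version>\d+\.\d+\.\d+)'
    r'[ \t\f\v]*',
)

cfg_txt = '\n  rev: 4.0.2  # enforce-version: 4.0.1\n'  # hypothetical drifted line
new_cfg_txt = REV_COMMENT_RE.sub(
    r'\g<spaces>rev: \g<enforced_version> '
    r'# enforce-version: \g<enforced_version>',
    cfg_txt,
)
print(new_cfg_txt)  # the rev is rewritten back to 4.0.1, matching the comment
```

With the pinned flake8 4.0.1 block gone from the config, there is nothing left for this fixer to keep in sync, which is presumably why the hook is dropped together with it.
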
@@ -10,7 +10,7 @@ from urllib.parse import urlparse
 import id  # pylint: disable=redefined-builtin
 import requests
 
-_GITHUB_STEP_SUMMARY = Path(os.getenv("GITHUB_STEP_SUMMARY"))
+_GITHUB_STEP_SUMMARY = Path(os.getenv('GITHUB_STEP_SUMMARY'))
 
 # The top-level error message that gets rendered.
 # This message wraps one of the other templates/messages defined below.
@@ -45,7 +45,7 @@ permissions:
 ```
 
 Learn more at https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect#adding-permissions-settings.
-"""
+"""  # noqa: S105; not a password
 
 # Specialization of the token retrieval failure case, when we know that
 # the failure cause is use within a third-party PR.
@@ -59,7 +59,7 @@ even if `id-token: write` is explicitly configured.
 To fix this, change your publishing workflow to use an event that
 forks of your repository cannot trigger (such as tag or release
 creation, or a manually triggered workflow dispatch).
-"""
+"""  # noqa: S105; not a password
 
 # Rendered if the package index refuses the given OIDC token.
 _SERVER_REFUSED_TOKEN_EXCHANGE_MESSAGE = """
@@ -71,7 +71,7 @@ This generally indicates a trusted publisher configuration error, but could
 also indicate an internal error on GitHub or PyPI's part.
 
 {rendered_claims}
-"""
+"""  # noqa: S105; not a password
 
 _RENDERED_CLAIMS = """
 The claims rendered below are **for debugging purposes only**. You should **not**
@@ -97,7 +97,7 @@ Token request failed: the index produced an unexpected
 
 This strongly suggests a server configuration or downtime issue; wait
 a few minutes and try again.
-"""
+"""  # noqa: S105; not a password
 
 # Rendered if the package index's token response isn't a valid API token payload.
 _SERVER_TOKEN_RESPONSE_MALFORMED_MESSAGE = """
@@ -105,30 +105,30 @@ Token response error: the index gave us an invalid response.
 
 This strongly suggests a server configuration or downtime issue; wait
 a few minutes and try again.
-"""
+"""  # noqa: S105; not a password
 
 
 def die(msg: str) -> NoReturn:
-    with _GITHUB_STEP_SUMMARY.open("a", encoding="utf-8") as io:
+    with _GITHUB_STEP_SUMMARY.open('a', encoding='utf-8') as io:
         print(_ERROR_SUMMARY_MESSAGE.format(message=msg), file=io)
 
     # HACK: GitHub Actions' annotations don't work across multiple lines naively;
    # translating `\n` into `%0A` (i.e., HTML percent-encoding) is known to work.
     # See: https://github.com/actions/toolkit/issues/193
-    msg = msg.replace("\n", "%0A")
-    print(f"::error::Trusted publishing exchange failure: {msg}", file=sys.stderr)
+    msg = msg.replace('\n', '%0A')
+    print(f'::error::Trusted publishing exchange failure: {msg}', file=sys.stderr)
     sys.exit(1)
 
 
 def debug(msg: str):
-    print(f"::debug::{msg.title()}", file=sys.stderr)
+    print(f'::debug::{msg.title()}', file=sys.stderr)
 
 
 def get_normalized_input(name: str) -> str | None:
-    name = f"INPUT_{name.upper()}"
+    name = f'INPUT_{name.upper()}'
     if val := os.getenv(name):
         return val
-    return os.getenv(name.replace("-", "_"))
+    return os.getenv(name.replace('-', '_'))
 
 
 def assert_successful_audience_call(resp: requests.Response, domain: str):
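`die()` and `debug()` above talk to the GitHub Actions runner through workflow commands printed to stderr, plus the Markdown step-summary file pointed to by `GITHUB_STEP_SUMMARY`; later in this file the same mechanism masks the minted token with `::add-mask::`. A condensed sketch of those conventions follows — the helper names `emit_error` and `mask` are illustrative, not part of the action:

```python
import os
import sys
from pathlib import Path


def emit_error(msg: str) -> None:
    # Append Markdown to the step summary file; GitHub renders it on the
    # workflow run's summary page.
    summary_path = os.getenv('GITHUB_STEP_SUMMARY')
    if summary_path:  # the real script assumes this is always set
        with Path(summary_path).open('a', encoding='utf-8') as io:
            print(msg, file=io)

    # `::error::` annotations are single-line, so newlines are smuggled
    # through as %0A (https://github.com/actions/toolkit/issues/193).
    encoded = msg.replace('\n', '%0A')
    print(f'::error::{encoded}', file=sys.stderr)


def mask(secret: str) -> None:
    # `::add-mask::` tells the runner to redact the value from all logs.
    print(f'::add-mask::{secret}', file=sys.stderr)
```
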
@@ -140,13 +140,13 @@ def assert_successful_audience_call(resp: requests.Response, domain: str):
             # This index supports OIDC, but forbids the client from using
             # it (either because it's disabled, ratelimited, etc.)
             die(
-                f"audience retrieval failed: repository at {domain} has trusted publishing disabled",
+                f'audience retrieval failed: repository at {domain} has trusted publishing disabled',
             )
         case HTTPStatus.NOT_FOUND:
             # This index does not support OIDC.
             die(
-                "audience retrieval failed: repository at "
-                f"{domain} does not indicate trusted publishing support",
+                'audience retrieval failed: repository at '
+                f'{domain} does not indicate trusted publishing support',
             )
         case other:
             status = HTTPStatus(other)
@@ -154,67 +154,67 @@ def assert_successful_audience_call(resp: requests.Response, domain: str):
             # something we expect. This can happen if the index is broken, in maintenance mode,
             # misconfigured, etc.
             die(
-                "audience retrieval failed: repository at "
-                f"{domain} responded with unexpected {other}: {status.phrase}",
+                'audience retrieval failed: repository at '
+                f'{domain} responded with unexpected {other}: {status.phrase}',
             )
 
 
 def render_claims(token: str) -> str:
-    _, payload, _ = token.split(".", 2)
+    _, payload, _ = token.split('.', 2)
 
     # urlsafe_b64decode needs padding; JWT payloads don't contain any.
-    payload += "=" * (4 - (len(payload) % 4))
+    payload += '=' * (4 - (len(payload) % 4))
     claims = json.loads(base64.urlsafe_b64decode(payload))
 
     def _get(name: str) -> str:  # noqa: WPS430
-        return claims.get(name, "MISSING")
+        return claims.get(name, 'MISSING')
 
     return _RENDERED_CLAIMS.format(
-        sub=_get("sub"),
-        repository=_get("repository"),
-        repository_owner=_get("repository_owner"),
-        repository_owner_id=_get("repository_owner_id"),
-        job_workflow_ref=_get("job_workflow_ref"),
-        ref=_get("ref"),
+        sub=_get('sub'),
+        repository=_get('repository'),
+        repository_owner=_get('repository_owner'),
+        repository_owner_id=_get('repository_owner_id'),
+        job_workflow_ref=_get('job_workflow_ref'),
+        ref=_get('ref'),
     )
 
 
 def event_is_third_party_pr() -> bool:
     # Non-`pull_request` events cannot be from third-party PRs.
-    if os.getenv("GITHUB_EVENT_NAME") != "pull_request":
+    if os.getenv('GITHUB_EVENT_NAME') != 'pull_request':
         return False
 
-    event_path = os.getenv("GITHUB_EVENT_PATH")
+    event_path = os.getenv('GITHUB_EVENT_PATH')
     if not event_path:
         # No GITHUB_EVENT_PATH indicates a weird GitHub or runner bug.
-        debug("unexpected: no GITHUB_EVENT_PATH to check")
+        debug('unexpected: no GITHUB_EVENT_PATH to check')
         return False
 
     try:
         event = json.loads(Path(event_path).read_bytes())
     except json.JSONDecodeError:
-        debug("unexpected: GITHUB_EVENT_PATH does not contain valid JSON")
+        debug('unexpected: GITHUB_EVENT_PATH does not contain valid JSON')
         return False
 
     try:
-        return event["pull_request"]["head"]["repo"]["fork"]
+        return event['pull_request']['head']['repo']['fork']
     except KeyError:
         return False
 
 
-repository_url = get_normalized_input("repository-url")
+repository_url = get_normalized_input('repository-url')
 repository_domain = urlparse(repository_url).netloc
-token_exchange_url = f"https://{repository_domain}/_/oidc/mint-token"
+token_exchange_url = f'https://{repository_domain}/_/oidc/mint-token'
 
 # Indices are expected to support `https://{domain}/_/oidc/audience`,
 # which tells OIDC exchange clients which audience to use.
-audience_url = f"https://{repository_domain}/_/oidc/audience"
-audience_resp = requests.get(audience_url)
+audience_url = f'https://{repository_domain}/_/oidc/audience'
+audience_resp = requests.get(audience_url, timeout=5)  # S113 wants a timeout
 assert_successful_audience_call(audience_resp, repository_domain)
 
-oidc_audience = audience_resp.json()["audience"]
+oidc_audience = audience_resp.json()['audience']
 
-debug(f"selected trusted publishing exchange endpoint: {token_exchange_url}")
+debug(f'selected trusted publishing exchange endpoint: {token_exchange_url}')
 
 try:
     oidc_token = id.detect_credential(audience=oidc_audience)
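`render_claims()` above recovers the OIDC token's claims purely for diagnostics: it splits the JWT into its three dot-separated segments, restores the base64url padding that JWTs strip, and decodes the JSON payload. A self-contained illustration of the same steps, using a made-up single-claim token:

```python
import base64
import json

# A made-up `header.payload.signature` token; real JWT payloads also ship
# base64url-encoded with the trailing `=` padding stripped.
claims_json = json.dumps({'sub': 'repo:octo/example'}).encode()
payload_b64 = base64.urlsafe_b64encode(claims_json).rstrip(b'=').decode()
token = f'h.{payload_b64}.s'

# The same recovery steps as render_claims() above:
_, payload, _ = token.split('.', 2)
payload += '=' * (4 - (len(payload) % 4))  # restore the stripped padding
claims = json.loads(base64.urlsafe_b64decode(payload))
print(claims.get('sub', 'MISSING'))  # -> repo:octo/example
```
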
@@ -229,7 +229,8 @@ except id.IdentityError as identity_error:
 # Now we can do the actual token exchange.
 mint_token_resp = requests.post(
     token_exchange_url,
-    json={"token": oidc_token},
+    json={'token': oidc_token},
+    timeout=5,  # S113 wants a timeout
 )
 
 try:
@@ -246,9 +247,9 @@ except requests.JSONDecodeError:
 # On failure, the JSON response includes the list of errors that
 # occurred during minting.
 if not mint_token_resp.ok:
-    reasons = "\n".join(
-        f"* `{error['code']}`: {error['description']}"
-        for error in mint_token_payload["errors"]
+    reasons = '\n'.join(
+        f'* `{error["code"]}`: {error["description"]}'
+        for error in mint_token_payload['errors']
     )
 
     rendered_claims = render_claims(oidc_token)
@@ -260,12 +261,12 @@ if not mint_token_resp.ok:
         ),
     )
 
-pypi_token = mint_token_payload.get("token")
+pypi_token = mint_token_payload.get('token')
 if pypi_token is None:
     die(_SERVER_TOKEN_RESPONSE_MALFORMED_MESSAGE)
 
 # Mask the newly minted PyPI token, so that we don't accidentally leak it in logs.
-print(f"::add-mask::{pypi_token}", file=sys.stderr)
+print(f'::add-mask::{pypi_token}', file=sys.stderr)
 
 # This final print will be captured by the subshell in `twine-upload.sh`.
 print(pypi_token)
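`event_is_third_party_pr()` earlier in this file decides whether the workflow was triggered by a pull request coming from a fork, in which case GitHub does not grant usable OIDC permissions and the exchange is refused. It only inspects one nested key of the `GITHUB_EVENT_PATH` payload; here is a minimal sketch of that lookup against a hypothetical, heavily trimmed event document:

```python
import json

# Only the key path that event_is_third_party_pr() reads is shown.
event = json.loads('{"pull_request": {"head": {"repo": {"fork": true}}}}')

try:
    is_third_party = event['pull_request']['head']['repo']['fork']
except KeyError:
    is_third_party = False

print(is_third_party)  # -> True: the PR comes from a fork, so no token is minted
```
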
@@ -4,15 +4,15 @@ import sys
 
 packages_dir = pathlib.Path(sys.argv[1]).resolve().absolute()
 
-print("Showing hash values of files to be uploaded:")
+print('Showing hash values of files to be uploaded:')
 
 for file_object in packages_dir.iterdir():
     sha256 = hashlib.sha256()
-    md5 = hashlib.md5()
+    md5 = hashlib.md5()  # noqa: S324; only use for reference
     blake2_256 = hashlib.blake2b(digest_size=256 // 8)
 
     print(file_object)
-    print("")
+    print('')
 
     content = file_object.read_bytes()
 
@@ -20,7 +20,7 @@ for file_object in packages_dir.iterdir():
     md5.update(content)
     blake2_256.update(content)
 
-    print(f"SHA256: {sha256.hexdigest()}")
-    print(f"MD5: {md5.hexdigest()}")
-    print(f"BLAKE2-256: {blake2_256.hexdigest()}")
-    print("")
+    print(f'SHA256: {sha256.hexdigest()}')
+    print(f'MD5: {md5.hexdigest()}')
+    print(f'BLAKE2-256: {blake2_256.hexdigest()}')
+    print('')
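The hashing loop above reads each distribution file fully into memory with `read_bytes()` before updating the three digests. Purely for illustration, the same three digests can also be computed in a streaming fashion; this is a sketch of an alternative, not what the script actually does:

```python
import hashlib
from pathlib import Path


def streamed_digests(path: Path, chunk_size: int = 1 << 20) -> dict[str, str]:
    # Same three digests as above, but fed in chunks instead of one
    # read_bytes() call, so large sdists/wheels never sit in memory whole.
    sha256 = hashlib.sha256()
    md5 = hashlib.md5()  # noqa: S324; informational only, as in the script
    blake2_256 = hashlib.blake2b(digest_size=256 // 8)
    with path.open('rb') as stream:
        while chunk := stream.read(chunk_size):
            for digest in (sha256, md5, blake2_256):
                digest.update(chunk)
    return {
        'SHA256': sha256.hexdigest(),
        'MD5': md5.hexdigest(),
        'BLAKE2-256': blake2_256.hexdigest(),
    }
```
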
@@ -12,41 +12,47 @@ cffi==1.16.0
     # via cryptography
 charset-normalizer==3.3.2
     # via requests
-cryptography==42.0.5
+cryptography==42.0.7
     # via secretstorage
-docutils==0.20.1
+docutils==0.21.2
     # via readme-renderer
-id==1.3.0
+id==1.4.0
     # via -r runtime.in
 idna==3.7
     # via requests
-importlib-metadata==7.0.2
+importlib-metadata==7.1.0
     # via twine
-jaraco-classes==3.3.1
+jaraco-classes==3.4.0
     # via keyring
+jaraco-context==5.3.0
+    # via keyring
+jaraco-functools==4.0.1
+    # via keyring
 jeepney==0.8.0
     # via
     #   keyring
     #   secretstorage
-keyring==24.3.1
+keyring==25.2.1
     # via twine
 markdown-it-py==3.0.0
     # via rich
 mdurl==0.1.2
     # via markdown-it-py
 more-itertools==10.2.0
-    # via jaraco-classes
-nh3==0.2.15
+    # via
+    #   jaraco-classes
+    #   jaraco-functools
+nh3==0.2.17
     # via readme-renderer
 pkginfo==1.10.0
     # via twine
-pycparser==2.21
+pycparser==2.22
     # via cffi
-pydantic==2.6.3
+pydantic==2.7.1
     # via id
-pydantic-core==2.16.3
+pydantic-core==2.18.2
     # via pydantic
-pygments==2.17.2
+pygments==2.18.0
     # via
     #   readme-renderer
     #   rich
@@ -66,9 +72,9 @@ rich==13.7.1
     # via twine
 secretstorage==3.3.3
     # via keyring
-twine==5.0.0
+twine==5.1.0
     # via -r runtime.in
-typing-extensions==4.10.0
+typing-extensions==4.11.0
     # via
     #   pydantic
     #   pydantic-core
@@ -76,5 +82,5 @@ urllib3==2.2.1
     # via
     #   requests
     #   twine
-zipp==3.17.0
+zipp==3.18.2
     # via importlib-metadata

@@ -40,9 +40,9 @@ INPUT_VERIFY_METADATA="$(get-normalized-input 'verify-metadata')"
 INPUT_SKIP_EXISTING="$(get-normalized-input 'skip-existing')"
 INPUT_PRINT_HASH="$(get-normalized-input 'print-hash')"
 
-PASSWORD_DEPRECATION_NUDGE="::error title=Password-based uploads deprecated::\
-Starting in 2024, PyPI will require all users to enable Two-Factor \
-Authentication. This will consequently require all users to switch \
+PASSWORD_DEPRECATION_NUDGE="::error title=Password-based uploads disabled::\
+As of 2024, PyPI requires all users to enable Two-Factor \
+Authentication. This consequently requires all users to switch \
 to either Trusted Publishers (preferred) or API tokens for package \
 uploads. Read more: \
 https://blog.pypi.org/posts/2023-05-25-securing-pypi-with-2fa/"
@@ -74,6 +74,7 @@ else
     if [[ "${INPUT_REPOSITORY_URL}" =~ pypi\.org ]]; then
         echo "${PASSWORD_DEPRECATION_NUDGE}"
         echo "${TRUSTED_PUBLISHING_NUDGE}"
+        exit 1
     fi
 fi
 
@@ -120,9 +121,9 @@ if [[ ${INPUT_VERIFY_METADATA,,} != "false" ]] ; then
     twine check ${INPUT_PACKAGES_DIR%%/}/*
 fi
 
-TWINE_EXTRA_ARGS=
+TWINE_EXTRA_ARGS=--disable-progress-bar
 if [[ ${INPUT_SKIP_EXISTING,,} != "false" ]] ; then
-    TWINE_EXTRA_ARGS=--skip-existing
+    TWINE_EXTRA_ARGS="${TWINE_EXTRA_ARGS} --skip-existing"
 fi
 
 if [[ ${INPUT_VERBOSE,,} != "false" ]] ; then
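The `TWINE_EXTRA_ARGS` change above makes the variable start out as `--disable-progress-bar` and then append `--skip-existing` to it, instead of overwriting the whole value as the old code did. Expressed in Python purely for illustration (the helper name is hypothetical, not part of `twine-upload.sh`):

```python
def build_twine_extra_args(skip_existing: str) -> list[str]:
    # Progress bars are now always disabled; --skip-existing is added on top
    # rather than replacing the variable, so neither flag clobbers the other.
    extra_args = ['--disable-progress-bar']
    if skip_existing.lower() != 'false':
        extra_args.append('--skip-existing')
    return extra_args


print(build_twine_extra_args(skip_existing='true'))
# -> ['--disable-progress-bar', '--skip-existing']
```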