binarybeachio: Bucket-4 trusted-JWT auth — replaces in-place github.py patch
Migrates this fork to the binarybeachio platform-architecture pivot: oauth2-proxy at the edge enforces a Zitadel session, the auth-bridge mints a short-lived RS256 JWT, and a NEW additive endpoint at /auth/sign-in-trusted/ verifies the JWT, claims its jti against shared-redis (single-use replay protection, fail-closed), find-or-creates the User, and starts a Django session via user_login(). Net surface vs. upstream-clean: 1 new view file + 1 url path + 1 exports __init__ entry + 7 reserved error codes (6000-6099 range). github.py and the GitHub-button rebrand patch are reverted to upstream — sign-in entry-point UX is now driven by Traefik redirectregex on /sign-in* in infrastructure/plane/docker-compose.yml. Replay protection contract: jti claim minted by bridge, consumed via Redis SETNX with ttl = exp - now + 30s. Documented at binarybeachio/docs/architecture/bridge-jwt-replay-protection.md. Public-key transport: BB_BRIDGE_PUBLIC_KEY_URL env points at the in-cluster bridge's /.well-known/bb-bridge.pub.pem (avoids the env-PEM corruption issue Coolify has with backslash-escaped keys). Endpoint is implicitly disabled (404) when env unset — vanilla upstream behavior preserved. Storage patches (Patch 2) unchanged. Brand asset preserved (dormant). Pre-migration source state preserved on branch pre-migration-2026-05-04. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
d950222749
commit
712612865d
10 changed files with 490 additions and 232 deletions
|
|
@ -71,6 +71,17 @@ AUTHENTICATION_ERROR_CODES = {
|
|||
"RATE_LIMIT_EXCEEDED": 5900,
|
||||
# Unknown
|
||||
"AUTHENTICATION_FAILED": 5999,
|
||||
# binarybeachio fork addition (Bucket-4 trusted-JWT entry-point) — see
|
||||
# views/app/trusted.py and BINARYBEACHIO.md. Codes 6000-6099 are reserved
|
||||
# for fork additions to keep them outside the upstream-allocated 5000-5999
|
||||
# range and reduce upstream-merge collision risk.
|
||||
"TRUSTED_JWT_ENDPOINT_DISABLED": 6000,
|
||||
"TRUSTED_JWT_TOKEN_MISSING": 6001,
|
||||
"TRUSTED_JWT_TOKEN_INVALID": 6002,
|
||||
"TRUSTED_JWT_TOKEN_EXPIRED": 6003,
|
||||
"TRUSTED_JWT_TOKEN_REPLAYED": 6004,
|
||||
"TRUSTED_JWT_REPLAY_STORE_DOWN": 6005,
|
||||
"TRUSTED_JWT_KEY_FETCH_FAILED": 6006,
|
||||
}
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -1,41 +1,14 @@
|
|||
# Copyright (c) 2023-present Plane Software, Inc. and contributors
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
# See the LICENSE file for details.
|
||||
#
|
||||
# binarybeachio fork — see BINARYBEACHIO.md at repo root.
|
||||
# This file is patched to repurpose Plane's "GitHub" OAuth provider as a
|
||||
# generic OIDC provider, so we can point /auth/github/ at our self-hosted
|
||||
# Zitadel instance without paying for Plane Pro/Business edition's first-party
|
||||
# OIDC support.
|
||||
#
|
||||
# Touch points kept stable to minimize merge conflicts on Plane upgrades:
|
||||
# - class name `GitHubOAuthProvider` (callers import it by name)
|
||||
# - `provider = "github"` (DB rows keyed on this string)
|
||||
# - env var names `GITHUB_CLIENT_ID` / `GITHUB_CLIENT_SECRET`
|
||||
# - URL routes `/auth/github/...` (frontend hardcodes these)
|
||||
#
|
||||
# What changed:
|
||||
# - `auth_url` / `token_url` / `userinfo_url` are now read from env, default
|
||||
# to the Zitadel instance at $ZITADEL_DOMAIN. If `ZITADEL_DOMAIN` is unset
|
||||
# the original GitHub URLs apply, so vanilla GitHub OAuth still works as a
|
||||
# fallback (lets us re-test against upstream behavior without reverting).
|
||||
# - Scope flipped from "read:user user:email" to "openid email profile" when
|
||||
# pointed at Zitadel (or any OIDC IdP).
|
||||
# - `__get_email` removed — standard OIDC userinfo includes `email` directly.
|
||||
# - User claim mapping switched to OIDC standard: sub, name, given_name,
|
||||
# family_name, email, picture.
|
||||
# - Fixed upstream bug where `expires_in` (a duration in seconds) was being
|
||||
# passed to datetime.fromtimestamp() (which expects an epoch timestamp).
|
||||
# - Dropped `is_user_in_organization` — Zitadel handles authorization itself
|
||||
# via project grants/roles. The `GITHUB_ORGANIZATION_ID` env stays read
|
||||
# (no-op) to avoid breaking deployments that have it set.
|
||||
|
||||
# Python imports
|
||||
import os
|
||||
from datetime import timedelta
|
||||
from datetime import datetime
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from django.utils import timezone
|
||||
import pytz
|
||||
import requests
|
||||
|
||||
from plane.authentication.adapter.error import (
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
|
|
@ -47,30 +20,15 @@ from plane.authentication.adapter.oauth import OauthAdapter
|
|||
from plane.license.utils.instance_value import get_configuration_value
|
||||
|
||||
|
||||
def _zitadel_default(path: str) -> str | None:
|
||||
"""Build a Zitadel endpoint URL from $ZITADEL_DOMAIN if set."""
|
||||
domain = os.environ.get("ZITADEL_DOMAIN")
|
||||
return f"https://{domain}{path}" if domain else None
|
||||
|
||||
|
||||
class GitHubOAuthProvider(OauthAdapter):
|
||||
# Endpoint URLs — env-driven. Defaults derived from $ZITADEL_DOMAIN if set,
|
||||
# falling back to GitHub.com to preserve upstream behavior when unset.
|
||||
token_url = os.environ.get("OIDC_TOKEN_URL") or (
|
||||
_zitadel_default("/oauth/v2/token") or "https://github.com/login/oauth/access_token"
|
||||
)
|
||||
userinfo_url = os.environ.get("OIDC_USERINFO_URL") or (
|
||||
_zitadel_default("/oidc/v1/userinfo") or "https://api.github.com/user"
|
||||
)
|
||||
_auth_url_base = os.environ.get("OIDC_AUTH_URL") or (
|
||||
_zitadel_default("/oauth/v2/authorize") or "https://github.com/login/oauth/authorize"
|
||||
)
|
||||
token_url = "https://github.com/login/oauth/access_token"
|
||||
userinfo_url = "https://api.github.com/user"
|
||||
org_membership_url = "https://api.github.com/orgs"
|
||||
|
||||
provider = "github"
|
||||
scope = "read:user user:email"
|
||||
|
||||
# Scopes — OIDC standard when ZITADEL_DOMAIN is set; GitHub-flavored otherwise
|
||||
# to match unpatched upstream behavior for fallback testing.
|
||||
scope = "openid email profile" if os.environ.get("ZITADEL_DOMAIN") else "read:user user:email"
|
||||
organization_scope = "read:org"
|
||||
|
||||
def __init__(self, request, code=None, state=None, callback=None):
|
||||
GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET, GITHUB_ORGANIZATION_ID = get_configuration_value([
|
||||
|
|
@ -96,13 +54,11 @@ class GitHubOAuthProvider(OauthAdapter):
|
|||
|
||||
client_id = GITHUB_CLIENT_ID
|
||||
client_secret = GITHUB_CLIENT_SECRET
|
||||
# Read but unused — kept for API compatibility with deployments that
|
||||
# had this set under upstream Plane. Authorization in our setup is
|
||||
# handled by Zitadel project grants, not client-side org membership.
|
||||
self.organization_id = GITHUB_ORGANIZATION_ID
|
||||
|
||||
# Build redirect_uri — must match what's registered with the IdP.
|
||||
# Plane's frontend hardcodes /auth/github/callback/ so we keep that path.
|
||||
if self.organization_id:
|
||||
self.scope += f" {self.organization_scope}"
|
||||
|
||||
redirect_uri = f"""{"https" if request.is_secure() else "http"}://{request.get_host()}/auth/github/callback/"""
|
||||
url_params = {
|
||||
"client_id": client_id,
|
||||
|
|
@ -110,21 +66,7 @@ class GitHubOAuthProvider(OauthAdapter):
|
|||
"scope": self.scope,
|
||||
"state": state,
|
||||
}
|
||||
# OIDC requires response_type=code; GitHub OAuth tolerates it.
|
||||
# `prompt=select_account` makes Zitadel show its account chooser even
|
||||
# when only one session exists — the user explicitly chooses which
|
||||
# identity to use rather than being silently passed through. Without
|
||||
# this, the OIDC default is "session exists → log in immediately,"
|
||||
# which is technically correct SSO but is an unfamiliar UX coming
|
||||
# from Google/GitHub style flows that always show a picker.
|
||||
# Override per-request by setting `OIDC_PROMPT=` (empty) or another
|
||||
# value (`login` to force re-auth, `consent` to force consent screen).
|
||||
if os.environ.get("ZITADEL_DOMAIN"):
|
||||
url_params["response_type"] = "code"
|
||||
prompt = os.environ.get("OIDC_PROMPT", "select_account")
|
||||
if prompt:
|
||||
url_params["prompt"] = prompt
|
||||
auth_url = f"{self._auth_url_base}?{urlencode(url_params)}"
|
||||
auth_url = f"https://github.com/login/oauth/authorize?{urlencode(url_params)}"
|
||||
super().__init__(
|
||||
request,
|
||||
self.provider,
|
||||
|
|
@ -141,84 +83,93 @@ class GitHubOAuthProvider(OauthAdapter):
|
|||
|
||||
def set_token_data(self):
|
||||
data = {
|
||||
"grant_type": "authorization_code",
|
||||
"client_id": self.client_id,
|
||||
"client_secret": self.client_secret,
|
||||
"code": self.code,
|
||||
"redirect_uri": self.redirect_uri,
|
||||
}
|
||||
token_response = self.get_user_token(data=data, headers={"Accept": "application/json"})
|
||||
|
||||
# Fix upstream bug: `expires_in` is a duration (seconds) per RFC 6749,
|
||||
# not an epoch timestamp. Compute absolute expiry correctly.
|
||||
expires_in = token_response.get("expires_in")
|
||||
access_token_expired_at = (
|
||||
timezone.now() + timedelta(seconds=int(expires_in)) if expires_in else None
|
||||
)
|
||||
# `refresh_token_expired_at` is non-standard; some IdPs return it as
|
||||
# absolute, some as duration. Zitadel doesn't return it at all. Keep the
|
||||
# original interpretation as-epoch for backward-compat with upstream.
|
||||
refresh_expired_raw = token_response.get("refresh_token_expired_at")
|
||||
if refresh_expired_raw:
|
||||
from datetime import datetime
|
||||
import pytz
|
||||
refresh_token_expired_at = datetime.fromtimestamp(refresh_expired_raw, tz=pytz.utc)
|
||||
else:
|
||||
refresh_token_expired_at = None
|
||||
|
||||
super().set_token_data({
|
||||
"access_token": token_response.get("access_token"),
|
||||
"refresh_token": token_response.get("refresh_token", None),
|
||||
"access_token_expired_at": access_token_expired_at,
|
||||
"refresh_token_expired_at": refresh_token_expired_at,
|
||||
"access_token_expired_at": (
|
||||
datetime.fromtimestamp(token_response.get("expires_in"), tz=pytz.utc)
|
||||
if token_response.get("expires_in")
|
||||
else None
|
||||
),
|
||||
"refresh_token_expired_at": (
|
||||
datetime.fromtimestamp(token_response.get("refresh_token_expired_at"), tz=pytz.utc)
|
||||
if token_response.get("refresh_token_expired_at")
|
||||
else None
|
||||
),
|
||||
"id_token": token_response.get("id_token", ""),
|
||||
})
|
||||
|
||||
def set_user_data(self):
|
||||
user_info_response = self.get_user_response()
|
||||
|
||||
# Claim mapping. When ZITADEL_DOMAIN is set, use OIDC standard claims;
|
||||
# otherwise fall back to GitHub's quirky shape (no email in userinfo,
|
||||
# `name` instead of `given_name`/`family_name`).
|
||||
if os.environ.get("ZITADEL_DOMAIN"):
|
||||
email = user_info_response.get("email")
|
||||
if not email:
|
||||
def __get_email(self, headers):
|
||||
try:
|
||||
# Github does not provide email in user response
|
||||
emails_url = "https://api.github.com/user/emails"
|
||||
emails_response = requests.get(emails_url, headers=headers).json()
|
||||
# Ensure the response is a list before iterating
|
||||
if not isinstance(emails_response, list):
|
||||
self.logger.error("Unexpected response format from GitHub emails API")
|
||||
raise AuthenticationException(
|
||||
error_code=AUTHENTICATION_ERROR_CODES["GITHUB_OAUTH_PROVIDER_ERROR"],
|
||||
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
|
||||
)
|
||||
super().set_user_data({
|
||||
"email": email,
|
||||
"user": {
|
||||
"provider_id": user_info_response.get("sub"),
|
||||
"email": email,
|
||||
"avatar": user_info_response.get("picture"),
|
||||
"first_name": user_info_response.get("given_name") or user_info_response.get("name", "").split(" ", 1)[0],
|
||||
"last_name": user_info_response.get("family_name") or (user_info_response.get("name", "").split(" ", 1)[1] if " " in user_info_response.get("name", "") else ""),
|
||||
"is_password_autoset": True,
|
||||
},
|
||||
})
|
||||
return
|
||||
email = next((email["email"] for email in emails_response if email["primary"]), None)
|
||||
if not email:
|
||||
self.logger.error("No primary email found for user")
|
||||
raise AuthenticationException(
|
||||
error_code=AUTHENTICATION_ERROR_CODES["GITHUB_OAUTH_PROVIDER_ERROR"],
|
||||
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
|
||||
)
|
||||
return email
|
||||
except requests.RequestException:
|
||||
self.logger.warning(
|
||||
"Error getting email from GitHub",
|
||||
)
|
||||
raise AuthenticationException(
|
||||
error_code=AUTHENTICATION_ERROR_CODES["GITHUB_OAUTH_PROVIDER_ERROR"],
|
||||
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
|
||||
)
|
||||
|
||||
# Fallback: vanilla GitHub OAuth — keep upstream behavior. Email comes
|
||||
# from a separate /user/emails call.
|
||||
import requests
|
||||
def is_user_in_organization(self, github_username):
|
||||
headers = {"Authorization": f"Bearer {self.token_data.get('access_token')}"}
|
||||
response = requests.get(
|
||||
f"{self.org_membership_url}/{self.organization_id}/memberships/{github_username}",
|
||||
headers=headers,
|
||||
)
|
||||
return response.status_code == 200 # 200 means the user is a member
|
||||
|
||||
def set_user_data(self):
|
||||
user_info_response = self.get_user_response()
|
||||
headers = {
|
||||
"Authorization": f"Bearer {self.token_data.get('access_token')}",
|
||||
"Accept": "application/json",
|
||||
}
|
||||
emails_response = requests.get("https://api.github.com/user/emails", headers=headers).json()
|
||||
if not isinstance(emails_response, list):
|
||||
raise AuthenticationException(
|
||||
error_code=AUTHENTICATION_ERROR_CODES["GITHUB_OAUTH_PROVIDER_ERROR"],
|
||||
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
|
||||
)
|
||||
email = next((e["email"] for e in emails_response if e["primary"]), None)
|
||||
if not email:
|
||||
raise AuthenticationException(
|
||||
error_code=AUTHENTICATION_ERROR_CODES["GITHUB_OAUTH_PROVIDER_ERROR"],
|
||||
error_message="GITHUB_OAUTH_PROVIDER_ERROR",
|
||||
)
|
||||
|
||||
if self.organization_id:
|
||||
if not self.is_user_in_organization(user_info_response.get("login")):
|
||||
self.logger.warning(
|
||||
"User is not in organization",
|
||||
extra={
|
||||
"organization_id": self.organization_id,
|
||||
"user_login": user_info_response.get("login"),
|
||||
},
|
||||
)
|
||||
raise AuthenticationException(
|
||||
error_code=AUTHENTICATION_ERROR_CODES["GITHUB_USER_NOT_IN_ORG"],
|
||||
error_message="GITHUB_USER_NOT_IN_ORG",
|
||||
)
|
||||
|
||||
email = self.__get_email(headers=headers)
|
||||
self.logger.debug(
|
||||
"Email found",
|
||||
extra={
|
||||
"email": email,
|
||||
},
|
||||
)
|
||||
super().set_user_data({
|
||||
"email": email,
|
||||
"user": {
|
||||
|
|
|
|||
|
|
@ -44,6 +44,8 @@ from .views import (
|
|||
GiteaOauthInitiateEndpoint,
|
||||
GiteaCallbackSpaceEndpoint,
|
||||
GiteaOauthInitiateSpaceEndpoint,
|
||||
# binarybeachio fork addition — see views/app/trusted.py.
|
||||
TrustedSignInEndpoint,
|
||||
)
|
||||
|
||||
urlpatterns = [
|
||||
|
|
@ -150,4 +152,7 @@ urlpatterns = [
|
|||
GiteaCallbackSpaceEndpoint.as_view(),
|
||||
name="space-gitea-callback",
|
||||
),
|
||||
# binarybeachio fork addition — Bucket-4 trusted-JWT entry-point.
|
||||
# See views/app/trusted.py and BINARYBEACHIO.md.
|
||||
path("sign-in-trusted/", TrustedSignInEndpoint.as_view(), name="sign-in-trusted"),
|
||||
]
|
||||
|
|
|
|||
|
|
@ -41,3 +41,7 @@ from .space.password_management import (
|
|||
ResetPasswordSpaceEndpoint,
|
||||
)
|
||||
from .app.password_management import ForgotPasswordEndpoint, ResetPasswordEndpoint
|
||||
|
||||
# binarybeachio fork addition (Bucket-4 trusted-JWT entry-point) — see
|
||||
# views/app/trusted.py and BINARYBEACHIO.md.
|
||||
from .app.trusted import TrustedSignInEndpoint
|
||||
|
|
|
|||
271
apps/api/plane/authentication/views/app/trusted.py
Normal file
271
apps/api/plane/authentication/views/app/trusted.py
Normal file
|
|
@ -0,0 +1,271 @@
|
|||
# Copyright (c) 2023-present Plane Software, Inc. and contributors
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
# See the LICENSE file for details.
|
||||
#
|
||||
# binarybeachio fork addition — see BINARYBEACHIO.md at repo root.
|
||||
#
|
||||
# Bucket-4 trusted-JWT entry-point. Validates a short-lived RS256 JWT signed
|
||||
# by the binarybeachio auth-bridge (private key BRIDGE_SIGNING_KEY), enforces
|
||||
# single-use replay protection via shared-redis SETNX (per the contract in
|
||||
# `binarybeachio/docs/architecture/bridge-jwt-replay-protection.md`), then
|
||||
# finds-or-creates the corresponding User and starts a Django session via
|
||||
# the existing user_login() helper.
|
||||
#
|
||||
# Endpoint behavior when not configured:
|
||||
# - If BB_BRIDGE_PUBLIC_KEY_URL env is unset → 404 (endpoint disabled).
|
||||
# Vanilla upstream behavior is preserved out-of-the-box; the trusted-JWT
|
||||
# entry-point only exists in deployments that explicitly opt in.
|
||||
#
|
||||
# Public-key transport:
|
||||
# - Fetched at request time from BB_BRIDGE_PUBLIC_KEY_URL (typically
|
||||
# `http://auth-bridge-<uuid>:3000/.well-known/bb-bridge.pub.pem`).
|
||||
# - Cached in-process for 5 minutes; auto-refreshed on signature failure
|
||||
# to handle bridge key rotation transparently.
|
||||
# - This sidesteps the env-PEM corruption issue: putting RSA PEMs through
|
||||
# Coolify's .env writer escapes backslashes (`\n` → `\\n`), which
|
||||
# corrupts the multi-line PEM. HTTP fetch never traverses that path.
|
||||
# See bb-activepieces-fork/.../trusted-jwt-verifier.ts module-doc for
|
||||
# the original write-up.
|
||||
#
|
||||
# Replay protection:
|
||||
# - Bridge mints with a UUIDv4 `jti` claim.
|
||||
# - This view atomically SETNX `bb_bridge_jti:<jti>` in shared-redis with
|
||||
# TTL = (exp - now) + 30s clock-skew tolerance.
|
||||
# - Fail closed: if Redis is unavailable, REJECT. Auth correctness >
|
||||
# auth availability; break-glass admin (email+password) covers operator
|
||||
# access during a Redis outage.
|
||||
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from typing import Optional, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import jwt as pyjwt
|
||||
import redis
|
||||
import requests
|
||||
from django.http import HttpResponseRedirect, HttpResponseNotFound
|
||||
from django.views import View
|
||||
|
||||
from plane.authentication.adapter.error import (
|
||||
AUTHENTICATION_ERROR_CODES,
|
||||
AuthenticationException,
|
||||
)
|
||||
from plane.authentication.utils.host import base_host
|
||||
from plane.authentication.utils.login import user_login
|
||||
from plane.authentication.utils.redirection_path import get_redirection_path
|
||||
from plane.authentication.utils.user_auth_workflow import post_user_auth_workflow
|
||||
from plane.db.models import User
|
||||
from plane.settings.redis import redis_instance
|
||||
from plane.utils.path_validator import get_safe_redirect_url
|
||||
|
||||
# Module logger — namespaced under plane.authentication for log routing.
log = logging.getLogger("plane.authentication.trusted")

# Audience the bridge sets in JWTs minted for Plane (signBridgeJwt(..., audience: 'plane')).
_EXPECTED_AUDIENCE = "plane"
# Issuer the bridge sets (every adapter shares this).
_EXPECTED_ISSUER = "bb-bridge"
# Replay-store key prefix per bridge-jwt-replay-protection.md.
_JTI_KEY_PREFIX = "bb_bridge_jti:"
# Clock-skew tolerance (seconds) applied both as pyjwt decode leeway and as
# extra TTL padding on the replay-store key.
_CLOCK_SKEW_SECONDS = 30
# Public-key cache (in-process). Keyed on URL so test/dev with multiple
# bridges per process is safe. _key_cache: {url: (pem, fetched_at_epoch)}.
_KEY_CACHE_TTL_SECONDS = 5 * 60
_key_cache: dict[str, Tuple[str, float]] = {}
|
||||
|
||||
|
||||
def _bridge_public_key_url() -> Optional[str]:
|
||||
"""Returns the configured bridge public-key URL, or None if disabled.
|
||||
|
||||
The endpoint is implicitly disabled (returns 404) when this env is unset —
|
||||
the regression-safe default for builds shipped without the bridge wired up.
|
||||
"""
|
||||
return os.environ.get("BB_BRIDGE_PUBLIC_KEY_URL") or None
|
||||
|
||||
|
||||
def _fetch_bridge_public_key(url: str, *, force_refresh: bool = False) -> str:
    """Return the bridge's public-key PEM, served from an in-process cache.

    A cache entry for `url` is reused for _KEY_CACHE_TTL_SECONDS unless
    `force_refresh` is set (the signature-failure path). When a refresh
    attempt fails but a cached entry exists, the stale PEM is returned
    instead of raising — a briefly unreachable bridge should not brick
    logins. With no cached entry, the fetch error propagates.
    """
    started = time.time()
    entry = _key_cache.get(url)
    if entry is not None and not force_refresh:
        if started - entry[1] < _KEY_CACHE_TTL_SECONDS:
            return entry[0]

    try:
        response = requests.get(
            url, timeout=3.0, headers={"accept": "application/x-pem-file"}
        )
        response.raise_for_status()
        pem = response.text
        # Sanity-check the body so an HTML error page never gets cached as a key.
        if "-----BEGIN PUBLIC KEY-----" not in pem:
            raise ValueError(f"non-PEM body from {url} (first 80: {pem[:80]!r})")
    except Exception as exc:
        if entry is None:
            raise
        log.warning(
            "bridge public-key fetch failed; using stale cache",
            extra={"url": url, "err": str(exc)},
        )
        return entry[0]

    _key_cache[url] = (pem, started)
    return pem
|
||||
|
||||
|
||||
def _consume_jti(jti: str, exp_epoch: int) -> Tuple[bool, Optional[str]]:
|
||||
"""Atomically mark a `jti` consumed in shared-redis. Returns (first_use, error_code).
|
||||
|
||||
- (True, None) → not previously consumed; admit the request.
|
||||
- (False, code) → either already consumed (TRUSTED_JWT_TOKEN_REPLAYED) or
|
||||
the replay store is unavailable (TRUSTED_JWT_REPLAY_STORE_DOWN). Either
|
||||
way, REJECT the request (fail closed).
|
||||
|
||||
TTL = (exp - now) + 30s clock-skew tolerance, with a 30s minimum floor for
|
||||
edge cases where exp is already past at consumption time (signature still
|
||||
valid under clock-skew tolerance).
|
||||
"""
|
||||
if not jti or not exp_epoch:
|
||||
return False, "TRUSTED_JWT_TOKEN_INVALID"
|
||||
try:
|
||||
client = redis_instance()
|
||||
except Exception as exc:
|
||||
log.error("replay store init failed", extra={"err": str(exc)})
|
||||
return False, "TRUSTED_JWT_REPLAY_STORE_DOWN"
|
||||
try:
|
||||
ttl = max(int(exp_epoch - time.time()) + _CLOCK_SKEW_SECONDS, 30)
|
||||
# SET key value NX EX ttl -- returns True on first-set, None if already set.
|
||||
ok = client.set(_JTI_KEY_PREFIX + jti, "1", nx=True, ex=ttl)
|
||||
if ok is None:
|
||||
return False, "TRUSTED_JWT_TOKEN_REPLAYED"
|
||||
return True, None
|
||||
except redis.RedisError as exc:
|
||||
log.error("replay store SETNX failed", extra={"err": str(exc), "jti": jti})
|
||||
return False, "TRUSTED_JWT_REPLAY_STORE_DOWN"
|
||||
|
||||
|
||||
def _redirect_with_error(request, error_code: str, error_message: str, next_path: str) -> HttpResponseRedirect:
    """Surface the failure as a Plane-style redirect with error query params.

    `error_code` must be a key of AUTHENTICATION_ERROR_CODES; the numeric
    code and message are carried as query parameters on a same-host
    redirect constrained by get_safe_redirect_url.
    """
    failure = AuthenticationException(
        error_code=AUTHENTICATION_ERROR_CODES[error_code],
        error_message=error_message,
    )
    destination = get_safe_redirect_url(
        base_url=base_host(request=request, is_app=True),
        next_path=next_path,
        params=failure.get_error_dict(),
    )
    return HttpResponseRedirect(destination)
|
||||
|
||||
|
||||
def _verify_with_retry(token: str, public_key_url: str) -> dict:
    """Verify the bridge JWT, refetching the public key once on signature
    failure to transparently handle bridge key rotation.

    Only InvalidSignatureError triggers a refetch — other verify failures
    (expired, wrong issuer/audience, malformed) indicate tampering or clock
    issues, not key drift, and propagate to the caller.

    Raises:
        pyjwt.InvalidTokenError subclasses on verification failure; whatever
        _fetch_bridge_public_key raises when the key cannot be obtained.
    """
    def _decode(pem: str) -> dict:
        # Single source of truth for the decode parameters so the retry
        # path can never drift from the first attempt (the original body
        # duplicated this call verbatim).
        return pyjwt.decode(
            token,
            pem,
            algorithms=["RS256"],
            audience=_EXPECTED_AUDIENCE,
            issuer=_EXPECTED_ISSUER,
            leeway=_CLOCK_SKEW_SECONDS,
            options={"require": ["exp", "iat", "sub", "email", "jti"]},
        )

    try:
        return _decode(_fetch_bridge_public_key(public_key_url))
    except pyjwt.InvalidSignatureError:
        log.warning(
            "trusted-jwt signature failed; refetching bridge key",
            extra={"url": public_key_url},
        )
        return _decode(_fetch_bridge_public_key(public_key_url, force_refresh=True))
|
||||
|
||||
|
||||
class TrustedSignInEndpoint(View):
    """GET /auth/sign-in-trusted/?token=<jwt>&next_path=<rel-path>

    The bridge 302s the browser here after a successful oauth2-proxy session
    is established. We verify the JWT, claim its `jti` to prevent replay,
    find-or-create the User, and call user_login() to set the Django session
    cookie. Then 302 the user to next_path on the same host.
    """

    def get(self, request):
        public_key_url = _bridge_public_key_url()
        if not public_key_url:
            # Endpoint disabled — bridge not wired up in this deployment.
            return HttpResponseNotFound()

        # Keep the raw query value separate from the error-redirect default.
        # Error exits need *some* landing path ("/"), but the success path
        # should only prefer next_path when the caller actually supplied one
        # — otherwise get_redirection_path() picks the user's destination.
        # (Applying the "/" default up-front made that fallback unreachable.)
        requested_next = request.GET.get("next_path")
        next_path = requested_next or "/"

        token = request.GET.get("token")
        if not token:
            return _redirect_with_error(
                request, "TRUSTED_JWT_TOKEN_MISSING", "TRUSTED_JWT_TOKEN_MISSING", next_path
            )

        try:
            claims = _verify_with_retry(token, public_key_url)
        except pyjwt.ExpiredSignatureError:
            return _redirect_with_error(
                request, "TRUSTED_JWT_TOKEN_EXPIRED", "TRUSTED_JWT_TOKEN_EXPIRED", next_path
            )
        except pyjwt.InvalidTokenError as e:
            log.warning("trusted-jwt invalid", extra={"err_class": e.__class__.__name__})
            return _redirect_with_error(
                request,
                "TRUSTED_JWT_TOKEN_INVALID",
                f"TRUSTED_JWT_TOKEN_INVALID: {e.__class__.__name__}",
                next_path,
            )
        except Exception as e:
            # _verify_with_retry only raises non-JWT errors from the key
            # fetch itself (HTTP failure, non-PEM body).
            log.error("trusted-jwt key fetch failed", extra={"err": str(e)})
            return _redirect_with_error(
                request, "TRUSTED_JWT_KEY_FETCH_FAILED", "TRUSTED_JWT_KEY_FETCH_FAILED", next_path
            )

        # Replay enforcement — atomic SETNX in shared-redis. Fail closed.
        first_use, replay_err = _consume_jti(claims.get("jti", ""), int(claims.get("exp", 0)))
        if not first_use:
            log.warning(
                "trusted-jwt rejected by replay-store",
                extra={"jti": claims.get("jti"), "sub": claims.get("sub"), "code": replay_err},
            )
            code = replay_err or "TRUSTED_JWT_TOKEN_REPLAYED"
            return _redirect_with_error(request, code, code, next_path)

        # Normalize email — it is the unique natural key for lookup below.
        email = (claims.get("email") or "").strip().lower()
        if not email:
            return _redirect_with_error(
                request, "TRUSTED_JWT_TOKEN_INVALID", "TRUSTED_JWT_TOKEN_NO_EMAIL", next_path
            )

        # Find-or-create. Plane's User model uses email as a unique natural key;
        # other OAuth providers do the same lookup via the OauthAdapter base.
        # We mirror that behavior here without going through OauthAdapter — this
        # endpoint is a NEW entry-point, not a fifth OAuth provider.
        user, created = User.objects.get_or_create(
            email=email,
            defaults={
                "first_name": claims.get("first_name") or claims.get("given_name") or "",
                "last_name": claims.get("last_name") or claims.get("family_name") or "",
                "is_password_autoset": True,
            },
        )

        # Plane's existing post-auth workflow (default workspace, invitations, etc.)
        post_user_auth_workflow(user=user, is_signup=created, request=request)

        # Set Django session cookie via the existing helper.
        user_login(request=request, user=user, is_app=True)

        log.info(
            "trusted-jwt sign-in",
            extra={
                "jti": claims.get("jti"),
                "sub": claims.get("sub"),
                "email": email,
                "tenant": claims.get("tenant"),
                "created": created,
            },
        )

        # Honor an explicitly requested path; otherwise let Plane compute
        # the user's post-login destination.
        target = requested_next or get_redirection_path(user=user)
        return HttpResponseRedirect(
            get_safe_redirect_url(
                base_url=base_host(request=request, is_app=True),
                next_path=target,
                params={},
            )
        )
|
||||
Loading…
Add table
Add a link
Reference in a new issue