Add a code review step which uses a short-lived provider token (#7932)

This commit is contained in:
Jack Amadeo
2026-03-24 08:57:40 -04:00
committed by GitHub
parent ce160b183d
commit 928f4ac46a
11 changed files with 1165 additions and 0 deletions

44
.github/recipes/code-review.yaml vendored Normal file
View File

@@ -0,0 +1,44 @@
version: "1.0.0"
title: GitHub PR Code Review
description: Perform a code review of a GitHub pull request.
parameters:
- key: pr_directory
input_type: string
requirement: required
description: Path to the directory with pr.md and pr.diff
- key: instructions
input_type: string
requirement: required
description: Specific instructions for the code review.
extensions:
- type: builtin
name: developer
- type: stdio
name: code_review
cmd: uv
args:
- run
- '{{ recipe_dir }}/../scripts/pr-review-mcp.py'
prompt: |
Review the code changes downloaded from a GitHub pull request.
The PR metadata is located at {{ pr_directory }}/pr.md.
The proposed diff you are to review is located at {{ pr_directory }}/pr.diff.
The base branch is checked out in the working directory.
Use the tools you have to review the diff and examine the code changes and context.
Use the code review tools to add feedback on specific parts of the diff.
There is no need to call the finish_review tool unless absolutely necessary to add
summary content not covered by inline comments.
Be concise with your comments. Just a few sentences per comment at most, with no
extra formatting. Just the gist of the problem is enough.
** Important **
Don't add nit-pick comments and avoid matters of opinion.
Adhere closely to the code review instructions below.
Don't add feedback outside the scope of the instructions below.
# Code review instructions
{{ instructions }}

84
.github/scripts/pr-review-mcp.py vendored Executable file
View File

@@ -0,0 +1,84 @@
#!/usr/bin/env -S uv run --script
# /// script
# requires-python = ">=3.11"
# dependencies = ["mcp"]
# ///
"""MCP server for collecting PR review comments and conclusion."""
import json
import os
from pathlib import Path
from mcp.server.fastmcp import FastMCP
server = FastMCP("pr-review")
output_dir = Path(os.environ.get("REVIEW_OUTPUT_DIR", "/tmp"))
def _append_comment(comment: dict) -> int:
    """Persist *comment* to comments.json and return how many comments exist now.

    The file is re-read on every call, so comments accumulate across
    successive tool invocations within the same server process.
    """
    comments_path = output_dir / "comments.json"
    existing: list = []
    if comments_path.exists():
        existing = json.loads(comments_path.read_text())
    existing.append(comment)
    comments_path.write_text(json.dumps(existing, indent=2))
    return len(existing)
@server.tool()
def add_comment(
    path: str,
    line: int,
    body: str,
    suggestion: str | None = None,
    side: str = "RIGHT",
    start_line: int | None = None,
) -> str:
    """Add a review comment on a specific line in the PR diff.

    Args:
        path: The relative file path in the repository (e.g. "src/main.rs").
        line: The line number in the file that the comment applies to.
            For added or modified lines, use the line number in the new version of the file (side=RIGHT).
            For deleted lines, use the line number in the old version of the file (side=LEFT).
        body: The review comment text (Markdown supported).
        suggestion: Optional replacement code for the line(s). When provided, GitHub renders an
            "Apply suggestion" button the author can click. The suggestion replaces the
            entire line (or range if start_line is set).
        side: Which version of the file the line number refers to.
            "RIGHT" for the new/modified version (default), "LEFT" for the old/deleted version.
        start_line: For multi-line comments, the first line of the range. When set, `line` is the last line.
    """
    # A suggestion is appended to the body as a GitHub ```suggestion``` block.
    if suggestion is not None:
        fenced = f"```suggestion\n{suggestion}\n```"
        body = f"{body}\n\n{fenced}" if body else fenced

    comment: dict = {"path": path, "line": line, "side": side, "body": body}
    # Multi-line comments need both ends of the range, anchored to the same side.
    if start_line is not None:
        comment["start_line"] = start_line
        comment["start_side"] = side

    total = _append_comment(comment)
    return f"Comment added on {path}:{line} ({total} total)."
@server.tool()
def finish_review(body: str = "") -> str:
    """Finish the review.

    Args:
        body: Optional top-level review body (Markdown supported). Only include if it
            contains information not already covered by inline comments. Most reviews
            should leave this empty.
    """
    # Event is always COMMENT: the workflow submits a non-blocking review.
    payload = {"body": body, "event": "COMMENT"}
    (output_dir / "conclusion.json").write_text(json.dumps(payload, indent=2))
    return "Review finished."
if __name__ == "__main__":
server.run(transport="stdio")

210
.github/workflows/code-review.yml vendored Normal file
View File

@@ -0,0 +1,210 @@
name: Code Review
on:
workflow_dispatch:
inputs:
pr_number:
description: 'PR number to review'
required: true
type: number
oidc_proxy_url:
description: 'OIDC proxy URL (overrides repo variable)'
required: false
type: string
review_instructions:
description: 'Instructions for the code review'
required: false
default: 'Review the changes for correctness'
type: string
# pull_request_target:
# types: [opened, synchronize, reopened]
concurrency:
group: code-review-${{ github.event.pull_request.number || github.event.inputs.pr_number }}
cancel-in-progress: true
jobs:
prepare:
runs-on: ubuntu-latest
permissions:
id-token: write
contents: read
pull-requests: read
steps:
- name: Mint OIDC token
id: oidc
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
env:
TOKEN_AUDIENCE: goose-oidc-proxy
with:
script: |
const token = await core.getIDToken(process.env.TOKEN_AUDIENCE);
core.setOutput('token', token);
core.setSecret(token);
- name: Gather PR metadata and diff
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
PR_NUMBER=${{ github.event.pull_request.number || github.event.inputs.pr_number }}
mkdir -p /tmp/code-review
gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json title --jq '.title' > /tmp/code-review/title.txt
gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json body --jq '.body // "(no description)"' > /tmp/code-review/body.txt
gh pr view "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --json headRefOid --jq '.headRefOid' > /tmp/code-review/commit_id.txt
gh pr diff "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" > /tmp/code-review/pr.diff
TITLE=$(cat /tmp/code-review/title.txt)
BODY=$(cat /tmp/code-review/body.txt)
printf '# %s\n\n## Description\n\n%s\n' "$TITLE" "$BODY" > /tmp/code-review/pr.md
- name: Write OIDC token to file
run: echo "$OIDC_TOKEN" > /tmp/code-review/oidc-token.txt
env:
OIDC_TOKEN: ${{ steps.oidc.outputs.token }}
- name: Upload review inputs
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: code-review-inputs
path: /tmp/code-review/
retention-days: 1
review:
needs: prepare
runs-on: ubuntu-latest
permissions: {}
timeout-minutes: 15
container:
image: ghcr.io/block/goose:latest
options: --user root
env:
HOME: /tmp/goose-home
steps:
- name: Checkout base branch
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Install tools
run: |
apt-get update
apt-get install -y ripgrep jq
- name: Download review inputs
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: code-review-inputs
path: /tmp/code-review
- name: Install custom provider and run goose review
env:
GOOSE_PROVIDER: anthropic-oidc-proxy
GOOSE_MODEL: ${{ vars.GOOSE_CODE_REVIEW_MODEL || 'claude-opus-4-6' }}
OIDC_PROXY_URL: ${{ github.event.inputs.oidc_proxy_url || vars.OIDC_PROXY_URL }}
REVIEW_INSTRUCTIONS: ${{ github.event.inputs.review_instructions || 'Review the changes for correctness' }}
run: |
ANTHROPIC_OIDC_PROXY_API_KEY=$(cat /tmp/code-review/oidc-token.txt)
export ANTHROPIC_OIDC_PROXY_API_KEY
rm /tmp/code-review/oidc-token.txt
mkdir -p "$HOME/.local/share/goose/sessions"
mkdir -p "$HOME/.config/goose/custom_providers"
cp oidc-proxy/anthropic-oidc-proxy.json "$HOME/.config/goose/custom_providers/"
goose run \
--recipe .github/recipes/code-review.yaml \
--params pr_directory=/tmp/code-review \
--params instructions="$REVIEW_INSTRUCTIONS"
- name: Upload review output
if: always()
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6
with:
name: code-review-output
path: |
/tmp/conclusion.json
/tmp/comments.json
if-no-files-found: ignore
retention-days: 30
submit:
needs: [prepare, review]
runs-on: ubuntu-latest
permissions:
pull-requests: write
if: always() && needs.review.result == 'success'
steps:
- name: Download review inputs
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: code-review-inputs
path: /tmp/inputs
- name: Download review output
uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8
with:
name: code-review-output
path: /tmp/output
- name: Submit review
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
PR_NUMBER: ${{ github.event.pull_request.number || github.event.inputs.pr_number }}
run: |
CONCLUSION="/tmp/output/conclusion.json"
COMMENTS="/tmp/output/comments.json"
COMMIT_ID=$(cat /tmp/inputs/commit_id.txt)
if [ ! -f "$COMMENTS" ]; then
echo "No comments produced, skipping review submission."
exit 0
fi
if [ ! -f "$CONCLUSION" ]; then
echo '{"body":"","event":"COMMENT"}' > "$CONCLUSION"
fi
jq -n \
--arg commit_id "$COMMIT_ID" \
--slurpfile conclusion "$CONCLUSION" \
--slurpfile comments "$COMMENTS" \
'{
commit_id: $commit_id,
event: $conclusion[0].event,
comments: $comments[0]
}
| if $conclusion[0].body != "" then .body = $conclusion[0].body else . end
' > /tmp/review-payload.json
if gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
"repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/reviews" \
--input /tmp/review-payload.json; then
echo "Review submitted with inline comments."
exit 0
fi
echo "Inline comments failed, folding into review body."
jq -n \
--arg commit_id "$COMMIT_ID" \
--slurpfile conclusion "$CONCLUSION" \
--slurpfile comments "$COMMENTS" \
'{
commit_id: $commit_id,
body: (
$conclusion[0].body + "\n\n---\n\n## Inline Comments\n\n" +
([$comments[0][] |
"### `" + .path + "` (line " + (.line | tostring) + ")\n\n" + .body
] | join("\n\n---\n\n"))
),
event: $conclusion[0].event
}' \
| gh api \
--method POST \
-H "Accept: application/vnd.github+json" \
"repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/reviews" \
--input -

4
oidc-proxy/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
node_modules/
dist/
.wrangler/
.dev.vars

111
oidc-proxy/README.md Normal file
View File

@@ -0,0 +1,111 @@
# OIDC Proxy
A Cloudflare Worker that authenticates GitHub Actions OIDC tokens and proxies requests to an upstream API with an injected API key. This lets CI workflows call APIs without storing long-lived secrets in GitHub.
## How it works
```
GitHub Actions (OIDC token) → Worker (validate JWT, inject API key) → Upstream API
```
1. A GitHub Actions workflow mints an OIDC token with a configured audience
2. The workflow sends requests to this proxy, passing the OIDC token as the API key
3. The worker validates the JWT against GitHub's JWKS, checks issuer/audience/age/repo
4. If valid, the request is forwarded to the upstream API with the real API key injected
## Setup
```bash
cd oidc-proxy
npm install
```
## Configuration
Edit `wrangler.toml` for your upstream:
| Variable | Description |
|---|---|
| `OIDC_ISSUER` | `https://token.actions.githubusercontent.com` |
| `OIDC_AUDIENCE` | The audience your workflow requests (e.g. `goose-oidc-proxy`) |
| `MAX_TOKEN_AGE_SECONDS` | Max age of OIDC token in seconds (default: `1200` = 20 min) |
| `MAX_REQUESTS_PER_TOKEN` | Max requests per OIDC token (default: `200`) |
| `RATE_LIMIT_PER_SECOND` | Max requests per second per token (default: `2`) |
| `ALLOWED_REPOS` | *(optional)* Comma-separated `owner/repo` list |
| `ALLOWED_REFS` | *(optional)* Comma-separated allowed refs |
| `UPSTREAM_URL` | The upstream API base URL |
| `UPSTREAM_AUTH_HEADER` | Header name for the API key (e.g. `x-api-key`, `Authorization`) |
| `UPSTREAM_AUTH_PREFIX` | *(optional)* Prefix before the key (e.g. `Bearer `) — omit for raw value |
| `CORS_ORIGIN` | *(optional)* Allowed CORS origin |
| `CORS_EXTRA_HEADERS` | *(optional)* Additional CORS allowed headers |
Set your upstream API key as a secret:
```bash
npx wrangler secret put UPSTREAM_API_KEY
```
### Example: Anthropic
```toml
UPSTREAM_URL = "https://api.anthropic.com"
UPSTREAM_AUTH_HEADER = "x-api-key"
CORS_EXTRA_HEADERS = "anthropic-version"
```
### Example: OpenAI-compatible
```toml
UPSTREAM_URL = "https://api.openai.com"
UPSTREAM_AUTH_HEADER = "Authorization"
UPSTREAM_AUTH_PREFIX = "Bearer "
```
## Usage in GitHub Actions
```yaml
permissions:
id-token: write
steps:
- name: Get OIDC token
id: oidc
uses: actions/github-script@v7
with:
script: |
const token = await core.getIDToken('goose-oidc-proxy');
core.setOutput('token', token);
core.setSecret(token);
- name: Call API through proxy
env:
ANTHROPIC_BASE_URL: https://oidc-proxy.your-subdomain.workers.dev
ANTHROPIC_API_KEY: ${{ steps.oidc.outputs.token }}
run: goose run --recipe my-recipe.yaml
```
## Testing
```bash
npm test
```
## Deploy
```bash
npx wrangler secret put UPSTREAM_API_KEY
npm run deploy
```
## Token budget and rate limiting
Each OIDC token is tracked by its `jti` (JWT ID) claim using a Durable Object. This provides:
- **Budget**: Each token is limited to `MAX_REQUESTS_PER_TOKEN` total requests (default: 200). Once exhausted, the proxy returns `429` with `{"error": "Token budget exhausted"}`.
- **Rate limit**: Each token is limited to `RATE_LIMIT_PER_SECOND` requests per second (default: 2). When exceeded, the proxy returns `429` with `{"error": "Rate limit exceeded"}` and a `Retry-After: 1` header.
Both limits are enforced atomically — the Durable Object processes one request at a time per token, so there are no race conditions.
## Token age vs expiry
GitHub OIDC tokens expire after ~5 minutes. For longer-running jobs, set `MAX_TOKEN_AGE_SECONDS` to allow recently-expired tokens. When set, the proxy checks the token's `iat` (issued-at) claim instead of `exp`.

View File

@@ -0,0 +1,27 @@
{
"name": "anthropic-oidc-proxy",
"engine": "anthropic",
"display_name": "Anthropic (OIDC Proxy)",
"description": "Anthropic API via OIDC proxy — uses a short-lived OIDC token instead of a long-lived API key",
"api_key_env": "ANTHROPIC_OIDC_PROXY_API_KEY",
"base_url": "${OIDC_PROXY_URL}",
"models": [
{
"name": "claude-opus-4-6",
"context_limit": 200000
},
{
"name": "claude-sonnet-4-6",
"context_limit": 200000
}
],
"supports_streaming": true,
"env_vars": [
{
"name": "OIDC_PROXY_URL",
"required": true,
"secret": false,
"description": "URL of the OIDC proxy worker (e.g. https://oidc-proxy.your-subdomain.workers.dev)"
}
]
}

15
oidc-proxy/package.json Normal file
View File

@@ -0,0 +1,15 @@
{
"name": "oidc-proxy",
"version": "1.0.0",
"private": true,
"scripts": {
"dev": "wrangler dev",
"deploy": "wrangler deploy",
"test": "vitest run"
},
"devDependencies": {
"wrangler": "^4.0.0",
"vitest": "^3.0.0",
"@cloudflare/vitest-pool-workers": "^0.8.0"
}
}

323
oidc-proxy/src/index.js Normal file
View File

@@ -0,0 +1,323 @@
// Durable Object keyed by OIDC token `jti`: enforces a lifetime request
// budget and a one-second sliding-window rate limit. The Workers runtime
// serializes requests per object instance, so both checks are race-free.
export class TokenBucket {
  constructor(state) {
    this.state = state;
    // Total requests ever made with this token; persisted via storage.
    this.count = 0;
    // Timestamps (ms) of requests within the last second. Kept in memory
    // only, so the rate window resets if the object is evicted — the
    // budget (`count`) does not, because it is stored durably.
    this.timestamps = [];
    this.initialized = false;
  }
  // Lazily restore the persisted count on the first request after a cold start.
  async initialize() {
    if (this.initialized) return;
    const stored = await this.state.storage.get("count");
    if (stored !== undefined) this.count = stored;
    this.initialized = true;
  }
  // Handles POST /check with { maxRequests, ratePerSecond } in the body.
  // Budget is checked before the rate limit; a denied request consumes
  // neither budget nor a rate-window slot.
  async fetch(request) {
    await this.initialize();
    const url = new URL(request.url);
    if (url.pathname === "/check") {
      const { maxRequests, ratePerSecond } = await request.json();
      if (this.count >= maxRequests) {
        return Response.json({ allowed: false, error: "budget_exhausted" });
      }
      // Drop timestamps older than one second, then test if the window is full.
      const now = Date.now();
      this.timestamps = this.timestamps.filter((t) => now - t < 1000);
      if (this.timestamps.length >= ratePerSecond) {
        return Response.json(
          { allowed: false, error: "rate_limited" },
          { headers: { "Retry-After": "1" } },
        );
      }
      this.count++;
      this.timestamps.push(now);
      await this.state.storage.put("count", this.count);
      return Response.json({
        allowed: true,
        remaining: maxRequests - this.count,
      });
    }
    return Response.json({ error: "not found" }, { status: 404 });
  }
}
// Worker entry point: authenticate the caller's GitHub Actions OIDC token,
// enforce per-token budget/rate limits via the Durable Object, then proxy
// the request to the upstream API with the real API key injected.
export default {
  async fetch(request, env) {
    // CORS preflight never requires authentication.
    if (request.method === "OPTIONS") {
      return handleCors(env);
    }
    // Accept the OIDC token from either an x-api-key header (Anthropic-style
    // clients) or a Bearer Authorization header (OpenAI-style clients).
    const token =
      request.headers.get("x-api-key") ||
      request.headers.get("Authorization")?.replace("Bearer ", "");
    if (!token) {
      return jsonResponse(401, { error: "Missing authentication" });
    }
    const result = await verifyOidcToken(token, env);
    if (!result.valid) {
      return jsonResponse(401, { error: result.reason });
    }
    // Check rate limit and budget via Durable Object
    const bucketCheck = await checkTokenBucket(result.jti, env);
    if (!bucketCheck.allowed) {
      if (bucketCheck.error === "rate_limited") {
        return jsonResponse(
          429,
          { error: "Rate limit exceeded" },
          { "Retry-After": "1" },
        );
      }
      return jsonResponse(429, { error: "Token budget exhausted" });
    }
    // Forward path + query verbatim to the configured upstream.
    const url = new URL(request.url);
    const upstreamUrl = `${env.UPSTREAM_URL}${url.pathname}${url.search}`;
    // Strip the caller's credentials before injecting the real key.
    const headers = new Headers(request.headers);
    headers.delete("Authorization");
    headers.delete("x-api-key");
    const authHeader = env.UPSTREAM_AUTH_HEADER || "Authorization";
    const authPrefix = env.UPSTREAM_AUTH_PREFIX;
    headers.set(
      authHeader,
      authPrefix
        ? `${authPrefix}${env.UPSTREAM_API_KEY}`
        : env.UPSTREAM_API_KEY,
    );
    headers.set("Host", new URL(env.UPSTREAM_URL).host);
    const response = await fetch(upstreamUrl, {
      method: request.method,
      headers,
      body: request.body,
    });
    const respHeaders = new Headers(response.headers);
    // Workers' fetch auto-decompresses but keeps the Content-Encoding header,
    // which would cause clients to try decompressing already-decompressed data.
    respHeaders.delete("Content-Encoding");
    respHeaders.delete("Content-Length");
    respHeaders.set("Access-Control-Allow-Origin", env.CORS_ORIGIN || "*");
    // Stream the upstream body through so streaming responses work.
    return new Response(response.body, {
      status: response.status,
      headers: respHeaders,
    });
  },
};
// --- Token bucket (Durable Object) ---
// Ask the per-jti Durable Object whether this request fits within both the
// total budget and the per-second rate limit; returns the DO's JSON verdict.
async function checkTokenBucket(jti, env) {
  const budget = parseInt(env.MAX_REQUESTS_PER_TOKEN || "200", 10);
  const perSecond = parseInt(env.RATE_LIMIT_PER_SECOND || "2", 10);
  const stub = env.TOKEN_BUCKET.get(env.TOKEN_BUCKET.idFromName(jti));
  const verdict = await stub.fetch("https://bucket/check", {
    method: "POST",
    body: JSON.stringify({ maxRequests: budget, ratePerSecond: perSecond }),
  });
  return verdict.json();
}
// --- OIDC JWT verification using Web Crypto API ---
// Per-isolate JWKS cache; verifyOidcToken clears it to force a refresh
// when a key id is not found (issuer key rotation).
let jwksCache = null;
let jwksCacheTime = 0;
const JWKS_CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
// Fetch the issuer's JWKS via its OIDC discovery document, caching the
// result for an hour.
async function fetchJwks(issuer) {
  const now = Date.now();
  const cacheFresh = jwksCache && now - jwksCacheTime < JWKS_CACHE_TTL_MS;
  if (cacheFresh) {
    return jwksCache;
  }
  const discoveryUrl = `${issuer.replace(/\/$/, "")}/.well-known/openid-configuration`;
  const discoveryResp = await fetch(discoveryUrl);
  if (!discoveryResp.ok) {
    throw new Error(`Failed to fetch OIDC config: ${discoveryResp.status}`);
  }
  const discovery = await discoveryResp.json();
  const jwksResp = await fetch(discovery.jwks_uri);
  if (!jwksResp.ok) {
    throw new Error(`Failed to fetch JWKS: ${jwksResp.status}`);
  }
  jwksCache = await jwksResp.json();
  jwksCacheTime = now;
  return jwksCache;
}
// Decode a base64url string (RFC 4648 §5) into bytes.
// Fix: the original named its intermediate `padded` but never restored the
// stripped '=' padding, relying on forgiving atob() implementations. We now
// pad explicitly so strict base64 decoders accept the input too.
function base64UrlDecode(str) {
  let b64 = str.replace(/-/g, "+").replace(/_/g, "/");
  // Restore '=' padding so the length is a multiple of 4.
  while (b64.length % 4 !== 0) b64 += "=";
  const binary = atob(b64);
  return Uint8Array.from(binary, (c) => c.charCodeAt(0));
}
// Parse one dot-separated JWT segment: base64url → UTF-8 text → JSON.
function decodeJwtPart(b64url) {
  const bytes = base64UrlDecode(b64url);
  const text = new TextDecoder().decode(bytes);
  return JSON.parse(text);
}
// JOSE `alg` header values mapped to Web Crypto verification parameters.
// Acts as an allowlist: algorithms absent here (notably "none" and the
// HMAC HS* family) are rejected by verifySignature.
const ALG_MAP = {
  RS256: { name: "RSASSA-PKCS1-v1_5", hash: "SHA-256" },
  RS384: { name: "RSASSA-PKCS1-v1_5", hash: "SHA-384" },
  RS512: { name: "RSASSA-PKCS1-v1_5", hash: "SHA-512" },
  ES256: { name: "ECDSA", namedCurve: "P-256", hash: "SHA-256" },
  ES384: { name: "ECDSA", namedCurve: "P-384", hash: "SHA-384" },
};
// Validate a GitHub Actions OIDC JWT: structure, freshness, issuer,
// audience, repo/ref allowlists, then the signature against the issuer's
// JWKS. Returns { valid: true, jti } or { valid: false, reason }; never throws.
async function verifyOidcToken(token, env) {
  try {
    const parts = token.split(".");
    if (parts.length !== 3) {
      return { valid: false, reason: "Malformed JWT" };
    }
    const [headerB64, payloadB64, sigB64] = parts;
    const header = decodeJwtPart(headerB64);
    const payload = decodeJwtPart(payloadB64);
    // Freshness: when MAX_TOKEN_AGE_SECONDS is set and `iat` is present,
    // accept tokens by issued-at age even if `exp` has passed (GitHub OIDC
    // tokens expire quickly); otherwise fall back to the standard exp check.
    if (env.MAX_TOKEN_AGE_SECONDS && payload.iat) {
      const age = Date.now() / 1000 - payload.iat;
      if (age > parseInt(env.MAX_TOKEN_AGE_SECONDS, 10)) {
        return { valid: false, reason: "Token too old" };
      }
    } else if (!payload.exp || payload.exp < Date.now() / 1000) {
      return { valid: false, reason: "Token expired" };
    }
    // Compare issuers with trailing slashes normalized away.
    const expectedIssuer = env.OIDC_ISSUER.replace(/\/$/, "");
    const actualIssuer = (payload.iss || "").replace(/\/$/, "");
    if (actualIssuer !== expectedIssuer) {
      return { valid: false, reason: "Invalid issuer" };
    }
    if (env.OIDC_AUDIENCE) {
      // `aud` may be a single string or an array of strings.
      const audiences = Array.isArray(payload.aud)
        ? payload.aud
        : [payload.aud];
      if (!audiences.includes(env.OIDC_AUDIENCE)) {
        return { valid: false, reason: "Invalid audience" };
      }
    }
    if (env.ALLOWED_REPOS) {
      const allowed = env.ALLOWED_REPOS.split(",").map((r) => r.trim());
      if (!allowed.includes(payload.repository)) {
        return {
          valid: false,
          reason: `Repository '${payload.repository}' not allowed`,
        };
      }
    }
    if (env.ALLOWED_REFS) {
      const allowed = env.ALLOWED_REFS.split(",").map((r) => r.trim());
      if (!allowed.includes(payload.ref)) {
        return { valid: false, reason: `Ref '${payload.ref}' not allowed` };
      }
    }
    // NOTE(review): claim checks above run before the signature is verified,
    // so an unauthenticated caller can distinguish claim failures and trigger
    // a JWKS fetch with a forged token. Likely acceptable — confirm intended.
    const jwks = await fetchJwks(env.OIDC_ISSUER);
    let jwk = jwks.keys.find((k) => k.kid === header.kid);
    if (!jwk) {
      // Key id missing from the cached JWKS — bust the cache and retry once
      // to handle issuer key rotation within the cache TTL.
      jwksCache = null;
      const refreshed = await fetchJwks(env.OIDC_ISSUER);
      jwk = refreshed.keys.find((k) => k.kid === header.kid);
      if (!jwk) {
        return { valid: false, reason: "No matching key in JWKS" };
      }
    }
    const sigResult = await verifySignature(
      header,
      jwk,
      headerB64,
      payloadB64,
      sigB64,
    );
    if (!sigResult.valid) return sigResult;
    // Bucket by `jti`; fall back to a composite key for tokens without one.
    const jti = payload.jti || `${payload.iss}:${payload.iat}:${payload.sub}`;
    return { valid: true, jti };
  } catch (err) {
    // Any decode, fetch, or crypto failure is treated as an invalid token.
    return { valid: false, reason: `Verification error: ${err.message}` };
  }
}
// Verify the JWT signature over "header.payload" with the JWKS key whose
// kid matched. Returns { valid: true } or { valid: false, reason }; crypto
// errors propagate to the caller's try/catch.
async function verifySignature(header, jwk, headerB64, payloadB64, sigB64) {
  const params = ALG_MAP[header.alg];
  if (!params) {
    return { valid: false, reason: `Unsupported algorithm: ${header.alg}` };
  }
  // ECDSA keys import with their curve; RSA keys import with their hash.
  const importAlg = params.namedCurve
    ? { name: params.name, namedCurve: params.namedCurve }
    : { name: params.name, hash: params.hash };
  const key = await crypto.subtle.importKey("jwk", jwk, importAlg, false, [
    "verify",
  ]);
  const signedData = new TextEncoder().encode(`${headerB64}.${payloadB64}`);
  const signatureBytes = base64UrlDecode(sigB64);
  // ECDSA verification also needs the hash; RSA takes just the name.
  const verifyAlg =
    params.name === "ECDSA"
      ? { name: params.name, hash: params.hash }
      : params.name;
  const ok = await crypto.subtle.verify(
    verifyAlg,
    key,
    signatureBytes,
    signedData,
  );
  return ok ? { valid: true } : { valid: false, reason: "Invalid signature" };
}
// --- Helpers ---
// Build a JSON response with the given status; extraHeaders are merged on
// top of the default Content-Type header.
function jsonResponse(status, body, extraHeaders = {}) {
  const headers = { "Content-Type": "application/json", ...extraHeaders };
  return new Response(JSON.stringify(body), { status, headers });
}
// Answer a CORS preflight: allow the configured origin (or any), the
// standard auth/content headers plus any extras from CORS_EXTRA_HEADERS.
function handleCors(env) {
  const headerList = ["Authorization, Content-Type, x-api-key"];
  if (env.CORS_EXTRA_HEADERS) {
    headerList.push(env.CORS_EXTRA_HEADERS);
  }
  return new Response(null, {
    status: 204,
    headers: {
      "Access-Control-Allow-Origin": env.CORS_ORIGIN || "*",
      "Access-Control-Allow-Methods": "GET, POST, PUT, PATCH, DELETE, OPTIONS",
      "Access-Control-Allow-Headers": headerList.join(", "),
      "Access-Control-Max-Age": "86400",
    },
  });
}

View File

@@ -0,0 +1,306 @@
import {
env,
createExecutionContext,
waitOnExecutionContext,
fetchMock,
} from "cloudflare:test";
import { describe, it, expect, beforeAll, afterEach } from "vitest";
import worker from "../src/index.js";
// Test signing key pair and its public JWK, generated once for the suite.
// The JWK is what the mocked JWKS endpoint serves to the worker.
let testKeyPair;
let testJwk;
const TEST_KID = "test-kid-001";
beforeAll(async () => {
  // Extractable 2048-bit RS256 key so the public half can be exported as a JWK.
  testKeyPair = await crypto.subtle.generateKey(
    {
      name: "RSASSA-PKCS1-v1_5",
      modulusLength: 2048,
      publicExponent: new Uint8Array([1, 0, 1]),
      hash: "SHA-256",
    },
    true,
    ["sign", "verify"],
  );
  const exported = await crypto.subtle.exportKey("jwk", testKeyPair.publicKey);
  testJwk = { ...exported, kid: TEST_KID, alg: "RS256", use: "sig" };
});
// Reset fetch interception after every test so mocks don't leak between tests.
afterEach(() => {
  fetchMock.deactivate();
});
// Encode a string (or a JSON-serializable object) as unpadded base64url.
function base64UrlEncode(data) {
  const text = typeof data === "string" ? data : JSON.stringify(data);
  const b64 = btoa(text);
  return b64.replace(/=/g, "").replace(/\+/g, "-").replace(/\//g, "_");
}
// Sign `payload` with the suite's test RSA key and return a compact JWS.
async function createSignedJwt(payload, kid = TEST_KID) {
  const headerB64 = base64UrlEncode({ alg: "RS256", typ: "JWT", kid });
  const payloadB64 = base64UrlEncode(payload);
  const signingInput = new TextEncoder().encode(`${headerB64}.${payloadB64}`);
  const rawSignature = await crypto.subtle.sign(
    "RSASSA-PKCS1-v1_5",
    testKeyPair.privateKey,
    signingInput,
  );
  const sigB64 = btoa(String.fromCharCode(...new Uint8Array(rawSignature)))
    .replace(/=/g, "")
    .replace(/\+/g, "-")
    .replace(/\//g, "_");
  return [headerB64, payloadB64, sigB64].join(".");
}
// Monotonic counter so each generated payload carries a unique jti.
let jtiCounter = 0;
// Build a claims object that passes every proxy check; tests override
// individual claims to exercise specific failure paths.
function validPayload(overrides = {}) {
  const nowSec = Math.floor(Date.now() / 1000);
  const base = {
    iss: "https://token.actions.githubusercontent.com",
    aud: "goose-oidc-proxy",
    iat: nowSec - 10,
    exp: nowSec + 300,
    jti: `test-jti-${++jtiCounter}`,
    repository: "block/goose",
    ref: "refs/heads/main",
    sub: "repo:block/goose:ref:refs/heads/main",
  };
  return { ...base, ...overrides };
}
// Activate fetch interception: OIDC discovery + JWKS endpoints (persistent)
// and a single-use upstream POST intercept — for tests that make exactly
// one proxied request.
function mockAll(upstreamStatus = 200, upstreamBody = { ok: true }) {
  fetchMock.activate();
  fetchMock.disableNetConnect();
  const oidc = fetchMock.get("https://token.actions.githubusercontent.com");
  oidc
    .intercept({ path: "/.well-known/openid-configuration", method: "GET" })
    .reply(
      200,
      JSON.stringify({
        jwks_uri:
          "https://token.actions.githubusercontent.com/.well-known/jwks",
      }),
    )
    .persist();
  oidc
    .intercept({ path: "/.well-known/jwks", method: "GET" })
    .reply(200, JSON.stringify({ keys: [testJwk] }))
    .persist();
  // Upstream intercept is deliberately NOT persisted: one reply only.
  const upstream = fetchMock.get("https://api.anthropic.com");
  upstream
    .intercept({ path: /.*/, method: "POST" })
    .reply(upstreamStatus, JSON.stringify(upstreamBody));
}
// Same as mockAll, but the upstream intercept also persists, so a test can
// send multiple proxied requests against the same mock.
function mockAllPersistent(upstreamStatus = 200, upstreamBody = { ok: true }) {
  fetchMock.activate();
  fetchMock.disableNetConnect();
  const oidc = fetchMock.get("https://token.actions.githubusercontent.com");
  oidc
    .intercept({ path: "/.well-known/openid-configuration", method: "GET" })
    .reply(
      200,
      JSON.stringify({
        jwks_uri:
          "https://token.actions.githubusercontent.com/.well-known/jwks",
      }),
    )
    .persist();
  oidc
    .intercept({ path: "/.well-known/jwks", method: "GET" })
    .reply(200, JSON.stringify({ keys: [testJwk] }))
    .persist();
  const upstream = fetchMock.get("https://api.anthropic.com");
  upstream
    .intercept({ path: /.*/, method: "POST" })
    .reply(upstreamStatus, JSON.stringify(upstreamBody))
    .persist();
}
// Mock TokenBucket Durable Object for unit tests
// Returns a minimal namespace stub whose /check call always resolves to the
// defaults merged with `overrides`.
function mockTokenBucket(overrides = {}) {
  const verdict = { allowed: true, remaining: 199, ...overrides };
  return {
    idFromName: () => "mock-id",
    get: () => ({
      fetch: async () => Response.json(verdict),
    }),
  };
}
// Baseline worker environment that passes every check; spread `overrides`
// on top to tweak individual settings per test.
function testEnv(overrides = {}) {
  const base = {
    OIDC_ISSUER: "https://token.actions.githubusercontent.com",
    OIDC_AUDIENCE: "goose-oidc-proxy",
    UPSTREAM_URL: "https://api.anthropic.com",
    UPSTREAM_AUTH_HEADER: "x-api-key",
    UPSTREAM_API_KEY: "sk-ant-real-key",
    ALLOWED_REPOS: "block/goose",
    MAX_TOKEN_AGE_SECONDS: "1200",
    MAX_REQUESTS_PER_TOKEN: "200",
    RATE_LIMIT_PER_SECOND: "2",
    TOKEN_BUCKET: mockTokenBucket(),
  };
  return { ...base, ...overrides };
}
describe("rejects invalid requests", () => {
  it("missing auth", async () => {
    // No x-api-key or Authorization header → 401 before any JWT parsing.
    const request = new Request("https://proxy.example.com/v1/messages");
    const ctx = createExecutionContext();
    const response = await worker.fetch(request, testEnv(), ctx);
    await waitOnExecutionContext(ctx);
    expect(response.status).toBe(401);
    expect((await response.json()).error).toBe("Missing authentication");
  });
  it("malformed token", async () => {
    // A non-JWT string fails the three-part structure check.
    const request = new Request("https://proxy.example.com/v1/messages", {
      headers: { "x-api-key": "not-a-jwt" },
    });
    const ctx = createExecutionContext();
    const response = await worker.fetch(request, testEnv(), ctx);
    await waitOnExecutionContext(ctx);
    expect(response.status).toBe(401);
    expect((await response.json()).error).toBe("Malformed JWT");
  });
  it("wrong claims (repo, audience, issuer)", async () => {
    // Each override breaks exactly one claim check; all must yield 401.
    for (const [override, expectedError] of [
      [{ repository: "evil/repo" }, "not allowed"],
      [{ aud: "wrong" }, "Invalid audience"],
      [{ iss: "https://evil.example.com" }, "Invalid issuer"],
    ]) {
      const token = await createSignedJwt(validPayload(override));
      const request = new Request("https://proxy.example.com/v1/messages", {
        headers: { "x-api-key": token },
      });
      const ctx = createExecutionContext();
      const response = await worker.fetch(request, testEnv(), ctx);
      await waitOnExecutionContext(ctx);
      expect(response.status).toBe(401);
      expect((await response.json()).error).toContain(expectedError);
    }
  });
  it("token too old", async () => {
    // iat 1500s in the past exceeds testEnv's MAX_TOKEN_AGE_SECONDS of 1200.
    const token = await createSignedJwt(
      validPayload({ iat: Math.floor(Date.now() / 1000) - 1500 }),
    );
    const request = new Request("https://proxy.example.com/v1/messages", {
      headers: { "x-api-key": token },
    });
    const ctx = createExecutionContext();
    const response = await worker.fetch(request, testEnv(), ctx);
    await waitOnExecutionContext(ctx);
    expect(response.status).toBe(401);
    expect((await response.json()).error).toBe("Token too old");
  });
});
describe("proxies valid requests", () => {
  it("forwards to upstream with injected API key", async () => {
    // Happy path: valid signed token, mocked OIDC + upstream endpoints.
    const token = await createSignedJwt(validPayload());
    mockAll(200, { id: "msg_123", type: "message" });
    const request = new Request("https://proxy.example.com/v1/messages", {
      method: "POST",
      headers: {
        "x-api-key": token,
        "Content-Type": "application/json",
      },
      body: JSON.stringify({ model: "claude-sonnet-4-20250514", messages: [] }),
    });
    const ctx = createExecutionContext();
    const response = await worker.fetch(request, testEnv(), ctx);
    await waitOnExecutionContext(ctx);
    expect(response.status).toBe(200);
    // Upstream mock body is passed through to the client.
    expect((await response.json()).id).toBe("msg_123");
  });
  it("accepts recently-expired token within MAX_TOKEN_AGE_SECONDS", async () => {
    // exp is 5 min in the past, but iat (10 min ago) is within the 1200s
    // age window, so the iat-based freshness check accepts the token.
    const now = Math.floor(Date.now() / 1000);
    const token = await createSignedJwt(
      validPayload({ iat: now - 600, exp: now - 300 }),
    );
    mockAll(200, { ok: true });
    const request = new Request("https://proxy.example.com/v1/messages", {
      method: "POST",
      headers: { "x-api-key": token, "Content-Type": "application/json" },
      body: JSON.stringify({}),
    });
    const ctx = createExecutionContext();
    const response = await worker.fetch(request, testEnv(), ctx);
    await waitOnExecutionContext(ctx);
    expect(response.status).toBe(200);
  });
});
describe("token budget and rate limiting", () => {
  it("rejects when budget exhausted", async () => {
    const token = await createSignedJwt(validPayload());
    mockAll();
    const request = new Request("https://proxy.example.com/v1/messages", {
      method: "POST",
      headers: { "x-api-key": token, "Content-Type": "application/json" },
      body: JSON.stringify({}),
    });
    const ctx = createExecutionContext();
    // Replace the Durable Object with a stub that reports budget exhaustion.
    const response = await worker.fetch(
      request,
      testEnv({
        TOKEN_BUCKET: mockTokenBucket({
          allowed: false,
          error: "budget_exhausted",
        }),
      }),
      ctx,
    );
    await waitOnExecutionContext(ctx);
    expect(response.status).toBe(429);
    expect((await response.json()).error).toBe("Token budget exhausted");
    // Budget exhaustion is permanent for the token — no Retry-After hint.
    expect(response.headers.get("Retry-After")).toBeNull();
  });
  it("rejects with Retry-After when rate limited", async () => {
    const token = await createSignedJwt(validPayload());
    mockAll();
    const request = new Request("https://proxy.example.com/v1/messages", {
      method: "POST",
      headers: { "x-api-key": token, "Content-Type": "application/json" },
      body: JSON.stringify({}),
    });
    const ctx = createExecutionContext();
    // Stub reports rate limiting, which should carry a Retry-After header.
    const response = await worker.fetch(
      request,
      testEnv({
        TOKEN_BUCKET: mockTokenBucket({
          allowed: false,
          error: "rate_limited",
        }),
      }),
      ctx,
    );
    await waitOnExecutionContext(ctx);
    expect(response.status).toBe(429);
    expect((await response.json()).error).toBe("Rate limit exceeded");
    expect(response.headers.get("Retry-After")).toBe("1");
  });
});

View File

@@ -0,0 +1,11 @@
import { defineWorkersConfig } from "@cloudflare/vitest-pool-workers/config";
export default defineWorkersConfig({
test: {
poolOptions: {
workers: {
wrangler: { configPath: "./wrangler.toml" },
},
},
},
});

30
oidc-proxy/wrangler.toml Normal file
View File

@@ -0,0 +1,30 @@
name = "oidc-proxy"
main = "src/index.js"
compatibility_date = "2026-03-01"
[durable_objects]
bindings = [{ name = "TOKEN_BUCKET", class_name = "TokenBucket" }]
[[migrations]]
tag = "v1"
new_classes = ["TokenBucket"]
[vars]
OIDC_ISSUER = "https://token.actions.githubusercontent.com"
OIDC_AUDIENCE = "goose-oidc-proxy"
MAX_TOKEN_AGE_SECONDS = "1200" # 20 minutes
MAX_REQUESTS_PER_TOKEN = "200"
RATE_LIMIT_PER_SECOND = "2"
ALLOWED_REPOS = "block/goose,aaif/goose"
# Upstream configuration
UPSTREAM_URL = "https://api.anthropic.com"
UPSTREAM_AUTH_HEADER = "x-api-key"
# UPSTREAM_AUTH_PREFIX is unset — Anthropic expects a raw key, not "Bearer <key>"
# Additional CORS headers for Anthropic SDK
CORS_EXTRA_HEADERS = "anthropic-version"
# Set the upstream API key as a secret:
# npx wrangler secret put UPSTREAM_API_KEY