Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
204 changes: 123 additions & 81 deletions scripts/create-github-release.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -8,35 +8,6 @@ import { tmpdir } from 'node:os'
const rootDir = path.join(import.meta.dirname, '..')
const ghToken = process.env.GH_TOKEN || process.env.GITHUB_TOKEN

// Get the previous release commit to diff against.
// This script runs right after the "ci: changeset release" commit is pushed,
// so HEAD is the release commit. We want commits between the previous release
// and this one (exclusive of both release commits).
const releaseLogs = execSync(
'git log --oneline --grep="ci: changeset release" --format=%H',
)
.toString()
.trim()
.split('\n')
.filter(Boolean)

// Current release commit is releaseLogs[0] (HEAD), previous is releaseLogs[1]
const currentRelease = releaseLogs[0] || 'HEAD'
const previousRelease = releaseLogs[1]
const rangeFrom = previousRelease || `${currentRelease}~1`

// Get commits between previous release and current release (exclude both)
const rawLog = execSync(
`git log ${rangeFrom}..${currentRelease} --pretty=format:"%h %ae %s" --no-merges`,
)
.toString()
.trim()

const commits = rawLog
.split('\n')
.filter(Boolean)
.filter((line) => !line.includes('ci: changeset release'))

// Resolve GitHub usernames from commit author emails
const usernameCache = {}
async function resolveUsername(email) {
Expand All @@ -57,70 +28,141 @@ async function resolveUsername(email) {
}
}

// Group commits by conventional commit type
const groups = {}
for (const line of commits) {
// Format: "<hash> <email> <type>(<scope>): <subject>" or "<hash> <email> <type>: <subject>"
const match = line.match(/^(\w+)\s+(\S+)\s+(\w+)(?:\(([^)]+)\))?:\s*(.+)$/)
if (match) {
const [, hash, email, type, scope, subject] = match
const key = type.charAt(0).toUpperCase() + type.slice(1)
if (!groups[key]) groups[key] = []
groups[key].push({ hash, email, scope, subject })
} else {
// Non-conventional commits (merge commits, etc.) go to Other
if (!groups['Other']) groups['Other'] = []
const parts = line.split(' ')
const hash = parts[0]
const email = parts[1]
const subject = parts.slice(2).join(' ')
groups['Other'].push({ hash, email, scope: null, subject })
// Resolve a commit hash to a " by @username" attribution suffix.
// Results are memoized per hash; any failure (unknown hash, git missing,
// username lookup miss) resolves to an empty string.
const authorCache = {}
async function resolveAuthorForCommit(hash) {
  const cached = authorCache[hash]
  if (cached !== undefined) return cached

  let suffix = ''
  try {
    // Look up the author email for this commit; stderr is discarded.
    const authorEmail = execSync(`git log -1 --format=%ae ${hash}`, {
      encoding: 'utf-8',
      stdio: ['pipe', 'pipe', 'ignore'],
    }).trim()
    const login = await resolveUsername(authorEmail)
    if (login) suffix = ` by @${login}`
  } catch {
    // Swallow git/network failures — attribution is best-effort.
    suffix = ''
  }
  authorCache[hash] = suffix
  return suffix
}

// Build changelog markdown
const typeOrder = [
'Feat',
'Fix',
'Refactor',
'Perf',
'Test',
'Docs',
'Chore',
'Ci',
'Other',
]
const typeIndex = (t) => {
const i = typeOrder.indexOf(t)
return i === -1 ? 99 : i
// Append author info to changelog lines that contain commit hashes
async function appendAuthors(content) {
const lines = content.split('\n')
const result = []

for (const line of lines) {
// Match commit hash links like [`9a4d924`](url)
const commitMatch = line.match(/\[`([a-f0-9]{7,})`\]/)
if (commitMatch && line.startsWith('- ')) {
const author = await resolveAuthorForCommit(commitMatch[1])
Comment on lines +57 to +60
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

🧩 Analysis chain

🏁 Script executed:

#!/bin/bash
python <<'PY'
import pathlib
import re
import subprocess
import sys

all_shas = subprocess.check_output(["git", "rev-list", "--all"], text=True).splitlines()

shorts = sorted(set(
    m.group(1)
    for p in pathlib.Path("packages").glob("*/CHANGELOG.md")
    for m in re.finditer(r'\[`([a-f0-9]{7,})`\]\(', p.read_text())
))

ambiguous = []
for short in shorts:
    count = sum(sha.startswith(short) for sha in all_shas)
    if count != 1:
        ambiguous.append((short, count))

if ambiguous:
    print("Ambiguous short SHAs referenced from changelogs:")
    for short, count in ambiguous:
        print(f"{short}: {count} matching commits")
    sys.exit(1)

print("All current changelog short SHAs resolve uniquely; parsing the full SHA is still safer.")
PY

Repository: TanStack/router

Length of output: 146


Parse the full commit SHA from the link target instead of the abbreviated hash.

The current regex extracts only the 7-character display hash. While all short SHAs in the current changelogs resolve uniquely, parsing the full 40-character SHA from the link URL is safer and avoids potential ambiguities if the repository grows.

Suggested change
-    const commitMatch = line.match(/\[`([a-f0-9]{7,})`\]/)
+    const commitMatch = line.match(
+      /\[`[a-f0-9]{7,}`\]\([^)]*\/commit\/([a-f0-9]{40})\)/
+    )
     if (commitMatch && line.startsWith('- ')) {
       const author = await resolveAuthorForCommit(commitMatch[1])
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@scripts/create-github-release.mjs` around lines 57 - 60, The regex currently
captures only the 7-char displayed SHA from the link text (commitMatch) which
can be ambiguous; update the parsing so you extract the full 40-character SHA
from the link target/URL in the same line before calling resolveAuthorForCommit.
Locate the block that defines commitMatch and the call to
resolveAuthorForCommit, change the regex to capture the link target (or
separately parse the URL portion) and pull the 40-char hex SHA (a-f0-9{40}) from
that URL, then pass that full SHA into resolveAuthorForCommit instead of the
short hash.

result.push(author ? `${line}${author}` : line)
} else {
result.push(line)
}
}

return result.join('\n')
}
const sortedTypes = Object.keys(groups).sort(
(a, b) => typeIndex(a) - typeIndex(b),

// Get the previous release commit to diff against.
// This script runs right after the "ci: changeset release" commit is pushed,
// so HEAD is the release commit.
const releaseLogs = execSync(
  'git log --oneline --grep="ci: changeset release" --format=%H',
  { encoding: 'utf-8' },
)
  .trim()
  .split('\n')
  .filter(Boolean)

// releaseLogs[0] is HEAD (this release); releaseLogs[1] is the one before it.
const currentRelease = releaseLogs[0] || 'HEAD'
const previousRelease = releaseLogs[1]

// Find packages that were actually bumped by comparing versions
const packagesDir = path.join(rootDir, 'packages')
const allPkgJsonPaths = globSync('*/package.json', { cwd: packagesDir })

const bumpedPackages = []
for (const relPath of allPkgJsonPaths) {
  const pkgDir = path.dirname(relPath)
  const pkgJson = JSON.parse(
    fs.readFileSync(path.join(packagesDir, relPath), 'utf-8'),
  )
  if (pkgJson.private) continue

  // No previous release at all — every public package counts as new.
  if (!previousRelease) {
    bumpedPackages.push({
      name: pkgJson.name,
      version: pkgJson.version,
      prevVersion: null,
      dir: pkgDir,
    })
    continue
  }

  try {
    // Read the package.json as it existed at the previous release commit.
    const prevContent = execSync(
      `git show ${previousRelease}:packages/${relPath}`,
      { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'ignore'] },
    )
    const prevVersion = JSON.parse(prevContent).version
    if (prevVersion !== pkgJson.version) {
      bumpedPackages.push({
        name: pkgJson.name,
        version: pkgJson.version,
        prevVersion,
        dir: pkgDir,
      })
    }
  } catch {
    // Package didn't exist in previous release — it's new.
    bumpedPackages.push({
      name: pkgJson.name,
      version: pkgJson.version,
      prevVersion: null,
      dir: pkgDir,
    })
  }
}

bumpedPackages.sort((a, b) => a.name.localeCompare(b.name))

// Extract changelog entries from changeset-generated CHANGELOG.md files.
// Changesets writes entries under "## <version>" headers. We extract the
// content under the current version header for each bumped package.
let changelogMd = ''
for (const type of sortedTypes) {
changelogMd += `### ${type}\n\n`
for (const { hash, email, scope, subject } of groups[type]) {
const scopeStr = scope ? `${scope}: ` : ''
const username = await resolveUsername(email)
const authorStr = username ? ` by @${username}` : ''
changelogMd += `- ${scopeStr}${subject} (${hash})${authorStr}\n`
// Collect each bumped package's changeset-generated changelog section
// (with commit authors appended) into the release-notes markdown.
for (const pkg of bumpedPackages) {
  const changelogPath = path.join(packagesDir, pkg.dir, 'CHANGELOG.md')
  if (!fs.existsSync(changelogPath)) continue

  const changelog = fs.readFileSync(changelogPath, 'utf-8')

  // Find the section for the current version. Anchor the match to a complete
  // "## <version>" header line: a raw indexOf substring search would also
  // match inside a longer version header (searching "## 1.0.1" hits
  // "## 1.0.10" first) or inside body text that mentions the version.
  const versionHeader = `## ${pkg.version}`
  let startIdx = -1
  for (
    let i = changelog.indexOf(versionHeader);
    i !== -1;
    i = changelog.indexOf(versionHeader, i + 1)
  ) {
    const end = i + versionHeader.length
    const atLineStart = i === 0 || changelog[i - 1] === '\n'
    const atLineEnd =
      end === changelog.length ||
      changelog[end] === '\n' ||
      changelog[end] === '\r'
    if (atLineStart && atLineEnd) {
      startIdx = i
      break
    }
  }
  if (startIdx === -1) continue

  // Section runs from just after the header to the next "## " header
  // (or to end of file for the oldest entry).
  const afterHeader = startIdx + versionHeader.length
  const nextSection = changelog.indexOf('\n## ', afterHeader)
  const section =
    nextSection === -1
      ? changelog.slice(afterHeader)
      : changelog.slice(afterHeader, nextSection)

  const content = section.trim()
  if (content) {
    const withAuthors = await appendAuthors(content)
    changelogMd += `#### ${pkg.name}\n\n${withAuthors}\n\n`
  }
  changelogMd += '\n'
}

// Fall back to a placeholder so the release body is never empty.
if (!changelogMd) {
  changelogMd = '- No changelog entries\n\n'
}

// Collect all publishable package versions
const packagesDir = path.join(rootDir, 'packages')
const pkgs = globSync('*/package.json', { cwd: packagesDir })
.map((p) => JSON.parse(fs.readFileSync(path.join(packagesDir, p), 'utf-8')))
.filter((p) => !p.private)
.sort((a, b) => a.name.localeCompare(b.name))

// Timestamp pieces for the release title/tag: UTC date (YYYY-MM-DD)
// and a compact HHMM time with the colon stripped.
const now = new Date()
const iso = now.toISOString()
const date = iso.slice(0, 10)
const time = iso.slice(11, 16).replace(':', '')
Expand All @@ -136,7 +178,7 @@ const body = `Release ${titleDate}
${changelogMd}
## Packages

${pkgs.map((p) => `- ${p.name}@${p.version}`).join('\n')}
${bumpedPackages.map((p) => `- ${p.name}@${p.version}`).join('\n')}
`

// Create the release
Expand Down
Loading