[25.05] ci/treefmt: add biome (#435283)

This commit is contained in:
Wolfgang Walther
2025-08-20 14:55:25 +00:00
committed by GitHub
13 changed files with 527 additions and 481 deletions

View File

@@ -23,8 +23,7 @@ insert_final_newline = false
# see https://nixos.org/nixpkgs/manual/#chap-conventions
# Match json/lockfiles/markdown/nix/perl/python/ruby/shell/docbook files, set indent to spaces
[*.{bash,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
[*.{bash,css,js,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
indent_style = space
# Match docbook files, set indent width of one
@@ -32,7 +31,7 @@ indent_style = space
indent_size = 1
# Match json/lockfiles/markdown/nix/ruby files, set indent width of two
[*.{json,lock,md,nix,rb}]
[*.{js,json,lock,md,nix,rb}]
indent_size = 2
# Match all the Bash code in Nix files, set indent width of two

View File

@@ -1,4 +0,0 @@
# TODO: Move to top-level via staging PR
[*.js]
indent_style = space
indent_size = 2

View File

@@ -42,6 +42,22 @@ let
programs.actionlint.enable = true;
programs.biome = {
enable = true;
settings.formatter = {
useEditorconfig = true;
};
settings.javascript.formatter = {
quoteStyle = "single";
semicolons = "asNeeded";
};
settings.json.formatter.enabled = false;
};
settings.formatter.biome.excludes = [
"*.min.js"
"pkgs/*"
];
programs.keep-sorted.enable = true;
# This uses nixfmt underneath,

View File

@@ -1,7 +1,5 @@
module.exports = async function ({ github, context, core, dry }) {
module.exports = async ({ github, context, core, dry }) => {
const { execFileSync } = require('node:child_process')
const { readFile } = require('node:fs/promises')
const { join } = require('node:path')
const { classify } = require('../supportedBranches.js')
const withRateLimit = require('./withRateLimit.js')
@@ -18,13 +16,13 @@ module.exports = async function ({ github, context, core, dry }) {
run_id: context.runId,
per_page: 100,
})
).find(({ name }) => name == 'Check / cherry-pick').html_url +
).find(({ name }) => name === 'Check / cherry-pick').html_url +
'?pr=' +
pull_number
async function extract({ sha, commit }) {
const noCherryPick = Array.from(
commit.message.matchAll(/^Not-cherry-picked-because: (.*)$/g)
commit.message.matchAll(/^Not-cherry-picked-because: (.*)$/g),
).at(0)
if (noCherryPick)
@@ -148,8 +146,7 @@ module.exports = async function ({ github, context, core, dry }) {
const fetch = extracted
.filter(({ severity }) => !severity)
.map(({ sha, original_sha }) => [ sha, original_sha ])
.flat()
.flatMap(({ sha, original_sha }) => [sha, original_sha])
if (fetch.length > 0) {
// Fetching all commits we need for diff at once is much faster than any other method.
@@ -163,7 +160,9 @@ module.exports = async function ({ github, context, core, dry }) {
])
}
const results = extracted.map(result => result.severity ? result : diff(result))
const results = extracted.map((result) =>
result.severity ? result : diff(result),
)
// Log all results without truncation, with better highlighting and all whitespace changes to the job log.
results.forEach(({ sha, commit, severity, message, colored_diff }) => {
@@ -177,7 +176,7 @@ module.exports = async function ({ github, context, core, dry }) {
// Only create step summary below in case of warnings or errors.
// Also clean up older reviews, when all checks are good now.
if (results.every(({ severity }) => severity == 'info')) {
if (results.every(({ severity }) => severity === 'info')) {
if (!dry) {
await Promise.all(
(
@@ -186,9 +185,9 @@ module.exports = async function ({ github, context, core, dry }) {
pull_number,
})
)
.filter((review) => review.user.login == 'github-actions[bot]')
.filter((review) => review.user.login === 'github-actions[bot]')
.map(async (review) => {
if (review.state == 'CHANGES_REQUESTED') {
if (review.state === 'CHANGES_REQUESTED') {
await github.rest.pulls.dismissReview({
...context.repo,
pull_number,
@@ -214,34 +213,64 @@ module.exports = async function ({ github, context, core, dry }) {
// In the case of "error" severity, we also fail the job.
// Those should be considered blocking and not be dismissable via review.
if (results.some(({ severity }) => severity == 'error'))
if (results.some(({ severity }) => severity === 'error'))
process.exitCode = 1
core.summary.addRaw('This report is automatically generated by the `PR / Check / cherry-pick` CI workflow.', true)
core.summary.addRaw(
'This report is automatically generated by the `PR / Check / cherry-pick` CI workflow.',
true,
)
core.summary.addEOL()
core.summary.addRaw("Some of the commits in this PR require the author's and reviewer's attention.", true)
core.summary.addRaw(
"Some of the commits in this PR require the author's and reviewer's attention.",
true,
)
core.summary.addEOL()
if (results.some(({ type }) => type === 'no-commit-hash')) {
core.summary.addRaw('Please follow the [backporting guidelines](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md#how-to-backport-pull-requests) and cherry-pick with the `-x` flag.', true)
core.summary.addRaw('This requires changes to the unstable `master` and `staging` branches first, before backporting them.', true)
core.summary.addRaw(
'Please follow the [backporting guidelines](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md#how-to-backport-pull-requests) and cherry-pick with the `-x` flag.',
true,
)
core.summary.addRaw(
'This requires changes to the unstable `master` and `staging` branches first, before backporting them.',
true,
)
core.summary.addEOL()
core.summary.addRaw('Occasionally, commits are not cherry-picked at all, for example when updating minor versions of packages which have already advanced to the next major on unstable.', true)
core.summary.addRaw('These commits can optionally be marked with a `Not-cherry-picked-because: <reason>` footer.', true)
core.summary.addRaw(
'Occasionally, commits are not cherry-picked at all, for example when updating minor versions of packages which have already advanced to the next major on unstable.',
true,
)
core.summary.addRaw(
'These commits can optionally be marked with a `Not-cherry-picked-because: <reason>` footer.',
true,
)
core.summary.addEOL()
}
if (results.some(({ type }) => type === 'diff')) {
core.summary.addRaw('Sometimes it is not possible to cherry-pick exactly the same patch.', true)
core.summary.addRaw('This most frequently happens when resolving merge conflicts.', true)
core.summary.addRaw('The range-diff will help to review the resolution of conflicts.', true)
core.summary.addRaw(
'Sometimes it is not possible to cherry-pick exactly the same patch.',
true,
)
core.summary.addRaw(
'This most frequently happens when resolving merge conflicts.',
true,
)
core.summary.addRaw(
'The range-diff will help to review the resolution of conflicts.',
true,
)
core.summary.addEOL()
}
core.summary.addRaw('If you need to merge this PR despite the warnings, please [dismiss](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/dismissing-a-pull-request-review) this review shortly before merging.', true)
core.summary.addRaw(
'If you need to merge this PR despite the warnings, please [dismiss](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/dismissing-a-pull-request-review) this review shortly before merging.',
true,
)
results.forEach(({ severity, message, diff }) => {
if (severity == 'info') return
if (severity === 'info') return
// The docs for markdown alerts only show examples with markdown blockquote syntax, like this:
// > [!WARNING]
@@ -256,7 +285,7 @@ module.exports = async function ({ github, context, core, dry }) {
// Whether this is intended or just an implementation detail is unclear.
core.summary.addRaw('<blockquote>')
core.summary.addRaw(
`\n\n[!${({ important: 'IMPORTANT', warning: 'WARNING', error: 'CAUTION' })[severity]}]`,
`\n\n[!${{ important: 'IMPORTANT', warning: 'WARNING', error: 'CAUTION' }[severity]}]`,
true,
)
core.summary.addRaw(`${message}`, true)
@@ -305,18 +334,18 @@ module.exports = async function ({ github, context, core, dry }) {
})
).find(
(review) =>
review.user.login == 'github-actions[bot]' &&
review.user.login === 'github-actions[bot]' &&
// If a review is still pending, we can just update this instead
// of posting a new one.
(review.state == 'CHANGES_REQUESTED' ||
(review.state === 'CHANGES_REQUESTED' ||
// No need to post a new review, if an older one with the exact
// same content had already been dismissed.
review.body == body),
review.body === body),
)
if (dry) {
if (pendingReview)
core.info('pending review found: ' + pendingReview.html_url)
core.info(`pending review found: ${pendingReview.html_url}`)
else core.info('no pending review found')
} else {
// Either of those two requests could fail for very long comments. This can only happen

View File

@@ -1,4 +1,4 @@
module.exports = async function ({ github, context, core, dry }) {
module.exports = async ({ github, context, core, dry }) => {
const path = require('node:path')
const { DefaultArtifactClient } = require('@actions/artifact')
const { readFile, writeFile } = require('node:fs/promises')
@@ -27,7 +27,7 @@ module.exports = async function ({ github, context, core, dry }) {
const approvals = new Set(
reviews
.filter((review) => review.state == 'APPROVED')
.filter((review) => review.state === 'APPROVED')
.map((review) => review.user?.id),
)
@@ -37,7 +37,7 @@ module.exports = async function ({ github, context, core, dry }) {
// This is intentionally less than the time that Eval takes, so that the label job
// running after Eval can indeed label the PR as conflicted if that is the case.
const merge_commit_sha_valid =
new Date() - new Date(pull_request.created_at) > 3 * 60 * 1000
Date.now() - new Date(pull_request.created_at) > 3 * 60 * 1000
const prLabels = {
// We intentionally don't use the mergeable or mergeable_state attributes.
@@ -53,8 +53,8 @@ module.exports = async function ({ github, context, core, dry }) {
// The second pass will then read the result from the first pass and set the label.
'2.status: merge conflict':
merge_commit_sha_valid && !pull_request.merge_commit_sha,
'12.approvals: 1': approvals.size == 1,
'12.approvals: 2': approvals.size == 2,
'12.approvals: 1': approvals.size === 1,
'12.approvals: 2': approvals.size === 2,
'12.approvals: 3+': approvals.size >= 3,
'12.first-time contribution': [
'NONE',
@@ -104,8 +104,8 @@ module.exports = async function ({ github, context, core, dry }) {
// existing reviews, too.
'9.needs: reviewer':
!pull_request.draft &&
pull_request.requested_reviewers.length == 0 &&
reviews.length == 0,
pull_request.requested_reviewers.length === 0 &&
reviews.length === 0,
})
}
@@ -125,8 +125,7 @@ module.exports = async function ({ github, context, core, dry }) {
// called "comparison", yet, will skip the download.
const expired =
!artifact ||
new Date(artifact?.expires_at ?? 0) <
new Date(new Date().getTime() + 60 * 1000)
new Date(artifact?.expires_at ?? 0) < new Date(Date.now() + 60 * 1000)
log('Artifact expires at', artifact?.expires_at ?? '<n/a>')
if (!expired) {
stats.artifacts++
@@ -175,7 +174,7 @@ module.exports = async function ({ github, context, core, dry }) {
async function handle({ item, stats }) {
try {
const log = (k, v, skip) => {
core.info(`#${item.number} - ${k}: ${v}` + (skip ? ' (skipped)' : ''))
core.info(`#${item.number} - ${k}: ${v}${skip ? ' (skipped)' : ''}`)
return skip
}
@@ -257,7 +256,7 @@ module.exports = async function ({ github, context, core, dry }) {
// No need for an API request, if all labels are the same.
const hasChanges = Object.keys(after).some(
(name) => (before[name] ?? false) != after[name],
(name) => (before[name] ?? false) !== after[name],
)
if (log('Has changes', hasChanges, !hasChanges)) return
@@ -297,13 +296,15 @@ module.exports = async function ({ github, context, core, dry }) {
// Go back as far as the last successful run of this workflow to make sure
// we are not leaving anyone behind on GHA failures.
// Defaults to go back 1 hour on the first run.
new Date(lastRun?.created_at ?? new Date().getTime() - 1 * 60 * 60 * 1000).getTime(),
new Date(
lastRun?.created_at ?? Date.now() - 1 * 60 * 60 * 1000,
).getTime(),
// Go back max. 1 day to prevent hitting all API rate limits immediately,
// when GH API returns a wrong workflow by accident.
new Date().getTime() - 24 * 60 * 60 * 1000,
Date.now() - 24 * 60 * 60 * 1000,
),
)
core.info('cutoff timestamp: ' + cutoff.toISOString())
core.info(`cutoff timestamp: ${cutoff.toISOString()}`)
const updatedItems = await github.paginate(
github.rest.search.issuesAndPullRequests,
@@ -400,12 +401,12 @@ module.exports = async function ({ github, context, core, dry }) {
.concat(updatedItems, allItems.data)
.filter(
(thisItem, idx, arr) =>
idx ==
arr.findIndex((firstItem) => firstItem.number == thisItem.number),
idx ===
arr.findIndex((firstItem) => firstItem.number === thisItem.number),
)
;(await Promise.allSettled(items.map((item) => handle({ item, stats }))))
.filter(({ status }) => status == 'rejected')
.filter(({ status }) => status === 'rejected')
.map(({ reason }) =>
core.setFailed(`${reason.message}\n${reason.cause.stack}`),
)

View File

@@ -1,4 +1,4 @@
module.exports = async function ({ github, context, core }) {
module.exports = async ({ github, context, core }) => {
const pull_number = context.payload.pull_request.number
for (const retryInterval of [5, 10, 20, 40, 80]) {

View File

@@ -1,4 +1,4 @@
module.exports = async function ({ github, core }, callback) {
module.exports = async ({ github, core }, callback) => {
const Bottleneck = require('bottleneck')
const stats = {
@@ -23,7 +23,7 @@ module.exports = async function ({ github, core }, callback) {
// Requests to a different host do not count against the rate limit.
if (options.url.startsWith('https://github.com')) return request(options)
// Requests to the /rate_limit endpoint do not count against the rate limit.
if (options.url == '/rate_limit') return request(options)
if (options.url === '/rate_limit') return request(options)
// Search requests are in a different resource group, which allows 30 requests / minute.
// We do less than a handful each run, so not implementing throttling for now.
if (options.url.startsWith('/search/')) return request(options)

View File

@@ -15,14 +15,18 @@ const typeConfig = {
}
function split(branch) {
return { ...branch.match(/(?<prefix>.+?)(-(?<version>\d{2}\.\d{2}|unstable)(?:-(?<suffix>.*))?)?$/).groups }
return {
...branch.match(
/(?<prefix>.+?)(-(?<version>\d{2}\.\d{2}|unstable)(?:-(?<suffix>.*))?)?$/,
).groups,
}
}
function classify(branch) {
const { prefix, version } = split(branch)
return {
stable: (version ?? 'unstable') !== 'unstable',
type: typeConfig[prefix] ?? [ 'wip' ]
type: typeConfig[prefix] ?? ['wip'],
}
}

View File

@@ -1,3 +1,5 @@
document.addEventListener('DOMContentLoaded', function(event) {
anchors.add('h1[id]:not(div.note h1, div.warning h1, div.tip h1, div.caution h1, div.important h1), h2[id]:not(div.note h2, div.warning h2, div.tip h2, div.caution h2, div.important h2), h3[id]:not(div.note h3, div.warning h3, div.tip h3, div.caution h3, div.important h3), h4[id]:not(div.note h4, div.warning h4, div.tip h4, div.caution h4, div.important h4), h5[id]:not(div.note h5, div.warning h5, div.tip h5, div.caution h5, div.important h5), h6[id]:not(div.note h6, div.warning h6, div.tip h6, div.caution h6, div.important h6)');
});
document.addEventListener('DOMContentLoaded', () => {
anchors.add(
'h1[id]:not(div.note h1, div.warning h1, div.tip h1, div.caution h1, div.important h1), h2[id]:not(div.note h2, div.warning h2, div.tip h2, div.caution h2, div.important h2), h3[id]:not(div.note h3, div.warning h3, div.tip h3, div.caution h3, div.important h3), h4[id]:not(div.note h4, div.warning h4, div.tip h4, div.caution h4, div.important h4), h5[id]:not(div.note h5, div.warning h5, div.tip h5, div.caution h5, div.important h5), h6[id]:not(div.note h6, div.warning h6, div.tip h6, div.caution h6, div.important h6)',
)
})

View File

@@ -1,193 +1,193 @@
html {
line-height: 1.15;
-webkit-text-size-adjust: 100%;
line-height: 1.15;
-webkit-text-size-adjust: 100%;
}
body {
margin: 0;
margin: 0;
}
.book,
.appendix {
margin: auto;
width: 100%;
margin: auto;
width: 100%;
}
@media screen and (min-width: 768px) {
.book,
.appendix {
max-width: 46rem;
}
.book,
.appendix {
max-width: 46rem;
}
}
@media screen and (min-width: 992px) {
.book,
.appendix {
max-width: 60rem;
}
.book,
.appendix {
max-width: 60rem;
}
}
@media screen and (min-width: 1200px) {
.book,
.appendix {
max-width: 73rem;
}
.book,
.appendix {
max-width: 73rem;
}
}
.book .list-of-examples {
display: none;
display: none;
}
h1 {
font-size: 2em;
margin: 0.67em 0;
font-size: 2em;
margin: 0.67em 0;
}
hr {
box-sizing: content-box;
height: 0;
overflow: visible;
box-sizing: content-box;
height: 0;
overflow: visible;
}
pre {
font-family: monospace, monospace;
font-size: 1em;
font-family: monospace;
font-size: 1em;
}
a {
background-color: transparent;
background-color: transparent;
}
strong {
font-weight: bolder;
font-weight: bolder;
}
code {
font-family: monospace, monospace;
font-size: 1em;
font-family: monospace;
font-size: 1em;
}
sup {
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
font-size: 75%;
line-height: 0;
position: relative;
vertical-align: baseline;
}
sup {
top: -0.5em;
top: -0.5em;
}
::-webkit-file-upload-button {
-webkit-appearance: button;
font: inherit;
-webkit-appearance: button;
font: inherit;
}
pre {
overflow: auto;
overflow: auto;
}
*,
*::before,
*::after {
box-sizing: border-box;
box-sizing: border-box;
}
html {
font-size: 100%;
line-height: 1.77777778;
font-size: 100%;
line-height: 1.77777778;
}
@media screen and (min-width: 4000px) {
html {
background: #000;
}
html {
background: #000;
}
html body {
margin: auto;
max-width: 250rem;
}
html body {
margin: auto;
max-width: 250rem;
}
}
@media screen and (max-width: 320px) {
html {
font-size: calc(16 / 320 * 100vw);
}
html {
font-size: calc(16 / 320 * 100vw);
}
}
body {
font-size: 1rem;
font-family: "Roboto", sans-serif;
font-weight: 300;
color: var(--main-text-color);
background-color: var(--background);
min-height: 100vh;
display: flex;
flex-direction: column;
font-size: 1rem;
font-family: "Roboto", sans-serif;
font-weight: 300;
color: var(--main-text-color);
background-color: var(--background);
min-height: 100vh;
display: flex;
flex-direction: column;
}
@media screen and (max-width: 767.9px) {
body {
padding-left: 1rem;
padding-right: 1rem;
}
body {
padding-left: 1rem;
padding-right: 1rem;
}
}
a {
text-decoration: none;
border-bottom: 1px solid;
color: var(--link-color);
text-decoration: none;
border-bottom: 1px solid;
color: var(--link-color);
}
ul {
padding: 0;
margin-top: 0;
margin-right: 0;
margin-bottom: 1rem;
margin-left: 1rem;
padding: 0;
margin-top: 0;
margin-right: 0;
margin-bottom: 1rem;
margin-left: 1rem;
}
table {
border-collapse: collapse;
width: 100%;
margin-bottom: 1rem;
border-collapse: collapse;
width: 100%;
margin-bottom: 1rem;
}
thead th {
text-align: left;
text-align: left;
}
hr {
margin-top: 1rem;
margin-bottom: 1rem;
margin-top: 1rem;
margin-bottom: 1rem;
}
h1 {
font-weight: 800;
line-height: 110%;
font-size: 200%;
margin-bottom: 1rem;
color: var(--heading-color);
font-weight: 800;
line-height: 110%;
font-size: 200%;
margin-bottom: 1rem;
color: var(--heading-color);
}
h2 {
font-weight: 800;
line-height: 110%;
font-size: 170%;
margin-bottom: 0.625rem;
color: var(--heading-color);
font-weight: 800;
line-height: 110%;
font-size: 170%;
margin-bottom: 0.625rem;
color: var(--heading-color);
}
h2:not(:first-child) {
margin-top: 1rem;
margin-top: 1rem;
}
h3 {
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 150%;
color: var(--heading-color);
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 150%;
color: var(--heading-color);
}
.note h3,
@@ -195,73 +195,73 @@ h3 {
.warning h3,
.caution h3,
.important h3 {
font-size: 120%;
font-size: 120%;
}
h4 {
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 140%;
color: var(--heading-color);
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 140%;
color: var(--heading-color);
}
h5 {
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 130%;
color: var(--small-heading-color);
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 130%;
color: var(--small-heading-color);
}
h6 {
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 120%;
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
font-size: 120%;
}
strong {
font-weight: bold;
font-weight: bold;
}
p {
margin-top: 0;
margin-bottom: 1rem;
margin-top: 0;
margin-bottom: 1rem;
}
dt > *:first-child,
dd > *:first-child {
margin-top: 0;
margin-top: 0;
}
dt > *:last-child,
dd > *:last-child {
margin-bottom: 0;
margin-bottom: 0;
}
pre,
code {
font-family: monospace;
font-family: monospace;
}
code {
color: #ff8657;
background: #f4f4f4;
display: inline-block;
padding: 0 0.5rem;
border: 1px solid #d8d8d8;
border-radius: 0.5rem;
line-height: 1.57777778;
color: #ff8657;
background: #f4f4f4;
display: inline-block;
padding: 0 0.5rem;
border: 1px solid #d8d8d8;
border-radius: 0.5rem;
line-height: 1.57777778;
}
div.book .programlisting,
div.appendix .programlisting {
border-radius: 0.5rem;
padding: 1rem;
overflow: auto;
background: var(--codeblock-background);
color: var(--codeblock-text-color);
border-radius: 0.5rem;
padding: 1rem;
overflow: auto;
background: var(--codeblock-background);
color: var(--codeblock-text-color);
}
div.book .note,
@@ -274,11 +274,11 @@ div.appendix .tip,
div.appendix .warning,
div.appendix .caution,
div.appendix .important {
margin-bottom: 1rem;
border-radius: 0.5rem;
padding: 1.5rem;
overflow: auto;
background: #f4f4f4;
margin-bottom: 1rem;
border-radius: 0.5rem;
padding: 1.5rem;
overflow: auto;
background: #f4f4f4;
}
div.book .note > .title,
@@ -291,11 +291,10 @@ div.appendix .tip > .title,
div.appendix .warning > .title,
div.appendix .caution > .title,
div.appendix .important > .title {
font-weight: 800;
line-height: 110%;
margin-bottom: 1rem;
color: inherit;
margin-bottom: 0;
font-weight: 800;
line-height: 110%;
color: inherit;
margin-bottom: 0;
}
div.book .note > :first-child,
@@ -308,7 +307,7 @@ div.appendix .tip > :first-child,
div.appendix .warning > :first-child,
div.appendix .caution > :first-child,
div.appendix .important > :first-child {
margin-top: 0;
margin-top: 0;
}
div.book .note > :last-child,
@@ -321,122 +320,122 @@ div.appendix .tip > :last-child,
div.appendix .warning > :last-child,
div.appendix .caution > :last-child,
div.appendix .important > :last-child {
margin-bottom: 0;
margin-bottom: 0;
}
div.book .note,
div.book .tip,
div.appendix .note,
div.appendix .tip {
color: var(--note-text-color);
background: var(--note-background);
color: var(--note-text-color);
background: var(--note-background);
}
div.book .warning,
div.book .caution,
div.appendix .warning,
div.appendix .caution {
color: var(--warning-text-color);
background-color: var(--warning-background);
color: var(--warning-text-color);
background-color: var(--warning-background);
}
div.book .section,
div.appendix .section {
margin-top: 2em;
margin-top: 2em;
}
div.book div.example,
div.appendix div.example {
margin-top: 1.5em;
margin-top: 1.5em;
}
div.book div.example details,
div.appendix div.example details {
padding: 5px;
padding: 5px;
}
div.book div.example details[open],
div.appendix div.example details[open] {
border: 1px solid #aaa;
border-radius: 4px;
border: 1px solid #aaa;
border-radius: 4px;
}
div.book div.example details > summary,
div.appendix div.example details > summary {
cursor: pointer;
cursor: pointer;
}
div.book br.example-break,
div.appendix br.example-break {
display: none;
display: none;
}
div.book div.footnotes > hr,
div.appendix div.footnotes > hr {
border-color: #d8d8d8;
border-color: #d8d8d8;
}
div.book div.footnotes > br,
div.appendix div.footnotes > br {
display: none;
display: none;
}
div.book dt,
div.appendix dt {
margin-top: 1em;
margin-top: 1em;
}
div.book .toc dt,
div.appendix .toc dt {
margin-top: 0;
margin-top: 0;
}
div.book .list-of-examples dt,
div.appendix .list-of-examples dt {
margin-top: 0;
margin-top: 0;
}
div.book code,
div.appendix code {
padding: 0;
border: 0;
background-color: inherit;
color: inherit;
font-size: 100%;
-webkit-hyphens: none;
-moz-hyphens: none;
hyphens: none;
padding: 0;
border: 0;
background-color: inherit;
color: inherit;
font-size: 100%;
-webkit-hyphens: none;
-moz-hyphens: none;
hyphens: none;
}
div.book div.toc,
div.appendix div.toc {
margin-bottom: 3em;
border-bottom: 0.0625rem solid #d8d8d8;
margin-bottom: 3em;
border-bottom: 0.0625rem solid #d8d8d8;
}
div.book div.toc dd,
div.appendix div.toc dd {
margin-left: 2em;
margin-left: 2em;
}
div.book span.command,
div.appendix span.command {
font-family: monospace;
-webkit-hyphens: none;
-moz-hyphens: none;
hyphens: none;
font-family: monospace;
-webkit-hyphens: none;
-moz-hyphens: none;
hyphens: none;
}
div.book .informaltable th,
div.book .informaltable td,
div.appendix .informaltable th,
div.appendix .informaltable td {
padding: 0.5rem;
padding: 0.5rem;
}
div.book .variablelist .term,
div.appendix .variablelist .term {
font-weight: 500;
font-weight: 500;
}
/*
@@ -444,50 +443,50 @@ div.appendix .variablelist .term {
For more details, see https://highlightjs.readthedocs.io/en/latest/css-classes-reference.html#stylable-scopes
*/
.hljs-meta.prompt_ {
user-select: none;
-webkit-user-select: none;
user-select: none;
-webkit-user-select: none;
}
:root {
--background: #fff;
--main-text-color: #000;
--link-color: #405d99;
--heading-color: #6586c8;
--small-heading-color: #6a6a6a;
--note-text-color: #5277c3;
--note-background: #f2f8fd;
--warning-text-color: #cc3900;
--warning-background: #fff5e1;
--codeblock-background: #f2f8fd;
--codeblock-text-color: #000;
--background: #fff;
--main-text-color: #000;
--link-color: #405d99;
--heading-color: #6586c8;
--small-heading-color: #6a6a6a;
--note-text-color: #5277c3;
--note-background: #f2f8fd;
--warning-text-color: #cc3900;
--warning-background: #fff5e1;
--codeblock-background: #f2f8fd;
--codeblock-text-color: #000;
}
@media (prefers-color-scheme: dark) {
:root {
--background: #242424;
--main-text-color: #fff;
--link-color: #6586c8;
--small-heading-color: #fff;
--note-background: none;
--warning-background: none;
--codeblock-background: #393939;
--codeblock-text-color: #fff;
}
:root {
--background: #242424;
--main-text-color: #fff;
--link-color: #6586c8;
--small-heading-color: #fff;
--note-background: none;
--warning-background: none;
--codeblock-background: #393939;
--codeblock-text-color: #fff;
}
div.book .note,
div.book .tip,
div.appendix .note,
div.appendix .tip,
div.book .warning,
div.book .caution,
div.appendix .warning,
div.appendix .caution {
border: 2px solid;
font-weight: 400;
}
div.book .note,
div.book .tip,
div.appendix .note,
div.appendix .tip,
div.book .warning,
div.book .caution,
div.appendix .warning,
div.appendix .caution {
border: 2px solid;
font-weight: 400;
}
}
@font-face {
font-family: Roboto;
src: url(Roboto.ttf);
font-family: Roboto;
src: url(Roboto.ttf);
}

View File

@@ -3,17 +3,17 @@ const path = require('path')
// This has to match the logic in pkgs/development/tools/yarn2nix-moretea/yarn2nix/lib/urlToName.js
// so that fixup_yarn_lock produces the same paths
const urlToName = url => {
const isCodeloadGitTarballUrl = url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')
const isCodeloadGitTarballUrl = url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')
if (url.startsWith('file:')) {
return url
} else if (url.startsWith('git+') || isCodeloadGitTarballUrl) {
return path.basename(url)
} else {
return url
.replace(/https:\/\/(.)*(.com)\//g, '') // prevents having long directory names
.replace(/[@/%:-]/g, '_') // replace @ and : and - and % characters with underscore
}
if (url.startsWith('file:')) {
return url
} else if (url.startsWith('git+') || isCodeloadGitTarballUrl) {
return path.basename(url)
} else {
return url
.replace(/https:\/\/(.)*(.com)\//g, '') // prevents having long directory names
.replace(/[@/%:-]/g, '_') // replace @ and : and - and % characters with underscore
}
}
module.exports = { urlToName };

View File

@@ -7,79 +7,79 @@ const lockfile = require('./yarnpkg-lockfile.js')
const { urlToName } = require('./common.js')
const fixupYarnLock = async (lockContents, verbose) => {
const lockData = lockfile.parse(lockContents)
const lockData = lockfile.parse(lockContents)
const fixedData = Object.fromEntries(
Object.entries(lockData.object)
.map(([dep, pkg]) => {
if (pkg.resolved === undefined) {
console.warn(`no resolved URL for package ${dep}`)
var maybeFile = dep.split("@", 2)[1]
if (maybeFile.startsWith("file:")) {
console.log(`Rewriting URL for local file dependency ${dep}`)
pkg.resolved = maybeFile
}
return [dep, pkg]
}
const [ url, hash ] = pkg.resolved.split("#", 2)
const fixedData = Object.fromEntries(
Object.entries(lockData.object)
.map(([dep, pkg]) => {
if (pkg.resolved === undefined) {
console.warn(`no resolved URL for package ${dep}`)
var maybeFile = dep.split("@", 2)[1]
if (maybeFile.startsWith("file:")) {
console.log(`Rewriting URL for local file dependency ${dep}`)
pkg.resolved = maybeFile
}
return [dep, pkg]
}
const [ url, hash ] = pkg.resolved.split("#", 2)
if (hash || url.startsWith("https://codeload.github.com/")) {
if (verbose) console.log(`Removing integrity for git dependency ${dep}`)
delete pkg.integrity
}
if (hash || url.startsWith("https://codeload.github.com/")) {
if (verbose) console.log(`Removing integrity for git dependency ${dep}`)
delete pkg.integrity
}
if (verbose) console.log(`Rewriting URL ${url} for dependency ${dep}`)
pkg.resolved = urlToName(url)
if (hash)
pkg.resolved += `#${hash}`
if (verbose) console.log(`Rewriting URL ${url} for dependency ${dep}`)
pkg.resolved = urlToName(url)
if (hash)
pkg.resolved += `#${hash}`
return [dep, pkg]
})
)
return [dep, pkg]
})
)
if (verbose) console.log('Done')
if (verbose) console.log('Done')
return fixedData
return fixedData
}
const showUsage = async () => {
process.stderr.write(`
process.stderr.write(`
syntax: fixup-yarn-lock [path to yarn.lock] [options]
Options:
-h --help Show this help
-v --verbose Verbose output
`)
process.exit(1)
process.exit(1)
}
const main = async () => {
const args = process.argv.slice(2)
let next, lockFile, verbose
while (next = args.shift()) {
if (next == '--verbose' || next == '-v') {
verbose = true
} else if (next == '--help' || next == '-h') {
showUsage()
} else if (!lockFile) {
lockFile = next
} else {
showUsage()
}
}
let lockContents
try {
lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
} catch {
showUsage()
}
const args = process.argv.slice(2)
let next, lockFile, verbose
while (next = args.shift()) {
if (next == '--verbose' || next == '-v') {
verbose = true
} else if (next == '--help' || next == '-h') {
showUsage()
} else if (!lockFile) {
lockFile = next
} else {
showUsage()
}
}
let lockContents
try {
lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
} catch {
showUsage()
}
const fixedData = await fixupYarnLock(lockContents, verbose)
await fs.promises.writeFile(lockFile || 'yarn.lock', lockfile.stringify(fixedData))
const fixedData = await fixupYarnLock(lockContents, verbose)
await fs.promises.writeFile(lockFile || 'yarn.lock', lockfile.stringify(fixedData))
}
main()
.catch(e => {
console.error(e)
process.exit(1)
})
.catch(e => {
console.error(e)
process.exit(1)
})

View File

@@ -15,155 +15,155 @@ const { urlToName } = require('./common.js')
const execFile = promisify(child_process.execFile)
const exec = async (...args) => {
const res = await execFile(...args)
if (res.error) throw new Error(res.stderr)
return res
const res = await execFile(...args)
if (res.error) throw new Error(res.stderr)
return res
}
// Download `url` to `fileName` over HTTPS, following up to 10 redirects,
// and verify the stream's hash (hex digest of `hashType`, default sha1)
// against `expectedHash`. Resolves on success; rejects on hash mismatch,
// too many redirects, or a stream error. If no expectedHash is given the
// download is accepted with a warning (nothing in the lockfile to check).
const downloadFileHttps = (fileName, url, expectedHash, hashType = 'sha1') => {
  return new Promise((resolve, reject) => {
    const get = (url, redirects = 0) => https.get(url, res => {
      if (redirects > 10) {
        reject(new Error('Too many redirects!'))
        return
      }
      // NOTE(review): only 301/302 are followed; 307/308 would fall through
      // to hashing the redirect body — confirm whether that matters here.
      if (res.statusCode === 301 || res.statusCode === 302) {
        return get(res.headers.location, redirects + 1)
      }

      const file = fs.createWriteStream(fileName)
      const hash = crypto.createHash(hashType)
      // Tee the response into the output file and the hash in one pass.
      res.pipe(file)
      res.pipe(hash).setEncoding('hex')
      res.on('end', () => {
        file.close()
        const h = hash.read()
        if (expectedHash === undefined) {
          console.log(`Warning: lockfile url ${url} doesn't end in "#<hash>" to validate against. Downloaded file had hash ${h}.`)
        } else if (h !== expectedHash) return reject(new Error(`hash mismatch, expected ${expectedHash}, got ${h} for ${url}`))
        resolve()
      })
      res.on('error', e => reject(e))
    })
    get(url)
  })
}
// Fetch a git repository at `rev` via nix-prefetch-git into a temp dir,
// then pack it into `fileName` as a deterministic tarball and clean up.
const downloadGit = async (fileName, url, rev) => {
  await exec('nix-prefetch-git', [
    '--out', fileName + '.tmp',
    '--url', url,
    '--rev', rev,
    '--builder'
  ])

  await exec('tar', [
    // hopefully make it reproducible across runs and systems
    '--owner=0', '--group=0', '--numeric-owner', '--format=gnu', '--sort=name', '--mtime=@1',

    // Set u+w because tar-fs can't unpack archives with read-only dirs: https://github.com/mafintosh/tar-fs/issues/79
    '--mode', 'u+w',

    '-C', fileName + '.tmp',
    '-cf', fileName, '.'
  ])

  await exec('rm', [ '-rf', fileName + '.tmp', ])
}
// Decide whether a lockfile "resolved" pattern points at a git repository,
// mirroring yarn's own exotic git resolver heuristics.
const isGitUrl = pattern => {
  // https://github.com/yarnpkg/yarn/blob/3119382885ea373d3c13d6a846de743eca8c914b/src/resolvers/exotics/git-resolver.js#L15-L47
  const GIT_HOSTS = ['github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org']
  const GIT_PATTERN_MATCHERS = [/^git:/, /^git\+.+:/, /^ssh:/, /^https?:.+\.git$/, /^https?:.+\.git#.+/]

  // Scheme-based detection first: git:, git+*, ssh:, or an https URL ending in .git.
  for (const matcher of GIT_PATTERN_MATCHERS) if (matcher.test(pattern)) return true

  const { hostname, path } = url.parse(pattern)
  if (hostname && path && GIT_HOSTS.indexOf(hostname) >= 0
    // only if dependency is pointing to a git repo,
    // e.g. facebook/flow and not file in a git repo facebook/flow/archive/v1.0.0.tar.gz
    && path.split('/').filter(p => !!p).length === 2
  ) return true

  return false
}
// Download one lockfile entry (`pkg` = { key, resolved, integrity?, ... }) into
// the current directory, dispatching on the shape of the resolved URL:
// GitHub codeload/archive tarballs and git URLs go through downloadGit (so the
// result is a reproducible tarball); plain https URLs go through
// downloadFileHttps; file:/link: entries are skipped with a message.
const downloadPkg = (pkg, verbose) => {
  for (const marker of ['@file:', '@link:']) {
    const split = pkg.key.split(marker)
    if (split.length === 2) {
      console.info(`ignoring lockfile entry "${split[0]}" which points at path "${split[1]}"`)
      return
    } else if (split.length > 2) {
      throw new Error(`The lockfile entry key "${pkg.key}" contains "${marker}" more than once. Processing is not implemented.`)
    }
  }

  if (pkg.resolved === undefined) {
    throw new Error(`The lockfile entry with key "${pkg.key}" cannot be downloaded because it is missing the "resolved" attribute, which should contain the URL to download from. The lockfile might be invalid.`)
  }

  // "resolved" is "<url>#<hash>"; the fragment (when present) is the checksum.
  const [ url, hash ] = pkg.resolved.split('#')
  if (verbose) console.log('downloading ' + url)
  const fileName = urlToName(url)
  const s = url.split('/')
  if (url.startsWith('https://codeload.github.com/') && url.includes('/tar.gz/')) {
    return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1])
  } else if (url.startsWith('https://github.com/') && url.endsWith('.tar.gz') &&
    (
      s.length <= 5 || // https://github.com/owner/repo.tgz#feedface...
      s[5] === 'archive' // https://github.com/owner/repo/archive/refs/tags/v0.220.1.tar.gz
    )) {
    // Escaped dots so only a literal ".tar.gz" suffix is stripped from the rev.
    return downloadGit(fileName, `https://github.com/${s[3]}/${s[4]}.git`, s[s.length-1].replace(/\.tar\.gz$/, ''))
  } else if (isGitUrl(url)) {
    return downloadGit(fileName, url.replace(/^git\+/, ''), hash)
  } else if (url.startsWith('https://')) {
    // Prefer the SRI "integrity" field ("<type>-<base64>") over the URL fragment.
    if (typeof pkg.integrity === 'string' || pkg.integrity instanceof String) {
      const [ type, checksum ] = pkg.integrity.split('-')
      return downloadFileHttps(fileName, url, Buffer.from(checksum, 'base64').toString('hex'), type)
    }
    return downloadFileHttps(fileName, url, hash)
  } else if (url.startsWith('file:')) {
    console.warn(`ignoring unsupported file:path url "${url}"`)
  } else {
    throw new Error('don\'t know how to download "' + url + '"')
  }
}
// Drain `tasks` (an array of zero-arg async functions) with 4 concurrent
// workers; each worker pulls the next task off the shared array until empty.
// Resolves when every task has settled; rejects on the first task error.
const performParallel = tasks => {
  const worker = async () => {
    while (tasks.length > 0) await tasks.shift()()
  }

  const workers = []
  for (let i = 0; i < 4; i++) {
    workers.push(worker())
  }

  return Promise.all(workers)
}
// Deduplicate `arr` by the key computed via `callback`; on duplicate keys the
// LAST occurrence wins (Map.set overwrites). Insertion order of keys is kept.
// This could be implemented using [`Map.groupBy`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map/groupBy),
// but that method is only supported starting with Node 21
const uniqueBy = (arr, callback) => {
  const map = new Map()
  for (const elem of arr) {
    map.set(callback(elem), elem)
  }
  return [...map.values()]
}
// Parse the lockfile text, download every unique resolved dependency into the
// current directory (deduplicated by resolved URL, 4-way parallel), then write
// the original lockfile alongside the downloads.
const prefetchYarnDeps = async (lockContents, verbose) => {
  const lockData = lockfile.parse(lockContents)
  await performParallel(
    uniqueBy(Object.entries(lockData.object), ([_, value]) => value.resolved)
      .map(([key, value]) => () => downloadPkg({ key, ...value }, verbose))
  )
  await fs.promises.writeFile('yarn.lock', lockContents)
  if (verbose) console.log('Done')
}
// Print CLI usage to stderr and terminate with a non-zero exit code.
// Called both for explicit --help and for any argument-parsing error.
const showUsage = async () => {
  process.stderr.write(`
syntax: prefetch-yarn-deps [path to yarn.lock] [options]

Options:
  -h --help         Show this help
  -v --verbose      Verbose output
  --builder         Only perform the download to current directory, then exit
`)
  process.exit(1)
}
// CLI entry point: parse arguments and read the lockfile (default ./yarn.lock).
// With --builder, download straight into the current directory (used from the
// Nix builder). Otherwise, download into a fresh mktemp dir, print its
// nix-hash (sha256, base32), and clean the dir up even on failure.
const main = async () => {
  const args = process.argv.slice(2)
  let next, lockFile, verbose, isBuilder
  // Deliberately stops on any falsy arg (undefined when exhausted, or an
  // empty-string argument) — matches the original truthiness-based loop.
  while ((next = args.shift())) {
    if (next === '--builder') {
      isBuilder = true
    } else if (next === '--verbose' || next === '-v') {
      verbose = true
    } else if (next === '--help' || next === '-h') {
      showUsage()
    } else if (!lockFile) {
      lockFile = next
    } else {
      // More than one positional argument is an error.
      showUsage()
    }
  }

  let lockContents
  try {
    lockContents = await fs.promises.readFile(lockFile || 'yarn.lock', 'utf-8')
  } catch {
    // Unreadable/missing lockfile: treat as a usage error (showUsage exits).
    showUsage()
  }

  if (isBuilder) {
    await prefetchYarnDeps(lockContents, verbose)
  } else {
    const { stdout: tmpDir } = await exec('mktemp', [ '-d' ])

    try {
      process.chdir(tmpDir.trim())
      await prefetchYarnDeps(lockContents, verbose)
      const { stdout: hash } = await exec('nix-hash', [ '--type', 'sha256', '--base32', tmpDir.trim() ])
      console.log(hash)
    } finally {
      await exec('rm', [ '-rf', tmpDir.trim() ])
    }
  }
}
// Run the CLI; surface any unhandled error and exit non-zero so callers
// (e.g. Nix builders) see the failure.
main()
  .catch(e => {
    console.error(e)
    process.exit(1)
  })