Merge remote-tracking branch 'upstream/nixos-25.05' into nixos-25.05
@@ -23,8 +23,7 @@ insert_final_newline = false

# see https://nixos.org/nixpkgs/manual/#chap-conventions

# Match json/lockfiles/markdown/nix/perl/python/ruby/shell/docbook files, set indent to spaces
[*.{bash,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
[*.{bash,css,js,json,lock,md,nix,pl,pm,py,rb,sh,xml}]
indent_style = space

# Match docbook files, set indent width of one
@@ -32,7 +31,7 @@ indent_style = space
indent_size = 1

# Match json/lockfiles/markdown/nix/ruby files, set indent width of two
[*.{json,lock,md,nix,rb}]
[*.{js,json,lock,md,nix,rb}]
indent_size = 2

# Match all the Bash code in Nix files, set indent width of two

@@ -193,6 +193,9 @@ cffc27daf06c77c0d76bc35d24b929cb9d68c3c9
# nixos/kanidm: inherit lib, nixfmt
8f18393d380079904d072007fb19dc64baef0a3a

# fetchhg: format after refactoring with lib.extendMkDerivation and make overridable (#423539)
34a5b1eb23129f8fb62c677e3760903f6d43228f

# fetchurl: nixfmt-rfc-style
ce21e97a1f20dee15da85c084f9d1148d84f853b

@@ -266,3 +269,7 @@ a034fb50f79816c6738fb48b48503b09ea3b0132

# treewide: switch instances of lib.teams.*.members to the new meta.teams attribute
05580f4b4433fda48fff30f60dfd303d6ee05d21

# nixfmt 1.0.0
a46262ae77e4016fe5a4a390c4a39c0c1b266428 # !autorebase nix-shell --run treefmt
aefcb0d50d1124314429a11ed6b7aaaedf2861c5 # !autorebase nix-shell --run treefmt

.github/ISSUE_TEMPLATE/01_bug_report.yml (vendored) | 2
@@ -100,7 +100,7 @@ body:
label: "Notify maintainers"
description: |
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.
value: |
value: |2


---

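The recurring change in these template hunks is `value: |` becoming `value: |2`. The trailing digit is YAML's block-scalar indentation indicator: it fixes the block's base indentation at two spaces instead of letting the parser auto-detect it from the first non-empty line, which matters when the block's content (such as the indented `---` separator above) begins with extra leading whitespace. A minimal sketch, illustrative only and not taken from these templates:

```yaml
# Illustrative sketch only, not from the templates above.
auto: |
  ---
# auto-detected indentation: the value is "---\n"

pinned: |2
    ---
# indentation pinned at two spaces: the extra indent is kept, giving "  ---\n"
```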
@@ -114,7 +114,7 @@ body:
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.

If this issue is related to the Darwin packaging architecture as a whole, or is related to the core Darwin frameworks, consider mentioning the `@NixOS/darwin-core` team.
value: |
value: |2


---

@@ -104,7 +104,7 @@ body:
Please note that the maintainer attribute name does not always match the maintainer's GitHub username. If that occurs, try looking in [`maintainers/maintainer-list.nix`](https://github.com/NixOS/nixpkgs/blob/master/maintainers/maintainer-list.nix) for the maintainer attribute name, and checking if the maintainer has a listed GitHub username.

If in doubt, check `git blame` for whoever last touched the module, or check the associated package's maintainers. Please add the mentions above the `---` characters.
value: |
value: |2


---

.github/ISSUE_TEMPLATE/04_build_failure.yml (vendored) | 2
@@ -109,7 +109,7 @@ body:
label: "Notify maintainers"
description: |
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.
value: |
value: |2


---

.github/ISSUE_TEMPLATE/05_update_request.yml (vendored) | 2
@@ -84,7 +84,7 @@ body:
label: "Notify maintainers"
description: |
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.
value: |
value: |2


---

.github/ISSUE_TEMPLATE/06_module_request.yml (vendored) | 2
@@ -60,7 +60,7 @@ body:
label: "Notify maintainers"
description: |
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.
value: |
value: |2


---

@@ -62,7 +62,7 @@ body:
label: "Notify maintainers"
description: |
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.
value: |
value: |2


---

@@ -46,7 +46,7 @@ body:
label: "Notify maintainers"
description: |
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.
value: |
value: |2


---

@@ -119,7 +119,7 @@ body:
label: "Notify maintainers"
description: |
Please mention the people who are in the **Maintainers** list of the offending package. This is done by searching for the package on the [NixOS Package Search](https://search.nixos.org/packages) and mentioning the people listed under **Maintainers** by prefixing their GitHub usernames with an '@' character. Please add the mentions above the `---` characters in the template below.
value: |
value: |2


---

.github/ISSUE_TEMPLATE/10_package_request.yml (vendored, new file) | 36
@@ -0,0 +1,36 @@
name: "Request: Nix Package"
description: "Package requests are no longer accepted. Please open a Pull Request with your desired package instead."
title: "Package Request"
labels: ["0.kind: packaging request", "4.workflow: auto-close"]
body:
- type: "markdown"
attributes:
value: |
<p align="center">
<a href="https://nixos.org">
<picture>
<source media="(prefers-color-scheme: light)" srcset="https://raw.githubusercontent.com/NixOS/nixos-artwork/refs/heads/master/logo/nixos.svg">
<source media="(prefers-color-scheme: dark)" srcset="https://raw.githubusercontent.com/NixOS/nixos-artwork/refs/heads/master/logo/nixos-white.svg">
<img src="https://raw.githubusercontent.com/NixOS/nixos-artwork/refs/heads/master/logo/nixos.svg" width="400px" alt="NixOS logo">
</picture>
</a>
</p>

Thank you for your interest in packaging new software in Nixpkgs. Unfortunately, to mitigate the unsustainable growth of unmaintained packages, **Nixpkgs is no longer accepting package requests** via Issues.

As a [volunteer community][community], we are always open to new contributors. If you wish to see this package in Nixpkgs, **we encourage you to [contribute] it yourself**, via a Pull Request. Anyone can [become a package maintainer][maintainers]! You can find language-specific packaging information in the [Nixpkgs Manual][nixpkgs]. Should you need any help, please reach out to the community on [Matrix] or [Discourse].

[community]: https://nixos.org/community
[contribute]: https://github.com/NixOS/nixpkgs/blob/master/pkgs/README.md#quick-start-to-adding-a-package
[maintainers]: https://github.com/NixOS/nixpkgs/blob/master/maintainers/README.md
[nixpkgs]: https://nixos.org/manual/nixpkgs/unstable/
[Matrix]: https://matrix.to/#/#dev:nixos.org
[Discourse]: https://discourse.nixos.org/c/dev/14

---
- type: "checkboxes"
id: "ignored"
attributes:
label: "Issues for new package requests are not accepted. Please open a Pull Request instead."
options:
- label: "I didn't read any of that."
.github/actions/checkout/action.yml (vendored, new file) | 96
@@ -0,0 +1,96 @@
|
||||
name: Checkout
|
||||
|
||||
description: 'Checkout into trusted / untrusted / pinned folders consistently.'
|
||||
|
||||
inputs:
|
||||
merged-as-untrusted-at:
|
||||
description: "Whether and which SHA to checkout for the merge commit in the ./nixpkgs/untrusted folder."
|
||||
target-as-trusted-at:
|
||||
description: "Whether and which SHA to checkout for the target commit in the ./nixpkgs/trusted folder."
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
env:
|
||||
MERGED_SHA: ${{ inputs.merged-as-untrusted-at }}
|
||||
TARGET_SHA: ${{ inputs.target-as-trusted-at }}
|
||||
with:
|
||||
script: |
|
||||
const { spawn } = require('node:child_process')
|
||||
const { join } = require('node:path')
|
||||
|
||||
async function run(cmd, ...args) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const proc = spawn(cmd, args, {
|
||||
stdio: 'inherit'
|
||||
})
|
||||
proc.on('close', (code) => {
|
||||
if (code === 0) resolve()
|
||||
else reject(code)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// These are set automatically by the sparse checkout for .github/actions.
|
||||
// Undo them, otherwise git fetch below will not do anything.
|
||||
await run('git', 'config', 'unset', 'remote.origin.promisor')
|
||||
await run('git', 'config', 'unset', 'remote.origin.partialclonefilter')
|
||||
|
||||
// Getting the pinned SHA via API allows us to do one single fetch call for all commits.
|
||||
// Otherwise we would have to fetch merged/target first, read pinned, fetch again.
|
||||
// A single fetch call comes with a lot less overhead. The fetch takes essentially the
|
||||
// same time no matter whether it's 1, 2 or 3 commits at once.
|
||||
async function getPinnedSha(ref) {
|
||||
if (!ref) return undefined
|
||||
const { content, encoding } = (await github.rest.repos.getContent({
|
||||
...context.repo,
|
||||
path: 'ci/pinned.json',
|
||||
ref,
|
||||
})).data
|
||||
const pinned = JSON.parse(Buffer.from(content, encoding).toString())
|
||||
return pinned.pins.nixpkgs.revision
|
||||
}
|
||||
|
||||
const commits = [
|
||||
{
|
||||
sha: process.env.MERGED_SHA,
|
||||
path: 'untrusted',
|
||||
},
|
||||
{
|
||||
sha: await getPinnedSha(process.env.MERGED_SHA),
|
||||
path: 'untrusted-pinned'
|
||||
},
|
||||
{
|
||||
sha: process.env.TARGET_SHA,
|
||||
path: 'trusted',
|
||||
},
|
||||
{
|
||||
sha: await getPinnedSha(process.env.TARGET_SHA),
|
||||
path: 'trusted-pinned'
|
||||
}
|
||||
].filter(({ sha }) => Boolean(sha))
|
||||
|
||||
console.log('Checking out the following commits:', commits)
|
||||
|
||||
// Fetching all commits at once is much faster than doing multiple checkouts.
|
||||
// This would fail without --refetch, because we had a partial clone before, but changed it above.
|
||||
await run('git', 'fetch', '--depth=1', '--refetch', 'origin', ...(commits.map(({ sha }) => sha)))
|
||||
|
||||
// Checking out onto tmpfs takes 1s and is faster by at least a factor of 10.
|
||||
await run('mkdir', 'nixpkgs')
|
||||
switch (process.env.RUNNER_OS) {
|
||||
case 'macOS':
|
||||
await run('sudo', 'mount_tmpfs', 'nixpkgs')
|
||||
break
|
||||
case 'Linux':
|
||||
await run('sudo', 'mount', '-t', 'tmpfs', 'tmpfs', 'nixpkgs')
|
||||
break
|
||||
}
|
||||
|
||||
// Create all worktrees in parallel.
|
||||
await Promise.all(commits.map(async ({ sha, path }) => {
|
||||
await run('git', 'worktree', 'add', join('nixpkgs', path), sha, '--no-checkout')
|
||||
await run('git', '-C', join('nixpkgs', path), 'sparse-checkout', 'disable')
|
||||
await run('git', '-C', join('nixpkgs', path), 'checkout', '--progress')
|
||||
}))
|
||||
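For context on how the two inputs are meant to be used together, here is a minimal caller sketch. It is hypothetical: the `prepare` job name and its `mergedSha`/`targetSha` outputs are assumptions, not part of this diff. It only illustrates that the composite action expects the SHAs to be computed elsewhere and a sparse checkout of `.github/actions` to exist first.

```yaml
# Hypothetical caller workflow sketch; job and output names are assumptions.
jobs:
  example:
    runs-on: ubuntu-latest
    needs: prepare   # assumed job that computes the merge/target SHAs
    steps:
      # The composite action lives in-repo, so check out .github/actions first.
      - uses: actions/checkout@v4
        with:
          sparse-checkout: .github/actions
      - uses: ./.github/actions/checkout
        with:
          merged-as-untrusted-at: ${{ needs.prepare.outputs.mergedSha }}
          target-as-trusted-at: ${{ needs.prepare.outputs.targetSha }}
      # Afterwards ./nixpkgs/untrusted, ./nixpkgs/trusted and the *-pinned
      # worktrees exist for whichever inputs were provided.
```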
.github/actions/get-merge-commit/action.yml (vendored, deleted) | 88
@@ -1,88 +0,0 @@
|
||||
name: Get merge commit
|
||||
|
||||
description: 'Checks whether the Pull Request is mergeable and checks out the repo at up to two commits: The result of a temporary merge of the head branch into the target branch ("merged"), and the parent of that commit on the target branch ("target"). Handles push events and merge conflicts gracefully.'
|
||||
|
||||
inputs:
|
||||
merged-as-untrusted:
|
||||
description: "Whether to checkout the merge commit in the ./untrusted folder."
|
||||
type: boolean
|
||||
target-as-trusted:
|
||||
description: "Whether to checkout the target commit in the ./trusted folder."
|
||||
type: boolean
|
||||
|
||||
outputs:
|
||||
mergedSha:
|
||||
description: "The merge commit SHA"
|
||||
value: ${{ steps.commits.outputs.mergedSha }}
|
||||
targetSha:
|
||||
description: "The target commit SHA"
|
||||
value: ${{ steps.commits.outputs.targetSha }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- id: commits
|
||||
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
|
||||
with:
|
||||
script: |
|
||||
if (context.eventName == 'push') return core.setOutput('mergedSha', context.sha)
|
||||
|
||||
for (const retryInterval of [5, 10, 20, 40, 80]) {
|
||||
console.log("Checking whether the pull request can be merged...")
|
||||
const prInfo = (await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.payload.pull_request.number
|
||||
})).data
|
||||
|
||||
if (prInfo.state != 'open') throw new Error ("PR is not open anymore.")
|
||||
|
||||
if (prInfo.mergeable == null) {
|
||||
console.log(`GitHub is still computing whether this PR can be merged, waiting ${retryInterval} seconds before trying again...`)
|
||||
await new Promise(resolve => setTimeout(resolve, retryInterval * 1000))
|
||||
continue
|
||||
}
|
||||
|
||||
let mergedSha, targetSha
|
||||
|
||||
if (prInfo.mergeable) {
|
||||
console.log("The PR can be merged.")
|
||||
|
||||
mergedSha = prInfo.merge_commit_sha
|
||||
targetSha = (await github.rest.repos.getCommit({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
ref: prInfo.merge_commit_sha
|
||||
})).data.parents[0].sha
|
||||
} else {
|
||||
console.log("The PR has a merge conflict.")
|
||||
|
||||
mergedSha = prInfo.head.sha
|
||||
targetSha = (await github.rest.repos.compareCommitsWithBasehead({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
basehead: `${prInfo.base.sha}...${prInfo.head.sha}`
|
||||
})).data.merge_base_commit.sha
|
||||
}
|
||||
|
||||
console.log(`Checking the commits:\nmerged:${mergedSha}\ntarget:${targetSha}`)
|
||||
core.setOutput('mergedSha', mergedSha)
|
||||
core.setOutput('targetSha', targetSha)
|
||||
return
|
||||
}
|
||||
throw new Error("Not retrying anymore. It's likely that GitHub is having internal issues: check https://www.githubstatus.com.")
|
||||
|
||||
# Would be great to do the checkouts in git worktrees of the existing sparse checkout instead,
|
||||
# but Nix is broken with them:
|
||||
# https://github.com/NixOS/nix/issues/6073
|
||||
- if: inputs.merged-as-untrusted && steps.commits.outputs.mergedSha
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ steps.commits.outputs.mergedSha }}
|
||||
path: untrusted
|
||||
|
||||
- if: inputs.target-as-trusted && steps.commits.outputs.targetSha
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ steps.commits.outputs.targetSha }}
|
||||
path: trusted
|
||||
.github/dependabot.yml (vendored) | 2
@@ -4,4 +4,4 @@ updates:
directory: "/"
schedule:
interval: "weekly"
labels: [ ]
labels: []

.github/labeler-development-branches.yml (vendored) | 18
@@ -3,21 +3,21 @@

"4.workflow: package set update":
- any:
- head-branch:
- '-updates$'
- head-branch:
- '-updates$'

"4.workflow: staging":
- any:
- head-branch:
- '^staging-next$'
- '^staging-next-'
- head-branch:
- '^staging-next$'
- '^staging-next-'

"6.topic: haskell":
- any:
- head-branch:
- '^haskell-updates$'
- head-branch:
- '^haskell-updates$'

"6.topic: python":
- any:
- head-branch:
- '^python-updates$'
- head-branch:
- '^python-updates$'

.github/labeler-no-sync.yml (vendored) | 33
@@ -5,28 +5,21 @@

"6.topic: policy discussion":
- any:
- changed-files:
- any-glob-to-any-file:
- .github/**/*
- CONTRIBUTING.md
- pkgs/README.md
- nixos/README.md
- maintainers/README.md
- lib/README.md
- doc/README.md
- changed-files:
- any-glob-to-any-file:
- .github/**/*
- CONTRIBUTING.md
- pkgs/README.md
- nixos/README.md
- maintainers/README.md
- lib/README.md
- doc/README.md

"8.has: documentation":
- any:
- changed-files:
- any-glob-to-any-file:
- doc/**/*
- nixos/doc/**/*

"backport release-24.11":
- any:
- changed-files:
- any-glob-to-any-file:
- .github/workflows/*
- ci/**/*.*
- changed-files:
- any-glob-to-any-file:
- doc/**/*
- nixos/doc/**/*

# keep-sorted end

.github/labeler.yml (vendored) | 784
@@ -5,599 +5,599 @@
|
||||
|
||||
"4.workflow: backport":
|
||||
- any:
|
||||
- base-branch:
|
||||
- '^release-'
|
||||
- '^staging-\d'
|
||||
- '^staging-next-\d'
|
||||
- base-branch:
|
||||
- '^release-'
|
||||
- '^staging-\d'
|
||||
- '^staging-next-\d'
|
||||
|
||||
# NOTE: bsd, darwin and cross-compilation labels are handled by ofborg
|
||||
"6.topic: agda":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/agda.section.md
|
||||
- nixos/tests/agda.nix
|
||||
- pkgs/build-support/agda/**/*
|
||||
- pkgs/development/libraries/agda/**/*
|
||||
- pkgs/top-level/agda-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/agda.section.md
|
||||
- nixos/tests/agda.nix
|
||||
- pkgs/build-support/agda/**/*
|
||||
- pkgs/development/libraries/agda/**/*
|
||||
- pkgs/top-level/agda-packages.nix
|
||||
|
||||
"6.topic: cinnamon":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/cinnamon.nix
|
||||
- nixos/tests/cinnamon.nix
|
||||
- nixos/tests/cinnamon-wayland.nix
|
||||
- pkgs/by-name/ci/cinnamon-*/**/*
|
||||
- pkgs/by-name/cj/cjs/**/*
|
||||
- pkgs/by-name/mu/muffin/**/*
|
||||
- pkgs/by-name/ne/nemo/**/*
|
||||
- pkgs/by-name/ne/nemo-*/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/cinnamon.nix
|
||||
- nixos/tests/cinnamon.nix
|
||||
- nixos/tests/cinnamon-wayland.nix
|
||||
- pkgs/by-name/ci/cinnamon-*/**/*
|
||||
- pkgs/by-name/cj/cjs/**/*
|
||||
- pkgs/by-name/mu/muffin/**/*
|
||||
- pkgs/by-name/ne/nemo/**/*
|
||||
- pkgs/by-name/ne/nemo-*/**/*
|
||||
|
||||
"6.topic: continuous integration":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- .github/**/*
|
||||
- ci/**/*.*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- .github/**/*
|
||||
- ci/**/*.*
|
||||
|
||||
"6.topic: coq":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/science/logic/coq/**/*
|
||||
- pkgs/development/coq-modules/**/*
|
||||
- pkgs/top-level/coq-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/science/logic/coq/**/*
|
||||
- pkgs/development/coq-modules/**/*
|
||||
- pkgs/top-level/coq-packages.nix
|
||||
|
||||
"6.topic: COSMIC":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/desktop-managers/cosmic.nix
|
||||
- nixos/modules/services/display-managers/cosmic-greeter.nix
|
||||
- nixos/tests/cosmic.nix
|
||||
- pkgs/by-name/co/cosmic-*/**/*
|
||||
- pkgs/by-name/xd/xdg-desktop-portal-cosmic/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/desktop-managers/cosmic.nix
|
||||
- nixos/modules/services/display-managers/cosmic-greeter.nix
|
||||
- nixos/tests/cosmic.nix
|
||||
- pkgs/by-name/co/cosmic-*/**/*
|
||||
- pkgs/by-name/xd/xdg-desktop-portal-cosmic/*
|
||||
|
||||
"6.topic: crystal":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/compilers/crystal/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/compilers/crystal/**/*
|
||||
|
||||
"6.topic: cuda":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/cuda-modules/**/*
|
||||
- pkgs/top-level/cuda-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/cuda-modules/**/*
|
||||
- pkgs/top-level/cuda-packages.nix
|
||||
|
||||
"6.topic: deepin":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/desktops/deepin/**/*
|
||||
- pkgs/desktops/deepin/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/desktops/deepin/**/*
|
||||
- pkgs/desktops/deepin/**/*
|
||||
|
||||
"6.topic: docker tools":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/virtualization/docker/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/virtualization/docker/**/*
|
||||
|
||||
"6.topic: dotnet":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/dotnet.section.md
|
||||
- maintainers/scripts/update-dotnet-lockfiles.nix
|
||||
- pkgs/build-support/dotnet/**/*
|
||||
- pkgs/development/compilers/dotnet/**/*
|
||||
- pkgs/test/dotnet/**/*
|
||||
- pkgs/top-level/dotnet-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/dotnet.section.md
|
||||
- maintainers/scripts/update-dotnet-lockfiles.nix
|
||||
- pkgs/build-support/dotnet/**/*
|
||||
- pkgs/development/compilers/dotnet/**/*
|
||||
- pkgs/test/dotnet/**/*
|
||||
- pkgs/top-level/dotnet-packages.nix
|
||||
|
||||
"6.topic: emacs":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/editors/emacs.nix
|
||||
- nixos/modules/services/editors/emacs.xml
|
||||
- nixos/tests/emacs-daemon.nix
|
||||
- pkgs/applications/editors/emacs/build-support/**/*
|
||||
- pkgs/applications/editors/emacs/elisp-packages/**/*
|
||||
- pkgs/applications/editors/emacs/**/*
|
||||
- pkgs/top-level/emacs-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/editors/emacs.nix
|
||||
- nixos/modules/services/editors/emacs.xml
|
||||
- nixos/tests/emacs-daemon.nix
|
||||
- pkgs/applications/editors/emacs/build-support/**/*
|
||||
- pkgs/applications/editors/emacs/elisp-packages/**/*
|
||||
- pkgs/applications/editors/emacs/**/*
|
||||
- pkgs/top-level/emacs-packages.nix
|
||||
|
||||
"6.topic: Enlightenment DE":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/enlightenment.nix
|
||||
- pkgs/desktops/enlightenment/**/*
|
||||
- pkgs/development/python-modules/python-efl/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/enlightenment.nix
|
||||
- pkgs/desktops/enlightenment/**/*
|
||||
- pkgs/development/python-modules/python-efl/*
|
||||
|
||||
"6.topic: erlang":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/beam.section.md
|
||||
- pkgs/development/beam-modules/**/*
|
||||
- pkgs/development/interpreters/elixir/**/*
|
||||
- pkgs/development/interpreters/erlang/**/*
|
||||
- pkgs/development/tools/build-managers/rebar/**/*
|
||||
- pkgs/development/tools/build-managers/rebar3/**/*
|
||||
- pkgs/development/tools/erlang/**/*
|
||||
- pkgs/top-level/beam-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/beam.section.md
|
||||
- pkgs/development/beam-modules/**/*
|
||||
- pkgs/development/interpreters/elixir/**/*
|
||||
- pkgs/development/interpreters/erlang/**/*
|
||||
- pkgs/development/tools/build-managers/rebar/**/*
|
||||
- pkgs/development/tools/build-managers/rebar3/**/*
|
||||
- pkgs/development/tools/erlang/**/*
|
||||
- pkgs/top-level/beam-packages.nix
|
||||
|
||||
"6.topic: fetch":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/fetch*/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/fetch*/**/*
|
||||
|
||||
"6.topic: flakes":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- '**/flake.nix'
|
||||
- lib/systems/flake-systems.nix
|
||||
- nixos/modules/config/nix-flakes.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- '**/flake.nix'
|
||||
- lib/systems/flake-systems.nix
|
||||
- nixos/modules/config/nix-flakes.nix
|
||||
|
||||
"6.topic: flutter":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/flutter/*.nix
|
||||
- pkgs/development/compilers/flutter/**/*.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/flutter/*.nix
|
||||
- pkgs/development/compilers/flutter/**/*.nix
|
||||
|
||||
"6.topic: games":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/games/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/games/**/*
|
||||
|
||||
"6.topic: GNOME":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/gnome.section.md
|
||||
- nixos/modules/services/desktops/gnome/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/gnome.nix
|
||||
- nixos/tests/gnome-xorg.nix
|
||||
- nixos/tests/gnome.nix
|
||||
- pkgs/desktops/gnome/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/gnome.section.md
|
||||
- nixos/modules/services/desktops/gnome/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/gnome.nix
|
||||
- nixos/tests/gnome-xorg.nix
|
||||
- nixos/tests/gnome.nix
|
||||
- pkgs/desktops/gnome/**/*
|
||||
|
||||
"6.topic: golang":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/go.section.md
|
||||
- pkgs/build-support/go/**/*
|
||||
- pkgs/development/compilers/go/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/go.section.md
|
||||
- pkgs/build-support/go/**/*
|
||||
- pkgs/development/compilers/go/**/*
|
||||
|
||||
"6.topic: hardware":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/hardware/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/hardware/**/*
|
||||
|
||||
"6.topic: haskell":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/haskell.section.md
|
||||
- maintainers/scripts/haskell/**/*
|
||||
- pkgs/development/compilers/ghc/**/*
|
||||
- pkgs/development/haskell-modules/**/*
|
||||
- pkgs/development/tools/haskell/**/*
|
||||
- pkgs/test/haskell/**/*
|
||||
- pkgs/top-level/haskell-packages.nix
|
||||
- pkgs/top-level/release-haskell.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/haskell.section.md
|
||||
- maintainers/scripts/haskell/**/*
|
||||
- pkgs/development/compilers/ghc/**/*
|
||||
- pkgs/development/haskell-modules/**/*
|
||||
- pkgs/development/tools/haskell/**/*
|
||||
- pkgs/test/haskell/**/*
|
||||
- pkgs/top-level/haskell-packages.nix
|
||||
- pkgs/top-level/release-haskell.nix
|
||||
|
||||
"6.topic: java":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
# Distributions
|
||||
- pkgs/development/compilers/adoptopenjdk-icedtea-web/**/*
|
||||
- pkgs/development/compilers/corretto/**/*
|
||||
- pkgs/development/compilers/graalvm/**/*
|
||||
- pkgs/development/compilers/openjdk/**/*
|
||||
- pkgs/by-name/op/openjfx/**/*
|
||||
- pkgs/development/compilers/semeru-bin/**/*
|
||||
- pkgs/development/compilers/temurin-bin/**/*
|
||||
- pkgs/development/compilers/zulu/**/*
|
||||
# Documentation
|
||||
- doc/languages-frameworks/java.section.md
|
||||
# Gradle
|
||||
- doc/languages-frameworks/gradle.section.md
|
||||
- pkgs/development/tools/build-managers/gradle/**/*
|
||||
- pkgs/by-name/gr/gradle-completion/**/*
|
||||
# Maven
|
||||
- pkgs/by-name/ma/maven/**/*
|
||||
- doc/languages-frameworks/maven.section.md
|
||||
# Ant
|
||||
- pkgs/by-name/an/ant/**/*
|
||||
# javaPackages attrset
|
||||
- pkgs/development/java-modules/**/*
|
||||
- pkgs/top-level/java-packages.nix
|
||||
# Maintainer tooling
|
||||
- pkgs/by-name/ni/nixpkgs-openjdk-updater/**/*
|
||||
# Misc
|
||||
- nixos/modules/programs/java.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
# Distributions
|
||||
- pkgs/development/compilers/adoptopenjdk-icedtea-web/**/*
|
||||
- pkgs/development/compilers/corretto/**/*
|
||||
- pkgs/development/compilers/graalvm/**/*
|
||||
- pkgs/development/compilers/openjdk/**/*
|
||||
- pkgs/by-name/op/openjfx/**/*
|
||||
- pkgs/development/compilers/semeru-bin/**/*
|
||||
- pkgs/development/compilers/temurin-bin/**/*
|
||||
- pkgs/development/compilers/zulu/**/*
|
||||
# Documentation
|
||||
- doc/languages-frameworks/java.section.md
|
||||
# Gradle
|
||||
- doc/languages-frameworks/gradle.section.md
|
||||
- pkgs/development/tools/build-managers/gradle/**/*
|
||||
- pkgs/by-name/gr/gradle-completion/**/*
|
||||
# Maven
|
||||
- pkgs/by-name/ma/maven/**/*
|
||||
- doc/languages-frameworks/maven.section.md
|
||||
# Ant
|
||||
- pkgs/by-name/an/ant/**/*
|
||||
# javaPackages attrset
|
||||
- pkgs/development/java-modules/**/*
|
||||
- pkgs/top-level/java-packages.nix
|
||||
# Maintainer tooling
|
||||
- pkgs/by-name/ni/nixpkgs-openjdk-updater/**/*
|
||||
# Misc
|
||||
- nixos/modules/programs/java.nix
|
||||
|
||||
"6.topic: jitsi":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/networking/jitsi-videobridge.nix
|
||||
- nixos/modules/services/web-apps/jitsi-meet.nix
|
||||
- pkgs/servers/web-apps/jitsi-meet/**/*
|
||||
- pkgs/servers/jitsi-videobridge/**/*
|
||||
- pkgs/applications/networking/instant-messengers/jitsi/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/networking/jitsi-videobridge.nix
|
||||
- nixos/modules/services/web-apps/jitsi-meet.nix
|
||||
- pkgs/servers/web-apps/jitsi-meet/**/*
|
||||
- pkgs/servers/jitsi-videobridge/**/*
|
||||
- pkgs/applications/networking/instant-messengers/jitsi/**/*
|
||||
|
||||
"6.topic: julia":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/julia.section.md
|
||||
- pkgs/development/compilers/julia/**/*
|
||||
- pkgs/development/julia-modules/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/julia.section.md
|
||||
- pkgs/development/compilers/julia/**/*
|
||||
- pkgs/development/julia-modules/**/*
|
||||
|
||||
"6.topic: jupyter":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/python-modules/jupyter*/**/*
|
||||
- pkgs/development/python-modules/mkdocs-jupyter/*
|
||||
- nixos/modules/services/development/jupyter/**/*
|
||||
- pkgs/applications/editors/jupyter-kernels/**/*
|
||||
- pkgs/applications/editors/jupyter/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/python-modules/jupyter*/**/*
|
||||
- pkgs/development/python-modules/mkdocs-jupyter/*
|
||||
- nixos/modules/services/development/jupyter/**/*
|
||||
- pkgs/applications/editors/jupyter-kernels/**/*
|
||||
- pkgs/applications/editors/jupyter/**/*
|
||||
|
||||
"6.topic: k3s":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/cluster/k3s/**/*
|
||||
- nixos/tests/k3s/**/*
|
||||
- pkgs/applications/networking/cluster/k3s/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/cluster/k3s/**/*
|
||||
- nixos/tests/k3s/**/*
|
||||
- pkgs/applications/networking/cluster/k3s/**/*
|
||||
|
||||
"6.topic: kernel":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/kernel/**/*
|
||||
- pkgs/os-specific/linux/kernel/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/build-support/kernel/**/*
|
||||
- pkgs/os-specific/linux/kernel/**/*
|
||||
|
||||
"6.topic: lib":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- lib/**
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- lib/**
|
||||
|
||||
"6.topic: llvm/clang":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/compilers/llvm/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/compilers/llvm/**/*
|
||||
|
||||
"6.topic: lua":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/tools/misc/luarocks/*
|
||||
- pkgs/development/interpreters/lua-5/**/*
|
||||
- pkgs/development/interpreters/luajit/**/*
|
||||
- pkgs/development/lua-modules/**/*
|
||||
- pkgs/top-level/lua-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/tools/misc/luarocks/*
|
||||
- pkgs/development/interpreters/lua-5/**/*
|
||||
- pkgs/development/interpreters/luajit/**/*
|
||||
- pkgs/development/lua-modules/**/*
|
||||
- pkgs/top-level/lua-packages.nix
|
||||
|
||||
"6.topic: Lumina DE":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/lumina.nix
|
||||
- pkgs/desktops/lumina/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/lumina.nix
|
||||
- pkgs/desktops/lumina/**/*
|
||||
|
||||
"6.topic: LXQt":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/lxqt.nix
|
||||
- pkgs/desktops/lxqt/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/lxqt.nix
|
||||
- pkgs/desktops/lxqt/**/*
|
||||
|
||||
"6.topic: mate":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/mate.nix
|
||||
- nixos/tests/mate.nix
|
||||
- pkgs/desktops/mate/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/x11/desktop-managers/mate.nix
|
||||
- nixos/tests/mate.nix
|
||||
- pkgs/desktops/mate/**/*
|
||||
|
||||
"6.topic: module system":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- lib/modules.nix
|
||||
- lib/types.nix
|
||||
- lib/options.nix
|
||||
- lib/tests/modules.sh
|
||||
- lib/tests/modules/**
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- lib/modules.nix
|
||||
- lib/types.nix
|
||||
- lib/options.nix
|
||||
- lib/tests/modules.sh
|
||||
- lib/tests/modules/**
|
||||
|
||||
"6.topic: musl":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/os-specific/linux/musl/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/os-specific/linux/musl/**/*
|
||||
|
||||
"6.topic: nim":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/nim.section.md
|
||||
- pkgs/build-support/build-nim-package.nix
|
||||
- pkgs/build-support/build-nim-sbom.nix
|
||||
- pkgs/by-name/ni/nim*
|
||||
- pkgs/top-level/nim-overrides.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/nim.section.md
|
||||
- pkgs/build-support/build-nim-package.nix
|
||||
- pkgs/build-support/build-nim-sbom.nix
|
||||
- pkgs/by-name/ni/nim*
|
||||
- pkgs/top-level/nim-overrides.nix
|
||||
|
||||
"6.topic: nixos":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/**/*
|
||||
- pkgs/by-name/sw/switch-to-configuration-ng/**/*
|
||||
- pkgs/by-name/ni/nixos-rebuild-ng/**/*
|
||||
- pkgs/os-specific/linux/nixos-rebuild/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/**/*
|
||||
- pkgs/by-name/sw/switch-to-configuration-ng/**/*
|
||||
- pkgs/by-name/ni/nixos-rebuild-ng/**/*
|
||||
- pkgs/os-specific/linux/nixos-rebuild/**/*
|
||||
|
||||
"6.topic: nixos-container":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/virtualisation/nixos-containers.nix
|
||||
- pkgs/tools/virtualization/nixos-container/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/virtualisation/nixos-containers.nix
|
||||
- pkgs/tools/virtualization/nixos-container/**/*
|
||||
|
||||
"6.topic: nodejs":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/javascript.section.md
|
||||
- pkgs/build-support/node/**/*
|
||||
- pkgs/development/node-packages/**/*
|
||||
- pkgs/development/tools/yarn/*
|
||||
- pkgs/development/tools/yarn2nix-moretea/**/*
|
||||
- pkgs/development/tools/pnpm/**/*
|
||||
- pkgs/development/web/nodejs/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/javascript.section.md
|
||||
- pkgs/build-support/node/**/*
|
||||
- pkgs/development/node-packages/**/*
|
||||
- pkgs/development/tools/yarn/*
|
||||
- pkgs/development/tools/yarn2nix-moretea/**/*
|
||||
- pkgs/development/tools/pnpm/**/*
|
||||
- pkgs/development/web/nodejs/*
|
||||
|
||||
"6.topic: nvidia":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/hardware/video/nvidia.nix
|
||||
- nixos/modules/services/hardware/nvidia-container-toolkit/**/*
|
||||
- nixos/modules/services/hardware/nvidia-optimus.nix
|
||||
- pkgs/os-specific/linux/nvidia-x11/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/hardware/video/nvidia.nix
|
||||
- nixos/modules/services/hardware/nvidia-container-toolkit/**/*
|
||||
- nixos/modules/services/hardware/nvidia-optimus.nix
|
||||
- pkgs/os-specific/linux/nvidia-x11/**/*
|
||||
|
||||
"6.topic: ocaml":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/ocaml.section.md
|
||||
- pkgs/development/compilers/ocaml/**/*
|
||||
- pkgs/development/compilers/reason/**/*
|
||||
- pkgs/development/ocaml-modules/**/*
|
||||
- pkgs/development/tools/ocaml/**/*
|
||||
- pkgs/top-level/ocaml-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/ocaml.section.md
|
||||
- pkgs/development/compilers/ocaml/**/*
|
||||
- pkgs/development/compilers/reason/**/*
|
||||
- pkgs/development/ocaml-modules/**/*
|
||||
- pkgs/development/tools/ocaml/**/*
|
||||
- pkgs/top-level/ocaml-packages.nix
|
||||
|
||||
"6.topic: pantheon":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/desktops/pantheon/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/pantheon.nix
|
||||
- nixos/modules/services/x11/display-managers/lightdm-greeters/pantheon.nix
|
||||
- nixos/tests/pantheon.nix
|
||||
- pkgs/desktops/pantheon/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/desktops/pantheon/**/*
|
||||
- nixos/modules/services/x11/desktop-managers/pantheon.nix
|
||||
- nixos/modules/services/x11/display-managers/lightdm-greeters/pantheon.nix
|
||||
- nixos/tests/pantheon.nix
|
||||
- pkgs/desktops/pantheon/**/*
|
||||
|
||||
"6.topic: php":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/php.section.md
|
||||
- nixos/tests/php/**/*
|
||||
- pkgs/build-support/php/**/*
|
||||
- pkgs/development/interpreters/php/**/*
|
||||
- pkgs/development/php-packages/**/*
|
||||
- pkgs/test/php/default.nix
|
||||
- pkgs/top-level/php-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/php.section.md
|
||||
- nixos/tests/php/**/*
|
||||
- pkgs/build-support/php/**/*
|
||||
- pkgs/development/interpreters/php/**/*
|
||||
- pkgs/development/php-packages/**/*
|
||||
- pkgs/test/php/default.nix
|
||||
- pkgs/top-level/php-packages.nix
|
||||
|
||||
"6.topic: printing":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/printing/cupsd.nix
|
||||
- pkgs/misc/cups/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/services/printing/cupsd.nix
|
||||
- pkgs/misc/cups/**/*
|
||||
|
||||
"6.topic: python":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/python.section.md
|
||||
- pkgs/development/interpreters/python/**/*
|
||||
- pkgs/development/python-modules/**/*
|
||||
- pkgs/top-level/python-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/python.section.md
|
||||
- pkgs/development/interpreters/python/**/*
|
||||
- pkgs/development/python-modules/**/*
|
||||
- pkgs/top-level/python-packages.nix
|
||||
|
||||
"6.topic: qt/kde":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/qt.section.md
|
||||
- nixos/modules/services/x11/desktop-managers/plasma5.nix
|
||||
- nixos/tests/plasma5.nix
|
||||
- pkgs/applications/kde/**/*
|
||||
- pkgs/desktops/plasma-5/**/*
|
||||
- pkgs/development/libraries/kde-frameworks/**/*
|
||||
- pkgs/development/libraries/qt-5/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/qt.section.md
|
||||
- nixos/modules/services/x11/desktop-managers/plasma5.nix
|
||||
- nixos/tests/plasma5.nix
|
||||
- pkgs/applications/kde/**/*
|
||||
- pkgs/desktops/plasma-5/**/*
|
||||
- pkgs/development/libraries/kde-frameworks/**/*
|
||||
- pkgs/development/libraries/qt-5/**/*
|
||||
|
||||
"6.topic: R":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/science/math/R/**/*
|
||||
- pkgs/development/r-modules/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/science/math/R/**/*
|
||||
- pkgs/development/r-modules/**/*
|
||||
|
||||
"6.topic: rocm":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/rocm-modules/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/rocm-modules/**/*
|
||||
|
||||
"6.topic: ruby":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/ruby.section.md
|
||||
- pkgs/development/interpreters/ruby/**/*
|
||||
- pkgs/development/ruby-modules/**/*
|
||||
- pkgs/top-level/ruby-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/ruby.section.md
|
||||
- pkgs/development/interpreters/ruby/**/*
|
||||
- pkgs/development/ruby-modules/**/*
|
||||
- pkgs/top-level/ruby-packages.nix
|
||||
|
||||
"6.topic: rust":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/rust.section.md
|
||||
- pkgs/build-support/rust/**/*
|
||||
- pkgs/development/compilers/rust/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/rust.section.md
|
||||
- pkgs/build-support/rust/**/*
|
||||
- pkgs/development/compilers/rust/**/*
|
||||
|
||||
"6.topic: stdenv":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/stdenv/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/stdenv/**/*
|
||||
|
||||
"6.topic: steam":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/games/steam/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/games/steam/**/*
|
||||
|
||||
"6.topic: systemd":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/os-specific/linux/systemd/**/*
|
||||
- nixos/modules/system/boot/systemd*/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/os-specific/linux/systemd/**/*
|
||||
- nixos/modules/system/boot/systemd*/**/*
|
||||
|
||||
"6.topic: tcl":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/tcl.section.md
|
||||
- pkgs/development/interpreters/tcl/*
|
||||
- pkgs/development/tcl-modules/**/*
|
||||
- pkgs/top-level/tcl-packages.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/tcl.section.md
|
||||
- pkgs/development/interpreters/tcl/*
|
||||
- pkgs/development/tcl-modules/**/*
|
||||
- pkgs/top-level/tcl-packages.nix
|
||||
|
||||
"6.topic: teams":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- maintainers/team-list.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- maintainers/team-list.nix
|
||||
|
||||
"6.topic: testing":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
# NOTE: Let's keep the scope limited to test frameworks that are
|
||||
# *developed in this repo*;
|
||||
# - not individual tests
|
||||
# - not packages for test frameworks
|
||||
- pkgs/build-support/testers/**
|
||||
- nixos/lib/testing/**
|
||||
- nixos/lib/test-driver/**
|
||||
- nixos/tests/nixos-test-driver/**
|
||||
- nixos/lib/testing-python.nix # legacy
|
||||
- nixos/tests/make-test-python.nix # legacy
|
||||
# lib/debug.nix has a test framework (runTests) but it's not the main focus
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
# NOTE: Let's keep the scope limited to test frameworks that are
|
||||
# *developed in this repo*;
|
||||
# - not individual tests
|
||||
# - not packages for test frameworks
|
||||
- pkgs/build-support/testers/**
|
||||
- nixos/lib/testing/**
|
||||
- nixos/lib/test-driver/**
|
||||
- nixos/tests/nixos-test-driver/**
|
||||
- nixos/lib/testing-python.nix # legacy
|
||||
- nixos/tests/make-test-python.nix # legacy
|
||||
# lib/debug.nix has a test framework (runTests) but it's not the main focus
|
||||
|
||||
"6.topic: TeX":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/texlive.section.md
|
||||
- pkgs/test/texlive/**
|
||||
- pkgs/tools/typesetting/tex/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/texlive.section.md
|
||||
- pkgs/test/texlive/**
|
||||
- pkgs/tools/typesetting/tex/**/*
|
||||
|
||||
"6.topic: updaters":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/common-updater/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/common-updater/**/*
|
||||
|
||||
"6.topic: vim":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/vim.section.md
|
||||
- pkgs/applications/editors/vim/**/*
|
||||
- pkgs/applications/editors/vim/plugins/**/*
|
||||
- nixos/modules/programs/neovim.nix
|
||||
- pkgs/applications/editors/neovim/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- doc/languages-frameworks/vim.section.md
|
||||
- pkgs/applications/editors/vim/**/*
|
||||
- pkgs/applications/editors/vim/plugins/**/*
|
||||
- nixos/modules/programs/neovim.nix
|
||||
- pkgs/applications/editors/neovim/**/*
|
||||
|
||||
"6.topic: vscode":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/editors/vscode/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/applications/editors/vscode/**/*
|
||||
|
||||
"6.topic: windows":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/os-specific/windows/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/os-specific/windows/**/*
|
||||
|
||||
"6.topic: xen-project":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/virtualisation/xen*
|
||||
- pkgs/by-name/xe/xen/*
|
||||
- pkgs/by-name/qe/qemu_xen/*
|
||||
- pkgs/by-name/xe/xen-guest-agent/*
|
||||
- pkgs/by-name/xt/xtf/*
|
||||
- pkgs/build-support/xen/*
|
||||
- pkgs/development/ocaml-modules/xen*/*
|
||||
- pkgs/development/ocaml-modules/vchan/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/virtualisation/xen*
|
||||
- pkgs/by-name/xe/xen/*
|
||||
- pkgs/by-name/qe/qemu_xen/*
|
||||
- pkgs/by-name/xe/xen-guest-agent/*
|
||||
- pkgs/by-name/xt/xtf/*
|
||||
- pkgs/build-support/xen/*
|
||||
- pkgs/development/ocaml-modules/xen*/*
|
||||
- pkgs/development/ocaml-modules/vchan/*
|
||||
|
||||
"6.topic: xfce":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/doc/manual/configuration/xfce.xml
|
||||
- nixos/modules/services/x11/desktop-managers/xfce.nix
|
||||
- nixos/tests/xfce.nix
|
||||
- pkgs/desktops/xfce/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/doc/manual/configuration/xfce.xml
|
||||
- nixos/modules/services/x11/desktop-managers/xfce.nix
|
||||
- nixos/tests/xfce.nix
|
||||
- pkgs/desktops/xfce/**/*
|
||||
|
||||
"6.topic: zig":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/compilers/zig/**/*
|
||||
- doc/hooks/zig.section.md
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- pkgs/development/compilers/zig/**/*
|
||||
- doc/hooks/zig.section.md
|
||||
|
||||
"8.has: changelog":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/doc/manual/release-notes/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/doc/manual/release-notes/**/*
|
||||
|
||||
"8.has: maintainer-list (update)":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- maintainers/maintainer-list.nix
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- maintainers/maintainer-list.nix
|
||||
|
||||
"8.has: module (update)":
|
||||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/**/*
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/**/*
|
||||
|
||||
# keep-sorted end
|
||||
|
||||
.github/stale.yml (vendored, deleted) | 9
@@ -1,9 +0,0 @@
# Configuration for probot-stale - https://github.com/probot/stale
daysUntilStale: 180
daysUntilClose: false
exemptLabels:
- "1.severity: security"
- "2.status: never-stale"
staleLabel: "2.status: stale"
markComment: false
closeComment: false
.github/workflows/README.md (vendored) | 73
@@ -2,19 +2,76 @@
|
||||
|
Some architectural notes about key decisions and concepts in our workflows:

- Instead of `pull_request` we use [`pull_request_target`](https://docs.github.com/actions/writing-workflows/choosing-when-your-workflow-runs/events-that-trigger-workflows#pull_request_target) for all PR-related workflows.
  This has the advantage that those workflows will run without prior approval for external contributors.

- Running on `pull_request_target` also optionally provides us with a GH_TOKEN with elevated privileges (write access), which we need to do things like adding labels, requesting reviewers or pushing branches.
  **Note about security:** We need to be careful to limit the scope of elevated privileges as much as possible.
  Thus, they should be lowered to the minimum with `permissions: {}` in every workflow by default.

- By definition `pull_request_target` runs in the context of the **base** of the pull request.
  This means that the workflow files to run will be taken from the base branch, not the PR, and actions/checkout will check out the base branch, not the PR, by default.
  To protect our secrets, we need to make sure to **never execute code** from the pull request and always evaluate or build nix code from the pull request with the **sandbox enabled**.
  (A minimal sketch of this pattern is shown after this list.)

- To test the pull request's contents, we check out the "test merge commit".
  This is a temporary commit that GitHub creates automatically as "what would happen if this PR was merged into the base branch now?".
  The checkout could be done via the virtual branch `refs/pull/<pr-number>/merge`, but doing so would cause failures when this virtual branch doesn't exist (anymore).
  This can happen when the PR has conflicts, in which case the virtual branch is not created, or when the PR is getting merged while workflows are still running, in which case the branch won't exist anymore at the time of checkout.
  Thus, we use the `prepare` job to check whether the PR is mergeable and whether the test merge commit exists, and only then run the relevant jobs.

- Various workflows need to make comparisons against the base branch.
  In this case, we check out the parent of the "test merge commit" for best results.
  Note that this is not necessarily the same as the default commit that actions/checkout would use, which is also a commit from the base branch (see above), but might be older.
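
A minimal sketch of this pattern, assuming a hypothetical workflow called `Example` (the real workflows in this repository pin action versions to commit hashes and differ in their triggers and jobs):

```
name: Example

on:
  pull_request_target:
    types: [opened, ready_for_review, synchronize, reopened]

# Drop all default token permissions; jobs request only what they actually need.
permissions: {}

jobs:
  example:
    runs-on: ubuntu-24.04
    steps:
      # With pull_request_target this checks out the *base* branch by default,
      # not the pull request's code.
      - uses: actions/checkout@v4

      - uses: cachix/install-nix-action@v31
        with:
          # Never evaluate or build PR code without the sandbox.
          extra_nix_config: sandbox = true
```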

## Terminology

- **base commit**: The pull_request_target event's context commit, i.e. the base commit given by GitHub Actions.
  Same as `github.event.pull_request.base.sha`.
- **head commit**: The HEAD commit in the pull request's branch.
  Same as `github.event.pull_request.head.sha`.
- **merge commit**: The temporary "test merge commit" that GitHub Actions creates and updates for the pull request.
  Same as `refs/pull/${{ github.event.pull_request.number }}/merge`.
- **target commit**: The base branch's parent of the "test merge commit" to compare against (see the sketch after this list).
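
For illustration only, a sketch of how the merge and target commits relate in a checkout step, assuming the virtual branch still exists (as noted above, the real workflows first check mergeability instead of relying on this branch directly):

```
- name: Determine merge and target commits (sketch)
  env:
    PR_NUMBER: ${{ github.event.pull_request.number }}
  run: |
    # Fetch the test merge commit together with its parents.
    git fetch --depth 2 origin "refs/pull/$PR_NUMBER/merge"
    mergedSha=$(git rev-parse FETCH_HEAD)
    # The first parent of the test merge commit is the base branch's commit,
    # i.e. the target commit used for comparisons.
    targetSha=$(git rev-parse FETCH_HEAD^1)
    echo "mergedSha=$mergedSha" >> "$GITHUB_OUTPUT"
    echo "targetSha=$targetSha" >> "$GITHUB_OUTPUT"
```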

## Concurrency Groups

We use [GitHub's Concurrency Groups](https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/control-the-concurrency-of-workflows-and-jobs) to cancel older jobs on pushes to Pull Requests.
When two workflows are in the same group, a newer workflow cancels an older workflow.
Thus, it is important how the group keys are constructed:

- Because we want to run jobs for different events at the same time, we add `github.event_name` to the key.
  This is the case for `pull_request`, which runs on changes to the workflow files to test the new files, and for the same workflow from the base branch run via the `pull_request_target` event.

- We don't want workflows of different Pull Requests to cancel each other, so we include `github.event.pull_request.number`.
  The [GitHub docs](https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/control-the-concurrency-of-workflows-and-jobs#example-using-a-fallback-value) show using `github.head_ref` for this purpose, but this doesn't work well with forks: different users could have the same head branch name in their forks and run CI for their PRs at the same time.

- Sometimes, there is no `pull_request.number`.
  To ensure non-PR runs are never cancelled, we add a fallback of `github.run_id`.
  This is a unique value for each workflow run.

- Of course, we run multiple workflows at the same time, so we add `github.workflow` to the key.
  Otherwise workflows would cancel each other.

- There is a special case for reusable workflows called via `workflow_call`: they will have `github.workflow` set to their parent workflow's name.
  Thus, they would cancel each other.
  That's why we additionally hardcode the name of the workflow as well.

This results in a key with the following semantics:

```
<running-workflow>-<triggering-workflow>-<triggered-event>-<pull-request/fallback>
```
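
For example, a workflow named `Example` could use a concurrency block along these lines (the hardcoded `example-` prefix stands in for the workflow's own name):

```
concurrency:
  group: example-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.run_id }}
  cancel-in-progress: true
```

This mirrors the `group` keys used in the workflows below, e.g. in `dismissed-review.yml` and `edited.yml`.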

## Required Status Checks

The "Required Status Checks" branch ruleset is implemented in two top-level workflows: `pr.yml` and `merge-group.yml`.

The PR workflow defines all checks that need to succeed to add a Pull Request to the Merge Queue.
If no Merge Queue is set up for a branch, the PR workflow defines the checks required to merge into the target branch.

The Merge Group workflow defines all checks that are run as part of the Merge Queue.
Only when these pass is a Pull Request finally merged into the target branch.
These checks don't apply when no Merge Queue is set up.

Both workflows work with the same `no PR failures` status check.
This name can never be changed, because it's used in the branch ruleset for these rules.
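
As a hedged sketch only (the actual job in `pr.yml` and `merge-group.yml` may be defined differently), such a status check can be provided by an aggregate job that runs even when its dependencies fail and reports a single result; the job names in `needs` are placeholders:

```
no-pr-failures:
  name: no PR failures
  runs-on: ubuntu-24.04
  needs: [eval, build, check]
  if: always()
  steps:
    - name: Fail if any required job failed or was cancelled
      env:
        RESULTS: ${{ toJSON(needs.*.result) }}
      run: |
        echo "$RESULTS"
        if grep -qE 'failure|cancelled' <<< "$RESULTS"; then
          exit 1
        fi
```
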
43
.github/workflows/backport.yml
vendored
@@ -11,32 +11,44 @@ on:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
backport:
|
||||
name: Backport Pull Request
|
||||
if: vars.NIXPKGS_CI_APP_ID && github.event.pull_request.merged == true && (github.event.action != 'labeled' || startsWith(github.event.label.name, 'backport'))
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 3
|
||||
steps:
|
||||
# Use a GitHub App to create the PR so that CI gets triggered
|
||||
# The App is scoped to Repository > Contents and Pull Requests: write for Nixpkgs
|
||||
- uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
- uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.NIXPKGS_CI_APP_ID }}
|
||||
private-key: ${{ secrets.NIXPKGS_CI_APP_PRIVATE_KEY }}
|
||||
permission-contents: write
|
||||
permission-pull-requests: write
|
||||
permission-workflows: write
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
|
||||
- name: Log current API rate limits
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Create backport PRs
|
||||
id: backport
|
||||
uses: korthout/backport-action@436145e922f9561fc5ea157ff406f21af2d6b363 # v3.2.0
|
||||
uses: korthout/backport-action@ca4972adce8039ff995e618f5fc02d1b7961f27a # v3.3.0
|
||||
with:
|
||||
# Config README: https://github.com/korthout/backport-action#backport-action
|
||||
copy_labels_pattern: 'severity:\ssecurity'
|
||||
@@ -47,15 +59,20 @@ jobs:
|
||||
* [ ] Before merging, ensure that this backport is [acceptable for the release](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md#changes-acceptable-for-releases).
|
||||
* Even as a non-committer, if you find that it is not acceptable, leave a comment.
|
||||
|
||||
- name: Log current API rate limits
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: "Add 'has: port to stable' label"
|
||||
if: steps.backport.outputs.created_pull_numbers != ''
|
||||
env:
|
||||
# Not the app on purpose to avoid triggering another workflow run after adding this label
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
NUMBER: ${{ github.event.number }}
|
||||
run: |
|
||||
gh api \
|
||||
--method POST \
|
||||
/repos/"$REPOSITORY"/issues/"$NUMBER"/labels \
|
||||
-f "labels[]=8.has: port to stable"
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
# Not using the app on purpose to avoid triggering another workflow run after adding this label.
|
||||
script: |
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.payload.pull_request.number,
|
||||
labels: [ '8.has: port to stable' ]
|
||||
})
|
||||
|
||||
103
.github/workflows/build.yml
vendored
Normal file
@@ -0,0 +1,103 @@
|
||||
name: Build
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
baseBranch:
|
||||
required: true
|
||||
type: string
|
||||
mergedSha:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN:
|
||||
required: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: ubuntu-24.04
|
||||
name: x86_64-linux
|
||||
systems: x86_64-linux
|
||||
builds: [shell, manual-nixos, lib-tests, tarball]
|
||||
desc: shell, docs, lib, tarball
|
||||
- runner: ubuntu-24.04-arm
|
||||
name: aarch64-linux
|
||||
systems: aarch64-linux
|
||||
builds: [shell, manual-nixos, manual-nixpkgs, manual-nixpkgs-tests]
|
||||
desc: shell, docs
|
||||
- runner: macos-14
|
||||
name: darwin
|
||||
systems: aarch64-darwin x86_64-darwin
|
||||
builds: [shell]
|
||||
desc: shell
|
||||
name: '${{ matrix.name }}: ${{ matrix.desc }}'
|
||||
runs-on: ${{ matrix.runner }}
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Checkout the merge commit
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
|
||||
- uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
with:
|
||||
# Sandbox is disabled on MacOS by default.
|
||||
extra_nix_config: sandbox = true
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# The nixpkgs-ci cache should not be trusted or used outside of Nixpkgs and its forks' CI.
|
||||
name: ${{ vars.CACHIX_NAME || 'nixpkgs-ci' }}
|
||||
extraPullNames: nixpkgs-ci
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
pushFilter: '(-source$|-nixpkgs-tarball-)'
|
||||
|
||||
- run: nix-env --install -f nixpkgs/untrusted-pinned -A nix-build-uncached
|
||||
|
||||
- name: Build shell
|
||||
if: contains(matrix.builds, 'shell')
|
||||
run: echo "${{ matrix.systems }}" | xargs -n1 nix-build-uncached nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A shell --argstr system
|
||||
|
||||
- name: Build NixOS manual
|
||||
if: |
|
||||
contains(matrix.builds, 'manual-nixos') && !cancelled() &&
|
||||
contains(fromJSON(inputs.baseBranch).type, 'primary')
|
||||
run: nix-build-uncached nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A manual-nixos --out-link nixos-manual
|
||||
|
||||
- name: Build Nixpkgs manual
|
||||
if: contains(matrix.builds, 'manual-nixpkgs') && !cancelled()
|
||||
run: nix-build-uncached nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A manual-nixpkgs -A manual-nixpkgs-tests
|
||||
|
||||
- name: Build Nixpkgs manual tests
|
||||
if: contains(matrix.builds, 'manual-nixpkgs-tests') && !cancelled()
|
||||
run: nix-build-uncached nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A manual-nixpkgs-tests
|
||||
|
||||
- name: Build lib tests
|
||||
if: contains(matrix.builds, 'lib-tests') && !cancelled()
|
||||
run: nix-build-uncached nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A lib-tests
|
||||
|
||||
- name: Build tarball
|
||||
if: contains(matrix.builds, 'tarball') && !cancelled()
|
||||
run: nix-build-uncached nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A tarball
|
||||
|
||||
- name: Upload NixOS manual
|
||||
if: |
|
||||
contains(matrix.builds, 'manual-nixos') && !cancelled() &&
|
||||
contains(fromJSON(inputs.baseBranch).type, 'primary')
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: nixos-manual-${{ matrix.name }}
|
||||
path: nixos-manual
|
||||
31
.github/workflows/check-cherry-picks.yml
vendored
@@ -1,31 +0,0 @@
|
||||
name: "Check cherry-picks"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/check-cherry-picks.yml
|
||||
pull_request_target:
|
||||
branches:
|
||||
- 'release-**'
|
||||
- 'staging-**'
|
||||
- '!staging-next'
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
check:
|
||||
name: cherry-pick-check
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
filter: blob:none
|
||||
path: trusted
|
||||
|
||||
- name: Check cherry-picks
|
||||
env:
|
||||
BASE_SHA: ${{ github.event.pull_request.base.sha }}
|
||||
HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||
run: |
|
||||
./trusted/maintainers/scripts/check-cherry-picks.sh "$BASE_SHA" "$HEAD_SHA"
|
||||
42
.github/workflows/check-format.yml
vendored
@@ -1,42 +0,0 @@
|
||||
name: Check that files are formatted
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/check-format.yml
|
||||
pull_request_target:
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
nixos:
|
||||
name: fmt-check
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
|
||||
- name: Check that files are formatted
|
||||
run: |
|
||||
# Note that it's fine to run this on untrusted code because:
|
||||
# - There's no secrets accessible here
|
||||
# - The build is sandboxed
|
||||
if ! nix-build untrusted/ci -A fmt.check; then
|
||||
echo "Some files are not properly formatted"
|
||||
echo "Please format them by going to the Nixpkgs root directory and running one of:"
|
||||
echo " nix-shell --run treefmt"
|
||||
echo " nix develop --command treefmt"
|
||||
echo " nix fmt"
|
||||
echo "Make sure your branch is up to date with master; rebase if not."
|
||||
echo "If you're having trouble, please ping @NixOS/nix-formatting"
|
||||
exit 1
|
||||
fi
|
||||
44
.github/workflows/check-shell.yml
vendored
@@ -1,44 +0,0 @@
|
||||
name: "Check shell"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/check-shell.yml
|
||||
pull_request_target:
|
||||
paths:
|
||||
- 'shell.nix'
|
||||
- 'ci/**'
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
shell-check:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: ubuntu-24.04
|
||||
system: x86_64-linux
|
||||
- runner: ubuntu-24.04-arm
|
||||
system: aarch64-linux
|
||||
- runner: macos-13
|
||||
system: x86_64-darwin
|
||||
- runner: macos-14
|
||||
system: aarch64-darwin
|
||||
|
||||
name: shell-check-${{ matrix.system }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
|
||||
- name: Build shell
|
||||
run: nix-build untrusted/ci -A shell
|
||||
142
.github/workflows/check.yml
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
name: Check
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
baseBranch:
|
||||
required: true
|
||||
type: string
|
||||
headBranch:
|
||||
required: true
|
||||
type: string
|
||||
mergedSha:
|
||||
required: true
|
||||
type: string
|
||||
ownersCanFail:
|
||||
required: true
|
||||
type: boolean
|
||||
targetSha:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN:
|
||||
required: true
|
||||
OWNER_RO_APP_PRIVATE_KEY:
|
||||
required: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
commits:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 3
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: trusted
|
||||
sparse-checkout: |
|
||||
ci/github-script
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install bottleneck
|
||||
|
||||
- name: Log current API rate limits
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Check commits
|
||||
id: check
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
TARGETS_STABLE: ${{ fromJSON(inputs.baseBranch).stable && !contains(fromJSON(inputs.headBranch).type, 'development') }}
|
||||
with:
|
||||
script: |
|
||||
const targetsStable = JSON.parse(process.env.TARGETS_STABLE)
|
||||
require('./trusted/ci/github-script/commits.js')({
|
||||
github,
|
||||
context,
|
||||
core,
|
||||
dry: context.eventName == 'pull_request',
|
||||
cherryPicks: context.eventName == 'pull_request' || targetsStable,
|
||||
})
|
||||
|
||||
- name: Log current API rate limits
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
# For checking code owners, this job depends on a GitHub App with the following permissions:
|
||||
# - Permissions:
|
||||
# - Repository > Administration: read-only
|
||||
# - Organization > Members: read-only
|
||||
# - Install App on this repository, setting these variables:
|
||||
# - OWNER_RO_APP_ID (variable)
|
||||
# - OWNER_RO_APP_PRIVATE_KEY (secret)
|
||||
#
|
||||
# This should not use the same app as the job to request reviewers, because this job requires
|
||||
# handling untrusted PR input.
|
||||
owners:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
continue-on-error: ${{ inputs.ownersCanFail }}
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Checkout merge and target commits
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
target-as-trusted-at: ${{ inputs.targetSha }}
|
||||
|
||||
- uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# The nixpkgs-ci cache should not be trusted or used outside of Nixpkgs and its forks' CI.
|
||||
name: ${{ vars.CACHIX_NAME || 'nixpkgs-ci' }}
|
||||
extraPullNames: nixpkgs-ci
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
pushFilter: -source$
|
||||
|
||||
- name: Build codeowners validator
|
||||
run: nix-build nixpkgs/trusted/ci --arg nixpkgs ./nixpkgs/trusted-pinned -A codeownersValidator
|
||||
|
||||
- uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
if: github.event_name == 'pull_request_target' && vars.OWNER_RO_APP_ID
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.OWNER_RO_APP_ID }}
|
||||
private-key: ${{ secrets.OWNER_RO_APP_PRIVATE_KEY }}
|
||||
permission-administration: read
|
||||
permission-members: read
|
||||
|
||||
- name: Log current API rate limits
|
||||
if: steps.app-token.outputs.token
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Validate codeowners
|
||||
if: steps.app-token.outputs.token
|
||||
env:
|
||||
OWNERS_FILE: nixpkgs/untrusted/ci/OWNERS
|
||||
GITHUB_ACCESS_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
REPOSITORY_PATH: nixpkgs/untrusted
|
||||
OWNER_CHECKER_REPOSITORY: ${{ github.repository }}
|
||||
# Set this to "notowned,avoid-shadowing" to check that all files are owned by somebody
|
||||
EXPERIMENTAL_CHECKS: "avoid-shadowing"
|
||||
run: result/bin/codeowners-validator
|
||||
|
||||
- name: Log current API rate limits
|
||||
if: steps.app-token.outputs.token
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
117
.github/workflows/codeowners-v2.yml
vendored
@@ -1,117 +0,0 @@
|
||||
# This workflow depends on two GitHub Apps with the following permissions:
|
||||
# - For checking code owners:
|
||||
# - Permissions:
|
||||
# - Repository > Administration: read-only
|
||||
# - Organization > Members: read-only
|
||||
# - Install App on this repository, setting these variables:
|
||||
# - OWNER_RO_APP_ID (variable)
|
||||
# - OWNER_RO_APP_PRIVATE_KEY (secret)
|
||||
# - For requesting code owners:
|
||||
# - Permissions:
|
||||
# - Repository > Administration: read-only
|
||||
# - Organization > Members: read-only
|
||||
# - Repository > Pull Requests: read-write
|
||||
# - Install App on this repository, setting these variables:
|
||||
# - OWNER_APP_ID (variable)
|
||||
# - OWNER_APP_PRIVATE_KEY (secret)
|
||||
#
|
||||
# This split is done because checking code owners requires handling untrusted PR input,
|
||||
# while requesting code owners requires PR write access, and those shouldn't be mixed.
|
||||
#
|
||||
# Note that the latter is also used for ./eval.yml requesting reviewers.
|
||||
|
||||
name: Codeowners v2
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/codeowners-v2.yml
|
||||
pull_request_target:
|
||||
types: [opened, ready_for_review, synchronize, reopened]
|
||||
|
||||
permissions: {}
|
||||
|
||||
env:
|
||||
OWNERS_FILE: ci/OWNERS
|
||||
# Don't do anything on draft PRs
|
||||
DRY_MODE: ${{ github.event.pull_request.draft && '1' || '' }}
|
||||
|
||||
jobs:
|
||||
# Check that code owners is valid
|
||||
check:
|
||||
name: Check
|
||||
runs-on: ubuntu-24.04-arm
|
||||
if: github.repository_owner == 'NixOS'
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge and target commits
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
target-as-trusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
|
||||
name: nixpkgs-ci
|
||||
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
|
||||
|
||||
- name: Build codeowners validator
|
||||
run: nix-build trusted/ci -A codeownersValidator
|
||||
|
||||
- uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
if: vars.OWNER_RO_APP_ID
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.OWNER_RO_APP_ID }}
|
||||
private-key: ${{ secrets.OWNER_RO_APP_PRIVATE_KEY }}
|
||||
permission-administration: read
|
||||
permission-members: read
|
||||
|
||||
- name: Validate codeowners
|
||||
if: steps.app-token.outputs.token
|
||||
env:
|
||||
OWNERS_FILE: untrusted/${{ env.OWNERS_FILE }}
|
||||
GITHUB_ACCESS_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
REPOSITORY_PATH: untrusted
|
||||
OWNER_CHECKER_REPOSITORY: ${{ github.repository }}
|
||||
# Set this to "notowned,avoid-shadowing" to check that all files are owned by somebody
|
||||
EXPERIMENTAL_CHECKS: "avoid-shadowing"
|
||||
run: result/bin/codeowners-validator
|
||||
|
||||
# Request reviews from code owners
|
||||
request:
|
||||
name: Request
|
||||
runs-on: ubuntu-24.04-arm
|
||||
if: github.repository_owner == 'NixOS'
|
||||
steps:
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
|
||||
# Important: Because we use pull_request_target, this checks out the base branch of the PR, not the PR head.
|
||||
# This is intentional, because we need to request the review of owners as declared in the base branch.
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: trusted
|
||||
|
||||
- uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
if: vars.OWNER_APP_ID
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.OWNER_APP_ID }}
|
||||
private-key: ${{ secrets.OWNER_APP_PRIVATE_KEY }}
|
||||
permission-administration: read
|
||||
permission-members: read
|
||||
permission-pull-requests: write
|
||||
|
||||
- name: Build review request package
|
||||
run: nix-build trusted/ci -A requestReviews
|
||||
|
||||
- name: Request reviews
|
||||
if: steps.app-token.outputs.token
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: result/bin/request-code-owner-reviews.sh ${{ github.repository }} ${{ github.event.number }} "$OWNERS_FILE"
|
||||
66
.github/workflows/dismissed-review.yml
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
name: Dismissed review
|
||||
|
||||
on:
|
||||
workflow_run:
|
||||
workflows:
|
||||
- Review dismissed
|
||||
types: [completed]
|
||||
|
||||
concurrency:
|
||||
group: dismissed-review-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
# The `check-cherry-picks` workflow creates review comments which reviewers
|
||||
# are encouraged to manually dismiss if they're not relevant.
|
||||
# When a CI-generated review is dismissed, this job automatically minimizes
|
||||
# it, preventing it from cluttering the PR.
|
||||
minimize:
|
||||
name: Minimize as resolved
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 2
|
||||
steps:
|
||||
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
// PRs from forks don't have any PRs associated by default.
|
||||
// Thus, we request the PR number with an API call *to* the fork's repo.
|
||||
// Multiple pull requests can be open from the same head commit, either via
|
||||
// different base branches or head branches.
|
||||
const { head_repository, head_sha, repository } = context.payload.workflow_run
|
||||
await Promise.all(
|
||||
(await github.paginate(github.rest.repos.listPullRequestsAssociatedWithCommit, {
|
||||
owner: head_repository.owner.login,
|
||||
repo: head_repository.name,
|
||||
commit_sha: head_sha
|
||||
}))
|
||||
.filter(pull_request => pull_request.base.repo.id == repository.id)
|
||||
.map(async (pull_request) =>
|
||||
Promise.all(
|
||||
(await github.paginate(github.rest.pulls.listReviews, {
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: pull_request.number
|
||||
})).filter(review =>
|
||||
review.user?.login == 'github-actions[bot]' &&
|
||||
review.state == 'DISMISSED'
|
||||
).map(review => github.graphql(`
|
||||
mutation($node_id:ID!) {
|
||||
minimizeComment(input: {
|
||||
classifier: RESOLVED,
|
||||
subjectId: $node_id
|
||||
})
|
||||
{ clientMutationId }
|
||||
}`,
|
||||
{ node_id: review.node_id }
|
||||
))
|
||||
)
|
||||
)
|
||||
)
|
||||
38
.github/workflows/edited.yml
vendored
@@ -16,34 +16,44 @@ on:
|
||||
pull_request_target:
|
||||
types: [edited]
|
||||
|
||||
concurrency:
|
||||
group: edited-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
base:
|
||||
name: Trigger jobs
|
||||
runs-on: ubuntu-24.04
|
||||
if: github.event.changes.base.ref.from && github.event.changes.base.ref.from != github.event.pull_request.base.ref
|
||||
timeout-minutes: 2
|
||||
steps:
|
||||
# Use a GitHub App to create the PR so that CI gets triggered
|
||||
# The App is scoped to Repository > Contents and Pull Requests: write for Nixpkgs
|
||||
# We only need Pull Requests: write here, but the app is also used for backports.
|
||||
- uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
- uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.NIXPKGS_CI_APP_ID }}
|
||||
private-key: ${{ secrets.NIXPKGS_CI_APP_PRIVATE_KEY }}
|
||||
permission-pull-requests: write
|
||||
|
||||
- env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
NUMBER: ${{ github.event.number }}
|
||||
run: |
|
||||
gh api \
|
||||
--method PATCH \
|
||||
/repos/"$REPOSITORY"/pulls/"$NUMBER" \
|
||||
-f "state=closed"
|
||||
gh api \
|
||||
--method PATCH \
|
||||
/repos/"$REPOSITORY"/pulls/"$NUMBER" \
|
||||
-f "state=open"
|
||||
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ steps.app-token.outputs.token }}
|
||||
script: |
|
||||
function changeState(state) {
|
||||
return github.rest.pulls.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
state
|
||||
})
|
||||
}
|
||||
await changeState('closed')
|
||||
await changeState('open')
|
||||
|
||||
34
.github/workflows/eval-aliases.yml
vendored
@@ -1,34 +0,0 @@
|
||||
name: Eval aliases
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/eval-aliases.yml
|
||||
pull_request_target:
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
eval-aliases:
|
||||
name: Eval nixpkgs with aliases enabled
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
|
||||
- name: Ensure flake outputs on all systems still evaluate
|
||||
run: nix flake check --all-systems --no-build ./untrusted
|
||||
|
||||
- name: Query nixpkgs with aliases enabled to check for basic syntax errors
|
||||
run: |
|
||||
time nix-env -I ./untrusted -f ./untrusted -qa '*' --option restrict-eval true --option allow-import-from-derivation false >/dev/null
|
||||
464
.github/workflows/eval.yml
vendored
@@ -1,55 +1,80 @@
|
||||
name: Eval
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/eval.yml
|
||||
pull_request_target:
|
||||
types: [opened, ready_for_review, synchronize, reopened]
|
||||
push:
|
||||
# Keep this synced with ci/request-reviews/dev-branches.txt
|
||||
branches:
|
||||
- master
|
||||
- staging
|
||||
- release-*
|
||||
- staging-*
|
||||
- haskell-updates
|
||||
- python-updates
|
||||
workflow_call:
|
||||
inputs:
|
||||
mergedSha:
|
||||
required: true
|
||||
type: string
|
||||
targetSha:
|
||||
type: string
|
||||
systems:
|
||||
required: true
|
||||
type: string
|
||||
testVersions:
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN:
|
||||
required: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
name: Prepare
|
||||
versions:
|
||||
if: inputs.testVersions
|
||||
runs-on: ubuntu-24.04-arm
|
||||
outputs:
|
||||
mergedSha: ${{ steps.get-merge-commit.outputs.mergedSha }}
|
||||
targetSha: ${{ steps.get-merge-commit.outputs.targetSha }}
|
||||
systems: ${{ steps.systems.outputs.systems }}
|
||||
versions: ${{ steps.versions.outputs.versions }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: trusted
|
||||
sparse-checkout: |
|
||||
.github/actions
|
||||
ci/supportedSystems.json
|
||||
- name: Check if the PR can be merged and get the test merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
id: get-merge-commit
|
||||
ci/supportedVersions.nix
|
||||
|
||||
- name: Load supported systems
|
||||
id: systems
|
||||
- name: Check out the PR at the test merge commit
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ inputs.mergedSha }}
|
||||
path: untrusted
|
||||
sparse-checkout: |
|
||||
ci/pinned.json
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- name: Load supported versions
|
||||
id: versions
|
||||
run: |
|
||||
echo "systems=$(jq -c <ci/supportedSystems.json)" >> "$GITHUB_OUTPUT"
|
||||
echo "versions=$(trusted/ci/supportedVersions.nix --arg pinnedJson untrusted/ci/pinned.json)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
outpaths:
|
||||
name: Outpaths
|
||||
eval:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
needs: [ prepare ]
|
||||
needs: versions
|
||||
if: ${{ !cancelled() && !failure() }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
system: ${{ fromJSON(needs.prepare.outputs.systems) }}
|
||||
system: ${{ fromJSON(inputs.systems) }}
|
||||
version:
|
||||
- "" # Default Eval triggering rebuild labels and such.
|
||||
- ${{ fromJSON(needs.versions.outputs.versions || '[]') }} # Only for ci/pinned.json updates.
|
||||
# Failures for versioned Evals will be collected in a separate job below
|
||||
# to not interrupt main Eval's compare step.
|
||||
continue-on-error: ${{ matrix.version != '' }}
|
||||
name: ${{ matrix.system }}${{ matrix.version && format(' @ {0}', matrix.version) || '' }}
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
# This is not supposed to be used and just acts as a fallback.
|
||||
# Without swap, when Eval runs OOM, it will fail badly with a
|
||||
# job that is sometimes not interruptible anymore.
|
||||
# If Eval starts swapping, decrease chunkSize to keep it fast.
|
||||
- name: Enable swap
|
||||
run: |
|
||||
sudo fallocate -l 10G /swap
|
||||
@@ -57,132 +82,119 @@ jobs:
|
||||
sudo mkswap /swap
|
||||
sudo swapon /swap
|
||||
|
||||
- name: Check out the PR at the test merge commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
ref: ${{ needs.prepare.outputs.mergedSha }}
|
||||
path: untrusted
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check out the PR at merged and target commits
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
target-as-trusted-at: ${{ inputs.targetSha }}
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- name: Evaluate the ${{ matrix.system }} output paths for all derivation attributes
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# The nixpkgs-ci cache should not be trusted or used outside of Nixpkgs and its forks' CI.
|
||||
name: ${{ vars.CACHIX_NAME || 'nixpkgs-ci' }}
|
||||
extraPullNames: nixpkgs-ci
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
pushFilter: '(-source|-single-chunk)$'
|
||||
|
||||
- name: Evaluate the ${{ matrix.system }} output paths at the merge commit
|
||||
env:
|
||||
MATRIX_SYSTEM: ${{ matrix.system }}
|
||||
MATRIX_VERSION: ${{ matrix.version || 'nixVersions.latest' }}
|
||||
run: |
|
||||
nix-build untrusted/ci -A eval.singleSystem \
|
||||
nix-build nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A eval.singleSystem \
|
||||
--argstr evalSystem "$MATRIX_SYSTEM" \
|
||||
--arg chunkSize 10000 \
|
||||
--arg chunkSize 8000 \
|
||||
--argstr nixPath "$MATRIX_VERSION" \
|
||||
--out-link merged
|
||||
# If it uses too much memory, slightly decrease chunkSize
|
||||
# If it uses too much memory, slightly decrease chunkSize.
|
||||
# Note: Keep the same further down in sync!
|
||||
|
||||
- name: Upload the output paths and eval stats
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: merged-${{ matrix.system }}
|
||||
path: merged/*
|
||||
|
||||
- name: Get target run id
|
||||
if: needs.prepare.outputs.targetSha
|
||||
id: targetRunId
|
||||
# Running the attrpath generation step separately from the outpath step afterwards.
|
||||
# The idea is that, *if* Eval on the target branch has not finished, yet, we will
|
||||
# generate the attrpaths in the meantime - and the separate command afterwards
|
||||
# will check cachix again for whether Eval has finished. If no Eval result from the
|
||||
# target branch can be found the second time, we proceed to run it in here. Attrpaths
|
||||
# generation takes roughly 30 seconds, so for every normal use-case this should be more
|
||||
# than enough of a head start for Eval on the target branch to finish.
|
||||
# This edge-case, that Eval on the target branch is delayed is unlikely to happen anyway:
|
||||
# For a commit to become the target commit of a PR, it must *already* be on the branch.
|
||||
# Normally, CI should always start running on that push event *before* it starts running
|
||||
# on the PR.
|
||||
- name: Evaluate the ${{ matrix.system }} attribute paths at the target commit
|
||||
if: inputs.targetSha
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
MATRIX_SYSTEM: ${{ matrix.system }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
TARGET_SHA: ${{ needs.prepare.outputs.targetSha }}
|
||||
run: |
|
||||
# Get the latest eval.yml workflow run for the PR's target commit
|
||||
if ! run=$(gh api --method GET /repos/"$REPOSITORY"/actions/workflows/eval.yml/runs \
|
||||
-f head_sha="$TARGET_SHA" -f event=push \
|
||||
--jq '.workflow_runs | sort_by(.run_started_at) | .[-1]') \
|
||||
|| [[ -z "$run" ]]; then
|
||||
echo "Could not find an eval.yml workflow run for $TARGET_SHA, cannot make comparison"
|
||||
exit 1
|
||||
fi
|
||||
echo "Comparing against $(jq .html_url <<< "$run")"
|
||||
runId=$(jq .id <<< "$run")
|
||||
nix-build nixpkgs/trusted/ci --arg nixpkgs ./nixpkgs/trusted-pinned -A eval.attrpathsSuperset \
|
||||
--argstr evalSystem "$MATRIX_SYSTEM" \
|
||||
--argstr nixPath "nixVersions.latest"
|
||||
|
||||
if ! job=$(gh api --method GET /repos/"$REPOSITORY"/actions/runs/"$runId"/jobs \
|
||||
--jq ".jobs[] | select (.name == \"Outpaths ($MATRIX_SYSTEM)\")") \
|
||||
|| [[ -z "$job" ]]; then
|
||||
echo "Could not find the Outpaths ($MATRIX_SYSTEM) job for workflow run $runId, cannot make comparison"
|
||||
exit 1
|
||||
fi
|
||||
jobId=$(jq .id <<< "$job")
|
||||
conclusion=$(jq -r .conclusion <<< "$job")
|
||||
|
||||
while [[ "$conclusion" == null || "$conclusion" == "" ]]; do
|
||||
echo "Job not done, waiting 10 seconds before checking again"
|
||||
sleep 10
|
||||
conclusion=$(gh api /repos/"$REPOSITORY"/actions/jobs/"$jobId" --jq '.conclusion')
|
||||
done
|
||||
|
||||
if [[ "$conclusion" != "success" ]]; then
|
||||
echo "Job was not successful (conclusion: $conclusion), cannot make comparison"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "targetRunId=$runId" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
if: steps.targetRunId.outputs.targetRunId
|
||||
with:
|
||||
run-id: ${{ steps.targetRunId.outputs.targetRunId }}
|
||||
name: merged-${{ matrix.system }}
|
||||
path: target
|
||||
github-token: ${{ github.token }}
|
||||
merge-multiple: true
|
||||
- name: Evaluate the ${{ matrix.system }} output paths at the target commit
|
||||
if: inputs.targetSha
|
||||
env:
|
||||
MATRIX_SYSTEM: ${{ matrix.system }}
|
||||
# This should be very quick, because it pulls the eval results from Cachix.
|
||||
run: |
|
||||
nix-build nixpkgs/trusted/ci --arg nixpkgs ./nixpkgs/trusted-pinned -A eval.singleSystem \
|
||||
--argstr evalSystem "$MATRIX_SYSTEM" \
|
||||
--arg chunkSize 8000 \
|
||||
--argstr nixPath "nixVersions.latest" \
|
||||
--out-link target
|
||||
|
||||
- name: Compare outpaths against the target branch
|
||||
if: steps.targetRunId.outputs.targetRunId
|
||||
if: inputs.targetSha
|
||||
env:
|
||||
MATRIX_SYSTEM: ${{ matrix.system }}
|
||||
run: |
|
||||
nix-build untrusted/ci -A eval.diff \
|
||||
nix-build nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A eval.diff \
|
||||
--arg beforeDir ./target \
|
||||
--arg afterDir "$(readlink ./merged)" \
|
||||
--arg afterDir ./merged \
|
||||
--argstr evalSystem "$MATRIX_SYSTEM" \
|
||||
--out-link diff
|
||||
|
||||
- name: Upload outpaths diff and stats
|
||||
if: steps.targetRunId.outputs.targetRunId
|
||||
if: inputs.targetSha
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: diff-${{ matrix.system }}
|
||||
name: ${{ matrix.version && format('{0}-', matrix.version) || '' }}diff-${{ matrix.system }}
|
||||
path: diff/*
|
||||
|
||||
tag:
|
||||
name: Tag
|
||||
compare:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
needs: [ prepare, outpaths ]
|
||||
if: needs.prepare.outputs.targetSha
|
||||
needs: [eval]
|
||||
if: inputs.targetSha && !cancelled() && !failure()
|
||||
permissions:
|
||||
pull-requests: write
|
||||
statuses: write
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check out the PR at the target commit
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
target-as-trusted-at: ${{ inputs.targetSha }}
|
||||
|
||||
- name: Download output paths and eval stats for all systems
|
||||
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
|
||||
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||
with:
|
||||
pattern: diff-*
|
||||
path: diff
|
||||
merge-multiple: true
|
||||
|
||||
- name: Check out the PR at the target commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ needs.prepare.outputs.targetSha }}
|
||||
path: trusted
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- name: Combine all output paths and eval stats
|
||||
run: |
|
||||
nix-build trusted/ci -A eval.combine \
|
||||
nix-build nixpkgs/trusted/ci --arg nixpkgs ./nixpkgs/trusted-pinned -A eval.combine \
|
||||
--arg diffDir ./diff \
|
||||
--out-link combined
|
||||
|
||||
@@ -190,13 +202,12 @@ jobs:
|
||||
env:
|
||||
AUTHOR_ID: ${{ github.event.pull_request.user.id }}
|
||||
run: |
|
||||
git -C trusted fetch --depth 1 origin ${{ needs.prepare.outputs.mergedSha }}
|
||||
git -C trusted diff --name-only ${{ needs.prepare.outputs.mergedSha }} \
|
||||
git -C nixpkgs/trusted diff --name-only ${{ inputs.mergedSha }} \
|
||||
| jq --raw-input --slurp 'split("\n")[:-1]' > touched-files.json
|
||||
|
||||
# Use the target branch to get accurate maintainer info
|
||||
nix-build trusted/ci -A eval.compare \
|
||||
--arg combinedDir "$(realpath ./combined)" \
|
||||
nix-build nixpkgs/trusted/ci --arg nixpkgs ./nixpkgs/trusted-pinned -A eval.compare \
|
||||
--arg combinedDir ./combined \
|
||||
--arg touchedFilesJson ./touched-files.json \
|
||||
--argstr githubAuthorId "$AUTHOR_ID" \
|
||||
--out-link comparison
|
||||
@@ -209,87 +220,142 @@ jobs:
|
||||
name: comparison
|
||||
path: comparison/*
|
||||
|
||||
- name: Build the requestReviews derivation
|
||||
run: nix-build trusted/ci -A requestReviews
|
||||
|
||||
- name: Labelling pull request
|
||||
if: ${{ github.event_name == 'pull_request_target' && github.repository_owner == 'NixOS' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
NUMBER: ${{ github.event.number }}
|
||||
run: |
|
||||
# Get all currently set labels that we manage
|
||||
gh api \
|
||||
/repos/"$REPOSITORY"/issues/"$NUMBER"/labels \
|
||||
--jq '.[].name | select(startswith("10.rebuild") or . == "11.by: package-maintainer")' \
|
||||
| sort > before
|
||||
|
||||
# And the labels that should be there
|
||||
jq -r '.labels[]' comparison/changed-paths.json \
|
||||
| sort > after
|
||||
|
||||
# Remove the ones not needed anymore
|
||||
while read -r toRemove; do
|
||||
echo "Removing label $toRemove"
|
||||
gh api \
|
||||
--method DELETE \
|
||||
/repos/"$REPOSITORY"/issues/"$NUMBER"/labels/"$toRemove"
|
||||
done < <(comm -23 before after)
|
||||
|
||||
# And add the ones that aren't set already
|
||||
while read -r toAdd; do
|
||||
echo "Adding label $toAdd"
|
||||
gh api \
|
||||
--method POST \
|
||||
/repos/"$REPOSITORY"/issues/"$NUMBER"/labels \
|
||||
-f "labels[]=$toAdd"
|
||||
done < <(comm -13 before after)
|
||||
|
||||
- name: Add eval summary to commit statuses
|
||||
if: ${{ github.event_name == 'pull_request_target' && github.repository_owner == 'NixOS' }}
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
|
||||
NUMBER: ${{ github.event.number }}
|
||||
run: |
|
||||
description=$(jq -r '
|
||||
"Package: added " + (.attrdiff.added | length | tostring) +
|
||||
", removed " + (.attrdiff.removed | length | tostring) +
|
||||
", changed " + (.attrdiff.changed | length | tostring) +
|
||||
", Rebuild: linux " + (.rebuildCountByKernel.linux | tostring) +
|
||||
", darwin " + (.rebuildCountByKernel.darwin | tostring)
|
||||
' <comparison/changed-paths.json)
|
||||
target_url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID?pr=$NUMBER"
|
||||
gh api --method POST \
|
||||
-H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
"/repos/$GITHUB_REPOSITORY/statuses/$PR_HEAD_SHA" \
|
||||
-f "context=Eval / Summary" -f "state=success" -f "description=$description" -f "target_url=$target_url"
|
||||
|
||||
# See ./codeowners-v2.yml, reuse the same App because we need the same permissions
|
||||
# Can't use the token received from permissions above, because it can't get enough permissions
|
||||
- uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
if: vars.OWNER_APP_ID
|
||||
id: app-token
|
||||
if: ${{ github.event_name == 'pull_request_target' }}
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
app-id: ${{ vars.OWNER_APP_ID }}
|
||||
private-key: ${{ secrets.OWNER_APP_PRIVATE_KEY }}
|
||||
permission-administration: read
|
||||
permission-members: read
|
||||
permission-pull-requests: write
|
||||
script: |
|
||||
const { readFile } = require('node:fs/promises')
|
||||
const changed = JSON.parse(await readFile('comparison/changed-paths.json', 'utf-8'))
|
||||
const description =
|
||||
'Package: ' + [
|
||||
`added ${changed.attrdiff.added.length}`,
|
||||
`removed ${changed.attrdiff.removed.length}`,
|
||||
`changed ${changed.attrdiff.changed.length}`
|
||||
].join(', ') +
|
||||
' — Rebuild: ' + [
|
||||
`linux ${changed.rebuildCountByKernel.linux}`,
|
||||
`darwin ${changed.rebuildCountByKernel.darwin}`
|
||||
].join(', ')
|
||||
|
||||
- name: Requesting maintainer reviews
|
||||
if: ${{ steps.app-token.outputs.token && github.repository_owner == 'NixOS' }}
|
||||
const { serverUrl, repo, runId, payload } = context
|
||||
const target_url =
|
||||
`${serverUrl}/${repo.owner}/${repo.repo}/actions/runs/${runId}?pr=${payload.pull_request.number}`
|
||||
|
||||
await github.rest.repos.createCommitStatus({
|
||||
...repo,
|
||||
sha: payload.pull_request.head.sha,
|
||||
context: 'Eval Summary',
|
||||
state: 'success',
|
||||
description,
|
||||
target_url
|
||||
})
|
||||
|
||||
# Creates a matrix of Eval performance for various versions and systems.
|
||||
report:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
needs: [versions, eval]
|
||||
steps:
|
||||
- name: Download output paths and eval stats for all versions
|
||||
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||
with:
|
||||
pattern: "*-diff-*"
|
||||
path: versions
|
||||
|
||||
- name: Add version comparison table to job summary
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
NUMBER: ${{ github.event.number }}
|
||||
AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||
# Don't request reviewers on draft PRs
|
||||
DRY_MODE: ${{ github.event.pull_request.draft && '1' || '' }}
|
||||
SYSTEMS: ${{ inputs.systems }}
|
||||
VERSIONS: ${{ needs.versions.outputs.versions }}
|
||||
with:
|
||||
script: |
|
||||
const { readFileSync } = require('node:fs')
|
||||
const path = require('node:path')
|
||||
|
||||
const systems = JSON.parse(process.env.SYSTEMS)
|
||||
const versions = JSON.parse(process.env.VERSIONS)
|
||||
|
||||
core.summary.addHeading('Lix/Nix version comparison')
|
||||
core.summary.addTable(
|
||||
[].concat(
|
||||
[
|
||||
[{ data: 'Version', header: true }].concat(
|
||||
systems.map((system) => ({ data: system, header: true })),
|
||||
),
|
||||
],
|
||||
versions.map((version) =>
|
||||
[{ data: version }].concat(
|
||||
systems.map((system) => {
|
||||
try {
|
||||
const artifact = path.join('versions', `${version}-diff-${system}`)
|
||||
const time = Math.round(
|
||||
parseFloat(
|
||||
readFileSync(
|
||||
path.join(artifact, 'after', system, 'total-time'),
|
||||
'utf-8',
|
||||
),
|
||||
),
|
||||
)
|
||||
const diff = JSON.parse(
|
||||
readFileSync(path.join(artifact, system, 'diff.json'), 'utf-8'),
|
||||
)
|
||||
const attrs = [].concat(
|
||||
diff.added,
|
||||
diff.removed,
|
||||
diff.changed,
|
||||
diff.rebuilds
|
||||
).filter(attr =>
|
||||
// Exceptions related to dev shells, which changed at some time between 2.18 and 2.24.
|
||||
!attr.startsWith('tests.devShellTools.nixos.') &&
|
||||
!attr.startsWith('tests.devShellTools.unstructuredDerivationInputEnv.')
|
||||
)
|
||||
if (attrs.length > 0) {
|
||||
core.setFailed(
|
||||
`${version} on ${system} has changed outpaths!\nNote: Please make sure to update ci/pinned.json separately from changes to other packages.`,
|
||||
)
|
||||
return { data: ':x:' }
|
||||
}
|
||||
return { data: time }
|
||||
} catch {
|
||||
core.warning(`${version} on ${system} did not produce artifact.`)
|
||||
return { data: ':warning:' }
|
||||
}
|
||||
}),
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
core.summary.addRaw(
|
||||
'\n*Evaluation time in seconds without downloading dependencies.*',
|
||||
true,
|
||||
)
|
||||
core.summary.addRaw('\n*:warning: Job did not report a result.*', true)
|
||||
core.summary.addRaw(
|
||||
'\n*:x: Job produced different outpaths than the target branch.*',
|
||||
true,
|
||||
)
|
||||
core.summary.write()
|
||||
|
||||
misc:
|
||||
if: ${{ github.event_name != 'push' }}
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Checkout the merge commit
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- name: Run misc eval tasks in parallel
|
||||
run: |
|
||||
# maintainers.json contains GitHub IDs. Look up handles to request reviews from.
|
||||
# There appears to be no API to request reviews based on GitHub IDs
|
||||
jq -r 'keys[]' comparison/maintainers.json \
|
||||
| while read -r id; do gh api /user/"$id" --jq .login; done \
|
||||
| GH_TOKEN=${{ steps.app-token.outputs.token }} result/bin/request-reviewers.sh "$REPOSITORY" "$NUMBER" "$AUTHOR"
|
||||
# Ensure flake outputs on all systems still evaluate
|
||||
nix flake check --all-systems --no-build './nixpkgs/untrusted?shallow=1' &
|
||||
# Query nixpkgs with aliases enabled to check for basic syntax errors
|
||||
nix-env -I ./nixpkgs/untrusted -f ./nixpkgs/untrusted -qa '*' --option restrict-eval true --option allow-import-from-derivation false >/dev/null &
|
||||
wait
|
||||
|
||||
123
.github/workflows/labels.yml
vendored
@@ -3,57 +3,118 @@
|
||||
# access to the GitHub API. This means that it should not evaluate user input in
|
||||
# a way that allows code injection.
|
||||
|
||||
name: "Label PR"
|
||||
name: Labels
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
schedule:
|
||||
- cron: '07,17,27,37,47,57 * * * *'
|
||||
workflow_call:
|
||||
inputs:
|
||||
headBranch:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
NIXPKGS_CI_APP_PRIVATE_KEY:
|
||||
required: true
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
# This explicitly avoids using `run_id` for the concurrency key to make sure that only
|
||||
# *one* scheduled run can run at a time.
|
||||
group: labels-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number }}
|
||||
# PR-triggered runs will be cancelled, but scheduled runs will be queued.
|
||||
cancel-in-progress: ${{ github.event_name != 'schedule' }}
|
||||
|
||||
# This is used as fallback without app only.
|
||||
# This happens when testing in forks without setting up that app.
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
labels:
|
||||
name: label-pr
|
||||
update:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
if: "github.repository_owner == 'NixOS' && !contains(github.event.pull_request.title, '[skip treewide]')"
|
||||
if: github.event_name != 'schedule' || github.repository_owner == 'NixOS'
|
||||
steps:
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
if: |
|
||||
github.event.pull_request.head.repo.owner.login != 'NixOS' || !(
|
||||
github.head_ref == 'haskell-updates' ||
|
||||
github.head_ref == 'python-updates' ||
|
||||
github.head_ref == 'staging-next' ||
|
||||
startsWith(github.head_ref, 'staging-next-')
|
||||
)
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
sparse-checkout: |
|
||||
ci/github-script
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install @actions/artifact bottleneck
|
||||
|
||||
# Use a GitHub App, because it has much higher rate limits: 12,500 instead of 5,000 req / hour.
|
||||
- uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
if: github.event_name != 'pull_request' && vars.NIXPKGS_CI_APP_ID
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.NIXPKGS_CI_APP_ID }}
|
||||
private-key: ${{ secrets.NIXPKGS_CI_APP_PRIVATE_KEY }}
|
||||
permission-issues: write
|
||||
permission-pull-requests: write
|
||||
|
||||
- name: Log current API rate limits
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token || github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Labels from API data and Eval results
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
github-token: ${{ steps.app-token.outputs.token || github.token }}
|
||||
retries: 3
|
||||
script: |
|
||||
require('./ci/github-script/labels.js')({
|
||||
github,
|
||||
context,
|
||||
core,
|
||||
dry: context.eventName == 'pull_request'
|
||||
})
|
||||
|
||||
- name: Log current API rate limits
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token || github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
||||
name: Labels from touched files
|
||||
if: |
|
||||
github.event_name == 'pull_request_target' &&
|
||||
!contains(fromJSON(inputs.headBranch).type, 'development')
|
||||
with:
|
||||
repo-token: ${{ steps.app-token.outputs.token }}
|
||||
configuration-path: .github/labeler.yml # default
|
||||
sync-labels: true
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
|
||||
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
||||
name: Labels from touched files (no sync)
|
||||
if: |
|
||||
github.event.pull_request.head.repo.owner.login != 'NixOS' || !(
|
||||
github.head_ref == 'haskell-updates' ||
|
||||
github.head_ref == 'python-updates' ||
|
||||
github.head_ref == 'staging-next' ||
|
||||
startsWith(github.head_ref, 'staging-next-')
|
||||
)
|
||||
github.event_name == 'pull_request_target' &&
|
||||
!contains(fromJSON(inputs.headBranch).type, 'development')
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
repo-token: ${{ steps.app-token.outputs.token }}
|
||||
configuration-path: .github/labeler-no-sync.yml
|
||||
sync-labels: false
|
||||
- uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5.0.0
|
||||
|
||||
- uses: actions/labeler@634933edcd8ababfe52f92936142cc22ac488b1b # v6.0.1
|
||||
name: Labels from touched files (development branches)
|
||||
# Development branches like staging-next, haskell-updates and python-updates get special labels.
|
||||
# This is to avoid the mass of labels there, which is mostly useless - and really annoying for
|
||||
# the backport labels.
|
||||
if: |
|
||||
github.event.pull_request.head.repo.owner.login == 'NixOS' && (
|
||||
github.head_ref == 'haskell-updates' ||
|
||||
github.head_ref == 'python-updates' ||
|
||||
github.head_ref == 'staging-next' ||
|
||||
startsWith(github.head_ref, 'staging-next-')
|
||||
)
|
||||
github.event_name == 'pull_request_target' &&
|
||||
contains(fromJSON(inputs.headBranch).type, 'development')
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
repo-token: ${{ steps.app-token.outputs.token }}
|
||||
configuration-path: .github/labeler-development-branches.yml
|
||||
sync-labels: true
|
||||
|
||||
- name: Log current API rate limits
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token || github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
33
.github/workflows/lib-tests.yml
vendored
@@ -1,33 +0,0 @@
|
||||
name: "Building Nixpkgs lib-tests"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/lib-tests.yml
|
||||
pull_request_target:
|
||||
paths:
|
||||
- 'lib/**'
|
||||
- 'maintainers/**'
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
nixpkgs-lib-tests:
|
||||
name: nixpkgs-lib-tests
|
||||
runs-on: ubuntu-24.04
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
|
||||
- name: Building Nixpkgs lib-tests
|
||||
run: |
|
||||
nix-build untrusted/ci -A lib-tests
|
||||
119
.github/workflows/lint.yml
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
name: Lint
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
inputs:
|
||||
mergedSha:
|
||||
required: true
|
||||
type: string
|
||||
targetSha:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN:
|
||||
required: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
treefmt:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Checkout the merge commit
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
|
||||
- uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
# TODO: Figure out how to best enable caching for the treefmt job. Cachix won't work well,
|
||||
# because the cache would be invalidated on every commit - treefmt checks every file.
|
||||
# Maybe we can cache treefmt's eval-cache somehow.
|
||||
|
||||
- name: Check that files are formatted
|
||||
run: |
|
||||
# Note that it's fine to run this on untrusted code because:
|
||||
# - There's no secrets accessible here
|
||||
# - The build is sandboxed
|
||||
if ! nix-build nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A fmt.check; then
|
||||
echo "Some files are not properly formatted"
|
||||
echo "Please format them by going to the Nixpkgs root directory and running one of:"
|
||||
echo " nix-shell --run treefmt"
|
||||
echo " nix develop --command treefmt"
|
||||
echo " nix fmt"
|
||||
echo "Make sure your branch is up to date with master; rebase if not."
|
||||
echo "If you're having trouble, please ping @NixOS/nix-formatting"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
parse:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Checkout the merge commit
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
|
||||
- uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# The nixpkgs-ci cache should not be trusted or used outside of Nixpkgs and its forks' CI.
|
||||
name: ${{ vars.CACHIX_NAME || 'nixpkgs-ci' }}
|
||||
extraPullNames: nixpkgs-ci
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
pushFilter: -source$
|
||||
|
||||
- name: Parse all nix files
|
||||
run: |
|
||||
# Tests multiple versions at once, let's make sure all of them run, so keep-going.
|
||||
nix-build nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A parse --keep-going
|
||||
|
||||
nixpkgs-vet:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Checkout merge and target commits
|
||||
uses: ./.github/actions/checkout
|
||||
with:
|
||||
merged-as-untrusted-at: ${{ inputs.mergedSha }}
|
||||
target-as-trusted-at: ${{ inputs.targetSha }}
|
||||
|
||||
- uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# The nixpkgs-ci cache should not be trusted or used outside of Nixpkgs and its forks' CI.
|
||||
name: ${{ vars.CACHIX_NAME || 'nixpkgs-ci' }}
|
||||
extraPullNames: nixpkgs-ci
|
||||
authToken: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
pushFilter: -source$
|
||||
|
||||
- name: Running nixpkgs-vet
|
||||
env:
|
||||
# Force terminal colors to be enabled. The library that `nixpkgs-vet` uses respects https://bixense.com/clicolors/
|
||||
CLICOLOR_FORCE: 1
|
||||
run: |
|
||||
if nix-build nixpkgs/untrusted/ci --arg nixpkgs ./nixpkgs/untrusted-pinned -A nixpkgs-vet --arg base "./nixpkgs/trusted" --arg head "./nixpkgs/untrusted"; then
|
||||
exit 0
|
||||
else
|
||||
exitCode=$?
|
||||
echo "To run locally: ./ci/nixpkgs-vet.sh $GITHUB_BASE_REF https://github.com/$GITHUB_REPOSITORY.git"
|
||||
echo "If you're having trouble, ping @NixOS/nixpkgs-vet"
|
||||
exit "$exitCode"
|
||||
fi
|
||||
62
.github/workflows/manual-nixos-v2.yml
vendored
@@ -1,62 +0,0 @@
|
||||
name: "Build NixOS manual v2"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/manual-nixos-v2.yml
|
||||
pull_request_target:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- "nixos/**"
|
||||
# Also build when the nixpkgs doc changed, since we take things like
|
||||
# the release notes and some css and js files from there.
|
||||
# See nixos/doc/manual/default.nix
|
||||
- "doc/**"
|
||||
# Build when something in lib changes
|
||||
# Since the lib functions are used to 'massage' the options before producing the manual
|
||||
- "lib/**"
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
nixos:
|
||||
name: nixos-manual-build
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- runner: ubuntu-24.04
|
||||
system: x86_64-linux
|
||||
- runner: ubuntu-24.04-arm
|
||||
system: aarch64-linux
|
||||
runs-on: ${{ matrix.runner }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
|
||||
name: nixpkgs-ci
|
||||
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
|
||||
|
||||
- name: Build NixOS manual
|
||||
id: build-manual
|
||||
run: NIX_PATH=nixpkgs=$(pwd)/untrusted nix-build --option restrict-eval true untrusted/ci -A manual-nixos --argstr system ${{ matrix.system }}
|
||||
|
||||
- name: Upload NixOS manual
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
|
||||
with:
|
||||
name: nixos-manual-${{ matrix.system }}
|
||||
path: result/
|
||||
if-no-files-found: error
|
||||
41
.github/workflows/manual-nixpkgs-v2.yml
vendored
@@ -1,41 +0,0 @@
|
||||
name: "Build Nixpkgs manual v2"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/manual-nixpkgs-v2.yml
|
||||
pull_request_target:
|
||||
branches:
|
||||
- master
|
||||
paths:
|
||||
- 'doc/**'
|
||||
- 'lib/**'
|
||||
- 'pkgs/by-name/ni/nixdoc/**'
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
nixpkgs:
|
||||
name: nixpkgs-manual-build
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.
|
||||
name: nixpkgs-ci
|
||||
authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
|
||||
|
||||
- name: Building Nixpkgs manual
|
||||
run: NIX_PATH=nixpkgs=$(pwd)/untrusted nix-build --option restrict-eval true untrusted/ci -A manual-nixpkgs -A manual-nixpkgs-tests
|
||||
57
.github/workflows/merge-group.yml
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
name: Merge Group
|
||||
|
||||
on:
|
||||
merge_group:
|
||||
workflow_call:
|
||||
inputs:
|
||||
mergedSha:
|
||||
required: true
|
||||
type: string
|
||||
targetSha:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN:
|
||||
required: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
name: Lint
|
||||
uses: ./.github/workflows/lint.yml
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
with:
|
||||
mergedSha: ${{ inputs.mergedSha || github.event.merge_group.head_sha }}
|
||||
targetSha: ${{ inputs.targetSha || github.event.merge_group.base_sha }}
|
||||
|
||||
# This job's only purpose is to create the target for the "Required Status Checks" branch ruleset.
|
||||
# It "needs" all the jobs that should block the Merge Queue.
|
||||
unlock:
|
||||
if: github.event_name != 'pull_request' && always()
|
||||
# Modify this list to add or remove jobs from required status checks.
|
||||
needs:
|
||||
- lint
|
||||
runs-on: ubuntu-24.04-arm
|
||||
permissions:
|
||||
statuses: write
|
||||
steps:
|
||||
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
RESULTS: ${{ toJSON(needs.*.result) }}
|
||||
with:
|
||||
script: |
|
||||
const { serverUrl, repo, runId, payload } = context
|
||||
const target_url =
|
||||
`${serverUrl}/${repo.owner}/${repo.repo}/actions/runs/${runId}`
|
||||
await github.rest.repos.createCommitStatus({
|
||||
...repo,
|
||||
sha: payload.merge_group.head_sha,
|
||||
// WARNING:
|
||||
// Do NOT change the name of this, otherwise the rule will not catch it anymore.
|
||||
// This would prevent all PRs from merging.
|
||||
context: 'no PR failures',
|
||||
state: JSON.parse(process.env.RESULTS).every(result => result == 'success') ? 'success' : 'error',
|
||||
target_url,
|
||||
})
|
||||
33
.github/workflows/nix-parse-v2.yml
vendored
@@ -1,33 +0,0 @@
|
||||
name: "Check whether nix files are parseable v2"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/nix-parse-v2.yml
|
||||
pull_request_target:
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
tests:
|
||||
name: nix-files-parseable-check
|
||||
runs-on: ubuntu-24.04-arm
|
||||
if: "!contains(github.event.pull_request.title, '[skip treewide]')"
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout the merge commit
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
with:
|
||||
extra_nix_config: sandbox = true
|
||||
nix_path: nixpkgs=channel:nixpkgs-unstable
|
||||
|
||||
- name: Parse all nix files
|
||||
run: |
|
||||
# Tests multiple versions at once, let's make sure all of them run, so keep-going.
|
||||
nix-build untrusted/ci -A parse --keep-going
|
||||
49
.github/workflows/nixpkgs-vet.yml
vendored
@@ -1,49 +0,0 @@
|
||||
# `nixpkgs-vet` is a tool to vet Nixpkgs: its architecture, package structure, and more.
|
||||
# Among other checks, it makes sure that `pkgs/by-name` (see `../../pkgs/by-name/README.md`) follows the validity rules outlined in [RFC 140](https://github.com/NixOS/rfcs/pull/140).
|
||||
# When you make changes to this workflow, please also update `ci/nixpkgs-vet.sh` to reflect the impact of your work to the CI.
|
||||
# See https://github.com/NixOS/nixpkgs-vet for details on the tool and its checks.
|
||||
|
||||
name: Vet nixpkgs
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/nixpkgs-vet.yml
|
||||
pull_request_target:
|
||||
|
||||
permissions: {}
|
||||
|
||||
# We don't use a concurrency group here, because the action is triggered quite often (due to the PR edit trigger), and contributors would get notified on any canceled run.
|
||||
# There is a feature request for suppressing notifications on concurrency-canceled runs: https://github.com/orgs/community/discussions/13015
|
||||
|
||||
jobs:
|
||||
check:
|
||||
name: nixpkgs-vet
|
||||
runs-on: ubuntu-24.04-arm
|
||||
# This should take 1 minute at most, but let's be generous. The default of 6 hours is definitely too long.
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
sparse-checkout: .github/actions
|
||||
- name: Check if the PR can be merged and checkout merged and target commits
|
||||
uses: ./.github/actions/get-merge-commit
|
||||
with:
|
||||
merged-as-untrusted: true
|
||||
target-as-trusted: true
|
||||
|
||||
- uses: cachix/install-nix-action@526118121621777ccd86f79b04685a9319637641 # v31
|
||||
|
||||
- name: Running nixpkgs-vet
|
||||
env:
|
||||
# Force terminal colors to be enabled. The library that `nixpkgs-vet` uses respects https://bixense.com/clicolors/
|
||||
CLICOLOR_FORCE: 1
|
||||
run: |
|
||||
if nix-build untrusted/ci -A nixpkgs-vet --arg base "./trusted" --arg head "./untrusted"; then
|
||||
exit 0
|
||||
else
|
||||
exitCode=$?
|
||||
echo "To run locally: ./ci/nixpkgs-vet.sh $GITHUB_BASE_REF https://github.com/$GITHUB_REPOSITORY.git"
|
||||
echo "If you're having trouble, ping @NixOS/nixpkgs-vet"
|
||||
exit "$exitCode"
|
||||
fi
|
||||
26
.github/workflows/no-channel.yml
vendored
@@ -1,26 +0,0 @@
|
||||
name: "No channel PR"
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- .github/workflows/no-channel.yml
|
||||
pull_request_target:
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
fail:
|
||||
if: |
|
||||
startsWith(github.event.pull_request.base.ref, 'nixos-') ||
|
||||
startsWith(github.event.pull_request.base.ref, 'nixpkgs-')
|
||||
name: "This PR is targeting a channel branch"
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- run: |
|
||||
cat <<EOF
|
||||
The nixos-* and nixpkgs-* branches are pushed to by the channel
|
||||
release script and should not be merged into directly.
|
||||
|
||||
Please target the equivalent release-* branch or master instead.
|
||||
EOF
|
||||
exit 1
|
||||
6
.github/workflows/periodic-merge-24h.yml
vendored
@@ -11,11 +11,15 @@ on:
|
||||
schedule:
|
||||
# * is a special character in YAML so you have to quote this string
|
||||
# Merge every 24 hours
|
||||
- cron: '0 0 * * *'
|
||||
- cron: '0 0 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
periodic-merge:
|
||||
if: github.repository_owner == 'NixOS'
|
||||
|
||||
6
.github/workflows/periodic-merge-6h.yml
vendored
@@ -11,11 +11,15 @@ on:
|
||||
schedule:
|
||||
# * is a special character in YAML so you have to quote this string
|
||||
# Merge every 6 hours
|
||||
- cron: '0 */6 * * *'
|
||||
- cron: '0 */6 * * *'
|
||||
workflow_dispatch:
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
periodic-merge:
|
||||
if: github.repository_owner == 'NixOS'
|
||||
|
||||
9
.github/workflows/periodic-merge.yml
vendored
@@ -12,13 +12,18 @@ on:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
merge:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 5
|
||||
steps:
|
||||
# Use a GitHub App to create the PR so that CI gets triggered
|
||||
# The App is scoped to Repository > Contents and Pull Requests: write for Nixpkgs
|
||||
- uses: actions/create-github-app-token@df432ceedc7162793a195dd1713ff69aefc7379e # v2.0.6
|
||||
- uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.NIXPKGS_CI_APP_ID }}
|
||||
@@ -26,7 +31,7 @@ jobs:
|
||||
permission-contents: write
|
||||
permission-pull-requests: write
|
||||
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
|
||||
- name: Find merge base between two branches
|
||||
if: contains(inputs.from, ' ')
|
||||
|
||||
158
.github/workflows/pr.yml
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
name: PR
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
workflow_call:
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN:
|
||||
required: true
|
||||
NIXPKGS_CI_APP_PRIVATE_KEY:
|
||||
required: true
|
||||
OWNER_APP_PRIVATE_KEY:
|
||||
# The Test workflow should not actually request reviews from owners.
|
||||
required: false
|
||||
OWNER_RO_APP_PRIVATE_KEY:
|
||||
required: true
|
||||
|
||||
concurrency:
|
||||
group: pr-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
permissions:
|
||||
# wrong branch review comment
|
||||
pull-requests: write
|
||||
outputs:
|
||||
baseBranch: ${{ steps.prepare.outputs.base }}
|
||||
headBranch: ${{ steps.prepare.outputs.head }}
|
||||
mergedSha: ${{ steps.prepare.outputs.mergedSha }}
|
||||
targetSha: ${{ steps.prepare.outputs.targetSha }}
|
||||
systems: ${{ steps.prepare.outputs.systems }}
|
||||
touched: ${{ steps.prepare.outputs.touched }}
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout-cone-mode: true # default, for clarity
|
||||
sparse-checkout: |
|
||||
ci/github-script
|
||||
- id: prepare
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
require('./ci/github-script/prepare.js')({
|
||||
github,
|
||||
context,
|
||||
core,
|
||||
dry: context.eventName == 'pull_request',
|
||||
})
|
||||
|
||||
check:
|
||||
name: Check
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/check.yml
|
||||
permissions:
|
||||
# cherry-picks
|
||||
pull-requests: write
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
OWNER_RO_APP_PRIVATE_KEY: ${{ secrets.OWNER_RO_APP_PRIVATE_KEY }}
|
||||
with:
|
||||
baseBranch: ${{ needs.prepare.outputs.baseBranch }}
|
||||
headBranch: ${{ needs.prepare.outputs.headBranch }}
|
||||
mergedSha: ${{ needs.prepare.outputs.mergedSha }}
|
||||
targetSha: ${{ needs.prepare.outputs.targetSha }}
|
||||
ownersCanFail: ${{ !contains(fromJSON(needs.prepare.outputs.touched), 'owners') }}
|
||||
|
||||
lint:
|
||||
name: Lint
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/lint.yml
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
with:
|
||||
mergedSha: ${{ needs.prepare.outputs.mergedSha }}
|
||||
targetSha: ${{ needs.prepare.outputs.targetSha }}
|
||||
|
||||
eval:
|
||||
name: Eval
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/eval.yml
|
||||
permissions:
|
||||
# compare
|
||||
statuses: write
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
with:
|
||||
mergedSha: ${{ needs.prepare.outputs.mergedSha }}
|
||||
targetSha: ${{ needs.prepare.outputs.targetSha }}
|
||||
systems: ${{ needs.prepare.outputs.systems }}
|
||||
testVersions: ${{ contains(fromJSON(needs.prepare.outputs.touched), 'pinned') && !contains(fromJSON(needs.prepare.outputs.headBranch).type, 'development') }}
|
||||
|
||||
labels:
|
||||
name: Labels
|
||||
needs: [prepare, eval]
|
||||
uses: ./.github/workflows/labels.yml
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
secrets:
|
||||
NIXPKGS_CI_APP_PRIVATE_KEY: ${{ secrets.NIXPKGS_CI_APP_PRIVATE_KEY }}
|
||||
with:
|
||||
headBranch: ${{ needs.prepare.outputs.headBranch }}
|
||||
|
||||
reviewers:
|
||||
name: Reviewers
|
||||
needs: [prepare, eval]
|
||||
if: |
|
||||
needs.prepare.outputs.targetSha &&
|
||||
!contains(fromJSON(needs.prepare.outputs.headBranch).type, 'development')
|
||||
uses: ./.github/workflows/reviewers.yml
|
||||
secrets:
|
||||
OWNER_APP_PRIVATE_KEY: ${{ secrets.OWNER_APP_PRIVATE_KEY }}
|
||||
|
||||
build:
|
||||
name: Build
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/build.yml
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
with:
|
||||
baseBranch: ${{ needs.prepare.outputs.baseBranch }}
|
||||
mergedSha: ${{ needs.prepare.outputs.mergedSha }}
|
||||
|
||||
# This job's only purpose is to create the target for the "Required Status Checks" branch ruleset.
|
||||
# It "needs" all the jobs that should block merging a PR.
|
||||
unlock:
|
||||
if: github.event_name != 'pull_request' && always()
|
||||
# Modify this list to add or remove jobs from required status checks.
|
||||
needs:
|
||||
- check
|
||||
- lint
|
||||
- eval
|
||||
- build
|
||||
runs-on: ubuntu-24.04-arm
|
||||
permissions:
|
||||
statuses: write
|
||||
steps:
|
||||
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
env:
|
||||
RESULTS: ${{ toJSON(needs.*.result) }}
|
||||
with:
|
||||
script: |
|
||||
const { serverUrl, repo, runId, payload } = context
|
||||
const target_url =
|
||||
`${serverUrl}/${repo.owner}/${repo.repo}/actions/runs/${runId}?pr=${payload.pull_request.number}`
|
||||
await github.rest.repos.createCommitStatus({
|
||||
...repo,
|
||||
sha: payload.pull_request.head.sha,
|
||||
// WARNING:
|
||||
// Do NOT change the name of this, otherwise the rule will not catch it anymore.
|
||||
// This would prevent all PRs from merging.
|
||||
context: 'no PR failures',
|
||||
state: JSON.parse(process.env.RESULTS).every(status => status == 'success') ? 'success' : 'error',
|
||||
target_url,
|
||||
})
|
||||
50
.github/workflows/push.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: Push
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- staging
|
||||
- release-*
|
||||
- staging-*
|
||||
- haskell-updates
|
||||
workflow_call:
|
||||
inputs:
|
||||
mergedSha:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN:
|
||||
required: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
outputs:
|
||||
systems: ${{ steps.systems.outputs.systems }}
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout: |
|
||||
ci/supportedSystems.json
|
||||
|
||||
- name: Load supported systems
|
||||
id: systems
|
||||
run: |
|
||||
echo "systems=$(jq -c <ci/supportedSystems.json)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
eval:
|
||||
name: Eval
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/eval.yml
|
||||
# Those are not actually used on push, but will throw an error if not set.
|
||||
permissions:
|
||||
# compare
|
||||
statuses: write
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
with:
|
||||
mergedSha: ${{ inputs.mergedSha || github.sha }}
|
||||
systems: ${{ needs.prepare.outputs.systems }}
|
||||
17
.github/workflows/review-dismissed.yml
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
name: Review dismissed
|
||||
|
||||
on:
|
||||
pull_request_review:
|
||||
types: [dismissed]
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
trigger:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
steps:
|
||||
- run: echo This is a no-op only used as a trigger for workflow_run.
|
||||
170
.github/workflows/reviewers.yml
vendored
Normal file
@@ -0,0 +1,170 @@
|
||||
# This workflow will request reviews from the maintainers of each package
|
||||
# listed in the PR's most recent eval comparison artifact.
|
||||
|
||||
name: Reviewers
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [ready_for_review]
|
||||
workflow_call:
|
||||
secrets:
|
||||
OWNER_APP_PRIVATE_KEY:
|
||||
required: true
|
||||
|
||||
concurrency:
|
||||
group: reviewers-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
request:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
timeout-minutes: 20
|
||||
steps:
|
||||
- name: Check out the PR at the base commit
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
path: trusted
|
||||
sparse-checkout: ci
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@7be5dee1421f63d07e71ce6e0a9f8a4b07c2a487 # v31
|
||||
|
||||
- name: Build the requestReviews derivation
|
||||
run: nix-build trusted/ci -A requestReviews
|
||||
|
||||
# For requesting reviewers, this job depends on a GitHub App with the following permissions:
|
||||
# - Permissions:
|
||||
# - Repository > Administration: read-only
|
||||
# - Organization > Members: read-only
|
||||
# - Repository > Pull Requests: read-write
|
||||
# - Install App on this repository, setting these variables:
|
||||
# - OWNER_APP_ID (variable)
|
||||
# - OWNER_APP_PRIVATE_KEY (secret)
|
||||
#
|
||||
# Can't use the token received from permissions above, because it can't get enough permissions.
|
||||
- uses: actions/create-github-app-token@a8d616148505b5069dccd32f177bb87d7f39123b # v2.1.1
|
||||
if: github.event_name == 'pull_request_target' && vars.OWNER_APP_ID
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.OWNER_APP_ID }}
|
||||
private-key: ${{ secrets.OWNER_APP_PRIVATE_KEY }}
|
||||
permission-administration: read
|
||||
permission-members: read
|
||||
permission-pull-requests: write
|
||||
|
||||
- name: Log current API rate limits (app-token)
|
||||
if: ${{ steps.app-token.outputs.token }}
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Requesting code owner reviews
|
||||
if: steps.app-token.outputs.token
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
NUMBER: ${{ github.event.number }}
|
||||
# Don't do anything on draft PRs
|
||||
DRY_MODE: ${{ github.event.pull_request.draft && '1' || '' }}
|
||||
run: result/bin/request-code-owner-reviews.sh "$REPOSITORY" "$NUMBER" ci/OWNERS
|
||||
|
||||
- name: Log current API rate limits (app-token)
|
||||
if: ${{ steps.app-token.outputs.token }}
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Log current API rate limits (github.token)
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
# In the regular case, this workflow is called via workflow_call from the eval workflow directly.
|
||||
# In the more special case, when a PR is undrafted an eval run will have started already.
|
||||
- name: Wait for comparison to be done
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
id: eval
|
||||
with:
|
||||
script: |
|
||||
const run_id = (await github.rest.actions.listWorkflowRuns({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
workflow_id: context.eventName === 'pull_request' ? 'test.yml' : 'pr.yml',
|
||||
event: context.eventName,
|
||||
head_sha: context.payload.pull_request.head.sha
|
||||
})).data.workflow_runs[0].id
|
||||
|
||||
core.setOutput('run-id', run_id)
|
||||
|
||||
// Waiting 120 * 5 sec = 10 min. max.
|
||||
// The extreme case is an Eval run that just started when the PR is undrafted.
|
||||
// Eval takes max 5-6 minutes, normally.
|
||||
for (let i = 0; i < 120; i++) {
|
||||
const result = await github.rest.actions.listWorkflowRunArtifacts({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
run_id,
|
||||
name: 'comparison'
|
||||
})
|
||||
if (result.data.total_count > 0) return
|
||||
await new Promise(resolve => setTimeout(resolve, 5000))
|
||||
}
|
||||
throw new Error("No comparison artifact found.")
|
||||
|
||||
- name: Log current API rate limits (github.token)
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Download the comparison results
|
||||
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
|
||||
with:
|
||||
run-id: ${{ steps.eval.outputs.run-id }}
|
||||
github-token: ${{ github.token }}
|
||||
pattern: comparison
|
||||
path: comparison
|
||||
merge-multiple: true
|
||||
|
||||
- name: Log current API rate limits (app-token)
|
||||
if: ${{ steps.app-token.outputs.token }}
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Log current API rate limits (github.token)
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Requesting maintainer reviews
|
||||
if: ${{ steps.app-token.outputs.token }}
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
NUMBER: ${{ github.event.number }}
|
||||
AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||
# Don't request reviewers on draft PRs
|
||||
DRY_MODE: ${{ github.event.pull_request.draft && '1' || '' }}
|
||||
run: |
|
||||
# maintainers.json contains GitHub IDs. Look up handles to request reviews from.
|
||||
# There appears to be no API to request reviews based on GitHub IDs
|
||||
jq -r 'keys[]' comparison/maintainers.json \
|
||||
| while read -r id; do gh api /user/"$id" --jq .login; done \
|
||||
| GH_TOKEN=${{ steps.app-token.outputs.token }} result/bin/request-reviewers.sh "$REPOSITORY" "$NUMBER" "$AUTHOR"
|
||||
|
||||
- name: Log current API rate limits (app-token)
|
||||
if: ${{ steps.app-token.outputs.token }}
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
|
||||
- name: Log current API rate limits (github.token)
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: gh api /rate_limit | jq
|
||||
114
.github/workflows/test.yml
vendored
Normal file
@@ -0,0 +1,114 @@
|
||||
name: Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: test-${{ github.workflow }}-${{ github.event_name }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
prepare:
|
||||
runs-on: ubuntu-24.04-arm
|
||||
outputs:
|
||||
merge-group: ${{ steps.files.outputs.merge-group }}
|
||||
mergedSha: ${{ steps.prepare.outputs.mergedSha }}
|
||||
pr: ${{ steps.files.outputs.pr }}
|
||||
push: ${{ steps.files.outputs.push }}
|
||||
targetSha: ${{ steps.prepare.outputs.targetSha }}
|
||||
steps:
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
|
||||
with:
|
||||
sparse-checkout-cone-mode: true # default, for clarity
|
||||
sparse-checkout: |
|
||||
ci/github-script
|
||||
- id: prepare
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
require('./ci/github-script/prepare.js')({
|
||||
github,
|
||||
context,
|
||||
core,
|
||||
// Review comments will be posted by the main PR workflow on the pull_request_target event.
|
||||
dry: false,
|
||||
})
|
||||
|
||||
- name: Determine changed files
|
||||
id: files
|
||||
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
|
||||
with:
|
||||
script: |
|
||||
const files = (await github.paginate(github.rest.pulls.listFiles, {
|
||||
...context.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
per_page: 100,
|
||||
})).map(file => file.filename)
|
||||
|
||||
if (files.some(file => [
|
||||
'.github/workflows/lint.yml',
|
||||
'.github/workflows/merge-group.yml',
|
||||
'.github/workflows/test.yml',
|
||||
].includes(file))) core.setOutput('merge-group', true)
|
||||
|
||||
if (files.some(file => [
|
||||
'.github/actions/checkout/action.yml',
|
||||
'.github/workflows/build.yml',
|
||||
'.github/workflows/check.yml',
|
||||
'.github/workflows/eval.yml',
|
||||
'.github/workflows/labels.yml',
|
||||
'.github/workflows/lint.yml',
|
||||
'.github/workflows/pr.yml',
|
||||
'.github/workflows/reviewers.yml',
|
||||
'.github/workflows/test.yml',
|
||||
].includes(file))) core.setOutput('pr', true)
|
||||
|
||||
if (files.some(file => [
|
||||
'.github/workflows/eval.yml',
|
||||
'.github/workflows/push.yml',
|
||||
'.github/workflows/test.yml',
|
||||
].includes(file))) core.setOutput('push', true)
|
||||
|
||||
merge-group:
|
||||
if: needs.prepare.outputs.merge-group
|
||||
name: Merge Group
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/merge-group.yml
|
||||
# Those are actually only used on the merge_group event, but will throw an error if not set.
|
||||
permissions:
|
||||
statuses: write
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
with:
|
||||
mergedSha: ${{ needs.prepare.outputs.mergedSha }}
|
||||
targetSha: ${{ needs.prepare.outputs.targetSha }}
|
||||
|
||||
pr:
|
||||
if: needs.prepare.outputs.pr
|
||||
name: PR
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/pr.yml
|
||||
# Those are actually only used on the pull_request_target event, but will throw an error if not set.
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
statuses: write
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
NIXPKGS_CI_APP_PRIVATE_KEY: ${{ secrets.NIXPKGS_CI_APP_PRIVATE_KEY }}
|
||||
OWNER_RO_APP_PRIVATE_KEY: ${{ secrets.OWNER_RO_APP_PRIVATE_KEY }}
|
||||
|
||||
push:
|
||||
if: needs.prepare.outputs.push
|
||||
name: Push
|
||||
needs: [prepare]
|
||||
uses: ./.github/workflows/push.yml
|
||||
# Those are not actually used on the push or pull_request events, but will throw an error if not set.
|
||||
permissions:
|
||||
statuses: write
|
||||
secrets:
|
||||
CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }}
|
||||
with:
|
||||
mergedSha: ${{ needs.prepare.outputs.mergedSha }}
|
||||
21
.mergify.yml
@@ -1,21 +0,0 @@
queue_rules:
  # This rule is for https://docs.mergify.com/commands/queue/
  # and can be triggered with: @mergifyio queue
  - name: default
    merge_conditions:
      # all github action checks in this list are required to merge a pull request
      - check-success=Attributes
      - check-success=Check
      - check-success=Outpaths (aarch64-darwin)
      - check-success=Outpaths (aarch64-linux)
      - check-success=Outpaths (x86_64-darwin)
      - check-success=Outpaths (x86_64-linux)
      - check-success=Process
      - check-success=Request
      - check-success=editorconfig-check
      - check-success=label-pr
      - check-success=nix-files-parseable-check
      - check-success=nixfmt-check
      - check-success=nixpkgs-vet
    # queue up to 5 pull requests at a time
    batch_size: 5
@@ -559,7 +559,12 @@ If you have any problems with formatting, please ping the
- Functions should list their expected arguments as precisely as possible. That is, write

  ```nix
  { stdenv, fetchurl, perl }: <...>
  {
    stdenv,
    fetchurl,
    perl,
  }:
  <...>
  ```

  instead of
@@ -571,17 +576,25 @@ If you have any problems with formatting, please ping the
  or

  ```nix
  { stdenv, fetchurl, perl, ... }: <...>
  {
    stdenv,
    fetchurl,
    perl,
    ...
  }:
  <...>
  ```

  For functions that are truly generic in the number of arguments (such as wrappers around `mkDerivation`) that have some required arguments, you should write them using an `@`-pattern:

  ```nix
  { stdenv, doCoverageAnalysis ? false, ... } @ args:
  {
    stdenv,
    doCoverageAnalysis ? false,
    ...
  }@args:

  stdenv.mkDerivation (args // {
    foo = if doCoverageAnalysis then "bla" else "";
  })
  stdenv.mkDerivation (args // { foo = if doCoverageAnalysis then "bla" else ""; })
  ```

  instead of
@@ -589,41 +602,36 @@ If you have any problems with formatting, please ping the
  ```nix
  args:

  args.stdenv.mkDerivation (args // {
    foo = if args ? doCoverageAnalysis && args.doCoverageAnalysis then "bla" else "";
  })
  args.stdenv.mkDerivation (
    args
    // {
      foo = if args ? doCoverageAnalysis && args.doCoverageAnalysis then "bla" else "";
    }
  )
  ```

- Unnecessary string conversions should be avoided. Do

  ```nix
  {
    rev = version;
  }
  { rev = version; }
  ```

  instead of

  ```nix
  {
    rev = "${version}";
  }
  { rev = "${version}"; }
  ```

- Building lists conditionally _should_ be done with `lib.optional(s)` instead of using `if cond then [ ... ] else null` or `if cond then [ ... ] else [ ]`.

  ```nix
  {
    buildInputs = lib.optional stdenv.hostPlatform.isDarwin iconv;
  }
  { buildInputs = lib.optional stdenv.hostPlatform.isDarwin iconv; }
  ```

  instead of

  ```nix
  {
    buildInputs = if stdenv.hostPlatform.isDarwin then [ iconv ] else null;
  }
  { buildInputs = if stdenv.hostPlatform.isDarwin then [ iconv ] else null; }
  ```
  As an exception, an explicit conditional expression with null can be used when fixing an important bug without triggering a mass rebuild.
29
ci/OWNERS
@@ -15,12 +15,9 @@

# CI
/.github/*_TEMPLATE* @SigmaSquadron
/.github/actions @NixOS/Security @Mic92 @zowoq @infinisil @azuwis @wolfgangwalther
/.github/workflows @NixOS/Security @Mic92 @zowoq @infinisil @azuwis @wolfgangwalther
/.github/workflows/check-format.yml @infinisil @wolfgangwalther
/.github/workflows/codeowners-v2.yml @infinisil @wolfgangwalther
/.github/workflows/nixpkgs-vet.yml @infinisil @philiptaron @wolfgangwalther
/ci @infinisil @philiptaron @NixOS/Security @wolfgangwalther
/.github/actions @NixOS/nixpkgs-ci
/.github/workflows @NixOS/nixpkgs-ci
/ci @NixOS/nixpkgs-ci
/ci/OWNERS @infinisil @philiptaron

# Development support
@@ -29,13 +26,19 @@

# Libraries
/lib @infinisil @hsjobeki
/lib/systems @alyssais @ericson2314 @NixOS/stdenv
/lib/generators.nix @infinisil @hsjobeki @Profpatsch
/lib/cli.nix @infinisil @hsjobeki @Profpatsch
/lib/debug.nix @infinisil @hsjobeki @Profpatsch
/lib/asserts.nix @infinisil @hsjobeki @Profpatsch
/lib/path/* @infinisil @hsjobeki
/lib/fileset @infinisil @hsjobeki
## Standard environment–related libraries
/lib/customisation.nix @alyssais @NixOS/stdenv
/lib/derivations.nix @alyssais @NixOS/stdenv
/lib/fetchers.nix @alyssais @NixOS/stdenv
/lib/meta.nix @alyssais @NixOS/stdenv
/lib/source-types.nix @alyssais @NixOS/stdenv
/lib/systems @alyssais @NixOS/stdenv
## Libraries / Module system
/lib/modules.nix @infinisil @roberth @hsjobeki
/lib/types.nix @infinisil @roberth @hsjobeki
@@ -59,6 +62,7 @@
/pkgs/build-support/cc-wrapper @Ericson2314
/pkgs/build-support/bintools-wrapper @Ericson2314
/pkgs/build-support/setup-hooks @Ericson2314
/pkgs/build-support/setup-hooks/arrayUtilities @ConnorBaker
/pkgs/build-support/setup-hooks/auto-patchelf.sh @layus
/pkgs/by-name/au/auto-patchelf @layus

@@ -221,7 +225,10 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @NixOS/nix-team @raitobeza
/nixos/tests/snapcast.nix @mweinelt

# Browsers
/pkgs/applications/networking/browsers/firefox @mweinelt
/pkgs/build-support/build-mozilla-mach @mweinelt
/pkgs/applications/networking/browsers/firefox/update.nix
/pkgs/applications/networking/browsers/firefox/packages/firefox.nix @mweinelt
/pkgs/applications/networking/browsers/firefox/packages/firefox-esr-*.nix @mweinelt
/pkgs/applications/networking/browsers/chromium @emilylange @networkException
/nixos/tests/chromium.nix @emilylange @networkException

@@ -284,6 +291,10 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
/pkgs/servers/home-assistant @mweinelt
/pkgs/by-name/es/esphome @mweinelt

# Linux kernel
/pkgs/top-level/linux-kernels.nix @NixOS/linux-kernel
/pkgs/os-specific/linux/kernel/ @NixOS/linux-kernel

# Network Time Daemons
/pkgs/by-name/ch/chrony @thoughtpolice
/pkgs/by-name/nt/ntp @thoughtpolice
@@ -475,4 +486,4 @@ pkgs/by-name/oc/octodns/ @anthonyroussel
pkgs/by-name/te/teleport* @arianvp @justinas @sigma @tomberek @freezeboy @techknowlogick @JuliusFreudenberger

# Warp-terminal
pkgs/by-name/wa/warp-terminal/ @emilytrau @imadnyc @donteatoreo @johnrtitor
pkgs/by-name/wa/warp-terminal/ @emilytrau @imadnyc @FlameFlag @johnrtitor
50
ci/README.md
@@ -6,37 +6,47 @@ This is in contrast with [`maintainers/scripts`](../maintainers/scripts) which i
## Pinned Nixpkgs

CI may need certain packages from Nixpkgs.
In order to ensure that the needed packages are generally available without building,
[`pinned-nixpkgs.json`](./pinned-nixpkgs.json) contains a pinned Nixpkgs version tested by Hydra.
In order to ensure that the needed packages are generally available without building, [`pinned.json`](./pinned.json) contains a pinned Nixpkgs version tested by Hydra.

Run [`update-pinned-nixpkgs.sh`](./update-pinned-nixpkgs.sh) to update it.
Run [`update-pinned.sh`](./update-pinned.sh) to update it.
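For example (a minimal sketch, assuming the script is invoked from the root of a Nixpkgs checkout with network access):

```
# Refresh ci/pinned.json to the latest Hydra-tested revision
./ci/update-pinned.sh
```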
## `ci/nixpkgs-vet.sh BASE_BRANCH [REPOSITORY]`

Runs the [`nixpkgs-vet` tool](https://github.com/NixOS/nixpkgs-vet) on the HEAD commit, closely matching what CI does. This can't do exactly the same as CI, because CI needs to rely on GitHub's server-side Git history to compute the mergeability of PRs before the check can be started.
Runs the [`nixpkgs-vet` tool](https://github.com/NixOS/nixpkgs-vet) on the HEAD commit, closely matching what CI does.
This can't do exactly the same as CI, because CI needs to rely on GitHub's server-side Git history to compute the mergeability of PRs before the check can be started.
In turn, when contributors are running this tool locally, we don't want to have to push commits to test them, and we can also rely on the local Git history to do the mergeability check.

Arguments:

- `BASE_BRANCH`: The base branch to use, e.g. master or release-24.05
- `REPOSITORY`: The repository from which to fetch the base branch. Defaults to <https://github.com/NixOS/nixpkgs.git>.
- `REPOSITORY`: The repository from which to fetch the base branch.
  Defaults to <https://github.com/NixOS/nixpkgs.git>.

## `ci/nixpkgs-vet`
# Branch classification

This directory contains scripts and files used and related to [`nixpkgs-vet`](https://github.com/NixOS/nixpkgs-vet/), which the CI uses to implement `pkgs/by-name` checks, along with many other Nixpkgs architecture rules.
See also the [CI GitHub Action](../.github/workflows/nixpkgs-vet.yml).
For the purposes of CI, branches in the NixOS/nixpkgs repository are classified as follows:

## `ci/nixpkgs-vet/update-pinned-tool.sh`
- **Channel** branches
  - `nixos-` or `nixpkgs-` prefix
  - Are only updated from `master` or `release-` branches, when hydra passes.
  - Otherwise not worked on, Pull Requests are not allowed.
  - Long-lived, no deletion, no force push.
- **Primary development** branches
  - `release-` prefix and `master`
  - Pull Requests required.
  - Long-lived, no deletion, no force push.
- **Secondary development** branches
  - `staging-` prefix and `haskell-updates`
  - Pull Requests normally required, except when merging development branches into each other.
  - Long-lived, no deletion, no force push.
- **Work-In-Progress** branches
  - `backport-`, `revert-` and `wip-` prefixes.
  - Deprecated: All other branches, not matched by channel/development.
  - Pull Requests are optional.
  - Short-lived, force push allowed, deleted after merge.

Updates the pinned [`nixpkgs-vet` tool](https://github.com/NixOS/nixpkgs-vet) in [`ci/nixpkgs-vet/pinned-version.txt`](./nixpkgs-vet/pinned-version.txt) to the latest [release](https://github.com/NixOS/nixpkgs-vet/releases).
Some branches also have a version component, which is either `unstable` or `YY.MM`.

Each release contains a pre-built `x86_64-linux` version of the tool which is used by CI.

This script currently needs to be called manually when the CI tooling needs to be updated.

Why not just build the tooling right from the PRs Nixpkgs version?

- Because it allows CI to check all PRs, even if they would break the CI tooling.
- Because it makes the CI check very fast, since no Nix builds need to be done, even for mass rebuilds.
- Because it improves security, since we don't have to build potentially untrusted code from PRs.
The tool only needs a very minimal Nix evaluation at runtime, which can work with [readonly-mode](https://nixos.org/manual/nix/stable/command-ref/opt-common.html#opt-readonly-mode) and [restrict-eval](https://nixos.org/manual/nix/stable/command-ref/conf-file.html#conf-restrict-eval).
`ci/supportedBranches.js` is a script imported by CI to classify the base and head branches of a Pull Request.
This classification will then be used to skip certain jobs.
This script can also be run locally to print basic test cases.
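To try the classification locally, the script can be executed directly; this is a minimal sketch assuming a Node.js interpreter is available on your PATH, since the repository does not prescribe an exact invocation:

```
# Print the script's built-in branch classification test cases
node ci/supportedBranches.js
```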
@@ -20,7 +20,7 @@ buildGoModule {
})
# Undoes part of the above PR: We don't want to require write access
# to the repository, that's only needed for GitHub's native CODEOWNERS.
# Furthermore, it removes an unneccessary check from the code
# Furthermore, it removes an unnecessary check from the code
# that breaks tokens generated for GitHub Apps.
./permissions.patch
# Allows setting a custom CODEOWNERS path using the OWNERS_FILE env var
105
ci/default.nix
@@ -1,33 +1,34 @@
|
||||
let
|
||||
pinnedNixpkgs = builtins.fromJSON (builtins.readFile ./pinned-nixpkgs.json);
|
||||
pinned = (builtins.fromJSON (builtins.readFile ./pinned.json)).pins;
|
||||
in
|
||||
{
|
||||
system ? builtins.currentSystem,
|
||||
|
||||
nixpkgs ? null,
|
||||
nixPath ? "nixVersions.latest",
|
||||
}:
|
||||
let
|
||||
nixpkgs' =
|
||||
if nixpkgs == null then
|
||||
fetchTarball {
|
||||
url = "https://github.com/NixOS/nixpkgs/archive/${pinnedNixpkgs.rev}.tar.gz";
|
||||
sha256 = pinnedNixpkgs.sha256;
|
||||
inherit (pinned.nixpkgs) url;
|
||||
sha256 = pinned.nixpkgs.hash;
|
||||
}
|
||||
else
|
||||
nixpkgs;
|
||||
|
||||
pkgs = import nixpkgs' {
|
||||
inherit system;
|
||||
config = { };
|
||||
overlays = [ ];
|
||||
# Nixpkgs generally — and CI specifically — do not use aliases,
|
||||
# because we want to ensure they are not load-bearing.
|
||||
allowAliases = false;
|
||||
};
|
||||
|
||||
fmt =
|
||||
let
|
||||
treefmtNixSrc = fetchTarball {
|
||||
# Master at 2025-02-12
|
||||
url = "https://github.com/numtide/treefmt-nix/archive/4f09b473c936d41582dd744e19f34ec27592c5fd.tar.gz";
|
||||
sha256 = "051vh6raskrxw5k6jncm8zbk9fhbzgm1gxpq9gm5xw1b6wgbgcna";
|
||||
inherit (pinned.treefmt-nix) url;
|
||||
sha256 = pinned.treefmt-nix.hash;
|
||||
};
|
||||
treefmtEval = (import treefmtNixSrc).evalModule pkgs {
|
||||
# Important: The auto-rebase script uses `git filter-branch --tree-filter`,
|
||||
@@ -46,12 +47,45 @@ let
|
||||
|
||||
programs.actionlint.enable = true;
|
||||
|
||||
programs.biome = {
|
||||
enable = true;
|
||||
settings.formatter = {
|
||||
useEditorconfig = true;
|
||||
};
|
||||
settings.javascript.formatter = {
|
||||
quoteStyle = "single";
|
||||
semicolons = "asNeeded";
|
||||
};
|
||||
settings.json.formatter.enabled = false;
|
||||
};
|
||||
settings.formatter.biome.excludes = [
|
||||
"*.min.js"
|
||||
"pkgs/*"
|
||||
];
|
||||
|
||||
programs.keep-sorted.enable = true;
|
||||
|
||||
# This uses nixfmt-rfc-style underneath,
|
||||
# the default formatter for Nix code.
|
||||
# This uses nixfmt underneath, the default formatter for Nix code.
|
||||
# See https://github.com/NixOS/nixfmt
|
||||
programs.nixfmt.enable = true;
|
||||
programs.nixfmt = {
|
||||
enable = true;
|
||||
package = pkgs.nixfmt;
|
||||
};
|
||||
|
||||
programs.yamlfmt = {
|
||||
enable = true;
|
||||
settings.formatter = {
|
||||
retain_line_breaks = true;
|
||||
};
|
||||
};
|
||||
settings.formatter.yamlfmt.excludes = [
|
||||
# Breaks helm templating
|
||||
"nixos/tests/k3s/k3s-test-chart/templates/*"
|
||||
# Aligns comments with whitespace
|
||||
"pkgs/development/haskell-modules/configuration-hackage2nix/main.yaml"
|
||||
# TODO: Fix formatting for auto-generated file
|
||||
"pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml"
|
||||
];
|
||||
|
||||
settings.formatter.editorconfig-checker = {
|
||||
command = "${pkgs.lib.getExe pkgs.editorconfig-checker}";
|
||||
@@ -59,6 +93,23 @@ let
|
||||
includes = [ "*" ];
|
||||
priority = 1;
|
||||
};
|
||||
|
||||
# TODO: Upstream this into treefmt-nix eventually:
|
||||
# https://github.com/numtide/treefmt-nix/issues/387
|
||||
settings.formatter.markdown-code-runner = {
|
||||
command = pkgs.lib.getExe pkgs.markdown-code-runner;
|
||||
options =
|
||||
let
|
||||
config = pkgs.writers.writeTOML "markdown-code-runner-config" {
|
||||
presets.nixfmt = {
|
||||
language = "nix";
|
||||
command = [ (pkgs.lib.getExe pkgs.nixfmt) ];
|
||||
};
|
||||
};
|
||||
in
|
||||
[ "--config=${config}" ];
|
||||
includes = [ "*.md" ];
|
||||
};
|
||||
};
|
||||
fs = pkgs.lib.fileset;
|
||||
nixFilesSrc = fs.toSource {
|
||||
@@ -73,22 +124,42 @@ let
|
||||
};
|
||||
|
||||
in
|
||||
{
|
||||
rec {
|
||||
inherit pkgs fmt;
|
||||
requestReviews = pkgs.callPackage ./request-reviews { };
|
||||
codeownersValidator = pkgs.callPackage ./codeowners-validator { };
|
||||
eval = pkgs.callPackage ./eval { };
|
||||
|
||||
# FIXME(lf-): it might be useful to test other Nix implementations
|
||||
# (nixVersions.stable and Lix) here somehow at some point to ensure we don't
|
||||
# have eval divergence.
|
||||
eval = pkgs.callPackage ./eval {
|
||||
nix = pkgs.lib.getAttrFromPath (pkgs.lib.splitString "." nixPath) pkgs;
|
||||
};
|
||||
|
||||
# CI jobs
|
||||
lib-tests = import ../lib/tests/release.nix { inherit pkgs; };
|
||||
manual-nixos = (import ../nixos/release.nix { }).manual.${system} or null;
|
||||
manual-nixpkgs = (import ../pkgs/top-level/release.nix { }).manual;
|
||||
manual-nixpkgs-tests = (import ../pkgs/top-level/release.nix { }).manual.tests;
|
||||
nixpkgs-vet = pkgs.callPackage ./nixpkgs-vet.nix { };
|
||||
manual-nixpkgs = (import ../doc { inherit pkgs; });
|
||||
manual-nixpkgs-tests = (import ../doc { inherit pkgs; }).tests;
|
||||
nixpkgs-vet = pkgs.callPackage ./nixpkgs-vet.nix {
|
||||
nix = pkgs.nixVersions.latest;
|
||||
};
|
||||
parse = pkgs.lib.recurseIntoAttrs {
|
||||
latest = pkgs.callPackage ./parse.nix { nix = pkgs.nixVersions.latest; };
|
||||
lix = pkgs.callPackage ./parse.nix { nix = pkgs.lix; };
|
||||
minimum = pkgs.callPackage ./parse.nix { nix = pkgs.nixVersions.minimum; };
|
||||
nix_2_28 = pkgs.callPackage ./parse.nix { nix = pkgs.nixVersions.nix_2_28; };
|
||||
};
|
||||
shell = import ../shell.nix { inherit nixpkgs system; };
|
||||
tarball = import ../pkgs/top-level/make-tarball.nix {
|
||||
# Mirrored from top-level release.nix:
|
||||
nixpkgs = {
|
||||
outPath = pkgs.lib.cleanSource ../.;
|
||||
revCount = 1234;
|
||||
shortRev = "abcdef";
|
||||
revision = "0000000000000000000000000000000000000000";
|
||||
};
|
||||
officialRelease = false;
|
||||
inherit pkgs lib-tests;
|
||||
nix = pkgs.nixVersions.latest;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -2,20 +2,47 @@

The code in this directory is used by the [eval.yml](../../.github/workflows/eval.yml) GitHub Actions workflow to evaluate the majority of Nixpkgs for all PRs, effectively making sure that when the development branches are processed by Hydra, no evaluation failures are encountered.

Furthermore it also allows local evaluation using
Furthermore, it also allows local evaluation using:

```
nix-build ci -A eval.full \
  --max-jobs 4 \
  --cores 2 \
  --arg chunkSize 10000 \
  --arg evalSystems '["x86_64-linux" "aarch64-darwin"]'
nix-build ci -A eval.baseline
```

- `--max-jobs`: The maximum number of derivations to run at the same time. Only each [supported system](../supportedSystems.json) gets a separate derivation, so it doesn't make sense to set this higher than that number.
- `--cores`: The number of cores to use for each job. Recommended to set this to the amount of cores on your system divided by `--max-jobs`.
- `chunkSize`: The number of attributes that are evaluated simultaneously on a single core. Lowering this decreases memory usage at the cost of increased evaluation time. If this is too high, there won't be enough chunks to process them in parallel, and will also increase evaluation time.
- `evalSystems`: The set of systems for which `nixpkgs` should be evaluated. Defaults to the four official platforms (`x86_64-linux`, `aarch64-linux`, `x86_64-darwin` and `aarch64-darwin`).
The two most important arguments are:
- `--arg evalSystems`: The set of systems for which `nixpkgs` should be evaluated.
  Defaults to the four official platforms (`x86_64-linux`, `aarch64-linux`, `x86_64-darwin` and `aarch64-darwin`).
  Example: `--arg evalSystems '["x86_64-linux" "aarch64-darwin"]'`
- `--arg quickTest`: Enables testing a single chunk of the current system only for quick iteration.
  Example: `--arg quickTest true`

A good default is to set `chunkSize` to 10000, which leads to about 3.6GB max memory usage per core, so suitable for fully utilising machines with 4 cores and 16GB memory, 8 cores and 32GB memory or 16 cores and 64GB memory.
The following arguments can be used to fine-tune performance:
- `--max-jobs`: The maximum number of derivations to run at the same time.
  Each [supported system](../supportedSystems.json) gets its own derivation, so it doesn't make sense to set this higher than the number of supported systems.
- `--cores`: The number of cores to use for each job.
  It is recommended to set this to the number of cores on your system divided by `--max-jobs`.
- `--arg chunkSize`: The number of attributes that are evaluated simultaneously on a single core.
  Lowering this decreases memory usage at the cost of increased evaluation time.
  If this is too high, there won't be enough chunks to process them in parallel, which will also increase evaluation time.
  The default is 5000.
  Example: `--arg chunkSize 10000`

Note that 16GB memory is the recommended minimum, while with less than 8GB of memory evaluation time suffers greatly.
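
For illustration, a baseline-only local run that combines these tuning arguments might look like this (the values are examples, not recommendations):

```
nix-build ci -A eval.baseline \
  --max-jobs 4 --cores 4 \
  --arg chunkSize 10000 \
  --arg evalSystems '["x86_64-linux"]'
```

`eval.full` accepts the same tuning arguments and additionally expects a `baseline` result, as described in the next section.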

## Local eval with rebuilds / comparison

To compare two commits locally, first run the following on the baseline commit:

```
nix-build ci -A eval.baseline --out-link baseline
```

Then, on the commit with your changes:

```
nix-build ci -A eval.full --arg baseline ./baseline
```

Keep in mind to otherwise pass the same set of arguments (`evalSystems`, `quickTest`, `chunkSize`) to both commands.
Running this command will evaluate the difference between the baseline statistics and the ones at the time of running the command.
From that difference, it will produce a human-readable report in `$out/step-summary.md`.
If no packages were added or removed, then performance statistics will also be generated as part of this report.
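
As a complete example (argument values are illustrative; reading the report assumes the default `./result` output link of the second command):

```
# on the baseline commit
nix-build ci -A eval.baseline --arg chunkSize 10000 --out-link baseline
# on the commit with your changes, with the same arguments
nix-build ci -A eval.full --arg chunkSize 10000 --arg baseline ./baseline
# read the generated report
cat result/step-summary.md
```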
|
||||
|
||||
83
ci/eval/attrpaths.nix
Normal file
83
ci/eval/attrpaths.nix
Normal file
@@ -0,0 +1,83 @@
|
||||
# This expression will, as efficiently as possible, dump a
|
||||
# *superset* of all attrpaths of derivations which might be
|
||||
# part of a release on *any* platform.
|
||||
#
|
||||
# This expression runs single-threaded under all current Nix
|
||||
# implementations, but much faster and with much less memory
|
||||
# used than ./outpaths.nix itself.
|
||||
#
|
||||
# Once you have the list of attrnames you can split it up into
|
||||
# $NUM_CORES batches and evaluate the outpaths separately for each
|
||||
# batch, in parallel.
|
||||
#
|
||||
# To dump the attrnames:
|
||||
#
|
||||
# nix-instantiate --eval --strict --json ci/eval/attrpaths.nix -A names
|
||||
#
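# Once dumped, the names can be split into batches for parallel outpath
# evaluation; an illustrative sketch (not the exact commands used by CI):
#
#   nix-instantiate --eval --strict --json ci/eval/attrpaths.nix -A names \
#     | jq -r '.[]' | split -n l/"$NUM_CORES" - batch-
#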
|
||||
{
|
||||
lib ? import (path + "/lib"),
|
||||
trace ? false,
|
||||
path ? ./../..,
|
||||
}:
|
||||
let
|
||||
|
||||
# TODO: Use mapAttrsToListRecursiveCond when this PR lands:
|
||||
# https://github.com/NixOS/nixpkgs/pull/395160
|
||||
justAttrNames =
|
||||
path: value:
|
||||
let
|
||||
result =
|
||||
if path == [ "AAAAAASomeThingsFailToEvaluate" ] || !(lib.isAttrs value) then
|
||||
[ ]
|
||||
else if lib.isDerivation value then
|
||||
[ path ]
|
||||
else
|
||||
lib.pipe value [
|
||||
(lib.mapAttrsToList (
|
||||
name: value:
|
||||
lib.addErrorContext "while evaluating package set attribute path '${
|
||||
lib.showAttrPath (path ++ [ name ])
|
||||
}'" (justAttrNames (path ++ [ name ]) value)
|
||||
))
|
||||
lib.concatLists
|
||||
];
|
||||
in
|
||||
lib.traceIf trace "** ${lib.showAttrPath path}" result;
|
||||
|
||||
outpaths = import ./outpaths.nix {
|
||||
inherit path;
|
||||
attrNamesOnly = true;
|
||||
};
|
||||
|
||||
paths = [
|
||||
# Some of the following are based on variants, which are disabled with `attrNamesOnly = true`.
|
||||
# Until these have been removed from release.nix / hydra, we manually add them to the list.
|
||||
[
|
||||
"pkgsLLVM"
|
||||
"stdenv"
|
||||
]
|
||||
[
|
||||
"pkgsArocc"
|
||||
"stdenv"
|
||||
]
|
||||
[
|
||||
"pkgsZig"
|
||||
"stdenv"
|
||||
]
|
||||
[
|
||||
"pkgsStatic"
|
||||
"stdenv"
|
||||
]
|
||||
[
|
||||
"pkgsMusl"
|
||||
"stdenv"
|
||||
]
|
||||
]
|
||||
++ justAttrNames [ ] outpaths;
|
||||
|
||||
names = map lib.showAttrPath paths;
|
||||
|
||||
in
|
||||
{
|
||||
inherit paths names;
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
# This file works in tandem with ../../ci/eval/default.nix
|
||||
# It turns ./release-outpaths.nix into chunks of a fixed size
|
||||
# This turns ./outpaths.nix into chunks of a fixed size.
|
||||
{
|
||||
lib ? import ../../lib,
|
||||
path ? ../..,
|
||||
@@ -7,7 +6,6 @@
|
||||
attrpathFile,
|
||||
chunkSize,
|
||||
myChunk,
|
||||
checkMeta,
|
||||
includeBroken,
|
||||
systems,
|
||||
}:
|
||||
@@ -16,9 +14,9 @@ let
|
||||
attrpaths = lib.importJSON attrpathFile;
|
||||
myAttrpaths = lib.sublist (chunkSize * myChunk) chunkSize attrpaths;
|
||||
|
||||
unfiltered = import ./release-outpaths.nix {
|
||||
unfiltered = import ./outpaths.nix {
|
||||
inherit path;
|
||||
inherit checkMeta includeBroken systems;
|
||||
inherit includeBroken systems;
|
||||
};
|
||||
|
||||
# Turns the unfiltered recursive attribute set into one that is limited to myAttrpaths
|
||||
@@ -13,6 +13,13 @@
|
||||
byName ? false,
|
||||
}:
|
||||
let
|
||||
# Usually we expect a derivation, but when evaluating in multiple separate steps, we pass
|
||||
# nix store paths around. These need to be turned into (fake) derivations again to track
|
||||
# dependencies properly.
|
||||
# We use two steps for evaluation, because we compare results from two different checkouts.
|
||||
  # CI additionally spreads evaluation across multiple workers.
|
||||
combined = if lib.isDerivation combinedDir then combinedDir else lib.toDerivation combinedDir;
|
||||
|
||||
/*
|
||||
Derivation that computes which packages are affected (added, changed or removed) between two revisions of nixpkgs.
|
||||
Note: "platforms" are "x86_64-linux", "aarch64-darwin", ...
|
||||
@@ -31,10 +38,10 @@ let
|
||||
changed: ["package2", "package3"],
|
||||
removed: ["package4"],
|
||||
},
|
||||
labels: [
|
||||
"10.rebuild-darwin: 1-10",
|
||||
"10.rebuild-linux: 1-10"
|
||||
],
|
||||
labels: {
|
||||
"10.rebuild-darwin: 1-10": true,
|
||||
"10.rebuild-linux: 1-10": true
|
||||
},
|
||||
rebuildsByKernel: {
|
||||
darwin: ["package1", "package2"],
|
||||
linux: ["package1", "package2", "package3"]
|
||||
@@ -73,12 +80,13 @@ let
|
||||
;
|
||||
|
||||
# Attrs
|
||||
# - keys: "added", "changed" and "removed"
|
||||
# - keys: "added", "changed", "removed" and "rebuilds"
|
||||
# - values: lists of `packagePlatformPath`s
|
||||
diffAttrs = builtins.fromJSON (builtins.readFile "${combinedDir}/combined-diff.json");
|
||||
diffAttrs = builtins.fromJSON (builtins.readFile "${combined}/combined-diff.json");
|
||||
|
||||
rebuilds = diffAttrs.added ++ diffAttrs.changed;
|
||||
rebuildsPackagePlatformAttrs = convertToPackagePlatformAttrs rebuilds;
|
||||
changedPackagePlatformAttrs = convertToPackagePlatformAttrs diffAttrs.changed;
|
||||
rebuildsPackagePlatformAttrs = convertToPackagePlatformAttrs diffAttrs.rebuilds;
|
||||
removedPackagePlatformAttrs = convertToPackagePlatformAttrs diffAttrs.removed;
|
||||
|
||||
changed-paths =
|
||||
let
|
||||
@@ -90,38 +98,41 @@ let
|
||||
in
|
||||
writeText "changed-paths.json" (
|
||||
builtins.toJSON {
|
||||
attrdiff = lib.mapAttrs (_: extractPackageNames) diffAttrs;
|
||||
attrdiff = lib.mapAttrs (_: extractPackageNames) { inherit (diffAttrs) added changed removed; };
|
||||
inherit
|
||||
rebuildsByPlatform
|
||||
rebuildsByKernel
|
||||
rebuildCountByKernel
|
||||
;
|
||||
labels =
|
||||
(getLabels rebuildCountByKernel)
|
||||
# Adds "10.rebuild-*-stdenv" label if the "stdenv" attribute was changed
|
||||
++ lib.mapAttrsToList (kernel: _: "10.rebuild-${kernel}-stdenv") (
|
||||
lib.filterAttrs (_: kernelRebuilds: kernelRebuilds ? "stdenv") rebuildsByKernel
|
||||
)
|
||||
# Adds the "11.by: package-maintainer" label if all of the packages directly
|
||||
# changed are maintained by the PR's author. (https://github.com/NixOS/ofborg/blob/df400f44502d4a4a80fa283d33f2e55a4e43ee90/ofborg/src/tagger.rs#L83-L88)
|
||||
++ lib.optional (
|
||||
maintainers ? ${githubAuthorId}
|
||||
&& lib.all (lib.flip lib.elem maintainers.${githubAuthorId}) (
|
||||
lib.flatten (lib.attrValues maintainers)
|
||||
)
|
||||
) "11.by: package-maintainer";
|
||||
getLabels rebuildCountByKernel
|
||||
# Sets "10.rebuild-*-stdenv" label to whether the "stdenv" attribute was changed.
|
||||
// lib.mapAttrs' (
|
||||
kernel: rebuilds: lib.nameValuePair "10.rebuild-${kernel}-stdenv" (lib.elem "stdenv" rebuilds)
|
||||
) rebuildsByKernel
|
||||
# Set the "11.by: package-maintainer" label to whether all packages directly
|
||||
# changed are maintained by the PR's author.
|
||||
// {
|
||||
"11.by: package-maintainer" =
|
||||
maintainers ? ${githubAuthorId}
|
||||
&& lib.all (lib.flip lib.elem maintainers.${githubAuthorId}) (
|
||||
lib.flatten (lib.attrValues maintainers)
|
||||
);
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
maintainers = callPackage ./maintainers.nix { } {
|
||||
changedattrs = lib.attrNames (lib.groupBy (a: a.name) rebuildsPackagePlatformAttrs);
|
||||
changedattrs = lib.attrNames (lib.groupBy (a: a.name) changedPackagePlatformAttrs);
|
||||
changedpathsjson = touchedFilesJson;
|
||||
removedattrs = lib.attrNames (lib.groupBy (a: a.name) removedPackagePlatformAttrs);
|
||||
inherit byName;
|
||||
};
|
||||
in
|
||||
runCommand "compare"
|
||||
{
|
||||
nativeBuildInputs = [
|
||||
# Don't depend on -dev outputs to reduce closure size for CI.
|
||||
nativeBuildInputs = map lib.getBin [
|
||||
jq
|
||||
(python3.withPackages (
|
||||
ps: with ps; [
|
||||
@@ -135,8 +146,8 @@ runCommand "compare"
|
||||
maintainers = builtins.toJSON maintainers;
|
||||
passAsFile = [ "maintainers" ];
|
||||
env = {
|
||||
BEFORE_DIR = "${combinedDir}/before";
|
||||
AFTER_DIR = "${combinedDir}/after";
|
||||
BEFORE_DIR = "${combined}/before";
|
||||
AFTER_DIR = "${combined}/after";
|
||||
};
|
||||
}
|
||||
''
|
||||
@@ -144,6 +155,12 @@ runCommand "compare"
|
||||
|
||||
cp ${changed-paths} $out/changed-paths.json
|
||||
|
||||
{
|
||||
echo
|
||||
echo "# Packages"
|
||||
echo
|
||||
jq -r -f ${./generate-step-summary.jq} < ${changed-paths}
|
||||
} >> $out/step-summary.md
|
||||
|
||||
if jq -e '(.attrdiff.added | length == 0) and (.attrdiff.removed | length == 0)' "${changed-paths}" > /dev/null; then
|
||||
# Chunks have changed between revisions
|
||||
@@ -173,7 +190,5 @@ runCommand "compare"
|
||||
} >> $out/step-summary.md
|
||||
fi
|
||||
|
||||
jq -r -f ${./generate-step-summary.jq} < ${changed-paths} >> $out/step-summary.md
|
||||
|
||||
cp "$maintainersPath" "$out/maintainers.json"
|
||||
''
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
{
|
||||
lib,
|
||||
}:
|
||||
# Almost directly vendored from https://github.com/NixOS/ofborg/blob/5a4e743f192fb151915fcbe8789922fa401ecf48/ofborg/src/maintainers.nix
|
||||
{
|
||||
changedattrs,
|
||||
changedpathsjson,
|
||||
removedattrs,
|
||||
byName ? false,
|
||||
}:
|
||||
let
|
||||
@@ -21,43 +21,27 @@ let
|
||||
|
||||
anyMatchingFiles = files: builtins.any anyMatchingFile files;
|
||||
|
||||
enrichedAttrs = builtins.map (name: {
|
||||
path = lib.splitString "." name;
|
||||
name = name;
|
||||
}) changedattrs;
|
||||
|
||||
validPackageAttributes = builtins.filter (
|
||||
pkg:
|
||||
if (lib.attrsets.hasAttrByPath pkg.path pkgs) then
|
||||
(
|
||||
let
|
||||
value = lib.attrsets.attrByPath pkg.path null pkgs;
|
||||
in
|
||||
if (builtins.tryEval value).success then
|
||||
if value != null then true else builtins.trace "${pkg.name} exists but is null" false
|
||||
else
|
||||
builtins.trace "Failed to access ${pkg.name} even though it exists" false
|
||||
)
|
||||
else
|
||||
builtins.trace "Failed to locate ${pkg.name}." false
|
||||
) enrichedAttrs;
|
||||
|
||||
attrsWithPackages = builtins.map (
|
||||
pkg: pkg // { package = lib.attrsets.attrByPath pkg.path null pkgs; }
|
||||
) validPackageAttributes;
|
||||
|
||||
attrsWithMaintainers = builtins.map (
|
||||
pkg:
|
||||
let
|
||||
meta = pkg.package.meta or { };
|
||||
in
|
||||
pkg
|
||||
// {
|
||||
# TODO: Refactor this so we can ping entire teams instead of the individual members.
|
||||
# Note that this will require keeping track of GH team IDs in "maintainers/teams.nix".
|
||||
maintainers = meta.maintainers or [ ];
|
||||
}
|
||||
) attrsWithPackages;
|
||||
attrsWithMaintainers = lib.pipe (changedattrs ++ removedattrs) [
|
||||
(builtins.map (
|
||||
name:
|
||||
let
|
||||
# Some packages might be reported as changed on a different platform, but
|
||||
# not even have an attribute on the platform the maintainers are requested on.
|
||||
# Fallback to `null` for these to filter them out below.
|
||||
package = lib.attrByPath (lib.splitString "." name) null pkgs;
|
||||
in
|
||||
{
|
||||
inherit name package;
|
||||
# TODO: Refactor this so we can ping entire teams instead of the individual members.
|
||||
# Note that this will require keeping track of GH team IDs in "maintainers/teams.nix".
|
||||
maintainers = package.meta.maintainers or [ ];
|
||||
}
|
||||
))
|
||||
# No need to match up packages without maintainers with their files.
|
||||
    # This also filters out attributes where `package = null`, which is the
|
||||
# case for libintl, for example.
|
||||
(builtins.filter (pkg: pkg.maintainers != [ ]))
|
||||
];
|
||||
|
||||
relevantFilenames =
|
||||
drv:
|
||||
|
||||
@@ -151,7 +151,7 @@ rec {
|
||||
lib.genAttrs [ "linux" "darwin" ] filterKernel;
|
||||
|
||||
/*
|
||||
Maps an attrs of `kernel - rebuild counts` mappings to a list of labels
|
||||
Maps an attrs of `kernel - rebuild counts` mappings to an attrs of labels
|
||||
|
||||
Turns
|
||||
{
|
||||
@@ -159,54 +159,37 @@ rec {
|
||||
darwin = 1;
|
||||
}
|
||||
into
|
||||
[
|
||||
"10.rebuild-darwin: 1"
|
||||
"10.rebuild-darwin: 1-10"
|
||||
"10.rebuild-linux: 11-100"
|
||||
]
|
||||
{
|
||||
"10.rebuild-darwin: 1" = true;
|
||||
"10.rebuild-darwin: 1-10" = true;
|
||||
"10.rebuild-darwin: 11-100" = false;
|
||||
# [...]
|
||||
"10.rebuild-darwin: 1" = false;
|
||||
"10.rebuild-darwin: 1-10" = false;
|
||||
"10.rebuild-linux: 11-100" = true;
|
||||
# [...]
|
||||
}
|
||||
*/
|
||||
getLabels =
|
||||
rebuildCountByKernel:
|
||||
lib.concatLists (
|
||||
lib.mergeAttrsList (
|
||||
lib.mapAttrsToList (
|
||||
kernel: rebuildCount:
|
||||
let
|
||||
numbers =
|
||||
if rebuildCount == 0 then
|
||||
[ "0" ]
|
||||
else if rebuildCount == 1 then
|
||||
[
|
||||
"1"
|
||||
"1-10"
|
||||
]
|
||||
else if rebuildCount <= 10 then
|
||||
[ "1-10" ]
|
||||
else if rebuildCount <= 100 then
|
||||
[ "11-100" ]
|
||||
else if rebuildCount <= 500 then
|
||||
[ "101-500" ]
|
||||
else if rebuildCount <= 1000 then
|
||||
[
|
||||
"501-1000"
|
||||
"501+"
|
||||
]
|
||||
else if rebuildCount <= 2500 then
|
||||
[
|
||||
"1001-2500"
|
||||
"501+"
|
||||
]
|
||||
else if rebuildCount <= 5000 then
|
||||
[
|
||||
"2501-5000"
|
||||
"501+"
|
||||
]
|
||||
else
|
||||
[
|
||||
"5001+"
|
||||
"501+"
|
||||
];
|
||||
range = from: to: from <= rebuildCount && (to == null || rebuildCount <= to);
|
||||
in
|
||||
lib.forEach numbers (number: "10.rebuild-${kernel}: ${number}")
|
||||
lib.mapAttrs' (number: lib.nameValuePair "10.rebuild-${kernel}: ${number}") {
|
||||
"0" = range 0 0;
|
||||
"1" = range 1 1;
|
||||
"1-10" = range 1 10;
|
||||
"11-100" = range 11 100;
|
||||
"101-500" = range 101 500;
|
||||
"501-1000" = range 501 1000;
|
||||
"501+" = range 501 null;
|
||||
"1001-2500" = range 1001 2500;
|
||||
"2501-5000" = range 2501 5000;
|
||||
"5001+" = range 5001 null;
|
||||
}
|
||||
) rebuildCountByKernel
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,15 +1,22 @@
|
||||
# Evaluates all the accessible paths in nixpkgs.
|
||||
# *This only builds on Linux* since it requires the Linux sandbox isolation to
|
||||
# be able to write in various places while evaluating inside the sandbox.
|
||||
#
|
||||
# This file is used by nixpkgs CI (see .github/workflows/eval.yml) as well as
|
||||
# being used directly as an entry point in Lix's CI (in `flake.nix` in the Lix
|
||||
# repo).
|
||||
#
|
||||
# If you know you are doing a breaking API change, please ping the nixpkgs CI
|
||||
# maintainers and the Lix maintainers (`nix eval -f . lib.teams.lix`).
|
||||
{
|
||||
callPackage,
|
||||
lib,
|
||||
runCommand,
|
||||
writeShellScript,
|
||||
writeText,
|
||||
symlinkJoin,
|
||||
time,
|
||||
procps,
|
||||
nixVersions,
|
||||
busybox,
|
||||
jq,
|
||||
python3,
|
||||
nix,
|
||||
}:
|
||||
|
||||
let
|
||||
@@ -19,20 +26,22 @@ let
|
||||
root = ../..;
|
||||
fileset = unions (
|
||||
map (lib.path.append ../..) [
|
||||
".version"
|
||||
"ci/supportedSystems.json"
|
||||
"ci/eval/attrpaths.nix"
|
||||
"ci/eval/chunk.nix"
|
||||
"ci/eval/outpaths.nix"
|
||||
"default.nix"
|
||||
"doc"
|
||||
"lib"
|
||||
"maintainers"
|
||||
"modules"
|
||||
"nixos"
|
||||
"pkgs"
|
||||
".version"
|
||||
"ci/supportedSystems.json"
|
||||
]
|
||||
);
|
||||
};
|
||||
|
||||
nix = nixVersions.latest;
|
||||
|
||||
supportedSystems = builtins.fromJSON (builtins.readFile ../supportedSystems.json);
|
||||
|
||||
attrpathsSuperset =
|
||||
@@ -42,9 +51,10 @@ let
|
||||
runCommand "attrpaths-superset.json"
|
||||
{
|
||||
src = nixpkgs;
|
||||
nativeBuildInputs = [
|
||||
# Don't depend on -dev outputs to reduce closure size for CI.
|
||||
nativeBuildInputs = map lib.getBin [
|
||||
busybox
|
||||
nix
|
||||
time
|
||||
];
|
||||
}
|
||||
''
|
||||
@@ -53,13 +63,12 @@ let
|
||||
export GC_INITIAL_HEAP_SIZE=4g
|
||||
command time -f "Attribute eval done [%MKB max resident, %Es elapsed] %C" \
|
||||
nix-instantiate --eval --strict --json --show-trace \
|
||||
"$src/pkgs/top-level/release-attrpaths-superset.nix" \
|
||||
"$src/ci/eval/attrpaths.nix" \
|
||||
-A paths \
|
||||
-I "$src" \
|
||||
--option restrict-eval true \
|
||||
--option allow-import-from-derivation false \
|
||||
--option eval-system "${evalSystem}" \
|
||||
--arg enableWarnings false > $out/paths.json
|
||||
--option eval-system "${evalSystem}" > $out/paths.json
|
||||
'';
|
||||
|
||||
singleSystem =
|
||||
@@ -67,13 +76,14 @@ let
|
||||
# The system to evaluate.
|
||||
# Note that this is intentionally not called `system`,
|
||||
# because `--argstr system` would only be passed to the ci/default.nix file!
|
||||
evalSystem,
|
||||
evalSystem ? builtins.currentSystem,
|
||||
# The path to the `paths.json` file from `attrpathsSuperset`
|
||||
attrpathFile ? "${attrpathsSuperset { inherit evalSystem; }}/paths.json",
|
||||
# The number of attributes per chunk, see ./README.md for more info.
|
||||
chunkSize,
|
||||
checkMeta ? true,
|
||||
includeBroken ? true,
|
||||
chunkSize ? 5000,
|
||||
|
||||
# Don't try to eval packages marked as broken.
|
||||
includeBroken ? false,
|
||||
# Whether to just evaluate a single chunk for quick testing
|
||||
quickTest ? false,
|
||||
}:
|
||||
@@ -91,7 +101,7 @@ let
|
||||
set +e
|
||||
command time -o "$outputDir/timestats/$myChunk" \
|
||||
-f "Chunk $myChunk on $system done [%MKB max resident, %Es elapsed] %C" \
|
||||
nix-env -f "${nixpkgs}/pkgs/top-level/release-attrpaths-parallel.nix" \
|
||||
nix-env -f "${nixpkgs}/ci/eval/chunk.nix" \
|
||||
--eval-system "$system" \
|
||||
--option restrict-eval true \
|
||||
--option allow-import-from-derivation false \
|
||||
@@ -102,7 +112,6 @@ let
|
||||
--arg myChunk "$myChunk" \
|
||||
--arg attrpathFile "${attrpathFile}" \
|
||||
--arg systems "[ \"$system\" ]" \
|
||||
--arg checkMeta ${lib.boolToString checkMeta} \
|
||||
--arg includeBroken ${lib.boolToString includeBroken} \
|
||||
-I ${nixpkgs} \
|
||||
-I ${attrpathFile} \
|
||||
@@ -124,15 +133,17 @@ let
|
||||
in
|
||||
runCommand "nixpkgs-eval-${evalSystem}"
|
||||
{
|
||||
nativeBuildInputs = [
|
||||
nix
|
||||
time
|
||||
procps
|
||||
# Don't depend on -dev outputs to reduce closure size for CI.
|
||||
nativeBuildInputs = map lib.getBin [
|
||||
busybox
|
||||
jq
|
||||
nix
|
||||
];
|
||||
env = {
|
||||
inherit evalSystem chunkSize;
|
||||
};
|
||||
__structuredAttrs = true;
|
||||
unsafeDiscardReferences.out = true;
|
||||
}
|
||||
''
|
||||
export NIX_STATE_DIR=$(mktemp -d)
|
||||
@@ -153,14 +164,14 @@ let
|
||||
# Record and print stats on free memory and swap in the background
|
||||
(
|
||||
while true; do
|
||||
availMemory=$(free -b | grep Mem | awk '{print $7}')
|
||||
freeSwap=$(free -b | grep Swap | awk '{print $4}')
|
||||
echo "Available memory: $(( availMemory / 1024 / 1024 )) MiB, free swap: $(( freeSwap / 1024 / 1024 )) MiB"
|
||||
availMemory=$(free -m | grep Mem | awk '{print $7}')
|
||||
freeSwap=$(free -m | grep Swap | awk '{print $4}')
|
||||
echo "Available memory: $(( availMemory )) MiB, free swap: $(( freeSwap )) MiB"
|
||||
|
||||
if [[ ! -f "$out/${evalSystem}/min-avail-memory" ]] || (( availMemory < $(<$out/${evalSystem}/min-avail-memory) )); then
|
||||
echo "$availMemory" > $out/${evalSystem}/min-avail-memory
|
||||
fi
|
||||
if [[ ! -f $out/${evalSystem}/min-free-swap ]] || (( availMemory < $(<$out/${evalSystem}/min-free-swap) )); then
|
||||
if [[ ! -f $out/${evalSystem}/min-free-swap ]] || (( freeSwap < $(<$out/${evalSystem}/min-free-swap) )); then
|
||||
echo "$freeSwap" > $out/${evalSystem}/min-free-swap
|
||||
fi
|
||||
sleep 4
|
||||
@@ -199,7 +210,8 @@ let
|
||||
}:
|
||||
runCommand "combined-eval"
|
||||
{
|
||||
nativeBuildInputs = [
|
||||
# Don't depend on -dev outputs to reduce closure size for CI.
|
||||
nativeBuildInputs = map lib.getBin [
|
||||
jq
|
||||
];
|
||||
}
|
||||
@@ -211,7 +223,8 @@ let
|
||||
reduce .[] as $item ({}; {
|
||||
added: (.added + $item.added),
|
||||
changed: (.changed + $item.changed),
|
||||
removed: (.removed + $item.removed)
|
||||
removed: (.removed + $item.removed),
|
||||
rebuilds: (.rebuilds + $item.rebuilds)
|
||||
})
|
||||
' > $out/combined-diff.json
|
||||
|
||||
@@ -228,36 +241,62 @@ let
|
||||
|
||||
compare = callPackage ./compare { };
|
||||
|
||||
baseline =
|
||||
{
|
||||
# Whether to evaluate on a specific set of systems, by default all are evaluated
|
||||
evalSystems ? if quickTest then [ "x86_64-linux" ] else supportedSystems,
|
||||
# The number of attributes per chunk, see ./README.md for more info.
|
||||
chunkSize ? 5000,
|
||||
quickTest ? false,
|
||||
}:
|
||||
symlinkJoin {
|
||||
name = "nixpkgs-eval-baseline";
|
||||
paths = map (
|
||||
evalSystem:
|
||||
singleSystem {
|
||||
inherit quickTest evalSystem chunkSize;
|
||||
}
|
||||
) evalSystems;
|
||||
};
|
||||
|
||||
full =
|
||||
{
|
||||
# Whether to evaluate on a specific set of systems, by default all are evaluated
|
||||
evalSystems ? if quickTest then [ "x86_64-linux" ] else supportedSystems,
|
||||
# The number of attributes per chunk, see ./README.md for more info.
|
||||
chunkSize,
|
||||
chunkSize ? 5000,
|
||||
quickTest ? false,
|
||||
baseline,
|
||||
# Which maintainer should be considered the author?
|
||||
# Defaults to nixpkgs-ci which is not a maintainer and skips the check.
|
||||
githubAuthorId ? "nixpkgs-ci",
|
||||
# What files have been touched? Defaults to none; use the expression below to calculate it.
|
||||
# ```
|
||||
# git diff --name-only --merge-base master HEAD \
|
||||
# | jq --raw-input --slurp 'split("\n")[:-1]' > touched-files.json
|
||||
# ```
|
||||
touchedFilesJson ? builtins.toFile "touched-files.json" "[ ]",
|
||||
}:
|
||||
let
|
||||
diffs = symlinkJoin {
|
||||
name = "diffs";
|
||||
name = "nixpkgs-eval-diffs";
|
||||
paths = map (
|
||||
evalSystem:
|
||||
let
|
||||
eval = singleSystem {
|
||||
inherit quickTest evalSystem chunkSize;
|
||||
};
|
||||
in
|
||||
diff {
|
||||
inherit evalSystem;
|
||||
# Local "full" evaluation doesn't do a real diff.
|
||||
beforeDir = eval;
|
||||
afterDir = eval;
|
||||
beforeDir = baseline;
|
||||
afterDir = singleSystem {
|
||||
inherit quickTest evalSystem chunkSize;
|
||||
};
|
||||
}
|
||||
) evalSystems;
|
||||
};
|
||||
comparisonReport = compare {
|
||||
combinedDir = combine { diffDir = diffs; };
|
||||
inherit touchedFilesJson githubAuthorId;
|
||||
};
|
||||
in
|
||||
combine {
|
||||
diffDir = diffs;
|
||||
};
|
||||
comparisonReport;
|
||||
|
||||
in
|
||||
{
|
||||
@@ -268,7 +307,8 @@ in
|
||||
combine
|
||||
compare
|
||||
# The above three are used by separate VMs in a GitHub workflow,
|
||||
# while the below is intended for testing on a single local machine
|
||||
# while the below are intended for testing on a single local machine
|
||||
baseline
|
||||
full
|
||||
;
|
||||
}
|
||||
|
||||
@@ -11,6 +11,14 @@
|
||||
}:
|
||||
|
||||
let
|
||||
# Usually we expect a derivation, but when evaluating in multiple separate steps, we pass
|
||||
# nix store paths around. These need to be turned into (fake) derivations again to track
|
||||
# dependencies properly.
|
||||
# We use two steps for evaluation, because we compare results from two different checkouts.
|
||||
  # CI additionally spreads evaluation across multiple workers.
|
||||
before = if lib.isDerivation beforeDir then beforeDir else lib.toDerivation beforeDir;
|
||||
after = if lib.isDerivation afterDir then afterDir else lib.toDerivation afterDir;
|
||||
|
||||
/*
|
||||
Computes the key difference between two attrs
|
||||
|
||||
@@ -18,13 +26,20 @@ let
|
||||
added: [ <keys only in the second object> ],
|
||||
removed: [ <keys only in the first object> ],
|
||||
changed: [ <keys with different values between the two objects> ],
|
||||
rebuilds: [ <keys in the second object with values not present at all in first object> ],
|
||||
}
|
||||
*/
|
||||
diff =
|
||||
old: new:
|
||||
let
|
||||
filterKeys = cond: attrs: lib.attrNames (lib.filterAttrs cond attrs);
|
||||
oldOutputs = lib.pipe old [
|
||||
(lib.mapAttrsToList (_: lib.attrValues))
|
||||
lib.concatLists
|
||||
(lib.flip lib.genAttrs (_: true))
|
||||
];
|
||||
in
|
||||
old: new: {
|
||||
{
|
||||
added = filterKeys (n: _: !(old ? ${n})) new;
|
||||
removed = filterKeys (n: _: !(new ? ${n})) old;
|
||||
changed = filterKeys (
|
||||
@@ -35,6 +50,16 @@ let
|
||||
# Filter out attributes that are the same as the new value
|
||||
&& (v != (new.${n}))
|
||||
) old;
|
||||
# A "rebuild" is every attrpath ...
|
||||
rebuilds = filterKeys (
|
||||
_: pkg:
|
||||
# ... that has at least one output ...
|
||||
lib.any (
|
||||
output:
|
||||
# ... which has not been built in "old" already.
|
||||
!(oldOutputs ? ${output})
|
||||
) (lib.attrValues pkg)
|
||||
) new;
|
||||
};
|
||||
|
||||
getAttrs =
|
||||
@@ -47,15 +72,15 @@ let
|
||||
in
|
||||
builtins.fromJSON data;
|
||||
|
||||
beforeAttrs = getAttrs beforeDir;
|
||||
afterAttrs = getAttrs afterDir;
|
||||
beforeAttrs = getAttrs before;
|
||||
afterAttrs = getAttrs after;
|
||||
diffAttrs = diff beforeAttrs afterAttrs;
|
||||
diffJson = writeText "diff.json" (builtins.toJSON diffAttrs);
|
||||
in
|
||||
runCommand "diff" { } ''
|
||||
mkdir -p $out/${evalSystem}
|
||||
|
||||
cp -r ${beforeDir} $out/before
|
||||
cp -r ${afterDir} $out/after
|
||||
cp -r ${before} $out/before
|
||||
cp -r ${after} $out/after
|
||||
cp ${diffJson} $out/${evalSystem}/diff.json
|
||||
''
|
||||
|
||||
@@ -1,19 +1,16 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
# When using as a callable script, passing `--argstr path some/path` overrides $PWD.
|
||||
#!nix-shell -p nix -i "nix-env -qaP --no-name --out-path --arg checkMeta true -f pkgs/top-level/release-outpaths.nix"
|
||||
#!nix-shell -p nix -i "nix-env -qaP --no-name --out-path -f ci/eval/outpaths.nix"
|
||||
|
||||
# Vendored from:
|
||||
# https://raw.githubusercontent.com/NixOS/ofborg/74f38efa7ef6f0e8e71ec3bfc675ae4fb57d7491/ofborg/src/outpaths.nix
|
||||
{
|
||||
checkMeta,
|
||||
includeBroken ? true, # set this to false to exclude meta.broken packages from the output
|
||||
path ? ./../..,
|
||||
|
||||
# used by pkgs/top-level/release-attrnames-superset.nix
|
||||
# used by ./attrpaths.nix
|
||||
attrNamesOnly ? false,
|
||||
|
||||
# Set this to `null` to build for builtins.currentSystem only
|
||||
systems ? builtins.fromJSON (builtins.readFile ../../ci/supportedSystems.json),
|
||||
systems ? builtins.fromJSON (builtins.readFile ../supportedSystems.json),
|
||||
}:
|
||||
let
|
||||
lib = import (path + "/lib");
|
||||
@@ -27,9 +24,10 @@ let
|
||||
config = {
|
||||
allowAliases = false;
|
||||
allowBroken = includeBroken;
|
||||
allowUnfree = false;
|
||||
allowUnfree = true;
|
||||
allowInsecurePredicate = x: true;
|
||||
checkMeta = checkMeta;
|
||||
allowVariants = !attrNamesOnly;
|
||||
checkMeta = true;
|
||||
|
||||
handleEvalIssue =
|
||||
reason: errormsg:
|
||||
3
ci/github-script/.editorconfig
Normal file
3
ci/github-script/.editorconfig
Normal file
@@ -0,0 +1,3 @@
|
||||
[run]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
2
ci/github-script/.gitignore
vendored
Normal file
2
ci/github-script/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules
|
||||
step-summary.md
|
||||
2
ci/github-script/.npmrc
Normal file
2
ci/github-script/.npmrc
Normal file
@@ -0,0 +1,2 @@
|
||||
package-lock-only = true
|
||||
save-exact = true
|
||||
17
ci/github-script/README.md
Normal file
17
ci/github-script/README.md
Normal file
@@ -0,0 +1,17 @@
# GitHub-specific CI scripts

This folder contains [`actions/github-script`](https://github.com/actions/github-script)-based JavaScript code.
It provides a `nix-shell` environment to run and test these actions locally.

To run any of the scripts locally:

- Enter `nix-shell` in `./ci/github-script`.
- Ensure `gh` is authenticated.

## Check commits

Run `./run commits OWNER REPO PR`, where OWNER is your username or "NixOS", REPO is the name of your fork or "nixpkgs", and PR is the number of the pull request to check.

## Labeler

Run `./run labels OWNER REPO`, where OWNER is your username or "NixOS" and REPO is the name of your fork or "nixpkgs".
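
For example, with a purely illustrative pull request number:

```
cd ci/github-script
nix-shell
# inside the shell, with `gh` authenticated:
./run commits NixOS nixpkgs 123456
./run labels NixOS nixpkgs
```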
|
||||
321
ci/github-script/commits.js
Normal file
321
ci/github-script/commits.js
Normal file
@@ -0,0 +1,321 @@
|
||||
module.exports = async ({ github, context, core, dry, cherryPicks }) => {
|
||||
const { execFileSync } = require('node:child_process')
|
||||
const { classify } = require('../supportedBranches.js')
|
||||
const withRateLimit = require('./withRateLimit.js')
|
||||
const { dismissReviews, postReview } = require('./reviews.js')
|
||||
|
||||
await withRateLimit({ github, core }, async (stats) => {
|
||||
stats.prs = 1
|
||||
|
||||
const pull_number = context.payload.pull_request.number
|
||||
|
||||
const job_url =
|
||||
context.runId &&
|
||||
(
|
||||
await github.paginate(github.rest.actions.listJobsForWorkflowRun, {
|
||||
...context.repo,
|
||||
run_id: context.runId,
|
||||
per_page: 100,
|
||||
})
|
||||
).find(({ name }) => name.endsWith('Check / commits')).html_url +
|
||||
'?pr=' +
|
||||
pull_number
|
||||
|
||||
async function extract({ sha, commit }) {
|
||||
const noCherryPick = Array.from(
|
||||
commit.message.matchAll(/^Not-cherry-picked-because: (.*)$/gm),
|
||||
).at(0)
|
||||
|
||||
if (noCherryPick)
|
||||
return {
|
||||
sha,
|
||||
commit,
|
||||
severity: 'important',
|
||||
message: `${sha} is not a cherry-pick, because: ${noCherryPick[1]}. Please review this commit manually.`,
|
||||
type: 'no-cherry-pick',
|
||||
}
|
||||
|
||||
// Using the last line with "cherry" + hash, because a chained backport
|
||||
// can result in multiple of those lines. Only the last one counts.
|
||||
const cherry = Array.from(
|
||||
commit.message.matchAll(/cherry.*([0-9a-f]{40})/g),
|
||||
).at(-1)
|
||||
|
||||
if (!cherry)
|
||||
return {
|
||||
sha,
|
||||
commit,
|
||||
severity: 'warning',
|
||||
message: `Couldn't locate original commit hash in message of ${sha}.`,
|
||||
type: 'no-commit-hash',
|
||||
}
|
||||
|
||||
const original_sha = cherry[1]
|
||||
|
||||
let branches
|
||||
try {
|
||||
branches = (
|
||||
await github.request({
|
||||
// This is an undocumented endpoint to fetch the branches a commit is part of.
|
||||
            // There is no equivalent in either the REST or the GraphQL API.
|
||||
// The endpoint itself is unlikely to go away, because GitHub uses it to display
|
||||
// the list of branches on the detail page of a commit.
|
||||
url: `https://github.com/${context.repo.owner}/${context.repo.repo}/branch_commits/${original_sha}`,
|
||||
headers: {
|
||||
accept: 'application/json',
|
||||
},
|
||||
})
|
||||
).data.branches
|
||||
.map(({ branch }) => branch)
|
||||
.filter((branch) => classify(branch).type.includes('development'))
|
||||
} catch (e) {
|
||||
// For some unknown reason a 404 error comes back as 500 without any more details in a GitHub Actions runner.
|
||||
// Ignore these to return a regular error message below.
|
||||
if (![404, 500].includes(e.status)) throw e
|
||||
}
|
||||
if (!branches?.length)
|
||||
return {
|
||||
sha,
|
||||
commit,
|
||||
severity: 'error',
|
||||
message: `${original_sha} given in ${sha} not found in any pickable branch.`,
|
||||
}
|
||||
|
||||
return {
|
||||
sha,
|
||||
commit,
|
||||
original_sha,
|
||||
}
|
||||
}
|
||||
|
||||
function diff({ sha, commit, original_sha }) {
|
||||
const diff = execFileSync('git', [
|
||||
'-C',
|
||||
__dirname,
|
||||
'range-diff',
|
||||
'--no-color',
|
||||
'--ignore-all-space',
|
||||
'--no-notes',
|
||||
// 100 means "any change will be reported"; 0 means "no change will be reported"
|
||||
'--creation-factor=100',
|
||||
`${original_sha}~..${original_sha}`,
|
||||
`${sha}~..${sha}`,
|
||||
])
|
||||
.toString()
|
||||
.split('\n')
|
||||
// First line contains commit SHAs, which we'll print separately.
|
||||
.slice(1)
|
||||
// # The output of `git range-diff` is indented with 4 spaces, but we'll control indentation manually.
|
||||
.map((line) => line.replace(/^ {4}/, ''))
|
||||
|
||||
if (!diff.some((line) => line.match(/^[+-]{2}/)))
|
||||
return {
|
||||
sha,
|
||||
commit,
|
||||
severity: 'info',
|
||||
message: `✔ ${original_sha} is highly similar to ${sha}.`,
|
||||
}
|
||||
|
||||
const colored_diff = execFileSync('git', [
|
||||
'-C',
|
||||
__dirname,
|
||||
'range-diff',
|
||||
'--color',
|
||||
'--no-notes',
|
||||
'--creation-factor=100',
|
||||
`${original_sha}~..${original_sha}`,
|
||||
`${sha}~..${sha}`,
|
||||
]).toString()
|
||||
|
||||
return {
|
||||
sha,
|
||||
commit,
|
||||
diff,
|
||||
colored_diff,
|
||||
severity: 'warning',
|
||||
message: `Difference between ${sha} and original ${original_sha} may warrant inspection.`,
|
||||
type: 'diff',
|
||||
}
|
||||
}
|
||||
|
||||
// For now we short-circuit the list of commits when cherryPicks should not be checked.
|
||||
// This will not run any checks, but still trigger the "dismiss reviews" part below.
|
||||
const commits = !cherryPicks
|
||||
? []
|
||||
: await github.paginate(github.rest.pulls.listCommits, {
|
||||
...context.repo,
|
||||
pull_number,
|
||||
})
|
||||
|
||||
const extracted = await Promise.all(commits.map(extract))
|
||||
|
||||
const fetch = extracted
|
||||
.filter(({ severity }) => !severity)
|
||||
.flatMap(({ sha, original_sha }) => [sha, original_sha])
|
||||
|
||||
if (fetch.length > 0) {
|
||||
// Fetching all commits we need for diff at once is much faster than any other method.
|
||||
execFileSync('git', [
|
||||
'-C',
|
||||
__dirname,
|
||||
'fetch',
|
||||
'--depth=2',
|
||||
'origin',
|
||||
...fetch,
|
||||
])
|
||||
}
|
||||
|
||||
const results = extracted.map((result) =>
|
||||
result.severity ? result : diff(result),
|
||||
)
|
||||
|
||||
// Log all results without truncation, with better highlighting and all whitespace changes to the job log.
|
||||
results.forEach(({ sha, commit, severity, message, colored_diff }) => {
|
||||
core.startGroup(`Commit ${sha}`)
|
||||
core.info(`Author: ${commit.author.name} ${commit.author.email}`)
|
||||
core.info(`Date: ${new Date(commit.author.date)}`)
|
||||
switch (severity) {
|
||||
case 'error':
|
||||
core.error(message)
|
||||
break
|
||||
case 'warning':
|
||||
core.warning(message)
|
||||
break
|
||||
default:
|
||||
core.info(message)
|
||||
}
|
||||
core.endGroup()
|
||||
if (colored_diff) core.info(colored_diff)
|
||||
})
|
||||
|
||||
// Only create step summary below in case of warnings or errors.
|
||||
// Also clean up older reviews, when all checks are good now.
|
||||
// An empty results array will always trigger this condition, which is helpful
|
||||
// to clean up reviews created by the prepare step when on the wrong branch.
|
||||
if (results.every(({ severity }) => severity === 'info')) {
|
||||
await dismissReviews({ github, context, dry })
|
||||
return
|
||||
}
|
||||
|
||||
// In the case of "error" severity, we also fail the job.
|
||||
// Those should be considered blocking and not be dismissable via review.
|
||||
if (results.some(({ severity }) => severity === 'error'))
|
||||
process.exitCode = 1
|
||||
|
||||
core.summary.addRaw(
|
||||
'This report is automatically generated by the `PR / Check / cherry-pick` CI workflow.',
|
||||
true,
|
||||
)
|
||||
core.summary.addEOL()
|
||||
core.summary.addRaw(
|
||||
"Some of the commits in this PR require the author's and reviewer's attention.",
|
||||
true,
|
||||
)
|
||||
core.summary.addEOL()
|
||||
|
||||
if (results.some(({ type }) => type === 'no-commit-hash')) {
|
||||
core.summary.addRaw(
|
||||
'Please follow the [backporting guidelines](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md#how-to-backport-pull-requests) and cherry-pick with the `-x` flag.',
|
||||
true,
|
||||
)
|
||||
core.summary.addRaw(
|
||||
'This requires changes to the unstable `master` and `staging` branches first, before backporting them.',
|
||||
true,
|
||||
)
|
||||
core.summary.addEOL()
|
||||
core.summary.addRaw(
|
||||
'Occasionally, commits are not cherry-picked at all, for example when updating minor versions of packages which have already advanced to the next major on unstable.',
|
||||
true,
|
||||
)
|
||||
core.summary.addRaw(
|
||||
'These commits can optionally be marked with a `Not-cherry-picked-because: <reason>` footer.',
|
||||
true,
|
||||
)
|
||||
core.summary.addEOL()
|
||||
}
|
||||
|
||||
if (results.some(({ type }) => type === 'diff')) {
|
||||
core.summary.addRaw(
|
||||
'Sometimes it is not possible to cherry-pick exactly the same patch.',
|
||||
true,
|
||||
)
|
||||
core.summary.addRaw(
|
||||
'This most frequently happens when resolving merge conflicts.',
|
||||
true,
|
||||
)
|
||||
core.summary.addRaw(
|
||||
'The range-diff will help to review the resolution of conflicts.',
|
||||
true,
|
||||
)
|
||||
core.summary.addEOL()
|
||||
}
|
||||
|
||||
core.summary.addRaw(
|
||||
'If you need to merge this PR despite the warnings, please [dismiss](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/dismissing-a-pull-request-review) this review shortly before merging.',
|
||||
true,
|
||||
)
|
||||
|
||||
results.forEach(({ severity, message, diff }) => {
|
||||
if (severity === 'info') return
|
||||
|
||||
// The docs for markdown alerts only show examples with markdown blockquote syntax, like this:
|
||||
// > [!WARNING]
|
||||
// > message
|
||||
// However, our testing shows that this also works with a `<blockquote>` html tag, as long as there
|
||||
// is an empty line:
|
||||
// <blockquote>
|
||||
//
|
||||
// [!WARNING]
|
||||
// message
|
||||
// </blockquote>
|
||||
// Whether this is intended or just an implementation detail is unclear.
|
||||
core.summary.addRaw('<blockquote>')
|
||||
core.summary.addRaw(
|
||||
`\n\n[!${{ important: 'IMPORTANT', warning: 'WARNING', error: 'CAUTION' }[severity]}]`,
|
||||
true,
|
||||
)
|
||||
core.summary.addRaw(`${message}`, true)
|
||||
|
||||
if (diff) {
|
||||
// Limit the output to 10k bytes and remove the last, potentially incomplete line, because GitHub
|
||||
        // comments are limited in length. The value of 10k is arbitrary, with the assumption that after
|
||||
// the range-diff becomes a certain size, a reviewer is better off reviewing the regular diff in
|
||||
// GitHub's UI anyway, thus treating the commit as "new" and not cherry-picked.
|
||||
// Note: if multiple commits are close to the limit, this approach could still lead to a comment
|
||||
// that's too long. We think this is unlikely to happen, and so don't deal with it explicitly.
|
||||
const truncated = []
|
||||
let total_length = 0
|
||||
for (line of diff) {
|
||||
total_length += line.length
|
||||
if (total_length > 10000) {
|
||||
truncated.push('', '[...truncated...]')
|
||||
break
|
||||
} else {
|
||||
truncated.push(line)
|
||||
}
|
||||
}
|
||||
|
||||
core.summary.addRaw('<details><summary>Show diff</summary>')
|
||||
core.summary.addRaw('\n\n``````````diff', true)
|
||||
core.summary.addRaw(truncated.join('\n'), true)
|
||||
core.summary.addRaw('``````````', true)
|
||||
core.summary.addRaw('</details>')
|
||||
}
|
||||
|
||||
core.summary.addRaw('</blockquote>')
|
||||
})
|
||||
|
||||
if (job_url)
|
||||
core.summary.addRaw(
|
||||
`\n\n_Hint: The full diffs are also available in the [runner logs](${job_url}) with slightly better highlighting._`,
|
||||
)
|
||||
|
||||
const body = core.summary.stringify()
|
||||
core.summary.write()
|
||||
|
||||
// Posting a review could fail for very long comments. This can only happen with
|
||||
// multiple commits all hitting the truncation limit for the diff. If you ever hit
|
||||
// this case, consider just splitting up those commits into multiple PRs.
|
||||
await postReview({ github, context, core, dry, body })
|
||||
})
|
||||
}
|
||||
469
ci/github-script/labels.js
Normal file
469
ci/github-script/labels.js
Normal file
@@ -0,0 +1,469 @@
|
||||
module.exports = async ({ github, context, core, dry }) => {
|
||||
const path = require('node:path')
|
||||
const { DefaultArtifactClient } = require('@actions/artifact')
|
||||
const { readFile, writeFile } = require('node:fs/promises')
|
||||
const withRateLimit = require('./withRateLimit.js')
|
||||
|
||||
const artifactClient = new DefaultArtifactClient()
|
||||
|
||||
async function handlePullRequest({ item, stats }) {
|
||||
const log = (k, v) => core.info(`PR #${item.number} - ${k}: ${v}`)
|
||||
|
||||
const pull_number = item.number
|
||||
|
||||
// This API request is important for the merge-conflict label, because it triggers the
|
||||
// creation of a new test merge commit. This is needed to actually determine the state of a PR.
|
||||
const pull_request = (
|
||||
await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number,
|
||||
})
|
||||
).data
|
||||
|
||||
const reviews = await github.paginate(github.rest.pulls.listReviews, {
|
||||
...context.repo,
|
||||
pull_number,
|
||||
})
|
||||
|
||||
const approvals = new Set(
|
||||
reviews
|
||||
.filter((review) => review.state === 'APPROVED')
|
||||
.map((review) => review.user?.id),
|
||||
)
|
||||
|
||||
// After creation of a Pull Request, `merge_commit_sha` will be null initially:
|
||||
// The very first merge commit will only be calculated after a little while.
|
||||
// To avoid labeling the PR as conflicted before that, we wait a few minutes.
|
||||
// This is intentionally less than the time that Eval takes, so that the label job
|
||||
// running after Eval can indeed label the PR as conflicted if that is the case.
|
||||
const merge_commit_sha_valid =
|
||||
Date.now() - new Date(pull_request.created_at) > 3 * 60 * 1000
|
||||
|
||||
const prLabels = {
|
||||
// We intentionally don't use the mergeable or mergeable_state attributes.
|
||||
// Those have an intermediate state while the test merge commit is created.
|
||||
// This doesn't work well for us, because we might have just triggered another
|
||||
      // test merge commit creation by requesting the pull request via the API at the start
|
||||
// of this function.
|
||||
// The attribute merge_commit_sha keeps the old value of null or the hash *until*
|
||||
// the new test merge commit has either successfully been created or failed so.
|
||||
// This essentially means we are updating the merge conflict label in two steps:
|
||||
// On the first pass of the day, we just fetch the pull request, which triggers
|
||||
// the creation. At this stage, the label is likely not updated, yet.
|
||||
// The second pass will then read the result from the first pass and set the label.
|
||||
'2.status: merge conflict':
|
||||
merge_commit_sha_valid && !pull_request.merge_commit_sha,
|
||||
'12.approvals: 1': approvals.size === 1,
|
||||
'12.approvals: 2': approvals.size === 2,
|
||||
'12.approvals: 3+': approvals.size >= 3,
|
||||
'12.first-time contribution': [
|
||||
'NONE',
|
||||
'FIRST_TIMER',
|
||||
'FIRST_TIME_CONTRIBUTOR',
|
||||
].includes(pull_request.author_association),
|
||||
}
|
||||
|
||||
const { id: run_id, conclusion } =
|
||||
(
|
||||
await github.rest.actions.listWorkflowRuns({
|
||||
...context.repo,
|
||||
workflow_id: 'pr.yml',
|
||||
event: 'pull_request_target',
|
||||
exclude_pull_requests: true,
|
||||
head_sha: pull_request.head.sha,
|
||||
})
|
||||
).data.workflow_runs[0] ??
|
||||
// TODO: Remove this after 2025-09-17, at which point all eval.yml artifacts will have expired.
|
||||
(
|
||||
await github.rest.actions.listWorkflowRuns({
|
||||
...context.repo,
|
||||
// In older PRs, we need eval.yml instead of pr.yml.
|
||||
workflow_id: 'eval.yml',
|
||||
event: 'pull_request_target',
|
||||
status: 'success',
|
||||
exclude_pull_requests: true,
|
||||
head_sha: pull_request.head.sha,
|
||||
})
|
||||
).data.workflow_runs[0] ??
|
||||
{}
|
||||
|
||||
// Newer PRs might not have run Eval to completion, yet.
|
||||
// Older PRs might not have an eval.yml workflow, yet.
|
||||
// In either case we continue without fetching an artifact on a best-effort basis.
|
||||
log('Last eval run', run_id ?? '<n/a>')
|
||||
|
||||
if (conclusion === 'success') {
|
||||
// Check for any human reviews other than GitHub actions and other GitHub apps.
|
||||
// Accounts could be deleted as well, so don't count them.
|
||||
const humanReviews = reviews.filter(
|
||||
(r) =>
|
||||
r.user && !r.user.login.endsWith('[bot]') && r.user.type !== 'Bot',
|
||||
)
|
||||
|
||||
Object.assign(prLabels, {
|
||||
// We only set this label if the latest eval run was successful, because if it was not, it
|
||||
// *could* have requested reviewers. We will let the PR author fix CI first, before "escalating"
|
||||
// this PR to "needs: reviewer".
|
||||
// Since the first Eval run on a PR always sets rebuild labels, the same PR will be "recently
|
||||
// updated" for the next scheduled run. Thus, this label will still be set within a few minutes
|
||||
// after a PR is created, if required.
|
||||
// Note that a "requested reviewer" disappears once they have given a review, so we check
|
||||
// existing reviews, too.
|
||||
'9.needs: reviewer':
|
||||
!pull_request.draft &&
|
||||
pull_request.requested_reviewers.length === 0 &&
|
||||
humanReviews.length === 0,
|
||||
})
|
||||
}
|
||||
|
||||
const artifact =
|
||||
run_id &&
|
||||
(
|
||||
await github.rest.actions.listWorkflowRunArtifacts({
|
||||
...context.repo,
|
||||
run_id,
|
||||
name: 'comparison',
|
||||
})
|
||||
).data.artifacts[0]
|
||||
|
||||
    // Instead of checking the boolean artifact.expired, we will give ourselves a minute to
|
||||
// actually download the artifact in the next step and avoid that race condition.
|
||||
// Older PRs, where the workflow run was already eval.yml, but the artifact was not
|
||||
// called "comparison", yet, will skip the download.
|
||||
const expired =
|
||||
!artifact ||
|
||||
new Date(artifact?.expires_at ?? 0) < new Date(Date.now() + 60 * 1000)
|
||||
log('Artifact expires at', artifact?.expires_at ?? '<n/a>')
|
||||
if (!expired) {
|
||||
stats.artifacts++
|
||||
|
||||
await artifactClient.downloadArtifact(artifact.id, {
|
||||
findBy: {
|
||||
repositoryName: context.repo.repo,
|
||||
repositoryOwner: context.repo.owner,
|
||||
token: core.getInput('github-token'),
|
||||
},
|
||||
path: path.resolve(pull_number.toString()),
|
||||
expectedHash: artifact.digest,
|
||||
})
|
||||
|
||||
const maintainers = new Set(
|
||||
Object.keys(
|
||||
JSON.parse(
|
||||
await readFile(`${pull_number}/maintainers.json`, 'utf-8'),
|
||||
),
|
||||
).map((m) => Number.parseInt(m, 10)),
|
||||
)
|
||||
|
||||
const evalLabels = JSON.parse(
|
||||
await readFile(`${pull_number}/changed-paths.json`, 'utf-8'),
|
||||
).labels
|
||||
|
||||
Object.assign(
|
||||
prLabels,
|
||||
// Ignore `evalLabels` if it's an array.
|
||||
// This can happen for older eval runs, before we switched to objects.
|
||||
// The old eval labels would have been set by the eval run,
|
||||
// so now they'll be present in `before`.
|
||||
// TODO: Simplify once old eval results have expired (~2025-10)
|
||||
Array.isArray(evalLabels) ? undefined : evalLabels,
|
||||
{
|
||||
'12.approved-by: package-maintainer': Array.from(maintainers).some(
|
||||
(m) => approvals.has(m),
|
||||
),
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
return prLabels
|
||||
}
|
||||
|
||||
// Returns true if the issue was closed. In this case, the labeling does not need to
|
||||
// continue for this issue. Returns false if no action was taken.
|
||||
async function handleAutoClose(item) {
|
||||
const issue_number = item.number
|
||||
|
||||
if (item.labels.some(({ name }) => name === '0.kind: packaging request')) {
|
||||
const body = [
|
||||
'Thank you for your interest in packaging new software in Nixpkgs. Unfortunately, to mitigate the unsustainable growth of unmaintained packages, **Nixpkgs is no longer accepting package requests** via Issues.',
|
||||
'',
|
||||
'As a [volunteer community][community], we are always open to new contributors. If you wish to see this package in Nixpkgs, **we encourage you to [contribute] it yourself**, via a Pull Request. Anyone can [become a package maintainer][maintainers]! You can find language-specific packaging information in the [Nixpkgs Manual][nixpkgs]. Should you need any help, please reach out to the community on [Matrix] or [Discourse].',
|
||||
'',
|
||||
'[community]: https://nixos.org/community',
|
||||
'[contribute]: https://github.com/NixOS/nixpkgs/blob/master/pkgs/README.md#quick-start-to-adding-a-package',
|
||||
'[maintainers]: https://github.com/NixOS/nixpkgs/blob/master/maintainers/README.md',
|
||||
'[nixpkgs]: https://nixos.org/manual/nixpkgs/unstable/',
|
||||
'[Matrix]: https://matrix.to/#/#dev:nixos.org',
|
||||
'[Discourse]: https://discourse.nixos.org/c/dev/14',
|
||||
].join('\n')
|
||||
|
||||
core.info(`Issue #${item.number}: auto-closed`)
|
||||
|
||||
if (!dry) {
|
||||
await github.rest.issues.createComment({
|
||||
...context.repo,
|
||||
issue_number,
|
||||
body,
|
||||
})
|
||||
|
||||
await github.rest.issues.update({
|
||||
...context.repo,
|
||||
issue_number,
|
||||
state: 'closed',
|
||||
state_reason: 'not_planned',
|
||||
})
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
async function handle({ item, stats }) {
|
||||
try {
|
||||
const log = (k, v, skip) => {
|
||||
core.info(`#${item.number} - ${k}: ${v}${skip ? ' (skipped)' : ''}`)
|
||||
return skip
|
||||
}
|
||||
|
||||
log('Last updated at', item.updated_at)
|
||||
log('URL', item.html_url)
|
||||
|
||||
const issue_number = item.number
|
||||
|
||||
const itemLabels = {}
|
||||
|
||||
if (item.pull_request || context.payload.pull_request) {
|
||||
stats.prs++
|
||||
Object.assign(itemLabels, await handlePullRequest({ item, stats }))
|
||||
} else {
|
||||
stats.issues++
|
||||
if (item.labels.some(({ name }) => name === '4.workflow: auto-close')) {
|
||||
// If this returns true, the issue was closed. In this case we return, to not
|
||||
// label the issue anymore. Most importantly this avoids unlabeling stale issues
|
||||
// which are closed via auto-close.
|
||||
if (await handleAutoClose(item)) return
|
||||
}
|
||||
}
|
||||
|
||||
const latest_event_at = new Date(
|
||||
(
|
||||
await github.paginate(github.rest.issues.listEventsForTimeline, {
|
||||
...context.repo,
|
||||
issue_number,
|
||||
per_page: 100,
|
||||
})
|
||||
)
|
||||
.filter(({ event }) =>
|
||||
[
|
||||
// These events are hand-picked from:
|
||||
// https://docs.github.com/en/rest/using-the-rest-api/issue-event-types?apiVersion=2022-11-28
|
||||
// Each of those causes a PR/issue to *not* be considered as stale anymore.
|
||||
// Most of these use created_at.
|
||||
'assigned',
|
||||
'commented', // uses updated_at, because that could be > created_at
|
||||
'committed', // uses committer.date
|
||||
'head_ref_force_pushed',
|
||||
'milestoned',
|
||||
'pinned',
|
||||
'ready_for_review',
|
||||
'renamed',
|
||||
'reopened',
|
||||
'review_dismissed',
|
||||
'review_requested',
|
||||
'reviewed', // uses submitted_at
|
||||
'unlocked',
|
||||
'unmarked_as_duplicate',
|
||||
].includes(event),
|
||||
)
|
||||
.map(
|
||||
({ created_at, updated_at, committer, submitted_at }) =>
|
||||
new Date(
|
||||
updated_at ?? created_at ?? submitted_at ?? committer.date,
|
||||
),
|
||||
)
|
||||
// Reverse sort by date value. The default sort() sorts by string representation, which is bad for dates.
|
||||
.sort((a, b) => b - a)
|
||||
.at(0) ?? item.created_at,
|
||||
)
|
||||
log('latest_event_at', latest_event_at.toISOString())
|
||||
|
||||
const stale_at = new Date(new Date().setDate(new Date().getDate() - 180))
|
||||
|
||||
// Create a map (Label -> Boolean) of all currently set labels.
|
||||
// Each label is set to True and can be disabled later.
|
||||
const before = Object.fromEntries(
|
||||
(
|
||||
await github.paginate(github.rest.issues.listLabelsOnIssue, {
|
||||
...context.repo,
|
||||
issue_number,
|
||||
})
|
||||
).map(({ name }) => [name, true]),
|
||||
)
|
||||
|
||||
Object.assign(itemLabels, {
|
||||
'2.status: stale':
|
||||
!before['1.severity: security'] && latest_event_at < stale_at,
|
||||
})
|
||||
|
||||
const after = Object.assign({}, before, itemLabels)
|
||||
|
||||
// No need for an API request, if all labels are the same.
|
||||
const hasChanges = Object.keys(after).some(
|
||||
(name) => (before[name] ?? false) !== after[name],
|
||||
)
|
||||
if (log('Has changes', hasChanges, !hasChanges)) return
|
||||
|
||||
// Skipping labeling on a pull_request event, because we have no privileges.
|
||||
const labels = Object.entries(after)
|
||||
.filter(([, value]) => value)
|
||||
.map(([name]) => name)
|
||||
if (log('Set labels', labels, dry)) return
|
||||
|
||||
await github.rest.issues.setLabels({
|
||||
...context.repo,
|
||||
issue_number,
|
||||
labels,
|
||||
})
|
||||
} catch (cause) {
|
||||
throw new Error(`Labeling #${item.number} failed.`, { cause })
|
||||
}
|
||||
}
|
||||
|
||||
await withRateLimit({ github, core }, async (stats) => {
|
||||
if (context.payload.pull_request) {
|
||||
await handle({ item: context.payload.pull_request, stats })
|
||||
} else {
|
||||
const lastRun = (
|
||||
await github.rest.actions.listWorkflowRuns({
|
||||
...context.repo,
|
||||
workflow_id: 'labels.yml',
|
||||
event: 'schedule',
|
||||
status: 'success',
|
||||
exclude_pull_requests: true,
|
||||
per_page: 1,
|
||||
})
|
||||
).data.workflow_runs[0]
|
||||
|
||||
const cutoff = new Date(
|
||||
Math.max(
|
||||
// Go back as far as the last successful run of this workflow to make sure
|
||||
// we are not leaving anyone behind on GHA failures.
|
||||
// Defaults to going back 1 hour on the first run.
|
||||
new Date(
|
||||
lastRun?.created_at ?? Date.now() - 1 * 60 * 60 * 1000,
|
||||
).getTime(),
|
||||
// Go back max. 1 day to prevent hitting all API rate limits immediately,
|
||||
// when GH API returns a wrong workflow by accident.
|
||||
Date.now() - 24 * 60 * 60 * 1000,
|
||||
),
|
||||
)
|
||||
core.info(`cutoff timestamp: ${cutoff.toISOString()}`)
|
||||
|
||||
const updatedItems = await github.paginate(
|
||||
github.rest.search.issuesAndPullRequests,
|
||||
{
|
||||
q: [
|
||||
`repo:"${context.repo.owner}/${context.repo.repo}"`,
|
||||
'is:open',
|
||||
`updated:>=${cutoff.toISOString()}`,
|
||||
].join(' AND '),
|
||||
per_page: 100,
|
||||
// TODO: Remove in 2025-10, when it becomes the default.
|
||||
advanced_search: true,
|
||||
},
|
||||
)
|
||||
|
||||
let cursor
|
||||
|
||||
// No workflow run available the first time.
|
||||
if (lastRun) {
|
||||
// The cursor to iterate through the full list of issues and pull requests
|
||||
// is passed between jobs as an artifact.
|
||||
const artifact = (
|
||||
await github.rest.actions.listWorkflowRunArtifacts({
|
||||
...context.repo,
|
||||
run_id: lastRun.id,
|
||||
name: 'pagination-cursor',
|
||||
})
|
||||
).data.artifacts[0]
|
||||
|
||||
// If the artifact is not available, the next iteration starts at the beginning.
|
||||
if (artifact) {
|
||||
stats.artifacts++
|
||||
|
||||
const { downloadPath } = await artifactClient.downloadArtifact(
|
||||
artifact.id,
|
||||
{
|
||||
findBy: {
|
||||
repositoryName: context.repo.repo,
|
||||
repositoryOwner: context.repo.owner,
|
||||
token: core.getInput('github-token'),
|
||||
},
|
||||
expectedHash: artifact.digest,
|
||||
},
|
||||
)
|
||||
|
||||
cursor = await readFile(path.resolve(downloadPath, 'cursor'), 'utf-8')
|
||||
}
|
||||
}
|
||||
|
||||
// From GitHub's API docs:
|
||||
// GitHub's REST API considers every pull request an issue, but not every issue is a pull request.
|
||||
// For this reason, "Issues" endpoints may return both issues and pull requests in the response.
|
||||
// You can identify pull requests by the pull_request key.
|
||||
const allItems = await github.rest.issues.listForRepo({
|
||||
...context.repo,
|
||||
state: 'open',
|
||||
sort: 'created',
|
||||
direction: 'asc',
|
||||
per_page: 100,
|
||||
after: cursor,
|
||||
})
|
||||
|
||||
// Regex taken and comment adjusted from:
|
||||
// https://github.com/octokit/plugin-paginate-rest.js/blob/8e5da25f975d2f31dda6b8b588d71f2c768a8df2/src/iterator.ts#L36-L41
|
||||
// `allItems.headers.link` format:
|
||||
// <https://api.github.com/repositories/4542716/issues?page=3&per_page=100&after=Y3Vyc29yOnYyOpLPAAABl8qNnYDOvnSJxA%3D%3D>; rel="next",
|
||||
// <https://api.github.com/repositories/4542716/issues?page=1&per_page=100&before=Y3Vyc29yOnYyOpLPAAABl8xFV9DOvoouJg%3D%3D>; rel="prev"
|
||||
// Sets `next` to undefined if "next" URL is not present or `link` header is not set.
|
||||
const next = ((allItems.headers.link ?? '').match(
|
||||
/<([^<>]+)>;\s*rel="next"/,
|
||||
) ?? [])[1]
|
||||
if (next) {
|
||||
cursor = new URL(next).searchParams.get('after')
|
||||
const uploadPath = path.resolve('cursor')
|
||||
await writeFile(uploadPath, cursor, 'utf-8')
|
||||
if (dry) {
|
||||
core.info(`pagination-cursor: ${cursor} (upload skipped)`)
|
||||
} else {
|
||||
// No stats.artifacts++, because this endpoint does not allow passing a custom token.
|
||||
// Thus, the upload will not happen with the app token, but with the default github.token.
|
||||
await artifactClient.uploadArtifact(
|
||||
'pagination-cursor',
|
||||
[uploadPath],
|
||||
path.resolve('.'),
|
||||
{
|
||||
retentionDays: 1,
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Some items might be in both search results, so we filter out duplicates as well.
|
||||
const items = []
|
||||
.concat(updatedItems, allItems.data)
|
||||
.filter(
|
||||
(thisItem, idx, arr) =>
|
||||
idx ===
|
||||
arr.findIndex((firstItem) => firstItem.number === thisItem.number),
|
||||
)
|
||||
|
||||
;(await Promise.allSettled(items.map((item) => handle({ item, stats }))))
|
||||
.filter(({ status }) => status === 'rejected')
|
||||
.map(({ reason }) =>
|
||||
core.setFailed(`${reason.message}\n${reason.cause.stack}`),
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
1908
ci/github-script/package-lock.json
generated
Normal file
File diff suppressed because it is too large
10
ci/github-script/package.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@actions/artifact": "2.3.2",
|
||||
"@actions/core": "1.11.1",
|
||||
"@actions/github": "6.0.1",
|
||||
"bottleneck": "2.19.5",
|
||||
"commander": "14.0.0"
|
||||
}
|
||||
}
|
||||
232
ci/github-script/prepare.js
Normal file
@@ -0,0 +1,232 @@
|
||||
const { classify } = require('../supportedBranches.js')
|
||||
const { postReview } = require('./reviews.js')
|
||||
|
||||
module.exports = async ({ github, context, core, dry }) => {
|
||||
const pull_number = context.payload.pull_request.number
|
||||
|
||||
for (const retryInterval of [5, 10, 20, 40, 80]) {
|
||||
core.info('Checking whether the pull request can be merged...')
|
||||
const prInfo = (
|
||||
await github.rest.pulls.get({
|
||||
...context.repo,
|
||||
pull_number,
|
||||
})
|
||||
).data
|
||||
|
||||
if (prInfo.state !== 'open') throw new Error('PR is not open anymore.')
|
||||
|
||||
if (prInfo.mergeable == null) {
|
||||
core.info(
|
||||
`GitHub is still computing whether this PR can be merged, waiting ${retryInterval} seconds before trying again...`,
|
||||
)
|
||||
await new Promise((resolve) => setTimeout(resolve, retryInterval * 1000))
|
||||
continue
|
||||
}
|
||||
|
||||
const { base, head } = prInfo
|
||||
|
||||
const baseClassification = classify(base.ref)
|
||||
core.setOutput('base', baseClassification)
|
||||
console.log('base classification:', baseClassification)
|
||||
|
||||
const headClassification =
|
||||
base.repo.full_name === head.repo.full_name
|
||||
? classify(head.ref)
|
||||
: // PRs from forks are always considered WIP.
|
||||
{ type: ['wip'] }
|
||||
core.setOutput('head', headClassification)
|
||||
console.log('head classification:', headClassification)
|
||||
|
||||
if (baseClassification.type.includes('channel')) {
|
||||
const { stable, version } = baseClassification
|
||||
const correctBranch = stable ? `release-${version}` : 'master'
|
||||
const body = [
|
||||
'The `nixos-*` and `nixpkgs-*` branches are pushed to by the channel release script and should not be merged into directly.',
|
||||
'',
|
||||
`Please target \`${correctBranch}\` instead.`,
|
||||
].join('\n')
|
||||
|
||||
await postReview({ github, context, core, dry, body })
|
||||
|
||||
throw new Error('The PR targets a channel branch.')
|
||||
}
|
||||
|
||||
if (headClassification.type.includes('wip')) {
|
||||
// In the following, we look at the git history to determine the base branch that
|
||||
// this Pull Request branched off of. This is *supposed* to be the branch that it
|
||||
// merges into, but humans make mistakes. Once that happens we want to error out as
|
||||
// early as possible.
|
||||
|
||||
// To determine the "real base", we are looking at the merge-base of primary development
|
||||
// branches and the head of the PR. The merge-base which results in the least number of
|
||||
// commits between that base and head is the real base. We can query for this via GitHub's
|
||||
// REST API. There can be multiple candidates for the real base with the same number of
|
||||
// commits. In this case we pick the "best" candidate by a fixed ordering of branches,
|
||||
// as defined in ci/supportedBranches.js.
|
||||
//
|
||||
// These requests take a while, when comparing against the wrong release - they need
|
||||
// to look at way more than 10k commits in that case. Thus, we try to minimize the
|
||||
// number of requests across releases:
|
||||
// - First, we look at the primary development branches only: master and release-xx.yy.
|
||||
// The branch with the fewest commits gives us the release this PR belongs to.
|
||||
// - We then compare this number against the relevant staging branches for this release
|
||||
// to find the exact branch that this belongs to.
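// A purely illustrative sketch of the selection (the commit counts below are made up):
//   compare master...HEAD        -> 10000 commits (wrong release, capped by the API)
//   compare release-25.05...HEAD ->     3 commits
//   compare staging-25.05...HEAD ->     3 commits
// The two 3-commit candidates tie (the PR was created off their merge-base), so the
// fixed ordering from ci/supportedBranches.js decides which branch is reported as the
// real base.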
|
||||
|
||||
// All potential development branches
|
||||
const branches = (
|
||||
await github.paginate(github.rest.repos.listBranches, {
|
||||
...context.repo,
|
||||
per_page: 100,
|
||||
})
|
||||
).map(({ name }) => classify(name))
|
||||
|
||||
// All stable primary development branches from latest to oldest.
|
||||
const releases = branches
|
||||
.filter(({ stable, type }) => type.includes('primary') && stable)
|
||||
.sort((a, b) => b.version.localeCompare(a.version))
|
||||
|
||||
async function mergeBase({ branch, order, version }) {
|
||||
const { data } = await github.rest.repos.compareCommitsWithBasehead({
|
||||
...context.repo,
|
||||
basehead: `${branch}...${head.sha}`,
|
||||
// Pagination for this endpoint is about the commits listed, which we don't care about.
|
||||
per_page: 1,
|
||||
// Taking the second page skips the list of files of this changeset.
|
||||
page: 2,
|
||||
})
|
||||
return {
|
||||
branch,
|
||||
order,
|
||||
version,
|
||||
commits: data.total_commits,
|
||||
sha: data.merge_base_commit.sha,
|
||||
}
|
||||
}
|
||||
|
||||
// Multiple branches can be OK at the same time if the PR was created off a merge-base,
|
||||
// so we store them as an array.
|
||||
let candidates = [await mergeBase(classify('master'))]
|
||||
for (const release of releases) {
|
||||
const nextCandidate = await mergeBase(release)
|
||||
if (candidates[0].commits === nextCandidate.commits)
|
||||
candidates.push(nextCandidate)
|
||||
if (candidates[0].commits > nextCandidate.commits)
|
||||
candidates = [nextCandidate]
|
||||
// The number 10000 is essentially arbitrary, but the GitHub API returns this value
|
||||
// when the number of commits exceeds it in reality. The difference between two stable releases
|
||||
// is certainly more than 10k commits, so this works for us as well: if we're targeting
|
||||
// a wrong release, the number *will* be 10000.
|
||||
if (candidates[0].commits < 10000) break
|
||||
}
|
||||
|
||||
core.info(`This PR is for NixOS ${candidates[0].version}.`)
|
||||
|
||||
// Secondary development branches for the selected version only.
|
||||
const secondary = branches.filter(
|
||||
({ branch, type, version }) =>
|
||||
type.includes('secondary') && version === candidates[0].version,
|
||||
)
|
||||
|
||||
// Make sure that we always check the current target as well, even if it's a WIP branch.
|
||||
// If it's not a WIP branch, it was already included in either releases or secondary.
|
||||
if (classify(base.ref).type.includes('wip')) {
|
||||
secondary.push(classify(base.ref))
|
||||
}
|
||||
|
||||
for (const branch of secondary) {
|
||||
const nextCandidate = await mergeBase(branch)
|
||||
if (candidates[0].commits === nextCandidate.commits)
|
||||
candidates.push(nextCandidate)
|
||||
if (candidates[0].commits > nextCandidate.commits)
|
||||
candidates = [nextCandidate]
|
||||
}
|
||||
|
||||
// If the current branch is among the candidates, this is always better than any other,
|
||||
// thus sorting at -1.
|
||||
candidates = candidates
|
||||
.map((candidate) =>
|
||||
candidate.branch === base.ref
|
||||
? { ...candidate, order: -1 }
|
||||
: candidate,
|
||||
)
|
||||
.sort((a, b) => a.order - b.order)
|
||||
|
||||
const best = candidates.at(0)
|
||||
|
||||
core.info('The base branches for this PR are:')
|
||||
core.info(`github: ${base.ref}`)
|
||||
core.info(
|
||||
`candidates: ${candidates.map(({ branch }) => branch).join(',')}`,
|
||||
)
|
||||
core.info(`best candidate: ${best.branch}`)
|
||||
|
||||
if (best.branch !== base.ref) {
|
||||
const current = await mergeBase(classify(base.ref))
|
||||
const body = [
|
||||
`The PR's base branch is set to \`${current.branch}\`, but ${current.commits === 10000 ? 'at least 10000' : current.commits - best.commits} commits from the \`${best.branch}\` branch are included. Make sure you know the [right base branch for your changes](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md#branch-conventions), then:`,
|
||||
`- If the changes should go to the \`${best.branch}\` branch, [change the base branch](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-base-branch-of-a-pull-request).`,
|
||||
`- If the changes should go to the \`${current.branch}\` branch, rebase your PR onto the correct merge-base:`,
|
||||
' ```bash',
|
||||
` # git rebase --onto $(git merge-base upstream/${current.branch} HEAD) $(git merge-base upstream/${best.branch} HEAD)`,
|
||||
` git rebase --onto ${current.sha} ${best.sha}`,
|
||||
` git push --force-with-lease`,
|
||||
' ```',
|
||||
].join('\n')
|
||||
|
||||
await postReview({ github, context, core, dry, body })
|
||||
|
||||
throw new Error(`The PR contains commits from a different base.`)
|
||||
}
|
||||
}
|
||||
|
||||
let mergedSha, targetSha
|
||||
|
||||
if (prInfo.mergeable) {
|
||||
core.info('The PR can be merged.')
|
||||
|
||||
mergedSha = prInfo.merge_commit_sha
|
||||
targetSha = (
|
||||
await github.rest.repos.getCommit({
|
||||
...context.repo,
|
||||
ref: prInfo.merge_commit_sha,
|
||||
})
|
||||
).data.parents[0].sha
|
||||
} else {
|
||||
core.warning('The PR has a merge conflict.')
|
||||
|
||||
mergedSha = head.sha
|
||||
targetSha = (
|
||||
await github.rest.repos.compareCommitsWithBasehead({
|
||||
...context.repo,
|
||||
basehead: `${base.sha}...${head.sha}`,
|
||||
})
|
||||
).data.merge_base_commit.sha
|
||||
}
|
||||
|
||||
core.info(
|
||||
`Checking the commits:\nmerged: ${mergedSha}\ntarget: ${targetSha}`,
|
||||
)
|
||||
core.setOutput('mergedSha', mergedSha)
|
||||
core.setOutput('targetSha', targetSha)
|
||||
|
||||
core.setOutput('systems', require('../supportedSystems.json'))
|
||||
|
||||
const files = (
|
||||
await github.paginate(github.rest.pulls.listFiles, {
|
||||
...context.repo,
|
||||
pull_number: context.payload.pull_request.number,
|
||||
per_page: 100,
|
||||
})
|
||||
).map((file) => file.filename)
|
||||
|
||||
const touched = []
|
||||
if (files.includes('ci/pinned.json')) touched.push('pinned')
|
||||
if (files.includes('ci/OWNERS')) touched.push('owners')
|
||||
core.setOutput('touched', touched)
|
||||
|
||||
return
|
||||
}
|
||||
throw new Error(
|
||||
"Not retrying anymore. It's likely that GitHub is having internal issues: check https://www.githubstatus.com.",
|
||||
)
|
||||
}
|
||||
85
ci/github-script/reviews.js
Normal file
@@ -0,0 +1,85 @@
|
||||
async function dismissReviews({ github, context, dry }) {
|
||||
const pull_number = context.payload.pull_request.number
|
||||
|
||||
if (dry) {
|
||||
return
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
(
|
||||
await github.paginate(github.rest.pulls.listReviews, {
|
||||
...context.repo,
|
||||
pull_number,
|
||||
})
|
||||
)
|
||||
.filter((review) => review.user?.login === 'github-actions[bot]')
|
||||
.map(async (review) => {
|
||||
if (review.state === 'CHANGES_REQUESTED') {
|
||||
await github.rest.pulls.dismissReview({
|
||||
...context.repo,
|
||||
pull_number,
|
||||
review_id: review.id,
|
||||
message: 'All good now, thank you!',
|
||||
})
|
||||
}
|
||||
await github.graphql(
|
||||
`mutation($node_id:ID!) {
|
||||
minimizeComment(input: {
|
||||
classifier: RESOLVED,
|
||||
subjectId: $node_id
|
||||
})
|
||||
{ clientMutationId }
|
||||
}`,
|
||||
{ node_id: review.node_id },
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
async function postReview({ github, context, core, dry, body }) {
|
||||
const pull_number = context.payload.pull_request.number
|
||||
|
||||
const pendingReview = (
|
||||
await github.paginate(github.rest.pulls.listReviews, {
|
||||
...context.repo,
|
||||
pull_number,
|
||||
})
|
||||
).find(
|
||||
(review) =>
|
||||
review.user?.login === 'github-actions[bot]' &&
|
||||
// If a review is still pending, we can just update this instead
|
||||
// of posting a new one.
|
||||
(review.state === 'CHANGES_REQUESTED' ||
|
||||
// No need to post a new review if an older one with the exact
|
||||
// same content had already been dismissed.
|
||||
review.body === body),
|
||||
)
|
||||
|
||||
if (dry) {
|
||||
if (pendingReview)
|
||||
core.info(`pending review found: ${pendingReview.html_url}`)
|
||||
else core.info('no pending review found')
|
||||
core.info(body)
|
||||
} else {
|
||||
if (pendingReview) {
|
||||
await github.rest.pulls.updateReview({
|
||||
...context.repo,
|
||||
pull_number,
|
||||
review_id: pendingReview.id,
|
||||
body,
|
||||
})
|
||||
} else {
|
||||
await github.rest.pulls.createReview({
|
||||
...context.repo,
|
||||
pull_number,
|
||||
event: 'REQUEST_CHANGES',
|
||||
body,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
dismissReviews,
|
||||
postReview,
|
||||
}
|
||||
86
ci/github-script/run
Executable file
@@ -0,0 +1,86 @@
|
||||
#!/usr/bin/env -S node --import ./run
|
||||
import { execSync } from 'node:child_process'
|
||||
import { closeSync, mkdtempSync, openSync, rmSync } from 'node:fs'
|
||||
import { tmpdir } from 'node:os'
|
||||
import { join } from 'node:path'
|
||||
import { program } from 'commander'
|
||||
import * as core from '@actions/core'
|
||||
import { getOctokit } from '@actions/github'
|
||||
|
||||
async function run(action, owner, repo, pull_number, options = {}) {
|
||||
const token = execSync('gh auth token', { encoding: 'utf-8' }).trim()
|
||||
|
||||
const github = getOctokit(token)
|
||||
|
||||
const payload = !pull_number ? {} : {
|
||||
pull_request: (await github.rest.pulls.get({
|
||||
owner,
|
||||
repo,
|
||||
pull_number,
|
||||
})).data
|
||||
}
|
||||
|
||||
process.env['INPUT_GITHUB-TOKEN'] = token
|
||||
|
||||
closeSync(openSync('step-summary.md', 'w'))
|
||||
process.env.GITHUB_STEP_SUMMARY = 'step-summary.md'
|
||||
|
||||
await action({
|
||||
github,
|
||||
context: {
|
||||
payload,
|
||||
repo: {
|
||||
owner,
|
||||
repo,
|
||||
},
|
||||
},
|
||||
core,
|
||||
dry: true,
|
||||
...options,
|
||||
})
|
||||
}
|
||||
|
||||
program
|
||||
.command('prepare')
|
||||
.description('Prepare relevant information of a pull request.')
|
||||
.argument('<owner>', 'Owner of the GitHub repository to check (Example: NixOS)')
|
||||
.argument('<repo>', 'Name of the GitHub repository to check (Example: nixpkgs)')
|
||||
.argument('<pr>', 'Number of the Pull Request to check')
|
||||
.option('--no-dry', 'Make actual modifications')
|
||||
.action(async (owner, repo, pr, options) => {
|
||||
const prepare = (await import('./prepare.js')).default
|
||||
await run(prepare, owner, repo, pr, options)
|
||||
})
|
||||
|
||||
program
|
||||
.command('commits')
|
||||
.description('Check commit structure of a pull request.')
|
||||
.argument('<owner>', 'Owner of the GitHub repository to check (Example: NixOS)')
|
||||
.argument('<repo>', 'Name of the GitHub repository to check (Example: nixpkgs)')
|
||||
.argument('<pr>', 'Number of the Pull Request to check')
|
||||
.option('--no-cherry-picks', 'Do not expect cherry-picks.')
|
||||
.action(async (owner, repo, pr, options) => {
|
||||
const commits = (await import('./commits.js')).default
|
||||
await run(commits, owner, repo, pr, options)
|
||||
})
|
||||
|
||||
program
|
||||
.command('labels')
|
||||
.description('Manage labels on pull requests.')
|
||||
.argument('<owner>', 'Owner of the GitHub repository to label (Example: NixOS)')
|
||||
.argument('<repo>', 'Name of the GitHub repository to label (Example: nixpkgs)')
|
||||
.argument('[pr]', 'Number of the Pull Request to label')
|
||||
.option('--no-dry', 'Make actual modifications')
|
||||
.action(async (owner, repo, pr, options) => {
|
||||
const labels = (await import('./labels.js')).default
|
||||
const tmp = mkdtempSync(join(tmpdir(), 'github-script-'))
|
||||
try {
|
||||
process.env.GITHUB_WORKSPACE = tmp
|
||||
process.chdir(tmp)
|
||||
await run(labels, owner, repo, pr, options)
|
||||
} finally {
|
||||
rmSync(tmp, { recursive: true })
|
||||
}
|
||||
})
|
||||
|
||||
await program.parse()
|
||||
25
ci/github-script/shell.nix
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
system ? builtins.currentSystem,
|
||||
pkgs ? (import ../. { inherit system; }).pkgs,
|
||||
}:
|
||||
|
||||
pkgs.callPackage (
|
||||
{
|
||||
gh,
|
||||
importNpmLock,
|
||||
mkShell,
|
||||
nodejs,
|
||||
}:
|
||||
mkShell {
|
||||
packages = [
|
||||
gh
|
||||
importNpmLock.hooks.linkNodeModulesHook
|
||||
nodejs
|
||||
];
|
||||
|
||||
npmDeps = importNpmLock.buildNodeModules {
|
||||
npmRoot = ./.;
|
||||
inherit nodejs;
|
||||
};
|
||||
}
|
||||
) { }
|
||||
63
ci/github-script/withRateLimit.js
Normal file
@@ -0,0 +1,63 @@
|
||||
module.exports = async ({ github, core }, callback) => {
|
||||
const Bottleneck = require('bottleneck')
|
||||
|
||||
const stats = {
|
||||
issues: 0,
|
||||
prs: 0,
|
||||
requests: 0,
|
||||
artifacts: 0,
|
||||
}
|
||||
|
||||
// Rate-Limiting and Throttling, see for details:
|
||||
// https://github.com/octokit/octokit.js/issues/1069#throttling
|
||||
// https://docs.github.com/en/rest/using-the-rest-api/best-practices-for-using-the-rest-api
|
||||
const allLimits = new Bottleneck({
|
||||
// Avoid concurrent requests
|
||||
maxConcurrent: 1,
|
||||
// Will be updated with first `updateReservoir()` call below.
|
||||
reservoir: 0,
|
||||
})
|
||||
// Pause between mutative requests
|
||||
const writeLimits = new Bottleneck({ minTime: 1000 }).chain(allLimits)
|
||||
github.hook.wrap('request', async (request, options) => {
|
||||
// Requests to a different host do not count against the rate limit.
|
||||
if (options.url.startsWith('https://github.com')) return request(options)
|
||||
// Requests to the /rate_limit endpoint do not count against the rate limit.
|
||||
if (options.url === '/rate_limit') return request(options)
|
||||
// Search requests are in a different resource group, which allows 30 requests / minute.
|
||||
// We do less than a handful each run, so not implementing throttling for now.
|
||||
if (options.url.startsWith('/search/')) return request(options)
|
||||
stats.requests++
|
||||
if (['POST', 'PUT', 'PATCH', 'DELETE'].includes(options.method))
|
||||
return writeLimits.schedule(request.bind(null, options))
|
||||
else return allLimits.schedule(request.bind(null, options))
|
||||
})
|
||||
|
||||
async function updateReservoir() {
|
||||
let response
|
||||
try {
|
||||
response = await github.rest.rateLimit.get()
|
||||
} catch (err) {
|
||||
core.error(`Failed updating reservoir:\n${err}`)
|
||||
// Keep retrying on failed rate limit requests instead of exiting the script early.
|
||||
return
|
||||
}
|
||||
// Always keep 1000 spare requests for other jobs to do their regular duty.
|
||||
// They normally use below 100, so 1000 is *plenty* of room to work with.
|
||||
const reservoir = Math.max(0, response.data.resources.core.remaining - 1000)
|
||||
core.info(`Updating reservoir to: ${reservoir}`)
|
||||
allLimits.updateSettings({ reservoir })
|
||||
}
|
||||
await updateReservoir()
|
||||
// Update remaining requests every minute to account for other jobs running in parallel.
|
||||
const reservoirUpdater = setInterval(updateReservoir, 60 * 1000)
|
||||
|
||||
try {
|
||||
await callback(stats)
|
||||
} finally {
|
||||
clearInterval(reservoirUpdater)
|
||||
core.notice(
|
||||
`Processed ${stats.prs} PRs, ${stats.issues} Issues, made ${stats.requests + stats.artifacts} API requests and downloaded ${stats.artifacts} artifacts.`,
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -13,9 +13,15 @@ let
|
||||
with lib.fileset;
|
||||
path:
|
||||
toSource {
|
||||
fileset = (gitTracked path);
|
||||
fileset = difference (gitTracked path) (unions [
|
||||
(path + /.github)
|
||||
(path + /ci)
|
||||
]);
|
||||
root = path;
|
||||
};
|
||||
|
||||
filteredBase = filtered base;
|
||||
filteredHead = filtered head;
|
||||
in
|
||||
runCommand "nixpkgs-vet"
|
||||
{
|
||||
@@ -25,7 +31,29 @@ runCommand "nixpkgs-vet"
|
||||
env.NIXPKGS_VET_NIX_PACKAGE = nix;
|
||||
}
|
||||
''
|
||||
nixpkgs-vet --base ${filtered base} ${filtered head}
|
||||
export NIX_STATE_DIR=$(mktemp -d)
|
||||
|
||||
nixpkgs-vet --base ${filteredBase} ${filteredHead}
|
||||
|
||||
# TODO: Upstream into nixpkgs-vet, see:
|
||||
# https://github.com/NixOS/nixpkgs-vet/issues/164
|
||||
badFiles=$(find ${filteredHead}/pkgs -type f -name '*.nix' -print | xargs grep -l '^[^#]*<nixpkgs/' || true)
|
||||
if [[ -n $badFiles ]]; then
|
||||
echo "Nixpkgs is not allowed to use <nixpkgs> to refer to itself."
|
||||
echo "The offending files:"
|
||||
echo "$badFiles"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# TODO: Upstream into nixpkgs-vet, see:
|
||||
# https://github.com/NixOS/nixpkgs-vet/issues/166
|
||||
conflictingPaths=$(find ${filteredHead} | awk '{ print $1 " " tolower($1) }' | sort -k2 | uniq -D -f 1 | cut -d ' ' -f 1)
|
||||
if [[ -n $conflictingPaths ]]; then
|
||||
echo "Files in nixpkgs must not vary only by case."
|
||||
echo "The offending paths:"
|
||||
echo "$conflictingPaths"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
touch $out
|
||||
''
|
||||
|
||||
@@ -61,9 +61,6 @@ trace "Done"
|
||||
trace -n "Merging base branch into the HEAD commit in $tmp/merged.. "
|
||||
git -C "$tmp/merged" merge -q --no-edit "$baseSha"
|
||||
trace -e "\e[34m$(git -C "$tmp/merged" rev-parse HEAD)\e[0m"
|
||||
trace -n "Reading pinned nixpkgs-vet version from pinned-version.txt.. "
|
||||
toolVersion=$(<"$tmp/merged/ci/nixpkgs-vet/pinned-version.txt")
|
||||
trace -e "\e[34m$toolVersion\e[0m"
|
||||
|
||||
trace "Running nixpkgs-vet.."
|
||||
nix-build ci -A nixpkgs-vet --argstr base "$tmp/base" --argstr head "$tmp/merged"
|
||||
nix-build ci -A nixpkgs-vet --arg base "$tmp/base" --arg head "$tmp/merged"
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
0.1.4
|
||||
@@ -1,22 +0,0 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p jq curl
|
||||
|
||||
set -o pipefail -o errexit -o nounset
|
||||
|
||||
trace() { echo >&2 "$@"; }
|
||||
|
||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||
|
||||
repository=NixOS/nixpkgs-vet
|
||||
pin_file=$SCRIPT_DIR/pinned-version.txt
|
||||
|
||||
trace -n "Fetching latest release of $repository.. "
|
||||
latestRelease=$(curl -sSfL \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
https://api.github.com/repos/"$repository"/releases/latest)
|
||||
latestVersion=$(jq .tag_name -r <<< "$latestRelease")
|
||||
trace "$latestVersion"
|
||||
|
||||
trace "Updating $pin_file"
|
||||
echo "$latestVersion" > "$pin_file"
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"rev": "eaeed9530c76ce5f1d2d8232e08bec5e26f18ec1",
|
||||
"sha256": "132nimgi1g88fbhddk4b8b1qk68jly494x2mnphyk3xa1d2wy9q7"
|
||||
}
|
||||
31
ci/pinned.json
Normal file
@@ -0,0 +1,31 @@
|
||||
{
|
||||
"pins": {
|
||||
"nixpkgs": {
|
||||
"type": "Git",
|
||||
"repository": {
|
||||
"type": "GitHub",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs"
|
||||
},
|
||||
"branch": "nixpkgs-unstable",
|
||||
"submodules": false,
|
||||
"revision": "641d909c4a7538f1539da9240dedb1755c907e40",
|
||||
"url": "https://github.com/NixOS/nixpkgs/archive/641d909c4a7538f1539da9240dedb1755c907e40.tar.gz",
|
||||
"hash": "10hpb1aw884k3zzcy1mhf47dqvfagiyx7kr6hg0p5xcwg04mkx8x"
|
||||
},
|
||||
"treefmt-nix": {
|
||||
"type": "Git",
|
||||
"repository": {
|
||||
"type": "GitHub",
|
||||
"owner": "numtide",
|
||||
"repo": "treefmt-nix"
|
||||
},
|
||||
"branch": "main",
|
||||
"submodules": false,
|
||||
"revision": "7d81f6fb2e19bf84f1c65135d1060d829fae2408",
|
||||
"url": "https://github.com/numtide/treefmt-nix/archive/7d81f6fb2e19bf84f1c65135d1060d829fae2408.tar.gz",
|
||||
"hash": "1cg20q8ja8k2nb7mzy95hgmd8whxapc3fbyndh1ip5dr6d1grxfs"
|
||||
}
|
||||
},
|
||||
"version": 5
|
||||
}
|
||||
@@ -17,15 +17,12 @@ stdenvNoCC.mkDerivation {
|
||||
./get-code-owners.sh
|
||||
./request-reviewers.sh
|
||||
./request-code-owner-reviews.sh
|
||||
./verify-base-branch.sh
|
||||
./dev-branches.txt
|
||||
];
|
||||
};
|
||||
nativeBuildInputs = [ makeWrapper ];
|
||||
dontBuild = true;
|
||||
installPhase = ''
|
||||
mkdir -p $out/bin
|
||||
mv dev-branches.txt $out/bin
|
||||
for bin in *.sh; do
|
||||
mv "$bin" "$out/bin"
|
||||
wrapProgram "$out/bin/$bin" \
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
# Trusted development branches:
|
||||
# These generally require PRs to update and are built by Hydra.
|
||||
# Keep this synced with the branches in .github/workflows/eval.yml
|
||||
master
|
||||
staging
|
||||
release-*
|
||||
staging-*
|
||||
haskell-updates
|
||||
python-updates
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Requests reviews for a PR after verifying that the base branch is correct
|
||||
# Requests reviews for a PR
|
||||
|
||||
set -euo pipefail
|
||||
tmp=$(mktemp -d)
|
||||
@@ -11,14 +11,6 @@ log() {
|
||||
echo "$@" >&2
|
||||
}
|
||||
|
||||
effect() {
|
||||
if [[ -n "${DRY_MODE:-}" ]]; then
|
||||
log "Skipping in dry mode:" "${@@Q}"
|
||||
else
|
||||
"$@"
|
||||
fi
|
||||
}
|
||||
|
||||
if (( $# < 3 )); then
|
||||
log "Usage: $0 GITHUB_REPO PR_NUMBER OWNERS_FILE"
|
||||
exit 1
|
||||
@@ -63,20 +55,6 @@ git -C "$tmp/nixpkgs.git" config remote.fork.promisor true
|
||||
git -C "$tmp/nixpkgs.git" fetch --no-tags fork "$prBranch"
|
||||
headRef=$(git -C "$tmp/nixpkgs.git" rev-parse refs/remotes/fork/"$prBranch")
|
||||
|
||||
log "Checking correctness of the base branch"
|
||||
if ! "$SCRIPT_DIR"/verify-base-branch.sh "$tmp/nixpkgs.git" "$headRef" "$baseRepo" "$baseBranch" "$prRepo" "$prBranch" | tee "$tmp/invalid-base-error" >&2; then
|
||||
log "Posting error as comment"
|
||||
if ! response=$(effect gh api \
|
||||
--method POST \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
-H "X-GitHub-Api-Version: 2022-11-28" \
|
||||
"/repos/$baseRepo/issues/$prNumber/comments" \
|
||||
-F "body=@$tmp/invalid-base-error"); then
|
||||
log "Failed to post the comment: $response"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
|
||||
log "Requesting reviews from code owners"
|
||||
"$SCRIPT_DIR"/get-code-owners.sh "$tmp/nixpkgs.git" "$ownersFile" "$baseBranch" "$headRef" | \
|
||||
"$SCRIPT_DIR"/request-reviewers.sh "$baseRepo" "$prNumber" "$prAuthor"
|
||||
|
||||
@@ -1,104 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Check that a PR doesn't include commits from other development branches.
|
||||
# Fails with next steps if it does
|
||||
|
||||
set -euo pipefail
|
||||
tmp=$(mktemp -d)
|
||||
trap 'rm -rf "$tmp"' exit
|
||||
SCRIPT_DIR=$(dirname "$0")
|
||||
|
||||
log() {
|
||||
echo "$@" >&2
|
||||
}
|
||||
|
||||
# Small helper to check whether an element is in a list
|
||||
# Usage: `elementIn foo "${list[@]}"`
|
||||
elementIn() {
|
||||
local e match=$1
|
||||
shift
|
||||
for e; do
|
||||
if [[ "$e" == "$match" ]]; then
|
||||
return 0
|
||||
fi
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
if (( $# < 6 )); then
|
||||
log "Usage: $0 LOCAL_REPO HEAD_REF BASE_REPO BASE_BRANCH PR_REPO PR_BRANCH"
|
||||
exit 1
|
||||
fi
|
||||
localRepo=$1
|
||||
headRef=$2
|
||||
baseRepo=$3
|
||||
baseBranch=$4
|
||||
prRepo=$5
|
||||
prBranch=$6
|
||||
|
||||
# All development branches
|
||||
devBranchPatterns=()
|
||||
while read -r pattern; do
|
||||
if [[ "$pattern" != '#'* ]]; then
|
||||
devBranchPatterns+=("$pattern")
|
||||
fi
|
||||
done < "$SCRIPT_DIR/dev-branches.txt"
|
||||
|
||||
git -C "$localRepo" branch --list --format "%(refname:short)" "${devBranchPatterns[@]}" > "$tmp/dev-branches"
|
||||
readarray -t devBranches < "$tmp/dev-branches"
|
||||
|
||||
if [[ "$baseRepo" == "$prRepo" ]] && elementIn "$prBranch" "${devBranches[@]}"; then
|
||||
log "This PR merges $prBranch into $baseBranch, no commit check necessary"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# The current merge base of the PR
|
||||
prMergeBase=$(git -C "$localRepo" merge-base "$baseBranch" "$headRef")
|
||||
log "The PR's merge base with the base branch $baseBranch is $prMergeBase"
|
||||
|
||||
# This is purely for debugging
|
||||
git -C "$localRepo" rev-list --reverse "$baseBranch".."$headRef" > "$tmp/pr-commits"
|
||||
log "The PR includes these $(wc -l < "$tmp/pr-commits") commits:"
|
||||
cat <"$tmp/pr-commits" >&2
|
||||
|
||||
for testBranch in "${devBranches[@]}"; do
|
||||
|
||||
if [[ -z "$(git -C "$localRepo" rev-list -1 --since="1 month ago" "$testBranch")" ]]; then
|
||||
log "Not checking $testBranch, was inactive for the last month"
|
||||
continue
|
||||
fi
|
||||
log "Checking if commits from $testBranch are included in the PR"
|
||||
|
||||
# We need to check for any commits that are in the PR which are also in the test branch.
|
||||
# We could check each commit from the PR individually, but that's unnecessarily slow.
|
||||
#
|
||||
# This does _almost_ what we want: `git rev-list --count headRef testBranch ^baseBranch`,
|
||||
# except that it includes commits that are reachable from _either_ headRef or testBranch,
|
||||
# instead of restricting it to ones reachable by both
|
||||
|
||||
# Easily fixable though, because we can use `git merge-base testBranch headRef`
|
||||
# to get the least common ancestor (aka merge base) commit reachable by both.
|
||||
# If the branch being tested is indeed the right base branch,
|
||||
# this is then also the commit from that branch that the PR is based on top of.
|
||||
testMergeBase=$(git -C "$localRepo" merge-base "$testBranch" "$headRef")
|
||||
|
||||
# And then use the `git rev-list --count`, but replacing the non-working
|
||||
# `headRef testBranch` with the merge base of the two.
|
||||
extraCommits=$(git -C "$localRepo" rev-list --count "$testMergeBase" ^"$baseBranch")
|
||||
|
||||
if (( extraCommits != 0 )); then
|
||||
log -e "\e[33m"
|
||||
echo "The PR's base branch is set to $baseBranch, but $extraCommits commits from the $testBranch branch are included. Make sure you know the [right base branch for your changes](https://github.com/NixOS/nixpkgs/blob/master/CONTRIBUTING.md#branch-conventions), then:"
|
||||
echo "- If the changes should go to the $testBranch branch, [change the base branch](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-base-branch-of-a-pull-request) to $testBranch"
|
||||
echo "- If the changes should go to the $baseBranch branch, rebase your PR onto the merge base with the $baseBranch branch:"
|
||||
echo " \`\`\`bash"
|
||||
echo " # git rebase --onto \$(git merge-base upstream/$baseBranch HEAD) \$(git merge-base upstream/$testBranch HEAD)"
|
||||
echo " git rebase --onto $prMergeBase $testMergeBase"
|
||||
echo " git push --force-with-lease"
|
||||
echo " \`\`\`"
|
||||
log -e "\e[m"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
log "Base branch is correct, no commits from development branches are included"
|
||||
80
ci/supportedBranches.js
Executable file
@@ -0,0 +1,80 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
/*
|
||||
#!nix-shell -i node -p nodejs
|
||||
*/
|
||||
|
||||
const typeConfig = {
|
||||
master: ['development', 'primary'],
|
||||
release: ['development', 'primary'],
|
||||
staging: ['development', 'secondary'],
|
||||
'staging-next': ['development', 'secondary'],
|
||||
'haskell-updates': ['development', 'secondary'],
|
||||
nixos: ['channel'],
|
||||
nixpkgs: ['channel'],
|
||||
}
|
||||
|
||||
// "order" ranks the development branches by how likely they are the intended base branch
|
||||
// when they are an otherwise equally good fit according to ci/github-script/prepare.js.
|
||||
const orderConfig = {
|
||||
master: 0,
|
||||
release: 1,
|
||||
staging: 2,
|
||||
'haskell-updates': 3,
|
||||
'staging-next': 4,
|
||||
}
|
||||
|
||||
function split(branch) {
|
||||
return {
|
||||
...branch.match(
|
||||
/(?<prefix>.+?)(-(?<version>\d{2}\.\d{2}|unstable)(?:-(?<suffix>.*))?)?$/,
|
||||
).groups,
|
||||
}
|
||||
}
|
||||
|
||||
function classify(branch) {
|
||||
const { prefix, version } = split(branch)
|
||||
return {
|
||||
branch,
|
||||
order: orderConfig[prefix] ?? Infinity,
|
||||
stable: (version ?? 'unstable') !== 'unstable',
|
||||
type: typeConfig[prefix] ?? ['wip'],
|
||||
version: version ?? 'unstable',
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { classify }
|
||||
|
||||
// If called directly via CLI, runs the following tests:
|
||||
if (!module.parent) {
|
||||
console.log('split(branch)')
|
||||
function testSplit(branch) {
|
||||
console.log(branch, split(branch))
|
||||
}
|
||||
testSplit('master')
|
||||
testSplit('release-25.05')
|
||||
testSplit('staging')
|
||||
testSplit('staging-next')
|
||||
testSplit('staging-25.05')
|
||||
testSplit('staging-next-25.05')
|
||||
testSplit('nixpkgs-25.05-darwin')
|
||||
testSplit('nixpkgs-unstable')
|
||||
testSplit('haskell-updates')
|
||||
testSplit('backport-123-to-release-25.05')
|
||||
|
||||
console.log('')
|
||||
|
||||
console.log('classify(branch)')
|
||||
function testClassify(branch) {
|
||||
console.log(branch, classify(branch))
|
||||
}
|
||||
testClassify('master')
|
||||
testClassify('release-25.05')
|
||||
testClassify('staging')
|
||||
testClassify('staging-next')
|
||||
testClassify('staging-25.05')
|
||||
testClassify('staging-next-25.05')
|
||||
testClassify('nixpkgs-25.05-darwin')
|
||||
testClassify('nixpkgs-unstable')
|
||||
testClassify('haskell-updates')
|
||||
testClassify('backport-123-to-release-25.05')
|
||||
}
|
||||
32
ci/supportedVersions.nix
Executable file
@@ -0,0 +1,32 @@
|
||||
#!/usr/bin/env -S nix-instantiate --eval --strict --json --arg unused true
|
||||
# Unused argument to make nix-instantiate call this function with the default arguments.
|
||||
{
|
||||
pinnedJson ? ./pinned.json,
|
||||
}:
|
||||
let
|
||||
pinned = (builtins.fromJSON (builtins.readFile pinnedJson)).pins;
|
||||
nixpkgs = fetchTarball {
|
||||
inherit (pinned.nixpkgs) url;
|
||||
sha256 = pinned.nixpkgs.hash;
|
||||
};
|
||||
pkgs = import nixpkgs {
|
||||
config.allowAliases = false;
|
||||
};
|
||||
|
||||
inherit (pkgs) lib;
|
||||
|
||||
lix = lib.pipe pkgs.lixPackageSets [
|
||||
(lib.filterAttrs (_: set: lib.isDerivation set.lix or null && set.lix.meta.available))
|
||||
lib.attrNames
|
||||
(lib.filter (name: lib.match "lix_[0-9_]+|git" name != null))
|
||||
(map (name: "lixPackageSets.${name}.lix"))
|
||||
];
|
||||
|
||||
nix = lib.pipe pkgs.nixVersions [
|
||||
(lib.filterAttrs (_: drv: lib.isDerivation drv && drv.meta.available))
|
||||
lib.attrNames
|
||||
(lib.filter (name: lib.match "nix_[0-9_]+|git" name != null))
|
||||
(map (name: "nixVersions.${name}"))
|
||||
];
|
||||
in
|
||||
lix ++ nix
|
||||
@@ -1,17 +0,0 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p jq
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# https://stackoverflow.com/a/246128
|
||||
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
|
||||
|
||||
repo=https://github.com/nixos/nixpkgs
|
||||
branch=nixpkgs-unstable
|
||||
file=$SCRIPT_DIR/pinned-nixpkgs.json
|
||||
|
||||
defaultRev=$(git ls-remote "$repo" refs/heads/"$branch" | cut -f1)
|
||||
rev=${1:-$defaultRev}
|
||||
sha256=$(nix-prefetch-url --unpack "$repo/archive/$rev.tar.gz" --name source)
|
||||
|
||||
jq -n --arg rev "$rev" --arg sha256 "$sha256" '$ARGS.named' | tee /dev/stderr > $file
|
||||
8
ci/update-pinned.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p npins
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
npins --lock-file pinned.json update
|
||||
@@ -1,3 +1,5 @@
|
||||
document.addEventListener('DOMContentLoaded', function(event) {
|
||||
anchors.add('h1[id]:not(div.note h1, div.warning h1, div.tip h1, div.caution h1, div.important h1), h2[id]:not(div.note h2, div.warning h2, div.tip h2, div.caution h2, div.important h2), h3[id]:not(div.note h3, div.warning h3, div.tip h3, div.caution h3, div.important h3), h4[id]:not(div.note h4, div.warning h4, div.tip h4, div.caution h4, div.important h4), h5[id]:not(div.note h5, div.warning h5, div.tip h5, div.caution h5, div.important h5), h6[id]:not(div.note h6, div.warning h6, div.tip h6, div.caution h6, div.important h6)');
|
||||
});
|
||||
document.addEventListener('DOMContentLoaded', () => {
|
||||
anchors.add(
|
||||
'h1[id]:not(div.note h1, div.warning h1, div.tip h1, div.caution h1, div.important h1), h2[id]:not(div.note h2, div.warning h2, div.tip h2, div.caution h2, div.important h2), h3[id]:not(div.note h3, div.warning h3, div.tip h3, div.caution h3, div.important h3), h4[id]:not(div.note h4, div.warning h4, div.tip h4, div.caution h4, div.important h4), h5[id]:not(div.note h5, div.warning h5, div.tip h5, div.caution h5, div.important h5), h6[id]:not(div.note h6, div.warning h6, div.tip h6, div.caution h6, div.important h6)',
|
||||
)
|
||||
})
|
||||
|
||||
@@ -836,7 +836,7 @@ Used with CVS. Expects `cvsRoot`, `tag`, and `hash`.
|
||||
|
||||
## `fetchhg` {#fetchhg}
|
||||
|
||||
Used with Mercurial. Expects `url`, `rev`, and `hash`.
|
||||
Used with Mercurial. Expects `url`, `rev`, and `hash`. The result is overridable with [`<pkg>.overrideAttrs`](#sec-pkg-overrideAttrs).
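For illustration, a minimal call might look like the following sketch (the URL, revision, and hash are placeholders, not a real repository):

```nix
fetchhg {
  url = "https://hg.example.org/some-project"; # placeholder URL
  rev = "0123456789ab"; # placeholder changeset id
  hash = "sha256-..."; # placeholder hash of the fetched source
}
```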
|
||||
|
||||
A number of fetcher functions wrap part of `fetchurl` and `fetchzip`. They are mainly convenience functions intended for commonly used destinations of source code in Nixpkgs. These wrapper fetchers are listed below.
|
||||
|
||||
@@ -886,6 +886,24 @@ If `fetchSubmodules` is `true`, `fetchFromSourcehut` uses `fetchgit`
|
||||
or `fetchhg` with `fetchSubmodules` or `fetchSubrepos` set to `true`,
|
||||
respectively. Otherwise, the fetcher uses `fetchzip`.
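As an illustration, a submodule-enabled call might look like this sketch (owner, repo, rev, and hash are placeholders):

```nix
fetchFromSourcehut {
  owner = "~someuser"; # placeholder; Sourcehut owners carry a leading "~"
  repo = "some-project"; # placeholder repository name
  rev = "v1.2.3"; # placeholder tag or commit
  hash = "sha256-..."; # placeholder hash
  fetchSubmodules = true; # delegates to fetchgit (or fetchhg for Mercurial repositories)
}
```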
|
||||
|
||||
## `fetchFromRadicle` {#fetchfromradicle}
|
||||
|
||||
This is used with Radicle repositories. The expected arguments are similar to those of `fetchgit`.
|
||||
|
||||
Requires a `seed` argument (e.g. `seed.radicle.xyz` or `rosa.radicle.xyz`) and a `repo` argument
|
||||
(the repository id *without* the `rad:` prefix). Also accepts an optional `node` argument which
|
||||
contains the id of the node from which to fetch the specified ref. If `node` is `null` (the
|
||||
default), a canonical ref is fetched instead.
|
||||
|
||||
```nix
|
||||
fetchFromRadicle {
|
||||
seed = "seed.radicle.xyz";
|
||||
repo = "z3gqcJUoA1n9HaHKufZs5FCSGazv5"; # heartwood
|
||||
tag = "releases/1.3.0";
|
||||
hash = "sha256-4o88BWKGGOjCIQy7anvzbA/kPOO+ZsLMzXJhE61odjw=";
|
||||
}
|
||||
```
|
||||
|
||||
## `requireFile` {#requirefile}
|
||||
|
||||
`requireFile` allows requesting files that cannot be fetched automatically, but whose content is known.
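A minimal sketch of a call (file name, URL, and hash are placeholders):

```nix
requireFile {
  name = "example-installer.run"; # placeholder name of the file to be provided manually
  url = "https://vendor.example.com/downloads"; # where a human can obtain the file
  hash = "sha256-..."; # placeholder; the known hash of the file's content
}
```

When the file is not already in the store, the build fails and prints instructions for adding it manually.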
|
||||
|
||||
@@ -60,10 +60,7 @@ lib.extendMkDerivation {
|
||||
}@args:
|
||||
{
|
||||
# Arguments to pass
|
||||
inherit
|
||||
preferLocalBuild
|
||||
allowSubstitute
|
||||
;
|
||||
inherit preferLocalBuild allowSubstitute;
|
||||
# Some expressions involving specialArg
|
||||
greeting = if specialArg "hi" then "hi" else "hello";
|
||||
};
|
||||
|
||||
@@ -37,9 +37,7 @@ let
|
||||
hash = "sha256-he1uGC1M/nFcKpMM9JKY4oeexJcnzV0ZRxhTjtJz6xw=";
|
||||
};
|
||||
in
|
||||
appimageTools.wrapType2 {
|
||||
inherit pname version src;
|
||||
}
|
||||
appimageTools.wrapType2 { inherit pname version src; }
|
||||
```
|
||||
|
||||
:::
|
||||
@@ -104,9 +102,7 @@ let
|
||||
hash = "sha256-/hMPvYdnVB1XjKgU2v47HnVvW4+uC3rhRjbucqin4iI=";
|
||||
};
|
||||
|
||||
appimageContents = appimageTools.extract {
|
||||
inherit pname version src;
|
||||
};
|
||||
appimageContents = appimageTools.extract { inherit pname version src; };
|
||||
in
|
||||
appimageTools.wrapType2 {
|
||||
inherit pname version src;
|
||||
|
||||
@@ -33,10 +33,7 @@ You may also want to consider [dockerTools](#sec-pkgs-dockerTools) for your cont
|
||||
The following derivation will construct a flat-file binary cache containing the closure of `hello`.
|
||||
|
||||
```nix
|
||||
{ mkBinaryCache, hello }:
|
||||
mkBinaryCache {
|
||||
rootPaths = [ hello ];
|
||||
}
|
||||
{ mkBinaryCache, hello }: mkBinaryCache { rootPaths = [ hello ]; }
|
||||
```
|
||||
|
||||
Build the cache on a machine.
|
||||
|
||||
@@ -1577,9 +1577,7 @@ This example uses [](#ex-dockerTools-streamNixShellImage-hello) as a starting po
|
||||
dockerTools.streamNixShellImage {
|
||||
tag = "latest";
|
||||
drv = hello.overrideAttrs (old: {
|
||||
nativeBuildInputs = old.nativeBuildInputs or [ ] ++ [
|
||||
cowsay
|
||||
];
|
||||
nativeBuildInputs = old.nativeBuildInputs or [ ] ++ [ cowsay ];
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
@@ -82,9 +82,7 @@ This example uses `ociTools.buildContainer` to create a simple container that ru
|
||||
bash,
|
||||
}:
|
||||
ociTools.buildContainer {
|
||||
args = [
|
||||
(lib.getExe bash)
|
||||
];
|
||||
args = [ (lib.getExe bash) ];
|
||||
|
||||
readonly = false;
|
||||
}
|
||||
|
||||
@@ -7,20 +7,18 @@ For hermeticity, Nix derivations do not allow any state to be carried over betwe
|
||||
However, we can tell Nix explicitly what the previous build state was, by representing that previous state as a derivation output. This allows the passed build state to be used for an incremental build.
|
||||
|
||||
To change a normal derivation to a checkpoint-based build, these steps must be taken:
|
||||
- apply `prepareCheckpointBuild` to the desired derivation, e.g.
|
||||
```nix
|
||||
{
|
||||
checkpointArtifacts = (pkgs.checkpointBuildTools.prepareCheckpointBuild pkgs.virtualbox);
|
||||
}
|
||||
```
|
||||
- change something you want in the sources of the package, e.g. use a source override:
|
||||
```nix
|
||||
{
|
||||
changedVBox = pkgs.virtualbox.overrideAttrs (old: {
|
||||
src = path/to/vbox/sources;
|
||||
});
|
||||
}
|
||||
```
|
||||
```nix
|
||||
{
|
||||
checkpointArtifacts = (pkgs.checkpointBuildTools.prepareCheckpointBuild pkgs.virtualbox);
|
||||
}
|
||||
```
|
||||
```nix
|
||||
{
|
||||
changedVBox = pkgs.virtualbox.overrideAttrs (old: {
|
||||
src = path/to/vbox/sources;
|
||||
});
|
||||
}
|
||||
```
|
||||
- use `mkCheckpointBuild changedVBox checkpointArtifacts`
|
||||
- enjoy shorter build times
|
||||
|
||||
@@ -30,10 +28,7 @@ To change a normal derivation to a checkpoint based build, these steps must be t
|
||||
pkgs ? import <nixpkgs> { },
|
||||
}:
|
||||
let
|
||||
inherit (pkgs.checkpointBuildTools)
|
||||
prepareCheckpointBuild
|
||||
mkCheckpointBuild
|
||||
;
|
||||
inherit (pkgs.checkpointBuildTools) prepareCheckpointBuild mkCheckpointBuild;
|
||||
helloCheckpoint = prepareCheckpointBuild pkgs.hello;
|
||||
changedHello = pkgs.hello.overrideAttrs (_: {
|
||||
doCheck = false;
|
||||
|
||||
@@ -15,9 +15,7 @@ If the `moduleNames` argument is omitted, `hasPkgConfigModules` will use `meta.p
|
||||
|
||||
```nix
|
||||
{
|
||||
passthru.tests.pkg-config = testers.hasPkgConfigModules {
|
||||
package = finalAttrs.finalPackage;
|
||||
};
|
||||
passthru.tests.pkg-config = testers.hasPkgConfigModules { package = finalAttrs.finalPackage; };
|
||||
|
||||
meta.pkgConfigModules = [ "libfoo" ];
|
||||
}
|
||||
@@ -54,9 +52,7 @@ If you have a static site that can be built with Nix, you can use `lycheeLinkChe
|
||||
# Check hyperlinks in the `nix` documentation
|
||||
|
||||
```nix
|
||||
testers.lycheeLinkCheck {
|
||||
site = nix.doc + "/share/doc/nix/manual";
|
||||
}
|
||||
testers.lycheeLinkCheck { site = nix.doc + "/share/doc/nix/manual"; }
|
||||
```
|
||||
|
||||
:::
|
||||
@@ -249,9 +245,7 @@ The default argument to the command is `--version`, and the version to be checke
|
||||
This example will run the command `hello --version`, and then check that the version of the `hello` package is in the output of the command.
|
||||
|
||||
```nix
|
||||
{
|
||||
passthru.tests.version = testers.testVersion { package = hello; };
|
||||
}
|
||||
{ passthru.tests.version = testers.testVersion { package = hello; }; }
|
||||
```
|
||||
|
||||
:::
|
||||
|
||||
@@ -152,9 +152,7 @@ runCommandWith {
|
||||
|
||||
Likewise, `runCommandCC name derivationArgs buildCommand` is equivalent to
|
||||
```nix
|
||||
runCommandWith {
|
||||
inherit name derivationArgs;
|
||||
} buildCommand
|
||||
runCommandWith { inherit name derivationArgs; } buildCommand
|
||||
```
|
||||
:::
|
||||
|
||||
@@ -713,7 +711,10 @@ concatTextFile
|
||||
# Writes contents of files to /nix/store/<store path>
|
||||
concatText
|
||||
"my-file"
|
||||
[ file1 file2 ]
|
||||
[
|
||||
file1
|
||||
file2
|
||||
]
|
||||
|
||||
# Writes contents of files to /nix/store/<store path>
|
||||
concatScript
|
||||
@@ -790,7 +791,7 @@ The result is equivalent to the output of `nix-store -q --requisites`.
|
||||
For example,
|
||||
|
||||
```nix
|
||||
writeClosure [ (writeScriptBin "hi" ''${hello}/bin/hello'') ]
|
||||
writeClosure [ (writeScriptBin "hi" "${hello}/bin/hello") ]
|
||||
```
|
||||
|
||||
produces an output path `/nix/store/<hash>-runtime-deps` containing
|
||||
@@ -816,7 +817,7 @@ This produces the equivalent of `nix-store -q --references`.
|
||||
For example,
|
||||
|
||||
```nix
|
||||
writeDirectReferencesToFile (writeScriptBin "hi" ''${hello}/bin/hello'')
|
||||
writeDirectReferencesToFile (writeScriptBin "hi" "${hello}/bin/hello")
|
||||
```
|
||||
|
||||
produces an output path `/nix/store/<hash>-runtime-references` containing
|
||||
|
||||
Some files were not shown because too many files have changed in this diff